From 0ca0e48b1cc8a776a3e34859b6ef265f77a61576 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 21 Dec 2023 19:00:09 +0100 Subject: [PATCH 01/93] Skeleton of Test_New builder API --- .../Standard/Test/0.0.0-dev/src/Test_New.enso | 63 +++++++++++++++++++ test/Tests/src/Data/Bool_Spec_New.enso | 17 +++++ 2 files changed, 80 insertions(+) create mode 100644 distribution/lib/Standard/Test/0.0.0-dev/src/Test_New.enso create mode 100644 test/Tests/src/Data/Bool_Spec_New.enso diff --git a/distribution/lib/Standard/Test/0.0.0-dev/src/Test_New.enso b/distribution/lib/Standard/Test/0.0.0-dev/src/Test_New.enso new file mode 100644 index 000000000000..14b49ec5b4bf --- /dev/null +++ b/distribution/lib/Standard/Test/0.0.0-dev/src/Test_New.enso @@ -0,0 +1,63 @@ +from Standard.Base import all +from Standard.Base.Runtime import assert + +type Test_Options + + +type Test_Builder + ## PRIVATE + Impl builder + + ## Add a group to the builder. + group : Text -> (Group_Builder -> Any) -> Nothing + group self (name:Text) fn = + b = Vector.new_builder + fn (Group_Builder.Impl b) + self.builder.append <| Test.Group name b.to_vector + + +## Builder to create a group of tests. 
+type Group_Builder + ## PRIVATE + Impl builder + + specify : Text -> (Nothing -> Any) -> Nothing + specify self (name:Text) ~code = + self.builder.append <| Test.Spec name (_ -> code) + Nothing + + +type Test + All (groups : Vector Test) + Group (name : Text) (specs : Vector Test) + Spec (name : Text) (code : Any -> Any) + + ## Construct a Test object + build : (Test_Builder -> Any) -> Test + build fn = + b = Vector.new_builder + fn (Test_Builder.Impl b) + groups_vec = b.to_vector + Test.All groups_vec + + run_main self = + IO.println "Test.run_main" + self.groups.each group-> + self.run_group group + + ## PRIVATE + run_group self group = + case group of + Test.Group name specs -> + IO.println <| "Running group '" + group.name + "'" + group.specs.each spec-> + self.run_spec spec + _ -> Error.throw "Unreachable" + + ## PRIVATE + run_spec self spec = + case spec of + Test.Spec spec_name code -> + IO.println <| " Running spec '" + spec.name + "'" + spec.code Nothing + _ -> Error.throw "Unreachable" diff --git a/test/Tests/src/Data/Bool_Spec_New.enso b/test/Tests/src/Data/Bool_Spec_New.enso new file mode 100644 index 000000000000..bb640c72d3b5 --- /dev/null +++ b/test/Tests/src/Data/Bool_Spec_New.enso @@ -0,0 +1,17 @@ +from Standard.Base import all +from Standard.Test.Test_New import Test + + +collect_tests = Test.build builder-> + builder.group "Booleans" group_builder-> + group_builder.specify "should allow converting Bools to Text values" <| + IO.println "First spec" + True.to_text == "True" + + group_builder.specify "should allow for comparing Bools" <| + IO.println "Second spec" + (True == True) + + +main = + collect_tests.run_main From 1d6cdba3d9ffcb8d6d1c79da86db36eea7496730 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 22 Dec 2023 17:26:24 +0100 Subject: [PATCH 02/93] Add Test_New standard library. This lib will be developed alongside the old Test. 
--- .../lib/Standard/Test_New/0.0.0-dev/package.yaml | 10 ++++++++++ .../lib/Standard/Test_New/0.0.0-dev/src/Main.enso | 3 +++ .../{Test => Test_New}/0.0.0-dev/src/Test_New.enso | 0 project/Editions.scala | 1 + 4 files changed, 14 insertions(+) create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/package.yaml create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso rename distribution/lib/Standard/{Test => Test_New}/0.0.0-dev/src/Test_New.enso (100%) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/package.yaml b/distribution/lib/Standard/Test_New/0.0.0-dev/package.yaml new file mode 100644 index 000000000000..73ef40876908 --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/package.yaml @@ -0,0 +1,10 @@ +name: Test_New +namespace: Standard +version: 0.0.0-dev +license: APLv2 +authors: + - name: Enso Team + email: contact@enso.org +maintainers: + - name: Enso Team + email: contact@enso.org diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso new file mode 100644 index 000000000000..bf83cf71a59d --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso @@ -0,0 +1,3 @@ +import project.Test_New + +export project.Test_New diff --git a/distribution/lib/Standard/Test/0.0.0-dev/src/Test_New.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso similarity index 100% rename from distribution/lib/Standard/Test/0.0.0-dev/src/Test_New.enso rename to distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso diff --git a/project/Editions.scala b/project/Editions.scala index 97b09c24869a..ecaba3a6ccfe 100644 --- a/project/Editions.scala +++ b/project/Editions.scala @@ -10,6 +10,7 @@ object Editions { val standardLibraries: Seq[String] = Seq( "Standard.Base", "Standard.Test", + "Standard.Test_New", "Standard.Table", "Standard.Database", "Standard.AWS", From f31fd100af637982fa0c95463b5d818148851e4d Mon Sep 17 
00:00:00 2001 From: Pavel Marek Date: Fri, 22 Dec 2023 18:55:39 +0100 Subject: [PATCH 03/93] Copy some stuff from Test to Test_New --- .../Test_New/0.0.0-dev/src/Extensions.enso | 654 ++++++++++++++++++ .../Standard/Test_New/0.0.0-dev/src/Main.enso | 2 + .../Test_New/0.0.0-dev/src/Suite_Config.enso | 67 ++ .../Test_New/0.0.0-dev/src/Test_Result.enso | 36 + 4 files changed, 759 insertions(+) create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite_Config.enso create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Result.enso diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso new file mode 100644 index 000000000000..0755b1ec76de --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso @@ -0,0 +1,654 @@ +from Standard.Base import all +import Standard.Base.Errors.Common.No_Such_Method +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + +import project.Test_Result.Test_Result +from project.Test_New import Test + +## Expect a function to fail with the provided dataflow error. + + Arguments: + - matcher: The expected type of dataflow error contained in `self`. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that a computation should return an error of a given type. + + import Standard.Examples + from Standard.Test import Test + + example_should_fail_with = + Examples.throw_error . should_fail_with Examples.My_Error +Any.should_fail_with : Any -> Integer -> Test_Result +Any.should_fail_with self matcher frames_to_skip=0 = + loc = Meta.get_source_location 1+frames_to_skip + matcher_text = matcher . 
to_text + Test.fail ("Expected an error " + matcher_text + " but no error occurred, instead got: " + self.to_text + " (at " + loc + ").") + +## Expect a function to fail with the provided dataflow error. + + Arguments: + - matcher: The expected type of dataflow error contained in `self`. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that a computation should return an error of a given type. + + import Standard.Examples + from Standard.Test import Test + + example_should_fail_with = + Examples.throw_error . should_fail_with Examples.My_Error +Error.should_fail_with : Any -> Integer -> Test_Result +Error.should_fail_with self matcher frames_to_skip=0 = + caught = self.catch + if caught == matcher || caught.is_a matcher then Nothing else + loc = Meta.get_source_location 2+frames_to_skip + matcher_text = matcher . to_text + Test.fail ("Expected error "+matcher_text+", but error " + caught.to_text + " has been returned (at " + loc + ").") + +## Asserts that `self` value is equal to the expected value. + + Arguments: + - that: The value to check `self` for equality with. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that one value should equal another, + + import Standard.Examples + from Standard.Test import Test + + example_should_equal = Examples.add_1_to 1 . should_equal 2 +Any.should_equal : Any -> Integer -> Test_Result +Any.should_equal self that frames_to_skip=0 = case self == that of + True -> Test_Result.Success + False -> + loc = Meta.get_source_location 2+frames_to_skip + additional_comment = case self of + _ : Vector -> case that of + _ : Vector -> + case self.length == that.length of + True -> + diff = self.zip that . 
index_of p-> + p.first != p.second + "; first difference at index " + diff.to_text + " " + False -> "; lengths differ (" + self.length.to_text + " != " + that.length.to_text + ") " + _ -> "" + _ -> "" + msg = self.pretty + " did not equal " + that.pretty + additional_comment + " (at " + loc + ")." + Test.fail msg + +## Asserts that `self` value is equal to the expected type value. + + Arguments: + - that: The type to check `self` for equality with. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that some type is equal to another., + + import Standard.Examples + from Standard.Test import Test + + example_should_equal = Examples.some_type . should_equal_type Vector +Any.should_equal_type : Any -> Integer -> Test_Result +Any.should_equal_type self that frames_to_skip=0 = case (self.is_same_object_as that) of + True -> Test_Result.Success + False -> + loc = Meta.get_source_location 2+frames_to_skip + msg = self.to_text + " did not equal type " + that.to_text + " (at " + loc + ")." + Test.fail msg + +## Added so that dataflow errors are not silently lost. +Error.should_equal_type self that frames_to_skip=0 = + _ = [that] + Test.fail_match_on_unexpected_error self 1+frames_to_skip + +## Asserts that `self` value is not equal to the expected value. + + Arguments: + - that: The value to check `self` for equality with. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that one value should equal another, + + import Standard.Examples + from Standard.Test import Test + + example_should_not_equal = Examples.add_1_to 1 . 
should_not_equal 2 +Any.should_not_equal : Any -> Integer -> Test_Result +Any.should_not_equal self that frames_to_skip=0 = case self != that of + True -> Test_Result.Success + False -> + loc = Meta.get_source_location 2+frames_to_skip + msg = self.to_text + " did equal " + that.to_text + " (at " + loc + ")." + Test.fail msg + +## Added so that dataflow errors are not silently lost. +Error.should_not_equal self that frames_to_skip=0 = + _ = [that] + Test.fail_match_on_unexpected_error self 1+frames_to_skip + +## Asserts that `self` value is not equal to the expected type value. + + Arguments: + - that: The type to check `self` for equality with. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that some type is equal to another., + + import Standard.Examples + from Standard.Test import Test + + example_should_not_equal = Examples.some_type . should_not_equal_type Vector +Any.should_not_equal_type : Any -> Integer -> Test_Result +Any.should_not_equal_type self that frames_to_skip=0 = case (self.is_same_object_as that . not) of + True -> Test_Result.Success + False -> + loc = Meta.get_source_location 2+frames_to_skip + msg = self.to_text + " did equal type " + that.to_text + " (at " + loc + ")." + Test.fail msg + +## Added so that dataflow errors are not silently lost. +Error.should_not_equal_type self that frames_to_skip=0 = + _ = [that] + Test.fail_match_on_unexpected_error self 1+frames_to_skip + +## Asserts that `self` value is a Text value and starts with `that`. + + Arguments: + - that: The value to check `self` starts with. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that one value should start with another. + + from Standard.Test import Test + + example_should_start_with = "Hello World!" . 
should_start_with "Hello" +Any.should_start_with : Text -> Integer -> Test_Result +Any.should_start_with self that frames_to_skip=0 = case self of + _ : Text -> if self.starts_with that then Test_Result.Success else + loc = Meta.get_source_location 3+frames_to_skip + msg = self.to_text + " does not start with " + that.to_text + " (at " + loc + ")." + Test.fail msg + _ -> + loc = Meta.get_source_location 2+frames_to_skip + msg = self.to_text + " is not a `Text` value (at " + loc + ")." + Test.fail msg + +## Asserts that `self` value is a Text value and ends with `that`. + + Arguments: + - that: The value to check `self` ends with. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that one value should end with another. + + from Standard.Test import Test + + example_should_end_with = "Hello World!" . should_end_with "ld!" +Any.should_end_with : Text -> Integer -> Test_Result +Any.should_end_with self that frames_to_skip=0 = case self of + _ : Text -> if self.ends_with that then Test_Result.Success else + loc = Meta.get_source_location 3+frames_to_skip + msg = self.to_text + " does not end with " + that.to_text + " (at " + loc + ")." + Test.fail msg + _ -> + loc = Meta.get_source_location 2+frames_to_skip + msg = self.to_text + " is not a `Text` value (at " + loc + ")." + Test.fail msg + +## Asserts that `self` value is a Text value and starts with `that`. + + Arguments: + - that: The value to check `self` starts with. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that one value should start with another. + + from Standard.Test import Test + + example_should_start_with = "Hello World!" . 
should_start_with "Hello" +Error.should_start_with : Any -> Integer -> Test_Result +Error.should_start_with self that frames_to_skip=0 = + _ = [that] + Test.fail_match_on_unexpected_error self 1+frames_to_skip + +## Asserts that `self` value is a Text value and ends with `that`. + + Arguments: + - that: The value to check `self` ends with. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that one value should end with another. + + from Standard.Test import Test + + example_should_end_with = "Hello World!" . should_end_with "ld!" +Error.should_end_with : Any -> Integer -> Test_Result +Error.should_end_with self that frames_to_skip=0 = + _ = [that] + Test.fail_match_on_unexpected_error self 1+frames_to_skip + +## Asserts that `self` value is equal to the expected value. + + Arguments: + - _: The value to check `self` for equality with. + + > Example + Assert that one value should equal another, + + import Standard.Examples + from Standard.Test import Test + + example_should_equal = Examples.add_1_to 1 . should_equal 2 +Error.should_equal : Any -> Integer -> Test_Result +Error.should_equal self that frames_to_skip=0 = + _ = [that] + Test.fail_match_on_unexpected_error self 1+frames_to_skip + +## Asserts that `self` is within `epsilon` from `that`. + + Arguments: + - that: The value to compare `self` for equality with. + - epsilon: The epislon for comparing two float numbers. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Compare two float values. + + from Standard.Test import Test + + example_should_equal = 1.1 . should_equal 1.1 + + > Example + Compare two float values with an epsilon (tolerance). + + from Standard.Test import Test + + example_should_equal = + 1.00000001 . 
should_equal 1.00000002 epsilon=0.0001 +Number.should_equal : Float -> Float -> Integer -> Test_Result +Number.should_equal self that epsilon=0 frames_to_skip=0 = + matches = case that of + _ : Number -> self.equals that epsilon + _ -> False + case matches of + True -> Test_Result.Success + False -> + loc = Meta.get_source_location 2+frames_to_skip + msg = self.to_text + " did not equal " + that.to_text + " (at " + loc + ")." + Test.fail msg + +## Asserts that `self` value is not an error. + + It returns the original value, so that it can be inspected further. + + Arguments: + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that a given action did not result in errors or warnings. + + "foobar".write (enso_project.data / "f.txt") . should_succeed +Any.should_succeed : Integer -> Any +Any.should_succeed self frames_to_skip=0 = + _ = frames_to_skip + self + +## Asserts that `self` value is not an error. + + It returns the original value, so that it can be inspected further. + + Arguments: + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that a given action did not result in errors or warnings. + + "foobar".write (enso_project.data / "f.txt") . should_succeed +Error.should_succeed : Integer -> Any +Error.should_succeed self frames_to_skip=0 = + Test.fail_match_on_unexpected_error self 1+frames_to_skip + +## Handles an unexpected dataflow error. +Error.should_be_a : Any -> Integer -> Any +Error.should_be_a self typ frames_to_skip=0 = + _ = typ + Test.fail_match_on_unexpected_error self 1+frames_to_skip + +## Asserts that the given `Boolean` is `True` + + > Example + Assert that a boolean value is true. + + import Standard.Examples + from Standard.Test import Test + + example_should_be_true = Examples.get_boolean . 
should_be_true +Boolean.should_be_true : Test_Result +Boolean.should_be_true self = case self of + True -> Test_Result.Success + False -> + loc = Meta.get_source_location 2 + Test.fail "Expected False to be True (at "+loc+")." + +## Asserts that the given `Boolean` is `True`. + + > Example + Assert that a boolean value is true. + + import Standard.Examples + from Standard.Test import Test + + example_should_be_true = Examples.get_boolean . should_be_true +Error.should_be_true : Test_Result +Error.should_be_true self = Test.fail_match_on_unexpected_error self 1 + +## Asserts that the given `Boolean` is `False` + + > Example + Assert that a boolean value is false. + + import Standard.Examples + from Standard.Test import Test + + example_should_be_false = Examples.get_boolean . should_be_false +Boolean.should_be_false : Test_Result +Boolean.should_be_false self = case self of + True -> + loc = Meta.get_source_location 2 + Test.fail "Expected True to be False (at "+loc+")." + False -> Test_Result.Success + +## Asserts that the given `Boolean` is `False` + + > Example + Assert that a boolean value is false. + + import Standard.Examples + from Standard.Test import Test + + example_should_be_false = Examples.get_boolean . should_be_false +Error.should_be_false : Test_Result +Error.should_be_false self = Test.fail_match_on_unexpected_error self 1 + +## Asserts that a value is of a given type. + + Arguments: + - typ: The type to assert that `self` is a value of. + + > Examples + Assert that 1 is of type Boolean. + + from Standard.Test import Test + + example_should_be_a = 1.should_be_a Boolean +Any.should_be_a : Any -> Test_Result +Any.should_be_a self typ = + loc = Meta.get_source_location 1 + fail_on_wrong_arg_type = + Panic.throw <| + Illegal_Argument.Error "typ ("+typ.to_display_text+") must either be a type or a constructor. Use `should_equal` for value equality test instead." 
+ case Meta.meta typ of + c : Meta.Constructor -> case Meta.meta self of + a : Meta.Atom -> + if a.constructor == c then Test_Result.Success else + expected_type = Meta.get_qualified_type_name typ + actual_type = Meta.get_qualified_type_name self + message = "Expected a value of type "+expected_type+", built with constructor "+c.name+", but got a value of type "+actual_type+", built with constructor "+a.constructor.name+" instead (at "+loc+")." + Test.fail message + _ -> + expected_type = Meta.get_qualified_type_name typ + actual_type = Meta.get_qualified_type_name self + message = "Expected a value of type "+expected_type+", built with constructor "+c.name+", but got a value of type "+actual_type+" instead (at "+loc+")." + Test.fail message + _ : Meta.Type -> + ok = self.is_a typ || self==typ + if ok then Test_Result.Success else + expected_type = Meta.get_qualified_type_name typ + actual_type = Meta.get_qualified_type_name self + message = "Expected a value of type "+expected_type+" but got a value of type "+actual_type+" instead (at "+loc+")." + Test.fail message + # Workaround for 0-argument atom constructors which 'unapplies' them. + atom : Meta.Atom -> + ctor = atom . constructor + if ctor.fields.not_empty then fail_on_wrong_arg_type else + self.should_be_a (ctor.value ...) + _ : Meta.Polyglot -> + ok = self.is_a typ + if ok then Test_Result.Success else + actual_type = Meta.get_qualified_type_name self + message = "Expected a value of Java class "+typ.to_text+" but got a value of type "+actual_type+" instead (at "+loc+")." + Test.fail message + Meta.Primitive.Value (b : Boolean) -> + ok = self == b + if ok then Test_Result.Success else + actual_type = Meta.get_qualified_type_name self + message = "Expected a value of "+typ.to_text+" but got a value of type "+actual_type+" instead (at "+loc+")." + Test.fail message + _ -> fail_on_wrong_arg_type + +## Asserts that `self` value contains the same elements as `that`. 
+ + It only checks that all elements from one collection are also present in the + other one. Arities of elements are not checked, so the collections can still + differ in length by containing duplicate elements. + + It will work on any collection which supports the methods + `each : (Any -> Nothing) -> Any` and `contains : Any -> Boolean`. + + Arguments: + - that: The collection to compare. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that one vector should contain the same elements as another. + + import Standard.Examples + from Standard.Test import Test + + example_should_equal = [1, 2] . should_contain_the_same_elements_as [2, 1] +Any.should_contain_the_same_elements_as : Any -> Integer -> Test_Result +Any.should_contain_the_same_elements_as self that frames_to_skip=0 = + loc = Meta.get_source_location 1+frames_to_skip + that.each element-> + if self.contains element . not then + msg = "The collection (" + self.to_text + ") did not contain "+element.to_text+" (at " + loc + ")." + Test.fail msg + self.each element-> + if that.contains element . not then + msg = "The collection contained an element ("+element.to_text+") which was not expected (at " + loc + ")." + Test.fail msg + Test_Result.Success + +## Asserts that `self` value contains the same elements as `that`. + + It only checks that all elements from one collection are also present in the + other one. Arities of elements are not checked, so the collections can still + differ in length by containing duplicate elements. + + It will work on any collection which supports the methods + `each : (Any -> Nothing) -> Any` and `contains : Any -> Boolean`. + + Arguments: + - _: The collection to compare. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that one vector should contain the same elements as another. 
+ + import Standard.Examples + from Standard.Test import Test + + example_should_equal = [1, 2] . should_contain_the_same_elements_as [2, 1] +Error.should_contain_the_same_elements_as : Any -> Integer -> Test_Result +Error.should_contain_the_same_elements_as self that frames_to_skip=0 = + _ = [that] + Test.fail_match_on_unexpected_error self 1+frames_to_skip + +## Asserts that `self` value contains only elements in `that`. + + It checks that all elements from `self` are also present in `that`. It does + not require that all elements of `that` are contained in `self`. Arities of + elements are not checked, so `self` may still contain more elements than + `that` by containing duplicates. + + It will work on any collection which supports the methods + `each : (Any -> Nothing) -> Any` and `contains : Any -> Boolean`. + + Arguments: + - that: The collection to compare. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that one vector should contain only elements in another. + + import Standard.Examples + from Standard.Test import Test + + example_should_equal = [1, 2] . should_only_contain_elements_in [1, 2, 3, 4] +Any.should_only_contain_elements_in : Any -> Integer -> Test_Result +Any.should_only_contain_elements_in self that frames_to_skip=0 = + loc = Meta.get_source_location 1+frames_to_skip + self.each element-> + if that.contains element . not then + msg = "The collection contained an element ("+element.to_text+") which was not expected (at " + loc + ")." + Test.fail msg + Test_Result.Success + +## Asserts that `self` value contains only elements in `that`. + + It checks that all elements from `self` are also present in `that`. It does + not require that all elements of `that` are contained in `self`. Arities of + elements are not checked, so the collections can still differ in length by + containing duplicate elements. 
+ + It will work on any collection which supports the methods + `each : (Any -> Nothing) -> Any` and `contains : Any -> Boolean`. + + Arguments: + - that: The collection to compare. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that one vector should contain only elements in another. + + import Standard.Examples + from Standard.Test import Test + + example_should_equal = [1, 2] . should_only_contain_elements_in [1, 2, 3, 4] +Error.should_only_contain_elements_in : Any -> Integer -> Test_Result +Error.should_only_contain_elements_in self that frames_to_skip=0 = + _ = [that] + Test.fail_match_on_unexpected_error self 1+frames_to_skip + +## Asserts that `self` value contains an element. + + Arguments: + - element: The element to check. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + This method delegates to the `contains` method of `self` and will use the + rules of the particular type - be it a `Vector`, `Text` or any custom type + implementing a method `contains : a -> Boolean`. + + > Example + Assert that a string contains a substring. + + from Standard.Test import Test + + example_should_equal = "foobar".should_contain "foo" +Any.should_contain : Any -> Integer -> Test_Result +Any.should_contain self element frames_to_skip=0 = + loc = Meta.get_source_location 1+frames_to_skip + contains_result = Panic.catch No_Such_Method (self.contains element) caught_panic-> + if caught_panic.payload.method_name != "contains" then Panic.throw caught_panic else + msg = "The value (" + self.to_text + ") does not support the method `contains` (at " + loc + ")." + Test.fail msg + if contains_result then Test_Result.Success else + msg = "The value (" + self.to_text + ") did not contain the element (" + element.to_text + ") (at " + loc + ")." + Test.fail msg + +## Asserts that `self` value contains an element. 
+ + Arguments: + - element: The element to check. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + This method delegates to the `contains` method of `self` and will use the + rules of the particular type - be it a `Vector`, `Text` or any custom type + implementing a method `contains : a -> Boolean`. + + > Example + Assert that a string contains a substring. + + from Standard.Test import Test + + example_should_equal = "foobar".should_contain "foo" +Error.should_contain : Any -> Integer -> Test_Result +Error.should_contain self element frames_to_skip=0 = + _ = [element] + Test.fail_match_on_unexpected_error self 1+frames_to_skip + +## Asserts that `self` value does not contain an element. + + Arguments: + - element: The element to check. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + This method delegates to the `contains` method of `self` and will use the + rules of the particular type - be it a `Vector`, `Text` or any custom type + implementing a method `contains : a -> Boolean`. +Any.should_not_contain : Any -> Integer -> Test_Result +Any.should_not_contain self element frames_to_skip=0 = + loc = Meta.get_source_location 1+frames_to_skip + contains_result = Panic.catch No_Such_Method (self.contains element) caught_panic-> + if caught_panic.payload.method_name != "contains" then Panic.throw caught_panic else + msg = "The value (" + self.to_text + ") does not support the method `contains` (at " + loc + ")." + Test.fail msg + if contains_result.not then Test_Result.Success else + msg = "The value (" + self.to_text + ") contained the element (" + element.to_text + "), but it was expected to not contain it (at " + loc + ")." + Test.fail msg + +## Asserts that `self` value does not contain an element. + + Arguments: + - element: The element to check. 
+ - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + This method delegates to the `contains` method of `self` and will use the + rules of the particular type - be it a `Vector`, `Text` or any custom type + implementing a method `contains : a -> Boolean`. +Error.should_not_contain : Any -> Integer -> Test_Result +Error.should_not_contain self element frames_to_skip=0 = + _ = [element] + Test.fail_match_on_unexpected_error self 1+frames_to_skip diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso index bf83cf71a59d..2efa641188c3 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso @@ -1,3 +1,5 @@ import project.Test_New +from project.Extensions import all export project.Test_New +from project.Extensions export all diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite_Config.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite_Config.enso new file mode 100644 index 000000000000..cfc3a7f52981 --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite_Config.enso @@ -0,0 +1,67 @@ +from Standard.Base import all +import Standard.Base.Runtime.Source_Location.Source_Location +import Standard.Base.Runtime.Stack_Trace_Element + +polyglot java import java.lang.NullPointerException + +## PRIVATE +find_project_root : File -> File +find_project_root path = + if path.is_nothing then Nothing else + handler _ = Nothing + Panic.catch NullPointerException handler=handler <| + if path.name == "src" then path.parent else + @Tail_Call find_project_root path.parent + +## PRIVATE +find_caller_script : Vector Stack_Trace_Element -> File +find_caller_script stack = + find_main idx = + if stack.at idx . 
name == "Test_Suite.type.run_main" then idx else + @Tail_Call find_main (idx + 1) + main_index = find_main 0 + + find_caller idx = + source = stack.at idx . source_location + case source of + _ : Source_Location -> stack.at idx . source_location . file + _ -> + if (idx + 1 == stack.length) then Nothing else + @Tail_Call find_caller (idx + 1) + + find_caller (main_index + 1) + +## Holds configuration for a Test_Suite +type Suite_Config + ## Creates an Suite_Config based off environment and caller location + from_environment : Suite_Config + from_environment = + only_group_regexp = Environment.get "TEST_ONLY_GROUP" + print_only_failures = Environment.get "REPORT_ONLY_FAILED" != Nothing + junit_folder = Environment.get "ENSO_TEST_JUNIT_DIR" + results_path = if junit_folder.is_nothing then Nothing else + caller_script = find_caller_script Runtime.get_stack_trace + project_root = find_project_root caller_script + case project_root.is_nothing of + True -> + IO.println "Unable to determine root project path. JUnit output disabled." + Nothing + False -> + (File.new junit_folder) / project_root.name / "JUnit.xml" + + Suite_Config.Value only_group_regexp print_only_failures results_path + + ## PRIVATE + Construct a configuration + Value only_group_regexp print_only_failures output_path + + ## Should a specific group be run. + should_run_group self name = + regexp = self.only_group_regexp + case regexp of + _ : Text -> name.match regexp . catch Any (_->True) + _ -> True + + ## Should the results be written to JUnit XML file. 
+ should_output_junit self = + self.output_path.is_nothing.not diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Result.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Result.enso new file mode 100644 index 000000000000..ab8b19279a01 --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Result.enso @@ -0,0 +1,36 @@ +from Standard.Base import all + +type Test_Result + ## Represents a successful behavioral test. + Success + + ## Represents a failing behavioral test. + + Arguments: + - message: The reason why the test failed. + - details: Additional context of the error, for example the stack trace. + Failure message details=Nothing + + ## Represents a pending behavioral test. + + Arguments: + - reason: Text describing why the test is pending. + Pending reason + + ## Checks if the Test_Result is pending. + is_pending : Boolean + is_pending self = case self of + Test_Result.Pending _ -> True + _ -> False + + ## Checks if the Test_Result is a failure. + is_fail : Boolean + is_fail self = case self of + Test_Result.Failure _ _ -> True + _ -> False + + ## Checks if the Test_Result is a success. 
+ is_success : Boolean + is_success self = case self of + Test_Result.Success -> True + _ -> False From 0e005865806da168b3e9dcf913d8a91b84800dd6 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 22 Dec 2023 18:56:13 +0100 Subject: [PATCH 04/93] Test_New accepts filters --- .../Test_New/0.0.0-dev/src/Test_New.enso | 53 +++++++++++++++++-- test/Tests/src/Data/Bool_Spec_New.enso | 15 +++--- 2 files changed, 57 insertions(+), 11 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso index 14b49ec5b4bf..b66cc93717bd 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso @@ -1,5 +1,6 @@ from Standard.Base import all from Standard.Base.Runtime import assert +import project.Test_Result.Test_Result type Test_Options @@ -45,6 +46,22 @@ type Test self.groups.each group-> self.run_group group + run_with_filter : (Text|Regex|Nothing) -> (Text|Regex|Nothing) -> Test_Result + run_with_filter self group_filter=Nothing spec_filter=Nothing = + convert_filter filter = + case filter of + Nothing -> Regex.compile ".*" + txt : Text -> Regex.compile txt + rgx : Regex -> rgx + grp_filter = convert_filter group_filter + spc_filter = convert_filter spec_filter + IO.println "Test.run_with_filter" + groups_to_run = self.groups.filter (group-> grp_filter.matches group.name) + IO.println <| "Will run groups: " + groups_to_run.to_text + groups_to_run.each group-> + # TODO: Filter specs + self.run_group group + ## PRIVATE run_group self group = case group of @@ -52,12 +69,40 @@ type Test IO.println <| "Running group '" + group.name + "'" group.specs.each spec-> self.run_spec spec - _ -> Error.throw "Unreachable" + _ -> Panic.throw "Unreachable" ## PRIVATE run_spec self spec = case spec of Test.Spec spec_name code -> - IO.println <| " Running spec '" + spec.name + "'" - spec.code Nothing - _ -> Error.throw 
"Unreachable" + IO.println <| " Running spec '" + spec_name + "'" + pair = Duration.time_execution (code Nothing) + res = pair.first + duration = pair.second + IO.println <| " Spec '" + spec_name + "' finished in " + duration.to_text + ", result: " + res.to_text + _ -> Panic.throw "Unreachable" + + ## Fail a test with the given message. + + Arguments: + - message: The message printed when failing the test. + + > Example + Failing a test manually. + + from Standard.Test import Test + + example_fail = Test.fail "Something went wrong." + fail : Text -> Nothing|Text -> Test_Result + fail message details=Nothing = + failure = Test_Result.Failure message details + Panic.throw failure + + ## PRIVATE + Reports an unexpected dataflow error has occurred. + fail_match_on_unexpected_error : Error -> Integer -> Nothing + fail_match_on_unexpected_error error frames_to_skip = + payload = error.catch + loc = Meta.get_source_location 1+frames_to_skip + msg = "An unexpected dataflow error (" + payload.to_text + ") has been matched (at " + loc + ")." 
+ Test.fail msg+'\n'+error.get_stack_trace_text diff --git a/test/Tests/src/Data/Bool_Spec_New.enso b/test/Tests/src/Data/Bool_Spec_New.enso index bb640c72d3b5..ad21cffb300f 100644 --- a/test/Tests/src/Data/Bool_Spec_New.enso +++ b/test/Tests/src/Data/Bool_Spec_New.enso @@ -1,17 +1,18 @@ from Standard.Base import all -from Standard.Test.Test_New import Test +from Standard.Test_New.Test_New import Test +import Standard.Test_New.Extensions collect_tests = Test.build builder-> builder.group "Booleans" group_builder-> group_builder.specify "should allow converting Bools to Text values" <| - IO.println "First spec" - True.to_text == "True" + IO.println " In first spec" + True.to_text.should_equal "True" - group_builder.specify "should allow for comparing Bools" <| - IO.println "Second spec" - (True == True) + group_builder.specify "test that should fail" <| + IO.println " In second spec" + True.to_text.should_equal "False" main = - collect_tests.run_main + collect_tests.run_with_filter Nothing Nothing From b2da2ce27bf009cbb56040b0e3398346042ff156 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Wed, 27 Dec 2023 12:27:47 +0100 Subject: [PATCH 05/93] Add Proposition_1.enso prototype --- test/Tests/src/Proposition_1.enso | 120 ++++++++++++++++++++++++++++++ 1 file changed, 120 insertions(+) create mode 100644 test/Tests/src/Proposition_1.enso diff --git a/test/Tests/src/Proposition_1.enso b/test/Tests/src/Proposition_1.enso new file mode 100644 index 000000000000..b5a1f4f1485d --- /dev/null +++ b/test/Tests/src/Proposition_1.enso @@ -0,0 +1,120 @@ +from Standard.Base import all +import Standard.Base.Runtime.State +polyglot java import java.lang.StringBuilder + + +type Group_Builder +type Test_Suite_Builder + + +type Spec + Impl name ~code + + to_text self = self.name + + run : Any + run self = + IO.println <| " Running spec '" + self.name + "'" + self.code + + +type Group + Impl (name : Text) (specs : Vector Spec) + + to_text self = + sb = StringBuilder.new + sb.append 
("Group '" + self.name + "': specs = [") + self.specs.each spec-> + sb.append (spec.to_text + ", ") + sb.append "]" + sb.toString + + run : Nothing + run self = + IO.println ("Running group '" + self.name + "'") + self.specs.each spec-> + spec.run + + +## Contains just static methods +type Test + group : Text -> Vector Any -> Nothing + group group_name ~specs = + suite_bldr = State.get Test_Suite_Builder + grp_bldr = Vector.new_builder + State.run Group_Builder grp_bldr <| + specs + grp = Group.Impl group_name grp_bldr.to_vector + suite_bldr.append grp + + specify : Text -> (Any -> Any) -> Spec + specify spec_name ~code = + grp_bldr = State.get Group_Builder + grp_bldr.append (Spec.Impl spec_name code) + + +type Test_Suite + Impl (groups : Vector Group) + + collect : (Any -> Any) -> Test_Suite + collect ~groups = + suite_bldr = Vector.new_builder + State.run Test_Suite_Builder suite_bldr <| + groups + Test_Suite.Impl suite_bldr.to_vector + + run_all : Nothing + run_all self = + self.groups.each group-> + group.run + + ## Returns all test names, in form `:` + test_names : Vector Text + test_names self = + self.groups.fold [] acc->group-> + spec_names = group.specs.map spec-> + group.name + ":" + spec.name + acc + spec_names + + +## Encapsulates the data that are lazily initialized for the tests +type Data + Impl ~vec + + create_vec size = + IO.println "Creating test data" + bldr = Vector.new_builder + 0.up_to size . 
each (idx-> bldr.append (idx + 1)) + bldr.to_vector + + +specs = + data = Data.Impl (Data.create_vec 10) + + Test.group "my-group" <| + Test.specify "my-spec-1" <| + IO.println " In my-spec-1" + + Test.specify "my-spec-2" <| + IO.println " In my-spec-2" + res = data.vec.distinct.length == data.vec.length + IO.println <| " my-spec-2: res = " + res.to_text + res + + Test.group "my-group-2" <| + Test.specify "my-spec-3" <| + IO.println " In my-spec-3" + 1 == 1 + + +test_suite = Test_Suite.collect specs + + +main = + IO.println "All test names:" + test_suite.test_names.each test_name-> + IO.println <| " " + test_name + IO.println "Running tests..." + test_suite.run_all + IO.println "Done running tests." + From c72967fa2092bbbe71725f84aaea0aa240c91362 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Wed, 27 Dec 2023 12:33:10 +0100 Subject: [PATCH 06/93] Revert "Test_New accepts filters" This reverts commit 0e005865806da168b3e9dcf913d8a91b84800dd6. --- .../Test_New/0.0.0-dev/src/Test_New.enso | 53 ++----------------- test/Tests/src/Data/Bool_Spec_New.enso | 15 +++--- 2 files changed, 11 insertions(+), 57 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso index b66cc93717bd..14b49ec5b4bf 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso @@ -1,6 +1,5 @@ from Standard.Base import all from Standard.Base.Runtime import assert -import project.Test_Result.Test_Result type Test_Options @@ -46,22 +45,6 @@ type Test self.groups.each group-> self.run_group group - run_with_filter : (Text|Regex|Nothing) -> (Text|Regex|Nothing) -> Test_Result - run_with_filter self group_filter=Nothing spec_filter=Nothing = - convert_filter filter = - case filter of - Nothing -> Regex.compile ".*" - txt : Text -> Regex.compile txt - rgx : Regex -> rgx - grp_filter = convert_filter group_filter - spc_filter 
= convert_filter spec_filter - IO.println "Test.run_with_filter" - groups_to_run = self.groups.filter (group-> grp_filter.matches group.name) - IO.println <| "Will run groups: " + groups_to_run.to_text - groups_to_run.each group-> - # TODO: Filter specs - self.run_group group - ## PRIVATE run_group self group = case group of @@ -69,40 +52,12 @@ type Test IO.println <| "Running group '" + group.name + "'" group.specs.each spec-> self.run_spec spec - _ -> Panic.throw "Unreachable" + _ -> Error.throw "Unreachable" ## PRIVATE run_spec self spec = case spec of Test.Spec spec_name code -> - IO.println <| " Running spec '" + spec_name + "'" - pair = Duration.time_execution (code Nothing) - res = pair.first - duration = pair.second - IO.println <| " Spec '" + spec_name + "' finished in " + duration.to_text + ", result: " + res.to_text - _ -> Panic.throw "Unreachable" - - ## Fail a test with the given message. - - Arguments: - - message: The message printed when failing the test. - - > Example - Failing a test manually. - - from Standard.Test import Test - - example_fail = Test.fail "Something went wrong." - fail : Text -> Nothing|Text -> Test_Result - fail message details=Nothing = - failure = Test_Result.Failure message details - Panic.throw failure - - ## PRIVATE - Reports an unexpected dataflow error has occurred. - fail_match_on_unexpected_error : Error -> Integer -> Nothing - fail_match_on_unexpected_error error frames_to_skip = - payload = error.catch - loc = Meta.get_source_location 1+frames_to_skip - msg = "An unexpected dataflow error (" + payload.to_text + ") has been matched (at " + loc + ")." 
- Test.fail msg+'\n'+error.get_stack_trace_text + IO.println <| " Running spec '" + spec.name + "'" + spec.code Nothing + _ -> Error.throw "Unreachable" diff --git a/test/Tests/src/Data/Bool_Spec_New.enso b/test/Tests/src/Data/Bool_Spec_New.enso index ad21cffb300f..bb640c72d3b5 100644 --- a/test/Tests/src/Data/Bool_Spec_New.enso +++ b/test/Tests/src/Data/Bool_Spec_New.enso @@ -1,18 +1,17 @@ from Standard.Base import all -from Standard.Test_New.Test_New import Test -import Standard.Test_New.Extensions +from Standard.Test.Test_New import Test collect_tests = Test.build builder-> builder.group "Booleans" group_builder-> group_builder.specify "should allow converting Bools to Text values" <| - IO.println " In first spec" - True.to_text.should_equal "True" + IO.println "First spec" + True.to_text == "True" - group_builder.specify "test that should fail" <| - IO.println " In second spec" - True.to_text.should_equal "False" + group_builder.specify "should allow for comparing Bools" <| + IO.println "Second spec" + (True == True) main = - collect_tests.run_with_filter Nothing Nothing + collect_tests.run_main From 987efb6d846648dcf09e0b994950369e73eedb37 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Wed, 27 Dec 2023 12:38:00 +0100 Subject: [PATCH 07/93] Fix imports in Proposition (2) and add some prints --- .../lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso | 5 ++++- test/Tests/src/Data/Bool_Spec_New.enso | 6 +++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso index 14b49ec5b4bf..dfd4ccf1a62b 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso @@ -35,15 +35,18 @@ type Test ## Construct a Test object build : (Test_Builder -> Any) -> Test build fn = + IO.println "Running `Test.build` (collecting tests)..." 
b = Vector.new_builder fn (Test_Builder.Impl b) groups_vec = b.to_vector + IO.println "Done with `Test.build` (collecting tests)" Test.All groups_vec run_main self = - IO.println "Test.run_main" + IO.println "Running `Test.run_main` - running all tests" self.groups.each group-> self.run_group group + IO.println "Done with `Test.run_main` - running all tests" ## PRIVATE run_group self group = diff --git a/test/Tests/src/Data/Bool_Spec_New.enso b/test/Tests/src/Data/Bool_Spec_New.enso index bb640c72d3b5..7a23ced4de33 100644 --- a/test/Tests/src/Data/Bool_Spec_New.enso +++ b/test/Tests/src/Data/Bool_Spec_New.enso @@ -1,15 +1,15 @@ from Standard.Base import all -from Standard.Test.Test_New import Test +from Standard.Test_New.Test_New import Test collect_tests = Test.build builder-> builder.group "Booleans" group_builder-> group_builder.specify "should allow converting Bools to Text values" <| - IO.println "First spec" + IO.println " In First spec" True.to_text == "True" group_builder.specify "should allow for comparing Bools" <| - IO.println "Second spec" + IO.println " In Second spec" (True == True) From 38d85ffd8d11f404e5d1b5c2cfb0dfb94fbb9598 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 29 Dec 2023 17:14:56 +0100 Subject: [PATCH 08/93] Rename Test_New.enso to Test.enso --- .../lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso | 2 +- distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso | 4 ++-- .../Test_New/0.0.0-dev/src/{Test_New.enso => Test.enso} | 0 test/Tests/src/Data/Bool_Spec_New.enso | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) rename distribution/lib/Standard/Test_New/0.0.0-dev/src/{Test_New.enso => Test.enso} (100%) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso index 0755b1ec76de..237c9a68cf08 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso +++ 
b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso @@ -3,7 +3,7 @@ import Standard.Base.Errors.Common.No_Such_Method import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import project.Test_Result.Test_Result -from project.Test_New import Test +import project.Test.Test ## Expect a function to fail with the provided dataflow error. diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso index 2efa641188c3..147d8a02f173 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso @@ -1,5 +1,5 @@ -import project.Test_New +import project.Test.Test from project.Extensions import all -export project.Test_New +export project.Test.Test from project.Extensions export all diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso similarity index 100% rename from distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_New.enso rename to distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso diff --git a/test/Tests/src/Data/Bool_Spec_New.enso b/test/Tests/src/Data/Bool_Spec_New.enso index 7a23ced4de33..8961f3d725d6 100644 --- a/test/Tests/src/Data/Bool_Spec_New.enso +++ b/test/Tests/src/Data/Bool_Spec_New.enso @@ -1,5 +1,5 @@ from Standard.Base import all -from Standard.Test_New.Test_New import Test +from Standard.Test_New import Test collect_tests = Test.build builder-> From 268234febca4b2fc2112a88da6319ab4bae0fbcb Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 29 Dec 2023 18:24:00 +0100 Subject: [PATCH 09/93] Refactor Test_New/Test to multiple source files --- .../Standard/Test_New/0.0.0-dev/src/Clue.enso | 11 ++ .../Test_New/0.0.0-dev/src/Group.enso | 33 ++++ .../Standard/Test_New/0.0.0-dev/src/Main.enso | 2 + .../Standard/Test_New/0.0.0-dev/src/Spec.enso | 12 ++ .../Test_New/0.0.0-dev/src/Suite.enso | 35 ++++ 
.../Standard/Test_New/0.0.0-dev/src/Test.enso | 174 ++++++++++++------ test/Tests/src/Data/Bool_Spec_New.enso | 49 ++++- 7 files changed, 257 insertions(+), 59 deletions(-) create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Clue.enso create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Clue.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Clue.enso new file mode 100644 index 000000000000..970de99923ea --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Clue.enso @@ -0,0 +1,11 @@ +private + +## PRIVATE +type Clue + ## PRIVATE + + Represents a clue as to why a test failed + + Arguments: + - add_clue: either Nothing or a function which modifies a failure message + Value add_clue diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso new file mode 100644 index 000000000000..4cae58261baf --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso @@ -0,0 +1,33 @@ +private + +from Standard.Base import all +import project.Spec.Spec +polyglot java import java.lang.StringBuilder + +type Group_Builder + ## PRIVATE + Impl builder + + ## Specifies a single test. + specify : Text -> (Nothing -> Any) -> Nothing + specify self (name:Text) ~code = + self.builder.append <| Spec.Impl name (_ -> code) + Nothing + + +type Group + Impl (name : Text) (specs : Vector Spec) + + to_text self = + sb = StringBuilder.new + sb.append ("Group '" + self.name + "' specs=[") + self.specs.each spec-> + sb.append (spec.to_text + ", ") + sb.append "]" + sb.toString + + run self = + IO.println <| "Running group " + self.to_text + "..." 
+ self.specs.each spec-> + spec.run + IO.println <| "Done running group" diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso index 147d8a02f173..5558bbccef71 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso @@ -1,5 +1,7 @@ +import project.Suite.Suite import project.Test.Test from project.Extensions import all +export project.Suite.Suite export project.Test.Test from project.Extensions export all diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso new file mode 100644 index 000000000000..e2d3ac1a3b4e --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso @@ -0,0 +1,12 @@ +from Standard.Base import all + +private + +type Spec + Impl (name : Text) (code : Any -> Any) + + to_text self = "Spec '" + self.name + "'" + + run self = + IO.println <| "Running spec " + self.to_text + "..." + self.code Nothing diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso new file mode 100644 index 000000000000..d9cc56194248 --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -0,0 +1,35 @@ + +from Standard.Base import all +import project.Group.Group +import project.Group.Group_Builder + +type Suite_Builder + ## PRIVATE + Impl builder + + ## Add a group to the builder. + group : Text -> (Group_Builder -> Any) -> Nothing + group self (name:Text) fn = + b = Vector.new_builder + fn (Group_Builder.Impl b) + self.builder.append <| Group.Impl name b.to_vector + + +type Suite + Impl (groups : Vector Group) + + ## Construct a Suite object + build : (Suite_Builder -> Any) -> Suite + build fn = + IO.println "Running `Suite.build` (collecting tests)..." 
+ b = Vector.new_builder + fn (Suite_Builder.Impl b) + groups_vec = b.to_vector + IO.println "Done with `Suite.build` (collecting tests)" + Suite.Impl groups_vec + + run_all self = + IO.println "Running `Suite.run_all` - running all tests" + self.groups.each group-> + group.run + IO.println "Done with `Suite.run_all` - running all tests" diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso index dfd4ccf1a62b..7fea62520488 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso @@ -1,66 +1,136 @@ from Standard.Base import all -from Standard.Base.Runtime import assert +from Standard.Base.Runtime import State +import project.Test_Result.Test_Result +import project.Clue.Clue -type Test_Options +## Contains only static methods +type Test + ## Expect a function to fail with the provided panic. -type Test_Builder - ## PRIVATE - Impl builder + Arguments: + - action: The action to evaluate that is expected to fail with a panic. + - matcher: The expected type of the panic thrown by `action`. - ## Add a group to the builder. - group : Text -> (Group_Builder -> Any) -> Nothing - group self (name:Text) fn = - b = Vector.new_builder - fn (Group_Builder.Impl b) - self.builder.append <| Test.Group name b.to_vector + > Example + Expect that a computation should panic as part of a test. + import Standard.Examples + from Standard.Test import Test -## Builder to create a group of tests. 
-type Group_Builder - ## PRIVATE - Impl builder + example_expect_panic_with = + Test.expect_panic_with Examples.throw_panic Examples.My_Error + expect_panic_with : Any -> Any -> Test_Result + expect_panic_with ~action matcher = + res = Panic.recover Any action + case res of + _ -> + loc = Meta.get_source_location 2 + return_suffix = if res.is_nothing then "" else "and returned ["+res.to_text+"]" + Test.fail ("Expected a " + matcher.to_text + " to be thrown, but the action succeeded " + return_suffix + " (at "+loc+").") + err = res.catch + if err.is_a matcher then Nothing else + Test.fail ("Expected a " + matcher.to_text + ", but " + err.to_text + " was thrown instead.") - specify : Text -> (Nothing -> Any) -> Nothing - specify self (name:Text) ~code = - self.builder.append <| Test.Spec name (_ -> code) - Nothing + ## Expect a function to fail with the provided panic. + An alternative API to `expect_panic_with` where the order of arguments is + more natural - as it allows blocks without reordering the arguments. -type Test - All (groups : Vector Test) - Group (name : Text) (specs : Vector Test) - Spec (name : Text) (code : Any -> Any) - - ## Construct a Test object - build : (Test_Builder -> Any) -> Test - build fn = - IO.println "Running `Test.build` (collecting tests)..." - b = Vector.new_builder - fn (Test_Builder.Impl b) - groups_vec = b.to_vector - IO.println "Done with `Test.build` (collecting tests)" - Test.All groups_vec - - run_main self = - IO.println "Running `Test.run_main` - running all tests" - self.groups.each group-> - self.run_group group - IO.println "Done with `Test.run_main` - running all tests" + Arguments: + - matcher: The expected type of the panic thrown by `action`. + - action: The action to evaluate that is expected to fail with a panic. + + > Example + Expect that a computation should panic as part of a test. 
+ + import Standard.Examples + from Standard.Test import Test + + example_expect_panic_with = + Test.expect_panic_with Examples.My_Error <| + IO.println 'hello' + Examples.throw_panic + IO.println 'this is not reached' + expect_panic : Any -> Any -> Test_Result + expect_panic matcher ~action = Test.expect_panic_with action matcher + + + ## Checks that the provided action returns without any errors or warnings. + + If you just want to check for errors, usage of the `.should_succeed` + extension function is preferred. + assert_no_problems value frames_to_skip=0 = + value.catch Any _-> + Test.fail_match_on_unexpected_error value 2+frames_to_skip + warnings = Warning.get_all value . map .value + if warnings.not_empty then + loc = Meta.get_source_location 2+frames_to_skip + msg = "The action returned unexpected warnings: " + warnings.to_text + " (at " + loc + ")." + Test.fail msg + + ## Fail a test with the given message. + + Arguments: + - message: The message printed when failing the test. + + > Example + Failing a test manually. + + from Standard.Test import Test + + example_fail = Test.fail "Something went wrong." + fail : Text -> Nothing|Text -> Test_Result + fail message details=Nothing = + failure = Test_Result.Failure (Test.enrich_message_with_clue message) details + Panic.throw failure ## PRIVATE - run_group self group = - case group of - Test.Group name specs -> - IO.println <| "Running group '" + group.name + "'" - group.specs.each spec-> - self.run_spec spec - _ -> Error.throw "Unreachable" + enrich_message_with_clue : Text -> Text + enrich_message_with_clue message = + case State.get Clue of + Clue.Value add_clue -> add_clue message + _ -> message ## PRIVATE - run_spec self spec = - case spec of - Test.Spec spec_name code -> - IO.println <| " Running spec '" + spec.name + "'" - spec.code Nothing - _ -> Error.throw "Unreachable" + Reports an unexpected dataflow error has occurred. 
+ fail_match_on_unexpected_error : Error -> Integer -> Nothing + fail_match_on_unexpected_error error frames_to_skip = + payload = error.catch + loc = Meta.get_source_location 1+frames_to_skip + msg = "An unexpected dataflow error (" + payload.to_text + ") has been matched (at " + loc + ")." + Test.fail msg+'\n'+error.get_stack_trace_text + + ## Executes the block of code passed as behavior and adds a clue modifier which + changes how assertion failures are reported. + + Nesting with_clue invocations results in clue aggregation. + + Arguments: + - clue: either a text which gets prepended to the failure or a function which transforms the failure message + - behavior: the behavior to test + + > Example + Add a clue to a test + from Standard.Test import Test, Test_Suite + import Standard.Test.Extensions + main = Test_Suite.run_main <| + Test.group "Tests" <| + Test.specify "some property" <| + xs = Vector.new 100 (n -> n) + xs.each x-> + Test.with_clue ("["+x.to_text+"] ") <| + x . should_equal 0 + with_clue : Text|(Text -> Text) -> Any -> Any + with_clue ~clue ~behavior = + add_clue x = case clue of + _ : Text -> clue + x + _ : Function -> clue x + prev_clue = State.get Clue + next_clue = case prev_clue of + Clue.Value prev_add_clue -> (x -> prev_add_clue (add_clue x)) + _ -> add_clue + State.put Clue (Clue.Value next_clue) + result = behavior + State.put Clue prev_clue + result diff --git a/test/Tests/src/Data/Bool_Spec_New.enso b/test/Tests/src/Data/Bool_Spec_New.enso index 8961f3d725d6..f41041490036 100644 --- a/test/Tests/src/Data/Bool_Spec_New.enso +++ b/test/Tests/src/Data/Bool_Spec_New.enso @@ -1,17 +1,52 @@ from Standard.Base import all -from Standard.Test_New import Test +from Standard.Test_New import all +Boolean.method self = self -collect_tests = Test.build builder-> +type My_Error + Value a + +crash = + Error.throw (My_Error.Value "foo") + +suite = Suite.build builder-> builder.group "Booleans" group_builder-> group_builder.specify "should allow 
converting Bools to Text values" <| - IO.println " In First spec" - True.to_text == "True" + True.to_text . should_equal "True" + False.to_text . should_equal "False" group_builder.specify "should allow for comparing Bools" <| - IO.println " In Second spec" - (True == True) + (True == True) . should_be_true + (False == False) . should_be_true + (True > False) . should_be_true + (False < True) . should_be_true + + group_builder.specify "should allow == operator" <| + True.should_equal True + False.should_equal False + True.should_not_equal False + False.should_not_equal True + (1 == 1).should_equal True + + group_builder.specify "should allow for extending Bools in a local module" <| + test = 1 == 2 + test.method . should_equal test + + group_builder.specify "should short-circuit ||" <| + (1 == 1) || (crash) . should_equal True + (1 == 0) || (1 == 1) . should_equal True + (1 == 0) || (crash) . should_fail_with My_Error + (1 == 1) || "foo" . should_equal True + (1 == 0) || "foo" . should_equal "foo" + + group_builder.specify "should short-circuit &&" <| + (1 == 0) && (crash) . should_equal False + (1 == 1) && (1 == 0) . should_equal False + (1 == 1) && (1 == 1) . should_equal True + (1 == 1) && (crash) . should_fail_with My_Error + (1 == 0) && "foo" . should_equal False + (1 == 1) && "foo" . 
should_equal "foo" main = - collect_tests.run_main + suite.run_all From b55b595c340a01055dc93680266f63e47c99f5dd Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 29 Dec 2023 18:30:07 +0100 Subject: [PATCH 10/93] Implement Suite.merge, add prototype of Test/Main_New.enso --- .../lib/Standard/Test_New/0.0.0-dev/src/Suite.enso | 8 ++++++++ test/Tests/src/Main_New.enso | 13 +++++++++++++ 2 files changed, 21 insertions(+) create mode 100644 test/Tests/src/Main_New.enso diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index d9cc56194248..fa7b69068d24 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -28,6 +28,14 @@ type Suite IO.println "Done with `Suite.build` (collecting tests)" Suite.Impl groups_vec + ## Merges multiple suites into one suite + merge : (Vector Suite) -> Suite + merge suites = + all_groups = suites.map (_.groups) . flatten + all_group_names = all_groups.map (_.name) + if all_group_names.distinct.length != all_group_names.length then Panic.throw "Group names not unique" else + Suite.Impl all_groups + run_all self = IO.println "Running `Suite.run_all` - running all tests" self.groups.each group-> diff --git a/test/Tests/src/Main_New.enso b/test/Tests/src/Main_New.enso new file mode 100644 index 000000000000..66899432d121 --- /dev/null +++ b/test/Tests/src/Main_New.enso @@ -0,0 +1,13 @@ +from Standard.Base import all +from Standard.Test_New import all + +import project.Data.Bool_Spec_New + + +main = + suites = Vector.new_builder + . append Bool_Spec_New.suite + . 
to_vector + + uber_suite = Suite.merge suites + uber_suite.run_all From 4255c94c63bb78d48a8cbde5bfab1f6387774ff9 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 5 Jan 2024 11:02:24 +0100 Subject: [PATCH 11/93] Initial version of SQLite_Spec migration --- .../Standard/Test_New/0.0.0-dev/src/Spec.enso | 3 +- .../Common/Default_Ordering_Spec_New.enso | 66 ++++++++++ .../src/Database/SQLite_Spec_New.enso | 119 ++++++++++++++++++ 3 files changed, 187 insertions(+), 1 deletion(-) create mode 100644 test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso create mode 100644 test/Table_Tests/src/Database/SQLite_Spec_New.enso diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso index e2d3ac1a3b4e..5f81bd447ac7 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso @@ -1,6 +1,7 @@ +private + from Standard.Base import all -private type Spec Impl (name : Text) (code : Any -> Any) diff --git a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso new file mode 100644 index 000000000000..004e01adf548 --- /dev/null +++ b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso @@ -0,0 +1,66 @@ +from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +from Standard.Base.Runtime import assert + +from Standard.Table import Table, Sort_Column, Aggregate_Column +from Standard.Table.Errors import all + +from Standard.Database import all +from Standard.Database.Errors import all + +from Standard.Test_New import all + +import project.Util +import project.Database.Helpers.Name_Generator + +type Data + Impl ~db_table_without_key ~db_table_with_key + + create connection_func = + src_table = Table.new [["X", [1, 2, 3]], ["Y", [30, 20, 10]]] + Data.Impl (create_db_table_without_key 
src_table (connection_func Nothing)) (create_db_table_with_key src_table (connection_func Nothing)) + + +create_db_table_without_key src_table connection = + src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=Nothing + +create_db_table_with_key src_table connection = + src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=["X"] + + +add_default_ordering_specs suite_builder prefix connection_func = + group_name = prefix + "Table.default_ordering" + data = Data.create connection_func + + suite_builder.group group_name group_builder-> + group_builder.specify "will return Nothing if no primary key is defined" <| + data.db_table_without_key.default_ordering . should_equal Nothing + + group_builder.specify "will return the key for a table with a primary key" <| + v1 = data.db_table_with_key.default_ordering + v1.length . should_equal 1 + v1.first.expression.name . should_equal "X" + v1.first.direction . should_equal Sort_Direction.Ascending + + t2 = data.db_table_with_key.set "10 - [X]" "X" + v2 = t2.default_ordering + v2.length . should_equal 1 + v2.first.expression.name . should_equal "X" + + group_builder.specify "will return Nothing for composite tables (join, aggregate)" + data.db_table_with_key.join data.db_table_with_key . default_ordering . should_equal Nothing + data.db_table_with_key.aggregate [Aggregate_Column.Group_By "X"] . default_ordering . should_equal Nothing + + group_builder.specify "will return the ordering determined by order_by" <| + v1 = data.db_table_with_key.order_by ["Y", Sort_Column.Name "X" Sort_Direction.Descending] . default_ordering + v1.length . should_equal 2 + v1.first.expression.name . should_equal "Y" + v1.first.direction . should_equal Sort_Direction.Ascending + v1.second.expression.name . should_equal "X" + v1.second.direction . 
should_equal Sort_Direction.Descending + + v2 = data.db_table_without_key.order_by ["Y"] . default_ordering + v2.length . should_equal 1 + v2.first.expression.name . should_equal "Y" + v2.first.direction . should_equal Sort_Direction.Ascending + diff --git a/test/Table_Tests/src/Database/SQLite_Spec_New.enso b/test/Table_Tests/src/Database/SQLite_Spec_New.enso new file mode 100644 index 000000000000..cf9023cd5f5a --- /dev/null +++ b/test/Table_Tests/src/Database/SQLite_Spec_New.enso @@ -0,0 +1,119 @@ +from Standard.Base import all +import Standard.Base.Runtime.Ref.Ref +import Standard.Base.Errors.File_Error.File_Error +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + +import Standard.Table.Data.Type.Value_Type.Bits +from Standard.Table import Table, Value_Type +from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names + +import Standard.Database.Data.Column.Column +import Standard.Database.Internal.Replace_Params.Replace_Params +from Standard.Database import all +from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation + +from Standard.Test_New import all + +import project.Database.Common.Default_Ordering_Spec_New +import project.Database.Helpers.Name_Generator + +## + sqlite_specific_spec suite_builder prefix connection setup = + table_builder = setup.table_builder + + suite_builder.group prefix+"Schemas and Databases" group_builder-> + group_builder.specify "should be able to get current database and list databases" <| + connection.database . should_equal Nothing + connection.databases . should_equal [Nothing] + Meta.is_same_object connection (connection.set_database Nothing) . should_be_true + + group_builder.specify "should be able to get current schema and list schemas" <| + connection.schema . should_equal Nothing + connection.schemas . should_equal [Nothing] + Meta.is_same_object connection (connection.set_schema Nothing) . 
should_be_true + + group_builder.specify "does not allow changing schema or database" <| + connection.set_schema "foo" . should_fail_with SQL_Error + connection.set_database "foo" . should_fail_with SQL_Error + + Test.group prefix+"Tables and Table Types" <| + tinfo = Name_Generator.random_name "TestTable" + connection.execute_update 'CREATE TABLE "'+tinfo+'" ("A" VARCHAR)' + + vinfo = Name_Generator.random_name "TestView" + connection.execute_update 'CREATE VIEW "'+vinfo+'" AS SELECT "A" FROM "'+tinfo+'";' + + temporary_table = Name_Generator.random_name "TemporaryTable" + (Table.new [["X", [1, 2, 3]]]).select_into_database_table connection temporary_table temporary=True + + Test.specify "should be able to list table types" <| + table_types = connection.table_types + table_types.length . should_not_equal 0 + table_types.contains "TABLE" . should_be_true + table_types.contains "VIEW" . should_be_true + + + Test.group prefix+"math functions" <| + do_op n op = + table = table_builder [["x", [n]]] + result = table.at "x" |> op + result.to_vector.at 0 + do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers) + + Test.specify "Can round correctly near the precision limit" <| + # This value varies depending on the version of SQLite. + do_round 1.2222222222222225 15 . should_equal 1.222222222222223 0.000000000000002 + do_round -1.2222222222222225 15 . should_equal -1.222222222222223 0.000000000000002 + do_round 1.2222222222222235 15 . should_equal 1.222222222222223 + do_round -1.2222222222222235 15 . 
should_equal -1.222222222222223 + + +sqlite_spec suite_builder connection_func prefix = + Default_Ordering_Spec_New.add_default_ordering_specs suite_builder prefix connection_func + +type Data + Impl ~file ~connection_file + + create = + Data.Impl create_file create_file_connection + +# Must be run only in suite +create_file = + enso_project.data.create_directory + file = enso_project.data / "transient" / "sqlite_test.db" + file.delete_if_exists + file + +create_file_connection = + file = create_file + connection = Database.connect (SQLite file) + connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' + connection + +create_inmem_connection = + Database.connect (SQLite In_Memory) + + +suite = + Test.build suite_builder-> + in_file_prefix = "[SQLite File] " + sqlite_spec suite_builder (_-> create_file_connection) in_file_prefix + + in_memory_prefix = "[SQLite In-Memory] " + sqlite_spec suite_builder (_-> create_inmem_connection) in_memory_prefix + + suite_builder.group "SQLite_Format should allow connecting to SQLite files" group_builder-> + data = Data.create + + group_builder.specify "should recognise a SQLite database file" <| + Auto_Detect.get_reading_format data.file . should_be_a SQLite_Format + + group_builder.specify 'should not duplicate warnings' <| + c = Database.connect (SQLite In_Memory) + t0 = Table.new [["X", ["a", "bc", "def"]]] + t1 = t0.select_into_database_table c "Tabela" + t2 = t1.cast "X" (Value_Type.Char size=1) + Warning.get_all t2 . length . 
should_equal 1 + +main = + suite.group_names From 9e0a498e228e92d3ee3a2a5d7916a2f544c3558c Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 5 Jan 2024 11:03:03 +0100 Subject: [PATCH 12/93] Move Suite.build to Test.build --- .../Test_New/0.0.0-dev/src/Phases.enso | 6 +++++ .../Test_New/0.0.0-dev/src/Suite.enso | 22 +++++++------------ .../Standard/Test_New/0.0.0-dev/src/Test.enso | 10 +++++++++ test/Tests/src/Data/Bool_Spec_New.enso | 2 +- 4 files changed, 25 insertions(+), 15 deletions(-) create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Phases.enso diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Phases.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Phases.enso new file mode 100644 index 000000000000..ae4c95f7ea74 --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Phases.enso @@ -0,0 +1,6 @@ +private + +type Building_Tests + +type Running_Tests + diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index fa7b69068d24..bb93c43535da 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -1,7 +1,9 @@ from Standard.Base import all +import Standard.Base.Runtime.State import project.Group.Group import project.Group.Group_Builder +import project.Phases.Running_Tests type Suite_Builder ## PRIVATE @@ -18,16 +20,6 @@ type Suite_Builder type Suite Impl (groups : Vector Group) - ## Construct a Suite object - build : (Suite_Builder -> Any) -> Suite - build fn = - IO.println "Running `Suite.build` (collecting tests)..." 
- b = Vector.new_builder - fn (Suite_Builder.Impl b) - groups_vec = b.to_vector - IO.println "Done with `Suite.build` (collecting tests)" - Suite.Impl groups_vec - ## Merges multiple suites into one suite merge : (Vector Suite) -> Suite merge suites = @@ -37,7 +29,9 @@ type Suite Suite.Impl all_groups run_all self = - IO.println "Running `Suite.run_all` - running all tests" - self.groups.each group-> - group.run - IO.println "Done with `Suite.run_all` - running all tests" + State.run Running_Tests True <| + self.groups.each group-> + group.run + + group_names self = + self.groups.map (_.name) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso index 7fea62520488..19251f59e929 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso @@ -2,10 +2,20 @@ from Standard.Base import all from Standard.Base.Runtime import State import project.Test_Result.Test_Result import project.Clue.Clue +import project.Suite.Suite +import project.Suite.Suite_Builder ## Contains only static methods type Test + ## Construct a Test Suite object + build : (Suite_Builder -> Any) -> Suite + build fn = + b = Vector.new_builder + fn (Suite_Builder.Impl b) + groups_vec = b.to_vector + Suite.Impl groups_vec + ## Expect a function to fail with the provided panic. Arguments: diff --git a/test/Tests/src/Data/Bool_Spec_New.enso b/test/Tests/src/Data/Bool_Spec_New.enso index f41041490036..bffe69c14fb3 100644 --- a/test/Tests/src/Data/Bool_Spec_New.enso +++ b/test/Tests/src/Data/Bool_Spec_New.enso @@ -9,7 +9,7 @@ type My_Error crash = Error.throw (My_Error.Value "foo") -suite = Suite.build builder-> +suite = Test.build builder-> builder.group "Booleans" group_builder-> group_builder.specify "should allow converting Bools to Text values" <| True.to_text . 
should_equal "True" From c65efc3d49647e1cdfcf3d8c86f57f241ed9dc01 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Mon, 8 Jan 2024 19:12:17 +0100 Subject: [PATCH 13/93] Split Test_Result into Spec_Result --- .../Test_New/0.0.0-dev/src/Extensions.enso | 86 +++++++++---------- .../Test_New/0.0.0-dev/src/Spec_Result.enso | 39 +++++++++ .../Standard/Test_New/0.0.0-dev/src/Test.enso | 8 +- .../Test_New/0.0.0-dev/src/Test_Result.enso | 48 ++++------- 4 files changed, 104 insertions(+), 77 deletions(-) create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec_Result.enso diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso index 237c9a68cf08..4f400efaafa9 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso @@ -2,7 +2,7 @@ from Standard.Base import all import Standard.Base.Errors.Common.No_Such_Method import Standard.Base.Errors.Illegal_Argument.Illegal_Argument -import project.Test_Result.Test_Result +import project.Spec_Result.Spec_Result import project.Test.Test ## Expect a function to fail with the provided dataflow error. @@ -20,7 +20,7 @@ import project.Test.Test example_should_fail_with = Examples.throw_error . should_fail_with Examples.My_Error -Any.should_fail_with : Any -> Integer -> Test_Result +Any.should_fail_with : Any -> Integer -> Spec_Result Any.should_fail_with self matcher frames_to_skip=0 = loc = Meta.get_source_location 1+frames_to_skip matcher_text = matcher . to_text @@ -41,7 +41,7 @@ Any.should_fail_with self matcher frames_to_skip=0 = example_should_fail_with = Examples.throw_error . 
should_fail_with Examples.My_Error -Error.should_fail_with : Any -> Integer -> Test_Result +Error.should_fail_with : Any -> Integer -> Spec_Result Error.should_fail_with self matcher frames_to_skip=0 = caught = self.catch if caught == matcher || caught.is_a matcher then Nothing else @@ -63,9 +63,9 @@ Error.should_fail_with self matcher frames_to_skip=0 = from Standard.Test import Test example_should_equal = Examples.add_1_to 1 . should_equal 2 -Any.should_equal : Any -> Integer -> Test_Result +Any.should_equal : Any -> Integer -> Spec_Result Any.should_equal self that frames_to_skip=0 = case self == that of - True -> Test_Result.Success + True -> Spec_Result.Success False -> loc = Meta.get_source_location 2+frames_to_skip additional_comment = case self of @@ -96,9 +96,9 @@ Any.should_equal self that frames_to_skip=0 = case self == that of from Standard.Test import Test example_should_equal = Examples.some_type . should_equal_type Vector -Any.should_equal_type : Any -> Integer -> Test_Result +Any.should_equal_type : Any -> Integer -> Spec_Result Any.should_equal_type self that frames_to_skip=0 = case (self.is_same_object_as that) of - True -> Test_Result.Success + True -> Spec_Result.Success False -> loc = Meta.get_source_location 2+frames_to_skip msg = self.to_text + " did not equal type " + that.to_text + " (at " + loc + ")." @@ -123,9 +123,9 @@ Error.should_equal_type self that frames_to_skip=0 = from Standard.Test import Test example_should_not_equal = Examples.add_1_to 1 . should_not_equal 2 -Any.should_not_equal : Any -> Integer -> Test_Result +Any.should_not_equal : Any -> Integer -> Spec_Result Any.should_not_equal self that frames_to_skip=0 = case self != that of - True -> Test_Result.Success + True -> Spec_Result.Success False -> loc = Meta.get_source_location 2+frames_to_skip msg = self.to_text + " did equal " + that.to_text + " (at " + loc + ")." 
@@ -150,9 +150,9 @@ Error.should_not_equal self that frames_to_skip=0 = from Standard.Test import Test example_should_not_equal = Examples.some_type . should_not_equal_type Vector -Any.should_not_equal_type : Any -> Integer -> Test_Result +Any.should_not_equal_type : Any -> Integer -> Spec_Result Any.should_not_equal_type self that frames_to_skip=0 = case (self.is_same_object_as that . not) of - True -> Test_Result.Success + True -> Spec_Result.Success False -> loc = Meta.get_source_location 2+frames_to_skip msg = self.to_text + " did equal type " + that.to_text + " (at " + loc + ")." @@ -176,9 +176,9 @@ Error.should_not_equal_type self that frames_to_skip=0 = from Standard.Test import Test example_should_start_with = "Hello World!" . should_start_with "Hello" -Any.should_start_with : Text -> Integer -> Test_Result +Any.should_start_with : Text -> Integer -> Spec_Result Any.should_start_with self that frames_to_skip=0 = case self of - _ : Text -> if self.starts_with that then Test_Result.Success else + _ : Text -> if self.starts_with that then Spec_Result.Success else loc = Meta.get_source_location 3+frames_to_skip msg = self.to_text + " does not start with " + that.to_text + " (at " + loc + ")." Test.fail msg @@ -200,9 +200,9 @@ Any.should_start_with self that frames_to_skip=0 = case self of from Standard.Test import Test example_should_end_with = "Hello World!" . should_end_with "ld!" -Any.should_end_with : Text -> Integer -> Test_Result +Any.should_end_with : Text -> Integer -> Spec_Result Any.should_end_with self that frames_to_skip=0 = case self of - _ : Text -> if self.ends_with that then Test_Result.Success else + _ : Text -> if self.ends_with that then Spec_Result.Success else loc = Meta.get_source_location 3+frames_to_skip msg = self.to_text + " does not end with " + that.to_text + " (at " + loc + ")." 
Test.fail msg @@ -224,7 +224,7 @@ Any.should_end_with self that frames_to_skip=0 = case self of from Standard.Test import Test example_should_start_with = "Hello World!" . should_start_with "Hello" -Error.should_start_with : Any -> Integer -> Test_Result +Error.should_start_with : Any -> Integer -> Spec_Result Error.should_start_with self that frames_to_skip=0 = _ = [that] Test.fail_match_on_unexpected_error self 1+frames_to_skip @@ -242,7 +242,7 @@ Error.should_start_with self that frames_to_skip=0 = from Standard.Test import Test example_should_end_with = "Hello World!" . should_end_with "ld!" -Error.should_end_with : Any -> Integer -> Test_Result +Error.should_end_with : Any -> Integer -> Spec_Result Error.should_end_with self that frames_to_skip=0 = _ = [that] Test.fail_match_on_unexpected_error self 1+frames_to_skip @@ -259,7 +259,7 @@ Error.should_end_with self that frames_to_skip=0 = from Standard.Test import Test example_should_equal = Examples.add_1_to 1 . should_equal 2 -Error.should_equal : Any -> Integer -> Test_Result +Error.should_equal : Any -> Integer -> Spec_Result Error.should_equal self that frames_to_skip=0 = _ = [that] Test.fail_match_on_unexpected_error self 1+frames_to_skip @@ -286,13 +286,13 @@ Error.should_equal self that frames_to_skip=0 = example_should_equal = 1.00000001 . should_equal 1.00000002 epsilon=0.0001 -Number.should_equal : Float -> Float -> Integer -> Test_Result +Number.should_equal : Float -> Float -> Integer -> Spec_Result Number.should_equal self that epsilon=0 frames_to_skip=0 = matches = case that of _ : Number -> self.equals that epsilon _ -> False case matches of - True -> Test_Result.Success + True -> Spec_Result.Success False -> loc = Meta.get_source_location 2+frames_to_skip msg = self.to_text + " did not equal " + that.to_text + " (at " + loc + ")." @@ -346,9 +346,9 @@ Error.should_be_a self typ frames_to_skip=0 = from Standard.Test import Test example_should_be_true = Examples.get_boolean . 
should_be_true -Boolean.should_be_true : Test_Result +Boolean.should_be_true : Spec_Result Boolean.should_be_true self = case self of - True -> Test_Result.Success + True -> Spec_Result.Success False -> loc = Meta.get_source_location 2 Test.fail "Expected False to be True (at "+loc+")." @@ -362,7 +362,7 @@ Boolean.should_be_true self = case self of from Standard.Test import Test example_should_be_true = Examples.get_boolean . should_be_true -Error.should_be_true : Test_Result +Error.should_be_true : Spec_Result Error.should_be_true self = Test.fail_match_on_unexpected_error self 1 ## Asserts that the given `Boolean` is `False` @@ -374,12 +374,12 @@ Error.should_be_true self = Test.fail_match_on_unexpected_error self 1 from Standard.Test import Test example_should_be_false = Examples.get_boolean . should_be_false -Boolean.should_be_false : Test_Result +Boolean.should_be_false : Spec_Result Boolean.should_be_false self = case self of True -> loc = Meta.get_source_location 2 Test.fail "Expected True to be False (at "+loc+")." - False -> Test_Result.Success + False -> Spec_Result.Success ## Asserts that the given `Boolean` is `False` @@ -390,7 +390,7 @@ Boolean.should_be_false self = case self of from Standard.Test import Test example_should_be_false = Examples.get_boolean . should_be_false -Error.should_be_false : Test_Result +Error.should_be_false : Spec_Result Error.should_be_false self = Test.fail_match_on_unexpected_error self 1 ## Asserts that a value is of a given type. 
@@ -404,7 +404,7 @@ Error.should_be_false self = Test.fail_match_on_unexpected_error self 1 from Standard.Test import Test example_should_be_a = 1.should_be_a Boolean -Any.should_be_a : Any -> Test_Result +Any.should_be_a : Any -> Spec_Result Any.should_be_a self typ = loc = Meta.get_source_location 1 fail_on_wrong_arg_type = @@ -413,7 +413,7 @@ Any.should_be_a self typ = case Meta.meta typ of c : Meta.Constructor -> case Meta.meta self of a : Meta.Atom -> - if a.constructor == c then Test_Result.Success else + if a.constructor == c then Spec_Result.Success else expected_type = Meta.get_qualified_type_name typ actual_type = Meta.get_qualified_type_name self message = "Expected a value of type "+expected_type+", built with constructor "+c.name+", but got a value of type "+actual_type+", built with constructor "+a.constructor.name+" instead (at "+loc+")." @@ -425,7 +425,7 @@ Any.should_be_a self typ = Test.fail message _ : Meta.Type -> ok = self.is_a typ || self==typ - if ok then Test_Result.Success else + if ok then Spec_Result.Success else expected_type = Meta.get_qualified_type_name typ actual_type = Meta.get_qualified_type_name self message = "Expected a value of type "+expected_type+" but got a value of type "+actual_type+" instead (at "+loc+")." @@ -437,13 +437,13 @@ Any.should_be_a self typ = self.should_be_a (ctor.value ...) _ : Meta.Polyglot -> ok = self.is_a typ - if ok then Test_Result.Success else + if ok then Spec_Result.Success else actual_type = Meta.get_qualified_type_name self message = "Expected a value of Java class "+typ.to_text+" but got a value of type "+actual_type+" instead (at "+loc+")." Test.fail message Meta.Primitive.Value (b : Boolean) -> ok = self == b - if ok then Test_Result.Success else + if ok then Spec_Result.Success else actual_type = Meta.get_qualified_type_name self message = "Expected a value of "+typ.to_text+" but got a value of type "+actual_type+" instead (at "+loc+")." 
Test.fail message @@ -470,7 +470,7 @@ Any.should_be_a self typ = from Standard.Test import Test example_should_equal = [1, 2] . should_contain_the_same_elements_as [2, 1] -Any.should_contain_the_same_elements_as : Any -> Integer -> Test_Result +Any.should_contain_the_same_elements_as : Any -> Integer -> Spec_Result Any.should_contain_the_same_elements_as self that frames_to_skip=0 = loc = Meta.get_source_location 1+frames_to_skip that.each element-> @@ -481,7 +481,7 @@ Any.should_contain_the_same_elements_as self that frames_to_skip=0 = if that.contains element . not then msg = "The collection contained an element ("+element.to_text+") which was not expected (at " + loc + ")." Test.fail msg - Test_Result.Success + Spec_Result.Success ## Asserts that `self` value contains the same elements as `that`. @@ -504,7 +504,7 @@ Any.should_contain_the_same_elements_as self that frames_to_skip=0 = from Standard.Test import Test example_should_equal = [1, 2] . should_contain_the_same_elements_as [2, 1] -Error.should_contain_the_same_elements_as : Any -> Integer -> Test_Result +Error.should_contain_the_same_elements_as : Any -> Integer -> Spec_Result Error.should_contain_the_same_elements_as self that frames_to_skip=0 = _ = [that] Test.fail_match_on_unexpected_error self 1+frames_to_skip @@ -531,14 +531,14 @@ Error.should_contain_the_same_elements_as self that frames_to_skip=0 = from Standard.Test import Test example_should_equal = [1, 2] . should_only_contain_elements_in [1, 2, 3, 4] -Any.should_only_contain_elements_in : Any -> Integer -> Test_Result +Any.should_only_contain_elements_in : Any -> Integer -> Spec_Result Any.should_only_contain_elements_in self that frames_to_skip=0 = loc = Meta.get_source_location 1+frames_to_skip self.each element-> if that.contains element . not then msg = "The collection contained an element ("+element.to_text+") which was not expected (at " + loc + ")." 
Test.fail msg - Test_Result.Success + Spec_Result.Success ## Asserts that `self` value contains only elements in `that`. @@ -562,7 +562,7 @@ Any.should_only_contain_elements_in self that frames_to_skip=0 = from Standard.Test import Test example_should_equal = [1, 2] . should_only_contain_elements_in [1, 2, 3, 4] -Error.should_only_contain_elements_in : Any -> Integer -> Test_Result +Error.should_only_contain_elements_in : Any -> Integer -> Spec_Result Error.should_only_contain_elements_in self that frames_to_skip=0 = _ = [that] Test.fail_match_on_unexpected_error self 1+frames_to_skip @@ -584,14 +584,14 @@ Error.should_only_contain_elements_in self that frames_to_skip=0 = from Standard.Test import Test example_should_equal = "foobar".should_contain "foo" -Any.should_contain : Any -> Integer -> Test_Result +Any.should_contain : Any -> Integer -> Spec_Result Any.should_contain self element frames_to_skip=0 = loc = Meta.get_source_location 1+frames_to_skip contains_result = Panic.catch No_Such_Method (self.contains element) caught_panic-> if caught_panic.payload.method_name != "contains" then Panic.throw caught_panic else msg = "The value (" + self.to_text + ") does not support the method `contains` (at " + loc + ")." Test.fail msg - if contains_result then Test_Result.Success else + if contains_result then Spec_Result.Success else msg = "The value (" + self.to_text + ") did not contain the element (" + element.to_text + ") (at " + loc + ")." 
Test.fail msg @@ -612,7 +612,7 @@ Any.should_contain self element frames_to_skip=0 = from Standard.Test import Test example_should_equal = "foobar".should_contain "foo" -Error.should_contain : Any -> Integer -> Test_Result +Error.should_contain : Any -> Integer -> Spec_Result Error.should_contain self element frames_to_skip=0 = _ = [element] Test.fail_match_on_unexpected_error self 1+frames_to_skip @@ -627,14 +627,14 @@ Error.should_contain self element frames_to_skip=0 = This method delegates to the `contains` method of `self` and will use the rules of the particular type - be it a `Vector`, `Text` or any custom type implementing a method `contains : a -> Boolean`. -Any.should_not_contain : Any -> Integer -> Test_Result +Any.should_not_contain : Any -> Integer -> Spec_Result Any.should_not_contain self element frames_to_skip=0 = loc = Meta.get_source_location 1+frames_to_skip contains_result = Panic.catch No_Such_Method (self.contains element) caught_panic-> if caught_panic.payload.method_name != "contains" then Panic.throw caught_panic else msg = "The value (" + self.to_text + ") does not support the method `contains` (at " + loc + ")." Test.fail msg - if contains_result.not then Test_Result.Success else + if contains_result.not then Spec_Result.Success else msg = "The value (" + self.to_text + ") contained the element (" + element.to_text + "), but it was expected to not contain it (at " + loc + ")." Test.fail msg @@ -648,7 +648,7 @@ Any.should_not_contain self element frames_to_skip=0 = This method delegates to the `contains` method of `self` and will use the rules of the particular type - be it a `Vector`, `Text` or any custom type implementing a method `contains : a -> Boolean`. 
-Error.should_not_contain : Any -> Integer -> Test_Result +Error.should_not_contain : Any -> Integer -> Spec_Result Error.should_not_contain self element frames_to_skip=0 = _ = [element] Test.fail_match_on_unexpected_error self 1+frames_to_skip diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec_Result.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec_Result.enso new file mode 100644 index 000000000000..f4a3bb14c99c --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec_Result.enso @@ -0,0 +1,39 @@ +private + +from Standard.Base import all + +## Result of one test Spec +type Spec_Result + ## Represents a successful behavioral test. + Success + + ## Represents a failing behavioral test. + + Arguments: + - message: The reason why the test failed. + - details: Additional context of the error, for example the stack trace. + Failure message details=Nothing + + ## Represents a pending behavioral test. + + Arguments: + - reason: Text describing why the test is pending. + Pending reason + + ## Checks if the Spec_Result is pending. + is_pending : Boolean + is_pending self = case self of + Spec_Result.Pending _ -> True + _ -> False + + ## Checks if the Spec_Result is a failure. + is_fail : Boolean + is_fail self = case self of + Spec_Result.Failure _ _ -> True + _ -> False + + ## Checks if the Spec_Result is a success. 
+ is_success : Boolean + is_success self = case self of + Spec_Result.Success -> True + _ -> False diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso index 19251f59e929..cd77bd08d636 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test.enso @@ -2,8 +2,11 @@ from Standard.Base import all from Standard.Base.Runtime import State import project.Test_Result.Test_Result import project.Clue.Clue +import project.Group.Group import project.Suite.Suite import project.Suite.Suite_Builder +import project.Spec.Spec +import project.Spec_Result.Spec_Result ## Contains only static methods @@ -90,9 +93,9 @@ type Test from Standard.Test import Test example_fail = Test.fail "Something went wrong." - fail : Text -> Nothing|Text -> Test_Result + fail : Text -> Nothing|Text -> Spec_Result fail message details=Nothing = - failure = Test_Result.Failure (Test.enrich_message_with_clue message) details + failure = Spec_Result.Failure (Test.enrich_message_with_clue message) details Panic.throw failure ## PRIVATE @@ -144,3 +147,4 @@ type Test result = behavior State.put Clue prev_clue result + diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Result.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Result.enso index ab8b19279a01..078afb916155 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Result.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Result.enso @@ -1,36 +1,20 @@ from Standard.Base import all -type Test_Result - ## Represents a successful behavioral test. - Success - - ## Represents a failing behavioral test. - - Arguments: - - message: The reason why the test failed. - - details: Additional context of the error, for example the stack trace. - Failure message details=Nothing - - ## Represents a pending behavioral test. 
+import project.Spec_Result.Spec_Result - Arguments: - - reason: Text describing why the test is pending. - Pending reason - ## Checks if the Test_Result is pending. - is_pending : Boolean - is_pending self = case self of - Test_Result.Pending _ -> True - _ -> False - - ## Checks if the Test_Result is a failure. - is_fail : Boolean - is_fail self = case self of - Test_Result.Failure _ _ -> True - _ -> False - - ## Checks if the Test_Result is a success. - is_success : Boolean - is_success self = case self of - Test_Result.Success -> True - _ -> False +type Test_Result + Impl (group_name : Text) (spec_name : Text) (spec_result : Spec_Result) (time_taken : Duration) + + to_text self = + "'" + self.group_name + "' '" + self.spec_name + "': " + self.spec_result.to_text + + is_pending self = + self.spec_result.is_pending + + is_success self = + self.spec_result.is_success + + is_fail self = + self.spec_result.is_fail + From 7a749ed464761e8f749d319d76d4c30aacc32525 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Mon, 8 Jan 2024 19:13:19 +0100 Subject: [PATCH 14/93] Move test running logic into Helpers module --- .../Test_New/0.0.0-dev/src/Group.enso | 5 -- .../Test_New/0.0.0-dev/src/Helpers.enso | 78 +++++++++++++++++++ .../Standard/Test_New/0.0.0-dev/src/Spec.enso | 9 +-- 3 files changed, 82 insertions(+), 10 deletions(-) create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso index 4cae58261baf..de08e0caf2fd 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso @@ -26,8 +26,3 @@ type Group sb.append "]" sb.toString - run self = - IO.println <| "Running group " + self.to_text + "..." 
- self.specs.each spec-> - spec.run - IO.println <| "Done running group" diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso new file mode 100644 index 000000000000..a176828563f5 --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso @@ -0,0 +1,78 @@ +private + +from Standard.Base import all +from Standard.Base.Runtime import State + +import project.Clue.Clue +import project.Group.Group +import project.Spec_Result.Spec_Result +import project.Spec.Spec +import project.Suite.Suite +import project.Suite_Config.Suite_Config +import project.Test_Result.Test_Result +import project.Test_Reporter +import project.Test.Test + +run_group_with_filter : Group -> Regex -> Vector Test_Result +run_group_with_filter (group : Group) (spec_filter : Regex) = + filtered_specs = group.specs.filter spec-> + (spec_filter.match spec.name) != Nothing + run_specs_from_group filtered_specs group.name + + +run_group : Group -> Vector Test_Result +run_group (group : Group) = + run_specs_from_group group.specs group.name + + +run_specs_from_group : Vector Spec -> Text -> Vector Test_Result +run_specs_from_group (specs : Vector Spec) (group_name : Text) = + IO.println <| "Running tests in group '" + group_name + "'..." + specs.map spec-> + IO.println <| " Running spec '" + spec.name + "'..." 
+ pair = run_spec spec + spec_res = pair.second + time_taken = pair.first + Test_Result.Impl group_name spec.name spec_res time_taken + +## PRIVATE +run_spec : Spec -> Pair Duration Spec_Result +run_spec (spec : Spec) = + pair = case spec.pending of + Nothing -> Duration.time_execution <| + State.run Clue Nothing (execute_spec_code spec.code) + reason -> Pair.new Duration.zero (Spec_Result.Pending reason) + pair + + +## PRIVATE +execute_spec_code : (Nothing -> Any) -> Spec_Result +execute_spec_code spec_code = + recovery = Panic.recover Any <| + result = spec_code Nothing + result.catch Any err-> + Panic.throw (Finished_With.Error err result.get_stack_trace_text) + Nothing + maybeExc = case recovery of + _ -> Spec_Result.Success + result = maybeExc.catch Any ex-> + case ex of + Spec_Result.Failure _ _ -> ex + Finished_With.Error err stack_trace_text -> + Spec_Result.Failure (Test.enrich_message_with_clue ("An unexpected error was returned: " + err.to_text)) details=stack_trace_text + _ -> Spec_Result.Failure (Test.enrich_message_with_clue ("An unexpected panic was thrown: " + ex.to_text)) details=maybeExc.get_stack_trace_text + result + +## PRIVATE + + An error describing that a test finished with an unexpected error. +type Finished_With + ## PRIVATE + + An error describing that a test finished with an unexpected error. + + Arguments: + - err: The payload of the error that triggered this error. + - stack_trace_text: A textual representation of the stack trace for the + error. 
+ Error err stack_trace_text diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso index 5f81bd447ac7..674876c68cd2 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso @@ -6,8 +6,7 @@ from Standard.Base import all type Spec Impl (name : Text) (code : Any -> Any) - to_text self = "Spec '" + self.name + "'" - - run self = - IO.println <| "Running spec " + self.to_text + "..." - self.code Nothing + to_text self = + pend = if self.pending.is_nothing then "" else + "pending = " + self.pending + "Spec '" + self.name + "' " + pend From 40a2873588a92dd0890768d7353e0c46836df576 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Mon, 8 Jan 2024 19:13:42 +0100 Subject: [PATCH 15/93] Add Test_Reporter from Test lib --- .../Test_New/0.0.0-dev/src/Suite_Config.enso | 8 +- .../Test_New/0.0.0-dev/src/Test_Reporter.enso | 150 ++++++++++++++++++ 2 files changed, 155 insertions(+), 3 deletions(-) create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Reporter.enso diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite_Config.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite_Config.enso index cfc3a7f52981..a9ff17a956b0 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite_Config.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite_Config.enso @@ -1,3 +1,5 @@ +private + from Standard.Base import all import Standard.Base.Runtime.Source_Location.Source_Location import Standard.Base.Runtime.Stack_Trace_Element @@ -36,9 +38,9 @@ type Suite_Config ## Creates an Suite_Config based off environment and caller location from_environment : Suite_Config from_environment = - only_group_regexp = Environment.get "TEST_ONLY_GROUP" print_only_failures = Environment.get "REPORT_ONLY_FAILED" != Nothing junit_folder = Environment.get "ENSO_TEST_JUNIT_DIR" + use_ansi_colors = 
Environment.get "ENSO_TEST_ANSI_COLORS" . is_nothing . not results_path = if junit_folder.is_nothing then Nothing else caller_script = find_caller_script Runtime.get_stack_trace project_root = find_project_root caller_script @@ -49,11 +51,11 @@ type Suite_Config False -> (File.new junit_folder) / project_root.name / "JUnit.xml" - Suite_Config.Value only_group_regexp print_only_failures results_path + Suite_Config.Value print_only_failures results_path use_ansi_colors ## PRIVATE Construct a configuration - Value only_group_regexp print_only_failures output_path + Value (print_only_failures : Boolean) (output_path : (File | Nothing)) (use_ansi_colors : Boolean) ## Should a specific group be run. should_run_group self name = diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Reporter.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Reporter.enso new file mode 100644 index 000000000000..04886cf86834 --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Reporter.enso @@ -0,0 +1,150 @@ +private + +from Standard.Base import all +from Standard.Base.Runtime import assert +import Standard.Base.Runtime.Context + +import project.Suite_Config.Suite_Config +import project.Spec_Result.Spec_Result +import project.Test.Test +import project.Test_Result.Test_Result + +polyglot java import java.lang.StringBuilder + +## PRIVATE + Write the JUnit XML header. +wrap_junit_testsuites : Suite_Config -> (StringBuilder|Nothing) -> Any -> Nothing +wrap_junit_testsuites config builder ~action = + if config.should_output_junit then + builder.append '\n' + builder.append '\n' + + result = action + + if config.should_output_junit then + builder.append '\n' + + Context.Output.with_enabled <| + config.output_path.parent.create_directory + builder.toString.write config.output_path + + result + +## PRIVATE +use_ansi_colors : Boolean +use_ansi_colors = Environment.get "ENSO_TEST_ANSI_COLORS" . is_nothing . 
not + +red text = + '\u001b[31;1m' + text + '\u001b[0m' + +green text = + '\u001b[32;1m' + text + '\u001b[0m' + +maybe_red_text (text : Text) (config : Suite_Config) = + if config.use_ansi_colors then (red text) else text + +maybe_green_text (text : Text) (config : Suite_Config) = + if config.use_ansi_colors then (green text) else text + +## Print result for a single Spec run +print_single_result : Test_Result -> Suite_Config -> Nothing +print_single_result (test_result : Test_Result) (config : Suite_Config) = + times_suffix = + times = test_result.time_taken.total_milliseconds.to_text + "ms" + "[" + times + "]" + + case test_result.spec_result of + Spec_Result.Success -> + if config.print_only_failures.not then + txt = " - " + test_result.spec_name + " " + times_suffix + IO.println (maybe_green_text txt config) + Spec_Result.Failure msg details -> + txt = " - [FAILED] " + test_result.spec_name + " " + times_suffix + IO.println (maybe_red_text txt config) + IO.println (" Reason: " + msg) + if details.is_nothing.not then + IO.println details + Spec_Result.Pending reason -> + if config.print_only_failures.not then + IO.println (" - [PENDING] " + test_result.spec_name) + IO.println (" Reason: " + reason) + + +## Prints all the results, optionally writing them to a jUnit XML output. + + Arguments: + - test_results: Vector of `Test_Result`. Can be empty. Can contain results from multiple + groups. + - builder: StringBuilder or Nothing. If StringBuilder, then a jUnit XML format is appended to + that StringBuilder. +print_report : Vector Test_Result -> Suite_Config -> (StringBuilder|Nothing) -> Nothing +print_report (test_results : Vector Test_Result) (config : Suite_Config) (builder : (StringBuilder | Nothing)) = + distinct_group_names = test_results.map (_.group_name) . 
distinct + results_per_group = distinct_group_names.fold Map.empty acc-> group_name-> + group_results = test_results.filter res-> + res.group_name == group_name + assert (group_results.length > 0) + acc.insert group_name group_results + results_per_group.each_with_key group_name-> group_results-> + print_group_report group_name group_results config builder + + +## Prints report for test_results from a single group. + + Arguments: + - test_results: Test test_results from a single group +print_group_report : Text -> Vector Test_Result -> Suite_Config -> (StringBuilder|Nothing) -> Nothing +print_group_report group_name test_results config builder = + distinct_groups = test_results.distinct (res-> res.group_name) + assert (distinct_groups.length == 1) + total_time = test_results.fold Duration.zero acc-> res-> + acc + res.time_taken + if config.should_output_junit then + builder.append (' \n') + + test_results.each result-> + builder.append (' ') + case result.spec_result of + Spec_Result.Success -> Nothing + Spec_Result.Failure msg details -> + escaped_message = escape_xml msg . replace '\n' ' ' + builder.append ('\n \n') + # We always print the message again as content - otherwise the GitHub action may fail to parse it. + builder.append (escape_xml msg) + if details.is_nothing.not then + ## If there are additional details, we print them as well. 
+ builder.append '\n\n' + builder.append (escape_xml details) + builder.append '\n \n' + Spec_Result.Pending msg -> builder.append ('\n \n ') + builder.append ' \n' + builder.append ' \n' + + should_print_behavior = config.print_only_failures.not || test_results.any (r -> r.is_fail) + if should_print_behavior then + tests_succeeded = test_results.fold 0 acc-> res-> + if res.is_success then acc + 1 else acc + tests_failed = test_results.fold 0 acc-> res-> + if res.is_fail then acc + 1 else acc + some_test_failed = tests_failed > 0 + tests_executed = tests_succeeded + tests_failed + group_description = + counts = tests_succeeded.to_text + "/" + tests_executed.to_text + times = total_time.total_milliseconds.to_text + "ms" + group_name + ": " + "[" + counts + ", " + times + "]" + IO.println <| case some_test_failed of + True -> maybe_red_text ("[FAILED] " + group_description) config + False -> maybe_green_text group_description config + test_results.each result-> + print_single_result result config + +## PRIVATE + Escape Text for XML +escape_xml : Text -> Text +escape_xml input = + input.replace '&' '&' . replace '"' '"' . replace "'" ''' . replace '<' '<' . replace '>' '>' From 2ef29f2c91f7cd2e2ef71f3f7c57828a60f8c53a Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Mon, 8 Jan 2024 19:13:57 +0100 Subject: [PATCH 16/93] Group and Spec can be pending --- distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso | 6 +++--- distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso index de08e0caf2fd..e21376cb149b 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso @@ -9,9 +9,9 @@ type Group_Builder Impl builder ## Specifies a single test. 
- specify : Text -> (Nothing -> Any) -> Nothing - specify self (name:Text) ~code = - self.builder.append <| Spec.Impl name (_ -> code) + specify : Text -> (Nothing | Text) -> (Nothing -> Any) -> Nothing + specify self name ~code pending=Nothing = + self.builder.append <| Spec.Impl name (_ -> code) pending Nothing diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso index 674876c68cd2..ff6544050f54 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Spec.enso @@ -4,7 +4,7 @@ from Standard.Base import all type Spec - Impl (name : Text) (code : Any -> Any) + Impl (name : Text) (code : Any -> Any) (pending : (Nothing | Text)) to_text self = pend = if self.pending.is_nothing then "" else From 22bc28f97accb0edf5b8b9a65a7ea3f3cee316cc Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Mon, 8 Jan 2024 19:14:16 +0100 Subject: [PATCH 17/93] Add Suite.run_with_filter --- .../Test_New/0.0.0-dev/src/Suite.enso | 40 +++++++++++++++++-- 1 file changed, 37 insertions(+), 3 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index bb93c43535da..0d0ae9ecce1b 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -1,9 +1,14 @@ from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Runtime.State + import project.Group.Group import project.Group.Group_Builder +import project.Helpers import project.Phases.Running_Tests +import project.Suite_Config.Suite_Config +import project.Test_Reporter type Suite_Builder ## PRIVATE @@ -29,9 +34,38 @@ type Suite Suite.Impl all_groups run_all self = - State.run Running_Tests True <| - self.groups.each group-> - group.run + self.run_with_filter Nothing Nothing + + 
## Run only tests that match the provided filters. The filters are converted to + a regular expression. If a filter is Nothing, it matches all the names. + + Arguments: + - group_filter: Filter for group names. + - spec_filter: Filter for spec names. + run_with_filter : (Regex | Text | Nothing) -> (Regex | Text | Nothing) -> Nothing + run_with_filter self group_filter=Nothing spec_filter=Nothing = + config = Suite_Config.from_environment + convert_filter filter = + case filter of + r : Regex -> r + t : Text -> Regex.compile t + Nothing -> Regex.compile ".*" + _ -> Error.throw (Illegal_Argument.Error "Wrong specification of a filter: "+filter.to_text) + group_filter_conv = convert_filter group_filter + spec_filter_conv = convert_filter spec_filter + filtered_groups = self.groups.filter group-> + (group_filter_conv.match group.name) != Nothing + filtered_groups.each group-> + State.run Running_Tests True <| + results = Helpers.run_group_with_filter group spec_filter_conv + Test_Reporter.print_report results config Nothing group_names self = self.groups.map (_.name) + + print_all self = + IO.println "Test Suite:" + self.groups.each group-> + IO.println <| " Group '" + group.name + "':" + group.specs.each spec-> + IO.println <| " " + spec.name From 5b961c45668baa70fdf366500f2111712f210b4d Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 9 Jan 2024 13:44:44 +0100 Subject: [PATCH 18/93] Introduce group_with_setup and specify_with_args methods to the API. 
--- .../Test_New/0.0.0-dev/src/Group.enso | 24 ++++++++++--- .../Test_New/0.0.0-dev/src/Helpers.enso | 34 ++++++++++++------- .../Test_New/0.0.0-dev/src/Suite.enso | 18 ++++++++-- .../src/Database/SQLite_Spec_New.enso | 21 +++++++++--- 4 files changed, 71 insertions(+), 26 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso index e21376cb149b..5f41121dd8ed 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso @@ -6,17 +6,31 @@ polyglot java import java.lang.StringBuilder type Group_Builder ## PRIVATE - Impl builder + Impl builder has_setup=False ## Specifies a single test. - specify : Text -> (Nothing | Text) -> (Nothing -> Any) -> Nothing - specify self name ~code pending=Nothing = + + Arguments: + - code: A thunk that takes no arguments. + - pending: Contains a reason for why the test should be ignored. If Nothing, the test + is not ignored. + specify self (name : Text) (~code : Any) (pending : (Text | Nothing) = Nothing) = self.builder.append <| Spec.Impl name (_ -> code) pending - Nothing + + ## Specifies a single test that takes arguments from Group.setup + + Arguments: + - code: Spec function that takes arguments provided by Group.setup method. 
+ specify_with_args self (name : Text) (code : (Any -> Any)) (pending : (Text | Nothing) = Nothing) = + case self.has_setup of + False -> + Panic.throw ("specify_with_args can only be used on a group that was created via Group.group_with_setup") + True -> + self.builder.append <| Spec.Impl name code pending type Group - Impl (name : Text) (specs : Vector Spec) + Impl (name : Text) (setup : (Nothing -> Any)) (specs : Vector Spec) to_text self = sb = StringBuilder.new diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso index a176828563f5..5416019d835b 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso @@ -17,39 +17,47 @@ run_group_with_filter : Group -> Regex -> Vector Test_Result run_group_with_filter (group : Group) (spec_filter : Regex) = filtered_specs = group.specs.filter spec-> (spec_filter.match spec.name) != Nothing - run_specs_from_group filtered_specs group.name + run_specs_from_group filtered_specs group run_group : Group -> Vector Test_Result run_group (group : Group) = - run_specs_from_group group.specs group.name + run_specs_from_group group.specs group run_specs_from_group : Vector Spec -> Text -> Vector Test_Result -run_specs_from_group (specs : Vector Spec) (group_name : Text) = - IO.println <| "Running tests in group '" + group_name + "'..." +run_specs_from_group (specs : Vector Spec) (group : Group) = + if specs.not_empty then + IO.println <| "Running tests in group '" + group.name + "'..." specs.map spec-> IO.println <| " Running spec '" + spec.name + "'..." 
- pair = run_spec spec + pair = run_spec spec group.setup spec_res = pair.second time_taken = pair.first - Test_Result.Impl group_name spec.name spec_res time_taken + Test_Result.Impl group.name spec.name spec_res time_taken ## PRIVATE -run_spec : Spec -> Pair Duration Spec_Result -run_spec (spec : Spec) = + Runs a single test spec. + + Arguments: + - setup_fn: Either Nothing or a function that returns arguments that should be passed + to the spec test. Alias for `before_each`. +run_spec : Spec -> (Nothing -> Any) -> Pair Duration Spec_Result +run_spec (spec : Spec) (setup_fn : (Nothing -> Any)) = pair = case spec.pending of - Nothing -> Duration.time_execution <| - State.run Clue Nothing (execute_spec_code spec.code) + Nothing -> + spec_args = setup_fn Nothing + Duration.time_execution <| + State.run Clue Nothing (execute_spec_code spec.code spec_args) reason -> Pair.new Duration.zero (Spec_Result.Pending reason) pair ## PRIVATE -execute_spec_code : (Nothing -> Any) -> Spec_Result -execute_spec_code spec_code = +execute_spec_code : (Nothing -> Any) -> Any -> Spec_Result +execute_spec_code spec_code spec_args = recovery = Panic.recover Any <| - result = spec_code Nothing + result = spec_code spec_args result.catch Any err-> Panic.throw (Finished_With.Error err result.get_stack_trace_text) Nothing diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index 0d0ae9ecce1b..cb5cd21581bd 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -16,10 +16,22 @@ type Suite_Builder ## Add a group to the builder. group : Text -> (Group_Builder -> Any) -> Nothing - group self (name:Text) fn = + group self (name:Text) (fn : (Group_Builder -> Any)) = + self.group_with_setup name (_ -> Nothing) fn + + ## Add a group with a setup function to the builder. 
The setup function is invoked + before every spec and the arguments are provided to the spec. To use the arguments + from Group.setup function, use `Group_Builder.specify_with_args` method. + + Arguments: + - setup_fn: Function that takes no arguments and provides a value that will be fed into + every test. This function will be invoked before every test. + + group_with_setup : Text -> (Nothing -> Any) -> (Group_Builder -> Any) -> Nothing + group_with_setup self (name : Text) (~setup_fn : Any) (build_fn : (Group_Builder -> Any)) = b = Vector.new_builder - fn (Group_Builder.Impl b) - self.builder.append <| Group.Impl name b.to_vector + build_fn (Group_Builder.Impl b has_setup=True) + self.builder.append <| Group.Impl name (_ -> setup_fn) b.to_vector type Suite diff --git a/test/Table_Tests/src/Database/SQLite_Spec_New.enso b/test/Table_Tests/src/Database/SQLite_Spec_New.enso index cf9023cd5f5a..84fa13031289 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec_New.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec_New.enso @@ -94,6 +94,16 @@ create_inmem_connection = Database.connect (SQLite In_Memory) +setup = + enso_project.data.create_directory + file = enso_project.data / "transient" / "sqlite_test.db" + file.delete_if_exists + connection = Database.connect (SQLite file) + connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' + connection.close + Data.Impl file connection + + suite = Test.build suite_builder-> in_file_prefix = "[SQLite File] " @@ -102,11 +112,10 @@ suite = in_memory_prefix = "[SQLite In-Memory] " sqlite_spec suite_builder (_-> create_inmem_connection) in_memory_prefix - suite_builder.group "SQLite_Format should allow connecting to SQLite files" group_builder-> - data = Data.create + suite_builder.group_with_setup "SQLite_Format should allow connecting to SQLite files" setup group_builder-> - group_builder.specify "should recognise a SQLite database file" <| - 
Auto_Detect.get_reading_format data.file . should_be_a SQLite_Format + group_builder.specify_with_args "should recognise a SQLite database file" args-> + Auto_Detect.get_reading_format args.file . should_be_a SQLite_Format group_builder.specify 'should not duplicate warnings' <| c = Database.connect (SQLite In_Memory) @@ -116,4 +125,6 @@ suite = Warning.get_all t2 . length . should_equal 1 main = - suite.group_names + suite.print_all + regex = Regex.compile "(should recognise a SQLite database file|should not duplicate warnings)" + suite.run_with_filter spec_filter=regex From 25fbe49173403ced792f6fcf5a4df5cca5d5c72d Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Wed, 10 Jan 2024 11:24:13 +0100 Subject: [PATCH 19/93] Remove group_with_setup and specify_with_args and replace it with teardown --- .../Test_New/0.0.0-dev/src/Group.enso | 22 +++++++------- .../Test_New/0.0.0-dev/src/Helpers.enso | 30 +++++++++---------- .../Test_New/0.0.0-dev/src/Suite.enso | 19 +++--------- 3 files changed, 30 insertions(+), 41 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso index 5f41121dd8ed..13bf70626e0b 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso @@ -1,12 +1,16 @@ private from Standard.Base import all +import Standard.Base.Runtime.Ref.Ref import project.Spec.Spec polyglot java import java.lang.StringBuilder type Group_Builder ## PRIVATE - Impl builder has_setup=False + + Arguments: + - teardown_ref: A reference to a teardown method. + Impl (builder = Vector.new_builder) (teardown_ref = Ref.new (_ -> Nothing)) ## Specifies a single test. 
@@ -17,20 +21,18 @@ type Group_Builder specify self (name : Text) (~code : Any) (pending : (Text | Nothing) = Nothing) = self.builder.append <| Spec.Impl name (_ -> code) pending - ## Specifies a single test that takes arguments from Group.setup + ## Provide a teardown of a group - a method that will be called once all the tests from the + group are finished. Arguments: - - code: Spec function that takes arguments provided by Group.setup method. - specify_with_args self (name : Text) (code : (Any -> Any)) (pending : (Text | Nothing) = Nothing) = - case self.has_setup of - False -> - Panic.throw ("specify_with_args can only be used on a group that was created via Group.group_with_setup") - True -> - self.builder.append <| Spec.Impl name code pending + - code: A function that does the teardown of a group. This function does not take any arguments, + and its return value is ignored. + teardown self (~code : Any) = + self.teardown_ref.put (_ -> code) type Group - Impl (name : Text) (setup : (Nothing -> Any)) (specs : Vector Spec) + Impl (name : Text) (teardown : (Any -> Any)) (specs : Vector Spec) to_text self = sb = StringBuilder.new diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso index 5416019d835b..c82c36d12993 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso @@ -29,35 +29,32 @@ run_specs_from_group : Vector Spec -> Text -> Vector Test_Result run_specs_from_group (specs : Vector Spec) (group : Group) = if specs.not_empty then IO.println <| "Running tests in group '" + group.name + "'..." - specs.map spec-> + test_results = specs.map spec-> IO.println <| " Running spec '" + spec.name + "'..." 
- pair = run_spec spec group.setup + pair = run_spec spec spec_res = pair.second time_taken = pair.first Test_Result.Impl group.name spec.name spec_res time_taken + # Invoke the teardown of the group + group.teardown Nothing + test_results -## PRIVATE - Runs a single test spec. - Arguments: - - setup_fn: Either Nothing or a function that returns arguments that should be passed - to the spec test. Alias for `before_each`. -run_spec : Spec -> (Nothing -> Any) -> Pair Duration Spec_Result -run_spec (spec : Spec) (setup_fn : (Nothing -> Any)) = +## PRIVATE +run_spec : Spec -> Pair Duration Spec_Result +run_spec (spec : Spec) = pair = case spec.pending of - Nothing -> - spec_args = setup_fn Nothing - Duration.time_execution <| - State.run Clue Nothing (execute_spec_code spec.code spec_args) + Nothing -> Duration.time_execution <| + State.run Clue Nothing (execute_spec_code spec.code) reason -> Pair.new Duration.zero (Spec_Result.Pending reason) pair ## PRIVATE -execute_spec_code : (Nothing -> Any) -> Any -> Spec_Result -execute_spec_code spec_code spec_args = +execute_spec_code : (Nothing -> Any) -> Spec_Result +execute_spec_code spec_code = recovery = Panic.recover Any <| - result = spec_code spec_args + result = spec_code Nothing result.catch Any err-> Panic.throw (Finished_With.Error err result.get_stack_trace_text) Nothing @@ -71,6 +68,7 @@ execute_spec_code spec_code spec_args = _ -> Spec_Result.Failure (Test.enrich_message_with_clue ("An unexpected panic was thrown: " + ex.to_text)) details=maybeExc.get_stack_trace_text result + ## PRIVATE An error describing that a test finished with an unexpected error. 
diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index cb5cd21581bd..a538fafa9a89 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -17,21 +17,10 @@ type Suite_Builder ## Add a group to the builder. group : Text -> (Group_Builder -> Any) -> Nothing group self (name:Text) (fn : (Group_Builder -> Any)) = - self.group_with_setup name (_ -> Nothing) fn - - ## Add a group with a setup function to the builder. The setup function is invoked - before every spec and the arguments are provided to the spec. To use the arguments - from Group.setup function, use `Group_Builder.specify_with_args` method. - - Arguments: - - setup_fn: Function that takes no arguments and provides a value that will be fed into - every test. This function will be invoked before every test. - - group_with_setup : Text -> (Nothing -> Any) -> (Group_Builder -> Any) -> Nothing - group_with_setup self (name : Text) (~setup_fn : Any) (build_fn : (Group_Builder -> Any)) = - b = Vector.new_builder - build_fn (Group_Builder.Impl b has_setup=True) - self.builder.append <| Group.Impl name (_ -> setup_fn) b.to_vector + group_builder = Group_Builder.Impl + fn group_builder + group = Group.Impl name group_builder.teardown_ref.get group_builder.builder.to_vector + self.builder.append group type Suite From 4795e18fc91bc15dfdaa9627dc6c277b1768999e Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Wed, 10 Jan 2024 11:24:29 +0100 Subject: [PATCH 20/93] Remove Proposition_1.enso This was discarded from the first round of reviews. 
--- test/Tests/src/Proposition_1.enso | 120 ------------------------------ 1 file changed, 120 deletions(-) delete mode 100644 test/Tests/src/Proposition_1.enso diff --git a/test/Tests/src/Proposition_1.enso b/test/Tests/src/Proposition_1.enso deleted file mode 100644 index b5a1f4f1485d..000000000000 --- a/test/Tests/src/Proposition_1.enso +++ /dev/null @@ -1,120 +0,0 @@ -from Standard.Base import all -import Standard.Base.Runtime.State -polyglot java import java.lang.StringBuilder - - -type Group_Builder -type Test_Suite_Builder - - -type Spec - Impl name ~code - - to_text self = self.name - - run : Any - run self = - IO.println <| " Running spec '" + self.name + "'" - self.code - - -type Group - Impl (name : Text) (specs : Vector Spec) - - to_text self = - sb = StringBuilder.new - sb.append ("Group '" + self.name + "': specs = [") - self.specs.each spec-> - sb.append (spec.to_text + ", ") - sb.append "]" - sb.toString - - run : Nothing - run self = - IO.println ("Running group '" + self.name + "'") - self.specs.each spec-> - spec.run - - -## Contains just static methods -type Test - group : Text -> Vector Any -> Nothing - group group_name ~specs = - suite_bldr = State.get Test_Suite_Builder - grp_bldr = Vector.new_builder - State.run Group_Builder grp_bldr <| - specs - grp = Group.Impl group_name grp_bldr.to_vector - suite_bldr.append grp - - specify : Text -> (Any -> Any) -> Spec - specify spec_name ~code = - grp_bldr = State.get Group_Builder - grp_bldr.append (Spec.Impl spec_name code) - - -type Test_Suite - Impl (groups : Vector Group) - - collect : (Any -> Any) -> Test_Suite - collect ~groups = - suite_bldr = Vector.new_builder - State.run Test_Suite_Builder suite_bldr <| - groups - Test_Suite.Impl suite_bldr.to_vector - - run_all : Nothing - run_all self = - self.groups.each group-> - group.run - - ## Returns all test names, in form `:` - test_names : Vector Text - test_names self = - self.groups.fold [] acc->group-> - spec_names = group.specs.map spec-> - 
group.name + ":" + spec.name - acc + spec_names - - -## Encapsulates the data that are lazily initialized for the tests -type Data - Impl ~vec - - create_vec size = - IO.println "Creating test data" - bldr = Vector.new_builder - 0.up_to size . each (idx-> bldr.append (idx + 1)) - bldr.to_vector - - -specs = - data = Data.Impl (Data.create_vec 10) - - Test.group "my-group" <| - Test.specify "my-spec-1" <| - IO.println " In my-spec-1" - - Test.specify "my-spec-2" <| - IO.println " In my-spec-2" - res = data.vec.distinct.length == data.vec.length - IO.println <| " my-spec-2: res = " + res.to_text - res - - Test.group "my-group-2" <| - Test.specify "my-spec-3" <| - IO.println " In my-spec-3" - 1 == 1 - - -test_suite = Test_Suite.collect specs - - -main = - IO.println "All test names:" - test_suite.test_names.each test_name-> - IO.println <| " " + test_name - IO.println "Running tests..." - test_suite.run_all - IO.println "Done running tests." - From 29baa8e90bd994a0dde509a7b0fe7f6bf456e572 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 11 Jan 2024 15:51:28 +0100 Subject: [PATCH 21/93] Default_Ordering_Spec_New gets connection_provider instead of connection_func --- .../Common/Default_Ordering_Spec_New.enso | 46 +++++++--- .../src/Database/SQLite_Spec_New.enso | 87 +++++++++++-------- 2 files changed, 86 insertions(+), 47 deletions(-) diff --git a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso index 004e01adf548..aa9d71f7d181 100644 --- a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso +++ b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso @@ -9,6 +9,7 @@ from Standard.Database import all from Standard.Database.Errors import all from Standard.Test_New import all +import Standard.Test_New.Suite.Suite_Builder import project.Util import project.Database.Helpers.Name_Generator @@ -16,23 +17,44 @@ import 
project.Database.Helpers.Name_Generator type Data Impl ~db_table_without_key ~db_table_with_key - create connection_func = + create connection_provider = src_table = Table.new [["X", [1, 2, 3]], ["Y", [30, 20, 10]]] - Data.Impl (create_db_table_without_key src_table (connection_func Nothing)) (create_db_table_with_key src_table (connection_func Nothing)) + Data.Impl (Data.create_db_table_without_key src_table connection_provider) (Data.create_db_table_with_key src_table connection_provider) + + create_db_table_without_key src_table connection_provider = + src_table.select_into_database_table connection_provider.connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=Nothing + + create_db_table_with_key src_table connection_provider = + src_table.select_into_database_table connection_provider.connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=["X"] + + +## Adds test specifications for default ordering to the given `suite_builder`. Adds it as + group with the given `prefix` as its name prefix. + + Arguments: + - suite_builder: A Suite_Builder in which a new group will be created + - connection_provider: An atom with `connection` method that returns a database connection. + Preferably, this method should be lazy. It must contain `teardown` method as well. +add_default_ordering_specs (suite_builder : Suite_Builder) (prefix : Text) (connection_provider : Any) = + expected_methods_in_provider = ["connection", "teardown"] + provider_type = Meta.type_of connection_provider + is_correct_provider = expected_methods_in_provider.all expected_method-> + provider_methods = Meta.get_type_methods provider_type + provider_methods.contains expected_method + if is_correct_provider.not then + # Print the message to stderr to make sure that it is not abbreviated in the Panic message + msg = "connection_provider must contain all these methods: " + expected_methods_in_provider.to_text + ". 
Actual type is " + provider_type.to_text + IO.print_err msg + Panic.throw <| Illegal_Argument.Error "connection_provider" - -create_db_table_without_key src_table connection = - src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=Nothing - -create_db_table_with_key src_table connection = - src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=["X"] - - -add_default_ordering_specs suite_builder prefix connection_func = group_name = prefix + "Table.default_ordering" - data = Data.create connection_func suite_builder.group group_name group_builder-> + data = Data.create connection_provider + + group_builder.teardown <| + connection_provider.teardown + group_builder.specify "will return Nothing if no primary key is defined" <| data.db_table_without_key.default_ordering . should_equal Nothing diff --git a/test/Table_Tests/src/Database/SQLite_Spec_New.enso b/test/Table_Tests/src/Database/SQLite_Spec_New.enso index 84fa13031289..8c52b31a27ba 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec_New.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec_New.enso @@ -1,5 +1,6 @@ from Standard.Base import all import Standard.Base.Runtime.Ref.Ref +from Standard.Base.Runtime import assert import Standard.Base.Errors.File_Error.File_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument @@ -14,7 +15,7 @@ from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation from Standard.Test_New import all -import project.Database.Common.Default_Ordering_Spec_New +import project.Database.Common.Common_Spec_New import project.Database.Helpers.Name_Generator ## @@ -68,54 +69,67 @@ import project.Database.Helpers.Name_Generator do_round -1.2222222222222235 15 . 
should_equal -1.222222222222223 -sqlite_spec suite_builder connection_func prefix = - Default_Ordering_Spec_New.add_default_ordering_specs suite_builder prefix connection_func +sqlite_spec suite_builder prefix connection_provider = + Common_Spec_New.add_common_specs suite_builder prefix connection_provider -type Data - Impl ~file ~connection_file - create = - Data.Impl create_file create_file_connection +type Inmem_Connection + Impl ~connection -# Must be run only in suite -create_file = - enso_project.data.create_directory - file = enso_project.data / "transient" / "sqlite_test.db" - file.delete_if_exists - file + create = + Inmem_Connection.Impl Inmem_Connection.create_connection -create_file_connection = - file = create_file - connection = Database.connect (SQLite file) - connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' - connection + create_connection = + IO.println <| " SQLite_Spec_New.Inmem_Connection.create_connection" + Database.connect (SQLite In_Memory) -create_inmem_connection = - Database.connect (SQLite In_Memory) + teardown self = Nothing -setup = - enso_project.data.create_directory - file = enso_project.data / "transient" / "sqlite_test.db" - file.delete_if_exists - connection = Database.connect (SQLite file) - connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' - connection.close - Data.Impl file connection +type File_Connection + Impl ~file + create = + File_Connection.Impl File_Connection.create_file + + create_file = + IO.println <| " SQLite_Spec_New.File_Connection.create_file" + transient_dir = enso_project.data / "transient" + assert transient_dir.exists ("There should be .gitignore file in the " + transient_dir.path + " directory") + file = transient_dir / "sqlite_test.db" + IO.println <| " SQLite_Spec_New.File_Connection.create_file: file.exists = " + file.exists.to_text + if file.exists.not then Panic.throw 
"Assertion error" + assert file.exists.not + connection = Database.connect (SQLite file) + connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' + connection.close + assert file.exists + file + + connection self = + Database.connect (SQLite self.file) + + teardown self = + IO.println <| " SQLite_Spec_New.File_Connection.teardown" + assert self.file.exists + self.file.delete suite = Test.build suite_builder-> in_file_prefix = "[SQLite File] " - sqlite_spec suite_builder (_-> create_file_connection) in_file_prefix + sqlite_spec suite_builder in_file_prefix File_Connection.create in_memory_prefix = "[SQLite In-Memory] " - sqlite_spec suite_builder (_-> create_inmem_connection) in_memory_prefix + sqlite_spec suite_builder in_memory_prefix Inmem_Connection.create + + suite_builder.group "SQLite_Format should allow connecting to SQLite files" group_builder-> + data = File_Connection.create - suite_builder.group_with_setup "SQLite_Format should allow connecting to SQLite files" setup group_builder-> + group_builder.teardown <| + data.teardown - group_builder.specify_with_args "should recognise a SQLite database file" args-> - Auto_Detect.get_reading_format args.file . should_be_a SQLite_Format + group_builder.specify "should recognise a SQLite database file" <| + Auto_Detect.get_reading_format data.file . should_be_a SQLite_Format group_builder.specify 'should not duplicate warnings' <| c = Database.connect (SQLite In_Memory) @@ -125,6 +139,9 @@ suite = Warning.get_all t2 . length . 
should_equal 1 main = + IO.println <| "==============" suite.print_all - regex = Regex.compile "(should recognise a SQLite database file|should not duplicate warnings)" - suite.run_with_filter spec_filter=regex + IO.println <| "==============" + group_filter = "Connection.query" + spec_filter = "should allow to access a Table by name" + suite.run_with_filter group_filter spec_filter From aae5c6c007aadd5131501c444ea29dbf687f3d12 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 11 Jan 2024 15:52:04 +0100 Subject: [PATCH 22/93] teardown is not called if the group is empty --- .../Test_New/0.0.0-dev/src/Helpers.enso | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso index c82c36d12993..56d4ee4d6fc3 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso @@ -27,17 +27,20 @@ run_group (group : Group) = run_specs_from_group : Vector Spec -> Text -> Vector Test_Result run_specs_from_group (specs : Vector Spec) (group : Group) = - if specs.not_empty then - IO.println <| "Running tests in group '" + group.name + "'..." - test_results = specs.map spec-> - IO.println <| " Running spec '" + spec.name + "'..." - pair = run_spec spec - spec_res = pair.second - time_taken = pair.first - Test_Result.Impl group.name spec.name spec_res time_taken - # Invoke the teardown of the group - group.teardown Nothing - test_results + case specs.is_empty of + True -> [] + False -> + IO.println <| "Running tests in group '" + group.name + "'..." + test_results = specs.map spec-> + IO.println <| " Running spec '" + spec.name + "'..." + pair = run_spec spec + IO.println <| " Finished spec '" + spec.name + "'." 
+ spec_res = pair.second + time_taken = pair.first + Test_Result.Impl group.name spec.name spec_res time_taken + # Invoke the teardown of the group + group.teardown Nothing + test_results ## PRIVATE From 75842f0a98aee688d77579c24f27bc8d374e0448 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 11 Jan 2024 15:52:19 +0100 Subject: [PATCH 23/93] Copy Problems from Test into Test_New --- .../Test_New/0.0.0-dev/src/Problems.enso | 135 ++++++++++++++++++ 1 file changed, 135 insertions(+) create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Problems.enso diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Problems.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Problems.enso new file mode 100644 index 000000000000..b99874586d47 --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Problems.enso @@ -0,0 +1,135 @@ +from Standard.Base import all + +from project import Test +from project.Extensions import all + +## Returns values of warnings attached to the value. +get_attached_warnings v = + Warning.get_all v . map .value + +## UNSTABLE + Tests how a specific operation behaves depending on the requested + `Problem_Behavior`. + + Arguments: + - action: The action to execute. It takes a `Problem_Behavior` which + specifies whether it should ignore problems, report them as warnings or + raise a dataflow error on the first encountered problem. + - expected_problems: a list of expected problems, in the order that they are + expected to be reported. It should not be empty. The problems are assumed + to be Atoms. + - result_checker: A function which should verify that the result generated by + the action is correct. It does not return anything, instead it should use + the standard testing approach, like `x.should_equal y`. 
+test_problem_handling : (Problem_Behavior -> Any) -> Vector Any -> (Any -> Nothing) -> Nothing +test_problem_handling action expected_problems result_checker = + error_checker error_result = + first_problem = expected_problems.first + first_problem_type = Meta.type_of first_problem + error_result . should_fail_with first_problem_type frames_to_skip=3 + error_result.catch . should_equal first_problem frames_to_skip=3 + warnings_checker warnings = + ## TODO [RW] we are not checking if there are no duplicate warnings, because the warnings are in fact duplicated - we should figure out how to handle that and then possibly modify the test + Test.with_clue "The warnings were "+warnings.to_text+'.\n' <| + warnings . should_contain_the_same_elements_as expected_problems frames_to_skip=5 + test_advanced_problem_handling action error_checker warnings_checker result_checker frames_to_skip=1 + +## UNSTABLE + Tests how a specific operation behaves depending on the requested + `Problem_Behavior`. A variant that allows more customization over how + expected problems are checked. + + Arguments: + - action: The action to execute. It takes a `Problem_Behavior` which + specifies whether it should ignore problems, report them as warnings or + raise a dataflow error on the first encountered problem. + - error_checker: A function which should verify that the returned error is as + expected. + - warnings_checker: A function which should verify that the returned warnings + are as expected. + - result_checker: A function which should verify that the result generated by + the action is correct. It does not return anything, instead it should use + the standard testing approach, like `x.should_equal y`. 
+test_advanced_problem_handling : (Problem_Behavior -> Any) -> (Any -> Nothing) -> (Vector Any -> Nothing) -> (Any -> Nothing) -> Integer -> Nothing +test_advanced_problem_handling action error_checker warnings_checker result_checker frames_to_skip=0 = + # First, we check the action ignoring any warnings. + result_ignoring = action Problem_Behavior.Ignore + result_checker result_ignoring + get_attached_warnings result_ignoring . should_equal [] frames_to_skip=frames_to_skip+1 + + # Then, we check the fail-on-first-error mode. + error_result = action Problem_Behavior.Report_Error + error_checker error_result + + # Lastly, we check the report warnings mode and ensure that both the result is correct and the warnings are as expected. + result_warning = action Problem_Behavior.Report_Warning + result_checker result_warning + warnings_checker (get_attached_warnings result_warning) + +## UNSTABLE + Checks if the provided value does not have any attached problems. +assume_no_problems result = + loc = Meta.get_source_location 1 + if result.is_error then + Test.fail "Expected the result to not be an error, but a dataflow error has been matched: "+result.catch.to_display_text+" (at "+loc+")." + warnings = get_attached_warnings result + if warnings.not_empty then + Test.fail "Expected the result to not contain any warnings, but it did: "+warnings.to_text+" (at "+loc+")." + +## UNSTABLE + Checks if the provided value has a specific warning attached. + It allows other warnings to be present also. + + Arguments: + - expected_warning: The expected warning. It can either by a warning type or + a concrete value. + - result: The value to check. +expect_warning : Any -> Any -> Nothing +expect_warning expected_warning result = + loc = Meta.get_source_location 1 + if result.is_error then + Test.fail "Expected a warning "+expected_warning.to_text+", but a dataflow error has been matched: "+result.catch.to_display_text+" (at "+loc+")." 
+ warnings = get_attached_warnings result + found = warnings.find if_missing=Nothing x-> + (x == expected_warning) || (x.is_a expected_warning) + found.if_nothing <| + Test.fail "Expected the result to contain a warning: "+expected_warning.to_text+", but it did not. The warnings were "+warnings.short_display_text+' (at '+loc+').' + +## UNSTABLE + Checks if the provided value has a specific warning attached and if there are + no other warnings. + + As a utility, it also returns the found warning. + + Arguments: + - expected_warning: The expected warning. It can either by a warning type or + a concrete value. + - result: The value to check. +expect_only_warning : Any -> Any -> Any +expect_only_warning expected_warning result = + loc = Meta.get_source_location 1 + if result.is_error then + Test.fail "Expected only warning "+expected_warning.to_text+", but a dataflow error has been matched: "+result.catch.to_display_text+" (at "+loc+")." + warnings = get_attached_warnings result + is_expected x = + (x == expected_warning) || (x.is_a expected_warning) + found = warnings.find if_missing=Nothing is_expected + if found.is_nothing then + Test.fail "Expected the result to contain a warning: "+expected_warning.to_text+", but it did not. The warnings were "+warnings.short_display_text+' (at '+loc+').' + invalid = warnings.filter x-> is_expected x . not + if invalid.not_empty then + Test.fail "Expected the result to contain only the warning: "+found.to_text+", but it also contained: "+invalid.to_text+' (at '+loc+').' + found + + +## UNSTABLE + Checks if the provided value does _not_ have a warning of the specified type. + + It allows other warnings to be present also. 
+not_expect_warning : Any -> Any -> Nothing +not_expect_warning expected_warning_type result = + warnings = get_attached_warnings result + found = warnings.find if_missing=Nothing x-> x.is_a expected_warning_type + if found.is_nothing.not then + loc = Meta.get_source_location 3 + Test.fail 'The result contained a warning it was not supposed to: '+found.to_text+' (at '+loc+').' From 61a8acca0093951a950565c8c8409606048d93c8 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 12 Jan 2024 13:59:12 +0100 Subject: [PATCH 24/93] Removed the bugged assertion Should be fixed by #8742 --- test/Table_Tests/src/Database/SQLite_Spec_New.enso | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/test/Table_Tests/src/Database/SQLite_Spec_New.enso b/test/Table_Tests/src/Database/SQLite_Spec_New.enso index 8c52b31a27ba..fb17cf2a59cd 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec_New.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec_New.enso @@ -97,9 +97,7 @@ type File_Connection transient_dir = enso_project.data / "transient" assert transient_dir.exists ("There should be .gitignore file in the " + transient_dir.path + " directory") file = transient_dir / "sqlite_test.db" - IO.println <| " SQLite_Spec_New.File_Connection.create_file: file.exists = " + file.exists.to_text - if file.exists.not then Panic.throw "Assertion error" - assert file.exists.not + if file.exists then Panic.throw "Assertion error: File should not exist" connection = Database.connect (SQLite file) connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' connection.close From 99ce8497a7506c2a6c576a7105f0bf612675a52f Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 12 Jan 2024 14:20:16 +0100 Subject: [PATCH 25/93] Run only the failing test --- test/Table_Tests/src/Database/SQLite_Spec_New.enso | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/Table_Tests/src/Database/SQLite_Spec_New.enso 
b/test/Table_Tests/src/Database/SQLite_Spec_New.enso index fb17cf2a59cd..44e91a381ba5 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec_New.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec_New.enso @@ -140,6 +140,6 @@ main = IO.println <| "==============" suite.print_all IO.println <| "==============" - group_filter = "Connection.query" + group_filter = Regex.compile "\[SQLite File\] Connection.query" spec_filter = "should allow to access a Table by name" suite.run_with_filter group_filter spec_filter From 172eaf98019c4a2d85870d55160cb041d5ead084 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 12 Jan 2024 14:20:31 +0100 Subject: [PATCH 26/93] Add Common_Spec_New --- .../src/Database/Common/Common_Spec_New.enso | 461 ++++++++++++++++++ 1 file changed, 461 insertions(+) create mode 100644 test/Table_Tests/src/Database/Common/Common_Spec_New.enso diff --git a/test/Table_Tests/src/Database/Common/Common_Spec_New.enso b/test/Table_Tests/src/Database/Common/Common_Spec_New.enso new file mode 100644 index 000000000000..0e567859f034 --- /dev/null +++ b/test/Table_Tests/src/Database/Common/Common_Spec_New.enso @@ -0,0 +1,461 @@ +from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + +from Standard.Table import Table, Sort_Column +from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all hiding First, Last +from Standard.Table.Errors import all + +from Standard.Database import all +from Standard.Database.Errors import all + +from Standard.Test_New import all +import Standard.Test_New.Problems +import Standard.Test_New.Suite.Suite_Builder + +import project.Database.Common.Default_Ordering_Spec +import project.Database.Common.Names_Length_Limits_Spec + +import project.Util +import project.Database.Helpers.Name_Generator +import project.Database.Common.Default_Ordering_Spec_New + + +upload connection prefix data temporary=True = + name = Name_Generator.random_name prefix + table = data.select_into_database_table 
connection name temporary=temporary primary_key=Nothing + IO.println <| " upload: Created table with name " + name + table + + +drop_table connection name = + IO.println <| " drop_table: Dropping table with name " + name + Panic.catch Any (connection.drop_table name) caught_panic-> + IO.println <| "Failed to drop table: " + name + " because of: " + caught_panic.payload.to_display_text + + +type Basic_Data + Impl ~connection ~t1 ~t2 ~t4 ~big_table big_size + + create connection_provider = + big_size = 1000 + Basic_Data.Impl (Basic_Data.create_connection connection_provider) (Basic_Data.create_t1 connection_provider) (Basic_Data.create_t2 connection_provider) (Basic_Data.create_t4 connection_provider) (Basic_Data.create_big_table connection_provider big_size) big_size + + create_connection connection_provider = + connection_provider.connection + + create_t1 connection_provider = + t1 = upload connection_provider.connection "T1" (Table.new [["a", [1, 4]], ["b", [2, 5]], ["c", [3, 6]]]) + t1 + + create_t2 connection_provider = + t2 = upload connection_provider.connection "T2" (Table.new [["d", [100, 200]]]) + t2 + + create_t4 connection_provider = + t4 = upload connection_provider.connection "T4" <| + Table.new [["a", [0, 1, Nothing, 42, Nothing]], ["b", [True, Nothing, True, False, Nothing]], ["c", ["", "foo", "bar", Nothing, Nothing]]] + t4 + + create_big_table connection_provider size = + big = Table.new [["a", Vector.new size ix->ix], ["b", Vector.new size ix-> ix * 3.1415926], ["c", Vector.new size ix-> ix.to_text]] + big_table = upload connection_provider.connection "Big" big + big_table + + teardown self = + IO.println <| " Common_Spec_New.Basic_Data.teardown" + drop_table self.connection self.t1.name + drop_table self.connection self.t2.name + drop_table self.connection self.t4.name + drop_table self.connection self.big_table.name + + +type Sorting_Data + Impl ~connection ~df ints reals bools texts ~t8 + + create connection_provider = + ints = [1, 2, 3, 4, 5] + 
reals = [1.3, 4.6, 3.2, 5.2, 1.6] + bools = [False, False, True, True, False] + texts = ["foo", "foo", "bar", "baz", "spam"] + Sorting_Data.Impl connection_provider.connection (Sorting_Data.create_df connection_provider) ints reals bools texts (Sorting_Data.create_t8 ints reals bools texts) + + create_df connection_provider = + df = upload connection_provider.connection "clothes" <| + Table.new [["id", [1,2,3,4,5,6]], ["name", ["shoes","trousers","dress","skirt","blouse","t-shirt"]], ["quantity", [20,10,20,10,30,30]], ["rating", [3.0,Nothing,7.3,3.0,2.2,Nothing]], ["price", [37.2,42.1,64.1,87.4,13.5,64.2]]] + df + + create_t8 connection_provider ints reals bools texts = + t8 = upload connection_provider.connection "T8" <| + Table.new [["ord", [0,3,2,4,1]], ["ints", ints], ["reals", reals], ["bools", bools], ["texts", texts]] + t8 + + teardown self = + IO.println <| " Common_Spec_New.Sorting_Data.teardown" + drop_table self.connection self.df.name + drop_table self.connection self.t8.name + + +type Aggregation_Data + Impl ~connection ~t9 + + create connection_provider = + Aggregation_Data.Impl connection_provider.connection (Aggregation_Data.create_t9 connection_provider) + + create_t9 connection_provider = + builders = [Vector.new_builder,Vector.new_builder,Vector.new_builder] + insert v = + builders.zip v .append + insert ["foo", 0.4, 50] + insert ["foo", 0.2, 10] + insert ["foo", 0.4, 30] + insert ["bar", 3.5, 20] + insert ["foo", Nothing, 20] + insert ["baz", 6.7, 40] + insert ["foo", Nothing, 10] + insert ["bar", 97, 60] + insert ["quux", Nothing, 70] + insert ["zzzz", Nothing, Nothing] + insert ["zzzz", 1, 1] + insert ["zzzz", 0, 0] + insert ["zzzz", 0, 1] + insert ["zzzz", 1, 0] + insert ["zzzz", 0, 0] + insert ["zzzz", Nothing, Nothing] + t9 = upload connection_provider.connection "T9" <| + Table.new [["name", builders.at 0 . to_vector], ["price", builders.at 1 . to_vector], ["quantity", builders.at 2 . 
to_vector]] + t9 + + teardown self = + IO.println <| " Common_Spec_New.Aggregation_Data.teardown" + drop_table self.connection self.t9.name + + +type Missing_Values_Data + Impl ~connection ~t4 + + create connection_provider + Missing_Values_Data.Impl (Missing_Values_Data.create_connection connection_provider) (Missing_Values_Data.create_t4 connection_provider) + + create_connection connection_provider = + connection_provider.connection + + create_t4 connection_provider = + t4 = upload connection_provider.connection "T4" <| + Table.new [["a", [0, 1, Nothing, 42, Nothing]], ["b", [True, Nothing, True, False, Nothing]], ["c", ["", "foo", "bar", Nothing, Nothing]]] + t4 + + teardown self = + IO.println <| " Common_Spec_New.Missing_Values_Data.teardown" + drop_table self.connection self.t4.name + + + +add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (connection_provider : Any) = + + Default_Ordering_Spec_New.add_default_ordering_specs suite_builder prefix connection_provider + # TODO: + # Names_Length_Limits_Spec_New.add_specs suite_builder prefix connection_provider + + suite_builder.group (prefix + "Basic Table Access") group_builder-> + data = Basic_Data.create connection_provider + + group_builder.teardown <| + data.teardown + connection_provider.teardown + + group_builder.specify "should allow to materialize tables and columns into local memory" <| + df = data.t1.read + a = data.t1.at 'a' . read + df.at 'a' . to_vector . should_equal [1, 4] + a.to_vector . should_equal [1, 4] + + group_builder.specify "should allow to materialize columns directly into a Vector" <| + v = data.t1.at 'a' . to_vector + v . should_equal [1, 4] + + group_builder.specify "should handle bigger result sets" <| + data.big_table.read.row_count . should_equal data.big_size + + group_builder.specify "should not allow to set a column coming from another table" <| + data.t1.set (data.t2.at "d") . 
should_fail_with Integrity_Error + + + suite_builder.group (prefix + "Connection.query") group_builder-> + data = Basic_Data.create connection_provider + + group_builder.teardown <| + data.teardown + connection_provider.teardown + + group_builder.specify "should allow to access a Table by name" <| + name = data.t1.name + IO.println <| " Querying table with name " + name + tmp = data.connection.query (SQL_Query.Table_Name name) + tmp.read . should_equal data.t1.read + + group_builder.specify "should allow to access a Table by an SQL query" <| + name = data.t1.name + t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3')) + m2 = t2.read + m2.column_names . should_equal ["a", "b"] + m2.at "a" . to_vector . should_equal [4] + m2.at "b" . to_vector . should_equal [5] + m2.at "c" . should_fail_with No_Such_Column + + group_builder.specify "should allow to access a Table by an SQL query" <| + name = data.t1.name + t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3')) + m2 = t2.read + m2.column_names . should_equal ["a", "b"] + m2.at "a" . to_vector . should_equal [4] + m2.at "b" . to_vector . should_equal [5] + m2.at "c" . should_fail_with No_Such_Column + + t3 = data.connection.query (SQL_Query.Raw_SQL ('SELECT 1+2')) + m3 = t3.read + m3.at 0 . to_vector . should_equal [3] + + group_builder.specify "should use labels for column names" <| + name = data.t1.name + t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a AS c, b FROM "' + name + '" WHERE a >= 3')) + m2 = t2.read + m2.column_names . should_equal ["c", "b"] + m2.at "c" . to_vector . should_equal [4] + m2.at "b" . to_vector . should_equal [5] + m2.at "a" . should_fail_with No_Such_Column + + group_builder.specify "should allow a shorthand trying to deduce if the query is a table name or an SQL query" <| + name = data.t1.name + t2 = data.connection.query name + t2.read . 
should_equal data.t1.read + + t3 = data.connection.query ('SELECT a, b FROM "' + name + '" WHERE a >= 3') + m3 = t3.read + m3.column_names . should_equal ["a", "b"] + m3.at "a" . to_vector . should_equal [4] + + t5 = data.connection.query data.t4.name + m5 = t5.read + m5.column_names . should_equal ["X", "Y"] + m5.at "X" . to_vector . should_equal ["a", "B"] + m5.at "Y" . to_vector . should_equal [2, 5] + + group_builder.specify "should report an error depending on input SQL_Query type" <| + r2 = data.connection.query (SQL_Query.Table_Name "NONEXISTENT-TABLE") + r2.should_fail_with Table_Not_Found + r2.catch.name . should_equal "NONEXISTENT-TABLE" + r2.catch.to_display_text . should_equal "Table NONEXISTENT-TABLE was not found in the database." + + r3 = data.connection.query (SQL_Query.Raw_SQL "MALFORMED-QUERY") + r3.should_fail_with SQL_Error + + group_builder.specify "should not allow interpolations in raw user-built queries" <| + r = data.connection.query (SQL_Query.Raw_SQL "SELECT 1 + ?") + r.should_fail_with Illegal_Argument + + group_builder.specify "should make a best-effort attempt at returning a reasonable error for the short-hand" <| + r2 = data.connection.query "NONEXISTENT-TABLE" + r2.should_fail_with Table_Not_Found + r2.catch.name . should_equal "NONEXISTENT-TABLE" + r2.catch.treated_as_query . should_be_true + error_text = r2.catch.to_display_text + Test.with_clue "r2.catch.to_display_text = "+error_text <| + error_text.starts_with "The name NONEXISTENT-TABLE was treated as a query, but the query failed" . should_be_true + error_text.ends_with "wrap it in `SQL_Query.Table_Name`." . should_be_true + + r3 = data.connection.query "SELECT * FROM ........" 
+ r3.should_fail_with SQL_Error + + group_builder.specify "will fail if the table is modified and a column gets removed" <| + name = Name_Generator.random_name "removing-column" + Problems.assume_no_problems <| + (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True + + t1 = data.connection.query name + m1 = t1.read + Problems.assume_no_problems m1 + m1.at "a" . to_vector . should_equal [1, 2, 3] + m1.at "b" . to_vector . should_equal [4, 5, 6] + + Problems.assume_no_problems <| data.connection.drop_table name + Problems.assume_no_problems <| + (Table.new [["a", [100, 200]]]).select_into_database_table data.connection name temporary=True + + # Reading a column that was kept will work OK + t1.at "a" . to_vector . should_equal [100, 200] + + # But reading the whole table will fail on the missing column: + m2 = t1.read + m2.should_fail_with SQL_Error + + group_builder.specify "will not fail if the table is modified and a column gets added" <| + name = Name_Generator.random_name "adding-column" + Problems.assume_no_problems <| + (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True + + t1 = data.connection.query name + m1 = t1.read + Problems.assume_no_problems m1 + m1.at "a" . to_vector . should_equal [1, 2, 3] + m1.at "b" . to_vector . should_equal [4, 5, 6] + + Problems.assume_no_problems <| data.connection.drop_table name + Problems.assume_no_problems <| + (Table.new [["a", [100, 200]], ["b", [300, 400]], ["c", [500, 600]]]).select_into_database_table data.connection name temporary=True + + m2 = t1.read + Problems.assume_no_problems m2 + m2.column_names . should_equal ["a", "b"] + m2.at "a" . to_vector . should_equal [100, 200] + m2.at "b" . to_vector . should_equal [300, 400] + + t1.at "c" . should_fail_with No_Such_Column + + t2 = data.connection.query name + t2.column_names . 
should_equal ["a", "b", "c"] + + + suite_builder.group (prefix + "Masking Tables") group_builder-> + data = Basic_Data.create connection_provider + + group_builder.teardown <| + data.teardown + connection_provider.teardown + + group_builder.specify "should allow to select rows from a table or column based on an expression" <| + t2 = data.t1.filter (data.t1.at "a" == 1) + df = t2.read + df.at "a" . to_vector . should_equal [1] + df.at "b" . to_vector . should_equal [2] + df.at "c" . to_vector . should_equal [3] + t2.at "a" . to_vector . should_equal [1] + t2.at "b" . to_vector . should_equal [2] + t2.at "c" . to_vector . should_equal [3] + + suite_builder.group (prefix + "Missing Values") group_builder-> + data = Basic_Data.create connection_provider + + group_builder.teardown <| + data.teardown + connection_provider.teardown + + group_builder.specify "fill_nothing should replace nulls" <| + data.t4.at 'a' . fill_nothing 10 . to_vector . should_equal [0, 1, 10, 42, 10] + data.t4.at 'b' . fill_nothing False . to_vector . should_equal [True, False, True, False, False] + data.t4.at 'c' . fill_nothing "NA" . to_vector . should_equal ["", "foo", "bar", "NA", "NA"] + + group_builder.specify "should correctly be counted" <| + data.t4.row_count . should_equal 5 + col = data.t4.at 'a' + col.length . should_equal 5 + col.count . should_equal 3 + col.count_nothing . should_equal 2 + + + suite_builder.group (prefix + "Sorting") group_builder-> + data = Sorting_Data.create connection_provider + + group_builder.teardown <| + data.teardown + connection_provider.teardown + + group_builder.specify "should allow sorting by a single column name" <| + r_1 = data.df.order_by ([Sort_Column.Name 'quantity']) + r_1.at 'id' . to_vector . should_equal [2,4,1,3,5,6] + + r_3 = data.df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending]) + r_3.at 'id' . to_vector . 
should_equal [3,1,4,5,2,6] + + group_builder.specify 'should allow sorting by multiple column names' <| + r_1 = data.df.order_by ([Sort_Column.Name 'quantity', Sort_Column.Name 'rating']) + r_1.at 'id' . to_vector . should_equal [2,4,1,3,6,5] + + r_2 = data.df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending, Sort_Column.Name 'quantity' Sort_Direction.Descending]) + r_2.at 'id' . to_vector . should_equal [3,1,4,5,6,2] + + + group_builder.specify 'should allow sorting with specific by-column rules' <| + r_1 = data.df.order_by ([Sort_Column.Name "quantity", Sort_Column.Name "price" Sort_Direction.Descending]) + r_1.at 'id' . to_vector . should_equal [4,2,3,1,6,5] + + group_builder.specify 'should correctly reorder all kinds of columns and leave the original columns untouched' <| + r = data.t8.order_by ([Sort_Column.Name 'ord']) + + r.at 'ints' . to_vector . should_equal [1, 5, 3, 2, 4] + data.t8.at 'ints' . to_vector . should_equal data.ints + + r.at 'reals' . to_vector . should_equal [1.3, 1.6, 3.2, 4.6, 5.2] + data.t8.at 'reals' . to_vector . should_equal data.reals + + r.at 'bools' . to_vector . should_equal [False, False, True, False, True] + data.t8.at 'bools' . to_vector . should_equal data.bools + + r.at 'texts' . to_vector . should_equal ['foo', 'spam', 'bar', 'foo', 'baz'] + data.t8.at 'texts' . to_vector . 
should_equal data.texts + + group_builder.specify 'should sort columns with specified ordering and missing placement' <| + c = data.df.at 'rating' + + r_1 = c.sort + r_1.to_vector.should_equal [Nothing, Nothing, 2.2, 3.0, 3.0, 7.3] + + r_2 = c.sort Sort_Direction.Descending + r_2.to_vector.should_equal [7.3, 3.0, 3.0, 2.2, Nothing, Nothing] + + + suite_builder.group prefix+"Aggregation" group_builder-> + data = Aggregation_Data.create connection_provider + + group_builder.teardown <| + data.teardown + connection_provider.teardown + + ## A helper which makes sure that the groups in a materialized + (InMemory) table are ordered according to a specified column or list + of columns. + determinize_by order_column table = + table.order_by ([Sort_Column.Name order_column]) + + group_builder.specify "should allow counting group sizes and elements" <| + ## Names set to lower case to avoid issue with Redshift where columns are + returned in lower case. + aggregates = [Count "count", Count_Not_Nothing "price" "count not nothing price", Count_Nothing "price" "count nothing price"] + + t1 = determinize_by "name" (data.t9.aggregate ([Group_By "name"] + aggregates) . read) + t1.at "name" . to_vector . should_equal ["bar", "baz", "foo", "quux", "zzzz"] + t1.at "count" . to_vector . should_equal [2, 1, 5, 1, 7] + t1.at "count not nothing price" . to_vector . should_equal [2, 1, 3, 0, 5] + t1.at "count nothing price" . to_vector . should_equal [0, 0, 2, 1, 2] + + t2 = data.t9.aggregate aggregates . read + t2.at "count" . to_vector . should_equal [16] + t2.at "count not nothing price" . to_vector . should_equal [11] + t2.at "count nothing price" . to_vector . should_equal [5] + + group_builder.specify "should allow simple arithmetic aggregations" <| + ## Names set to lower case to avoid issue with Redshift where columns are + returned in lower case. 
+ aggregates = [Sum "price" "sum price", Sum "quantity" "sum quantity", Average "price" "avg price"] + ## TODO can check the datatypes + + t1 = determinize_by "name" (data.t9.aggregate ([Group_By "name"] + aggregates) . read) + t1.at "name" . to_vector . should_equal ["bar", "baz", "foo", "quux", "zzzz"] + t1.at "sum price" . to_vector . should_equal [100.5, 6.7, 1, Nothing, 2] + t1.at "sum quantity" . to_vector . should_equal [80, 40, 120, 70, 2] + t1.at "avg price" . to_vector . should_equal [50.25, 6.7, (1/3), Nothing, (2/5)] + + t2 = data.t9.aggregate aggregates . read + t2.at "sum price" . to_vector . should_equal [110.2] + t2.at "sum quantity" . to_vector . should_equal [312] + t2.at "avg price" . to_vector . should_equal [(110.2 / 11)] + + suite_builder.group prefix+"Table.filter" group_builder-> + data = Basic_Data.create connection_provider + + group_builder.teardown <| + data.teardown + connection_provider.teardown + + group_builder.specify "report error when trying to filter by a custom predicate" <| + data.t1.filter "a" (x -> x % 2 == 0) . should_fail_with Unsupported_Database_Operation + From a9cf3ee775f6a99b9861de30cbe467186f9d478e Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 16 Jan 2024 11:32:30 +0100 Subject: [PATCH 27/93] Integrate simpler lazy field initialization. 
From https://github.com/enso-org/enso/pull/8622#issuecomment-1893168547 --- .../src/Database/Common/Common_Spec_New.enso | 160 +++++++++--------- .../Common/Default_Ordering_Spec_New.enso | 40 ++--- .../src/Database/SQLite_Spec_New.enso | 63 ++++--- 3 files changed, 125 insertions(+), 138 deletions(-) diff --git a/test/Table_Tests/src/Database/Common/Common_Spec_New.enso b/test/Table_Tests/src/Database/Common/Common_Spec_New.enso index 0e567859f034..287b1fe04199 100644 --- a/test/Table_Tests/src/Database/Common/Common_Spec_New.enso +++ b/test/Table_Tests/src/Database/Common/Common_Spec_New.enso @@ -34,74 +34,82 @@ drop_table connection name = type Basic_Data - Impl ~connection ~t1 ~t2 ~t4 ~big_table big_size + Value ~data - create connection_provider = - big_size = 1000 - Basic_Data.Impl (Basic_Data.create_connection connection_provider) (Basic_Data.create_t1 connection_provider) (Basic_Data.create_t2 connection_provider) (Basic_Data.create_t4 connection_provider) (Basic_Data.create_big_table connection_provider big_size) big_size - - create_connection connection_provider = - connection_provider.connection - - create_t1 connection_provider = - t1 = upload connection_provider.connection "T1" (Table.new [["a", [1, 4]], ["b", [2, 5]], ["c", [3, 6]]]) - t1 + connection self = self.data.at 0 + t1 self = self.data.at 1 + t2 self = self.data.at 2 + t4 self = self.data.at 3 + big_table self = self.data.at 4 + big_size self = self.data.at 5 - create_t2 connection_provider = - t2 = upload connection_provider.connection "T2" (Table.new [["d", [100, 200]]]) - t2 - - create_t4 connection_provider = - t4 = upload connection_provider.connection "T4" <| - Table.new [["a", [0, 1, Nothing, 42, Nothing]], ["b", [True, Nothing, True, False, Nothing]], ["c", ["", "foo", "bar", Nothing, Nothing]]] - t4 + setup create_connection_fn = Basic_Data.Value <| + IO.println <| " Common_Spec_New.Basic_Data.setup" + big_size = 1000 + connection = create_connection_fn Nothing + t1 = upload 
connection "T1" (Table.new [["a", [1, 4]], ["b", [2, 5]], ["c", [3, 6]]]) + t2 = upload connection "T2" (Table.new [["d", [100, 200]]]) + ## The effective name may get a deduplication prefix/suffix so we + need to use `t4.name` instead of the literal string. Still it + will contain the weird characters we wanted. + + Also, the table name cannot be too long as Postgres truncates at + 63 chars (and we append 37 chars of uniqueness suffix) and the + test logic will break then. + t4 = upload connection 'aSELECT "A",\'B\' FROM t;--' (Table.new [["X", ["a", "B"]], ["Y", [2, 5]]]) + big = Table.new [["a", Vector.new big_size ix->ix], ["b", Vector.new big_size ix-> ix * 3.1415926], ["c", Vector.new big_size ix-> ix.to_text]] + big_table = upload connection "Big" big + [connection, t1, t2, t4, big_table, big_size] - create_big_table connection_provider size = - big = Table.new [["a", Vector.new size ix->ix], ["b", Vector.new size ix-> ix * 3.1415926], ["c", Vector.new size ix-> ix.to_text]] - big_table = upload connection_provider.connection "Big" big - big_table - teardown self = - IO.println <| " Common_Spec_New.Basic_Data.teardown" - drop_table self.connection self.t1.name - drop_table self.connection self.t2.name - drop_table self.connection self.t4.name - drop_table self.connection self.big_table.name + data = self + IO.println <| " Common_Spec_New.Basic_Data.teardown" + drop_table data.connection data.t1.name + drop_table data.connection data.t2.name + drop_table data.connection data.t4.name + drop_table data.connection data.big_table.name type Sorting_Data - Impl ~connection ~df ints reals bools texts ~t8 - - create connection_provider = + Value ~data + + connection self = self.data.at 0 + df self = self.data.at 1 + ints self = self.data.at 2 + reals self = self.data.at 3 + bools self = self.data.at 4 + texts self = self.data.at 5 + t8 self = self.data.at 6 + + setup create_connection_fn = Sorting_Data.Value <| + IO.println <| " Common_Spec_New.Sorting_Data.setup" + 
connection = create_connection_fn Nothing ints = [1, 2, 3, 4, 5] reals = [1.3, 4.6, 3.2, 5.2, 1.6] bools = [False, False, True, True, False] texts = ["foo", "foo", "bar", "baz", "spam"] - Sorting_Data.Impl connection_provider.connection (Sorting_Data.create_df connection_provider) ints reals bools texts (Sorting_Data.create_t8 ints reals bools texts) - - create_df connection_provider = - df = upload connection_provider.connection "clothes" <| + df = upload connection "clothes" <| Table.new [["id", [1,2,3,4,5,6]], ["name", ["shoes","trousers","dress","skirt","blouse","t-shirt"]], ["quantity", [20,10,20,10,30,30]], ["rating", [3.0,Nothing,7.3,3.0,2.2,Nothing]], ["price", [37.2,42.1,64.1,87.4,13.5,64.2]]] - df - - create_t8 connection_provider ints reals bools texts = - t8 = upload connection_provider.connection "T8" <| + t8 = upload connection "T8" <| Table.new [["ord", [0,3,2,4,1]], ["ints", ints], ["reals", reals], ["bools", bools], ["texts", texts]] - t8 - + [connection, df, ints, reals, bools, texts, t8] + teardown self = - IO.println <| " Common_Spec_New.Sorting_Data.teardown" + IO.println <| " Common_Spec_New.Sorting_Data.teardown" drop_table self.connection self.df.name drop_table self.connection self.t8.name + type Aggregation_Data - Impl ~connection ~t9 + Value ~data - create connection_provider = - Aggregation_Data.Impl connection_provider.connection (Aggregation_Data.create_t9 connection_provider) + connection self = self.data.first + t9 self = self.data.second - create_t9 connection_provider = + setup create_connection_fn = Aggregation_Data.Value <| + IO.println <| " Common_Spec_New.Aggregation_Data.setup" + connection = create_connection_fn Nothing builders = [Vector.new_builder,Vector.new_builder,Vector.new_builder] insert v = builders.zip v .append @@ -121,47 +129,51 @@ type Aggregation_Data insert ["zzzz", 1, 0] insert ["zzzz", 0, 0] insert ["zzzz", Nothing, Nothing] - t9 = upload connection_provider.connection "T9" <| + t9 = upload connection "T9" <| 
Table.new [["name", builders.at 0 . to_vector], ["price", builders.at 1 . to_vector], ["quantity", builders.at 2 . to_vector]] - t9 + [connection, t9] teardown self = - IO.println <| " Common_Spec_New.Aggregation_Data.teardown" + IO.println <| " Common_Spec_New.Aggregation_Data.teardown" drop_table self.connection self.t9.name + type Missing_Values_Data - Impl ~connection ~t4 + Value ~data - create connection_provider - Missing_Values_Data.Impl (Missing_Values_Data.create_connection connection_provider) (Missing_Values_Data.create_t4 connection_provider) + connection self = self.data.first + t4 self = self.data.second - create_connection connection_provider = - connection_provider.connection - - create_t4 connection_provider = - t4 = upload connection_provider.connection "T4" <| + setup create_connection_fn = Missing_Values_Data.Value <| + IO.println <| " Common_Spec_New.Missing_Values_Data.setup" + connection = create_connection_fn Nothing + t4 = upload connection "T4" <| Table.new [["a", [0, 1, Nothing, 42, Nothing]], ["b", [True, Nothing, True, False, Nothing]], ["c", ["", "foo", "bar", Nothing, Nothing]]] - t4 + [connection, t4] teardown self = - IO.println <| " Common_Spec_New.Missing_Values_Data.teardown" + IO.println <| " Common_Spec_New.Missing_Values_Data.teardown" drop_table self.connection self.t4.name -add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (connection_provider : Any) = - Default_Ordering_Spec_New.add_default_ordering_specs suite_builder prefix connection_provider +## Docs + + Arguments: + - create_connection_fn: A function that creates an appropriate Connection to the database backend. 
+add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : (Nothing -> Any)) = + + Default_Ordering_Spec_New.add_default_ordering_specs suite_builder prefix create_connection_fn # TODO: - # Names_Length_Limits_Spec_New.add_specs suite_builder prefix connection_provider + # Names_Length_Limits_Spec_New.add_specs suite_builder prefix create_connection_fn suite_builder.group (prefix + "Basic Table Access") group_builder-> - data = Basic_Data.create connection_provider + data = Basic_Data.setup create_connection_fn group_builder.teardown <| data.teardown - connection_provider.teardown group_builder.specify "should allow to materialize tables and columns into local memory" <| df = data.t1.read @@ -181,11 +193,10 @@ add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (connection_pro suite_builder.group (prefix + "Connection.query") group_builder-> - data = Basic_Data.create connection_provider + data = Basic_Data.setup create_connection_fn group_builder.teardown <| data.teardown - connection_provider.teardown group_builder.specify "should allow to access a Table by name" <| name = data.t1.name @@ -316,11 +327,10 @@ add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (connection_pro suite_builder.group (prefix + "Masking Tables") group_builder-> - data = Basic_Data.create connection_provider + data = Basic_Data.setup create_connection_fn group_builder.teardown <| data.teardown - connection_provider.teardown group_builder.specify "should allow to select rows from a table or column based on an expression" <| t2 = data.t1.filter (data.t1.at "a" == 1) @@ -333,11 +343,10 @@ add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (connection_pro t2.at "c" . to_vector . 
should_equal [3] suite_builder.group (prefix + "Missing Values") group_builder-> - data = Basic_Data.create connection_provider + data = Missing_Values_Data.setup create_connection_fn group_builder.teardown <| data.teardown - connection_provider.teardown group_builder.specify "fill_nothing should replace nulls" <| data.t4.at 'a' . fill_nothing 10 . to_vector . should_equal [0, 1, 10, 42, 10] @@ -353,11 +362,10 @@ add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (connection_pro suite_builder.group (prefix + "Sorting") group_builder-> - data = Sorting_Data.create connection_provider + data = Sorting_Data.setup create_connection_fn group_builder.teardown <| data.teardown - connection_provider.teardown group_builder.specify "should allow sorting by a single column name" <| r_1 = data.df.order_by ([Sort_Column.Name 'quantity']) @@ -404,11 +412,10 @@ add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (connection_pro suite_builder.group prefix+"Aggregation" group_builder-> - data = Aggregation_Data.create connection_provider + data = Aggregation_Data.setup create_connection_fn group_builder.teardown <| data.teardown - connection_provider.teardown ## A helper which makes sure that the groups in a materialized (InMemory) table are ordered according to a specified column or list @@ -450,11 +457,10 @@ add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (connection_pro t2.at "avg price" . to_vector . should_equal [(110.2 / 11)] suite_builder.group prefix+"Table.filter" group_builder-> - data = Basic_Data.create connection_provider + data = Basic_Data.setup create_connection_fn group_builder.teardown <| data.teardown - connection_provider.teardown group_builder.specify "report error when trying to filter by a custom predicate" <| data.t1.filter "a" (x -> x % 2 == 0) . 
should_fail_with Unsupported_Database_Operation diff --git a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso index aa9d71f7d181..f5c972caed61 100644 --- a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso +++ b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso @@ -15,17 +15,18 @@ import project.Util import project.Database.Helpers.Name_Generator type Data - Impl ~db_table_without_key ~db_table_with_key + Value ~data - create connection_provider = - src_table = Table.new [["X", [1, 2, 3]], ["Y", [30, 20, 10]]] - Data.Impl (Data.create_db_table_without_key src_table connection_provider) (Data.create_db_table_with_key src_table connection_provider) - - create_db_table_without_key src_table connection_provider = - src_table.select_into_database_table connection_provider.connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=Nothing + db_table_without_key self = self.data.first + db_table_with_key self = self.data.second - create_db_table_with_key src_table connection_provider = - src_table.select_into_database_table connection_provider.connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=["X"] + setup create_connection_func = Data.Value <| + IO.println <| " Default_Ordering_Spec_New.setup" + connection = create_connection_func Nothing + src_table = Table.new [["X", [1, 2, 3]], ["Y", [30, 20, 10]]] + db_table_without_key = src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=Nothing + db_table_with_key = src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=["X"] + [db_table_without_key, db_table_with_key] ## Adds test specifications for default ordering to the given `suite_builder`. 
Adds it as @@ -33,27 +34,12 @@ type Data Arguments: - suite_builder: A Suite_Builder in which a new group will be created - - connection_provider: An atom with `connection` method that returns a database connection. - Preferably, this method should be lazy. It must contain `teardown` method as well. -add_default_ordering_specs (suite_builder : Suite_Builder) (prefix : Text) (connection_provider : Any) = - expected_methods_in_provider = ["connection", "teardown"] - provider_type = Meta.type_of connection_provider - is_correct_provider = expected_methods_in_provider.all expected_method-> - provider_methods = Meta.get_type_methods provider_type - provider_methods.contains expected_method - if is_correct_provider.not then - # Print the message to stderr to make sure that it is not abbreviated in the Panic message - msg = "connection_provider must contain all these methods: " + expected_methods_in_provider.to_text + ". Actual type is " + provider_type.to_text - IO.print_err msg - Panic.throw <| Illegal_Argument.Error "connection_provider" - + - create_connection_fn: A function that creates an appropriate Connection to the database backend. +add_default_ordering_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : Any) = group_name = prefix + "Table.default_ordering" suite_builder.group group_name group_builder-> - data = Data.create connection_provider - - group_builder.teardown <| - connection_provider.teardown + data = Data.setup create_connection_fn group_builder.specify "will return Nothing if no primary key is defined" <| data.db_table_without_key.default_ordering . 
should_equal Nothing diff --git a/test/Table_Tests/src/Database/SQLite_Spec_New.enso b/test/Table_Tests/src/Database/SQLite_Spec_New.enso index 44e91a381ba5..49cc9d184072 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec_New.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec_New.enso @@ -73,55 +73,50 @@ sqlite_spec suite_builder prefix connection_provider = Common_Spec_New.add_common_specs suite_builder prefix connection_provider -type Inmem_Connection - Impl ~connection +backing_file = + transient_dir = enso_project.data / "transient" + assert transient_dir.exists ("There should be .gitignore file in the " + transient_dir.path + " directory") + transient_dir / "sqlite_test.db" - create = - Inmem_Connection.Impl Inmem_Connection.create_connection - create_connection = - IO.println <| " SQLite_Spec_New.Inmem_Connection.create_connection" - Database.connect (SQLite In_Memory) +create_inmem_connection = + Database.connect (SQLite In_Memory) - teardown self = Nothing + +create_file_connection file = + connection = Database.connect (SQLite file) + connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' + connection type File_Connection - Impl ~file - - create = - File_Connection.Impl File_Connection.create_file - - create_file = - IO.println <| " SQLite_Spec_New.File_Connection.create_file" - transient_dir = enso_project.data / "transient" - assert transient_dir.exists ("There should be .gitignore file in the " + transient_dir.path + " directory") - file = transient_dir / "sqlite_test.db" - if file.exists then Panic.throw "Assertion error: File should not exist" - connection = Database.connect (SQLite file) - connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' - connection.close - assert file.exists - file - - connection self = - Database.connect (SQLite self.file) + Value ~file + + setup = File_Connection.Value <| + IO.println <| " 
SQLite_Spec_New.File_Connection.setup" + tmp_file = backing_file + con = create_file_connection backing_file + con.close + assert tmp_file.exists + tmp_file + teardown self = - IO.println <| " SQLite_Spec_New.File_Connection.teardown" + IO.println <| " SQLite_Spec_New.File_Connection.teardown" assert self.file.exists self.file.delete + suite = Test.build suite_builder-> in_file_prefix = "[SQLite File] " - sqlite_spec suite_builder in_file_prefix File_Connection.create + sqlite_spec suite_builder in_file_prefix (_ -> create_file_connection backing_file) in_memory_prefix = "[SQLite In-Memory] " - sqlite_spec suite_builder in_memory_prefix Inmem_Connection.create + sqlite_spec suite_builder in_memory_prefix (_ -> create_inmem_connection) suite_builder.group "SQLite_Format should allow connecting to SQLite files" group_builder-> - data = File_Connection.create + data = File_Connection.setup group_builder.teardown <| data.teardown @@ -129,7 +124,7 @@ suite = group_builder.specify "should recognise a SQLite database file" <| Auto_Detect.get_reading_format data.file . 
should_be_a SQLite_Format - group_builder.specify 'should not duplicate warnings' <| + group_builder.specify "should not duplicate warnings" <| c = Database.connect (SQLite In_Memory) t0 = Table.new [["X", ["a", "bc", "def"]]] t1 = t0.select_into_database_table c "Tabela" @@ -140,6 +135,6 @@ main = IO.println <| "==============" suite.print_all IO.println <| "==============" - group_filter = Regex.compile "\[SQLite File\] Connection.query" - spec_filter = "should allow to access a Table by name" + group_filter = Nothing + spec_filter = Nothing suite.run_with_filter group_filter spec_filter From 5b51c656f6448cc4608835ef3cf6c1f0c8f83262 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 16 Jan 2024 11:48:20 +0100 Subject: [PATCH 28/93] Replace Default_Ordering_Spec --- .../Common/Default_Ordering_Spec.enso | 54 +++++++++----- .../Common/Default_Ordering_Spec_New.enso | 74 ------------------- 2 files changed, 37 insertions(+), 91 deletions(-) delete mode 100644 test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso diff --git a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec.enso b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec.enso index a307b6976bee..f5c972caed61 100644 --- a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec.enso @@ -1,5 +1,6 @@ from Standard.Base import all import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +from Standard.Base.Runtime import assert from Standard.Table import Table, Sort_Column, Aggregate_Column from Standard.Table.Errors import all @@ -7,48 +8,67 @@ from Standard.Table.Errors import all from Standard.Database import all from Standard.Database.Errors import all -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all +import Standard.Test_New.Suite.Suite_Builder import project.Util import project.Database.Helpers.Name_Generator -spec prefix 
connection = - Test.group prefix+"Table.default_ordering" <| +type Data + Value ~data + + db_table_without_key self = self.data.first + db_table_with_key self = self.data.second + + setup create_connection_func = Data.Value <| + IO.println <| " Default_Ordering_Spec_New.setup" + connection = create_connection_func Nothing src_table = Table.new [["X", [1, 2, 3]], ["Y", [30, 20, 10]]] db_table_without_key = src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=Nothing db_table_with_key = src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=["X"] + [db_table_without_key, db_table_with_key] + + +## Adds test specifications for default ordering to the given `suite_builder`. Adds it as + group with the given `prefix` as its name prefix. + + Arguments: + - suite_builder: A Suite_Builder in which a new group will be created + - create_connection_fn: A function that creates an appropriate Connection to the database backend. +add_default_ordering_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : Any) = + group_name = prefix + "Table.default_ordering" + + suite_builder.group group_name group_builder-> + data = Data.setup create_connection_fn - Test.specify "will return Nothing if no primary key is defined" <| - db_table_without_key.default_ordering . should_equal Nothing + group_builder.specify "will return Nothing if no primary key is defined" <| + data.db_table_without_key.default_ordering . should_equal Nothing - Test.specify "will return the key for a table with a primary key" <| - v1 = db_table_with_key.default_ordering + group_builder.specify "will return the key for a table with a primary key" <| + v1 = data.db_table_with_key.default_ordering v1.length . should_equal 1 v1.first.expression.name . should_equal "X" v1.first.direction . 
should_equal Sort_Direction.Ascending - t2 = db_table_with_key.set "10 - [X]" "X" + t2 = data.db_table_with_key.set "10 - [X]" "X" v2 = t2.default_ordering v2.length . should_equal 1 v2.first.expression.name . should_equal "X" - Test.specify "will return Nothing for composite tables (join, aggregate)" - db_table_with_key.join db_table_with_key . default_ordering . should_equal Nothing - db_table_with_key.aggregate [Aggregate_Column.Group_By "X"] . default_ordering . should_equal Nothing + group_builder.specify "will return Nothing for composite tables (join, aggregate)" + data.db_table_with_key.join data.db_table_with_key . default_ordering . should_equal Nothing + data.db_table_with_key.aggregate [Aggregate_Column.Group_By "X"] . default_ordering . should_equal Nothing - Test.specify "will return the ordering determined by order_by" <| - v1 = db_table_with_key.order_by ["Y", Sort_Column.Name "X" Sort_Direction.Descending] . default_ordering + group_builder.specify "will return the ordering determined by order_by" <| + v1 = data.db_table_with_key.order_by ["Y", Sort_Column.Name "X" Sort_Direction.Descending] . default_ordering v1.length . should_equal 2 v1.first.expression.name . should_equal "Y" v1.first.direction . should_equal Sort_Direction.Ascending v1.second.expression.name . should_equal "X" v1.second.direction . should_equal Sort_Direction.Descending - v2 = db_table_without_key.order_by ["Y"] . default_ordering + v2 = data.db_table_without_key.order_by ["Y"] . default_ordering v2.length . should_equal 1 v2.first.expression.name . should_equal "Y" v2.first.direction . 
should_equal Sort_Direction.Ascending -main = Test_Suite.run_main <| - spec "[SQLite] " (Database.connect (SQLite In_Memory)) diff --git a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso deleted file mode 100644 index f5c972caed61..000000000000 --- a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec_New.enso +++ /dev/null @@ -1,74 +0,0 @@ -from Standard.Base import all -import Standard.Base.Errors.Illegal_Argument.Illegal_Argument -from Standard.Base.Runtime import assert - -from Standard.Table import Table, Sort_Column, Aggregate_Column -from Standard.Table.Errors import all - -from Standard.Database import all -from Standard.Database.Errors import all - -from Standard.Test_New import all -import Standard.Test_New.Suite.Suite_Builder - -import project.Util -import project.Database.Helpers.Name_Generator - -type Data - Value ~data - - db_table_without_key self = self.data.first - db_table_with_key self = self.data.second - - setup create_connection_func = Data.Value <| - IO.println <| " Default_Ordering_Spec_New.setup" - connection = create_connection_func Nothing - src_table = Table.new [["X", [1, 2, 3]], ["Y", [30, 20, 10]]] - db_table_without_key = src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=Nothing - db_table_with_key = src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=["X"] - [db_table_without_key, db_table_with_key] - - -## Adds test specifications for default ordering to the given `suite_builder`. Adds it as - group with the given `prefix` as its name prefix. - - Arguments: - - suite_builder: A Suite_Builder in which a new group will be created - - create_connection_fn: A function that creates an appropriate Connection to the database backend. 
-add_default_ordering_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : Any) = - group_name = prefix + "Table.default_ordering" - - suite_builder.group group_name group_builder-> - data = Data.setup create_connection_fn - - group_builder.specify "will return Nothing if no primary key is defined" <| - data.db_table_without_key.default_ordering . should_equal Nothing - - group_builder.specify "will return the key for a table with a primary key" <| - v1 = data.db_table_with_key.default_ordering - v1.length . should_equal 1 - v1.first.expression.name . should_equal "X" - v1.first.direction . should_equal Sort_Direction.Ascending - - t2 = data.db_table_with_key.set "10 - [X]" "X" - v2 = t2.default_ordering - v2.length . should_equal 1 - v2.first.expression.name . should_equal "X" - - group_builder.specify "will return Nothing for composite tables (join, aggregate)" - data.db_table_with_key.join data.db_table_with_key . default_ordering . should_equal Nothing - data.db_table_with_key.aggregate [Aggregate_Column.Group_By "X"] . default_ordering . should_equal Nothing - - group_builder.specify "will return the ordering determined by order_by" <| - v1 = data.db_table_with_key.order_by ["Y", Sort_Column.Name "X" Sort_Direction.Descending] . default_ordering - v1.length . should_equal 2 - v1.first.expression.name . should_equal "Y" - v1.first.direction . should_equal Sort_Direction.Ascending - v1.second.expression.name . should_equal "X" - v1.second.direction . should_equal Sort_Direction.Descending - - v2 = data.db_table_without_key.order_by ["Y"] . default_ordering - v2.length . should_equal 1 - v2.first.expression.name . should_equal "Y" - v2.first.direction . 
should_equal Sort_Direction.Ascending - From eeaa5d267e8bdcb47599ba5ecc4839ef5ca28b83 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 16 Jan 2024 11:48:55 +0100 Subject: [PATCH 29/93] Replace Common_Spec with the builder API --- .../src/Database/Common/Common_Spec.enso | 437 ++++++++++------ .../src/Database/Common/Common_Spec_New.enso | 467 ------------------ 2 files changed, 284 insertions(+), 620 deletions(-) delete mode 100644 test/Table_Tests/src/Database/Common/Common_Spec_New.enso diff --git a/test/Table_Tests/src/Database/Common/Common_Spec.enso b/test/Table_Tests/src/Database/Common/Common_Spec.enso index 4d8a23f16653..03cdfa4c3c98 100644 --- a/test/Table_Tests/src/Database/Common/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Common_Spec.enso @@ -8,122 +8,262 @@ from Standard.Table.Errors import all from Standard.Database import all from Standard.Database.Errors import all -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all +import Standard.Test_New.Problems +import Standard.Test_New.Suite.Suite_Builder import project.Database.Common.Default_Ordering_Spec import project.Database.Common.Names_Length_Limits_Spec import project.Util import project.Database.Helpers.Name_Generator +import project.Database.Common.Default_Ordering_Spec_New + + +upload connection prefix data temporary=True = + name = Name_Generator.random_name prefix + table = data.select_into_database_table connection name temporary=temporary primary_key=Nothing + IO.println <| " upload: Created table with name " + name + table + + +drop_table connection name = + IO.println <| " drop_table: Dropping table with name " + name + Panic.catch Any (connection.drop_table name) caught_panic-> + IO.println <| "Failed to drop table: " + name + " because of: " + caught_panic.payload.to_display_text + + +type Basic_Data + Value ~data + + connection self = self.data.at 0 + t1 self = self.data.at 1 + t2 self = 
self.data.at 2 + t4 self = self.data.at 3 + big_table self = self.data.at 4 + big_size self = self.data.at 5 + + setup create_connection_fn = Basic_Data.Value <| + IO.println <| " Common_Spec_New.Basic_Data.setup" + big_size = 1000 + connection = create_connection_fn Nothing + t1 = upload connection "T1" (Table.new [["a", [1, 4]], ["b", [2, 5]], ["c", [3, 6]]]) + t2 = upload connection "T2" (Table.new [["d", [100, 200]]]) + ## The effective name may get a deduplication prefix/suffix so we + need to use `t4.name` instead of the literal string. Still it + will contain the weird characters we wanted. + + Also, the table name cannot be too long as Postgres truncates at + 63 chars (and we append 37 chars of uniqueness suffix) and the + test logic will break then. + t4 = upload connection 'aSELECT "A",\'B\' FROM t;--' (Table.new [["X", ["a", "B"]], ["Y", [2, 5]]]) + big = Table.new [["a", Vector.new big_size ix->ix], ["b", Vector.new big_size ix-> ix * 3.1415926], ["c", Vector.new big_size ix-> ix.to_text]] + big_table = upload connection "Big" big + [connection, t1, t2, t4, big_table, big_size] + + teardown self = + data = self + IO.println <| " Common_Spec_New.Basic_Data.teardown" + drop_table data.connection data.t1.name + drop_table data.connection data.t2.name + drop_table data.connection data.t4.name + drop_table data.connection data.big_table.name + + +type Sorting_Data + Value ~data + + connection self = self.data.at 0 + df self = self.data.at 1 + ints self = self.data.at 2 + reals self = self.data.at 3 + bools self = self.data.at 4 + texts self = self.data.at 5 + t8 self = self.data.at 6 + + setup create_connection_fn = Sorting_Data.Value <| + IO.println <| " Common_Spec_New.Sorting_Data.setup" + connection = create_connection_fn Nothing + ints = [1, 2, 3, 4, 5] + reals = [1.3, 4.6, 3.2, 5.2, 1.6] + bools = [False, False, True, True, False] + texts = ["foo", "foo", "bar", "baz", "spam"] + df = upload connection "clothes" <| + Table.new [["id", [1,2,3,4,5,6]], 
["name", ["shoes","trousers","dress","skirt","blouse","t-shirt"]], ["quantity", [20,10,20,10,30,30]], ["rating", [3.0,Nothing,7.3,3.0,2.2,Nothing]], ["price", [37.2,42.1,64.1,87.4,13.5,64.2]]] + t8 = upload connection "T8" <| + Table.new [["ord", [0,3,2,4,1]], ["ints", ints], ["reals", reals], ["bools", bools], ["texts", texts]] + [connection, df, ints, reals, bools, texts, t8] + + teardown self = + IO.println <| " Common_Spec_New.Sorting_Data.teardown" + drop_table self.connection self.df.name + drop_table self.connection self.t8.name + + + +type Aggregation_Data + Value ~data + + connection self = self.data.first + t9 self = self.data.second + + setup create_connection_fn = Aggregation_Data.Value <| + IO.println <| " Common_Spec_New.Aggregation_Data.setup" + connection = create_connection_fn Nothing + builders = [Vector.new_builder,Vector.new_builder,Vector.new_builder] + insert v = + builders.zip v .append + insert ["foo", 0.4, 50] + insert ["foo", 0.2, 10] + insert ["foo", 0.4, 30] + insert ["bar", 3.5, 20] + insert ["foo", Nothing, 20] + insert ["baz", 6.7, 40] + insert ["foo", Nothing, 10] + insert ["bar", 97, 60] + insert ["quux", Nothing, 70] + insert ["zzzz", Nothing, Nothing] + insert ["zzzz", 1, 1] + insert ["zzzz", 0, 0] + insert ["zzzz", 0, 1] + insert ["zzzz", 1, 0] + insert ["zzzz", 0, 0] + insert ["zzzz", Nothing, Nothing] + t9 = upload connection "T9" <| + Table.new [["name", builders.at 0 . to_vector], ["price", builders.at 1 . to_vector], ["quantity", builders.at 2 . 
to_vector]] + [connection, t9] + + teardown self = + IO.println <| " Common_Spec_New.Aggregation_Data.teardown" + drop_table self.connection self.t9.name + + + +type Missing_Values_Data + Value ~data + + connection self = self.data.first + t4 self = self.data.second + + setup create_connection_fn = Missing_Values_Data.Value <| + IO.println <| " Common_Spec_New.Missing_Values_Data.setup" + connection = create_connection_fn Nothing + t4 = upload connection "T4" <| + Table.new [["a", [0, 1, Nothing, 42, Nothing]], ["b", [True, Nothing, True, False, Nothing]], ["c", ["", "foo", "bar", Nothing, Nothing]]] + [connection, t4] + + teardown self = + IO.println <| " Common_Spec_New.Missing_Values_Data.teardown" + drop_table self.connection self.t4.name + -spec prefix connection = - tables_to_clean = Vector.new_builder - upload prefix data temporary=True = - name = Name_Generator.random_name prefix - table = data.select_into_database_table connection name temporary=temporary primary_key=Nothing - tables_to_clean.append table.name - table - - clean_tables = - tables_to_clean.to_vector.each name-> - Panic.catch Any (connection.drop_table name) caught_panic-> - IO.println "Failed to drop table: " + name + " because of: " + caught_panic.payload.to_display_text - Panic.with_finalizer clean_tables <| - run_tests prefix connection upload - -run_tests prefix connection upload = - t1 = upload "T1" (Table.new [["a", [1, 4]], ["b", [2, 5]], ["c", [3, 6]]]) - Test.group prefix+"Basic Table Access" <| - Test.specify "should allow to materialize tables and columns into local memory" <| - df = t1.read - a = t1.at 'a' . read +## Docs + + Arguments: + - create_connection_fn: A function that creates an appropriate Connection to the database backend. 
+add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : (Nothing -> Any)) = + + Default_Ordering_Spec.add_default_ordering_specs suite_builder prefix create_connection_fn + # TODO: + # Names_Length_Limits_Spec.add_specs suite_builder prefix create_connection_fn + + suite_builder.group (prefix + "Basic Table Access") group_builder-> + data = Basic_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + group_builder.specify "should allow to materialize tables and columns into local memory" <| + df = data.t1.read + a = data.t1.at 'a' . read df.at 'a' . to_vector . should_equal [1, 4] a.to_vector . should_equal [1, 4] - Test.specify "should allow to materialize columns directly into a Vector" <| - v = t1.at 'a' . to_vector + + group_builder.specify "should allow to materialize columns directly into a Vector" <| + v = data.t1.at 'a' . to_vector v . should_equal [1, 4] - Test.specify "should handle bigger result sets" <| - n = 1000 - original = Table.new [["a", Vector.new n ix->ix], ["b", Vector.new n ix-> ix * 3.1415926], ["c", Vector.new n ix-> ix.to_text]] - table = upload "Big" original - table.read.row_count . should_equal n - Test.specify "should not allow to set a column coming from another table" <| - t2 = upload "T2" (Table.new [["d", [100, 200]]]) - t1.set (t2.at "d") . should_fail_with Integrity_Error - - Test.group prefix+"Connection.query" <| - name = t1.name - Test.specify "should allow to access a Table by name" <| - t2 = connection.query (SQL_Query.Table_Name name) - t2.read . should_equal t1.read - - Test.specify "should allow to access a Table by an SQL query" <| - t2 = connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3')) + + group_builder.specify "should handle bigger result sets" <| + data.big_table.read.row_count . 
should_equal data.big_size + + group_builder.specify "should not allow to set a column coming from another table" <| + data.t1.set (data.t2.at "d") . should_fail_with Integrity_Error + + + suite_builder.group (prefix + "Connection.query") group_builder-> + data = Basic_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + group_builder.specify "should allow to access a Table by name" <| + name = data.t1.name + IO.println <| " Querying table with name " + name + tmp = data.connection.query (SQL_Query.Table_Name name) + tmp.read . should_equal data.t1.read + + group_builder.specify "should allow to access a Table by an SQL query" <| + name = data.t1.name + t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3')) m2 = t2.read m2.column_names . should_equal ["a", "b"] m2.at "a" . to_vector . should_equal [4] m2.at "b" . to_vector . should_equal [5] m2.at "c" . should_fail_with No_Such_Column - Test.specify "should allow to access a Table by an SQL query" <| - t2 = connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3')) + group_builder.specify "should allow to access a Table by an SQL query" <| + name = data.t1.name + t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3')) m2 = t2.read m2.column_names . should_equal ["a", "b"] m2.at "a" . to_vector . should_equal [4] m2.at "b" . to_vector . should_equal [5] m2.at "c" . should_fail_with No_Such_Column - t3 = connection.query (SQL_Query.Raw_SQL ('SELECT 1+2')) + t3 = data.connection.query (SQL_Query.Raw_SQL ('SELECT 1+2')) m3 = t3.read m3.at 0 . to_vector . 
should_equal [3] - Test.specify "should use labels for column names" <| - t2 = connection.query (SQL_Query.Raw_SQL ('SELECT a AS c, b FROM "' + name + '" WHERE a >= 3')) + group_builder.specify "should use labels for column names" <| + name = data.t1.name + t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a AS c, b FROM "' + name + '" WHERE a >= 3')) m2 = t2.read m2.column_names . should_equal ["c", "b"] m2.at "c" . to_vector . should_equal [4] m2.at "b" . to_vector . should_equal [5] m2.at "a" . should_fail_with No_Such_Column - Test.specify "should allow a shorthand trying to deduce if the query is a table name or an SQL query" <| - t2 = connection.query name - t2.read . should_equal t1.read + group_builder.specify "should allow a shorthand trying to deduce if the query is a table name or an SQL query" <| + name = data.t1.name + t2 = data.connection.query name + t2.read . should_equal data.t1.read - t3 = connection.query ('SELECT a, b FROM "' + name + '" WHERE a >= 3') + t3 = data.connection.query ('SELECT a, b FROM "' + name + '" WHERE a >= 3') m3 = t3.read m3.column_names . should_equal ["a", "b"] m3.at "a" . to_vector . should_equal [4] - ## The effective name may get a deduplication prefix/suffix so we - need to use `t4.name` instead of the literal string. Still it - will contain the weird characters we wanted. - - Also, the table name cannot be too long as Postgres truncates at - 63 chars (and we append 37 chars of uniqueness suffix) and the - test logic will break then. - t4 = upload 'aSELECT "A",\'B\' FROM t;--' (Table.new [["X", ["a", "B"]], ["Y", [2, 5]]]) - t5 = connection.query t4.name + t5 = data.connection.query data.t4.name m5 = t5.read m5.column_names . should_equal ["X", "Y"] m5.at "X" . to_vector . should_equal ["a", "B"] m5.at "Y" . to_vector . 
should_equal [2, 5] - Test.specify "should report an error depending on input SQL_Query type" <| - r2 = connection.query (SQL_Query.Table_Name "NONEXISTENT-TABLE") + group_builder.specify "should report an error depending on input SQL_Query type" <| + r2 = data.connection.query (SQL_Query.Table_Name "NONEXISTENT-TABLE") r2.should_fail_with Table_Not_Found r2.catch.name . should_equal "NONEXISTENT-TABLE" r2.catch.to_display_text . should_equal "Table NONEXISTENT-TABLE was not found in the database." - r3 = connection.query (SQL_Query.Raw_SQL "MALFORMED-QUERY") + r3 = data.connection.query (SQL_Query.Raw_SQL "MALFORMED-QUERY") r3.should_fail_with SQL_Error - Test.specify "should not allow interpolations in raw user-built queries" <| - r = connection.query (SQL_Query.Raw_SQL "SELECT 1 + ?") + group_builder.specify "should not allow interpolations in raw user-built queries" <| + r = data.connection.query (SQL_Query.Raw_SQL "SELECT 1 + ?") r.should_fail_with Illegal_Argument - Test.specify "should make a best-effort attempt at returning a reasonable error for the short-hand" <| - r2 = connection.query "NONEXISTENT-TABLE" + group_builder.specify "should make a best-effort attempt at returning a reasonable error for the short-hand" <| + r2 = data.connection.query "NONEXISTENT-TABLE" r2.should_fail_with Table_Not_Found r2.catch.name . should_equal "NONEXISTENT-TABLE" r2.catch.treated_as_query . should_be_true @@ -132,23 +272,23 @@ run_tests prefix connection upload = error_text.starts_with "The name NONEXISTENT-TABLE was treated as a query, but the query failed" . should_be_true error_text.ends_with "wrap it in `SQL_Query.Table_Name`." . should_be_true - r3 = connection.query "SELECT * FROM ........" + r3 = data.connection.query "SELECT * FROM ........" 
r3.should_fail_with SQL_Error - Test.specify "will fail if the table is modified and a column gets removed" <| + group_builder.specify "will fail if the table is modified and a column gets removed" <| name = Name_Generator.random_name "removing-column" Problems.assume_no_problems <| - (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table connection name temporary=True + (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True - t1 = connection.query name + t1 = data.connection.query name m1 = t1.read Problems.assume_no_problems m1 m1.at "a" . to_vector . should_equal [1, 2, 3] m1.at "b" . to_vector . should_equal [4, 5, 6] - Problems.assume_no_problems <| connection.drop_table name + Problems.assume_no_problems <| data.connection.drop_table name Problems.assume_no_problems <| - (Table.new [["a", [100, 200]]]).select_into_database_table connection name temporary=True + (Table.new [["a", [100, 200]]]).select_into_database_table data.connection name temporary=True # Reading a column that was kept will work OK t1.at "a" . to_vector . should_equal [100, 200] @@ -157,20 +297,20 @@ run_tests prefix connection upload = m2 = t1.read m2.should_fail_with SQL_Error - Test.specify "will not fail if the table is modified and a column gets added" <| + group_builder.specify "will not fail if the table is modified and a column gets added" <| name = Name_Generator.random_name "adding-column" Problems.assume_no_problems <| - (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table connection name temporary=True + (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True - t1 = connection.query name + t1 = data.connection.query name m1 = t1.read Problems.assume_no_problems m1 m1.at "a" . to_vector . should_equal [1, 2, 3] m1.at "b" . to_vector . 
should_equal [4, 5, 6] - Problems.assume_no_problems <| connection.drop_table name + Problems.assume_no_problems <| data.connection.drop_table name Problems.assume_no_problems <| - (Table.new [["a", [100, 200]], ["b", [300, 400]], ["c", [500, 600]]]).select_into_database_table connection name temporary=True + (Table.new [["a", [100, 200]], ["b", [300, 400]], ["c", [500, 600]]]).select_into_database_table data.connection name temporary=True m2 = t1.read Problems.assume_no_problems m2 @@ -180,12 +320,18 @@ run_tests prefix connection upload = t1.at "c" . should_fail_with No_Such_Column - t2 = connection.query name + t2 = data.connection.query name t2.column_names . should_equal ["a", "b", "c"] - Test.group prefix+"Masking Tables" <| - Test.specify "should allow to select rows from a table or column based on an expression" <| - t2 = t1.filter (t1.at "a" == 1) + + suite_builder.group (prefix + "Masking Tables") group_builder-> + data = Basic_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + group_builder.specify "should allow to select rows from a table or column based on an expression" <| + t2 = data.t1.filter (data.t1.at "a" == 1) df = t2.read df.at "a" . to_vector . should_equal [1] df.at "b" . to_vector . should_equal [2] @@ -194,66 +340,67 @@ run_tests prefix connection upload = t2.at "b" . to_vector . should_equal [2] t2.at "c" . to_vector . should_equal [3] - Test.group prefix+"Missing Values" <| - t4 = upload "T4" <| - Table.new [["a", [0, 1, Nothing, 42, Nothing]], ["b", [True, Nothing, True, False, Nothing]], ["c", ["", "foo", "bar", Nothing, Nothing]]] - Test.specify "fill_nothing should replace nulls" <| - t4.at 'a' . fill_nothing 10 . to_vector . should_equal [0, 1, 10, 42, 10] - t4.at 'b' . fill_nothing False . to_vector . should_equal [True, False, True, False, False] - t4.at 'c' . fill_nothing "NA" . to_vector . should_equal ["", "foo", "bar", "NA", "NA"] - - Test.specify "should correctly be counted" <| - t4.row_count . 
should_equal 5 - col = t4.at 'a' + suite_builder.group (prefix + "Missing Values") group_builder-> + data = Missing_Values_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + group_builder.specify "fill_nothing should replace nulls" <| + data.t4.at 'a' . fill_nothing 10 . to_vector . should_equal [0, 1, 10, 42, 10] + data.t4.at 'b' . fill_nothing False . to_vector . should_equal [True, False, True, False, False] + data.t4.at 'c' . fill_nothing "NA" . to_vector . should_equal ["", "foo", "bar", "NA", "NA"] + + group_builder.specify "should correctly be counted" <| + data.t4.row_count . should_equal 5 + col = data.t4.at 'a' col.length . should_equal 5 col.count . should_equal 3 col.count_nothing . should_equal 2 - Test.group prefix+"Sorting" <| - df = upload "clothes" <| - Table.new [["id", [1,2,3,4,5,6]], ["name", ["shoes","trousers","dress","skirt","blouse","t-shirt"]], ["quantity", [20,10,20,10,30,30]], ["rating", [3.0,Nothing,7.3,3.0,2.2,Nothing]], ["price", [37.2,42.1,64.1,87.4,13.5,64.2]]] - Test.specify "should allow sorting by a single column name" <| - r_1 = df.order_by ([Sort_Column.Name 'quantity']) + suite_builder.group (prefix + "Sorting") group_builder-> + data = Sorting_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + group_builder.specify "should allow sorting by a single column name" <| + r_1 = data.df.order_by ([Sort_Column.Name 'quantity']) r_1.at 'id' . to_vector . should_equal [2,4,1,3,5,6] - r_3 = df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending]) + r_3 = data.df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending]) r_3.at 'id' . to_vector . 
should_equal [3,1,4,5,2,6] - Test.specify 'should allow sorting by multiple column names' <| - r_1 = df.order_by ([Sort_Column.Name 'quantity', Sort_Column.Name 'rating']) + group_builder.specify 'should allow sorting by multiple column names' <| + r_1 = data.df.order_by ([Sort_Column.Name 'quantity', Sort_Column.Name 'rating']) r_1.at 'id' . to_vector . should_equal [2,4,1,3,6,5] - r_2 = df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending, Sort_Column.Name 'quantity' Sort_Direction.Descending]) + r_2 = data.df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending, Sort_Column.Name 'quantity' Sort_Direction.Descending]) r_2.at 'id' . to_vector . should_equal [3,1,4,5,6,2] - Test.specify 'should allow sorting with specific by-column rules' <| - r_1 = df.order_by ([Sort_Column.Name "quantity", Sort_Column.Name "price" Sort_Direction.Descending]) + + group_builder.specify 'should allow sorting with specific by-column rules' <| + r_1 = data.df.order_by ([Sort_Column.Name "quantity", Sort_Column.Name "price" Sort_Direction.Descending]) r_1.at 'id' . to_vector . should_equal [4,2,3,1,6,5] - Test.specify 'should correctly reorder all kinds of columns and leave the original columns untouched' <| - ints = [1, 2, 3, 4, 5] - reals = [1.3, 4.6, 3.2, 5.2, 1.6] - bools = [False, False, True, True, False] - texts = ["foo", "foo", "bar", "baz", "spam"] - df = upload "T8" <| - Table.new [["ord", [0,3,2,4,1]], ["ints", ints], ["reals", reals], ["bools", bools], ["texts", texts]] - r = df.order_by ([Sort_Column.Name 'ord']) + group_builder.specify 'should correctly reorder all kinds of columns and leave the original columns untouched' <| + r = data.t8.order_by ([Sort_Column.Name 'ord']) r.at 'ints' . to_vector . should_equal [1, 5, 3, 2, 4] - df.at 'ints' . to_vector . should_equal ints + data.t8.at 'ints' . to_vector . should_equal data.ints r.at 'reals' . to_vector . should_equal [1.3, 1.6, 3.2, 4.6, 5.2] - df.at 'reals' . to_vector . 
should_equal reals + data.t8.at 'reals' . to_vector . should_equal data.reals r.at 'bools' . to_vector . should_equal [False, False, True, False, True] - df.at 'bools' . to_vector . should_equal bools + data.t8.at 'bools' . to_vector . should_equal data.bools r.at 'texts' . to_vector . should_equal ['foo', 'spam', 'bar', 'foo', 'baz'] - df.at 'texts' . to_vector . should_equal texts + data.t8.at 'texts' . to_vector . should_equal data.texts - Test.specify 'should sort columns with specified ordering and missing placement' <| - c = df.at 'rating' + group_builder.specify 'should sort columns with specified ordering and missing placement' <| + c = data.df.at 'rating' r_1 = c.sort r_1.to_vector.should_equal [Nothing, Nothing, 2.2, 3.0, 3.0, 7.3] @@ -261,28 +408,12 @@ run_tests prefix connection upload = r_2 = c.sort Sort_Direction.Descending r_2.to_vector.should_equal [7.3, 3.0, 3.0, 2.2, Nothing, Nothing] - Test.group prefix+"Aggregation" <| - builders = [Vector.new_builder,Vector.new_builder,Vector.new_builder] - insert v = - builders.zip v .append - insert ["foo", 0.4, 50] - insert ["foo", 0.2, 10] - insert ["foo", 0.4, 30] - insert ["bar", 3.5, 20] - insert ["foo", Nothing, 20] - insert ["baz", 6.7, 40] - insert ["foo", Nothing, 10] - insert ["bar", 97, 60] - insert ["quux", Nothing, 70] - insert ["zzzz", Nothing, Nothing] - insert ["zzzz", 1, 1] - insert ["zzzz", 0, 0] - insert ["zzzz", 0, 1] - insert ["zzzz", 1, 0] - insert ["zzzz", 0, 0] - insert ["zzzz", Nothing, Nothing] - t = upload "T9" <| - Table.new [["name", builders.at 0 . to_vector], ["price", builders.at 1 . to_vector], ["quantity", builders.at 2 . 
to_vector]] + + suite_builder.group prefix+"Aggregation" group_builder-> + data = Aggregation_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown ## A helper which makes sure that the groups in a materialized (InMemory) table are ordered according to a specified column or list @@ -290,45 +421,45 @@ run_tests prefix connection upload = determinize_by order_column table = table.order_by ([Sort_Column.Name order_column]) - Test.specify "should allow counting group sizes and elements" <| + group_builder.specify "should allow counting group sizes and elements" <| ## Names set to lower case to avoid issue with Redshift where columns are returned in lower case. aggregates = [Count "count", Count_Not_Nothing "price" "count not nothing price", Count_Nothing "price" "count nothing price"] - t1 = determinize_by "name" (t.aggregate ([Group_By "name"] + aggregates) . read) + t1 = determinize_by "name" (data.t9.aggregate ([Group_By "name"] + aggregates) . read) t1.at "name" . to_vector . should_equal ["bar", "baz", "foo", "quux", "zzzz"] t1.at "count" . to_vector . should_equal [2, 1, 5, 1, 7] t1.at "count not nothing price" . to_vector . should_equal [2, 1, 3, 0, 5] t1.at "count nothing price" . to_vector . should_equal [0, 0, 2, 1, 2] - t2 = t.aggregate aggregates . read + t2 = data.t9.aggregate aggregates . read t2.at "count" . to_vector . should_equal [16] t2.at "count not nothing price" . to_vector . should_equal [11] t2.at "count nothing price" . to_vector . should_equal [5] - Test.specify "should allow simple arithmetic aggregations" <| + group_builder.specify "should allow simple arithmetic aggregations" <| ## Names set to lower case to avoid issue with Redshift where columns are returned in lower case. aggregates = [Sum "price" "sum price", Sum "quantity" "sum quantity", Average "price" "avg price"] ## TODO can check the datatypes - t1 = determinize_by "name" (t.aggregate ([Group_By "name"] + aggregates) . 
read) + t1 = determinize_by "name" (data.t9.aggregate ([Group_By "name"] + aggregates) . read) t1.at "name" . to_vector . should_equal ["bar", "baz", "foo", "quux", "zzzz"] t1.at "sum price" . to_vector . should_equal [100.5, 6.7, 1, Nothing, 2] t1.at "sum quantity" . to_vector . should_equal [80, 40, 120, 70, 2] t1.at "avg price" . to_vector . should_equal [50.25, 6.7, (1/3), Nothing, (2/5)] - t2 = t.aggregate aggregates . read + t2 = data.t9.aggregate aggregates . read t2.at "sum price" . to_vector . should_equal [110.2] t2.at "sum quantity" . to_vector . should_equal [312] t2.at "avg price" . to_vector . should_equal [(110.2 / 11)] - Test.group prefix+"Table.filter" <| - Test.specify "report error when trying to filter by a custom predicate" <| - t1.filter "a" (x -> x % 2 == 0) . should_fail_with Unsupported_Database_Operation + suite_builder.group prefix+"Table.filter" group_builder-> + data = Basic_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown - Default_Ordering_Spec.spec prefix connection - Names_Length_Limits_Spec.spec prefix connection + group_builder.specify "report error when trying to filter by a custom predicate" <| + data.t1.filter "a" (x -> x % 2 == 0) . 
should_fail_with Unsupported_Database_Operation -main = Test_Suite.run_main <| - spec "[SQLite] " (Database.connect (SQLite In_Memory)) diff --git a/test/Table_Tests/src/Database/Common/Common_Spec_New.enso b/test/Table_Tests/src/Database/Common/Common_Spec_New.enso deleted file mode 100644 index 287b1fe04199..000000000000 --- a/test/Table_Tests/src/Database/Common/Common_Spec_New.enso +++ /dev/null @@ -1,467 +0,0 @@ -from Standard.Base import all -import Standard.Base.Errors.Illegal_Argument.Illegal_Argument - -from Standard.Table import Table, Sort_Column -from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all hiding First, Last -from Standard.Table.Errors import all - -from Standard.Database import all -from Standard.Database.Errors import all - -from Standard.Test_New import all -import Standard.Test_New.Problems -import Standard.Test_New.Suite.Suite_Builder - -import project.Database.Common.Default_Ordering_Spec -import project.Database.Common.Names_Length_Limits_Spec - -import project.Util -import project.Database.Helpers.Name_Generator -import project.Database.Common.Default_Ordering_Spec_New - - -upload connection prefix data temporary=True = - name = Name_Generator.random_name prefix - table = data.select_into_database_table connection name temporary=temporary primary_key=Nothing - IO.println <| " upload: Created table with name " + name - table - - -drop_table connection name = - IO.println <| " drop_table: Dropping table with name " + name - Panic.catch Any (connection.drop_table name) caught_panic-> - IO.println <| "Failed to drop table: " + name + " because of: " + caught_panic.payload.to_display_text - - -type Basic_Data - Value ~data - - connection self = self.data.at 0 - t1 self = self.data.at 1 - t2 self = self.data.at 2 - t4 self = self.data.at 3 - big_table self = self.data.at 4 - big_size self = self.data.at 5 - - setup create_connection_fn = Basic_Data.Value <| - IO.println <| " Common_Spec_New.Basic_Data.setup" - big_size = 1000 
- connection = create_connection_fn Nothing - t1 = upload connection "T1" (Table.new [["a", [1, 4]], ["b", [2, 5]], ["c", [3, 6]]]) - t2 = upload connection "T2" (Table.new [["d", [100, 200]]]) - ## The effective name may get a deduplication prefix/suffix so we - need to use `t4.name` instead of the literal string. Still it - will contain the weird characters we wanted. - - Also, the table name cannot be too long as Postgres truncates at - 63 chars (and we append 37 chars of uniqueness suffix) and the - test logic will break then. - t4 = upload connection 'aSELECT "A",\'B\' FROM t;--' (Table.new [["X", ["a", "B"]], ["Y", [2, 5]]]) - big = Table.new [["a", Vector.new big_size ix->ix], ["b", Vector.new big_size ix-> ix * 3.1415926], ["c", Vector.new big_size ix-> ix.to_text]] - big_table = upload connection "Big" big - [connection, t1, t2, t4, big_table, big_size] - - teardown self = - data = self - IO.println <| " Common_Spec_New.Basic_Data.teardown" - drop_table data.connection data.t1.name - drop_table data.connection data.t2.name - drop_table data.connection data.t4.name - drop_table data.connection data.big_table.name - - -type Sorting_Data - Value ~data - - connection self = self.data.at 0 - df self = self.data.at 1 - ints self = self.data.at 2 - reals self = self.data.at 3 - bools self = self.data.at 4 - texts self = self.data.at 5 - t8 self = self.data.at 6 - - setup create_connection_fn = Sorting_Data.Value <| - IO.println <| " Common_Spec_New.Sorting_Data.setup" - connection = create_connection_fn Nothing - ints = [1, 2, 3, 4, 5] - reals = [1.3, 4.6, 3.2, 5.2, 1.6] - bools = [False, False, True, True, False] - texts = ["foo", "foo", "bar", "baz", "spam"] - df = upload connection "clothes" <| - Table.new [["id", [1,2,3,4,5,6]], ["name", ["shoes","trousers","dress","skirt","blouse","t-shirt"]], ["quantity", [20,10,20,10,30,30]], ["rating", [3.0,Nothing,7.3,3.0,2.2,Nothing]], ["price", [37.2,42.1,64.1,87.4,13.5,64.2]]] - t8 = upload connection "T8" <| - 
Table.new [["ord", [0,3,2,4,1]], ["ints", ints], ["reals", reals], ["bools", bools], ["texts", texts]] - [connection, df, ints, reals, bools, texts, t8] - - teardown self = - IO.println <| " Common_Spec_New.Sorting_Data.teardown" - drop_table self.connection self.df.name - drop_table self.connection self.t8.name - - - -type Aggregation_Data - Value ~data - - connection self = self.data.first - t9 self = self.data.second - - setup create_connection_fn = Aggregation_Data.Value <| - IO.println <| " Common_Spec_New.Aggregation_Data.setup" - connection = create_connection_fn Nothing - builders = [Vector.new_builder,Vector.new_builder,Vector.new_builder] - insert v = - builders.zip v .append - insert ["foo", 0.4, 50] - insert ["foo", 0.2, 10] - insert ["foo", 0.4, 30] - insert ["bar", 3.5, 20] - insert ["foo", Nothing, 20] - insert ["baz", 6.7, 40] - insert ["foo", Nothing, 10] - insert ["bar", 97, 60] - insert ["quux", Nothing, 70] - insert ["zzzz", Nothing, Nothing] - insert ["zzzz", 1, 1] - insert ["zzzz", 0, 0] - insert ["zzzz", 0, 1] - insert ["zzzz", 1, 0] - insert ["zzzz", 0, 0] - insert ["zzzz", Nothing, Nothing] - t9 = upload connection "T9" <| - Table.new [["name", builders.at 0 . to_vector], ["price", builders.at 1 . to_vector], ["quantity", builders.at 2 . 
to_vector]] - [connection, t9] - - teardown self = - IO.println <| " Common_Spec_New.Aggregation_Data.teardown" - drop_table self.connection self.t9.name - - - -type Missing_Values_Data - Value ~data - - connection self = self.data.first - t4 self = self.data.second - - setup create_connection_fn = Missing_Values_Data.Value <| - IO.println <| " Common_Spec_New.Missing_Values_Data.setup" - connection = create_connection_fn Nothing - t4 = upload connection "T4" <| - Table.new [["a", [0, 1, Nothing, 42, Nothing]], ["b", [True, Nothing, True, False, Nothing]], ["c", ["", "foo", "bar", Nothing, Nothing]]] - [connection, t4] - - teardown self = - IO.println <| " Common_Spec_New.Missing_Values_Data.teardown" - drop_table self.connection self.t4.name - - - - -## Docs - - Arguments: - - create_connection_fn: A function that creates an appropriate Connection to the database backend. -add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : (Nothing -> Any)) = - - Default_Ordering_Spec_New.add_default_ordering_specs suite_builder prefix create_connection_fn - # TODO: - # Names_Length_Limits_Spec_New.add_specs suite_builder prefix create_connection_fn - - suite_builder.group (prefix + "Basic Table Access") group_builder-> - data = Basic_Data.setup create_connection_fn - - group_builder.teardown <| - data.teardown - - group_builder.specify "should allow to materialize tables and columns into local memory" <| - df = data.t1.read - a = data.t1.at 'a' . read - df.at 'a' . to_vector . should_equal [1, 4] - a.to_vector . should_equal [1, 4] - - group_builder.specify "should allow to materialize columns directly into a Vector" <| - v = data.t1.at 'a' . to_vector - v . should_equal [1, 4] - - group_builder.specify "should handle bigger result sets" <| - data.big_table.read.row_count . should_equal data.big_size - - group_builder.specify "should not allow to set a column coming from another table" <| - data.t1.set (data.t2.at "d") . 
should_fail_with Integrity_Error - - - suite_builder.group (prefix + "Connection.query") group_builder-> - data = Basic_Data.setup create_connection_fn - - group_builder.teardown <| - data.teardown - - group_builder.specify "should allow to access a Table by name" <| - name = data.t1.name - IO.println <| " Querying table with name " + name - tmp = data.connection.query (SQL_Query.Table_Name name) - tmp.read . should_equal data.t1.read - - group_builder.specify "should allow to access a Table by an SQL query" <| - name = data.t1.name - t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3')) - m2 = t2.read - m2.column_names . should_equal ["a", "b"] - m2.at "a" . to_vector . should_equal [4] - m2.at "b" . to_vector . should_equal [5] - m2.at "c" . should_fail_with No_Such_Column - - group_builder.specify "should allow to access a Table by an SQL query" <| - name = data.t1.name - t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3')) - m2 = t2.read - m2.column_names . should_equal ["a", "b"] - m2.at "a" . to_vector . should_equal [4] - m2.at "b" . to_vector . should_equal [5] - m2.at "c" . should_fail_with No_Such_Column - - t3 = data.connection.query (SQL_Query.Raw_SQL ('SELECT 1+2')) - m3 = t3.read - m3.at 0 . to_vector . should_equal [3] - - group_builder.specify "should use labels for column names" <| - name = data.t1.name - t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a AS c, b FROM "' + name + '" WHERE a >= 3')) - m2 = t2.read - m2.column_names . should_equal ["c", "b"] - m2.at "c" . to_vector . should_equal [4] - m2.at "b" . to_vector . should_equal [5] - m2.at "a" . should_fail_with No_Such_Column - - group_builder.specify "should allow a shorthand trying to deduce if the query is a table name or an SQL query" <| - name = data.t1.name - t2 = data.connection.query name - t2.read . 
should_equal data.t1.read - - t3 = data.connection.query ('SELECT a, b FROM "' + name + '" WHERE a >= 3') - m3 = t3.read - m3.column_names . should_equal ["a", "b"] - m3.at "a" . to_vector . should_equal [4] - - t5 = data.connection.query data.t4.name - m5 = t5.read - m5.column_names . should_equal ["X", "Y"] - m5.at "X" . to_vector . should_equal ["a", "B"] - m5.at "Y" . to_vector . should_equal [2, 5] - - group_builder.specify "should report an error depending on input SQL_Query type" <| - r2 = data.connection.query (SQL_Query.Table_Name "NONEXISTENT-TABLE") - r2.should_fail_with Table_Not_Found - r2.catch.name . should_equal "NONEXISTENT-TABLE" - r2.catch.to_display_text . should_equal "Table NONEXISTENT-TABLE was not found in the database." - - r3 = data.connection.query (SQL_Query.Raw_SQL "MALFORMED-QUERY") - r3.should_fail_with SQL_Error - - group_builder.specify "should not allow interpolations in raw user-built queries" <| - r = data.connection.query (SQL_Query.Raw_SQL "SELECT 1 + ?") - r.should_fail_with Illegal_Argument - - group_builder.specify "should make a best-effort attempt at returning a reasonable error for the short-hand" <| - r2 = data.connection.query "NONEXISTENT-TABLE" - r2.should_fail_with Table_Not_Found - r2.catch.name . should_equal "NONEXISTENT-TABLE" - r2.catch.treated_as_query . should_be_true - error_text = r2.catch.to_display_text - Test.with_clue "r2.catch.to_display_text = "+error_text <| - error_text.starts_with "The name NONEXISTENT-TABLE was treated as a query, but the query failed" . should_be_true - error_text.ends_with "wrap it in `SQL_Query.Table_Name`." . should_be_true - - r3 = data.connection.query "SELECT * FROM ........" 
- r3.should_fail_with SQL_Error - - group_builder.specify "will fail if the table is modified and a column gets removed" <| - name = Name_Generator.random_name "removing-column" - Problems.assume_no_problems <| - (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True - - t1 = data.connection.query name - m1 = t1.read - Problems.assume_no_problems m1 - m1.at "a" . to_vector . should_equal [1, 2, 3] - m1.at "b" . to_vector . should_equal [4, 5, 6] - - Problems.assume_no_problems <| data.connection.drop_table name - Problems.assume_no_problems <| - (Table.new [["a", [100, 200]]]).select_into_database_table data.connection name temporary=True - - # Reading a column that was kept will work OK - t1.at "a" . to_vector . should_equal [100, 200] - - # But reading the whole table will fail on the missing column: - m2 = t1.read - m2.should_fail_with SQL_Error - - group_builder.specify "will not fail if the table is modified and a column gets added" <| - name = Name_Generator.random_name "adding-column" - Problems.assume_no_problems <| - (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True - - t1 = data.connection.query name - m1 = t1.read - Problems.assume_no_problems m1 - m1.at "a" . to_vector . should_equal [1, 2, 3] - m1.at "b" . to_vector . should_equal [4, 5, 6] - - Problems.assume_no_problems <| data.connection.drop_table name - Problems.assume_no_problems <| - (Table.new [["a", [100, 200]], ["b", [300, 400]], ["c", [500, 600]]]).select_into_database_table data.connection name temporary=True - - m2 = t1.read - Problems.assume_no_problems m2 - m2.column_names . should_equal ["a", "b"] - m2.at "a" . to_vector . should_equal [100, 200] - m2.at "b" . to_vector . should_equal [300, 400] - - t1.at "c" . should_fail_with No_Such_Column - - t2 = data.connection.query name - t2.column_names . 
should_equal ["a", "b", "c"] - - - suite_builder.group (prefix + "Masking Tables") group_builder-> - data = Basic_Data.setup create_connection_fn - - group_builder.teardown <| - data.teardown - - group_builder.specify "should allow to select rows from a table or column based on an expression" <| - t2 = data.t1.filter (data.t1.at "a" == 1) - df = t2.read - df.at "a" . to_vector . should_equal [1] - df.at "b" . to_vector . should_equal [2] - df.at "c" . to_vector . should_equal [3] - t2.at "a" . to_vector . should_equal [1] - t2.at "b" . to_vector . should_equal [2] - t2.at "c" . to_vector . should_equal [3] - - suite_builder.group (prefix + "Missing Values") group_builder-> - data = Missing_Values_Data.setup create_connection_fn - - group_builder.teardown <| - data.teardown - - group_builder.specify "fill_nothing should replace nulls" <| - data.t4.at 'a' . fill_nothing 10 . to_vector . should_equal [0, 1, 10, 42, 10] - data.t4.at 'b' . fill_nothing False . to_vector . should_equal [True, False, True, False, False] - data.t4.at 'c' . fill_nothing "NA" . to_vector . should_equal ["", "foo", "bar", "NA", "NA"] - - group_builder.specify "should correctly be counted" <| - data.t4.row_count . should_equal 5 - col = data.t4.at 'a' - col.length . should_equal 5 - col.count . should_equal 3 - col.count_nothing . should_equal 2 - - - suite_builder.group (prefix + "Sorting") group_builder-> - data = Sorting_Data.setup create_connection_fn - - group_builder.teardown <| - data.teardown - - group_builder.specify "should allow sorting by a single column name" <| - r_1 = data.df.order_by ([Sort_Column.Name 'quantity']) - r_1.at 'id' . to_vector . should_equal [2,4,1,3,5,6] - - r_3 = data.df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending]) - r_3.at 'id' . to_vector . should_equal [3,1,4,5,2,6] - - group_builder.specify 'should allow sorting by multiple column names' <| - r_1 = data.df.order_by ([Sort_Column.Name 'quantity', Sort_Column.Name 'rating']) - r_1.at 'id' . 
to_vector . should_equal [2,4,1,3,6,5] - - r_2 = data.df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending, Sort_Column.Name 'quantity' Sort_Direction.Descending]) - r_2.at 'id' . to_vector . should_equal [3,1,4,5,6,2] - - - group_builder.specify 'should allow sorting with specific by-column rules' <| - r_1 = data.df.order_by ([Sort_Column.Name "quantity", Sort_Column.Name "price" Sort_Direction.Descending]) - r_1.at 'id' . to_vector . should_equal [4,2,3,1,6,5] - - group_builder.specify 'should correctly reorder all kinds of columns and leave the original columns untouched' <| - r = data.t8.order_by ([Sort_Column.Name 'ord']) - - r.at 'ints' . to_vector . should_equal [1, 5, 3, 2, 4] - data.t8.at 'ints' . to_vector . should_equal data.ints - - r.at 'reals' . to_vector . should_equal [1.3, 1.6, 3.2, 4.6, 5.2] - data.t8.at 'reals' . to_vector . should_equal data.reals - - r.at 'bools' . to_vector . should_equal [False, False, True, False, True] - data.t8.at 'bools' . to_vector . should_equal data.bools - - r.at 'texts' . to_vector . should_equal ['foo', 'spam', 'bar', 'foo', 'baz'] - data.t8.at 'texts' . to_vector . should_equal data.texts - - group_builder.specify 'should sort columns with specified ordering and missing placement' <| - c = data.df.at 'rating' - - r_1 = c.sort - r_1.to_vector.should_equal [Nothing, Nothing, 2.2, 3.0, 3.0, 7.3] - - r_2 = c.sort Sort_Direction.Descending - r_2.to_vector.should_equal [7.3, 3.0, 3.0, 2.2, Nothing, Nothing] - - - suite_builder.group prefix+"Aggregation" group_builder-> - data = Aggregation_Data.setup create_connection_fn - - group_builder.teardown <| - data.teardown - - ## A helper which makes sure that the groups in a materialized - (InMemory) table are ordered according to a specified column or list - of columns. 
- determinize_by order_column table = - table.order_by ([Sort_Column.Name order_column]) - - group_builder.specify "should allow counting group sizes and elements" <| - ## Names set to lower case to avoid issue with Redshift where columns are - returned in lower case. - aggregates = [Count "count", Count_Not_Nothing "price" "count not nothing price", Count_Nothing "price" "count nothing price"] - - t1 = determinize_by "name" (data.t9.aggregate ([Group_By "name"] + aggregates) . read) - t1.at "name" . to_vector . should_equal ["bar", "baz", "foo", "quux", "zzzz"] - t1.at "count" . to_vector . should_equal [2, 1, 5, 1, 7] - t1.at "count not nothing price" . to_vector . should_equal [2, 1, 3, 0, 5] - t1.at "count nothing price" . to_vector . should_equal [0, 0, 2, 1, 2] - - t2 = data.t9.aggregate aggregates . read - t2.at "count" . to_vector . should_equal [16] - t2.at "count not nothing price" . to_vector . should_equal [11] - t2.at "count nothing price" . to_vector . should_equal [5] - - group_builder.specify "should allow simple arithmetic aggregations" <| - ## Names set to lower case to avoid issue with Redshift where columns are - returned in lower case. - aggregates = [Sum "price" "sum price", Sum "quantity" "sum quantity", Average "price" "avg price"] - ## TODO can check the datatypes - - t1 = determinize_by "name" (data.t9.aggregate ([Group_By "name"] + aggregates) . read) - t1.at "name" . to_vector . should_equal ["bar", "baz", "foo", "quux", "zzzz"] - t1.at "sum price" . to_vector . should_equal [100.5, 6.7, 1, Nothing, 2] - t1.at "sum quantity" . to_vector . should_equal [80, 40, 120, 70, 2] - t1.at "avg price" . to_vector . should_equal [50.25, 6.7, (1/3), Nothing, (2/5)] - - t2 = data.t9.aggregate aggregates . read - t2.at "sum price" . to_vector . should_equal [110.2] - t2.at "sum quantity" . to_vector . should_equal [312] - t2.at "avg price" . to_vector . 
should_equal [(110.2 / 11)] - - suite_builder.group prefix+"Table.filter" group_builder-> - data = Basic_Data.setup create_connection_fn - - group_builder.teardown <| - data.teardown - - group_builder.specify "report error when trying to filter by a custom predicate" <| - data.t1.filter "a" (x -> x % 2 == 0) . should_fail_with Unsupported_Database_Operation - From b25ea2cf6e234bfa702d1ab27dee9d50688efe69 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 16 Jan 2024 18:40:43 +0100 Subject: [PATCH 30/93] Group can be pending --- .../lib/Standard/Test_New/0.0.0-dev/src/Group.enso | 2 +- .../lib/Standard/Test_New/0.0.0-dev/src/Suite.enso | 13 ++++++++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso index 13bf70626e0b..c556c73a39c2 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso @@ -32,7 +32,7 @@ type Group_Builder type Group - Impl (name : Text) (teardown : (Any -> Any)) (specs : Vector Spec) + Impl (name : Text) (teardown : (Any -> Any)) (specs : Vector Spec) (pending : (Text | Nothing) = Nothing) to_text self = sb = StringBuilder.new diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index a538fafa9a89..06b12976a7fc 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -15,11 +15,15 @@ type Suite_Builder Impl builder ## Add a group to the builder. + + Arguments: + - pending: Contains a reason for why the test group should be ignored. If Nothing, the test + is not ignored. 
group : Text -> (Group_Builder -> Any) -> Nothing - group self (name:Text) (fn : (Group_Builder -> Any)) = + group self (name:Text) (fn : (Group_Builder -> Any)) (pending : (Text | Nothing) = Nothing) = group_builder = Group_Builder.Impl fn group_builder - group = Group.Impl name group_builder.teardown_ref.get group_builder.builder.to_vector + group = Group.Impl name group_builder.teardown_ref.get group_builder.builder.to_vector pending self.builder.append group @@ -55,7 +59,10 @@ type Suite group_filter_conv = convert_filter group_filter spec_filter_conv = convert_filter spec_filter filtered_groups = self.groups.filter group-> - (group_filter_conv.match group.name) != Nothing + group_name_matches = (group_filter_conv.match group.name) != Nothing + case group_name_matches of + False -> False + True -> group.pending == Nothing filtered_groups.each group-> State.run Running_Tests True <| results = Helpers.run_group_with_filter group spec_filter_conv From 80375e8a0a2b69f24db055201424657cb05f845a Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Wed, 17 Jan 2024 10:39:55 +0100 Subject: [PATCH 31/93] Refactor bunch of Table_Test files to the new builder API Mostly just applying a series of regex substitutions. Printing of all the SQLite_Spec tests work. 
--- .../Standard/Test_New/0.0.0-dev/src/Main.enso | 2 + .../Add_Row_Number_Spec.enso | 43 +- .../Aggregate_Spec.enso | 442 +++++++------- .../Column_Name_Edge_Cases_Spec.enso | 23 +- .../Column_Operations_Spec.enso | 323 +++++----- .../Conversion_Spec.enso | 128 ++-- .../Common_Table_Operations/Core_Spec.enso | 80 ++- .../Cross_Tab_Spec.enso | 55 +- .../Date_Time_Spec.enso | 69 ++- .../Derived_Columns_Spec.enso | 46 +- .../Distinct_Spec.enso | 23 +- .../Expression_Spec.enso | 127 ++-- .../Common_Table_Operations/Filter_Spec.enso | 73 ++- .../Integration_Tests.enso | 33 +- .../Join/Cross_Join_Spec.enso | 26 +- .../Join/Join_Spec.enso | 85 ++- .../Join/Lookup_Spec.enso | 51 +- .../Join/Union_Spec.enso | 59 +- .../Join/Zip_Spec.enso | 33 +- .../src/Common_Table_Operations/Main.enso | 68 ++- .../src/Common_Table_Operations/Map_Spec.enso | 39 +- .../Missing_Values_Spec.enso | 49 +- .../Order_By_Spec.enso | 56 +- .../Select_Columns_Spec.enso | 140 +++-- .../Take_Drop_Spec.enso | 48 +- .../Temp_Column_Spec.enso | 13 +- .../Transpose_Spec.enso | 25 +- .../src/Database/Common/Common_Spec.enso | 8 +- .../Common/Default_Ordering_Spec.enso | 2 +- .../Table_Tests/src/Database/SQLite_Spec.enso | 316 ++++++---- .../src/Database/Transaction_Spec.enso | 57 +- .../Types/SQLite_Type_Mapping_Spec.enso | 52 +- .../Table_Tests/src/Database/Upload_Spec.enso | 562 +++++++++--------- test/Tests/src/Data/Round_Spec.enso | 44 +- 34 files changed, 1651 insertions(+), 1549 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso index 5558bbccef71..d5ebf1f29321 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Main.enso @@ -1,7 +1,9 @@ import project.Suite.Suite import project.Test.Test +import project.Problems from project.Extensions import all export project.Suite.Suite export project.Test.Test +export project.Problems from 
project.Extensions export all diff --git a/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso index 6584789018d0..5e5e715706fe 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso @@ -8,23 +8,22 @@ from Standard.Table import Sort_Column from Standard.Table.Data.Aggregate_Column.Aggregate_Column import Group_By, Sum from Standard.Table.Errors import Missing_Input_Columns, Duplicate_Output_Column_Names, Floating_Point_Equality -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + import project.Database.Helpers.Name_Generator from project.Common_Table_Operations.Util import run_default_backend polyglot java import java.lang.Long as Java_Long -main = run_default_backend spec -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder materialize = setup.materialize - Test.group prefix+"Table.add_row_number (common)" <| - Test.specify "should rename existing column upon a name clash" <| + suite_builder.group prefix+"Table.add_row_number (common)" group_builder-> + group_builder.specify "should rename existing column upon a name clash" <| t1 = table_builder [["X", ['a', 'b']], ["Y", ['c', 'd']], ["Z", [40, 20]]] t2 = t1.add_row_number name="Y" order_by=["X"] |> materialize |> _.order_by "X" @@ -38,13 +37,13 @@ spec setup = r3 = t1.add_row_number name="X" on_problems=Problem_Behavior.Report_Error r3.should_fail_with Duplicate_Output_Column_Names - Test.specify "should allow to order the row numbers by some columns" <| + group_builder.specify "should allow to order the row numbers by some columns" <| t2 = table_builder [["X", ["a", "b", "a", "a"]], ["Y", [1, 2, 3, 4]]] t3 = t2.add_row_number order_by=["X", (Sort_Column.Name "Y" Sort_Direction.Descending)] |> materialize |> 
_.order_by "Y" t3.at "Y" . to_vector . should_equal [1, 2, 3, 4] t3.at "Row" . to_vector . should_equal [3, 4, 2, 1] - Test.specify "should allow mixing grouping with ordering and custom start and step" <| + group_builder.specify "should allow mixing grouping with ordering and custom start and step" <| vx = ['a', 'b', 'a', 'a', 'a', 'b', 'c', 'c'] vy = [9, 8, 7, 6, 5, 4, 100, 200] vr = [1, 2, 3, 4, 5, 6, 7, 8] @@ -56,7 +55,7 @@ spec setup = t1.at "Y" . to_vector . should_equal vy t1.at "Row" . to_vector . should_equal [400, 200, 300, 200, 100, 100, 100, 200] - Test.specify "should report floating point equality warning when grouping on float columns" <| + group_builder.specify "should report floating point equality warning when grouping on float columns" <| t = table_builder [["X", [1.0, 1.5, 1.0, 2.5, 2.5]], ["row_id", [1, 2, 3, 4, 5]]] t1 = t.add_row_number group_by=["X"] order_by=["row_id"] |> materialize |> _.order_by "row_id" Problems.expect_warning Floating_Point_Equality t1 @@ -75,7 +74,7 @@ spec setup = Problems.expect_warning Floating_Point_Equality t5 t5.at "Row" . to_vector . should_equal [1, 1, 2, 1, 2, 3, 4] - Test.specify "should fail if columns provided in ordering/grouping do not exist" <| + group_builder.specify "should fail if columns provided in ordering/grouping do not exist" <| t = table_builder [["X", [20, 30, 10]]] r1 = t.add_row_number group_by=["X", "Y", "Z"] order_by=["X"] r1.should_fail_with Missing_Input_Columns @@ -88,7 +87,7 @@ spec setup = r3 = t.add_row_number group_by=[44] order_by=["X"] r3.should_fail_with Missing_Input_Columns - Test.specify "will respect the row order of order_by" <| + group_builder.specify "will respect the row order of order_by" <| t = table_builder [["X", [1, 2, 3, 4]], ["Y", [40, 30, 20, 10]]] t1 = t.order_by "Y" @@ -98,7 +97,7 @@ spec setup = t2.at "Y" . to_vector . should_equal [10, 20, 30, 40] t2.at "Row" . to_vector . 
should_equal [1, 2, 3, 4] - Test.specify "Should work correctly after aggregation" <| + group_builder.specify "Should work correctly after aggregation" <| t0 = table_builder [["X", ["a", "b", "a", "c"]], ["Y", [1, 2, 4, 8]]] t1 = t0.aggregate [Group_By "X", Sum "Y"] @@ -107,8 +106,8 @@ spec setup = t2.at "Sum Y" . to_vector . should_equal [5.0, 2.0, 8.0] t2.at "Row" . to_vector . should_equal [1, 2, 3] - if setup.is_database.not then Test.group prefix+"Table.add_row_number (in-memory specific)" <| - Test.specify "should add a row numbering column" <| + if setup.is_database.not then suite_builder.group prefix+"Table.add_row_number (in-memory specific)" group_builder-> + group_builder.specify "should add a row numbering column" <| t = table_builder [["X", ['a', 'b', 'a', 'a', 'c']]] t1 = t.add_row_number rows = t1.rows.to_vector . map .to_vector @@ -116,7 +115,7 @@ spec setup = t1.at "Row" . to_vector . should_equal [1, 2, 3, 4, 5] t1.at "Row" . value_type . is_integer . should_be_true - Test.specify "should allow customizing the starting index and step" <| + group_builder.specify "should allow customizing the starting index and step" <| t = table_builder [["X", ['a', 'b', 'a']]] t1 = t.add_row_number from=10 t1.at "Row" . to_vector . should_equal [10, 11, 12] @@ -133,7 +132,7 @@ spec setup = t5 = t.add_row_number from=(-1) t5.at "Row" . to_vector . should_equal [-1, 0, 1] - Test.specify "should allow to assign row numbers separately within each group" <| + group_builder.specify "should allow to assign row numbers separately within each group" <| t = table_builder [["X", ['a', 'a', 'a', 'a', 'b', 'b']], ["Y", [40, 30, 20, 40, 20, 10]]] t1 = t.add_row_number group_by=["X"] t1.at "Row" . to_vector . should_equal [1, 2, 3, 4, 1, 2] @@ -142,7 +141,7 @@ spec setup = t3 = t2.add_row_number group_by=["X", "Y"] t3.at "Row" . to_vector . 
should_equal [1, 2, 1, 2, 1, 1] - Test.specify "should allow to assign row numbers separately within scattered groups, preserving the row layout" <| + group_builder.specify "should allow to assign row numbers separately within scattered groups, preserving the row layout" <| v = ['a', 'b', 'a', 'b', 'b', 'b', 'c', 'a'] t = table_builder [["X", v]] t1 = t.add_row_number group_by=["X"] @@ -150,7 +149,7 @@ spec setup = t1.at "X" . to_vector . should_equal v t1.at "Row" . to_vector . should_equal [1, 1, 2, 2, 3, 4, 1, 3] - Test.specify "should allow mixing grouping with ordering and custom start and step, preserving the original row layout" <| + group_builder.specify "should allow mixing grouping with ordering and custom start and step, preserving the original row layout" <| vx = ['a', 'b', 'a', 'a', 'a', 'b', 'c', 'c'] vy = [9, 8, 7, 6, 5, 4, 100, 200] vr = [1, 2, 3, 4, 5, 6, 7, 8] @@ -163,7 +162,7 @@ spec setup = t1.at "Y" . to_vector . should_equal vy t1.at "Row" . to_vector . should_equal [400, 200, 300, 200, 100, 100, 100, 200] - Test.specify "should allow to order the row numbers by some columns, keeping the row ordering intact" <| + group_builder.specify "should allow to order the row numbers by some columns, keeping the row ordering intact" <| v = [9, 8, 7, 6, 5, 4, 100, 200] t = table_builder [["X", v]] t1 = t.add_row_number order_by=["X"] @@ -175,7 +174,7 @@ spec setup = t3 = t2.add_row_number order_by=["X", (Sort_Column.Name "Y" Sort_Direction.Descending)] t3.at "Row" . to_vector . should_equal [3, 4, 2, 1] - Test.specify "will fail if the row number exceeds Long range" <| + group_builder.specify "will fail if the row number exceeds Long range" <| max_long = Java_Long.MAX_VALUE t1 = table_builder [["X", [1, 2, 3, 4, 5]], ["Y", [1, 2, 2, 2, 2]], ["Z", [1, 5, 3, 4, 2]]] @@ -195,8 +194,8 @@ spec setup = t1.add_row_number from=(max_long + 10) order_by=["Z"] . should_fail_with Illegal_Argument t1.add_row_number from=(max_long + 10) group_by=["Y"] order_by=["Z"] . 
should_fail_with Illegal_Argument - if setup.is_database then Test.group prefix+"Table.add_row_number (Database specific)" <| - Test.specify "will use the primary key by default" <| + if setup.is_database then suite_builder.group prefix+"Table.add_row_number (Database specific)" group_builder-> + group_builder.specify "will use the primary key by default" <| src = table_builder [["X", [500, 400, 30, 1, 2]], ["Y", [10, 20, 30, 40, 50]]] db_table = src.select_into_database_table setup.connection (Name_Generator.random_name "add-row-number-test-1") temporary=True primary_key=["X"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso index 6a04f584e6f8..d28cca666d4d 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso @@ -7,23 +7,33 @@ from Standard.Table.Errors import all from Standard.Database.Errors import Unsupported_Database_Operation -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + from project.Common_Table_Operations.Util import run_default_backend polyglot java import java.lang.Double -main = run_default_backend spec - type Test_Selection Config problem_handling=True advanced_stats=True text_concat=True text_shortest_longest=True first_last=True first_last_row_order=True std_dev=True multi_distinct=True aggregation_problems=True nan=True date_support=True +type Data + Value ~data + + table self = self.data.at 0 + empty_table self = self.data.at 1 + + setup table_fn empty_table_fn = Data.Value <| + table = table_fn Nothing + empty_table = empty_table_fn Nothing + [table, empty_table] + + ## Runs the common aggregate tests. 
-spec setup = +add_specs suite_builder setup = prefix = setup.prefix - table = setup.table - empty_table = setup.empty_table + table_fn = setup.table_fn + empty_table_fn = setup.empty_table_fn table_builder = setup.table_builder materialize = setup.materialize is_database = setup.is_database @@ -40,9 +50,11 @@ spec setup = resolve_pending enabled_flag=Nothing = if enabled_flag.not then "Not supported." else Nothing - Test.group prefix+"Table.aggregate should summarize whole table" <| - Test.specify "should be able to count" <| - grouped = table.aggregate [Count] + suite_builder.group prefix+"Table.aggregate should summarize whole table" group_builder-> + data = Data.setup table_fn empty_table_fn + + group_builder.specify "should be able to count" <| + grouped = data.table.aggregate [Count] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -50,8 +62,8 @@ spec setup = materialized.columns.at 0 . name . should_equal "Count" materialized.columns.at 0 . at 0 . should_equal 2500 - Test.specify "should be able to count missing values" <| - grouped = table.aggregate [Count_Nothing "Hexadecimal", Count_Not_Nothing "Hexadecimal", Count_Empty "TextWithNothing", Count_Not_Empty "TextWithNothing"] + group_builder.specify "should be able to count missing values" <| + grouped = data.table.aggregate [Count_Nothing "Hexadecimal", Count_Not_Nothing "Hexadecimal", Count_Empty "TextWithNothing", Count_Not_Empty "TextWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -65,8 +77,8 @@ spec setup = materialized.columns.at 3 . name . should_equal "Count Not Empty TextWithNothing" materialized.columns.at 3 . at 0 . 
should_equal 2251 - Test.specify "should be able to count distinct values" <| - grouped = table.aggregate [Count_Distinct "Code", Count_Distinct "Index", Count_Distinct "Flag"] + group_builder.specify "should be able to count distinct values" <| + grouped = data.table.aggregate [Count_Distinct "Code", Count_Distinct "Index", Count_Distinct "Flag"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -78,8 +90,8 @@ spec setup = materialized.columns.at 2 . name . should_equal "Count Distinct Flag" materialized.columns.at 2 . at 0 . should_equal 2 - Test.specify "should be able to count distinct values over multiple columns" (pending = resolve_pending test_selection.multi_distinct) <| - grouped = table.aggregate [Count_Distinct ["Index", "Flag"]] + group_builder.specify "should be able to count distinct values over multiple columns" (pending = resolve_pending test_selection.multi_distinct) <| + grouped = data.table.aggregate [Count_Distinct ["Index", "Flag"]] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -87,8 +99,8 @@ spec setup = materialized.columns.at 0 . name . should_equal "Count Distinct Index Flag" materialized.columns.at 0 . at 0 . should_equal 20 - Test.specify "should be able to compute sum and average of values" <| - grouped = table.aggregate [Sum "Value", Sum "ValueWithNothing", Average "Value", Average "ValueWithNothing"] + group_builder.specify "should be able to compute sum and average of values" <| + grouped = data.table.aggregate [Sum "Value", Sum "ValueWithNothing", Average "Value", Average "ValueWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -102,8 +114,8 @@ spec setup = materialized.columns.at 3 . name . should_equal "Average ValueWithNothing" materialized.columns.at 3 . at 0 . 
should_equal 1.228650 epsilon=0.000001 - Test.specify "should be able to compute standard deviation of values" (pending = resolve_pending test_selection.std_dev) <| - grouped = table.aggregate [Standard_Deviation "Value", Standard_Deviation "ValueWithNothing", (Standard_Deviation "Value" population=True), (Standard_Deviation "ValueWithNothing" population=True)] + group_builder.specify "should be able to compute standard deviation of values" (pending = resolve_pending test_selection.std_dev) <| + grouped = data.table.aggregate [Standard_Deviation "Value", Standard_Deviation "ValueWithNothing", (Standard_Deviation "Value" population=True), (Standard_Deviation "ValueWithNothing" population=True)] materialized = materialize grouped grouped.row_count . should_equal 1 materialized.column_count . should_equal 4 @@ -116,8 +128,8 @@ spec setup = materialized.columns.at 3 . name . should_equal "Standard Deviation ValueWithNothing 1" materialized.columns.at 3 . at 0 . should_equal 58.575554 epsilon=0.000001 - Test.specify "should be able to create median, mode and percentile values" (pending = resolve_pending test_selection.advanced_stats) <| - grouped = table.aggregate [Median "Index", Median "Value", Median "ValueWithNothing", Mode "Index", Percentile 0.25 "Value", Percentile 0.40 "ValueWithNothing"] + group_builder.specify "should be able to create median, mode and percentile values" (pending = resolve_pending test_selection.advanced_stats) <| + grouped = data.table.aggregate [Median "Index", Median "Value", Median "ValueWithNothing", Mode "Index", Percentile 0.25 "Value", Percentile 0.40 "ValueWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -135,8 +147,8 @@ spec setup = materialized.columns.at 5 . name . should_equal "40%-ile ValueWithNothing" materialized.columns.at 5 . at 0 . 
should_equal -17.960000 epsilon=0.000001 - Test.specify "should be able to get first and last values" (pending = resolve_pending test_selection.first_last) <| - grouped = table.aggregate [First "Index" (order_by = [Sort_Column.Name "Value", Sort_Column.Name "TextWithNothing"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value"])] + group_builder.specify "should be able to get first and last values" (pending = resolve_pending test_selection.first_last) <| + grouped = data.table.aggregate [First "Index" (order_by = [Sort_Column.Name "Value", Sort_Column.Name "TextWithNothing"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value"])] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -146,8 +158,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Last ValueWithNothing" materialized.columns.at 1 . at 0 . should_equal -89.78 epsilon=0.000001 - Test.specify "should be able to get first and last values with mixed ordering" (pending = resolve_pending test_selection.first_last) <| - grouped = table.aggregate [First "TextWithNothing" (order_by = [Sort_Column.Name "Value" Sort_Direction.Descending, Sort_Column.Name "Code"]), First "TextWithNothing" (order_by = [Sort_Column.Name "Code", Sort_Column.Name "Value" Sort_Direction.Descending]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value" Sort_Direction.Descending])] + group_builder.specify "should be able to get first and last values with mixed ordering" (pending = resolve_pending test_selection.first_last) <| + grouped = data.table.aggregate [First "TextWithNothing" (order_by = [Sort_Column.Name "Value" Sort_Direction.Descending, Sort_Column.Name "Code"]), First "TextWithNothing" (order_by = [Sort_Column.Name "Code", Sort_Column.Name "Value" Sort_Direction.Descending]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value" Sort_Direction.Descending])] materialized = materialize grouped grouped.row_count . 
should_equal 1 materialized.column_count . should_equal 3 @@ -158,8 +170,8 @@ spec setup = materialized.columns.at 2 . name . should_equal "Last ValueWithNothing" materialized.columns.at 2 . at 0 . should_equal -38.56 epsilon=0.000001 - Test.specify "should be able to get first and last values with default row order" (pending = resolve_pending test_selection.first_last_row_order) <| - grouped = table.aggregate [First "Index", Last "Value"] + group_builder.specify "should be able to get first and last values with default row order" (pending = resolve_pending test_selection.first_last_row_order) <| + grouped = data.table.aggregate [First "Index", Last "Value"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -169,8 +181,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Last Value" materialized.columns.at 1 . at 0 . should_equal 70.99931 epsilon=0.000001 - Test.specify "should be able to get minimum and maximum values" <| - grouped = table.aggregate [Minimum "Value", Maximum "Value", Minimum "ValueWithNothing", Maximum "ValueWithNothing"] + group_builder.specify "should be able to get minimum and maximum values" <| + grouped = data.table.aggregate [Minimum "Value", Maximum "Value", Minimum "ValueWithNothing", Maximum "ValueWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -184,8 +196,8 @@ spec setup = materialized.columns.at 3 . name . should_equal "Maximum ValueWithNothing" materialized.columns.at 3 . at 0 . 
should_equal 99.95 epsilon=0.000001 - Test.specify "should be able to get shortest and longest text values" (pending = resolve_pending test_selection.text_shortest_longest) <| - grouped = table.aggregate [Shortest "TextWithNothing", Longest "TextWithNothing"] + group_builder.specify "should be able to get shortest and longest text values" (pending = resolve_pending test_selection.text_shortest_longest) <| + grouped = data.table.aggregate [Shortest "TextWithNothing", Longest "TextWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -195,8 +207,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Longest TextWithNothing" materialized.columns.at 1 . at 0 . should_equal "setp295gjvbanana" - Test.specify "should be able to get concatenated text values" (pending = resolve_pending test_selection.text_concat) <| - grouped = table.aggregate [Concatenate "Code"] + group_builder.specify "should be able to get concatenated text values" (pending = resolve_pending test_selection.text_concat) <| + grouped = data.table.aggregate [Concatenate "Code"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -204,9 +216,11 @@ spec setup = materialized.columns.at 0 . name . should_equal "Concatenate Code" materialized.columns.at 0 . at 0 . length . should_equal 7500 - Test.group prefix+"Table.aggregate should summarize empty table" <| - Test.specify "should be able to count" <| - grouped = empty_table.aggregate [Count] + suite_builder.group prefix+"Table.aggregate should summarize empty table" group_builder-> + data = Data.setup table_fn empty_table_fn + + group_builder.specify "should be able to count" <| + grouped = data.empty_table.aggregate [Count] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -214,8 +228,8 @@ spec setup = materialized.columns.at 0 . name . 
should_equal "Count" materialized.columns.at 0 . at 0 . should_equal 0 - Test.specify "should be able to count missing values" <| - grouped = empty_table.aggregate [Count_Nothing "Hexadecimal", Count_Not_Nothing "Hexadecimal", Count_Empty "TextWithNothing", Count_Not_Empty "TextWithNothing"] + group_builder.specify "should be able to count missing values" <| + grouped = data.empty_table.aggregate [Count_Nothing "Hexadecimal", Count_Not_Nothing "Hexadecimal", Count_Empty "TextWithNothing", Count_Not_Empty "TextWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -229,8 +243,8 @@ spec setup = materialized.columns.at 3 . name . should_equal "Count Not Empty TextWithNothing" materialized.columns.at 3 . at 0 . should_equal 0 - Test.specify "should be able to count distinct values" <| - grouped = empty_table.aggregate [Count_Distinct "Code" (ignore_nothing=False), Count_Distinct "Code" (ignore_nothing=True)] + group_builder.specify "should be able to count distinct values" <| + grouped = data.empty_table.aggregate [Count_Distinct "Code" (ignore_nothing=False), Count_Distinct "Code" (ignore_nothing=True)] materialized = materialize grouped grouped.row_count . should_equal 1 materialized.column_count . should_equal 2 @@ -239,8 +253,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Count Distinct Code 1" materialized.columns.at 1 . at 0 . should_equal 0 - Test.specify "should be able to compute sum and average of values" <| - grouped = empty_table.aggregate [Sum "Value", Average "ValueWithNothing"] + group_builder.specify "should be able to compute sum and average of values" <| + grouped = data.empty_table.aggregate [Sum "Value", Average "ValueWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -250,8 +264,8 @@ spec setup = materialized.columns.at 1 . name . 
should_equal "Average ValueWithNothing" materialized.columns.at 1 . at 0 . should_equal Nothing - Test.specify "should be able to compute standard deviation of values" (pending = resolve_pending test_selection.std_dev) <| - grouped = empty_table.aggregate [Standard_Deviation "Value", (Standard_Deviation "ValueWithNothing" population=True)] + group_builder.specify "should be able to compute standard deviation of values" (pending = resolve_pending test_selection.std_dev) <| + grouped = data.empty_table.aggregate [Standard_Deviation "Value", (Standard_Deviation "ValueWithNothing" population=True)] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -261,8 +275,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Standard Deviation ValueWithNothing" materialized.columns.at 1 . at 0 . should_equal Nothing - Test.specify "should be able to create median, mode and percentile values" (pending = resolve_pending test_selection.advanced_stats) <| - grouped = empty_table.aggregate [Median "Index", Mode "Index", Percentile 0.25 "Value"] + group_builder.specify "should be able to create median, mode and percentile values" (pending = resolve_pending test_selection.advanced_stats) <| + grouped = data.empty_table.aggregate [Median "Index", Mode "Index", Percentile 0.25 "Value"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -274,8 +288,8 @@ spec setup = materialized.columns.at 2 . name . should_equal "25%-ile Value" materialized.columns.at 2 . at 0 . 
should_equal Nothing - Test.specify "should be able to get first and last values" (pending = resolve_pending test_selection.first_last) <| - grouped = empty_table.aggregate [First "Index" (order_by = [Sort_Column.Name "Hexadecimal", Sort_Column.Name "TextWithNothing"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value"])] + group_builder.specify "should be able to get first and last values" (pending = resolve_pending test_selection.first_last) <| + grouped = data.empty_table.aggregate [First "Index" (order_by = [Sort_Column.Name "Hexadecimal", Sort_Column.Name "TextWithNothing"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value"])] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -285,8 +299,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Last ValueWithNothing" materialized.columns.at 1 . at 0 . should_equal Nothing - Test.specify "should be able to get first and last values with default row order" (pending = resolve_pending test_selection.first_last_row_order) <| - grouped = empty_table.aggregate [First "Index", Last "Value"] + group_builder.specify "should be able to get first and last values with default row order" (pending = resolve_pending test_selection.first_last_row_order) <| + grouped = data.empty_table.aggregate [First "Index", Last "Value"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -296,8 +310,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Last Value" materialized.columns.at 1 . at 0 . 
should_equal Nothing - Test.specify "should be able to get minimum and maximum values" <| - grouped = empty_table.aggregate [Minimum "Value", Maximum "ValueWithNothing"] + group_builder.specify "should be able to get minimum and maximum values" <| + grouped = data.empty_table.aggregate [Minimum "Value", Maximum "ValueWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -307,8 +321,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Maximum ValueWithNothing" materialized.columns.at 1 . at 0 . should_equal Nothing - Test.specify "should be able to get shortest and longest text values" (pending = resolve_pending test_selection.text_shortest_longest) <| - grouped = empty_table.aggregate [Shortest "TextWithNothing", Longest "TextWithNothing"] + group_builder.specify "should be able to get shortest and longest text values" (pending = resolve_pending test_selection.text_shortest_longest) <| + grouped = data.empty_table.aggregate [Shortest "TextWithNothing", Longest "TextWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -318,8 +332,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Longest TextWithNothing" materialized.columns.at 1 . at 0 . should_equal Nothing - Test.specify "should be able to get concatenated text values" (pending = resolve_pending test_selection.text_concat) <| - grouped = empty_table.aggregate [Concatenate "Code"] + group_builder.specify "should be able to get concatenated text values" (pending = resolve_pending test_selection.text_concat) <| + grouped = data.empty_table.aggregate [Concatenate "Code"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 1 @@ -327,9 +341,11 @@ spec setup = materialized.columns.at 0 . name . should_equal "Concatenate Code" materialized.columns.at 0 . at 0 . 
should_equal Nothing - Test.group prefix+"Table.aggregate should not summarize empty table when grouped" <| - Test.specify "should be able to count" <| - grouped = empty_table.aggregate [Group_By 0, Count] + suite_builder.group prefix+"Table.aggregate should not summarize empty table when grouped" group_builder-> + data = Data.setup table_fn empty_table_fn + + group_builder.specify "should be able to count" <| + grouped = data.empty_table.aggregate [Group_By 0, Count] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 0 @@ -337,8 +353,8 @@ spec setup = materialized.columns.at 0 . name . should_equal "Code" materialized.columns.at 1 . name . should_equal "Count" - Test.specify "should be able to count missing values" <| - grouped = empty_table.aggregate [Group_By 0, Count_Nothing "Hexadecimal", Count_Not_Nothing "Hexadecimal", Count_Empty "TextWithNothing", Count_Not_Empty "TextWithNothing"] + group_builder.specify "should be able to count missing values" <| + grouped = data.empty_table.aggregate [Group_By 0, Count_Nothing "Hexadecimal", Count_Not_Nothing "Hexadecimal", Count_Empty "TextWithNothing", Count_Not_Empty "TextWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 0 @@ -349,8 +365,8 @@ spec setup = materialized.columns.at 3 . name . should_equal "Count Empty TextWithNothing" materialized.columns.at 4 . name . should_equal "Count Not Empty TextWithNothing" - Test.specify "should be able to count distinct values" <| - grouped = empty_table.aggregate [Group_By 0, Count_Distinct "Code"] + group_builder.specify "should be able to count distinct values" <| + grouped = data.empty_table.aggregate [Group_By 0, Count_Distinct "Code"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 0 @@ -358,8 +374,8 @@ spec setup = materialized.columns.at 0 . name . 
should_equal "Code" materialized.columns.at 1 . name . should_equal "Count Distinct Code" - Test.specify "should be able to compute sum and average of values" <| - grouped = empty_table.aggregate [Group_By 0, Sum "Value", Average "ValueWithNothing"] + group_builder.specify "should be able to compute sum and average of values" <| + grouped = data.empty_table.aggregate [Group_By 0, Sum "Value", Average "ValueWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 0 @@ -368,8 +384,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Sum Value" materialized.columns.at 2 . name . should_equal "Average ValueWithNothing" - Test.specify "should be able to compute standard deviation of values" (pending = resolve_pending test_selection.std_dev) <| - grouped = empty_table.aggregate [Group_By 0, Standard_Deviation "Value", (Standard_Deviation "ValueWithNothing" population=True)] + group_builder.specify "should be able to compute standard deviation of values" (pending = resolve_pending test_selection.std_dev) <| + grouped = data.empty_table.aggregate [Group_By 0, Standard_Deviation "Value", (Standard_Deviation "ValueWithNothing" population=True)] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 0 @@ -378,8 +394,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Standard Deviation Value" materialized.columns.at 2 . name . 
should_equal "Standard Deviation ValueWithNothing" - Test.specify "should be able to create median values" (pending = resolve_pending test_selection.advanced_stats) <| - grouped = empty_table.aggregate [Group_By 0, Median "Index", Mode "Index", Percentile 0.25 "Value"] + group_builder.specify "should be able to create median values" (pending = resolve_pending test_selection.advanced_stats) <| + grouped = data.empty_table.aggregate [Group_By 0, Median "Index", Mode "Index", Percentile 0.25 "Value"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 0 @@ -389,8 +405,8 @@ spec setup = materialized.columns.at 2 . name . should_equal "Mode Index" materialized.columns.at 3 . name . should_equal "25%-ile Value" - Test.specify "should be able to get first and last values" (pending = resolve_pending test_selection.first_last) <| - grouped = empty_table.aggregate [Group_By 0, First "Index" (order_by = [Sort_Column.Name "Hexadecimal", Sort_Column.Name "TextWithNothing"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value"])] + group_builder.specify "should be able to get first and last values" (pending = resolve_pending test_selection.first_last) <| + grouped = data.empty_table.aggregate [Group_By 0, First "Index" (order_by = [Sort_Column.Name "Hexadecimal", Sort_Column.Name "TextWithNothing"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value"])] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 0 @@ -399,8 +415,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "First Index" materialized.columns.at 2 . name . 
should_equal "Last ValueWithNothing" - Test.specify "should be able to get first and last values with default row order" (pending = resolve_pending test_selection.first_last_row_order) <| - grouped = empty_table.aggregate [Group_By 0, First "Index", Last "Value"] + group_builder.specify "should be able to get first and last values with default row order" (pending = resolve_pending test_selection.first_last_row_order) <| + grouped = data.empty_table.aggregate [Group_By 0, First "Index", Last "Value"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 0 @@ -409,8 +425,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "First Index" materialized.columns.at 2 . name . should_equal "Last Value" - Test.specify "should be able to get minimum and maximum values" <| - grouped = empty_table.aggregate [Group_By 0, Minimum "Value", Maximum "ValueWithNothing"] + group_builder.specify "should be able to get minimum and maximum values" <| + grouped = data.empty_table.aggregate [Group_By 0, Minimum "Value", Maximum "ValueWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 0 @@ -419,8 +435,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Minimum Value" materialized.columns.at 2 . name . should_equal "Maximum ValueWithNothing" - Test.specify "should be able to get shortest and longest text values" (pending = resolve_pending test_selection.text_shortest_longest) <| - grouped = empty_table.aggregate [Group_By 0, Shortest "TextWithNothing", Longest "TextWithNothing"] + group_builder.specify "should be able to get shortest and longest text values" (pending = resolve_pending test_selection.text_shortest_longest) <| + grouped = data.empty_table.aggregate [Group_By 0, Shortest "TextWithNothing", Longest "TextWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . 
should_equal 0 @@ -429,8 +445,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Shortest TextWithNothing" materialized.columns.at 2 . name . should_equal "Longest TextWithNothing" - Test.specify "should be able to get concatenated text values" (pending = resolve_pending test_selection.text_concat) <| - grouped = empty_table.aggregate [Group_By 0, Concatenate "Code"] + group_builder.specify "should be able to get concatenated text values" (pending = resolve_pending test_selection.text_concat) <| + grouped = data.empty_table.aggregate [Group_By 0, Concatenate "Code"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 0 @@ -438,9 +454,11 @@ spec setup = materialized.columns.at 0 . name . should_equal "Code" materialized.columns.at 1 . name . should_equal "Concatenate Code" - Test.group prefix+"Table.aggregate should be able to group on single field" <| - Test.specify "should be able to count" <| - grouped = table.aggregate [Group_By "Index", Count] + suite_builder.group prefix+"Table.aggregate should be able to group on single field" group_builder-> + data = Data.setup table_fn empty_table_fn + + group_builder.specify "should be able to count" <| + grouped = data.table.aggregate [Group_By "Index", Count] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 10 @@ -451,8 +469,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Count" materialized.columns.at 1 . at idx . 
should_equal 261 - Test.specify "should be able to count missing values" <| - grouped = table.aggregate [Group_By "Index", Count_Nothing "Hexadecimal", Count_Not_Nothing "Hexadecimal", Count_Empty "TextWithNothing", Count_Not_Empty "TextWithNothing"] + group_builder.specify "should be able to count missing values" <| + grouped = data.table.aggregate [Group_By "Index", Count_Nothing "Hexadecimal", Count_Not_Nothing "Hexadecimal", Count_Empty "TextWithNothing", Count_Not_Empty "TextWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 10 @@ -469,8 +487,8 @@ spec setup = materialized.columns.at 4 . name . should_equal "Count Not Empty TextWithNothing" materialized.columns.at 4 . at idx . should_equal 230 - Test.specify "should be able to count distinct values" <| - grouped = table.aggregate [Group_By "Index", Count_Distinct "Code", Count_Distinct "Index", Count_Distinct "Flag"] + group_builder.specify "should be able to count distinct values" <| + grouped = data.table.aggregate [Group_By "Index", Count_Distinct "Code", Count_Distinct "Index", Count_Distinct "Flag"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 10 @@ -485,8 +503,8 @@ spec setup = materialized.columns.at 3 . name . should_equal "Count Distinct Flag" materialized.columns.at 3 . at idx . should_equal 2 - Test.specify "should be able to count distinct values over multiple columns" (pending = resolve_pending test_selection.multi_distinct) <| - grouped = table.aggregate [Group_By "Index", Count_Distinct ["Index", "Flag"]] + group_builder.specify "should be able to count distinct values over multiple columns" (pending = resolve_pending test_selection.multi_distinct) <| + grouped = data.table.aggregate [Group_By "Index", Count_Distinct ["Index", "Flag"]] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . 
should_equal 10 @@ -497,8 +515,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Count Distinct Index Flag" materialized.columns.at 1 . at idx . should_equal 2 - Test.specify "should be able to compute sum and average of values" <| - grouped = table.aggregate [Group_By "Index", Sum "Value", Sum "ValueWithNothing", Average "Value", Average "ValueWithNothing"] + group_builder.specify "should be able to compute sum and average of values" <| + grouped = data.table.aggregate [Group_By "Index", Sum "Value", Sum "ValueWithNothing", Average "Value", Average "ValueWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 10 @@ -515,8 +533,8 @@ spec setup = materialized.columns.at 4 . name . should_equal "Average ValueWithNothing" materialized.columns.at 4 . at idx . should_equal 0.646213 epsilon=0.000001 - Test.specify "should be able to compute standard deviation of values" (pending = resolve_pending test_selection.std_dev) <| - grouped = table.aggregate [Group_By "Index", Standard_Deviation "Value", Standard_Deviation "ValueWithNothing", (Standard_Deviation "Value" population=True), (Standard_Deviation "ValueWithNothing" population=True)] + group_builder.specify "should be able to compute standard deviation of values" (pending = resolve_pending test_selection.std_dev) <| + grouped = data.table.aggregate [Group_By "Index", Standard_Deviation "Value", Standard_Deviation "ValueWithNothing", (Standard_Deviation "Value" population=True), (Standard_Deviation "ValueWithNothing" population=True)] materialized = materialize grouped grouped.row_count . should_equal 10 materialized.column_count . should_equal 5 @@ -532,8 +550,8 @@ spec setup = materialized.columns.at 4 . name . should_equal "Standard Deviation ValueWithNothing 1" materialized.columns.at 4 . at idx . 
should_equal 56.677714 epsilon=0.000001 - Test.specify "should be able to create median values" (pending = resolve_pending test_selection.advanced_stats) <| - grouped = table.aggregate [Group_By "Index", Median "Index", Median "Value", Median "ValueWithNothing", Mode "Index", Percentile 0.25 "Value", Percentile 0.40 "ValueWithNothing"] + group_builder.specify "should be able to create median values" (pending = resolve_pending test_selection.advanced_stats) <| + grouped = data.table.aggregate [Group_By "Index", Median "Index", Median "Value", Median "ValueWithNothing", Mode "Index", Percentile 0.25 "Value", Percentile 0.40 "ValueWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 10 @@ -554,8 +572,8 @@ spec setup = materialized.columns.at 6 . name . should_equal "40%-ile ValueWithNothing" materialized.columns.at 6 . at idx . should_equal -18.802000 epsilon=0.000001 - Test.specify "should be able to get first and last values" (pending = resolve_pending test_selection.first_last) <| - grouped = table.aggregate [Group_By "Index", First "TextWithNothing" (order_by = [Sort_Column.Name "Value", Sort_Column.Name "Flag"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value"])] + group_builder.specify "should be able to get first and last values" (pending = resolve_pending test_selection.first_last) <| + grouped = data.table.aggregate [Group_By "Index", First "TextWithNothing" (order_by = [Sort_Column.Name "Value", Sort_Column.Name "Flag"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value"])] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 10 @@ -568,8 +586,8 @@ spec setup = materialized.columns.at 2 . name . should_equal "Last ValueWithNothing" materialized.columns.at 2 . at idx . 
should_equal -89.78 epsilon=0.000001 - Test.specify "should be able to get first and last values with mixed ordering" (pending = resolve_pending test_selection.first_last) <| - grouped = table.aggregate [Group_By "Index", First "TextWithNothing" (order_by = [Sort_Column.Name "Value" Sort_Direction.Descending, Sort_Column.Name "Flag"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value" Sort_Direction.Descending])] + group_builder.specify "should be able to get first and last values with mixed ordering" (pending = resolve_pending test_selection.first_last) <| + grouped = data.table.aggregate [Group_By "Index", First "TextWithNothing" (order_by = [Sort_Column.Name "Value" Sort_Direction.Descending, Sort_Column.Name "Flag"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value" Sort_Direction.Descending])] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 10 @@ -582,8 +600,8 @@ spec setup = materialized.columns.at 2 . name . should_equal "Last ValueWithNothing" materialized.columns.at 2 . at idx . should_equal -63.75 epsilon=0.000001 - Test.specify "should be able to get first and last values with default row order" (pending = resolve_pending test_selection.first_last_row_order) <| - grouped = table.aggregate [Group_By "Index", First "TextWithNothing", Last "Value"] + group_builder.specify "should be able to get first and last values with default row order" (pending = resolve_pending test_selection.first_last_row_order) <| + grouped = data.table.aggregate [Group_By "Index", First "TextWithNothing", Last "Value"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 10 @@ -596,8 +614,8 @@ spec setup = materialized.columns.at 2 . name . should_equal "Last Value" materialized.columns.at 2 . at idx . 
should_equal 56.15916 epsilon=0.000001 - Test.specify "should be able to get minimum and maximum values" <| - grouped = table.aggregate [Group_By "Index", Minimum "Value", Maximum "Value", Minimum "ValueWithNothing", Maximum "ValueWithNothing"] + group_builder.specify "should be able to get minimum and maximum values" <| + grouped = data.table.aggregate [Group_By "Index", Minimum "Value", Maximum "Value", Minimum "ValueWithNothing", Maximum "ValueWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 10 @@ -614,8 +632,8 @@ spec setup = materialized.columns.at 4 . name . should_equal "Maximum ValueWithNothing" materialized.columns.at 4 . at idx . should_equal 99.79 epsilon=0.000001 - Test.specify "should be able to get shortest and longest text values" (pending = resolve_pending test_selection.text_shortest_longest) <| - grouped = table.aggregate [Group_By "Index", Shortest "TextWithNothing", Longest "TextWithNothing"] + group_builder.specify "should be able to get shortest and longest text values" (pending = resolve_pending test_selection.text_shortest_longest) <| + grouped = data.table.aggregate [Group_By "Index", Shortest "TextWithNothing", Longest "TextWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 10 @@ -628,8 +646,8 @@ spec setup = materialized.columns.at 2 . name . should_equal "Longest TextWithNothing" materialized.columns.at 2 . at idx . 
should_equal "byo6kn5l3sz" - Test.specify "should be able to get concatenated text values" (pending = resolve_pending test_selection.text_concat) <| - grouped = table.aggregate [Group_By "Index", Concatenate "Code"] + group_builder.specify "should be able to get concatenated text values" (pending = resolve_pending test_selection.text_concat) <| + grouped = data.table.aggregate [Group_By "Index", Concatenate "Code"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 10 @@ -640,9 +658,11 @@ spec setup = materialized.columns.at 1 . name . should_equal "Concatenate Code" materialized.columns.at 1 . at idx . length . should_equal 783 - Test.group prefix+"Table.aggregate should be able to group on multiple fields not in left columns" <| - Test.specify "should be able to count" <| - grouped = table.aggregate [Group_By "Flag", Count, Group_By "Index"] + suite_builder.group prefix+"Table.aggregate should be able to group on multiple fields not in left columns" group_builder-> + data = Data.setup table_fn empty_table_fn + + group_builder.specify "should be able to count" <| + grouped = data.table.aggregate [Group_By "Flag", Count, Group_By "Index"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 20 @@ -654,8 +674,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Count" materialized.columns.at 1 . at idx . 
should_equal 127 - Test.specify "should be able to count missing values" <| - grouped = table.aggregate [Count_Nothing "Hexadecimal", Count_Not_Nothing "Hexadecimal", Group_By "Index", Count_Empty "TextWithNothing", Group_By "Flag", Count_Not_Empty "TextWithNothing"] + group_builder.specify "should be able to count missing values" <| + grouped = data.table.aggregate [Count_Nothing "Hexadecimal", Count_Not_Nothing "Hexadecimal", Group_By "Index", Count_Empty "TextWithNothing", Group_By "Flag", Count_Not_Empty "TextWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 20 @@ -673,8 +693,8 @@ spec setup = materialized.columns.at 5 . name . should_equal "Count Not Empty TextWithNothing" materialized.columns.at 5 . at idx . should_equal 115 - Test.specify "should be able to count distinct values" <| - grouped = table.aggregate [Group_By "Index", Count_Distinct "Code", Count_Distinct "Index", Count_Distinct "Flag", Group_By "Flag"] + group_builder.specify "should be able to count distinct values" <| + grouped = data.table.aggregate [Group_By "Index", Count_Distinct "Code", Count_Distinct "Index", Count_Distinct "Flag", Group_By "Flag"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 20 @@ -690,8 +710,8 @@ spec setup = materialized.columns.at 3 . name . should_equal "Count Distinct Flag" materialized.columns.at 3 . at idx . 
should_equal 1 - Test.specify "should be able to count distinct values over multiple columns" (pending = resolve_pending test_selection.multi_distinct) <| - grouped = table.aggregate [Group_By "Index", Count_Distinct ["Index", "Flag"], Group_By "Flag"] + group_builder.specify "should be able to count distinct values over multiple columns" (pending = resolve_pending test_selection.multi_distinct) <| + grouped = data.table.aggregate [Group_By "Index", Count_Distinct ["Index", "Flag"], Group_By "Flag"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 20 @@ -703,8 +723,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Count Distinct Index Flag" materialized.columns.at 1 . at idx . should_equal 1 - Test.specify "should be able to compute sum and average of values" <| - grouped = table.aggregate [Group_By "Index", Sum "Value", Sum "ValueWithNothing", Average "Value", Average "ValueWithNothing", Group_By "Flag"] + group_builder.specify "should be able to compute sum and average of values" <| + grouped = data.table.aggregate [Group_By "Index", Sum "Value", Sum "ValueWithNothing", Average "Value", Average "ValueWithNothing", Group_By "Flag"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 20 @@ -722,8 +742,8 @@ spec setup = materialized.columns.at 4 . name . should_equal "Average ValueWithNothing" materialized.columns.at 4 . at idx . 
should_equal 4.721858 epsilon=0.000001 - Test.specify "should be able to compute standard deviation of values" (pending = resolve_pending test_selection.std_dev) <| - grouped = table.aggregate [Group_By "Index", Group_By "Flag", Standard_Deviation "Value", Standard_Deviation "ValueWithNothing", (Standard_Deviation "Value" population=True), (Standard_Deviation "ValueWithNothing" population=True)] + group_builder.specify "should be able to compute standard deviation of values" (pending = resolve_pending test_selection.std_dev) <| + grouped = data.table.aggregate [Group_By "Index", Group_By "Flag", Standard_Deviation "Value", Standard_Deviation "ValueWithNothing", (Standard_Deviation "Value" population=True), (Standard_Deviation "ValueWithNothing" population=True)] materialized = materialize grouped grouped.row_count . should_equal 20 materialized.column_count . should_equal 6 @@ -740,8 +760,8 @@ spec setup = materialized.columns.at 5 . name . should_equal "Standard Deviation ValueWithNothing 1" materialized.columns.at 5 . at idx . should_equal 57.306492 epsilon=0.000001 - Test.specify "should be able to create median values" (pending = resolve_pending test_selection.advanced_stats) <| - grouped = table.aggregate [Median "Index", Median "Value", Median "ValueWithNothing", Mode "Index", Group_By "Index", Group_By "Flag", Percentile 0.25 "Value", Percentile 0.40 "ValueWithNothing"] + group_builder.specify "should be able to create median values" (pending = resolve_pending test_selection.advanced_stats) <| + grouped = data.table.aggregate [Median "Index", Median "Value", Median "ValueWithNothing", Mode "Index", Group_By "Index", Group_By "Flag", Percentile 0.25 "Value", Percentile 0.40 "ValueWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 20 @@ -763,8 +783,8 @@ spec setup = materialized.columns.at 7 . name . should_equal "40%-ile ValueWithNothing" materialized.columns.at 7 . at idx . 
should_equal -17.174000 epsilon=0.000001 - Test.specify "should be able to get first and last values" (pending = resolve_pending test_selection.first_last) <| - grouped = table.aggregate [Group_By "Flag", First "TextWithNothing" (order_by = [Sort_Column.Name "Value", Sort_Column.Name "Flag"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value"]), Group_By "Index"] + group_builder.specify "should be able to get first and last values" (pending = resolve_pending test_selection.first_last) <| + grouped = data.table.aggregate [Group_By "Flag", First "TextWithNothing" (order_by = [Sort_Column.Name "Value", Sort_Column.Name "Flag"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value"]), Group_By "Index"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 20 @@ -778,8 +798,8 @@ spec setup = materialized.columns.at 2 . name . should_equal "Last ValueWithNothing" materialized.columns.at 2 . at idx . should_equal -89.78 epsilon=0.000001 - Test.specify "should be able to get first and last values with mixed ordering" (pending = resolve_pending test_selection.first_last) <| - grouped = table.aggregate [Group_By "Flag", First "TextWithNothing" (order_by = [Sort_Column.Name "Value" Sort_Direction.Descending, Sort_Column.Name "Flag"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value" Sort_Direction.Descending]), Group_By "Index"] + group_builder.specify "should be able to get first and last values with mixed ordering" (pending = resolve_pending test_selection.first_last) <| + grouped = data.table.aggregate [Group_By "Flag", First "TextWithNothing" (order_by = [Sort_Column.Name "Value" Sort_Direction.Descending, Sort_Column.Name "Flag"]), Last "ValueWithNothing" (order_by = [Sort_Column.Name "Value" Sort_Direction.Descending]), Group_By "Index"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . 
should_equal 20 @@ -793,8 +813,8 @@ spec setup = materialized.columns.at 2 . name . should_equal "Last ValueWithNothing" materialized.columns.at 2 . at idx . should_equal 54.48 epsilon=0.000001 - Test.specify "should be able to get first and last values with default row order" (pending = resolve_pending test_selection.first_last_row_order) <| - grouped = table.aggregate [Group_By "Flag", First "TextWithNothing", Last "Value", Group_By "Index"] + group_builder.specify "should be able to get first and last values with default row order" (pending = resolve_pending test_selection.first_last_row_order) <| + grouped = data.table.aggregate [Group_By "Flag", First "TextWithNothing", Last "Value", Group_By "Index"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 20 @@ -808,8 +828,8 @@ spec setup = materialized.columns.at 2 . name . should_equal "Last Value" materialized.columns.at 2 . at idx . should_equal 56.15916 epsilon=0.000001 - Test.specify "should be able to get minimum and maximum values" <| - grouped = table.aggregate [Group_By "Index", Minimum "Value", Maximum "Value", Group_By "Flag", Minimum "ValueWithNothing", Maximum "ValueWithNothing"] + group_builder.specify "should be able to get minimum and maximum values" <| + grouped = data.table.aggregate [Group_By "Index", Minimum "Value", Maximum "Value", Group_By "Flag", Minimum "ValueWithNothing", Maximum "ValueWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 20 @@ -827,8 +847,8 @@ spec setup = materialized.columns.at 5 . name . should_equal "Maximum ValueWithNothing" materialized.columns.at 5 . at idx . 
should_equal 97.17 epsilon=0.000001 - Test.specify "should be able to get shortest and longest text values" (pending = resolve_pending test_selection.text_shortest_longest) <| - grouped = table.aggregate [Group_By "Index", Group_By "Flag", Shortest "TextWithNothing", Longest "TextWithNothing"] + group_builder.specify "should be able to get shortest and longest text values" (pending = resolve_pending test_selection.text_shortest_longest) <| + grouped = data.table.aggregate [Group_By "Index", Group_By "Flag", Shortest "TextWithNothing", Longest "TextWithNothing"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 20 @@ -842,8 +862,8 @@ spec setup = materialized.columns.at 3 . name . should_equal "Longest TextWithNothing" materialized.columns.at 3 . at idx . should_equal "byo6kn5l3sz" - Test.specify "should be able to get concatenated text values" (pending = resolve_pending test_selection.text_concat) <| - grouped = table.aggregate [Group_By "Index", Group_By "Flag", Concatenate "Code"] + group_builder.specify "should be able to get concatenated text values" (pending = resolve_pending test_selection.text_concat) <| + grouped = data.table.aggregate [Group_By "Index", Group_By "Flag", Concatenate "Code"] materialized = materialize grouped Problems.assume_no_problems materialized grouped.row_count . should_equal 20 @@ -855,8 +875,8 @@ spec setup = materialized.columns.at 2 . name . should_equal "Concatenate Code" materialized.columns.at 2 . at idx . length . 
should_equal 381 - Test.group prefix+"Table.aggregate Shortest" (pending = resolve_pending test_selection.text_shortest_longest) <| - Test.specify "should correctly handle empty strings versus missing (null) strings" <| + suite_builder.group prefix+"Table.aggregate Shortest" (pending = resolve_pending test_selection.text_shortest_longest) group_builder-> + group_builder.specify "should correctly handle empty strings versus missing (null) strings" <| table = table_builder [["A", ["abcd", "f", ""]], ["B", [Nothing, "f", "abc"]]] result = table.aggregate [Shortest "A", Shortest "B"] result.row_count . should_equal 1 @@ -868,8 +888,8 @@ spec setup = materialized.columns.at 1 . name . should_equal "Shortest B" materialized.columns.at 1 . to_vector . should_equal ["f"] - Test.group prefix+"Table.aggregate Concatenate" (pending = resolve_pending test_selection.text_concat) <| - Test.specify "should insert the separator, add prefix and suffix" <| + suite_builder.group prefix+"Table.aggregate Concatenate" (pending = resolve_pending test_selection.text_concat) group_builder-> + group_builder.specify "should insert the separator, add prefix and suffix" <| table = table_builder [["A", ["foo", "bar", "foo", "foo"]], ["B", ["a", "b", "c", "d"]]] result = table.aggregate [Group_By "A", (Concatenate "B" prefix="[[" suffix="]]" separator="; ")] result.row_count . should_equal 2 @@ -881,7 +901,7 @@ spec setup = materialized.columns.at 1 . name . should_equal "Concatenate B" materialized.columns.at 1 . to_vector . should_equal ["[[b]]", "[[a; c; d]]"] - Test.specify "should correctly escape separator and quote characters but only if necessary" <| + group_builder.specify "should correctly escape separator and quote characters but only if necessary" <| table = table_builder [["A", ["1,0", "b", "'c", "''", ","]]] result = table.aggregate [(Concatenate "A" prefix="[[" suffix="]]" separator="," quote_char="'")] result.row_count . 
should_equal 1 @@ -891,7 +911,7 @@ spec setup = materialized.columns.at 0 . name . should_equal "Concatenate A" materialized.columns.at 0 . to_vector . should_equal ["[['1,0',b,'''c','''''',',']]"] - Test.specify "should correctly handle missing values and empty values with quote character" <| + group_builder.specify "should correctly handle missing values and empty values with quote character" <| table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] result = table.aggregate [(Concatenate "A" prefix="[[" suffix="]]" separator="," quote_char="'")] result.row_count . should_equal 1 @@ -901,7 +921,7 @@ spec setup = materialized.columns.at 0 . name . should_equal "Concatenate A" materialized.columns.at 0 . to_vector . should_equal ["[['1,0',A,'','',B,,,C]]"] - Test.specify "will not be able to distinguish missing values from empty values without quote character" <| + group_builder.specify "will not be able to distinguish missing values from empty values without quote character" <| table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] result = table.aggregate [(Concatenate "A" prefix="[[" suffix="]]" separator=",")] result.row_count . should_equal 1 @@ -916,7 +936,7 @@ spec setup = materialized.columns.at 0 . name . should_equal "Concatenate A" materialized.columns.at 0 . to_vector . should_equal ["[[1,0,A,,,B,,,C]]"] - Test.specify "should work with empty separator" <| + group_builder.specify "should work with empty separator" <| table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] result = table.aggregate [(Concatenate "A")] result.row_count . should_equal 1 @@ -926,7 +946,7 @@ spec setup = materialized.columns.at 0 . name . should_equal "Concatenate A" materialized.columns.at 0 . to_vector . 
should_equal ["1,0ABC"] - Test.specify "should work with empty separator but non-empty quote" <| + group_builder.specify "should work with empty separator but non-empty quote" <| table = table_builder [["A", ["1'0", "A", "", "", "B", Nothing, Nothing, "C"]]] result = table.aggregate [(Concatenate "A" quote_char="'")] result.row_count . should_equal 1 @@ -936,8 +956,8 @@ spec setup = materialized.columns.at 0 . name . should_equal "Concatenate A" materialized.columns.at 0 . to_vector . should_equal ["'1''0'A''''BC"] - Test.group prefix+"Table.aggregate Count_Distinct" <| - Test.specify "should correctly count missing values" <| + suite_builder.group prefix+"Table.aggregate Count_Distinct" group_builder-> + group_builder.specify "should correctly count missing values" <| get_value t = columns = materialize t . columns columns.length . should_equal 1 frames_to_skip=1 @@ -988,7 +1008,7 @@ spec setup = m2.columns.first.to_vector . should_equal ["bar", "foo"] m2.columns.second.to_vector . should_equal [1, 2] - Test.specify "should correctly count all-null keys in multi-column mode" (pending = resolve_pending test_selection.multi_distinct) <| + group_builder.specify "should correctly count all-null keys in multi-column mode" (pending = resolve_pending test_selection.multi_distinct) <| table = table_builder [["A", ["foo", "foo", Nothing, Nothing, Nothing]], ["B", ["baz", Nothing, Nothing, Nothing, "baz"]], ["C", [1, 2, 3, Nothing, 5]]] r2 = table.aggregate [Count_Distinct ["A", "B"] (ignore_nothing=False)] @@ -1005,8 +1025,8 @@ spec setup = m1.columns.first.name . should_equal "Count Distinct A B" m1.columns.first.to_vector . 
should_equal [3] - Test.group prefix+"Table.aggregate Standard_Deviation" pending=(resolve_pending test_selection.std_dev) <| - Test.specify "should correctly handle single elements" <| + suite_builder.group prefix+"Table.aggregate Standard_Deviation" pending=(resolve_pending test_selection.std_dev) group_builder-> + group_builder.specify "should correctly handle single elements" <| r1 = table_builder [["X", [1]]] . aggregate [Standard_Deviation "X" (population=False), Standard_Deviation "X" (population=True)] r1.row_count.should_equal 1 m1 = materialize r1 @@ -1014,8 +1034,8 @@ spec setup = m1.columns.first.at 0 . should_equal Nothing m1.columns.second.at 0 . should_equal 0 - Test.group prefix+"Table.aggregate should correctly select result types" <| - Test.specify "widening to decimals on Average" <| + suite_builder.group prefix+"Table.aggregate should correctly select result types" group_builder-> + group_builder.specify "widening to decimals on Average" <| table = table_builder [["G", ["a", "a", "b", "b"]], ["X", [0, 1, 1, Nothing]]] r1 = table.aggregate [Average "X"] r1.row_count.should_equal 1 @@ -1030,7 +1050,7 @@ spec setup = m2.columns.first.to_vector . should_equal ["a", "b"] m2.columns.second.to_vector . should_equal [0.5, 1] - Test.specify "widening to decimals on Median" (pending = resolve_pending test_selection.advanced_stats) <| + group_builder.specify "widening to decimals on Median" (pending = resolve_pending test_selection.advanced_stats) <| table = table_builder [["X", [-1000, 0, 1, 100000, Nothing]]] r1 = table.aggregate [Median "X"] r1.row_count.should_equal 1 @@ -1038,7 +1058,7 @@ spec setup = m1.column_count . should_equal 1 m1.columns.first.to_vector . 
should_equal [0.5] - Test.specify "widening to decimals on Percentile" (pending = resolve_pending test_selection.advanced_stats) <| + group_builder.specify "widening to decimals on Percentile" (pending = resolve_pending test_selection.advanced_stats) <| table = table_builder [["X", [1, 2, 3, 4, 5, 6, Nothing]]] r1 = table.aggregate [Percentile 0.3 "X"] r1.row_count.should_equal 1 @@ -1046,7 +1066,7 @@ spec setup = m1.column_count . should_equal 1 m1.columns.first.to_vector . should_equal [2.5] - Test.specify "widening to decimals on Standard_Deviation" (pending = resolve_pending test_selection.std_dev) <| + group_builder.specify "widening to decimals on Standard_Deviation" (pending = resolve_pending test_selection.std_dev) <| table = table_builder [["X", [1, 2, 3, 4, Nothing]]] r1 = table.aggregate [Standard_Deviation "X" (population=True), Standard_Deviation "X" (population=False)] r1.row_count.should_equal 1 @@ -1064,10 +1084,10 @@ spec setup = loc = Meta.get_source_location 2 Test.fail "Expected a Nothing or NaN but got: "+value.to_text+" (at "+loc+")." - Test.group prefix+"Table.aggregate should correctly handle infinities" <| + suite_builder.group prefix+"Table.aggregate should correctly handle infinities" group_builder-> pos_inf = 1/0 neg_inf = -1/0 - Test.specify "on Average" <| + group_builder.specify "on Average" <| t1 = table_builder [["X", [Nothing, pos_inf, pos_inf, 0]]] r1 = t1.aggregate [Average "X"] r1.row_count.should_equal 1 @@ -1082,7 +1102,7 @@ spec setup = m2.column_count . should_equal 1 expect_null_or_nan <| m2.columns.first.at 0 - Test.specify "on Median" (pending = resolve_pending test_selection.advanced_stats) <| + group_builder.specify "on Median" (pending = resolve_pending test_selection.advanced_stats) <| t1 = table_builder [["X", [Nothing, neg_inf, pos_inf, 0, pos_inf, pos_inf]]] r1 = t1.aggregate [Median "X"] r1.row_count.should_equal 1 @@ -1111,7 +1131,7 @@ spec setup = m4.column_count . should_equal 1 m4.columns.first.at 0 . 
should_equal pos_inf - Test.specify "on Percentile" (pending = resolve_pending test_selection.advanced_stats) <| + group_builder.specify "on Percentile" (pending = resolve_pending test_selection.advanced_stats) <| t1 = table_builder [["X", [Nothing, neg_inf, 2, 3, 4, pos_inf]]] r1 = t1.aggregate [Percentile 0.3 "X"] r1.row_count.should_equal 1 @@ -1133,7 +1153,7 @@ spec setup = m3.column_count . should_equal 1 expect_null_or_nan <| m3.columns.first.at 0 - Test.specify "on Standard_Deviation" (pending = resolve_pending test_selection.std_dev) <| + group_builder.specify "on Standard_Deviation" (pending = resolve_pending test_selection.std_dev) <| t1 = table_builder [["X", [neg_inf, 1]]] r1 = t1.aggregate [Standard_Deviation "X" (population=True), Standard_Deviation "X" (population=False)] r1.row_count.should_equal 1 @@ -1142,9 +1162,9 @@ spec setup = expect_null_or_nan <| m1.columns.first.at 0 expect_null_or_nan <| m1.columns.second.at 0 - Test.group prefix+"Table.aggregate should correctly handle NaN" pending=(resolve_pending test_selection.nan) <| + suite_builder.group prefix+"Table.aggregate should correctly handle NaN" pending=(resolve_pending test_selection.nan) group_builder-> nan = 0.log 0 - Test.specify "on Average" <| + group_builder.specify "on Average" <| t1 = table_builder [["X", [Nothing, nan, 0, 1, 2]]] r1 = t1.aggregate [Average "X"] r1.row_count.should_equal 1 @@ -1152,7 +1172,7 @@ spec setup = m1.column_count . should_equal 1 Double.isNaN (m1.columns.first.at 0) . should_be_true - Test.specify "on Median" (pending = resolve_pending test_selection.advanced_stats) <| + group_builder.specify "on Median" (pending = resolve_pending test_selection.advanced_stats) <| t1 = table_builder [["X", [Nothing, nan, 0, 1, 2]]] r1 = t1.aggregate [Median "X"] r1.row_count.should_equal 1 @@ -1160,7 +1180,7 @@ spec setup = m1.column_count . should_equal 1 Double.isNaN (m1.columns.first.at 0) . 
should_be_true - Test.specify "on Percentile" (pending = resolve_pending test_selection.advanced_stats) <| + group_builder.specify "on Percentile" (pending = resolve_pending test_selection.advanced_stats) <| t1 = table_builder [["X", [Nothing, nan, 0, 1, 2, 4, 5]]] r1 = t1.aggregate [Percentile 0.3 "X"] r1.row_count.should_equal 1 @@ -1168,7 +1188,7 @@ spec setup = m1.column_count . should_equal 1 Double.isNaN (m1.columns.first.at 0) . should_be_true - Test.specify "on Mode" (pending = resolve_pending test_selection.advanced_stats) <| + group_builder.specify "on Mode" (pending = resolve_pending test_selection.advanced_stats) <| t1 = table_builder [["X", [Nothing, nan, nan, nan, nan, 4, 5]]] r1 = t1.aggregate [Mode "X"] r1.row_count.should_equal 1 @@ -1176,7 +1196,7 @@ spec setup = m1.column_count . should_equal 1 Double.isNaN (m1.columns.first.at 0) . should_be_true - Test.specify "on Standard_Deviation" (pending = resolve_pending test_selection.std_dev) <| + group_builder.specify "on Standard_Deviation" (pending = resolve_pending test_selection.std_dev) <| t1 = table_builder [["X", [Nothing, nan, 0, 1, 2]]] r1 = t1.aggregate [Standard_Deviation "X" (population=False), Standard_Deviation "X" (population=True)] r1.row_count.should_equal 1 @@ -1185,8 +1205,8 @@ spec setup = Double.isNaN (m1.columns.first.at 0) . should_be_true Double.isNaN (m1.columns.second.at 0) . should_be_true - Test.group prefix+"Table.aggregate Mode" (pending = resolve_pending test_selection.advanced_stats) <| - Test.specify "should ignore missing values" <| + suite_builder.group prefix+"Table.aggregate Mode" (pending = resolve_pending test_selection.advanced_stats) group_builder-> + group_builder.specify "should ignore missing values" <| t1 = table_builder [["X", [Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 2, 2, 1]]] r1 = t1.aggregate [Mode "X"] r1.row_count.should_equal 1 @@ -1194,8 +1214,8 @@ spec setup = m1.column_count . should_equal 1 m1.columns.first.at 0 . 
should_equal 2 - Test.group prefix+"Table.aggregate First and Last" <| - Test.specify "should not return the same value for groups with different values but equal ordering keys" (pending = resolve_pending test_selection.first_last) <| + suite_builder.group prefix+"Table.aggregate First and Last" group_builder-> + group_builder.specify "should not return the same value for groups with different values but equal ordering keys" (pending = resolve_pending test_selection.first_last) <| t1 = table_builder [["G", ["a", "a"]], ["X", [1, 2]]] order = [Sort_Column.Name "G"] r1 = t1.aggregate [First "X" (order_by=order), Last "X" (order_by=order)] @@ -1206,8 +1226,8 @@ spec setup = last = m1.columns.second.at 0 (first != last).should_be_true - Test.group prefix+"Table.aggregate" <| - Test.specify "should work even if no aggregations apart from groupings are specified" <| + suite_builder.group prefix+"Table.aggregate" group_builder-> + group_builder.specify "should work even if no aggregations apart from groupings are specified" <| table = table_builder [["A", [1, 1, 2, 1]], ["B", [3, 2, 2, 3]], ["C", [11, 12, 13, 14]]] grouped = table.aggregate [Group_By "B", Group_By "A"] grouped.row_count . should_equal 3 @@ -1220,7 +1240,7 @@ spec setup = materialized.columns.at 0 . to_vector . should_equal [2, 3, 2] if setup.test_selection.supports_unicode_normalization then - Test.specify "should correctly handle Unicode normalization within grouping" <| + group_builder.specify "should correctly handle Unicode normalization within grouping" <| table = table_builder [["A", ['s', 's\u0301', 'ś', 's\u0301']], ["B", [1, 2, 4, 8]]] grouped = table.aggregate [Group_By "A", Sum "B"] grouped.row_count . should_equal 2 @@ -1233,7 +1253,7 @@ spec setup = materialized.columns.at 1 . to_vector . 
should_equal [1, 14] if test_selection.date_support then - Test.specify "should allow grouping by dates" <| + group_builder.specify "should allow grouping by dates" <| dates = ["Date", [Date.new 1997, Date.new 2000 2 2, Date.new 2022 12 31, Date.new 2000 2 2, Date.new 1997]] times = ["Time", [Time_Of_Day.new, Time_Of_Day.new 0 0 0 500, Time_Of_Day.new 1 2 3, Time_Of_Day.new 0 0 0, Time_Of_Day.new 11 25 40]] datetimes = ["DateTime", [Date_Time.new 1999, Date_Time.new 2022 8 29 17 28 5, Date_Time.new 1999 1 1 0 0 0, Date_Time.new 1998, Date_Time.new 1998]] @@ -1260,7 +1280,7 @@ spec setup = m3.at "Sum Int" . to_vector . should_equal [24, 5, 2] if test_selection.first_last && test_selection.first_last_row_order.not then - Test.specify "should report a warning and ignore problematic columns if a feature is not supported" <| + group_builder.specify "should report a warning and ignore problematic columns if a feature is not supported" <| table = table_builder [["A", [1,2,Nothing,3]]] action = table.aggregate [Sum "A", First "A", Last "A"] on_problems=_ tester result = @@ -1272,7 +1292,7 @@ spec setup = problems = [Unsupported_Database_Operation.Error "`First` aggregation requires at least one `order_by` column.", Unsupported_Database_Operation.Error "`Last` aggregation requires at least one `order_by` column."] Problems.test_problem_handling action problems tester - Test.specify "will include unsupported feature problem in No_Output_Columns" <| + group_builder.specify "will include unsupported feature problem in No_Output_Columns" <| table = table_builder [["A", [1,2,Nothing,3]]] r1 = table.aggregate [First "A"] r1.should_fail_with No_Output_Columns @@ -1280,10 +1300,12 @@ spec setup = r1.to_display_text . should_contain "No columns in the result" r1.to_display_text . 
should_contain "`First`" - Test.group prefix+"Table.aggregate+Expressions" <| + suite_builder.group prefix+"Table.aggregate+Expressions" group_builder-> + data = Data.setup table_fn empty_table_fn + ## TODO we probably should check all kinds of aggregate columns to verify that all of them correctly support expressions. - Test.specify "should allow expressions in aggregates" <| + group_builder.specify "should allow expressions in aggregates" <| table = table_builder [["Index", [1, 1, 2, 2]], ["Value", [1, 2, 3, 4]]] t1 = table.aggregate [Group_By "Index", Sum "Value", Sum "[Value]*[Value]"] t1.column_count . should_equal 3 @@ -1294,32 +1316,32 @@ spec setup = # r1.at "Sum [Value]*[Value]" . to_vector . should_equal [5, 25] r1.at -1 . to_vector . should_equal [5, 25] - Test.specify "should warn when encountering invalid expressions, but try to perform the aggregations that are still valid" <| - action1 = table.aggregate [Group_By "Index", Sum "Value", Sum "[MISSING]*[MISSING]"] on_problems=_ + group_builder.specify "should warn when encountering invalid expressions, but try to perform the aggregations that are still valid" <| + action1 = data.table.aggregate [Group_By "Index", Sum "Value", Sum "[MISSING]*[MISSING]"] on_problems=_ tester1 = expect_column_names ["Index", "Sum Value"] problems1 = [Invalid_Aggregate_Column.Error "[MISSING]*[MISSING]" (No_Such_Column.Error "MISSING")] Problems.test_problem_handling action1 problems1 tester1 - t2 = table.aggregate [Group_By "Index", Sum "Value", Sum "[[["] on_problems=Problem_Behavior.Ignore + t2 = data.table.aggregate [Group_By "Index", Sum "Value", Sum "[[["] on_problems=Problem_Behavior.Ignore expect_column_names ["Index", "Sum Value"] t2 - err3 = table.aggregate [Group_By "Index", Sum "Value", Sum "[[["] on_problems=Problem_Behavior.Report_Error + err3 = data.table.aggregate [Group_By "Index", Sum "Value", Sum "[[["] on_problems=Problem_Behavior.Report_Error err3.should_fail_with Invalid_Aggregate_Column 
err3.catch.name . should_equal "[[[" err3.catch.expression_error . should_be_a Expression_Error.Syntax_Error - t4 = table.aggregate [Sum "[MISSING]*[MISSING]"] + t4 = data.table.aggregate [Sum "[MISSING]*[MISSING]"] t4 . should_fail_with Invalid_Aggregate_Column err4 = t4.catch err4.name.should_equal "[MISSING]*[MISSING]" err4.expression_error.should_equal (No_Such_Column.Error "MISSING") - Test.group prefix+"Table.aggregate should raise warnings when there are issues" <| + suite_builder.group prefix+"Table.aggregate should raise warnings when there are issues" group_builder-> table = col1 = ["Index", [1, 2, 3]] col2 = ["Value", [1, 2, 3]] table_builder [col1, col2] - Test.specify "should fail if there are no output columns, and promote any warnings to errors" <| + group_builder.specify "should fail if there are no output columns, and promote any warnings to errors" <| [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> Test.with_clue "Problem_Behavior="+pb.to_text+": " <| t1 = table.aggregate [] on_problems=pb t1.should_fail_with No_Output_Columns @@ -1332,7 +1354,7 @@ spec setup = t3.should_fail_with Missing_Input_Columns t3.catch.criteria.should_equal [42] - Test.specify "should raise a warning when can't find a column by name, but a hard error if the missing column is in a Group_By" <| + group_builder.specify "should raise a warning when can't find a column by name, but a hard error if the missing column is in a Group_By" <| err1 = table.aggregate [Group_By "Missing", Group_By "Index", Group_By "Other_Missing"] on_problems=Problem_Behavior.Ignore err1.should_fail_with Invalid_Aggregate_Column err1.catch.name . should_equal "Missing" @@ -1371,38 +1393,38 @@ spec setup = err6 = table.aggregate [Group_By "Index", Sum "Value", Sum 42] on_problems=Problem_Behavior.Ignore error_on_missing_columns=True err6.catch . 
should_equal (Missing_Input_Columns.Error [42]) - Test.specify "should raise a warning when a duplicate column name" <| + group_builder.specify "should raise a warning when a duplicate column name" <| action = table.aggregate [Group_By "Index", Group_By 0] on_problems=_ problems = [Duplicate_Output_Column_Names.Error ["Index"]] tester = expect_column_names ["Index", "Index 1"] Problems.test_problem_handling action problems tester - Test.specify "should raise a warning when a duplicate column name and rename default names first" <| + group_builder.specify "should raise a warning when a duplicate column name and rename default names first" <| action = table.aggregate [Group_By "Value", Group_By "Index" "Value"] on_problems=_ problems = [Duplicate_Output_Column_Names.Error ["Value"]] tester = expect_column_names ["Value 1", "Value"] Problems.test_problem_handling action problems tester - Test.specify "should raise a warning when duplicate column names" <| + group_builder.specify "should raise a warning when duplicate column names" <| action = table.aggregate [Sum "Value" new_name="AGG1", Count new_name="AGG1"] on_problems=_ problems = [Duplicate_Output_Column_Names.Error ["AGG1"]] tester = expect_column_names ["AGG1", "AGG1 1"] Problems.test_problem_handling action problems tester - Test.specify "should allow partial matches on Count_Distinct" <| + group_builder.specify "should allow partial matches on Count_Distinct" <| action = table.aggregate [Count_Distinct ["Missing", "Value"]] on_problems=_ problems = [Missing_Input_Columns.Error ["Missing"]] tester = expect_column_names ["Count Distinct Value"] Problems.test_problem_handling action problems tester - Test.specify "should ignore Count_Distinct if no columns matched" <| + group_builder.specify "should ignore Count_Distinct if no columns matched" <| action = table.aggregate [Count_Distinct [-100], Count] on_problems=_ problems = [Missing_Input_Columns.Error [-100]] tester = expect_column_names ["Count"] 
Problems.test_problem_handling action problems tester - Test.group prefix+"Table.aggregate should report warnings and errors based on types" <| - Test.specify "should warn if grouping on a floating point" <| + suite_builder.group prefix+"Table.aggregate should report warnings and errors based on types" group_builder-> + group_builder.specify "should warn if grouping on a floating point" <| t = table_builder [["X", [1.1, 2.2, 3.3, 2.2]]] action = t.aggregate [Group_By "X"] on_problems=_ problems = [Floating_Point_Equality.Error "X"] @@ -1411,19 +1433,19 @@ spec setup = case test_selection.advanced_stats of True -> - Test.specify "should warn if computing an aggregation relying on floating point equality" <| + group_builder.specify "should warn if computing an aggregation relying on floating point equality" <| t = table_builder [["X", [1.5, 2.0, 1.5, 1.0]]] action = t.aggregate [Mode "X"] on_problems=_ problems = [Floating_Point_Equality.Error "Mode X"] tester = expect_column_names ["Mode X"] Problems.test_problem_handling action problems tester False -> - Test.specify "should error if unsupported operations are selected" <| + group_builder.specify "should error if unsupported operations are selected" <| t1 = table_builder [["X", [1.5, 2.0, 1.5, 1.0]]] t2 = t1.aggregate [Mode "X"] on_problems=Problem_Behavior.Ignore t2.should_fail_with No_Output_Columns - Test.specify "should check types" <| + group_builder.specify "should check types" <| table = table_builder [["Text", ["a", "b"]], ["Int", [1, 2]], ["Float", [1.1, 2.2]]] [Problem_Behavior.Report_Error, Problem_Behavior.Report_Warning, Problem_Behavior.Ignore].each pb-> Test.with_clue "Problem_Behavior="+pb.to_text+" " <| non_numbers = [Average "Text", Standard_Deviation "Text", Median "Text", Sum "Text"] @@ -1438,7 +1460,7 @@ spec setup = err.should_fail_with Invalid_Value_Type err.catch.related_column.should_equal "Int" - Test.specify "should return predictable types" <| + group_builder.specify "should return 
predictable types" <| table = table_builder [["Text", ["a", "b"]], ["Int", [1, 2]], ["Float", [1.1, 2.2]]] t1 = table.aggregate [Group_By "Text", Group_By "Int", Group_By "Float"] @@ -1454,7 +1476,7 @@ spec setup = t2.at "Average Int" . value_type . is_numeric . should_be_true t2.at "Concatenate Text" . value_type . is_text . should_be_true - Test.group prefix+"Table.aggregate should raise warnings when there are issues computing aggregation" pending=(resolve_pending test_selection.aggregation_problems) <| + suite_builder.group prefix+"Table.aggregate should raise warnings when there are issues computing aggregation" pending=(resolve_pending test_selection.aggregation_problems) group_builder-> table = col1 = ["Index", [1, 2, 3]] col2 = ["Value", [1, 2, 3.1]] @@ -1462,7 +1484,7 @@ spec setup = col4 = ["Mixed", ["A", 1, "C"]] table_builder [col1, col2, col3, col4] - Test.specify "should not fail if trying concatenate unquoted delimiters with no separator" <| + group_builder.specify "should not fail if trying concatenate unquoted delimiters with no separator" <| column = Concatenate "Text" separator="" t = table_builder [["Text", ["A", "BC", "def"]]] result = t.aggregate [column] on_problems=Report_Error @@ -1470,7 +1492,7 @@ spec setup = result.column_names . should_equal ["Concatenate Text"] result.at "Concatenate Text" . to_vector . 
should_equal ["ABCdef"] - Test.specify "should warn if can't compare value for Min or Max" <| + group_builder.specify "should warn if can't compare value for Min or Max" <| [Problem_Behavior.Report_Error, Problem_Behavior.Report_Warning, Problem_Behavior.Ignore].each pb-> Test.with_clue "Problem_Behavior="+pb.to_text+" " <| err = table.aggregate [Maximum "Mixed"] on_problems=pb err.should_fail_with Invalid_Aggregation @@ -1478,15 +1500,15 @@ spec setup = err.catch.message.should_start_with "Cannot compare values" err.catch.rows.should_equal [1] - Test.specify "should warn if trying concatenate unquoted delimiters" <| + group_builder.specify "should warn if trying concatenate unquoted delimiters" <| column = Concatenate "Text" separator="," action = table.aggregate [column] on_problems=_ problems = [Unquoted_Delimiter.Error "Concatenate Text" [1]] tester = expect_column_names ["Concatenate Text"] Problems.test_problem_handling action problems tester - Test.group prefix+"Table.aggregate should merge warnings when issues computing aggregation" pending=(resolve_pending test_selection.aggregation_problems) <| - Test.specify "should merge Invalid Aggregation warnings" <| + suite_builder.group prefix+"Table.aggregate should merge warnings when issues computing aggregation" pending=(resolve_pending test_selection.aggregation_problems) group_builder-> + group_builder.specify "should merge Invalid Aggregation warnings" <| table = table_builder [["X", (0.up_to 16).map (_-> ",")]] new_table = table.aggregate [Concatenate "X" separator=","] problems = Problems.get_attached_warnings new_table @@ -1495,7 +1517,7 @@ spec setup = warning.column . should_equal "Concatenate X" warning.rows . length . 
should_equal 16 - Test.specify "should merge Floating Point Grouping warnings" <| + group_builder.specify "should merge Floating Point Grouping warnings" <| table = col1 = ["Key", ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O"]] col2 = ["Value", [1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5]] @@ -1508,7 +1530,7 @@ spec setup = problems.at 0 . location . should_equal "Float" if is_database then - Test.group prefix+"Table.aggregate should report unsupported operations but not block other aggregations in warning mode" <| + suite_builder.group prefix+"Table.aggregate should report unsupported operations but not block other aggregations in warning mode" group_builder-> expect_sum_and_unsupported_errors error_count result = result.column_count . should_equal 1 result.row_count . should_equal 1 @@ -1519,44 +1541,44 @@ spec setup = warning.should_be_a Unsupported_Database_Operation.Error if test_selection.first_last_row_order.not then - Test.specify "with First and Last in row order" <| + group_builder.specify "with First and Last in row order" <| table = table_builder [["X", [1,2,3]]] expect_sum_and_unsupported_errors 2 <| table.aggregate [Sum "X", First "X", Last "X"] if test_selection.first_last.not then - Test.specify "with First and Last with ordering" <| + group_builder.specify "with First and Last with ordering" <| table = table_builder [["A", [3,2,1]], ["X", [1,2,3]]] order = [Sort_Column.Name "A"] expect_sum_and_unsupported_errors 2 <| table.aggregate [Sum "X", First "X" (order_by=order), Last "X" (order_by=order)] if test_selection.advanced_stats.not then - Test.specify "with Median, Mode and Percentile" <| + group_builder.specify "with Median, Mode and Percentile" <| table = table_builder [["X", [1,2,3]]] expect_sum_and_unsupported_errors 3 <| table.aggregate [Sum "X", Median "X", Mode "X", Percentile 0.3 "X"] if test_selection.std_dev.not then - Test.specify "with Standard_Deviation" <| + group_builder.specify "with 
Standard_Deviation" <| table = table_builder [["X", [1,2,3]]] expect_sum_and_unsupported_errors 1 <| table.aggregate [Sum "X", Standard_Deviation "X"] if test_selection.text_shortest_longest.not then - Test.specify "with Shortest and Longest" <| + group_builder.specify "with Shortest and Longest" <| table = table_builder [["X", [1,2,3]], ["Y", ["a", "bb", "ccc"]]] expect_sum_and_unsupported_errors 2 <| table.aggregate [Sum "X", Shortest "Y", Longest "Y"] if test_selection.text_concat.not then - Test.specify "with Concatenate" <| + group_builder.specify "with Concatenate" <| table = table_builder [["X", [1,2,3]], ["Y", ["a", "bb", "ccc"]]] expect_sum_and_unsupported_errors 1 <| table.aggregate [Sum "X", Concatenate "Y"] if test_selection.multi_distinct.not then - Test.specify "with Count_Distinct on multiple fields" <| + group_builder.specify "with Count_Distinct on multiple fields" <| table = table_builder [["X", [1,2,3]], ["Y", ["a", "bb", "ccc"]]] expect_sum_and_unsupported_errors 1 <| table.aggregate [Sum "X", Count_Distinct ["X", "Y"]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Column_Name_Edge_Cases_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Column_Name_Edge_Cases_Spec.enso index abb76b116340..8909f635a37b 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Column_Name_Edge_Cases_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Column_Name_Edge_Cases_Spec.enso @@ -6,20 +6,19 @@ from Standard.Table.Errors import Clashing_Column_Name, Duplicate_Output_Column_ from Standard.Database.Errors import Unsupported_Database_Operation -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + from project.Common_Table_Operations.Util import run_default_backend from project.Common_Table_Operations.Core_Spec import weird_names -main = run_default_backend spec -spec setup = +add_specs suite_builder setup = table_builder = setup.table_builder materialize = setup.materialize 
is_case_sensitive = setup.test_selection.supports_case_sensitive_columns - Test.group setup.prefix+"Column Naming edge cases" <| - Test.specify "case insensitive name collisions - set" <| + suite_builder.group setup.prefix+"Column Naming edge cases" group_builder-> + group_builder.specify "case insensitive name collisions - set" <| t1 = table_builder [["X", [1]]] Problems.assume_no_problems (t1.at "X" . rename "x") t2 = t1.set "[X] + 100" "x" @@ -51,7 +50,7 @@ spec setup = t6.at "Right X" . to_vector . should_equal [1, 1, 1, 1] t6.at "Right x" . to_vector . should_equal [101, 101, 101, 101] - Test.specify "case insensitive name collisions - rename" <| + group_builder.specify "case insensitive name collisions - rename" <| t1 = table_builder [["X", [1]], ["Y", [2]]] t2 = t1.rename_columns [["X", "A"], ["Y", "a"]] case is_case_sensitive of @@ -71,7 +70,7 @@ spec setup = t3.column_names . should_equal ["X 1", "x"] Problems.expect_only_warning Duplicate_Output_Column_Names t3 - Test.specify "case insensitive name collisions - aggregate" <| + group_builder.specify "case insensitive name collisions - aggregate" <| t1 = table_builder [["X", [2, 1, 3, 2]]] t2 = t1.aggregate [Aggregate_Column.Maximum "X" "A", Aggregate_Column.Minimum "X" "a"] @@ -91,7 +90,7 @@ spec setup = t3.at 0 . to_vector . should_equal [3] t3.at 1 . to_vector . should_equal [1] - Test.specify "case insensitive name collisions - joins" <| + group_builder.specify "case insensitive name collisions - joins" <| t1 = table_builder [["X", [1, 2]], ["a", [3, 4]]] t2 = table_builder [["X", [2, 1]], ["A", [5, 6]]] @@ -118,7 +117,7 @@ spec setup = t5 = t1.join t2 on="X" join_kind=Join_Kind.Left_Exclusive t5.column_names . 
should_equal ["X", "a"] - Test.specify "case insensitive name collisions - cross_tab" <| + group_builder.specify "case insensitive name collisions - cross_tab" <| t0 = table_builder [["X", ["a", "A", "b"]], ["Y", [4, 5, 6]]] t1 = t0.cross_tab group_by=[] name_column="X" values=[Aggregate_Column.First "Y"] . sort_columns case setup.is_database of @@ -136,7 +135,7 @@ spec setup = # TODO possibly ensure a more detailed error message is included here so that the user knows the column names come from cross_tab t1.should_fail_with Clashing_Column_Name - Test.specify "case insensitive name collisions - transpose" <| + group_builder.specify "case insensitive name collisions - transpose" <| t0 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t1 = t0.transpose attribute_column_name="a" value_column_name="A" case setup.is_database of @@ -152,7 +151,7 @@ spec setup = t1.column_names . should_equal ["a", "A 1"] Problems.expect_only_warning Duplicate_Output_Column_Names t1 - Test.specify "unicode-normalized-equality vs selecting columns" <| + group_builder.specify "unicode-normalized-equality vs selecting columns" <| ## In Enso column 'ś' and 's\u0301' are the same entity. But in Databases, quite not necessarily. 
t1 = table_builder [['ś', [1, 2]], ['X', ['a', 'b']]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso index b981ab02e4cc..06f632df1a4e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso @@ -13,16 +13,14 @@ from Standard.Table.Errors import all from Standard.Database.Errors import all -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + import enso_dev.Tests.Data.Round_Spec from project.Common_Table_Operations.Util import run_default_backend -main = run_default_backend spec - -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend." @@ -31,9 +29,10 @@ spec setup = table = table_builder [["x", [n]]] result = table.at "x" |> op result.to_vector.at 0 + do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers) - Round_Spec.spec prefix do_round + Round_Spec.add_specs suite_builder prefix do_round ## Runs the provided callback with a few combinations of columns, where some of them are made Mixed (but still contain only the original values). @@ -62,8 +61,8 @@ spec setup = aligned_table = mixed_table.drop 1 callback_with_clue aligned_table - Test.group prefix+"Boolean Column Operations" <| - Test.specify "iif" <| + suite_builder.group prefix+"Boolean Column Operations" group_builder-> + group_builder.specify "iif" <| t = table_builder [["X", [True, False, Nothing, True]]] c1 = t.at "X" . iif 22 33 c1.to_vector . should_equal [22, 33, Nothing, 22] @@ -91,7 +90,7 @@ spec setup = t.at "X" . iif 22.0 False . should_fail_with No_Common_Type t.at "X" . iif 22 "0" . 
should_fail_with No_Common_Type - Test.specify "iif on Columns" <| + group_builder.specify "iif on Columns" <| with_mixed_columns_if_supported [["X", [True, False, Nothing, False]], ["Y", [1, 2, 3, 4]], ["Z", [1.5, 2.0, 3.5, 4.0]]] t1-> c1 = t1.at "X" . iif (t1.at "Y") (t1.at "Z") c1.to_vector . should_equal [1, 2.0, Nothing, 4.0] @@ -124,7 +123,7 @@ spec setup = c2.to_vector . should_equal [10, ""] False -> c2.should_fail_with No_Common_Type - Test.specify "iif should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <| + group_builder.specify "iif should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <| t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]] t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . cast "B" (Value_Type.Char size=3 variable_length=False) @@ -150,7 +149,7 @@ spec setup = Test.with_clue "g.value_type="+g.value_type.to_display_text+": " <| g.value_type.variable_length.should_be_true - Test.specify "should allow to compute &&, || and not" <| + group_builder.specify "should allow to compute &&, || and not" <| with_mixed_columns_if_supported [["X", [True, False, True]], ["Y", [True, False, False]]] t-> x = t.at "X" y = t.at "Y" @@ -162,13 +161,13 @@ spec setup = (x && y).to_vector . should_equal [True, False, False] (x || y.not).to_vector . should_equal [True, True, True] - Test.specify "should handle nulls correctly in not" <| + group_builder.specify "should handle nulls correctly in not" <| t = table_builder [["A", [True, False, Nothing]]] a = t.at "A" a_not = a.not a_not.to_vector . 
should_equal [False, True, Nothing] - Test.specify "should handle nulls correctly in &&" <| + group_builder.specify "should handle nulls correctly in &&" <| t = table_builder [["A", [True, True, True, False, False, False, Nothing, Nothing, Nothing]], ["B", [True, False, Nothing, True, False, Nothing, True, False, Nothing]]] a = t.at "A" (a && True).to_vector . should_equal [True, True, True, False, False, False, Nothing, Nothing, Nothing] @@ -184,7 +183,7 @@ spec setup = (a && b).to_vector . should_equal [True, False, Nothing, False, False, False, Nothing, False, Nothing] (a_not && b).to_vector . should_equal [False, False, False, True, False, Nothing, Nothing, False, Nothing] - Test.specify "should handle nulls correctly in ||" <| + group_builder.specify "should handle nulls correctly in ||" <| t = table_builder [["A", [True, True, True, False, False, False, Nothing, Nothing, Nothing]], ["B", [True, False, Nothing, True, False, Nothing, True, False, Nothing]]] a = t.at "A" (a || True).to_vector . should_equal [True, True, True, True, True, True, True, True, True] @@ -200,7 +199,7 @@ spec setup = (a || b).to_vector . should_equal [True, True, True, True, False, Nothing, True, Nothing, Nothing] (a_not || b).to_vector . should_equal [True, False, Nothing, True, True, True, True, Nothing, Nothing] - Test.specify "should check types" <| + group_builder.specify "should check types" <| t = table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [True, False, Nothing]]] ((t.at "X") && (t.at "Z")) . should_fail_with Invalid_Value_Type @@ -214,8 +213,8 @@ spec setup = ((t.at "X") . not) . should_fail_with Invalid_Value_Type ((t.at "Y") . iif 10 20) . 
should_fail_with Invalid_Value_Type - Test.group prefix+"Column Operations - Equality & Null Handling" <| - Test.specify "should provide basic == and != comparisons" pending="TODO figure out proper null handling" <| + suite_builder.group prefix+"Column Operations - Equality & Null Handling" group_builder-> + group_builder.specify "should provide basic == and != comparisons" pending="TODO figure out proper null handling" <| with_mixed_columns_if_supported [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]]] t2-> x = t2.at "x" y = t2.at "y" @@ -224,7 +223,7 @@ spec setup = (x == 4).to_vector . should_equal [False, True, False, Nothing] (x == Nothing).to_vector . should_equal [Nothing, Nothing, Nothing, Nothing] - Test.specify "should allow to check which values are null" + group_builder.specify "should allow to check which values are null" <| with_mixed_columns_if_supported [["x", [1, 4, 5, Nothing]]] t-> x = t.at "x" x.is_nothing.to_vector . should_equal [False, False, False, True] @@ -232,7 +231,7 @@ spec setup = x.is_present.to_vector . should_equal [True, True, True, False] (x + Nothing).is_present.to_vector . should_equal [False, False, False, False] - Test.specify "Column equality should handle nulls correctly" pending="TODO" <| + group_builder.specify "Column equality should handle nulls correctly" pending="TODO" <| a = [2, 3, Nothing, Nothing] b = [2, 4, Nothing, 5] r = [True, False, True, False] @@ -243,7 +242,7 @@ spec setup = c.to_vector . should_equal r c.value_type.should_equal Value_Type.Boolean - Test.specify "equals_ignore_case for ASCII strings" <| + group_builder.specify "equals_ignore_case for ASCII strings" <| x = ["a", "B", "c", "DEF"] y = ["aa", "b", "c", "dEf"] r = [False, True, True, True] @@ -256,7 +255,7 @@ spec setup = c.value_type.should_equal Value_Type.Boolean (t.at "X") . equals_ignore_case "Def" . to_vector . 
should_equal [False, False, False, True] - Test.specify "equals_ignore_case should check types" <| + group_builder.specify "equals_ignore_case should check types" <| t = table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] r1 = (t.at "X") . equals_ignore_case (t.at "Y") . to_vector @@ -268,7 +267,7 @@ spec setup = r3 = (t.at "Y") . equals_ignore_case 42 . to_vector r3.should_fail_with Invalid_Value_Type - Test.specify "Text Column equality (including case-insensitive) should handle nulls correctly" pending="TODO" <| + group_builder.specify "Text Column equality (including case-insensitive) should handle nulls correctly" pending="TODO" <| a = ["Z", "a", "b", Nothing, Nothing] b = ["Z", "A", "C", Nothing, "d"] r_sensitive = [True, False, False, True, False] @@ -281,7 +280,7 @@ spec setup = ((t.at "A") == (t.at "B")) . to_vector . should_equal r_sensitive ((t.at "A").equals_ignore_case (t.at "B")) . to_vector . should_equal r_insensitive - Test.specify "should report a warning if checking equality on floating point columns" <| + group_builder.specify "should report a warning if checking equality on floating point columns" <| t = table_builder [["X", [1.0, 2.1, 3.2]], ["Y", [1.0, 2.0, 3.2]]] r1 = (t.at "X") == (t.at "Y") @@ -292,42 +291,42 @@ spec setup = r2.to_vector . should_equal [False, True, False] Problems.expect_warning Floating_Point_Equality r2 - Test.group prefix+"Column.fill_nothing/empty" <| - Test.specify "should allow to fill_nothing from a value" <| + suite_builder.group prefix+"Column.fill_nothing/empty" group_builder-> + group_builder.specify "should allow to fill_nothing from a value" <| col = table_builder [["col", [0, Nothing, 4, 5, Nothing, Nothing]]] . at "col" default = 300 r = col.fill_nothing default r.to_vector . should_equal [0, 300, 4, 5, 300, 300] r.name . 
should_equal "col" - Test.specify "should allow to fill_nothing from another column" <| + group_builder.specify "should allow to fill_nothing from another column" <| t = table_builder [["col", [0, Nothing, 4, 5, Nothing, Nothing]], ["def", [1, 2, 10, 20, Nothing, 30]]] r = t.at "col" . fill_nothing (t.at "def") r.to_vector . should_equal [0, 2, 4, 5, Nothing, 30] r.name . should_equal "col" - Test.specify "should allow to fill_empty from a value" <| + group_builder.specify "should allow to fill_empty from a value" <| col = table_builder [["col", ["0", Nothing, "4", "5", Nothing, Nothing]]] . at "col" default = "default" r = col.fill_empty default r.to_vector . should_equal ["0", "default", "4", "5", "default", "default"] r.name . should_equal "col" - Test.specify "should allow to fill_empty from another column" <| + group_builder.specify "should allow to fill_empty from another column" <| t = table_builder [["col", ["0", Nothing, "4", "5", Nothing, Nothing]], ["def", ["1", "2", "10", "20", Nothing, "30"]]] r = t.at "col" . fill_nothing (t.at "def") r.to_vector . should_equal ["0", "2", "4", "5", Nothing, "30"] r.name . should_equal "col" - Test.group prefix+"Table.fill_nothing/empty" <| - Test.specify "should allow to fill_nothing from a value" <| + suite_builder.group prefix+"Table.fill_nothing/empty" group_builder-> + group_builder.specify "should allow to fill_nothing from a value" <| t = table_builder [["col0", [0, Nothing, 4, 5, Nothing, Nothing]], ["col1", [Nothing, 200, Nothing, 400, 500, Nothing]]] default = 1000 actual = t.fill_nothing ["col0", "col1"] default actual.at "col0" . to_vector . should_equal [0, 1000, 4, 5, 1000, 1000] actual.at "col1" . to_vector . 
should_equal [1000, 200, 1000, 400, 500, 1000] - Test.specify "should allow to fill_nothing from larger string and expands type to fit" <| + group_builder.specify "should allow to fill_nothing from larger string and expands type to fit" <| t = table_builder [["col0", ["0", Nothing, "4", "5", Nothing, Nothing]]] . cast "col0" (Value_Type.Char size=1 variable_length=False) actual = t.fill_nothing ["col0"] "ABCDE" actual.at "col0" . to_vector . should_equal ["0", "ABCDE", "4", "5", "ABCDE", "ABCDE"] @@ -335,13 +334,13 @@ spec setup = True -> actual.at "col0" . value_type . should_equal (Value_Type.Char size=5 variable_length=True) False -> actual.at "col0" . value_type . should_equal (Value_Type.Char variable_length=True) - Test.specify "should allow to fill_nothing from an empty string" <| + group_builder.specify "should allow to fill_nothing from an empty string" <| t = table_builder [["col0", ["0", Nothing, "4", "5", Nothing, Nothing]], ["col1", [Nothing, "200", Nothing, "400", "500", Nothing]]] actual = t.fill_nothing ["col0", "col1"] "" actual.at "col0" . to_vector . should_equal ["0", "", "4", "5", "", ""] actual.at "col1" . to_vector . should_equal ["", "200", "", "400", "500", ""] - Test.specify "should allow to fill_nothing a fixed width from an empty string" <| + group_builder.specify "should allow to fill_nothing a fixed width from an empty string" <| t = table_builder [["col0", [Nothing, "200", Nothing, "400", "500", Nothing]]] . cast "col0" (Value_Type.Char size=3 variable_length=False) actual = t.fill_nothing ["col0"] "" actual.at "col0" . to_vector . should_equal ["", "200", "", "400", "500", ""] @@ -349,7 +348,7 @@ spec setup = True -> actual.at "col0" . value_type . should_equal (Value_Type.Char size=3 variable_length=True) False -> actual.at "col0" . value_type . 
should_equal (Value_Type.Char variable_length=True) - Test.specify "should allow to fill_nothing a fixed width of width 1 correctly expanding ouptut types" <| + group_builder.specify "should allow to fill_nothing a fixed width of width 1 correctly expanding ouptut types" <| t = table_builder [["col0", ["a", Nothing, " "]]] . cast "col0" (Value_Type.Char size=1 variable_length=False) fillBlank = t.fill_nothing ["col0"] "" fillOneSpace = t.fill_nothing ["col0"] " " @@ -372,7 +371,7 @@ spec setup = True -> fillTwoSpaces.at "col0" . to_vector . should_equal ["a", "", ""] False -> fillTwoSpaces.at "col0" . to_vector . should_equal ["a", " ", " "] - Test.specify "should allow to fill_nothing a fixed width with a string of correct length without changing the type" <| + group_builder.specify "should allow to fill_nothing a fixed width with a string of correct length without changing the type" <| t = table_builder [["col0", [Nothing, "200", Nothing, "400", "500", Nothing]]] . cast "col0" (Value_Type.Char size=3 variable_length=False) actual = t.fill_nothing ["col0"] " " actual.at "col0" . to_vector . should_equal [" ", "200", " ", "400", "500", " "] @@ -380,26 +379,26 @@ spec setup = True -> actual.at "col0" . value_type . should_equal (Value_Type.Char size=3 variable_length=False) False -> actual.at "col0" . value_type . should_equal (Value_Type.Char variable_length=True) - Test.specify "should allow to fill_nothing from other columns" <| + group_builder.specify "should allow to fill_nothing from other columns" <| t = table_builder [["col0", [0, Nothing, 4, 5, Nothing, Nothing]], ["col1", [Nothing, 200, Nothing, 400, 500, Nothing]], ["def", [1, 2, 10, 20, Nothing, 30]]] actual = t.fill_nothing ["col0", "col1"] (t.at "def") actual.at "col0" . to_vector . should_equal [0, 2, 4, 5, Nothing, 30] actual.at "col1" . to_vector . 
should_equal [1, 200, 10, 400, 500, 30] - Test.specify "should allow to fill_empty from a value" <| + group_builder.specify "should allow to fill_empty from a value" <| t = table_builder [["col0", ["0", Nothing, "4", "5", Nothing, Nothing]], ["col1", [Nothing, "200", Nothing, "400", "500", Nothing]]] default = "1000" actual = t.fill_empty ["col0", "col1"] default actual.at "col0" . to_vector . should_equal ["0", "1000", "4", "5", "1000", "1000"] actual.at "col1" . to_vector . should_equal ["1000", "200", "1000", "400", "500", "1000"] - Test.specify "should allow to fill_empty from other columns" <| + group_builder.specify "should allow to fill_empty from other columns" <| t = table_builder [["col0", ["0", Nothing, "4", "5", Nothing, Nothing]], ["col1", [Nothing, "200", Nothing, "400", "500", Nothing]], ["def", ["1", "2", "10", "20", Nothing, "30"]]] actual = t.fill_empty ["col0", "col1"] (t.at "def") actual.at "col0" . to_vector . should_equal ["0", "2", "4", "5", Nothing, "30"] actual.at "col1" . to_vector . should_equal ["1", "200", "10", "400", "500", "30"] - Test.specify "fill_nothing should leave other columns alone" <| + group_builder.specify "fill_nothing should leave other columns alone" <| t = table_builder [["col0", [0, Nothing, 4, 5, Nothing, Nothing]], ["col_between", [3, 4, 5, 6, 7, 8]], ["col1", [Nothing, 200, Nothing, 400, 500, Nothing]], ["def", [1, 2, 10, 20, Nothing, 30]]] default = 1000 actual = t.fill_nothing ["col0", "col1"] default @@ -408,7 +407,7 @@ spec setup = actual.at "col1" . to_vector . should_equal [1000, 200, 1000, 400, 500, 1000] actual.column_names . 
should_equal ["col0", "col_between", "col1", "def"] - Test.specify "fill_nothing should work with integer column selectors" <| + group_builder.specify "fill_nothing should work with integer column selectors" <| t = table_builder [["col0", [0, Nothing, 4, 5, Nothing, Nothing]], ["col_between", [3, 4, 5, 6, 7, 8]], ["col1", [Nothing, 200, Nothing, 400, 500, Nothing]], ["def", [1, 2, 10, 20, Nothing, 30]]] default = 1000 actual = t.fill_nothing [0, 2] default @@ -417,14 +416,14 @@ spec setup = actual.at "col1" . to_vector . should_equal [1000, 200, 1000, 400, 500, 1000] actual.column_names . should_equal ["col0", "col_between", "col1", "def"] - Test.group prefix+"Table.text_replace" <| - Test.specify "should allow to replace values in a table" <| + suite_builder.group prefix+"Table.text_replace" group_builder-> + group_builder.specify "should allow to replace values in a table" <| with_mixed_columns_if_supported [["col0", ["abc", "def", "ghi"]], ["col1", ["nabc", "ndef", "asdf"]]] t-> actual = t.text_replace ["col0", "col1"] "ab" "xy" actual.at "col0" . to_vector . should_equal ["xyc", "def", "ghi"] actual.at "col1" . to_vector . should_equal ["nxyc", "ndef", "asdf"] - Test.specify "should allow to replace values in a table with a regex" <| + group_builder.specify "should allow to replace values in a table with a regex" <| t = table_builder [["col0", ["abc", "def", "ghi"]], ["col1", ["nabc", "ndef", "asdf"]]] actual = t.text_replace ["col0", "col1"] "[bdi]".to_regex "xy" case actual.is_error && setup.is_database of @@ -434,7 +433,7 @@ spec setup = actual.at "col0" . to_vector . should_equal ["axyc", "xyef", "ghxy"] actual.at "col1" . to_vector . 
should_equal ["naxyc", "nxyef", "asxyf"] - Test.specify "should allow to replace values in a table with a column" <| + group_builder.specify "should allow to replace values in a table with a column" <| t = table_builder [["col0", ["abc", "def", "ghi"]], ["col1", ["nabc", "ndef", "asdf"]], ["col2", ["xy", "yx", "zz"]]] actual = t.text_replace ["col0", "col1"] "[bdi]".to_regex (t.at "col2") case actual.is_error && setup.is_database of @@ -445,7 +444,7 @@ spec setup = actual.at "col1" . to_vector . should_equal ["naxyc", "nyxef", "aszzf"] actual.at "col2" . to_vector . should_equal ["xy", "yx", "zz"] - Test.specify "should allow to use Column_Ref in replace" <| + group_builder.specify "should allow to use Column_Ref in replace" <| t = table_builder [["txt", ["abc", "def", "ghi"]], ["term", ["b", "d", "i"]], ["new", ["X", "Y", "Z"]]] t1 = t.text_replace "txt" (Column_Ref.Name "term") (Column_Ref.Name "new") case t1.is_error && setup.is_database of @@ -454,8 +453,8 @@ spec setup = False -> t1.at "txt" . to_vector . should_equal ["aXc", "Yef", "ghZ"] - Test.group prefix+"Column Comparisons" <| - Test.specify "should allow to compare numbers" <| + suite_builder.group prefix+"Column Comparisons" group_builder-> + group_builder.specify "should allow to compare numbers" <| with_mixed_columns_if_supported [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]]] t2-> x = t2.at "x" y = t2.at "y" @@ -476,7 +475,7 @@ spec setup = op y 23 . to_vector . should_succeed op x 1.5 . to_vector . should_succeed - Test.specify "should allow to compare texts" <| + group_builder.specify "should allow to compare texts" <| t0 = table_builder [["X", ["a", "b", "c"]], ["Y", ["a", "b", "d"]]] t = t0.cast "X" (Value_Type.Char size=1 variable_length=False) @@ -485,7 +484,7 @@ spec setup = op (t.at "X") (t.at "Y") . to_vector . should_succeed op (t.at "X") "abc" . to_vector . 
should_succeed - Test.specify "should allow to compare booleans" <| + group_builder.specify "should allow to compare booleans" <| t = table_builder [["X", [True, False, True]], ["Y", [False, True, True]]] ((t.at "X") < (t.at "Y")).to_vector . should_equal [False, True, False] @@ -498,7 +497,7 @@ spec setup = ((t.at "X") <= True).to_vector . should_equal [True, True, True] ((t.at "X") > True).to_vector . should_equal [False, False, False] - Test.specify "should report error if incomparable types are compared" <| + group_builder.specify "should report error if incomparable types are compared" <| t = table_builder [["X", [1, 2]], ["Y", ["a", "b"]], ["Z", [True, False]]] [(<), (<=), (>), (>=)].each op-> @@ -513,7 +512,7 @@ spec setup = op (t.at "Z") (t.at "X") . should_fail_with Invalid_Value_Type if setup.test_selection.supports_mixed_columns then - Test.specify "should allow comparing Mixed columns" <| + group_builder.specify "should allow comparing Mixed columns" <| t1 = table_builder [["X", ["a", 23]], ["Y", ["b", 1]]] ((t1.at "X") == (t1.at "Y")) . to_vector . should_equal [False, False] ((t1.at "X") <= (t1.at "Y")) . to_vector . should_equal [True, False] @@ -553,7 +552,7 @@ spec setup = ((t7.at "X") <= (t7.at "Y")) . to_vector . should_fail_with Incomparable_Values ((t7.at "Y") > (t7.at "X")) . to_vector . should_fail_with Incomparable_Values - Test.specify "Between should return null if any of the values are null" pending="TODO" <| + group_builder.specify "Between should return null if any of the values are null" pending="TODO" <| a = [2, 3, Nothing, 7, 5, Nothing] b = [0, 5, 7, Nothing, 7, Nothing] c = [9, 8, 7, 7, Nothing, Nothing] @@ -562,21 +561,21 @@ spec setup = t = table_builder [["A", a], ["B", b], ["C", c]] ((t.at "A").between (t.at "B") (t.at "C")) . to_vector . 
should_equal r - Test.group prefix+"Arithmetic Column Operations" <| + suite_builder.group prefix+"Arithmetic Column Operations" group_builder-> t2 = table_builder [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]]] x = t2.at "x" y = t2.at "y" - Test.specify "should allow basic operations" <| + group_builder.specify "should allow basic operations" <| (x + y).to_vector . should_equal [3, 7.25, 10, Nothing] (x - y).to_vector . should_equal [-1.0, 0.75, 0.0, Nothing] (x * y).to_vector . should_equal [2.0, 13.0, 25.0, Nothing] - Test.specify "should allow combining a column with a scalar" <| + group_builder.specify "should allow combining a column with a scalar" <| (x + 100).to_vector . should_equal [101, 104, 105, Nothing] (x * 10).to_vector . should_equal [10, 40, 50, Nothing] (x - 10).to_vector . should_equal [-9, -6, -5, Nothing] - Test.specify "should work with mixed types" <| + group_builder.specify "should work with mixed types" <| with_mixed_columns_if_supported [["X", [100, 25]], ["Y", [2, 5]]] t-> x = t.at "X" y = t.at "Y" @@ -587,7 +586,7 @@ spec setup = (x % y).to_vector . should_equal [0, 0] (x ^ y).to_vector . should_equal [10000, 9765625] - Test.specify "should correctly infer the types" <| + group_builder.specify "should correctly infer the types" <| (x + x).value_type . is_integer . should_be_true (x + y).value_type . is_floating_point . should_be_true (x + 2).value_type . is_integer . should_be_true @@ -605,7 +604,7 @@ spec setup = (x ^ x).value_type . is_numeric . 
should_be_true - Test.specify "should check types" <| + group_builder.specify "should check types" <| t = table_builder [["X", [1, 2]], ["Y", ["a", "b"]], ["Z", [True, False]]] x = t.at "X" y = t.at "Y" @@ -642,32 +641,32 @@ spec setup = True -> t = table_builder [["X", [1.5, 3.0, Number.positive_infinity, Number.negative_infinity, Number.nan, Nothing]], ["Y", [1, 2, 3, 4, 5, Nothing]], ["Z", ["1", "2", "3", "4", "5", Nothing]]] - Test.specify "should support is_nan" <| + group_builder.specify "should support is_nan" <| t.at "X" . is_nan . to_vector . should_equal [False, False, False, False, True, Nothing] t.at "Y" . is_nan . to_vector . should_equal [False, False, False, False, False, Nothing] t.at "Z" . is_nan . to_vector . should_fail_with Invalid_Value_Type - Test.specify "should support is_infinite" <| + group_builder.specify "should support is_infinite" <| t.at "X" . is_infinite . to_vector . should_equal [False, False, True, True, False, Nothing] t.at "Y" . is_infinite . to_vector . should_equal [False, False, False, False, False, Nothing] t.at "Z" . is_infinite . to_vector . should_fail_with Invalid_Value_Type False -> - Test.specify "should report that is_nan is not supported" <| + group_builder.specify "should report that is_nan is not supported" <| t = table_builder [["X", [1.5]]] t.at "X" . is_nan . should_fail_with Unsupported_Database_Operation - Test.specify "should support is_infinite" <| + group_builder.specify "should support is_infinite" <| t = table_builder [["X", [1.5, 3.0, Number.positive_infinity, Number.negative_infinity, Nothing]], ["Y", [1, 2, 3, 4, Nothing]], ["Z", ["1", "2", "3", "4", Nothing]]] t.at "X" . is_infinite . to_vector . should_equal [False, False, True, True, Nothing] t.at "Y" . is_infinite . to_vector . should_equal [False, False, False, False, Nothing] t.at "Z" . is_infinite . to_vector . 
should_fail_with Invalid_Value_Type - Test.specify "should support is_blank" <| + group_builder.specify "should support is_blank" <| t = table_builder [["X", [1.5, 2, Number.nan, Nothing]], ["Y", [1, Nothing, 3, 4]]] t.at "X" . is_blank treat_nans_as_blank=True . to_vector . should_equal [False, False, True, True] t.at "Y" . is_blank treat_nans_as_blank=True . to_vector . should_equal [False, True, False, False] - Test.specify "division should be aligned with the Enso arithmetic" <| + group_builder.specify "division should be aligned with the Enso arithmetic" <| a = [1, 5, 10, 100] b = [2, 2, 4, 5] r = [0.5, 2.5, 2.5, 20.0] @@ -703,7 +702,7 @@ spec setup = r6.value_type . is_floating_point . should_be_true db_pending = if setup.is_database then "Arithmetic error handling is currently not implemented for the Database backend." - Test.specify "should allow division by 0 and report warnings" pending=db_pending <| + group_builder.specify "should allow division by 0 and report warnings" pending=db_pending <| t = table_builder [["a", [3, 1, 0]], ["b", [2, 0, 0]], ["c", [1.5, 1.5, 0.0]], ["d", [1.5, 0.0, 0.0]]] a = t.at "a" b = t.at "b" @@ -748,7 +747,7 @@ spec setup = r10.to_vector.to_text . should_equal "[NaN, NaN, NaN]" has_div_warnings "[c] % 0" [0, 1, 2] r10 - Test.specify "should limit the number of rows listed in the message" pending=db_pending <| + group_builder.specify "should limit the number of rows listed in the message" pending=db_pending <| t1 = table_builder [["a", 0.up_to 200 . to_vector]] zero = t1.at "a" - t1.at "a" div = t1.at "a" / zero @@ -758,50 +757,50 @@ spec setup = warning.should_be_a Arithmetic_Error warning.message . should_equal "Division by zero (at rows [0, 1, 2, 3, 4, 5, 6, 7, 8, ...])." - Test.specify "should return null if one of arguments is missing" <| + group_builder.specify "should return null if one of arguments is missing" <| nulls = [Nothing, Nothing, Nothing, Nothing] (x + Nothing).to_vector . 
should_equal nulls (x - Nothing).to_vector . should_equal nulls (x * Nothing).to_vector . should_equal nulls (x / Nothing).to_vector . should_equal nulls - Test.group prefix+"Rounding-like operations" <| - Test.specify "should name a rounding column correctly" <| + suite_builder.group prefix+"Rounding-like operations" group_builder-> + group_builder.specify "should name a rounding column correctly" <| table = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] table.at "x" . round . name . should_equal "round([x])" test_floatlike type = - Test.specify "should allow round on a "+type.to_text+" column" <| + group_builder.specify "should allow round on a "+type.to_text+" column" <| table = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] result = table.at "x" . cast type . round result.to_vector.should_equal [0, 1, 3, 4, 0, -1, -3, -4] result.name . should_equal "round([x])" - Test.specify "should allow round on a float column (to >0 decimal places)" <| + group_builder.specify "should allow round on a float column (to >0 decimal places)" <| table = table_builder [["x", [0.51, 0.59, 3.51, 3.59, -0.51, -0.59, -3.51, -3.59]]] result = table.at "x" . cast type . round 1 result.to_vector.should_equal [0.5, 0.6, 3.5, 3.6, -0.5, -0.6, -3.5, -3.6] result.name . should_equal "round([x])" - Test.specify "should allow round on a float column (to <0 decimal places)" <| + group_builder.specify "should allow round on a float column (to <0 decimal places)" <| table = table_builder [["x", [51.2, 59.3, 351.45, 359.11, -51.2, -59.3, -351.23, -359.69]]] result = table.at "x" . cast type . round -1 result.to_vector.should_equal [50.0, 60.0, 350.0, 360.0, -50.0, -60.0, -350.0, -360.0] result.name . 
should_equal "round([x])" - Test.specify "should allow truncate on a "+type.to_text+" column" <| + group_builder.specify "should allow truncate on a "+type.to_text+" column" <| table = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] result = table.at "x" . cast type . truncate result.to_vector.should_equal [0, 0, 3, 3, 0, 0, -3, -3] result.name . should_equal "truncate([x])" - Test.specify "should allow ceil on a "+type.to_text+" column" <| + group_builder.specify "should allow ceil on a "+type.to_text+" column" <| table = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] result = table.at "x" . cast type . ceil result.to_vector.should_equal [1, 1, 4, 4, 0, 0, -3, -3] result.name . should_equal "ceil([x])" - Test.specify "should allow floor on a "+type.to_text+" column" <| + group_builder.specify "should allow floor on a "+type.to_text+" column" <| table = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] result = table.at "x" . cast type . floor result.to_vector.should_equal [0, 0, 3, 3, -1, -1, -4, -4] @@ -811,36 +810,36 @@ spec setup = if setup.test_selection.supports_decimal_type then test_floatlike Value_Type.Decimal - Test.specify "should allow round on an int column" <| + group_builder.specify "should allow round on an int column" <| table = table_builder [["x", [1, 9, 31, 39, -1, -9, -31, -39]]] result = table.at "x" . round -1 result.to_vector.should_equal [0, 10, 30, 40, 0, -10, -30, -40] result.name . should_equal "round([x])" - Test.specify "should allow truncate on an int column" <| + group_builder.specify "should allow truncate on an int column" <| table = table_builder [["x", [0, 3, -3, 1, -2]]] result = table.at "x" . truncate result.to_vector.should_equal [0, 3, -3, 1, -2] result.name . 
should_equal "truncate([x])" - Test.specify "should allow ceil on an int column" <| + group_builder.specify "should allow ceil on an int column" <| table = table_builder [["x", [0, 3, -3, 1, -2]]] result = table.at "x" . ceil result.to_vector.should_equal [0, 3, -3, 1, -2] result.name . should_equal "ceil([x])" - Test.specify "should allow floor on an int column" <| + group_builder.specify "should allow floor on an int column" <| table = table_builder [["x", [0, 3, -3, 1, -2]]] result = table.at "x" . floor result.to_vector.should_equal [0, 3, -3, 1, -2] result.name . should_equal "floor([x])" - Test.specify "should fail on decimal_places out of range" <| + group_builder.specify "should fail on decimal_places out of range" <| table = table_builder [["x", [0, 3, -3, 1, -2]]] table.at "x" . round 16 . should_fail_with Illegal_Argument if setup.test_selection.supports_decimal_type then - Test.specify "should return decimals when rounding decimals" <| + group_builder.specify "should return decimals when rounding decimals" <| i1 = 9223372036854775807 - 1 c = table_builder [["X", [i1]]] . at "X" decimal_col = c.cast Value_Type.Decimal @@ -849,16 +848,16 @@ spec setup = [(.floor), (.ceil), (.truncate), (x-> x.round 0), (x-> x.round 2)].each op-> op decimal_col2 . to_vector . should_equal [i1 + i1*i1] - Test.specify "should allow Nothing/NULL" <| + group_builder.specify "should allow Nothing/NULL" <| table = table_builder [["x", [Nothing, 0.51, 0.59, 3.51, Nothing, 3.59, -0.51, -0.59, -3.51, -3.59]]] result = table.at "x" . round 1 result.to_vector.should_equal [Nothing, 0.5, 0.6, 3.5, Nothing, 3.6, -0.5, -0.6, -3.5, -3.6] - Test.specify "should fail on bad column type" <| + group_builder.specify "should fail on bad column type" <| table = table_builder [["x", ["a", "b"]]] table.at "x" . round . 
should_fail_with Invalid_Value_Type - Test.specify "should work with mixed type columns" <| + group_builder.specify "should work with mixed type columns" <| with_mixed_columns_if_supported [["X", [112, 210, 222]]] t-> x = t.at "X" x.floor . to_vector . should_equal [112, 210, 222] @@ -874,8 +873,8 @@ spec setup = x.round . to_vector . should_equal [1, -2, 4] x.truncate . to_vector . should_equal [1, -2, 3] - Test.group prefix+"Date truncation" pending=pending_datetime <| - Test.specify "should be able to truncate a column of Date_Times" <| + suite_builder.group prefix+"Date truncation" pending=pending_datetime group_builder-> + group_builder.specify "should be able to truncate a column of Date_Times" <| dates = [Date_Time.new 2020 10 24 1 2 3, Date_Time.new 2020 10 24 1 2 3] table = table_builder [["foo", dates]] table.at "foo" . value_type . should_equal (Value_Type.Date_Time with_timezone=True) @@ -884,8 +883,8 @@ spec setup = truncated . value_type . should_equal Value_Type.Date truncated.name . should_equal "truncate([foo])" - Test.group prefix+"Text Column Operations" <| - Test.specify "should handle operations like starts_with, ends_with, contains" <| + suite_builder.group prefix+"Text Column Operations" group_builder-> + group_builder.specify "should handle operations like starts_with, ends_with, contains" <| with_mixed_columns_if_supported [["s1", ["foobar", "bar", "baz", "BAB", Nothing]], ["s2", ["foo", "ar", "a", "b", Nothing]]] t3-> s1 = t3.at "s1" s2 = t3.at "s2" @@ -911,7 +910,7 @@ spec setup = s1.like (s2+"%r") . to_vector . should_equal [True, False, False, False, Nothing] s1.like "%r%" . to_vector . 
should_equal [True, True, False, False, Nothing] - Test.specify "should handle operation text_length" <| + group_builder.specify "should handle operation text_length" <| with_mixed_columns_if_supported [["strings", ["foobar", "", Nothing, "👩‍🔬", "café", "It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of foolishness, it was the epoch of belief, it was the epoch of incredulity, it was the season of light, it was the season of darkness, it was the spring of hope, it was the winter of despair."]]] t-> col = t.at "strings" res = col.text_length @@ -921,12 +920,12 @@ spec setup = False -> res . to_vector . should_equal [6, 0, Nothing, 1, 4, 286] # Grapheme Length True -> res . to_vector . should_equal [6, 0, Nothing, 3, 4, 286] # Storage Length - Test.specify "text_length should error on non-string columns" <| + group_builder.specify "text_length should error on non-string columns" <| t = table_builder [["numbers", [1, 2, 3]]] col = t.at "numbers" col.text_length . should_fail_with Invalid_Value_Type - Test.specify "should handle operation text_left and text_right with length 1" <| + group_builder.specify "should handle operation text_left and text_right with length 1" <| with_mixed_columns_if_supported [["strings", ["a", "foobar", "", Nothing, "café", "It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of foolishness, it was the epoch of belief, it was the epoch of incredulity, it was the season of light, it was the season of darkness, it was the spring of hope, it was the winter of despair."]]] t-> col = t.at "strings" . cast (Value_Type.Char size=286 variable_length=True) resLeft = col.text_left 1 @@ -942,7 +941,7 @@ spec setup = False -> resRight . value_type . should_equal (Value_Type.Char size=286 variable_length=True) True -> resRight . value_type . 
should_equal (Value_Type.Char variable_length=True) - Test.specify "should handle operation text_left and text_right of grapheme and non-grapheme" <| + group_builder.specify "should handle operation text_left and text_right of grapheme and non-grapheme" <| with_mixed_columns_if_supported [["strings", ["a", "foobar", "", Nothing, "👩‍🔬👩‍🔬V👩‍🔬👩‍🔬", "café", "It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of foolishness, it was the epoch of belief, it was the epoch of incredulity, it was the season of light, it was the season of darkness, it was the spring of hope, it was the winter of despair."]]] t-> col = t.at "strings" resLeft = col.text_left 3 @@ -957,13 +956,13 @@ spec setup = False -> resRight . to_vector . should_equal ["a", "bar", "", Nothing, "V👩‍🔬👩‍🔬", "afé", "ir."] # Grapheme Length True -> resRight . to_vector . should_equal ["a", "bar", "", Nothing, "👩‍🔬", "afé", "ir."] # Storage Length - Test.specify "text_left and text_right should error on non-string columns" <| + group_builder.specify "text_left and text_right should error on non-string columns" <| t = table_builder [["numbers", [1, 2, 3]]] col = t.at "numbers" col.text_left 6 . should_fail_with Invalid_Value_Type col.text_right 6 . should_fail_with Invalid_Value_Type - Test.specify "text_left and text_right should error on non integer parameters" <| + group_builder.specify "text_left and text_right should error on non integer parameters" <| t = table_builder [["numbers", [1, 2, 3]]] col = t.at "numbers" col.text_left 3.14 . should_fail_with Invalid_Value_Type @@ -971,7 +970,7 @@ spec setup = col.text_left "7" . should_fail_with Invalid_Value_Type col.text_left "7" . 
should_fail_with Invalid_Value_Type - Test.specify "text_left and text_right should return empty on zero argument" <| + group_builder.specify "text_left and text_right should return empty on zero argument" <| with_mixed_columns_if_supported [["strings", ["a", "foobar", "", Nothing, "👩‍🔬👩‍🔬V👩‍🔬👩‍🔬", "café", "It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of foolishness, it was the epoch of belief, it was the epoch of incredulity, it was the season of light, it was the season of darkness, it was the spring of hope, it was the winter of despair."]]] t-> col = t.at "strings" resLeft = col.text_left 0 @@ -981,7 +980,7 @@ spec setup = resLeft . to_vector . should_equal ["", "", "", Nothing, "", "", ""] resRight . to_vector . should_equal ["", "", "", Nothing, "", "", ""] - Test.specify "text_left and text_right should return empty on negative arguments" <| + group_builder.specify "text_left and text_right should return empty on negative arguments" <| with_mixed_columns_if_supported [["strings", ["a", "foobar", "", Nothing, "👩‍🔬👩‍🔬V👩‍🔬👩‍🔬", "café", "It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of foolishness, it was the epoch of belief, it was the epoch of incredulity, it was the season of light, it was the season of darkness, it was the spring of hope, it was the winter of despair."]]] t-> col = t.at "strings" resLeft = col.text_left -3 @@ -991,7 +990,7 @@ spec setup = resLeft . to_vector . should_equal ["", "", "", Nothing, "", "", ""] resRight . to_vector . should_equal ["", "", "", Nothing, "", "", ""] - Test.specify "should handle operations like is_empty, is_blank, fill_empty" <| + group_builder.specify "should handle operations like is_empty, is_blank, fill_empty" <| with_mixed_columns_if_supported [["s", ["", " ", " ", Nothing, "foo"]], ["letters", ["a", "b", "c", "d", "e"]]] t-> s = t.at "s" s.is_empty . to_vector . 
should_equal [True, False, False, True, False] @@ -999,7 +998,7 @@ spec setup = s.fill_empty "<>" . to_vector . should_equal ["<>", " ", " ", "<>", "foo"] s.fill_empty (t.at "letters") . to_vector . should_equal ["a", " ", " ", "d", "foo"] - Test.specify "should check types" <| + group_builder.specify "should check types" <| t4 = table_builder [["str", ['a', 'b']], ["int", [1, 2]]] str = t4.at "str" int = t4.at "int" @@ -1025,7 +1024,7 @@ spec setup = int.is_empty . should_fail_with Invalid_Value_Type - Test.specify "should return right types" <| + group_builder.specify "should return right types" <| t3 = table_builder [["s1", ["foobar", "bar", "baz", "BAB", Nothing]], ["s2", ["foo", "ar", "a", "b", Nothing]]] s1 = t3.at "s1" s2 = t3.at "s2" @@ -1046,7 +1045,7 @@ spec setup = s1.fill_empty "<>" . value_type . is_text . should_be_true s1.fill_empty s2 . value_type . is_text . should_be_true - Test.specify "should support text concatenation with the + operator" <| + group_builder.specify "should support text concatenation with the + operator" <| with_mixed_columns_if_supported [["s1", ["foobar", "bar", "baz", "BAB", Nothing]], ["s2", ["foo", "ar", "a", "b", Nothing]]] t3-> s1 = t3.at "s1" s2 = t3.at "s2" @@ -1062,12 +1061,12 @@ spec setup = c3.to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, Nothing] c3.value_type.is_text . should_be_true - Test.group prefix+"Min/Max Operations" <| + suite_builder.group prefix+"Min/Max Operations" group_builder-> t = table_builder [["a", [1, 2, 3]], ["b", [4.5, 5.5, 6.5]], ["c", ['a', 'b', 'c']], ["d", [True, False, True]]] a = t.at "a" b = t.at "b" c = t.at "c" - Test.specify "should allow one or more args and return the correct type" <| + group_builder.specify "should allow one or more args and return the correct type" <| c1 = a.min 2 c1.to_vector . should_equal [1, 2, 2] c1.value_type.is_integer . should_be_true @@ -1110,7 +1109,7 @@ spec setup = c10.to_vector . 
should_equal [True, False, True] c10.value_type.is_boolean . should_be_true - Test.specify "should check types" <| + group_builder.specify "should check types" <| [(.min), (.max)].each op-> op a c . should_fail_with Invalid_Value_Type op a [1, 2, c] . should_fail_with Invalid_Value_Type @@ -1134,84 +1133,84 @@ spec setup = result.value_type . should_equal Value_Type.Char result . to_vector . should_equal expected - Test.group prefix+"replace: literal text pattern and replacement" <| + suite_builder.group prefix+"replace: literal text pattern and replacement" group_builder-> col0 = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO']]] . at "x" col1 = table_builder [["x", ['a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']]] . at "x" - Test.specify "case_sensitivity=sensitive/default use_regex=false only_first=false" + group_builder.specify "case_sensitivity=sensitive/default use_regex=false only_first=false" do_replace col0 'hello' 'bye' expected=['bye Hello', 'bye bye', 'HELLO HELLO'] do_replace col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Default expected=['bye Hello', 'bye bye', 'HELLO HELLO'] do_replace col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Sensitive expected=['bye Hello', 'bye bye', 'HELLO HELLO'] do_replace col1 'a[bcd]' 'hey' expected=['hey A[bCd] hey', 'abac ad Ab aCAd'] - Test.specify "case_sensitivity=sensitive/default use_regex=false only_first=true" + group_builder.specify "case_sensitivity=sensitive/default use_regex=false only_first=true" do_replace col0 'hello' 'bye' only_first=True expected=['bye Hello', 'bye hello', 'HELLO HELLO'] do_replace col1 'a[bcd]' 'hey' only_first=True expected=['hey A[bCd] a[bcd]', 'abac ad Ab aCAd'] - Test.specify "case_sensitivity=insensitive use_regex=false only_first=false" + group_builder.specify "case_sensitivity=insensitive use_regex=false only_first=false" do_replace col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Insensitive expected=['bye bye', 'bye bye', 'bye bye'] do_replace 
col1 'a[bcd]' 'hey' case_sensitivity=Case_Sensitivity.Insensitive expected=['hey hey hey', 'abac ad Ab aCAd'] - Test.specify "case_sensitivity=insensitive use_regex=false only_first=true" + group_builder.specify "case_sensitivity=insensitive use_regex=false only_first=true" do_replace col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['bye Hello', 'bye hello', 'bye HELLO'] do_replace col1 'a[bcd]' 'hey' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['hey A[bCd] a[bcd]', 'abac ad Ab aCAd'] - Test.group prefix+"replace: literal regex pattern and replacement" <| + suite_builder.group prefix+"replace: literal regex pattern and replacement" group_builder-> col1 = table_builder [["x", ['a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']]] . at "x" - Test.specify "case_sensitivity=sensitive/default use_regex=True only_first=false" + group_builder.specify "case_sensitivity=sensitive/default use_regex=True only_first=false" do_replace col1 'a[bcd]'.to_regex 'hey' expected=['a[bcd] A[bCd] a[bcd]', 'heyhey hey Ab aCAd'] - Test.specify "case_sensitivity=sensitive/default use_regex=True only_first=true" + group_builder.specify "case_sensitivity=sensitive/default use_regex=True only_first=true" do_replace col1 'a[bcd]'.to_regex 'hey' only_first=True expected=['a[bcd] A[bCd] a[bcd]', 'heyac ad Ab aCAd'] - Test.specify "case_sensitivity=insensitive use_regex=True only_first=false" + group_builder.specify "case_sensitivity=insensitive use_regex=True only_first=false" do_replace col1 'a[bcd]'.to_regex 'hey' case_sensitivity=Case_Sensitivity.Insensitive expected=['a[bcd] A[bCd] a[bcd]', 'heyhey hey hey heyhey'] - Test.specify "case_sensitivity=insensitive use_regex=True only_first=true" + group_builder.specify "case_sensitivity=insensitive use_regex=True only_first=true" do_replace col1 'a[bcd]'.to_regex 'hey' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['a[bcd] A[bCd] a[bcd]', 'heyac ad Ab aCAd'] - 
Test.specify "can properly escape complex regexes" <| + group_builder.specify "can properly escape complex regexes" <| regex = "^([^\(]+)|(?\w\d[a-z])+$" col = table_builder [["x", [regex]]] . at "x" do_replace col regex "asdf" ["asdf"] - Test.group prefix+"replace: pattern and replacement columns" <| + suite_builder.group prefix+"replace: pattern and replacement columns" group_builder-> table = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO', 'a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']], ["patterns", ['hello', 'hello', 'hello', 'a[bcd]', 'a[bcd]']], ["replacements", ['bye', 'bye', 'bye', 'hey', 'hey']]] col = table.at "x" patterns = table.at "patterns" replacements = table.at "replacements" - Test.specify "case_sensitivity=sensitive/default use_regex=false only_first=false" + group_builder.specify "case_sensitivity=sensitive/default use_regex=false only_first=false" do_replace col patterns replacements expected=['bye Hello', 'bye bye', 'HELLO HELLO', 'hey A[bCd] hey', 'abac ad Ab aCAd'] do_replace col patterns replacements case_sensitivity=Case_Sensitivity.Default expected=['bye Hello', 'bye bye', 'HELLO HELLO', 'hey A[bCd] hey', 'abac ad Ab aCAd'] do_replace col patterns replacements case_sensitivity=Case_Sensitivity.Sensitive expected=['bye Hello', 'bye bye', 'HELLO HELLO', 'hey A[bCd] hey', 'abac ad Ab aCAd'] - Test.specify "case_sensitivity=sensitive/default use_regex=false only_first=true" + group_builder.specify "case_sensitivity=sensitive/default use_regex=false only_first=true" do_replace col patterns replacements only_first=True expected=['bye Hello', 'bye hello', 'HELLO HELLO', 'hey A[bCd] a[bcd]', 'abac ad Ab aCAd'] - Test.specify "case_sensitivity=insensitive use_regex=false only_first=false" + group_builder.specify "case_sensitivity=insensitive use_regex=false only_first=false" do_replace col patterns replacements case_sensitivity=Case_Sensitivity.Insensitive expected=['bye bye', 'bye bye', 'bye bye', 'hey hey hey', 'abac ad Ab aCAd'] 
- Test.specify "case_sensitivity=insensitive use_regex=false only_first=true" + group_builder.specify "case_sensitivity=insensitive use_regex=false only_first=true" do_replace col patterns replacements case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['bye Hello', 'bye hello', 'bye HELLO', 'hey A[bCd] a[bcd]', 'abac ad Ab aCAd'] - Test.group prefix+"replace: empty table and nothings" <| - Test.specify "should work on empty tables" <| + suite_builder.group prefix+"replace: empty table and nothings" group_builder-> + group_builder.specify "should work on empty tables" <| col = table_builder [["x", ['hello Hello']]] . filter "x" (Filter_Condition.Is_Nothing) . at "x" do_replace col 'hello' 'bye' expected=[] - Test.specify "should work on Nothing text column" <| + group_builder.specify "should work on Nothing text column" <| col = table_builder [["x", ['hello Hello', Nothing]]] . filter "x" (Filter_Condition.Is_Nothing) . at "x" do_replace col 'hello' 'bye' expected=[Nothing] - if setup.is_database then Test.group prefix+"replace: DB specific edge-cases" <| + if setup.is_database then suite_builder.group prefix+"replace: DB specific edge-cases" group_builder-> col = table_builder [["A", ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]]] . at 'A' - Test.specify "should not allow Case_Sensitivity.Insensitive with a non-default locale" <| + group_builder.specify "should not allow Case_Sensitivity.Insensitive with a non-default locale" <| locale = Locale.new "en" "GB" "UTF-8" col.replace 'asdf' 'zxcv' case_sensitivity=(Case_Sensitivity.Insensitive locale) . 
should_fail_with Illegal_Argument - Test.specify "column name" <| + group_builder.specify "column name" <| table = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO', 'a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']], ["patterns", ['hello', 'hello', 'hello', 'a[bcd]', 'a[bcd]']], ["replacements", ['bye', 'bye', 'bye', 'hey', 'hey']]] col = table.at "x" patterns = table.at "patterns" @@ -1225,7 +1224,7 @@ spec setup = if supported_replace_params.contains (Replace_Params.Value Column Case_Sensitivity.Default False) then col.replace patterns replacements . name . should_equal 'replace([x], [patterns], [replacements])' - Test.group prefix+"Column Operations - Text Replace (in-memory only)" <| + suite_builder.group prefix+"Column Operations - Text Replace (in-memory only)" group_builder-> if setup.is_database.not then t4 = table_builder [["A", ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]], ["B", ["A","O","a","E","o","O"]], ["C", [1,2,3,4,5,6]], ["D", ['',Nothing,'',Nothing,'','']]] a = t4.at "A" @@ -1233,7 +1232,7 @@ spec setup = c = t4.at "C" d = t4.at "D" - Test.specify "should allow simple replacing" <| + group_builder.specify "should allow simple replacing" <| a.replace "a" "#" . to_vector . should_equal ["Alph#", "Br#vo", "Ch#rlie", "Delt#", "Echo", "Foxtrot"] a.replace "o" "#" . to_vector . should_equal ["Alpha", "Brav#", "Charlie", "Delta", "Ech#", "F#xtr#t"] a.replace b "#" . to_vector . should_equal ["#lpha", "Bravo", "Ch#rlie", "Delta", "Ech#", "Foxtrot"] @@ -1242,12 +1241,12 @@ spec setup = a.replace b "#" Case_Sensitivity.Insensitive . to_vector . should_equal ["#lph#", "Brav#", "Ch#rlie", "D#lta", "Ech#", "F#xtr#t"] a.replace b "#" Case_Sensitivity.Insensitive only_first=True . to_vector . should_equal ["#lpha", "Brav#", "Ch#rlie", "D#lta", "Ech#", "F#xtrot"] - Test.specify "should allow regex based replacing" <| + group_builder.specify "should allow regex based replacing" <| a.replace "[aeiou]".to_regex "#" . to_vector . 
should_equal ["Alph#", "Br#v#", "Ch#rl##", "D#lt#", "Ech#", "F#xtr#t"] a.replace "[aeiou]".to_regex "#" . to_vector . should_equal ["Alph#", "Br#v#", "Ch#rl##", "D#lt#", "Ech#", "F#xtr#t"] a.replace "([aeiou])(.*?)[aeiou]".to_regex "$1$2$1" . to_vector . should_equal ["Alpha", "Brava", "Charlae", "Delte", "Echo", "Foxtrot"] - Test.specify "should handle unicode" <| + group_builder.specify "should handle unicode" <| table = table_builder [["x", ["śćxx", "ąąasdfąą", "affib"]], ["patterns", ["ć", "ąą", "ffi"]], ["replacements", ["abc", "def", "ghi"]]] col = table.at "x" patterns = table.at "patterns" @@ -1255,7 +1254,7 @@ spec setup = col.replace patterns replacements . to_vector . should_equal ["śabcxx", "defasdfdef", "aghib"] - Test.specify "should take pattern and replacement string columns" <| + group_builder.specify "should take pattern and replacement string columns" <| t = table_builder [["x", ["hello", "what", "yes"]], ["patterns", ["ell", "wh", "es"]], ["replacements", ["xyz", "qwer", "asdf"]]] col = t.at "x" patterns = t.at "patterns" @@ -1263,18 +1262,18 @@ spec setup = col.replace patterns replacements . to_vector . should_equal ["hxyzo", "qwerat", "yasdf"] - Test.specify "should only allow replace on Text columns" <| + group_builder.specify "should only allow replace on Text columns" <| c.replace "a" "#" . should_fail_with Invalid_Value_Type a.replace 1 "#" . should_fail_with Invalid_Value_Type a.replace c "#" . should_fail_with Invalid_Value_Type a.replace "a" 1 . should_fail_with Invalid_Value_Type a.replace "a" c . should_fail_with Invalid_Value_Type - Test.specify "should not replace if Empty term" <| + group_builder.specify "should not replace if Empty term" <| a.replace '' "#" . to_vector . should_equal ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"] a.replace d "#" . to_vector . 
should_equal ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"] - Test.specify "should infer correct return type" <| + group_builder.specify "should infer correct return type" <| c = table_builder [["texts", ["foo", "bar"]]] . at "texts" c1 = c.replace "a" "---" @@ -1284,7 +1283,7 @@ spec setup = vt1.should_be_a (Value_Type.Char ...) vt1.variable_length.should_be_true - Test.specify "should infer correct return type (2)" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <| + group_builder.specify "should infer correct return type (2)" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <| c = table_builder [["texts", ["foo", "bar"]]] . at "texts" c2 = c.cast (Value_Type.Char size=2 variable_length=False) c3 = c2.replace "a" "---" @@ -1295,20 +1294,20 @@ spec setup = vt3.should_be_a (Value_Type.Char ...) vt3.variable_length.should_be_true - Test.group prefix+"Column Operations - Text Trim" <| + suite_builder.group prefix+"Column Operations - Text Trim" group_builder-> t5 = table_builder [["A", [" A ", ' \t\n\rA\r\n\t ', "xxxAxx"]], ["B", [" ",' \t',"x"]], ["C", [1,2,3]]] a = t5.at "A" b = t5.at "B" c = t5.at "C" - Test.specify "should trim whitespace by default" <| + group_builder.specify "should trim whitespace by default" <| with_mixed_columns_if_supported [["A", [" A ", ' \t\n\rA\r\n\t ', "xxxAxx"]]] t-> a = t.at "A" a.trim . to_vector . should_equal ["A", "A", "xxxAxx"] a.trim Location.Start . to_vector . should_equal ["A ", 'A\r\n\t ', "xxxAxx"] a.trim Location.End . to_vector . should_equal [" A", ' \t\n\rA', "xxxAxx"] - Test.specify "should trim custom characters" <| + group_builder.specify "should trim custom characters" <| a.trim what='x' . to_vector . should_equal [" A ", ' \t\n\rA\r\n\t ', "A"] a.trim what='x' Location.Start . to_vector . 
should_equal [" A ", ' \t\n\rA\r\n\t ', "Axx"] a.trim what='x' Location.End . to_vector . should_equal [" A ", ' \t\n\rA\r\n\t ', "xxxA"] @@ -1317,13 +1316,13 @@ spec setup = a.trim what=' \r' . to_vector . should_equal ["A", '\t\n\rA\r\n\t', "xxxAxx"] a.trim what=b . to_vector . should_equal ["A", '\n\rA\r\n', "A"] - Test.specify "should only allow trim on Text columns" <| + group_builder.specify "should only allow trim on Text columns" <| c.trim what="a" . should_fail_with Invalid_Value_Type a.trim what=1 . should_fail_with Invalid_Value_Type a.trim what=c . should_fail_with Invalid_Value_Type - Test.group prefix+"Other Column Operations" <| - Test.specify "is_in" <| + suite_builder.group prefix+"Other Column Operations" group_builder-> + group_builder.specify "is_in" <| t = table_builder [["X", [1, 2, 3, 4]], ["Y", [4, 3, 100, 200]]] x = t.at "X" y = t.at "Y" @@ -1336,9 +1335,9 @@ spec setup = c2.to_vector . should_equal [False, False, True, True] c2.value_type.should_equal Value_Type.Boolean - Test.group prefix+"Colum Operations - Names" <| + suite_builder.group prefix+"Colum Operations - Names" group_builder-> t = table_builder [["a", [1, 2, 3]], ["b", ['x', 'y', 'z']], ["c", [1.0, 2.0, 3.0]], ["d", [True, False, True]]] - Test.specify "arithmetic" <| + group_builder.specify "arithmetic" <| ((t.at "a") + 42) . name . should_equal "[a] + 42" ((t.at "a") - (t.at "c")) . name . should_equal "[a] - [c]" ((t.at "a") * (t.at "c")) . name . should_equal "[a] * [c]" @@ -1348,7 +1347,7 @@ spec setup = ((t.at "b") + 'xyz') . name . should_equal "[b] + 'xyz'" ((t.at "b") + '\0') . name . should_equal "[b] + '\0'" - Test.specify "comparison" <| + group_builder.specify "comparison" <| ((t.at "b") == '\0\0') . name . should_equal "[b] == '\0\0'" ((t.at "b") != '\0\0') . name . should_equal "[b] != '\0\0'" ((t.at "a") < 0) . name . should_equal "[a] < 0" @@ -1357,13 +1356,13 @@ spec setup = ((t.at "b") >= 'X') . name . 
should_equal "[b] >= 'X'" ((t.at "a").between (t.at "c") 42) . name . should_equal "[a] between [c] and 42" - Test.specify "logical" <| + group_builder.specify "logical" <| ((t.at "d") || False) . name . should_equal "[d] || False" ((t.at "d") && True) . name . should_equal "[d] && True" ((t.at "d").not) . name . should_equal "not [d]" ((t.at "d").iif 10 20) . name . should_equal "if [d] then 10 else 20" - Test.specify "text" <| + group_builder.specify "text" <| t.at "b" . equals_ignore_case "abc" . name . should_equal "equals_ignore_case([b], 'abc')" t.at "b" . starts_with "abc" . name . should_equal "starts_with([b], 'abc')" t.at "b" . contains "abc" . name . should_equal "contains([b], 'abc')" @@ -1372,37 +1371,37 @@ spec setup = t.at "b" . is_empty . name . should_equal "[b] is empty" t.at "b" . fill_empty "" . name . should_equal "b" - Test.specify "nulls" <| + group_builder.specify "nulls" <| t.at "a" . coalesce [Nothing, 42] . name . should_equal "coalesce([a], Nothing, 42)" t.at "a" . is_nothing . name . should_equal "[a] is Nothing" t.at "a" . is_present . name . should_equal "is_present([a])" t.at "a" . is_blank . name . should_equal "is_blank([a])" t.at "a" . fill_nothing 100 . name . should_equal "a" - Test.specify "misc" + group_builder.specify "misc" t.at "a" . min [1, 2] . name . should_equal "min([a], 1, 2)" t.at "a" . max 33 . name . should_equal "max([a], 33)" t.at "a" . is_in [1, 2, 3] . name . should_equal "[a] in [1, 2, 3]" - Test.specify "composed operations" <| + group_builder.specify "composed operations" <| # These look a bit weird, but they are consistent with the column name escaping scheme. ((t.at "a" + 42) * (t.at "c")) . name . should_equal "[[a] + 42] * [c]" ((t.at "a" + 42) * (t.at "c") - 33) . name . should_equal "[[[a] + 42] * [c]] - 33" - Test.specify "sort" <| + group_builder.specify "sort" <| t.at "a" . sort . name . 
should_equal "a" if setup.is_database.not then - Test.specify "parse" <| + group_builder.specify "parse" <| t2 = table_builder [["X", ["1", "2", "3"]]] t2.at "X" . parse . name . should_equal "X" - Test.specify "map and zip" <| + group_builder.specify "map and zip" <| t.at "a" . map (x -> x + 1) . name . should_equal "a" t.at "a" . zip (t.at "b") [_, _] . name . should_equal "[a] x [b]" - Test.group prefix+"Column.rename" <| - Test.specify "should not allow illegal names" <| + suite_builder.group prefix+"Column.rename" group_builder-> + group_builder.specify "should not allow illegal names" <| t = table_builder [["a", [1, 2, 3]]] c = t.at "a" @@ -1411,38 +1410,38 @@ spec setup = c.rename 'a\0b' . should_fail_with Invalid_Column_Names c.rename '\0' . should_fail_with Invalid_Column_Names - Test.group prefix+"Column.const" <| - Test.specify "Should allow the creation of constant columns" <| + suite_builder.group prefix+"Column.const" group_builder-> + group_builder.specify "Should allow the creation of constant columns" <| t = table_builder [["x", ["1", "2", "3"]]] t.at "x" . const True . to_vector . should_equal [True, True, True] t.at "x" . const 12 . to_vector . should_equal [12, 12, 12] t.at "x" . const 12.3 . to_vector . should_equal [12.3, 12.3, 12.3] t.at "x" . const "asdf" . to_vector . should_equal ["asdf", "asdf", "asdf"] - Test.specify "Should create the correct column name" <| + group_builder.specify "Should create the correct column name" <| t = table_builder [["x", ["1", "2", "3"]]] t.at "x" . const 12 . name . should_equal "12" - Test.specify "Should not allow the creation of a constant column of columns" <| + group_builder.specify "Should not allow the creation of a constant column of columns" <| t = table_builder [["x", ["1", "2", "3"]]] t.at "x" . const (t.at "x") . 
should_fail_with Illegal_Argument - Test.group prefix+"Table.make_constant_column" <| - Test.specify "Should allow the creation of constant columns" <| + suite_builder.group prefix+"Table.make_constant_column" group_builder-> + group_builder.specify "Should allow the creation of constant columns" <| t = table_builder [["x", ["1", "2", "3"]]] t.make_constant_column True . to_vector . should_equal [True, True, True] t.make_constant_column 12 . to_vector . should_equal [12, 12, 12] t.make_constant_column 12.3 . to_vector . should_equal [12.3, 12.3, 12.3] t.make_constant_column "asdf" . to_vector . should_equal ["asdf", "asdf", "asdf"] - Test.specify "Should allow the creation of constant columns on a table with no rows" <| + group_builder.specify "Should allow the creation of constant columns on a table with no rows" <| t = table_builder [["x", ["1", "2", "3"]]] empty = t.take 0 constant = empty.make_constant_column 42 empty2 = empty.set constant empty2.column_names.should_equal ['x', '42'] - Test.specify "Should create a column of the correct type on a table with no rows" <| + group_builder.specify "Should create a column of the correct type on a table with no rows" <| t = table_builder [["x", ["1", "2", "3"]]] empty = t.take 0 [[False, .is_boolean], [42, .is_integer], ["42", .is_text], ["foo", .is_text], [1.1, .is_floating_point]].map pair-> @@ -1453,7 +1452,7 @@ spec setup = pred ((empty.set c).at c.name . value_type) . 
should_be_true nulls_db_pending = if setup.is_database then "Empty NULL columns are unsupported in the database backends" - Test.specify "Should create a column of the correct type on a table with no rows" pending=nulls_db_pending <| + group_builder.specify "Should create a column of the correct type on a table with no rows" pending=nulls_db_pending <| t = table_builder [["x", ["1", "2", "3"]]] empty = t.take 0 c = empty.make_constant_column Nothing diff --git a/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso index 4dc02069e592..eb9c937ce41b 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso @@ -7,14 +7,12 @@ from Standard.Table.Errors import Missing_Input_Columns, Conversion_Failure from Standard.Database.Errors import Unsupported_Database_Operation -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend polyglot java import java.lang.Long as Java_Long -main = run_default_backend spec type My_Type Value x @@ -22,14 +20,14 @@ type My_Type to_text : Text to_text self = "{{{MY Type [x="+self.x.to_text+"] }}}" -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder materialize = setup.materialize supports_dates = setup.test_selection.date_time supports_conversion_failure_reporting = setup.is_database.not - Test.group prefix+"Table/Column.cast - to text" <| - Test.specify "should allow to cast columns of various basic types to text" <| + suite_builder.group prefix+"Table/Column.cast - to text" group_builder-> + group_builder.specify "should allow to cast columns of various basic types to text" <| t = table_builder [["X", [1, 2, 3000]], ["Y", [True, False, True]], ["Z", [1.5, 0.125, -2.5]], ["W", ["a", "DEF", "a 
slightly longer text"]]] t2 = t.cast t.column_names Value_Type.Char t2.at "X" . value_type . is_text . should_be_true @@ -44,7 +42,7 @@ spec setup = t2.at "W" . to_vector . should_equal ["a", "DEF", "a slightly longer text"] if supports_dates then - Test.specify "should allow to cast date/time columns to text" <| + group_builder.specify "should allow to cast date/time columns to text" <| t = table_builder [["X", [Date.new 2015 1 1, Date.new 2023 12 31]], ["Y", [Time_Of_Day.new 1 2 3, Time_Of_Day.new 23 57 59]], ["Z", [Date_Time.new 2015 1 1 1 2 3, Date_Time.new 2023 11 30 22 45 44]]] t2 = t.cast t.column_names Value_Type.Char t2.at "X" . value_type . is_text . should_be_true @@ -61,20 +59,20 @@ spec setup = vz.second . should_contain "22:45:44" if setup.is_database.not then - Test.specify "should allow to cast a column of objects to text" <| + group_builder.specify "should allow to cast a column of objects to text" <| t = table_builder [["X", [My_Type.Value 42, My_Type.Value "X"]]] c = t.at "X" . cast Value_Type.Char c.value_type.is_text . should_be_true c.to_vector . should_equal ["{{{MY Type [x=42] }}}", "{{{MY Type [x=X] }}}"] - Test.specify "should allow to cast an integer column to a decimal type" <| + group_builder.specify "should allow to cast an integer column to a decimal type" <| t = table_builder [["X", [1, 2, 3]]] c = t.at "X" . cast Value_Type.Decimal c.value_type.is_decimal . should_be_true c.to_vector . should_equal [1, 2, 3] if setup.test_selection.fixed_length_text_columns then - Test.specify "should allow to cast a text column to fixed-length" <| + group_builder.specify "should allow to cast a text column to fixed-length" <| t = table_builder [["X", ["a", "DEF", "a slightly longer text"]]] c = t.at "X" . cast (Value_Type.Char size=3 variable_length=False) c.value_type . 
should_equal (Value_Type.Char size=3 variable_length=False) @@ -83,7 +81,7 @@ spec setup = # No Conversion_Failure warning here, because we started with text, so it was expected we will trim it if needed. Problems.assume_no_problems c - Test.specify "should allow to cast a text column to variable-length with a max size" <| + group_builder.specify "should allow to cast a text column to variable-length with a max size" <| t = table_builder [["X", ["a", "DEF", "a slightly longer text"]]] c = t.at "X" . cast (Value_Type.Char size=3 variable_length=True) c.value_type . should_equal (Value_Type.Char size=3 variable_length=True) @@ -92,14 +90,14 @@ spec setup = # No Conversion_Failure warning here, because we started with text, so it was expected we will trim it if needed. Problems.assume_no_problems c - Test.specify "should allow casting a non-text column to fixed-length text" <| + group_builder.specify "should allow casting a non-text column to fixed-length text" <| t = table_builder [["X", [1, 22, 333]]] c = t.at "X" . cast (Value_Type.Char size=3 variable_length=False) c.value_type . should_equal (Value_Type.Char size=3 variable_length=False) c.to_vector . should_equal ["1 ", "22 ", "333"] Problems.assume_no_problems c - Test.specify "should warn when losing data if the fixed-length text length is too short to fit the data" pending=(if supports_conversion_failure_reporting.not then "Conversion_Failure is not supported in Database yet.") <| + group_builder.specify "should warn when losing data if the fixed-length text length is too short to fit the data" pending=(if supports_conversion_failure_reporting.not then "Conversion_Failure is not supported in Database yet.") <| t = table_builder [["X", [15, 1000000, 123456, 1000, 1000]]] c1 = t.at "X" . cast (Value_Type.Char size=3 variable_length=False) c1.value_type . 
should_equal (Value_Type.Char size=3 variable_length=False) @@ -115,7 +113,7 @@ spec setup = w2 = Problems.expect_warning Conversion_Failure c2 w2.affected_rows_count . should_equal 4 - Test.specify "should not allow 0-length Char type" <| + group_builder.specify "should not allow 0-length Char type" <| c1 = table_builder [["X", ["a", "", "bcd"]]] . at "X" r1 = c1.cast (Value_Type.Char size=0 variable_length=False) r1.should_fail_with Illegal_Argument @@ -124,8 +122,8 @@ spec setup = r2 = c1.cast (Value_Type.Char size=0 variable_length=True) r2.should_fail_with Illegal_Argument - Test.group prefix+"Table/Column.cast - numeric" <| - Test.specify "should allow to cast a boolean column to integer" <| + suite_builder.group prefix+"Table/Column.cast - numeric" group_builder-> + group_builder.specify "should allow to cast a boolean column to integer" <| t = table_builder [["X", [True, False, True]]] c = t.at "X" . cast Value_Type.Integer vt = c.value_type @@ -133,14 +131,14 @@ spec setup = vt.is_integer . should_be_true c.to_vector . should_equal [1, 0, 1] - Test.specify "should allow to cast an integer column to floating point" <| + group_builder.specify "should allow to cast an integer column to floating point" <| t = table_builder [["X", [1, 2, 3]]] c = t.at "X" . cast Value_Type.Float c.value_type.is_floating_point . should_be_true c.to_vector . should_equal [1.0, 2.0, 3.0] if setup.test_selection.different_size_integer_types then - Test.specify "should allow to cast an integer column to a smaller bit-width and larger bit-width" <| + group_builder.specify "should allow to cast an integer column to a smaller bit-width and larger bit-width" <| t = table_builder [["X", [1, 2, 3]]] c = t.at "X" . cast (Value_Type.Integer Bits.Bits_16) c.value_type . should_equal (Value_Type.Integer Bits.Bits_16) @@ -166,7 +164,7 @@ spec setup = c3.to_vector . 
should_equal [1, 2, 3] if setup.test_selection.supports_8bit_integer then - Test.specify "should allow to cast an integer column to a byte and back" <| + group_builder.specify "should allow to cast an integer column to a byte and back" <| t = table_builder [["X", [1, 2, 3]]] c1 = t.at "X" . cast Value_Type.Byte c1.value_type . should_equal Value_Type.Byte @@ -194,7 +192,7 @@ spec setup = c3.value_type . should_equal (Value_Type.Integer Bits.Bits_32) c3.to_vector . should_equal [1, 2, Nothing, Nothing, 0] - Test.specify "should allow to cast a floating point column to integer" <| + group_builder.specify "should allow to cast a floating point column to integer" <| t = table_builder [["X", [1.0001, 2.25, 4.0]]] c = t.at "X" . cast Value_Type.Integer vt = c.value_type @@ -211,7 +209,7 @@ spec setup = [[1, 4], [1, 5]] . should_contain v2 if setup.is_database.not then - Test.specify "should report Conversion_Failure if converting a huge float to an integer overflows it" <| + group_builder.specify "should report Conversion_Failure if converting a huge float to an integer overflows it" <| max_long = Java_Long.MAX_VALUE too_big_double = (max_long + 1.0) * 1000.0 (too_big_double > max_long) . should_be_true @@ -233,20 +231,20 @@ spec setup = warning.to_display_text . should_contain too_big_double.to_text if supports_dates then - Test.group prefix+"Table/Column.cast - date/time" <| - Test.specify "should allow to get the Date part from a Date_Time" <| + suite_builder.group prefix+"Table/Column.cast - date/time" group_builder-> + group_builder.specify "should allow to get the Date part from a Date_Time" <| t = table_builder [["X", [Date_Time.new 2015 1 2 3 4 5, Date_Time.new 2023 12 31 23 56 59]]] c = t.at "X" . cast Value_Type.Date c.value_type . should_equal Value_Type.Date c.to_vector . 
should_equal [Date.new 2015 1 2, Date.new 2023 12 31] - Test.specify "should allow to get the Time_Of_Day part from a Date_Time" <| + group_builder.specify "should allow to get the Time_Of_Day part from a Date_Time" <| t = table_builder [["X", [Date_Time.new 2015 1 2 3 4 5, Date_Time.new 2023 12 31 23 56 59]]] c = t.at "X" . cast Value_Type.Time c.value_type . should_equal Value_Type.Time c.to_vector . should_equal [Time_Of_Day.new 3 4 5, Time_Of_Day.new 23 56 59] - Test.specify "should allow to convert a Date into Date_Time" <| + group_builder.specify "should allow to convert a Date into Date_Time" <| day1 = Date.new 2015 1 2 day2 = Date.new 2023 12 31 t = table_builder [["X", [day1, day2]]] @@ -258,19 +256,19 @@ spec setup = expected_diff = Duration.between day1.to_date_time day2.to_date_time diff . should_equal expected_diff - Test.group prefix+"Table/Column.cast - checking invariants" <| - Test.specify "should report an error for unsupported conversions" <| + suite_builder.group prefix+"Table/Column.cast - checking invariants" group_builder-> + group_builder.specify "should report an error for unsupported conversions" <| t = table_builder [["X", [1, 2, 3]]] r1 = t.at "X" . cast Value_Type.Boolean r1.should_fail_with Illegal_Argument - Test.specify "should report an error pointing to the Table.parse method where applicable" <| + group_builder.specify "should report an error pointing to the Table.parse method where applicable" <| t = table_builder [["X", ["1", "2", "3"]]] r1 = t.at "X" . cast Value_Type.Integer r1.should_fail_with Illegal_Argument r1.to_display_text . should_contain "`parse` should be used instead" - Test.specify "should report missing columns" <| + group_builder.specify "should report missing columns" <| t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] r1 = t.cast ["X", "Z"] Value_Type.Char @@ -287,7 +285,7 @@ spec setup = t3.at "X" . to_vector . should_equal [1, 2, 3] t3.at "Y" . to_vector . 
should_equal [4, 5, 6] - Test.specify "should work if the first row is NULL" <| + group_builder.specify "should work if the first row is NULL" <| t = table_builder [["X", [Nothing, 1, 2, 3000]], ["Y", [Nothing, True, False, True]]] c1 = t.at "X" . cast Value_Type.Char @@ -298,7 +296,7 @@ spec setup = c2.value_type . should_equal Value_Type.Integer c2.to_vector . should_equal [Nothing, 1, 0, 1] - Test.specify "should not lose the type after further operations were performed on the result" <| + group_builder.specify "should not lose the type after further operations were performed on the result" <| t = table_builder [["X", [1, 2, 3000]], ["Y", [True, False, True]]] c1 = t.at "X" . cast Value_Type.Char c2 = t.at "Y" . cast Value_Type.Integer @@ -313,7 +311,7 @@ spec setup = vt4.is_integer . should_be_true c4.to_vector . should_equal [1001, 1000, 1001] - Test.specify "should not lose the type after further operations were performed on the result, even if the first row is NULL" <| + group_builder.specify "should not lose the type after further operations were performed on the result, even if the first row is NULL" <| t = table_builder [["X", [Nothing, 1, 2, 3000]], ["Y", [Nothing, True, False, True]]] c1 = t.at "X" . cast Value_Type.Char c2 = t.at "Y" . cast Value_Type.Integer @@ -328,7 +326,7 @@ spec setup = vt4.is_integer . should_be_true c4.to_vector . should_equal [Nothing, 1001, 1000, 1001] - Test.specify 'Table.cast should cast the columns "in-place" and not reorder them' <| + group_builder.specify 'Table.cast should cast the columns "in-place" and not reorder them' <| t = table_builder [["X", [1, 2, 3000]], ["Y", [4, 5, 6]], ["Z", [7, 8, 9]], ["A", [True, False, True]]] t2 = t.cast ["Z", "Y"] Value_Type.Char t2.column_names . should_equal ["X", "Y", "Z", "A"] @@ -344,14 +342,14 @@ spec setup = t2.at "A" . to_vector . 
should_equal [True, False, True] if setup.test_selection.different_size_integer_types then - Test.specify "should preserve the overridden types when materialized (Integer)" <| + group_builder.specify "should preserve the overridden types when materialized (Integer)" <| t = table_builder [["X", [1, 2, 100]]] t2 = t . cast "X" (Value_Type.Integer Bits.Bits_16) t3 = materialize t2 t3.at "X" . value_type . should_equal (Value_Type.Integer Bits.Bits_16) if setup.test_selection.fixed_length_text_columns then - Test.specify "should preserve the overridden types when materialized (Char)" <| + group_builder.specify "should preserve the overridden types when materialized (Char)" <| t = table_builder [["Y", ["a", "abcdef", "abc"]]] t2 = t . cast "Y" (Value_Type.Char size=3 variable_length=False) t3 = materialize t2 @@ -359,7 +357,7 @@ spec setup = t3.at "Y" . to_vector . should_equal ["a ", "abc", "abc"] if setup.is_database.not then - Test.specify "should allow converting a Mixed type back to a specific type" <| + group_builder.specify "should allow converting a Mixed type back to a specific type" <| t1 = table_builder [["A", [1, Nothing, 2]], ["B", [1.5, Nothing, 2.5]], ["C", [Nothing, "x", "y"]], ["D", [Nothing, True, False]]] m1 = t1.cast t1.column_names Value_Type.Mixed ["A", "B", "C", "D"].each c-> @@ -381,7 +379,7 @@ spec setup = m2.at "F" . cast Value_Type.Time . value_type . should_equal Value_Type.Time m2.at "G" . cast Value_Type.Date_Time . value_type . should_equal Value_Type.Date_Time - Test.specify "will extract matching values from a mixed column and replace unconvertible ones with Nothing" <| + group_builder.specify "will extract matching values from a mixed column and replace unconvertible ones with Nothing" <| t0 = table_builder [["X", ["a", "b", "c", My_Type.Value 42]]] t0.at "X" . value_type . should_equal Value_Type.Mixed r0 = t0.cast ["X"] Value_Type.Integer @@ -459,7 +457,7 @@ spec setup = w7.affected_rows_count . 
should_equal 6+3+1 if setup.is_database.not then - Test.specify "should fail if there is no conversion available for a given type" <| + group_builder.specify "should fail if there is no conversion available for a given type" <| t = table_builder [["X", [1, 2, 3]]] # currently unsupported @@ -470,8 +468,8 @@ spec setup = r3 = t.cast "X" (Value_Type.Unsupported_Data_Type "foobar" "foobar") r3.should_fail_with Illegal_Argument - Test.group prefix+"Simple variant of Table/Column.parse in all backends" <| - Test.specify "should be able to parse simple integers" <| + suite_builder.group prefix+"Simple variant of Table/Column.parse in all backends" group_builder-> + group_builder.specify "should be able to parse simple integers" <| t = table_builder [["X", ["42", "0", "-1"]]] c1 = t.at "X" . parse Value_Type.Integer @@ -482,7 +480,7 @@ spec setup = c2.value_type.is_integer . should_be_true c2.to_vector . should_equal [42, 0, -1] - Test.specify "should be able to parse simple floats" <| + group_builder.specify "should be able to parse simple floats" <| t = table_builder [["X", ["42.5", "0.25", "-1.0"]]] c1 = t.at "X" . parse Value_Type.Float @@ -494,7 +492,7 @@ spec setup = c2.to_vector . should_equal [42.5, 0.25, -1.0] if supports_dates then - Test.specify "should be able to parse dates using a default format" <| + group_builder.specify "should be able to parse dates using a default format" <| t = table_builder [["X", ["2018-01-01", "2023-12-31"]]] c1 = t.at "X" . parse Value_Type.Date @@ -505,7 +503,7 @@ spec setup = c2.value_type.should_equal Value_Type.Date c2.to_vector . should_equal [Date.new 2018 1 1, Date.new 2023 12 31] if supports_dates.not then - Test.specify "should report that date parsing is unsupported" <| + group_builder.specify "should report that date parsing is unsupported" <| t = table_builder [["X", ["2018-01-01", "2023-12-31"]]] r1 = t.at "X" . 
parse Value_Type.Date @@ -514,7 +512,7 @@ spec setup = r2 = t.parse ["X"] Value_Type.Date r2.should_fail_with Unsupported_Database_Operation - Test.specify "should be able to parse booleans with default format" <| + group_builder.specify "should be able to parse booleans with default format" <| t = table_builder [["X", ["true", "false", "true"]]] c1 = t.at "X" . parse Value_Type.Boolean @@ -525,7 +523,7 @@ spec setup = c2.value_type.should_equal Value_Type.Boolean c2.to_vector . should_equal [True, False, True] - Test.specify "should report missing columns" <| + group_builder.specify "should report missing columns" <| t = table_builder [["X", ["42", "0", "-1"]]] t1 = t.parse ["X", "Y"] Value_Type.Integer error_on_missing_columns=False @@ -541,26 +539,26 @@ spec setup = r3.should_fail_with Missing_Input_Columns r3.catch.criteria . should_equal ["Y"] - if setup.is_database then Test.group prefix+"Table/Column auto value type" <| - Test.specify "should report unsupported" <| + if setup.is_database then suite_builder.group prefix+"Table/Column auto value type" group_builder-> + group_builder.specify "should report unsupported" <| t = table_builder [["X", [1, 2, 3]]] t.auto_value_types . should_fail_with Unsupported_Database_Operation t.at "X" . auto_value_type . should_fail_with Unsupported_Database_Operation # The in-memory functionality of `expand_column` is tested in test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso - if setup.is_database then Test.group prefix+"Table.expand_column" <| - Test.specify "should report unsupported" <| + if setup.is_database then suite_builder.group prefix+"Table.expand_column" group_builder-> + group_builder.specify "should report unsupported" <| table = table_builder [["aaa", [1, 2]], ["bbb", [3, 4]], ["ccc", [5, 6]]] table.expand_column "bbb" . 
should_fail_with Unsupported_Database_Operation # The in-memory functionality of `expand_to_rows` is tested in test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso - if setup.is_database then Test.group prefix+"Table.expand_to_rows" <| - Test.specify "should report unsupported" <| + if setup.is_database then suite_builder.group prefix+"Table.expand_to_rows" group_builder-> + group_builder.specify "should report unsupported" <| table = table_builder [["aaa", [1, 2]], ["bbb", [3, 4]], ["ccc", [5, 6]]] table.expand_to_rows "bbb" . should_fail_with Unsupported_Database_Operation - if setup.is_database.not then Test.group prefix+"Table/Column auto value type" <| - Test.specify "should allow to narrow down types of a Mixed column" <| + if setup.is_database.not then suite_builder.group prefix+"Table/Column auto value type" group_builder-> + group_builder.specify "should allow to narrow down types of a Mixed column" <| [True, False].each shrink_types-> mixer = My_Type.Value 1 t0 = table_builder [["strs", [mixer, "a", "b"]], ["ints", [mixer, 2, 3]], ["floats", [mixer, 1.5, 2.5]], ["mix", [1, mixer, "a"]], ["dates", [mixer, Date.new 2022, Date.new 2020]], ["datetimes", [mixer, Date_Time.new 2022 12 30 13 45, Date_Time.new 2020]], ["times", [mixer, Time_Of_Day.new 12 30, Time_Of_Day.new 13 45]], ["mixed_time", [Date.new 2022, Time_Of_Day.new 12 30, Date_Time.new 2019]], ["bools", [mixer, True, False]]] @@ -588,7 +586,7 @@ spec setup = t2.at "mixed_time" . value_type . should_equal Value_Type.Mixed t2.at "bools" . value_type . should_equal Value_Type.Boolean - Test.specify "will only modify selected columns" <| + group_builder.specify "will only modify selected columns" <| mixer = My_Type.Value 1 t0 = table_builder [["strs", [mixer, "a", "b"]], ["ints", [mixer, 2, 3]], ["floats", [mixer, 1.5, 2.5]]] t1 = t0.drop 1 @@ -609,7 +607,7 @@ spec setup = t4.at "ints" . value_type . should_equal Value_Type.Integer t4.at "floats" . value_type . 
should_equal Value_Type.Float - Test.specify "will convert a Float column to Integer if all values can be represented as long" <| + group_builder.specify "will convert a Float column to Integer if all values can be represented as long" <| t1 = table_builder [["X", [1.0, 2.0, 3.0]], ["Y", [1.0, 2.5, 3.0]], ["Z", [1.0, 2.0, (2.0^100)]]] t1.at "X" . value_type . should_equal Value_Type.Float t1.at "Y" . value_type . should_equal Value_Type.Float @@ -627,12 +625,12 @@ spec setup = automatically. t2.at "Z" . value_type . should_equal Value_Type.Float - Test.specify "will not parse text columns" <| + group_builder.specify "will not parse text columns" <| t1 = table_builder [["X", ["1", "2", "3"]]] c2 = t1.at "X" . auto_value_type c2.value_type . should_equal Value_Type.Char - Test.specify "will 'undo' a cast to Mixed" <| + group_builder.specify "will 'undo' a cast to Mixed" <| t1 = table_builder [["X", [1, 2, 3]], ["Y", ["a", "b", "c"]]] t2 = t1.cast ["X", "Y"] Value_Type.Mixed t2.at "X" . value_type . should_equal Value_Type.Mixed @@ -642,7 +640,7 @@ spec setup = t3.at "X" . value_type . should_equal Value_Type.Integer t3.at "Y" . value_type . should_equal Value_Type.Char - Test.specify "will choose Decimal type if all values are integers but cannot fit long" <| + group_builder.specify "will choose Decimal type if all values are integers but cannot fit long" <| c0 = table_builder [["X", [My_Type.Value 42, 1, 2, (2^100)+1]]] . at "X" c1 = c0.drop 1 @@ -651,7 +649,7 @@ spec setup = c2.value_type . should_be_a (Value_Type.Decimal ...) c2.to_vector . 
should_equal [1, 2, (2^100)+1] - Test.specify "will try to find the smallest integer type to fit the value (if shrink_types=True)" <| + group_builder.specify "will try to find the smallest integer type to fit the value (if shrink_types=True)" <| [False, True].each is_mixed-> prefix = if is_mixed then "mixed" else 0 t0 = table_builder [["X", [prefix, 1, 2, 3]], ["Y", [prefix, 2^20, 2, 3]], ["Z", [prefix, 2^50, 2, 3]], ["F", [prefix, 1.0, 2.0, 3.0]]] @@ -679,7 +677,7 @@ spec setup = # Shrinking Floats also finds the smallest type that fits. t3.at "F" . value_type . should_equal (Value_Type.Integer Bits.Bits_16) - Test.specify "will not return Byte columns by default, but should leave existing Byte columns intact" <| + group_builder.specify "will not return Byte columns by default, but should leave existing Byte columns intact" <| c1 = table_builder [["X", [1, 2, 3]]] . at "X" . cast Value_Type.Byte c1.value_type . should_equal Value_Type.Byte @@ -687,7 +685,7 @@ spec setup = c2 = c1.auto_value_type shrink_types=shrink_types c2.value_type . should_equal Value_Type.Byte - Test.specify "Decimal (scale=0, i.e. integer) columns should also be shrinked if possible and shrink_types=True" <| + group_builder.specify "Decimal (scale=0, i.e. integer) columns should also be shrinked if possible and shrink_types=True" <| t0 = table_builder [["X", [2^100, 1, 2, 3]], ["Y", [10, 20, 2^100, 30]], ["Z", [1, 2, 3, 4]]] . cast "Z" (Value_Type.Decimal scale=0) t1 = t0.drop 1 @@ -708,7 +706,7 @@ spec setup = t3.at "Y" . value_type . should_equal (Value_Type.Decimal scale=0) t3.at "Z" . value_type . 
should_equal (Value_Type.Integer Bits.Bits_16) - Test.specify "if all text values have the same length, will change the type to fixed-length string (if shrink_types=True)" <| + group_builder.specify "if all text values have the same length, will change the type to fixed-length string (if shrink_types=True)" <| [False, True].each is_mixed-> prefix = if is_mixed then 42 else "FOOBARBAZ" c0 = table_builder [["X", [prefix, "aa", "bb", "cc"]]] . at "X" @@ -735,7 +733,7 @@ spec setup = c6 = c4.auto_value_type shrink_types=True c6.value_type . should_equal (Value_Type.Char size=1 variable_length=False) - Test.specify "if all text values are empty string, the type will remain unchanged" <| + group_builder.specify "if all text values are empty string, the type will remain unchanged" <| c1 = table_builder [["X", ["", ""]]] . at "X" c2 = c1.cast (Value_Type.Char size=100 variable_length=True) @@ -749,7 +747,7 @@ spec setup = c2_b = c2.auto_value_type shrink_types=shrink_types c2_b.value_type . should_equal (Value_Type.Char size=100 variable_length=True) - Test.specify "if all text values fit under 255 characters, will add a 255 length limit (if shrink_types=True)" <| + group_builder.specify "if all text values fit under 255 characters, will add a 255 length limit (if shrink_types=True)" <| t1 = table_builder [["short_unbounded", ["a", "bb", "ccc"]], ["long_unbounded", ["a"*100, "b"*200, "c"*300]]] t2 = t1 . set (t1.at "short_unbounded" . cast (Value_Type.Char size=1000)) "short_1000" . set (t1.at "short_unbounded" . cast (Value_Type.Char size=10)) "short_10" . set (t1.at "long_unbounded" . cast (Value_Type.Char size=400)) "long_400" . set (t1.at "short_unbounded" . cast Value_Type.Mixed) "short_mixed" @@ -774,7 +772,7 @@ spec setup = t4.at "long_unbounded" . value_type . should_equal (Value_Type.Char size=Nothing variable_length=True) t4.at "long_400" . value_type . 
should_equal (Value_Type.Char size=400 variable_length=True) - Test.specify "can deal with all-null columns" <| + group_builder.specify "can deal with all-null columns" <| t0 = table_builder [["mix", [My_Type.Value 1, Nothing, Nothing]], ["int", [42, Nothing, Nothing]], ["str", ["a", Nothing, Nothing]], ["float", [1.5, Nothing, Nothing]], ["decimal", [2^100, 2^10, 2]]] t1 = t0.drop 1 diff --git a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso index c4507ce198ee..f79c0e6cc3bd 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso @@ -9,14 +9,12 @@ import Standard.Table.Data.Expression.Expression_Error from Standard.Database import all from Standard.Database.Errors import Integrity_Error -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all -from project.Common_Table_Operations.Util import run_default_backend -main = run_default_backend spec +from project.Common_Table_Operations.Util import run_default_backend -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder table_fn = @@ -30,15 +28,15 @@ spec setup = table_builder [col1, col2, col3, col4, col5, col6, col7] table = table_fn - Test.group prefix+"Table.at" <| - Test.specify "should allow selecting columns by name" <| + suite_builder.group prefix+"Table.at" group_builder-> + group_builder.specify "should allow selecting columns by name" <| column_1 = table.at "bar" column_1.name . should_equal "bar" column_1.to_vector . should_equal [4, 5, 6] table.at "nonexistent column name" . should_fail_with No_Such_Column - Test.specify "should allow selecting columns by index" <| + group_builder.specify "should allow selecting columns by index" <| column_1 = table.at column_1.name . should_equal "foo" column_1.to_vector . 
should_equal [1, 2, 3] @@ -65,12 +63,12 @@ spec setup = table.at 100 . should_fail_with Index_Out_Of_Bounds - Test.specify "should fail with Type Error is not an Integer or Text" <| + group_builder.specify "should fail with Type Error is not an Integer or Text" <| table.at (Pair.new 1 2) . should_fail_with Illegal_Argument table.at (Pair.new 1 2) . catch . to_display_text . should_equal "Illegal Argument: expected 'selector' to be either a Text or an Integer, but got Pair." - Test.group prefix+"Table.get" <| - Test.specify "should allow selecting columns by name" <| + suite_builder.group prefix+"Table.get" group_builder-> + group_builder.specify "should allow selecting columns by name" <| column_1 = table.get "bar" column_1.name . should_equal "bar" column_1.to_vector . should_equal [4, 5, 6] @@ -78,7 +76,7 @@ spec setup = table.get "nonexistent column name" . should_equal Nothing table.get "nonexistent column name" column_1 . name . should_equal "bar" - Test.specify "should allow selecting columns by index" <| + group_builder.specify "should allow selecting columns by index" <| column_1 = table.get column_1.name . should_equal "foo" column_1.to_vector . should_equal [1, 2, 3] @@ -94,12 +92,12 @@ spec setup = table.get 100 . should_equal Nothing table.get 100 column_1 . name . should_equal "foo" - Test.specify "should fail with Type Error is not an Integer or Text" <| + group_builder.specify "should fail with Type Error is not an Integer or Text" <| table.get (Pair.new 1 2) . should_fail_with Illegal_Argument table.get (Pair.new 1 2) . catch . to_display_text . should_equal "Illegal Argument: expected 'selector' to be either a Text or an Integer, but got Pair." - Test.group prefix+"Table.set" <| - Test.specify "should allow adding a column" <| + suite_builder.group prefix+"Table.set" group_builder-> + group_builder.specify "should allow adding a column" <| bar2 = table.get "bar" . rename "bar2" t2 = table.set bar2 t2.column_names . 
should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "bar2"] @@ -108,10 +106,10 @@ spec setup = t3 = t2.set bar2 "bar3" t3.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "bar2", "bar3"] - Test.specify "should not allow illegal column names" <| + group_builder.specify "should not allow illegal column names" <| table.set (table.get "bar") new_name='a\0b' . should_fail_with Invalid_Column_Names - Test.specify "should allow replacing a column" <| + group_builder.specify "should allow replacing a column" <| foo = table.get "bar" . rename "foo" t2 = table.set foo t2.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] @@ -120,14 +118,14 @@ spec setup = t3 = t2.set foo "bar3" t3.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "bar3"] - Test.specify "should allow adding a column" <| + group_builder.specify "should allow adding a column" <| bar2 = table.get "bar" . rename "bar2" table.set bar2 set_mode=Set_Mode.Update . should_fail_with Missing_Column foo = table.get "bar" . rename "foo" table.set foo set_mode=Set_Mode.Add . should_fail_with Existing_Column - Test.specify "should not affect existing columns that depended on the old column being replaced" <| + group_builder.specify "should not affect existing columns that depended on the old column being replaced" <| t1 = table_builder [["X", [1,2,3]]] t2 = t1.set (t1.at "X" * 100) new_name="Y" t3 = t2.set "[X] + 10" new_name="Z" @@ -137,13 +135,13 @@ spec setup = t4.at "Y" . to_vector . should_equal [100, 200, 300] t4.at "Z" . to_vector . should_equal [11, 12, 13] - Test.specify "should gracefully handle expression failures" <| + group_builder.specify "should gracefully handle expression failures" <| t1 = table_builder [["X", [1,2,3]]] t1.set "[unknown] + 10" new_name="Z" . should_fail_with No_Such_Column t1.set "[[[[" . should_fail_with Expression_Error t1.set "[[[[" . catch . 
should_be_a Expression_Error.Syntax_Error - Test.specify "should forward expression problems" <| + group_builder.specify "should forward expression problems" <| t1 = table_builder [["X", [1.5, 2.0, 0.0]]] r1 = t1.set "([X] == 2) || ([X] + 0.5 == 2)" on_problems=Problem_Behavior.Ignore @@ -165,7 +163,7 @@ spec setup = problems2 = [Arithmetic_Error.Error "Division by zero (at rows [2])."] Problems.test_problem_handling action2 problems2 tester2 - Test.specify "should gracefully handle columns from different backends" <| + group_builder.specify "should gracefully handle columns from different backends" <| t1 = table_builder [["A", [1, 2, 3]]] alternative_connection = Database.connect (SQLite In_Memory) t0 = (Table.new [["X", [3, 2, 1]]]).select_into_database_table alternative_connection "T0" temporary=True @@ -176,11 +174,11 @@ spec setup = True -> r1.should_fail_with Integrity_Error False -> r1.should_fail_with Illegal_Argument - Test.group prefix+"Table.column_names" <| - Test.specify "should return the names of all columns" <| + suite_builder.group prefix+"Table.column_names" group_builder-> + group_builder.specify "should return the names of all columns" <| table.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] - Test.specify "should allow weird column names in all backends" <| + group_builder.specify "should allow weird column names in all backends" <| columns = weird_names.map_with_index ix-> name-> [name, [100+ix, 2, 3]] table = table_builder columns @@ -189,13 +187,13 @@ spec setup = weird_names.map_with_index ix-> name-> table.at name . to_vector . should_equal [100+ix, 2, 3] - Test.group prefix+"Table.column_count" <| - Test.specify "should allow getting the column count" <| + suite_builder.group prefix+"Table.column_count" group_builder-> + group_builder.specify "should allow getting the column count" <| table.column_count . 
should_equal 7 - Test.group prefix+"Table.rows" <| + suite_builder.group prefix+"Table.rows" group_builder-> table = table_builder [["X", [1, 2, 3, 4]], ["Y", [5, 6, 7, 8]], ["Z", ["A", "B", "C", "D"]]] - Test.specify "should allow to get a Vector of Table rows" <| + group_builder.specify "should allow to get a Vector of Table rows" <| rows = table.rows rows.length . should_equal 4 @@ -227,31 +225,31 @@ spec setup = rows.map .to_vector . should_equal [[1, 5, "A"], [2, 6, "B"], [3, 7, "C"], [4, 8, "D"]] - Test.specify "should let you get the first row" <| + group_builder.specify "should let you get the first row" <| first_row = table.first_row first_row . length . should_equal 3 first_row.at "X" . should_equal 1 first_row.at "Y" . should_equal 5 first_row.at "Z" . should_equal "A" - Test.specify "should let you get the second row" <| + group_builder.specify "should let you get the second row" <| second_row = table.second_row second_row . length . should_equal 3 second_row.at "X" . should_equal 2 second_row.at "Y" . should_equal 6 second_row.at "Z" . should_equal "B" - Test.specify "should let you get the last row" <| + group_builder.specify "should let you get the last row" <| last_row = table.last_row last_row . length . should_equal 3 last_row.at "X" . should_equal 4 last_row.at "Y" . should_equal 8 last_row.at "Z" . should_equal "D" - Test.specify "should fetch rows up to the specified limit" <| + group_builder.specify "should fetch rows up to the specified limit" <| table.rows max_rows=2 . map .to_vector . should_equal [[1, 5, "A"], [2, 6, "B"]] - Test.specify "should correctly handle errors" <| + group_builder.specify "should correctly handle errors" <| table.rows.at 5 . should_fail_with Index_Out_Of_Bounds err = table.rows.at -6 err.should_fail_with Index_Out_Of_Bounds @@ -262,13 +260,13 @@ spec setup = table.rows . at 0 . at "unknown" . 
should_fail_with No_Such_Column - Test.group prefix+"Table.read" <| + suite_builder.group prefix+"Table.read" group_builder-> t_big = table_builder [["X", (0.up_to 1500)]] t_small = table_builder [["X", (0.up_to 10)]] has_default_row_limit = setup.is_database - Test.specify "should have a row limit by default and warn about it" <| + group_builder.specify "should have a row limit by default and warn about it" <| t_big.row_count . should_equal 1500 t_small.row_count . should_equal 10 @@ -286,7 +284,7 @@ spec setup = t2.row_count . should_equal 10 Problems.assume_no_problems t2 - Test.specify "should allow to set the row limit" <| + group_builder.specify "should allow to set the row limit" <| t1 = t_big.read max_rows=23 t1.row_count . should_equal 23 w1 = Problems.expect_only_warning Not_All_Rows_Downloaded t1 @@ -302,12 +300,12 @@ spec setup = w3 = Problems.expect_only_warning Not_All_Rows_Downloaded t3 w3.max_rows . should_equal 1 - Test.specify "should allow to have no row limit" <| + group_builder.specify "should allow to have no row limit" <| t1 = t_big.read max_rows=Nothing t1.row_count . should_equal 1500 Problems.assume_no_problems t1 - Test.specify "should allow to turn off the warning" <| + group_builder.specify "should allow to turn off the warning" <| t1 = t_big.read warn_if_more_rows=False t1.row_count . should_equal (if has_default_row_limit then 1000 else 1500) Problems.assume_no_problems t1 @@ -320,7 +318,7 @@ spec setup = t3.row_count . should_equal 1500 Problems.assume_no_problems t3 - Test.specify "should also work as Column.read" <| + group_builder.specify "should also work as Column.read" <| c1 = t_big.at "X" c1.length . should_equal 1500 @@ -349,7 +347,7 @@ spec setup = r5.length . 
should_equal 3 Problems.assume_no_problems r5 - if setup.is_database then Test.specify "should allow similar API on Connection.read" <| + if setup.is_database then group_builder.specify "should allow similar API on Connection.read" <| connection = setup.connection connection.query t_big.name . row_count . should_equal 1500 diff --git a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso index 4addfcec3513..84304226cd90 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso @@ -7,22 +7,21 @@ from Standard.Table.Errors import all from Standard.Database.Errors import SQL_Error -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + from project.Common_Table_Operations.Util import run_default_backend -main = run_default_backend spec -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder db_todo = if setup.is_database.not then Nothing else "Table.cross_tab is not implemented yet in Database." - Test.group prefix+"Table.cross_tab" pending=db_todo <| + suite_builder.group prefix+"Table.cross_tab" pending=db_todo group_builder-> table = table_builder [["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] table2 = table_builder [["Group", ["A","B","A","B","A","B","A","B","A"]], ["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] - Test.specify "should cross_tab counts by default using first column as names" <| + group_builder.specify "should cross_tab counts by default using first column as names" <| t1 = table.cross_tab [] "Key" t1.column_names . should_equal ["x", "y", "z"] t1.row_count . should_equal 1 @@ -30,7 +29,7 @@ spec setup = t1.at "y" . to_vector . should_equal [3] t1.at "z" . to_vector . 
should_equal [2] - Test.specify "should allow a different aggregate" <| + group_builder.specify "should allow a different aggregate" <| t1 = table.cross_tab [] "Key" values=[Sum "Value"] t1.column_names . should_equal ["x", "y", "z"] t1.row_count . should_equal 1 @@ -38,7 +37,7 @@ spec setup = t1.at "y" . to_vector . should_equal [18] t1.at "z" . to_vector . should_equal [17] - Test.specify "should allow a custom expression for the aggregate" <| + group_builder.specify "should allow a custom expression for the aggregate" <| t1 = table.cross_tab [] "Key" values=[Sum "[Value]*[Value]"] t1.column_names . should_equal ["x", "y", "z"] t1.row_count . should_equal 1 @@ -46,7 +45,7 @@ spec setup = t1.at "y" . to_vector . should_equal [110] t1.at "z" . to_vector . should_equal [145] - Test.specify "should allow a chosen column" <| + group_builder.specify "should allow a chosen column" <| t = table_builder [["Group", ["A","B","A","B","A","B","A","B","A"]], ["Species", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] t1 = t.cross_tab [] "Species" t1.column_names . should_equal ["x", "y", "z"] @@ -62,7 +61,7 @@ spec setup = t2.at "y" . to_vector . should_equal [3] t2.at "z" . to_vector . should_equal [2] - Test.specify "should allow a grouping" <| + group_builder.specify "should allow a grouping" <| t1 = table2.cross_tab ["Group"] "Key" t1.column_names . should_equal ["Group", "x", "y", "z"] t1.row_count . should_equal 2 @@ -71,7 +70,7 @@ spec setup = t1.at "y" . to_vector . should_equal [2, 1] t1.at "z" . to_vector . should_equal [1, 1] - Test.specify "should allow a grouping by Aggregate_Column" <| + group_builder.specify "should allow a grouping by Aggregate_Column" <| t1 = table2.cross_tab [Group_By "Group"] "Key" t1.column_names . should_equal ["Group", "x", "y", "z"] t1.row_count . should_equal 2 @@ -82,7 +81,7 @@ spec setup = table2.cross_tab [Sum "Group"] "Key" . 
should_fail_with Illegal_Argument - Test.specify "should allow a grouping by Aggregate_Colum, with some empty bins" <| + group_builder.specify "should allow a grouping by Aggregate_Colum, with some empty bins" <| table3 = table_builder [["Group", ["B","A","B","A","A"]], ["Key", ["x", "y", "y", "y", "z"]], ["Value", [4, 5, 6, 7, 8]]] t1 = table3.cross_tab [Group_By "Group"] "Key" t1.column_names . should_equal ["Group", "x", "y", "z"] @@ -92,7 +91,7 @@ spec setup = t1.at "y" . to_vector . should_equal [2, 1] t1.at "z" . to_vector . should_equal [1, 0] - Test.specify "should allow a grouping by text" <| + group_builder.specify "should allow a grouping by text" <| t1 = table2.cross_tab "Group" "Key" t1.column_names . should_equal ["Group", "x", "y", "z"] t1.row_count . should_equal 2 @@ -104,7 +103,7 @@ spec setup = t2 = table2.cross_tab ["Group", "Group"] "Key" t2.column_names . should_equal ["Group", "x", "y", "z"] - Test.specify "should allow multiple values aggregates" <| + group_builder.specify "should allow multiple values aggregates" <| t1 = table.cross_tab [] "Key" values=[Count, Sum "Value"] t1.column_names . should_equal ["x Count", "x Sum", "y Count", "y Sum", "z Count", "z Sum"] t1.row_count . should_equal 1 @@ -115,7 +114,7 @@ spec setup = t1.at "z Count" . to_vector . should_equal [2] t1.at "z Sum" . to_vector . should_equal [17] - Test.specify "should fail if name_column is not found" <| + group_builder.specify "should fail if name_column is not found" <| err1 = table.cross_tab [] "Name" err1.should_fail_with Missing_Input_Columns err1.catch.criteria . should_equal ["Name"] @@ -124,7 +123,7 @@ spec setup = err2.should_fail_with Missing_Input_Columns err2.catch.criteria . 
should_equal [42] - Test.specify "should fail if group-by contains missing columns" <| + group_builder.specify "should fail if group-by contains missing columns" <| err1 = table2.cross_tab ["Group", "Nonexistent Group", "OTHER"] "Key" err1.should_fail_with Missing_Input_Columns err1.catch.criteria . should_equal ["Nonexistent Group", "OTHER"] @@ -133,7 +132,7 @@ spec setup = err2.should_fail_with Missing_Input_Columns err2.catch.criteria . should_equal [42] - Test.specify "should fail if aggregate values contain missing columns" <| + group_builder.specify "should fail if aggregate values contain missing columns" <| err1 = table.cross_tab [] "Key" values=[Count, Sum "Nonexistent Value", Sum "Value", Sum "OTHER"] err1.should_fail_with Invalid_Aggregate_Column err1.catch.name . should_equal "Nonexistent Value" @@ -142,7 +141,7 @@ spec setup = err2.should_fail_with Missing_Input_Columns err2.catch.criteria . should_equal [42] - Test.specify "should fail if aggregate values contain invalid expressions" <| + group_builder.specify "should fail if aggregate values contain invalid expressions" <| err1 = table.cross_tab [] "Key" values=[Sum "[MISSING]*10"] err1.should_fail_with Invalid_Aggregate_Column err1.catch.name . should_equal "[MISSING]*10" @@ -153,11 +152,11 @@ spec setup = err2.catch.name . should_equal "[[[" err2.catch.expression_error . should_be_a Expression_Error.Syntax_Error - Test.specify "should not allow Group_By for values" <| + group_builder.specify "should not allow Group_By for values" <| err1 = table.cross_tab [] "Key" values=[Count, Group_By "Value"] on_problems=Problem_Behavior.Ignore err1.should_fail_with Illegal_Argument - Test.specify "should gracefully handle duplicate aggregate names" <| + group_builder.specify "should gracefully handle duplicate aggregate names" <| action = table.cross_tab [] "Key" values=[Count new_name="Agg1", Sum "Value" new_name="Agg1"] on_problems=_ tester table = table.column_names . 
should_equal ["x Agg1", "x Agg1 1", "y Agg1", "y Agg1 1", "z Agg1", "z Agg1 1"] @@ -171,7 +170,7 @@ spec setup = problems3 = [Duplicate_Output_Column_Names.Error ["x"]] Problems.test_problem_handling action3 problems3 tester3 - Test.specify "should allow non-Text columns to be used as name" <| + group_builder.specify "should allow non-Text columns to be used as name" <| table = table_builder [["Key", [1, 1, 1, 2, 2, 1, 3, 3, 1]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] t1 = table.cross_tab [] "Key" t1.column_names . should_equal ["1", "2", "3"] @@ -180,7 +179,7 @@ spec setup = t1.at "2" . to_vector . should_equal [2] t1.at "3" . to_vector . should_equal [2] - Test.specify "should correctly handle uncommon characters in fields becoming column names" <| + group_builder.specify "should correctly handle uncommon characters in fields becoming column names" <| table = table_builder [["Key", ["💡🎉🌻", "ąęź", "ąęź", '\n\n', "😊", "😊", "🌻", "😊", "🌻", " "]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]]] t1 = table.cross_tab [] "Key" . sort_columns t1.column_names . should_equal ['\n\n', ' ', 'ąęź', '🌻', '💡🎉🌻', '😊'] @@ -192,7 +191,7 @@ spec setup = t1.at '\n\n' . to_vector . should_equal [1] t1.at " " . to_vector . should_equal [1] - Test.specify "should report Floating_Point_Equality if the group or name column is floating point" <| + group_builder.specify "should report Floating_Point_Equality if the group or name column is floating point" <| t = table_builder [["X", [1.5, 2.5, 2.5, 1.5]], ["Y", [1, 2, 3, 4]], ["Z", ["a", "b", "b", "b"]]] t1 = t.cross_tab [] "X" Problems.expect_warning Floating_Point_Equality t1 @@ -209,7 +208,7 @@ spec setup = t2.at "a" . to_vector . should_equal [1, 0] t2.at "b" . to_vector . 
should_equal [1, 2] - Test.specify "should check types of aggregates" <| + group_builder.specify "should check types of aggregates" <| t = table_builder [["Key", ["a", "a", "b", "b"]], ["ints", [1, 2, 3, 4]], ["texts", ["a", "b", "c", "d"]]] [Problem_Behavior.Report_Error, Problem_Behavior.Report_Warning, Problem_Behavior.Ignore].each pb-> Test.with_clue "Problem_Behavior="+pb.to_text+" " <| @@ -220,7 +219,7 @@ spec setup = t3 = t.cross_tab [] "Key" values=[Concatenate "ints"] on_problems=pb t3.should_fail_with Invalid_Value_Type - Test.specify "should return predictable types" <| + group_builder.specify "should return predictable types" <| table = table_builder [["Text", ["a", "b"]], ["Int", [1, 2]], ["Float", [1.1, 2.2]]] t1 = table.cross_tab ["Int"] "Text" t1.column_names . should_equal ["Int", "a", "b"] @@ -235,7 +234,7 @@ spec setup = t2.at "2 Average Float" . value_type . is_floating_point . should_be_true t2.at "2 Concatenate Text" . value_type . is_text . should_be_true - Test.specify "should fail gracefully if an effective column name would contain invalid characters" <| + group_builder.specify "should fail gracefully if an effective column name would contain invalid characters" <| table = table_builder [["Key", ['x', 'x', 'y\0', '\0', 'y\0', 'z', 'z', 'z', 'z']], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] r1 = table.cross_tab [] "Key" r1.should_fail_with Invalid_Column_Names @@ -246,7 +245,7 @@ spec setup = r2.should_fail_with Invalid_Column_Names r2.catch.to_display_text . should_contain "cannot contain the NUL character" - Test.specify "should fail gracefully if an effective column name would be empty or null" <| + group_builder.specify "should fail gracefully if an effective column name would be empty or null" <| table = table_builder [["Key", [" ", "x", "x", "x", "", "", "", "y", "y"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] r1 = table.cross_tab [] "Key" r1.should_fail_with Invalid_Column_Names @@ -257,7 +256,7 @@ spec setup = r2 . 
should_fail_with Invalid_Column_Names r2.catch.to_display_text . should_contain "cannot be Nothing" - Test.specify "should fail gracefully if producing too many columns in a table" <| + group_builder.specify "should fail gracefully if producing too many columns in a table" <| table = table_builder [["Key", 0.up_to 25000 . to_vector]] r1 = table.cross_tab [] "Key" r1 . should_fail_with Column_Count_Exceeded diff --git a/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso index c226e5cd4ba4..1b7f0b0c8299 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso @@ -6,28 +6,27 @@ from Standard.Table.Errors import Inexact_Type_Coercion, Invalid_Value_Type from Standard.Database.Errors import Unsupported_Database_Operation -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + from project.Common_Table_Operations.Util import all -main = run_default_backend spec -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend." - Test.group prefix+"Date-Time support" pending=pending_datetime <| - Test.specify "should allow to create Table with Date columns and round-trip them back to Enso" <| + suite_builder.group prefix+"Date-Time support" pending=pending_datetime group_builder-> + group_builder.specify "should allow to create Table with Date columns and round-trip them back to Enso" <| d = Date.new 2020 10 24 table = table_builder [["A", [d]], ["X", [123]]] table.at "A" . value_type . should_equal Value_Type.Date table.at "A" . to_vector . should_equal [d] table.at "X" . to_vector . 
should_equal [123] - Test.specify "should allow to create Table with Time Of Day columns and round-trip them back to Enso" <| + group_builder.specify "should allow to create Table with Time Of Day columns and round-trip them back to Enso" <| # We do not check nanoseconds, as nano support may be optional. t = Time_Of_Day.new hour=12 minute=3 second=5 millisecond=6 microsecond=7 table = table_builder [["B", [t]], ["X", [123]]] @@ -35,7 +34,7 @@ spec setup = table.at "B" . to_vector . should_equal [t] table.at "X" . to_vector . should_equal [123] - Test.specify "should allow to create Table with Date Time columns and round-trip them back to Enso, preserving the exact instant in time (even if losing the timezone)" <| + group_builder.specify "should allow to create Table with Date Time columns and round-trip them back to Enso, preserving the exact instant in time (even if losing the timezone)" <| dt1 = Date_Time.new 2020 10 24 1 2 3 millisecond=4 microsecond=5 offset_zone = Time_Zone.new hours=(-11) minutes=(-30) dt2 = Date_Time.new 2021 11 23 1 2 3 zone=offset_zone @@ -49,11 +48,11 @@ spec setup = table.at "C" . to_vector . should_equal_tz_agnostic dates table.at "X" . to_vector . 
should_equal xs - Test.group prefix+"Date-Time operations" pending=pending_datetime <| + suite_builder.group prefix+"Date-Time operations" pending=pending_datetime group_builder-> dates = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing]], ["X", [2020, 29, 1, 100]]] times = table_builder [["A", [Time_Of_Day.new 23 59 59 millisecond=567 nanosecond=123, Time_Of_Day.new 2 30 44 nanosecond=1002000, Time_Of_Day.new 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]] datetimes = table_builder [["A", [Date_Time.new 2020 12 31 23 59 59 millisecond=567 nanosecond=123, Date_Time.new 2024 2 29 2 30 44 nanosecond=1002000, Date_Time.new 1990 1 1 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]] - Test.specify "should allow to get the year/month/day of a Date" <| + group_builder.specify "should allow to get the year/month/day of a Date" <| t = dates a = t.at "A" a.year . to_vector . should_equal [2020, 2024, 1990, Nothing] @@ -68,7 +67,7 @@ spec setup = ((a.month) == (t.at "X")).to_vector . should_equal [False, False, True, Nothing] ((a.day) == (t.at "X")).to_vector . should_equal [False, True, True, Nothing] - Test.specify "should allow to get the year/month/day of a Date_Time" <| + group_builder.specify "should allow to get the year/month/day of a Date_Time" <| t = datetimes a = t.at "A" a.year . to_vector . should_equal [2020, 2024, 1990, Nothing] @@ -82,14 +81,14 @@ spec setup = ((a.month) == (t.at "X")).to_vector . should_equal [False, False, True, Nothing] ((a.day) == (t.at "X")).to_vector . 
should_equal [False, True, True, Nothing] - Test.specify "should allow to evaluate expressions with year/month/day" <| + group_builder.specify "should allow to evaluate expressions with year/month/day" <| t = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing]], ["X", [0, 2, 1, 100]], ["B", [Date_Time.new 2020 10 31 23 59 59, Date_Time.new 2024 4 29 2 30 44, Date_Time.new 1990 10 1 0 0 0, Nothing]]] c = t.evaluate_expression "year([A]) + [X] + day([A]) * month([B])" Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <| c.value_type.is_integer.should_be_true c.to_vector . should_equal [(2020 + 0 + 31 * 10), (2024 + 2 + 29 * 4), (1990 + 1 + 1 * 10), Nothing] - Test.specify "should allow to get hour/minute/second of a Time_Of_Day" <| + group_builder.specify "should allow to get hour/minute/second of a Time_Of_Day" <| a = times.at "A" a.hour . to_vector . should_equal [23, 2, 0, Nothing] a.minute . to_vector . should_equal [59, 30, 0, Nothing] @@ -103,7 +102,7 @@ spec setup = Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <| c.value_type.is_integer.should_be_true - Test.specify "should allow to get hour/minute/second of a Date_Time" <| + group_builder.specify "should allow to get hour/minute/second of a Date_Time" <| a = datetimes.at "A" a.hour . to_vector . should_equal [23, 2, 0, Nothing] a.minute . to_vector . should_equal [59, 30, 0, Nothing] @@ -117,7 +116,7 @@ spec setup = Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <| c.value_type.is_integer.should_be_true - Test.specify "should allow to get millisecond/nanosecond of Time_Of_Day through date_part" <| + group_builder.specify "should allow to get millisecond/nanosecond of Time_Of_Day through date_part" <| a = times.at "A" a.date_part Time_Period.Second . to_vector . 
should_equal [59, 44, 0, Nothing] a.date_part Time_Period.Millisecond . to_vector . should_equal [567, 1, 0, Nothing] @@ -132,7 +131,7 @@ spec setup = Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <| c.value_type.is_integer.should_be_true - Test.specify "should allow to get week/quarter of Date through date_part" <| + group_builder.specify "should allow to get week/quarter of Date through date_part" <| a = dates.at "A" a.date_part Date_Period.Quarter . to_vector . should_equal [4, 1, 1, Nothing] a.date_part Date_Period.Week . to_vector . should_equal [53, 9, 1, Nothing] @@ -141,7 +140,7 @@ spec setup = Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <| c.value_type.is_integer.should_be_true - Test.specify "should allow to get various date_part of Date_Time" <| + group_builder.specify "should allow to get various date_part of Date_Time" <| a = datetimes.at "A" a.date_part Date_Period.Quarter . to_vector . should_equal [4, 1, 1, Nothing] a.date_part Date_Period.Week . to_vector . should_equal [53, 9, 1, Nothing] @@ -158,7 +157,7 @@ spec setup = c.value_type.is_integer.should_be_true - Test.specify "should allow to compare dates" <| + group_builder.specify "should allow to compare dates" <| t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date.new 2021 12 5]]] [(<), (<=), (>), (>=), (==), (!=)].each op-> @@ -166,7 +165,7 @@ spec setup = op (t.at "X") (t.at "Y") . to_vector . should_succeed op (t.at "X") (Date.new 2021 12 4) . to_vector . should_succeed - Test.specify "should allow to compare date-times" <| + group_builder.specify "should allow to compare date-times" <| t = table_builder [["X", [Date_Time.new 2021 12 3 12 30 0]], ["Y", [Date_Time.new 2021 12 5 12 30 0]]] [(<), (<=), (>), (>=), (==), (!=)].each op-> @@ -174,7 +173,7 @@ spec setup = op (t.at "X") (t.at "Y") . to_vector . 
should_succeed op (t.at "X") (Date_Time.new 2021 12 4 12 30 0) . to_vector . should_succeed - Test.specify "should allow to compare time-of-day" <| + group_builder.specify "should allow to compare time-of-day" <| t = table_builder [["X", [Time_Of_Day.new 12 30 0]], ["Y", [Time_Of_Day.new 12 30 1]]] [(<), (<=), (>), (>=), (==), (!=)].each op-> @@ -182,7 +181,7 @@ spec setup = op (t.at "X") (t.at "Y") . to_vector . should_succeed op (t.at "X") (Time_Of_Day.new 12 30 0) . to_vector . should_succeed - Test.specify "should not allow to mix types in ordering comparisons" <| + group_builder.specify "should not allow to mix types in ordering comparisons" <| t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]] [(<), (<=), (>), (>=)].each op-> @@ -190,13 +189,13 @@ spec setup = op (t.at "X") (t.at "Z") . should_fail_with Invalid_Value_Type if setup.test_selection.supports_time_duration then - Test.specify "should allow to subtract two Dates" <| + group_builder.specify "should allow to subtract two Dates" <| t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] ((t.at "Y") - (t.at "X")) . to_vector . should_equal [Period.new months=1 days=2] ((t.at "Y") - (Date.new 2020 12 5)) . to_vector . should_equal [Period.new years=1] - Test.specify "should allow to subtract two Date_Times" <| + group_builder.specify "should allow to subtract two Date_Times" <| dx = Date_Time.new 2021 11 30 10 15 0 t = table_builder [["X", [dx]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]] @@ -205,14 +204,14 @@ spec setup = ((t.at "Y") - (t.at "X")) . to_vector . should_equal [diff] ((t.at "Y") - dx) . to_vector . 
should_equal [diff] - Test.specify "should allow to subtract two Time_Of_Days" <| + group_builder.specify "should allow to subtract two Time_Of_Days" <| t = table_builder [["X", [Time_Of_Day.new 10 15 0, Time_Of_Day.new 1 0 0]], ["Y", [Time_Of_Day.new 12 30 20, Time_Of_Day.new 0 0 0]]] ((t.at "Y") - (t.at "X")) . to_vector . should_equal [Duration.new hours=2 minutes=15 seconds=20, Duration.new hours=(-1) minutes=0 seconds=0] ((t.at "Y") - (Time_Of_Day.new 0 0 0)) . to_vector . should_equal [Duration.new hours=12 minutes=30 seconds=20, Duration.zero] if setup.test_selection.supports_time_duration.not then - Test.specify "should report unsupported operation for subtracting date/time" <| + group_builder.specify "should report unsupported operation for subtracting date/time" <| t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] t2 = table_builder [["X", [Date_Time.new 2021 11 3 10 15 0]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]] t3 = table_builder [["X", [Time_Of_Day.new 10 15 0, Time_Of_Day.new 1 0 0]], ["Y", [Time_Of_Day.new 12 30 20, Time_Of_Day.new 0 0 0]]] @@ -224,7 +223,7 @@ spec setup = ((t3.at "Y") - (t3.at "X")) . should_fail_with Unsupported_Database_Operation ((t3.at "Y") - (Time_Of_Day.new 0 0 0)) . should_fail_with Unsupported_Database_Operation - Test.specify "should report an Invalid_Value_Type error when subtracting mixed date/time types" <| + group_builder.specify "should report an Invalid_Value_Type error when subtracting mixed date/time types" <| t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]] ((t.at "Y") - (t.at "X")) . should_fail_with Invalid_Value_Type @@ -234,7 +233,7 @@ spec setup = ((t.at "Z") - (t.at "Y")) . should_fail_with Invalid_Value_Type ((t.at "Z") - (Date.new 2021 11 3)) . 
should_fail_with Invalid_Value_Type - Test.specify "should allow computing a SQL-like difference" <| + group_builder.specify "should allow computing a SQL-like difference" <| t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] (t1.at "X").date_diff (t1.at "Y") Date_Period.Day . to_vector . should_equal [32] @@ -334,7 +333,7 @@ spec setup = (t3.at "X").date_diff (t3.at "Y") Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation (t3.at "X").date_diff (Time_Of_Day.new 10 15 12 34 56 78) Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation - Test.specify "date_diff should return integers" <| + group_builder.specify "date_diff should return integers" <| t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]] time_periods = [Time_Period.Hour, Time_Period.Minute, Time_Period.Second] @@ -349,7 +348,7 @@ spec setup = (date_periods+time_periods).each p-> (t.at "Z").date_diff (Date_Time.new 2021 12 05 01 02) p . value_type . is_integer . should_be_true - Test.specify "should not allow mixing types in date_diff" <| + group_builder.specify "should not allow mixing types in date_diff" <| t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]] (t.at "X").date_diff (t.at "Y") Date_Period.Day . should_fail_with Invalid_Value_Type (t.at "Z").date_diff (t.at "X") Date_Period.Day . should_fail_with Invalid_Value_Type @@ -361,7 +360,7 @@ spec setup = (t.at "Y").date_diff (Date.new 2021 12 5) Date_Period.Day . should_fail_with Invalid_Value_Type (t.at "Z").date_diff (Time_Of_Day.new 12 30 20) Time_Period.Hour . 
should_fail_with Invalid_Value_Type - Test.specify "should allow an SQL-like shift" <| + group_builder.specify "should allow an SQL-like shift" <| t1 = table_builder [["X", [Date.new 2021 01 31, Date.new 2021 01 01, Date.new 2021 12 31]], ["Y", [5, -1, 0]]] (t1.at "X").date_add (t1.at "Y") Date_Period.Day . to_vector . should_equal [Date.new 2021 02 05, Date.new 2020 12 31, Date.new 2021 12 31] (t1.at "X").date_add -1 Date_Period.Day . to_vector . should_equal [Date.new 2021 01 30, Date.new 2020 12 31, Date.new 2021 12 30] @@ -427,11 +426,11 @@ spec setup = # and defaults to Hour for time-of-day (t3.at "X").date_add (t3.at "Y") . to_vector . should_equal [Time_Of_Day.new 17 30 0, Time_Of_Day.new 22 45 0, Time_Of_Day.new 1 30 0] - Test.specify "should check shift_amount type in date_add" <| + group_builder.specify "should check shift_amount type in date_add" <| t = table_builder [["X", [Date.new 2021 01 31]]] t.at "X" . date_add "text" Date_Period.Day . should_fail_with Invalid_Value_Type - Test.specify "date_diff and date_add should work correctly with DST" pending="May be failing on some Database configurations. ToDo: investigate - https://github.com/enso-org/enso/issues/7326" <| + group_builder.specify "date_diff and date_add should work correctly with DST" pending="May be failing on some Database configurations. ToDo: investigate - https://github.com/enso-org/enso/issues/7326" <| zone = Time_Zone.parse "Europe/Warsaw" dt1 = Date_Time.new 2023 03 26 00 30 00 zone=zone t = table_builder [["X", [dt1]]] @@ -469,13 +468,13 @@ spec setup = (t2.at "X").date_diff dt4 Time_Period.Hour . to_vector . 
should_equal [23] if setup.test_selection.date_time.not then - Test.group prefix+"partial Date-Time support" <| - Test.specify "will fail when uploading a Table containing Dates" <| + suite_builder.group prefix+"partial Date-Time support" group_builder-> + group_builder.specify "will fail when uploading a Table containing Dates" <| d = Date.new 2020 10 24 table = table_builder [["A", [d]], ["X", [123]]] table.should_fail_with Unsupported_Database_Operation - Test.specify "should report a type error when date operations are invoked on a non-date column" <| + group_builder.specify "should report a type error when date operations are invoked on a non-date column" <| t = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, True]]] r1 = t.at "A" . year r1.should_fail_with Invalid_Value_Type diff --git a/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso index 50d06bda45ff..988a5f2991ab 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso @@ -6,19 +6,17 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument from Standard.Table import all from Standard.Table.Errors import all -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all -from project.Common_Table_Operations.Util import all -main = run_default_backend spec +from project.Common_Table_Operations.Util import all -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend." 
- Test.group prefix+"Table.set with Column_Operation" <| - Test.specify "arithmetics" <| + suite_builder.group prefix+"Table.set with Column_Operation" group_builder-> + group_builder.specify "arithmetics" <| t = table_builder [["A", [1, 2]], ["B", [10, 40]]] t.set (Column_Operation.Add (Column_Ref.Name "A") (Column_Ref.Name "B")) "C" . at "C" . to_vector . should_equal [11, 42] t.set (Column_Operation.Add 100 (Column_Ref.Name "B")) "C" . at "C" . to_vector . should_equal [110, 140] @@ -53,7 +51,7 @@ spec setup = Test.expect_panic Type_Error <| t.set (Column_Operation.Subtract "x" "y") t.set (Column_Operation.Add 42 "y") . should_fail_with Illegal_Argument - Test.specify "rounding" <| + group_builder.specify "rounding" <| t = table_builder [["A", [1.13333, 122.74463, 32.52424, -12.7]]] t.set (Column_Operation.Round (Column_Ref.Name "A")) "Z" . at "Z" . to_vector . should_equal [1, 123, 33, -13] t.set (Column_Operation.Round (Column_Ref.Name "A") precision=1) "Z" . at "Z" . to_vector . should_equal [1.1, 122.7, 32.5, -12.7] @@ -66,7 +64,7 @@ spec setup = Test.expect_panic Type_Error <| t.set (Column_Operation.Round "1.23") Test.expect_panic Type_Error <| t.set (Column_Operation.Truncate "1.23") - Test.specify "date/time" pending=pending_datetime <| + group_builder.specify "date/time" pending=pending_datetime <| t = table_builder [["A", [Date_Time.new 2023 1 12 12 45, Date_Time.new 2020 5 12 1 45]], ["B", [Date_Time.new 2023 1 15 18 45, Date_Time.new 2020 6 12 22 20]], ["x", [1, 3]]] # TODO ticket for truncate for DB @@ -98,7 +96,7 @@ spec setup = t.set (Column_Operation.Date_Part (Column_Ref.Name "x") Date_Period.Year) . should_fail_with Invalid_Value_Type Test.expect_panic Type_Error <| t2.set (Column_Operation.Date_Diff 42 "x" Date_Period.Year) - Test.specify "boolean" <| + group_builder.specify "boolean" <| t = table_builder [["A", [True, False]], ["T", [True, True]]] t.set (Column_Operation.And (Column_Ref.Name "A") (Column_Ref.Name "T")) "Z" . at "Z" . 
to_vector . should_equal [True, False] @@ -115,7 +113,7 @@ spec setup = Test.expect_panic_with (t.set (Column_Operation.And 42 True)) Type_Error Test.expect_panic_with (t.set (Column_Operation.Or (Column_Ref.Name "A") "x")) Type_Error - Test.specify "if" <| + group_builder.specify "if" <| t = table_builder [["A", [1, 100]], ["B", [10, 40]], ["C", [23, 55]]] t.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than=(Column_Ref.Name "B"))) "Z" . at "Z" . to_vector . should_equal [False, True] t.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than=20) "T" "F") "Z" . at "Z" . to_vector . should_equal ["F", "T"] @@ -144,7 +142,7 @@ spec setup = t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Not_In (Column_Ref.Name "B")) "TT" "FF") . should_fail_with Illegal_Argument t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Not_In [Column_Ref.Name "B", "X"]) "TT" "FF") . should_fail_with Illegal_Argument - Test.specify "text" <| + group_builder.specify "text" <| t = table_builder [["A", [" a ", "b"]], ["B", ["c", " d "]]] t.set (Column_Operation.Trim (Column_Ref.Name "A")) "Z" . at "Z" . to_vector . should_equal ["a", "b"] @@ -161,7 +159,7 @@ spec setup = t2 = table_builder [["A", [42]]] t2.set (Column_Operation.Trim (Column_Ref.Name "A")) . should_fail_with Invalid_Value_Type - Test.specify "min/max" <| + group_builder.specify "min/max" <| t = table_builder [["A", [1, 20]], ["B", [10, 2]]] t.set (Column_Operation.Min (Column_Ref.Name "A") (Column_Ref.Name "B")) "Z" . at "Z" . to_vector . should_equal [1, 2] @@ -189,18 +187,18 @@ spec setup = t3.set (Column_Operation.Min (Column_Ref.Name "A") (Date.new 2003)) "Z" . at "Z" . to_vector . should_equal [Date.new 2002 12 10, Date.new 2003 01 01] t3.set (Column_Operation.Max (Column_Ref.Name "A") (Date.new 2003)) "Z" . at "Z" . to_vector . 
should_equal [Date.new 2003 01 01, Date.new 2005 01 01] - Test.specify "allows also indexing columns numerically" <| + group_builder.specify "allows also indexing columns numerically" <| t = table_builder [["X", [1, 2]], ["Y", [3, 4]]] t.set (Column_Operation.Add (Column_Ref.Index 0) (Column_Ref.Index 1)) "Z" . at "Z" . to_vector . should_equal [4, 6] - Test.specify "will forward column resolution errors" <| + group_builder.specify "will forward column resolution errors" <| t = table_builder [["X", [1, 2]], ["Y", [3, 4]]] t.set (Column_Operation.Add (Column_Ref.Name "X") (Column_Ref.Name "Z")) . should_fail_with No_Such_Column t.set (Column_Operation.Not (Column_Ref.Name "zzz")) . should_fail_with No_Such_Column t.set (Column_Operation.Not (Column_Ref.Index 42)) . should_fail_with Index_Out_Of_Bounds - Test.group prefix+"Unique derived column names" <| - Test.specify "Should not disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add_Or_Update" <| + suite_builder.group prefix+"Unique derived column names" group_builder-> + group_builder.specify "Should not disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add_Or_Update" <| t = table_builder [["X", [1, 2, 3]]] column_op = Column_Operation.Power 2 (Column_Ref.Name "X") t2 = t.set column_op . set column_op @@ -208,7 +206,7 @@ spec setup = t2.at "X" . to_vector . should_equal [1, 2, 3] t2.at "[2] ^ [X]" . to_vector . should_equal [2, 4, 8] - Test.specify "Should disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add" <| + group_builder.specify "Should disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add" <| t = table_builder [["X", [1, 2, 3]]] column_op = Column_Operation.Power 2 (Column_Ref.Name "X") t2 = t.set column_op set_mode=Set_Mode.Add . set column_op set_mode=Set_Mode.Add @@ -217,7 +215,7 @@ spec setup = t2.at "[2] ^ [X]" . to_vector . 
should_equal [2, 4, 8] t2.at "[2] ^ [X] 1" . to_vector . should_equal [2, 4, 8] - Test.specify "Should disambiguate two derived columns that would otherwise have had the same name, within the same expression" <| + group_builder.specify "Should disambiguate two derived columns that would otherwise have had the same name, within the same expression" <| t = table_builder [["X", [1, 2, 3]]] expression = "2 + (2 * 2) + (2 ^ [X])" t2 = t.set expression @@ -225,7 +223,7 @@ spec setup = t2.at "X" . to_vector . should_equal [1, 2, 3] t2.at expression . to_vector . should_equal [8, 10, 14] - Test.specify "Should use .pretty to distinguish string constants from regular column names" <| + group_builder.specify "Should use .pretty to distinguish string constants from regular column names" <| t = table_builder [["X", ["a", "b", "c"]]] expression = '"foo" + [X] + "bar"' t2 = t.set expression @@ -233,7 +231,7 @@ spec setup = t2.at "X" . to_vector . should_equal ["a", "b", "c"] t2.at expression . to_vector . should_equal ["fooabar", "foobbar", "foocbar"] - Test.specify "Should disambiguate between a column reference and a literal string" <| + group_builder.specify "Should disambiguate between a column reference and a literal string" <| t = table_builder [["X", ["a", "b", "c"]]] t2 = t.set (Column_Operation.Add "prefix" (Column_Ref.Name "X")) t3 = t2.set (Column_Operation.Add "prefix" "X") @@ -242,13 +240,13 @@ spec setup = t3.at "['prefix'] + [X]" . to_vector . should_equal ["prefixa", "prefixb", "prefixc"] t3.at "['prefix'] + 'X'" . to_vector . should_equal ["prefixX", "prefixX", "prefixX"] - Test.specify "Should not disambiguate if set_mode is Update" <| + group_builder.specify "Should not disambiguate if set_mode is Update" <| t = table_builder [["X", [1, 2, 3]]] t2 = t.set (Column_Operation.Add (Column_Ref.Name "X") 1) set_mode=Set_Mode.Update t2.column_names . should_equal ["X"] t2.at "X" . to_vector . 
should_equal [2, 3, 4] - Test.specify "Should not disambiguate if set_mode is Add_Or_Update" <| + group_builder.specify "Should not disambiguate if set_mode is Add_Or_Update" <| t = table_builder [["X", [1, 2, 3]], ["[X] + 1", [10, 20, 30]]] # set_mode=Set_Mode.Add_Or_Update is the default t2 = t.set (Column_Operation.Add (Column_Ref.Name "X") 1) @@ -256,7 +254,7 @@ spec setup = t2.at "X" . to_vector . should_equal [1, 2, 3] t2.at "[X] + 1" . to_vector . should_equal [2, 3, 4] - Test.specify "Should not disambiguate if the new name is explicitly set" <| + group_builder.specify "Should not disambiguate if the new name is explicitly set" <| t = table_builder [["X", [1, 2, 3]]] t2 = t.set (Column_Operation.Add (Column_Ref.Name "X") 1) new_name="X" t2.column_names . should_equal ["X"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso index 84d7d698cc34..ea12d2f65d1e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso @@ -3,18 +3,17 @@ from Standard.Base import all from Standard.Table import Sort_Column from Standard.Table.Errors import all -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + from project.Common_Table_Operations.Util import run_default_backend -main = run_default_backend spec -spec setup = +add_specs suite_builder setup = table_builder = setup.table_builder materialize = setup.materialize - Test.group setup.prefix+"Table.distinct" <| - Test.specify "should group by all columns by default" <| + suite_builder.group setup.prefix+"Table.distinct" group_builder-> + group_builder.specify "should group by all columns by default" <| a = ["A", ["a", "b", "a", "b", "a", "b"]] b = ["B", [2, 1, 2, 2, 2, 1]] t = table_builder [a, b] @@ -22,7 +21,7 @@ spec setup = r.at "A" . to_vector . 
should_equal ["a", "b", "b"] r.at "B" . to_vector . should_equal [2, 1, 2] - Test.specify "should allow to select distinct rows based on a subset of columns, returning any row from each group" <| + group_builder.specify "should allow to select distinct rows based on a subset of columns, returning any row from each group" <| a = ["A", ["a", "a", "a", "a", "a", "a"]] b = ["B", [1, 1, 2, 2, 1, 2]] c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]] @@ -41,7 +40,7 @@ spec setup = [0.3, 0.4, 0.6].contains (cv.at 1) . should_be_true if setup.test_selection.distinct_returns_first_row_from_group_if_ordered then - Test.specify "should allow to select distinct rows based on a subset of columns, returning any first from each group if the table was ordered" <| + group_builder.specify "should allow to select distinct rows based on a subset of columns, returning any first from each group if the table was ordered" <| a = ["A", ["a", "a", "a", "a", "a", "a"]] b = ["B", [1, 1, 2, 2, 1, 2]] c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]] @@ -52,7 +51,7 @@ spec setup = r2.at "B" . to_vector . should_equal [1, 2] r2.at "C" . to_vector . should_equal [0.5, 0.6] - Test.specify "should allow to control case-sensitivity of keys" <| + group_builder.specify "should allow to control case-sensitivity of keys" <| x = ["X", ['A', 'a', 'enso', 'Enso', 'A']] t1 = table_builder [x] d1 = t1.distinct ["X"] on_problems=Report_Error |> materialize |> _.order_by ["X"] @@ -64,7 +63,7 @@ spec setup = v.filter (_.equals_ignore_case "enso") . length . should_equal 1 v.filter (_.equals_ignore_case "a") . length . 
should_equal 1 - Test.specify "should report a warning if the key contains floating point values" <| + group_builder.specify "should report a warning if the key contains floating point values" <| t1 = table_builder [["X", [3.0, 1.0, 2.0, 2.0, 1.0]]] action1 = t1.distinct on_problems=_ tester1 table = @@ -74,7 +73,7 @@ spec setup = problems1 = [Floating_Point_Equality.Error "X"] Problems.test_problem_handling action1 problems1 tester1 - Test.specify "should handle nulls correctly" <| + group_builder.specify "should handle nulls correctly" <| a = ["A", ["a", Nothing, "b", "a", "b", Nothing, "a", "b"]] b = ["B", [1, 2, 3, 4, 5, 6, 7, 8]] t = table_builder [a, b] @@ -92,7 +91,7 @@ spec setup = va.at 2 . should_equal "b" [3, 5, 8].contains (vb.at 2) . should_be_true - Test.specify "should report missing input columns" <| + group_builder.specify "should report missing input columns" <| t1 = table_builder [["X", [1, 2, 3, 2, 2]]] [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> t2 = t1.distinct ["Y", "Z"] on_problems=pb diff --git a/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso index 644ae854863c..a19442e018d9 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso @@ -9,15 +9,14 @@ import Standard.Table.Data.Expression.Expression_Error from Standard.Database.Errors import SQL_Error -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + from project.Util import all from project.Common_Table_Operations.Util import run_default_backend -main = run_default_backend (spec detailed=True) -spec detailed setup = +add_specs suite_builder detailed setup = prefix = setup.prefix table_builder = setup.table_builder @@ -50,32 +49,32 @@ spec detailed setup = new_column.name . 
should_equal expression - specify_test label action expression_test=tester pending=Nothing = case pending of + specify_test label group_builder action expression_test=tester pending=Nothing = case pending of Nothing -> case detailed of True -> specify_tester expression value = - Test.specify (label + ": " + expression) <| + group_builder.specify (label + ": " + expression) <| expression_test expression value action specify_tester False -> - Test.specify label (action expression_test) - _ -> Test.specify label Nothing pending + group_builder.specify label (action expression_test) + _ -> group_builder.specify label Nothing pending - Test.group prefix+"Expression Integer literals" <| - specify_test "should be able to add an integer column" expression_test-> + suite_builder.group prefix+"Expression Integer literals" group_builder-> + specify_test "should be able to add an integer column" group_builder expression_test-> expression_test "1" 1 expression_test "-3" -3 expression_test "1_000" 1000 - Test.group prefix+"Expression Float literals" <| - specify_test "should be able to add an decimal column" expression_test-> + suite_builder.group prefix+"Expression Float literals" group_builder-> + specify_test "should be able to add an decimal column" group_builder expression_test-> expression_test "1.23" 1.23 expression_test "-3.1415" -3.1415 expression_test "1_000.456" 1000.456 - Test.group prefix+"Expression Boolean literals" <| - specify_test "should be able to add a boolean column" expression_test-> + suite_builder.group prefix+"Expression Boolean literals" group_builder-> + specify_test "should be able to add a boolean column" group_builder expression_test-> expression_test "True" True expression_test "true" True expression_test "TRUE" True @@ -85,8 +84,8 @@ spec detailed setup = expression_test "FALSE" False expression_test "FaLsE" False - Test.group prefix+"Expression Text literals" <| - specify_test "should be able to add a text column" expression_test-> + 
suite_builder.group prefix+"Expression Text literals" group_builder-> + specify_test "should be able to add a text column" group_builder expression_test-> expression_test "'Hello World'" 'Hello World' expression_test "'Hello \'World\''" "Hello 'World'" expression_test '"Hello World"' 'Hello World' @@ -94,12 +93,12 @@ spec detailed setup = expression_test '"Hello \\""World"""' 'Hello \\"World"' expression_test "'Alpha\r\n\gBeta'" 'Alpha\r\n\\gBeta' - Test.group prefix+"Expression Text literals" <| - specify_test "should be able to get a Column" expression_test-> + suite_builder.group prefix+"Expression Text literals" group_builder-> + specify_test "should be able to get a Column" group_builder expression_test-> expression_test "[A]" (column_a.at 1) expression_test "[Bad]] Name]" (column_odd.at 1) - Test.specify "should sanitize names" <| + group_builder.specify "should sanitize names" <| t = table_builder [["X", ['\0', 'x', '']]] c = t.evaluate_expression '[X] == \'\0\'' . catch SQL_Error # We ignore the SQL error - some backends just do not support `\0` values. This is not the main point of this test. @@ -109,14 +108,14 @@ spec detailed setup = c.name.should_equal "[X] == '\0'" c.to_vector.should_equal [True, False, False] - Test.group prefix+"Expression Nothing literals" <| - specify_test "should be able to add an nothing column" expression_test-> + suite_builder.group prefix+"Expression Nothing literals" group_builder-> + specify_test "should be able to add an nothing column" group_builder expression_test-> expression_test "null" Nothing expression_test "nUlL" Nothing expression_test "Nothing" Nothing expression_test "NOTHING" Nothing - Test.specify "should allow to create a NULL column" <| + group_builder.specify "should allow to create a NULL column" <| t = table_builder [["X", [1, 2, 3]]] c = t.evaluate_expression "null" c.name . 
should_equal "null" @@ -132,7 +131,7 @@ spec detailed setup = introduce a Value_Type.Null and make it accepted by all `Value_Type.expect_*` checks. See: https://github.com/enso-org/enso/issues/6281 - Test.specify "a null column may fail typechecks that expect a concrete type" <| + group_builder.specify "a null column may fail typechecks that expect a concrete type" <| t = table_builder [["X", [1, 2, 3]]] c = t.evaluate_expression "null" (c + c) . should_fail_with Illegal_Argument @@ -147,13 +146,13 @@ spec detailed setup = t.evaluate_expression "[X] + Nothing" . to_vector . should_equal [Nothing, Nothing, Nothing] t.evaluate_expression "Nothing + [X]" . should_fail_with Illegal_Argument - Test.group prefix+"Expression Date and Time literals" <| - specify_test "should be able to add a date or time column" pending=pending_datetime expression_test-> + suite_builder.group prefix+"Expression Date and Time literals" group_builder-> + specify_test "should be able to add a date or time column" group_builder pending=pending_datetime expression_test-> expression_test "#2020-12-23#" (Date.new 2020 12 23) expression_test "#12:34#" (Time_Of_Day.new 12 34) expression_test "#12:34:56#" (Time_Of_Day.new 12 34 56) - Test.specify "should be able to add a date-time column" pending=pending_datetime <| + group_builder.specify "should be able to add a date-time column" pending=pending_datetime <| ## We cannot just test equality as the Database may change the timezone, so all we can do is check that the values are accepted and can be compared with other values in the database. @@ -161,18 +160,18 @@ spec detailed setup = c = t.evaluate_expression "#2020-12-23 12:34:56# == [X]" c.to_vector . 
should_equal [True, False] - specify_test "should be able to add a time column with sub-millisecond accuracy" pending=pending_datetime expression_test-> + specify_test "should be able to add a time column with sub-millisecond accuracy" group_builder pending=pending_datetime expression_test-> expression_test "#12:34:56.789#" (Time_Of_Day.new 12 34 56 789) expression_test "#12:34:56.000123#" (Time_Of_Day.new 12 34 56 millisecond=0 microsecond=123 nanosecond=0) if setup.is_database.not then - specify_test "should be able to add a date time column with timezone" expression_test-> + specify_test "should be able to add a date time column with timezone" group_builder expression_test-> expression_test "#2020-12-23 12:34:56Z[UTC]#" (Date_Time.new 2020 12 23 12 34 56 zone=Time_Zone.utc) expression_test "#2020-12-23 12:34:56+02:30[UTC]#" (Date_Time.new 2020 12 23 10 04 56 zone=Time_Zone.utc) expression_test "#2020-12-23 12:34:56.157+01[UTC]#" (Date_Time.new 2020 12 23 11 34 56 157 zone=Time_Zone.utc) expression_test "#2020-12-23T12:34[Europe/Warsaw]#" (Date_Time.new 2020 12 23 12 34 zone=Time_Zone.parse("Europe/Warsaw")) - Test.specify "should correctly handle timezones" pending=pending_datetime <| + group_builder.specify "should correctly handle timezones" pending=pending_datetime <| ## We cannot just test equality as the Database may change the timezone, so all we can do is check that the values are accepted and can be compared with other values in the database. @@ -182,8 +181,8 @@ spec detailed setup = c2 = t.evaluate_expression "#2010-01-02 12:34:56[Europe/Warsaw]# == [X]" c2.to_vector . 
should_equal [False, True] - Test.group prefix+"Expression Arithmetic" <| - specify_test "should be able to do basic arithmetic" expression_test-> + suite_builder.group prefix+"Expression Arithmetic" group_builder-> + specify_test "should be able to do basic arithmetic" group_builder expression_test-> expression_test "1+1" 2 expression_test "23-15" 8 expression_test "2.5*4.2" 10.5 @@ -195,13 +194,13 @@ spec detailed setup = expression_test "floor(5.3)" 5 expression_test "round(5.5)" 6 - specify_test "should be able to do basic arithmetic with order" expression_test-> + specify_test "should be able to do basic arithmetic with order" group_builder expression_test-> expression_test "1+1*2+2" 5 expression_test "23-15/3+6" 24 expression_test "52.92/4.2^2" 3 expression_test "(1+1)*2+2" 6 - specify_test "should be able to do basic arithmetic with whitespace" expression_test-> + specify_test "should be able to do basic arithmetic with whitespace" group_builder expression_test-> expression_test "1 + 1" 2 expression_test " 23 -15 " 8 expression_test "2.5* 4.2" 10.5 @@ -211,8 +210,8 @@ spec detailed setup = expression_test "1+1 * 2" 3 expression_test "1 + 1*2" 3 - Test.group prefix+"Expression Column Arithmetic" <| - specify_test "should be able to perform arithmetic on columns" expression_test-> + suite_builder.group prefix+"Expression Column Arithmetic" group_builder-> + specify_test "should be able to perform arithmetic on columns" group_builder expression_test-> expression_test "[A] + 2" [3, 4, 5, 6, 7] expression_test "[B] - 2" [-1, -0.5, 0.5, 2, 4] expression_test "[A] * 4" [4, 8, 12, 16, 20] @@ -230,7 +229,7 @@ spec detailed setup = expression_test "[A] ^ [B] * [A]" [1, 5.65685424949238, 46.7653718043597, 1024, 78125] expression_test "[A] % [B] % [A]" [0, 0.5, 0.5, 0, 0] - specify_test "should be able to perform arithmetic on columns with order" expression_test-> + specify_test "should be able to perform arithmetic on columns with order" group_builder expression_test-> 
expression_test "([A] + [B]) * 3" [6, 10.5, 16.5, 24, 33] expression_test "[A] * (4 + [B])" [5, 11, 19.5, 32, 50] expression_test "[A] * [B] + [A]" [2, 5, 10.5, 20, 35] @@ -242,8 +241,8 @@ spec detailed setup = expression_test "([A] - [B]) ^ [A]" [0, 0.25, 0.125, 0, -1] expression_test "[A] ^ ([B] - [A])" [1, 0.707106781186547, 0.577350269189626, 1, 5] - Test.group prefix+"Expression Comparison Operators" <| - specify_test "should be able to compare equality" expression_test-> + suite_builder.group prefix+"Expression Comparison Operators" group_builder-> + specify_test "should be able to compare equality" group_builder expression_test-> expression_test "2 = 1 + 1" True expression_test "2 == 1 + 1" True expression_test "[A] = 2" [False, True, False, False, False] @@ -253,7 +252,7 @@ spec detailed setup = expression_test "[A] != 2" [True, False, True, True, True] expression_test "[A] <> 2" [True, False, True, True, True] - specify_test "should be able to compare ordering" expression_test-> + specify_test "should be able to compare ordering" group_builder expression_test-> expression_test "1 > 2" False expression_test "1 < 2" True expression_test "[A] > 2" [False, False, True, True, True] @@ -261,7 +260,7 @@ spec detailed setup = expression_test "[A] < 2" [True, False, False, False, False] expression_test "[A] <= 2" [True, True, False, False, False] - specify_test "should be able to use between" expression_test-> + specify_test "should be able to use between" group_builder expression_test-> expression_test "1 + 1 BETWEEN 1 AND 3" True expression_test "1 + 1 between 2 AND 3" True expression_test "1 + 1 bETWEEN 1 AND 2" True @@ -269,7 +268,7 @@ spec detailed setup = expression_test "1 + 1 NOT BETWEEN 1 AND 3" False expression_test "[A] not between 2 AND 3" [True, False, False, True, True] - specify_test "should be able to use in" expression_test-> + specify_test "should be able to use in" group_builder expression_test-> expression_test "1 + 1 IN (2, 4, 6)" True 
expression_test "[A] IN (2, 4, 6)" [False, True, False, True, False] expression_test "1 + 1 NOT IN (2, 4, 6)" False @@ -277,7 +276,7 @@ spec detailed setup = expression_test "[A] IN (3)" [False, False, True, False, False] expression_test "[A] NOT IN (3)" [True, True, False, True, True] - specify_test "should be able to check null" expression_test-> + specify_test "should be able to check null" group_builder expression_test-> expression_test "1 IS NULL" False expression_test "1 IS NoTHing" False expression_test "Nothing IS NULL" True @@ -288,20 +287,20 @@ spec detailed setup = expression_test "[A] IS NOT NULL" [True, True, True, True, True] expression_test "[C] IS NOT NULL" [True, True, True, True, False] - specify_test "should be able to check empty" expression_test-> + specify_test "should be able to check empty" group_builder expression_test-> expression_test "'Hello World' IS EMPTY" False expression_test "'' IS EMPTY" True expression_test "'Hello World' IS NOT EMPTY" True expression_test "'' IS NOT EMPTY" False - Test.group prefix+"Expression Text Operators" <| - specify_test "should be able to concatenate text" expression_test-> + suite_builder.group prefix+"Expression Text Operators" group_builder-> + specify_test "should be able to concatenate text" group_builder expression_test-> expression_test "'Hello ' + 'World'" "Hello World" expression_test "[C] + ' World'" ["Hello World", "World World", "Hello World! 
World", " World", Nothing] expression_test "'Hello ' + [C]" ["Hello Hello", "Hello World", "Hello Hello World!", "Hello ", Nothing] expression_test "[C] + [C]" ["HelloHello", "WorldWorld", "Hello World!Hello World!", "", Nothing] - specify_test "should be able to use like" expression_test-> + specify_test "should be able to use like" group_builder expression_test-> expression_test "'Hello World' LIKE 'Hello%'" True expression_test "'Hello' LIKE 'H_llo'" True expression_test "'Hello' LIKE 'H_l%'" True @@ -311,63 +310,63 @@ spec detailed setup = expression_test "[C] LIKE 'Hello%'" [True, False, True, False, Nothing] expression_test "[C] NOT LIKE 'Hello%'" [False, True, False, True, Nothing] - Test.group prefix+"Expression Boolean Operators" <| - specify_test "should be able to AND booleans" expression_test-> + suite_builder.group prefix+"Expression Boolean Operators" group_builder-> + specify_test "should be able to AND booleans" group_builder expression_test-> expression_test "True && TRUE" True expression_test "True && Nothing" Nothing expression_test "True AND False" False expression_test "True && [Bad]] Name]" [True, False, True, False, True] expression_test "False AND [Bad]] Name]" False - specify_test "should be able to OR booleans" expression_test-> + specify_test "should be able to OR booleans" group_builder expression_test-> expression_test "True || TRUE" True expression_test "True OR False" True expression_test "False OR False" False expression_test "True OR [Bad]] Name]" True expression_test "False || [Bad]] Name]" [True, False, True, False, True] - specify_test "should be able to NOT booleans" expression_test-> + specify_test "should be able to NOT booleans" group_builder expression_test-> expression_test "!TRUE" False expression_test "Not False" True expression_test "NOT [Bad]] Name]" [False, True, False, True, False] - specify_test "should be able to use IF" expression_test-> + specify_test "should be able to use IF" group_builder expression_test-> 
expression_test "IF True THEN 1 ELSE 0" 1 expression_test "IF False THEN 'A' ELSE 'B' END" 'B' expression_test "IF [Bad]] Name] THEN [A] ELSE [B] ENDIF" [1, 1.5, 3, 4, 5] - Test.group prefix+"Function invocation" <| - specify_test "should be able to call a function with arguments" expression_test-> + suite_builder.group prefix+"Function invocation" group_builder-> + specify_test "should be able to call a function with arguments" group_builder expression_test-> expression_test "Not(True)" False expression_test "not(False)" True expression_test "iif(True, 1, 3)" 1 expression_test "iif([Bad]] Name], 2, 3)" [2, 3, 2, 3, 2] - specify_test "should be able to call a variable args function" expression_test-> + specify_test "should be able to call a variable args function" group_builder expression_test-> expression_test "min(10, 3, 8)" 3 expression_test "max([A], [B], 3)" [3, 3, 3, 4, 6] - Test.group prefix+"Expression Errors should be handled" <| + suite_builder.group prefix+"Expression Errors should be handled" group_builder-> error_tester expression fail_ctor = test_table.set expression new_name="NEW_COL" . should_fail_with Expression_Error test_table.set expression new_name="NEW_COL" . catch . 
should_be_a fail_ctor - specify_test "should fail with Syntax_Error if badly formed" expression_test=error_tester expression_test-> + specify_test "should fail with Syntax_Error if badly formed" group_builder expression_test=error_tester expression_test-> expression_test "IIF [A] THEN 1 ELSE 2" Expression_Error.Syntax_Error expression_test "A + B" Expression_Error.Syntax_Error expression_test "#2022-31-21#" Expression_Error.Syntax_Error - specify_test "should fail with Unsupported_Operation if not sufficient arguments" expression_test=error_tester expression_test-> + specify_test "should fail with Unsupported_Operation if not sufficient arguments" group_builder expression_test=error_tester expression_test-> expression_test "unknown([C])" Expression_Error.Unsupported_Operation - specify_test "should fail with Argument_Mismatch if not sufficient arguments" expression_test=error_tester expression_test-> + specify_test "should fail with Argument_Mismatch if not sufficient arguments" group_builder expression_test=error_tester expression_test-> expression_test "starts_with([C])" Expression_Error.Argument_Mismatch - specify_test "should fail with Argument_Mismatch if too many arguments" expression_test=error_tester expression_test-> + specify_test "should fail with Argument_Mismatch if too many arguments" group_builder expression_test=error_tester expression_test-> expression_test "is_empty([C], 'Hello')" Expression_Error.Argument_Mismatch - Test.group prefix+"Expression Warnings should be reported" <| - Test.specify "should report floating point equality" <| + suite_builder.group prefix+"Expression Warnings should be reported" group_builder-> + group_builder.specify "should report floating point equality" <| t1 = table_builder [["X", [1.5, 2.0, 0.0]]] r1 = t1.evaluate_expression "([X] == 2) || ([X] + 0.5 == 2)" on_problems=Problem_Behavior.Ignore @@ -387,7 +386,7 @@ spec detailed setup = Problems.expect_warning Floating_Point_Equality r4 db_pending = if setup.is_database 
then "Arithmetic error handling is currently not implemented for the Database backend." - Test.specify "should report arithmetic errors" pending=db_pending <| + group_builder.specify "should report arithmetic errors" pending=db_pending <| t1 = table_builder [["X", [1.5, 2.0, 0.0]]] action1 = t1.evaluate_expression "3 / [X]" on_problems=_ @@ -396,7 +395,7 @@ spec detailed setup = problems1 = [Arithmetic_Error.Error "Division by zero (at rows [2])."] Problems.test_problem_handling action1 problems1 tester1 - Test.specify "already existing warnings should not be escalated to errors in error handling mode (1)" pending=db_pending <| + group_builder.specify "already existing warnings should not be escalated to errors in error handling mode (1)" pending=db_pending <| t1 = table_builder [["X", [1.5, 2.0, 0.0]]] c1 = t1.evaluate_expression "3 / [X]" on_problems=Problem_Behavior.Report_Warning Problems.expect_warning Arithmetic_Error c1 @@ -420,7 +419,7 @@ spec detailed setup = # Should still keep the inherited warning from "Y". 
Problems.expect_warning Arithmetic_Error t4 - Test.specify "already existing warnings should not be escalated to errors in error handling mode (2)" <| + group_builder.specify "already existing warnings should not be escalated to errors in error handling mode (2)" <| t1 = table_builder [["X", [1.5, 2.0, 0.0]]] c1 = Warning.attach (Illegal_State.Error "FOO") (t1.evaluate_expression "[X] + 3.0") Problems.expect_warning Illegal_State c1 diff --git a/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso index b208edc20f3f..ff0d3a6599ab 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso @@ -11,24 +11,23 @@ import Standard.Table.Data.Expression.Expression_Error from Standard.Database.Errors import SQL_Error -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + -from project.Common_Table_Operations.Util import run_default_backend -main = run_default_backend spec +from project.Common_Table_Operations.Util import run_default_backend ## Currently these tests rely on filtering preserving the insertion ordering within tables. This is not necessarily guaranteed by RDBMS, so we may adapt this in the future. For now we implicitly assume the ordering is preserved, as that seems to be the case. -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder test_selection = setup.test_selection - Test.group prefix+"Table.filter" <| - Test.specify "by integer comparisons" <| + suite_builder.group prefix+"Table.filter" group_builder-> + group_builder.specify "by integer comparisons" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]] t1 = t.filter "X" (Filter_Condition.Less than=10) t1.at "ix" . to_vector . 
should_equal [2, 4] @@ -71,7 +70,7 @@ spec setup = if test_selection.is_nan_and_nothing_distinct then t.filter "X" (Filter_Condition.Is_Nan) . at "ix" . to_vector . should_equal [] - Test.specify "by float operations" <| + group_builder.specify "by float operations" <| t = table_builder [["ix", [1, 2, 3, 4, 5, 6]], ["X", [100.0, 2.5, Nothing, Number.nan, Number.positive_infinity, Number.negative_infinity]]] t.filter "X" (Filter_Condition.Less than=10.0) . at "X" . to_vector . should_equal [2.5, Number.negative_infinity] @@ -90,7 +89,7 @@ spec setup = if test_selection.is_nan_and_nothing_distinct then t.filter "X" Filter_Condition.Is_Nan . at "ix" . to_vector . should_equal [4] - Test.specify "Not_Equal test cases" pending="Specification needs clarifying, see: https://github.com/enso-org/enso/issues/5241#issuecomment-1480167927" <| + group_builder.specify "Not_Equal test cases" pending="Specification needs clarifying, see: https://github.com/enso-org/enso/issues/5241#issuecomment-1480167927" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]] t3 = t.filter "X" (Filter_Condition.Not_Equal to=100) t3 . at "X" . to_vector . should_equal [3, Nothing, 4, 12] @@ -98,7 +97,7 @@ spec setup = t.filter "X" (Filter_Condition.Not_Equal to=(t.at "Y")) . at "X" . to_vector . should_equal [3, Nothing, 4, 12] t.filter "X" (Filter_Condition.Not_Equal to=(Column_Ref.Name "Y")) . at "X" . to_vector . should_equal [3, Nothing, 4, 12] - Test.specify "by text comparisons" <| + group_builder.specify "by text comparisons" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "baca", "b", Nothing, "c"]], ["Y", ["a", "b", "b", "c", "c"]]] t1 = t.filter "X" (Filter_Condition.Less than="c") t1.at "ix" . to_vector . should_equal [1, 2, 3] @@ -132,7 +131,7 @@ spec setup = t2.filter "X" (Filter_Condition.Equal_Ignore_Case to="a") . at "X" . to_vector . 
should_equal ["A", "a"] t2.filter "X" (Filter_Condition.Equal_Ignore_Case to=(Column_Ref.Name "Y")) . at "X" . to_vector . should_equal ["A", "b"] - Test.specify "by text search (contains, starts_with, ends_with, not_contains)" <| + group_builder.specify "by text search (contains, starts_with, ends_with, not_contains)" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "bacb", "banana", Nothing, "nana"]], ["Y", ["a", "B", "d", "c", "a"]], ["Z", ["aaaaa", "bbbbb", "[ab]", "[ab]aaaa", "[ab]ccc"]]] t.filter "X" (Filter_Condition.Starts_With "ba") . at "X" . to_vector . should_equal ["bacb", "banana"] @@ -166,7 +165,7 @@ spec setup = t.filter "X" (Filter_Condition.Contains (Column_Ref.Name "Y") Case_Sensitivity.Insensitive) . at "X" . to_vector . should_equal ["abb", "bacb", "nana"] t.filter "X" (Filter_Condition.Not_Contains (Column_Ref.Name "Y") Case_Sensitivity.Insensitive) . at "X" . to_vector . should_equal ["banana"] - Test.specify "by text search (like, not_like)" <| + group_builder.specify "by text search (like, not_like)" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "bacb", "banana", Nothing, "nana"]], ["Y", ["a", "B", "d", "c", "a"]], ["Z", ["aaaaa", "bbbbb", "[ab]", "[ab]aaaa", "[ab]ccc"]]] t.filter "X" (Filter_Condition.Like "%an%") . at "X" . to_vector . should_equal ["banana", "nana"] @@ -178,7 +177,7 @@ spec setup = t.filter "X" (Filter_Condition.Not_Like "%b") . at "X" . to_vector . should_equal ["banana", "nana"] t.filter "Z" (Filter_Condition.Not_Like "[ab]%") . at "Z" . to_vector . should_equal ["aaaaa", "bbbbb"] - Test.specify "text operations should also match newlines" <| + group_builder.specify "text operations should also match newlines" <| t = table_builder [["X", ['a\n\n\n', 'a\n', 'a\n\n\nb', 'a\nb', 'caa\nbb']]] t.filter "X" (Filter_Condition.Like 'a_') . at "X" . to_vector . should_equal ['a\n'] t.filter "X" (Filter_Condition.Like 'a%') . at "X" . to_vector . 
should_equal ['a\n\n\n', 'a\n', 'a\n\n\nb', 'a\nb'] @@ -193,7 +192,7 @@ spec setup = if test_selection.supports_unicode_normalization then t = table_builder [["X", ['śnieg', 's\u0301nieg', 'X', Nothing, 'połać', 'połac\u0301']]] - Test.specify "text operations should support Unicode normalization" <| + group_builder.specify "text operations should support Unicode normalization" <| t.filter "X" (Filter_Condition.Starts_With 'ś') . at "X" . to_vector . should_equal ['śnieg', 's\u0301nieg'] t.filter "X" (Filter_Condition.Contains 'ś') . at "X" . to_vector . should_equal ['śnieg', 's\u0301nieg'] t.filter "X" (Filter_Condition.Ends_With 'ś') . at "X" . to_vector . should_equal [] @@ -204,15 +203,15 @@ spec setup = t.filter "X" (Filter_Condition.Like 'ś%') . at "X" . to_vector . should_equal ['śnieg'] # This test is split off just to mark is as pending, once resolved it can be merged with the one above. - Test.specify "text operations should support Unicode normalization (like)" pending='There is a bug with Java Regex in Unicode normalized mode (CANON_EQ) with quoting.\nhttps://bugs.java.com/bugdatabase/view_bug.do?bug_id=8032926' <| + group_builder.specify "text operations should support Unicode normalization (like)" pending='There is a bug with Java Regex in Unicode normalized mode (CANON_EQ) with quoting.\nhttps://bugs.java.com/bugdatabase/view_bug.do?bug_id=8032926' <| t.filter "X" (Filter_Condition.Like 'ś%') . at "X" . to_vector . should_equal ['śnieg', 's\u0301nieg'] - Test.specify "by empty text" <| + group_builder.specify "by empty text" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "", " ", Nothing, "nana"]]] t.filter "X" Filter_Condition.Is_Empty . at "X" . to_vector . should_equal ["", Nothing] t.filter "X" Filter_Condition.Not_Empty . at "X" . to_vector . 
should_equal ["abb", " ", "nana"] - Test.specify "should check types for text operations" <| + group_builder.specify "should check types for text operations" <| t = table_builder [["ix", [1, 2, 3, 4]], ["X", [Nothing, "A", "", " "]]] check_problem result = result.should_fail_with Invalid_Value_Type @@ -255,7 +254,7 @@ spec setup = check_scalar_type_error_handling (t.filter "X" (Filter_Condition.Not_Like 42)) check_scalar_type_error_handling (t.filter "X" (Filter_Condition.Not_Contains 42)) - Test.specify "by nulls" <| + group_builder.specify "by nulls" <| t = table_builder [["ix", [1, 2, 3, 4]], ["X", [Nothing, 1, Nothing, 4]]] t1 = t.filter "X" Filter_Condition.Is_Nothing t1.at "ix" . to_vector . should_equal [1, 3] @@ -265,7 +264,7 @@ spec setup = t2.at "ix" . to_vector . should_equal [2, 4] t2.at "X" . to_vector . should_equal [1, 4] - Test.specify "by an Is_In check" <| + group_builder.specify "by an Is_In check" <| t = table_builder [["ix", [1, 2, 3, Nothing, 5, 6]], ["X", ["a", "b", "ccc", "X", "f", "2"]]] t1 = table_builder [["txt", ["X", "a", "c", Nothing]], ["int", [Nothing, 2, 5, 4]], ["bool", [True, Nothing, Nothing, True]]] @@ -307,7 +306,7 @@ spec setup = t2.filter "B" (Filter_Condition.Is_In [False]) . at "B" . to_vector . should_equal [False, False, False] t2.filter "C" (Filter_Condition.Is_In [False, False]) . at "C" . to_vector . should_equal [False] - Test.specify "does not allow Column_Ref in Is_In/Not_In because that would be confusing" <| + group_builder.specify "does not allow Column_Ref in Is_In/Not_In because that would be confusing" <| ## Is In and Not In check if a value is contained anywhere in a provided collection (e.g. column), NOT on a row-by-row basis like all other operations. Column_Ref is used with row-by-row ops, so this would only cause confusion. 
Very rarely someone wants to filter a column by Is_In @@ -319,12 +318,12 @@ spec setup = # If the user really wants this, they pass it as a raw column: t.filter "A" (Filter_Condition.Is_In (t.at "B")) . at "A" . to_vector . should_equal [2, 3] - Test.specify "by a boolean mask" <| + group_builder.specify "by a boolean mask" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] t.filter "b" . at "ix" . to_vector . should_equal [1, 4, 5] t.filter "b" Filter_Condition.Is_False . at "ix" . to_vector . should_equal [2] - Test.specify "should correctly mask all kinds of columns" <| + group_builder.specify "should correctly mask all kinds of columns" <| t = table_builder [["ints", [1, 2, 3, Nothing, 4]], ["floats", [4.0, Nothing, 3.0, 2.0, 1.0]], ["bools", [False, False, True, Nothing, False]], ["strings", ["a", Nothing, "b", "c", "d"]], ["mask", [False, True, True, True, Nothing]]] t2 = t.filter "mask" t2.at "ints" . to_vector . should_equal [2, 3, Nothing] @@ -333,7 +332,7 @@ spec setup = t2.at "strings" . to_vector . should_equal [Nothing, "b", "c"] t2.at "mask" . to_vector . 
should_equal [True, True, True] - Test.specify "should check types of boolean operations" <| + group_builder.specify "should check types of boolean operations" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] check_problem result = result.should_fail_with Invalid_Value_Type @@ -342,7 +341,7 @@ spec setup = check_problem <| t.filter "ix" Filter_Condition.Is_True check_problem <| t.filter "ix" Filter_Condition.Is_False - Test.specify "should check types of numeric operations" <| + group_builder.specify "should check types of numeric operations" <| t = table_builder [["a", ["a", "b"]]] check_problem result = result.should_fail_with Invalid_Value_Type @@ -354,32 +353,32 @@ spec setup = if test_selection.is_nan_and_nothing_distinct then check_problem <| t.filter "a" Filter_Condition.Is_Nan - Test.specify "by a custom expression built from table's columns" <| + group_builder.specify "by a custom expression built from table's columns" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10, 20, 13, 4, 5]], ["Y", [0, -100, 8, 2, 5]]] t.filter (t.at "X" + t.at "Y" > 9) . at "ix" . to_vector . should_equal [1, 3, 5] - Test.specify "should handle selection errors: unknown column name" <| + group_builder.specify "should handle selection errors: unknown column name" <| t = table_builder [["X", [10, 20, 13, 4, 5]]] t.filter "unknown column" . should_fail_with No_Such_Column t.filter "unknown column" . catch . should_equal (No_Such_Column.Error "unknown column") - Test.specify "should handle selection errors: out of bounds index" <| + group_builder.specify "should handle selection errors: out of bounds index" <| t = table_builder [["X", [10, 20, 13, 4, 5]]] t.filter 4 . should_fail_with Index_Out_Of_Bounds t.filter 4 . catch . 
should_equal (Index_Out_Of_Bounds.Error 4 1) - Test.specify "should handle illegal arguments" <| + group_builder.specify "should handle illegal arguments" <| t = table_builder [["X", [10, 20, 13, 4, 5]]] Test.expect_panic_with (t.filter "X" "NOT A CONDITION") Type_Error - Test.specify "should nicely handle Filter_Condition with unapplied arguments" <| + group_builder.specify "should nicely handle Filter_Condition with unapplied arguments" <| t = table_builder [["X", [10, 20, 13, 4, 5]]] t.filter "X" (Filter_Condition.Equal) . should_fail_with Illegal_Argument t.filter "X" (Filter_Condition.Starts_With) . should_fail_with Illegal_Argument t.filter "X" (Filter_Condition.Between) . should_fail_with Illegal_Argument t.filter "X" (Filter_Condition.Between 1) . should_fail_with Illegal_Argument - Test.specify "should report issues: floating point equality" <| + group_builder.specify "should report issues: floating point equality" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, 2.0]]] r1 = t.filter "X" (Filter_Condition.Equal 2) on_problems=Problem_Behavior.Ignore @@ -397,7 +396,7 @@ spec setup = r4.at "ix" . to_vector . 
should_equal [1, 2, 4] Problems.expect_warning Floating_Point_Equality r4 - Test.specify "already existing warnings should not be escalated to errors in error handling mode" <| + group_builder.specify "already existing warnings should not be escalated to errors in error handling mode" <| t1 = table_builder [["X", [1.5, 2.0, 0.0]], ["ix", [1, 2, 3]]] c1 = Warning.attach (Illegal_State.Error "FOO") (t1.evaluate_expression "3.0 + [X]") Problems.expect_warning Illegal_State c1 @@ -431,18 +430,18 @@ spec setup = err2 = t2.filter "Y" (Filter_Condition.Not_Equal 5) on_problems=Problem_Behavior.Report_Error err2.should_fail_with Floating_Point_Equality - Test.group prefix+"Table.filter_by_expression" <| - Test.specify "by a boolean column" <| + suite_builder.group prefix+"Table.filter_by_expression" group_builder-> + group_builder.specify "by a boolean column" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] t.filter_by_expression "[b]" . at "ix" . to_vector . should_equal [1, 4, 5] t.filter_by_expression "![b]" . at "ix" . to_vector . should_equal [2] - Test.specify "by an integer comparison" <| + group_builder.specify "by an integer comparison" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] t.filter_by_expression "[ix]==3" . at "ix" . to_vector . should_equal [3] t.filter_by_expression "[ix]>2" . at "ix" . to_vector . should_equal [3, 4, 5] - Test.specify "fail gracefully" <| + group_builder.specify "fail gracefully" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] t.filter_by_expression "[ix" . should_fail_with Expression_Error t.filter_by_expression "[ix" . catch . should_be_a Expression_Error.Syntax_Error @@ -465,7 +464,7 @@ spec setup = t.filter_by_expression "is_empty('', 42)" . should_fail_with Expression_Error t.filter_by_expression "is_empty('', 42)" . catch . 
should_be_a Expression_Error.Argument_Mismatch - Test.specify "should report issues: floating point equality" <| + group_builder.specify "should report issues: floating point equality" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, -2.0]]] r1 = t.filter_by_expression "[X] * [X] == 4.0" on_problems=Problem_Behavior.Ignore Problems.assume_no_problems r1 @@ -484,7 +483,7 @@ spec setup = Problems.expect_warning Floating_Point_Equality r4 db_pending = if setup.is_database then "Arithmetic error handling is currently not implemented for the Database backend." - Test.specify "should report issues: arithmetic error" pending=db_pending <| + group_builder.specify "should report issues: arithmetic error" pending=db_pending <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [2.0, 2.0, 0.0, 1.0, 2.0]]] action = t.filter_by_expression "8.0 / [X] <= 4.0" on_problems=_ tester table = diff --git a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso index 7d4f103587a9..b32a2d196943 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso @@ -4,19 +4,18 @@ from Standard.Base import all from Standard.Table import all hiding Table from Standard.Table.Data.Aggregate_Column.Aggregate_Column import Group_By, Count, Sum -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + + from project.Common_Table_Operations.Util import run_default_backend from project.Common_Table_Operations.Core_Spec import weird_names -main = run_default_backend spec - -spec setup = +add_specs suite_builder setup = table_builder = setup.table_builder materialize = setup.materialize - Test.group setup.prefix+" Interactions Between various operations" <| - Test.specify "using weird column names with operations and filtering" <| + suite_builder.group 
setup.prefix+" Interactions Between various operations" group_builder-> + group_builder.specify "using weird column names with operations and filtering" <| columns = weird_names.map_with_index ix-> name-> [name, [100+ix, 200+ix, 300+ix]] table = table_builder columns @@ -31,7 +30,7 @@ spec setup = weird_names.map_with_index ix-> name-> result.at name . to_vector . should_equal [200+ix] - Test.specify "aggregates and joins" <| + group_builder.specify "aggregates and joins" <| t1 = table_builder [["Count", [1, 2, 3]], ["Class", ["X", "Y", "Z"]]] t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "A", "C", "D", "D", "B", "B"]]] @@ -44,7 +43,7 @@ spec setup = rows.at 2 . should_equal ["C", 2, 2, "Y"] rows.at 3 . should_equal ["D", 2, 2, "Y"] - Test.specify "aggregates and distinct" <| + group_builder.specify "aggregates and distinct" <| t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C"]], ["Points", [2, 5, 2, 1, 10, 3]]] t3 = t2.aggregate [Group_By "Letter", Sum "Points"] @@ -59,7 +58,7 @@ spec setup = a_or_b . should_be_true rows.at 1 . should_equal ["C", 13] - Test.specify "aggregates and filtering" <| + group_builder.specify "aggregates and filtering" <| t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C", "B"]], ["Points", [2, 5, 2, 1, 10, 3, 0]]] t3 = t2.aggregate [Group_By "Letter", Sum "Points"] @@ -69,7 +68,7 @@ spec setup = rows.at 0 . should_equal ["A", 5] rows.at 1 . should_equal ["B", 5] - Test.specify "aggregates and ordering" <| + group_builder.specify "aggregates and ordering" <| t1 = table_builder [["Letter", ["C", "A", "B", "A", "A", "C", "C", "B"]], ["Points", [0, -100, 5, 2, 1, 10, 3, 0]]] t2 = t1.aggregate [Group_By "Letter", Sum "Points"] t3 = t2.order_by "Sum Points" |> materialize @@ -77,7 +76,7 @@ spec setup = t3.at "Letter" . to_vector . should_equal ["A", "B", "C"] t3.at "Sum Points" . to_vector . 
should_equal [-97, 5, 13] - Test.specify "distinct and ordering" <| + group_builder.specify "distinct and ordering" <| t1 = table_builder [["X", [1, 2, 2, 1]], ["Y", ["a", "b", "b", "a"]], ["Z", [1, 2, 3, 4]]] # These are 'adversarial' white-box examples constructed knowing that Postgres' DISTINCT ON does not play too well with ORDER BY and it needs to be handled carefully. @@ -89,7 +88,7 @@ spec setup = t4.row_count . should_equal 2 if setup.test_selection.distinct_returns_first_row_from_group_if_ordered then - Test.specify "distinct and ordering if first row is returned after ordering" <| + group_builder.specify "distinct and ordering if first row is returned after ordering" <| a = ["A", ["a", "a", "a", "a", "a", "a"]] b = ["B", [1, 1, 2, 2, 1, 2]] c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]] @@ -112,7 +111,7 @@ spec setup = It is easier to test this if we can rely on distinct returning the first row, if it is returning any row, it is harder to write tests that distinguish the two cases (filter before and after). - Test.specify "distinct and filtering" <| + group_builder.specify "distinct and filtering" <| a = ["A", ["a", "a", "b", "a", "b"]] b = ["B", [1, 2, 5, 5, 2]] c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5]] @@ -136,7 +135,7 @@ spec setup = ## This mostly checks that various operations handle all kinds of Integer storage implementations (add_row_number may use a different storage than regular columns) - if setup.is_database.not then Test.specify "add_row_number and other operations" <| + if setup.is_database.not then group_builder.specify "add_row_number and other operations" <| t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]], ["Z", [0.25, 0.5, 0.75]]] t2 = table_builder [["X", ["ddd", "eee", "fff"]]] @@ -165,7 +164,7 @@ spec setup = r123.at "Row" . to_vector . 
should_equal [1, 2, 3, 1, 2, 3, 1.5, 2.5, 3.5] if setup.test_selection.fixed_length_text_columns then - Test.specify "types of unioned fixed-length columns should be correctly inferred after passing through other operations that infer types from Database, like aggregate Shortest" <| + group_builder.specify "types of unioned fixed-length columns should be correctly inferred after passing through other operations that infer types from Database, like aggregate Shortest" <| t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 0, 2]]] . cast "X" (Value_Type.Char 1 False) t2 = table_builder [["X", ["ddd", "eee", "fff"]], ["Y", [0, 1, 0]]] . cast "X" (Value_Type.Char 3 False) t3 = t1.union t2 @@ -184,7 +183,7 @@ spec setup = t5.at "Y" . to_vector . should_equal [0, 1, 2] t5.at "Shortest X" . to_vector . should_equal ["b", "a", "c"] - Test.specify "types should be correctly preserved after aggregation after iif" <| + group_builder.specify "types should be correctly preserved after aggregation after iif" <| t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]] t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . 
cast "B" (Value_Type.Char size=3 variable_length=False) diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso index 08280a532163..b1b4fc1b58ec 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso @@ -10,19 +10,17 @@ import Standard.Table.Data.Join_Kind_Cross.Join_Kind_Cross from Standard.Database import all from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all from project.Common_Table_Operations.Util import expect_column_names, run_default_backend -main = run_default_backend spec -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder materialize = setup.materialize - Test.group prefix+"Table.cross_join" <| - Test.specify "should allow to create a cross product of two tables in the right order" <| + suite_builder.group prefix+"Table.cross_join" group_builder-> + group_builder.specify "should allow to create a cross product of two tables in the right order" <| t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]] @@ -42,7 +40,7 @@ spec setup = True -> r.should_contain_the_same_elements_as expected_rows False -> r.should_equal expected_rows - Test.specify "should work correctly with empty tables" <| + group_builder.specify "should work correctly with empty tables" <| t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] t2 = table_builder [["Z", ['a']], ["W", ['c']]] # Workaround to easily create empty table until table builder allows that directly. @@ -59,7 +57,7 @@ spec setup = t4.row_count.should_equal 0 t4.at "X" . to_vector . 
should_equal [] - Test.specify "should respect the right row limit" <| + group_builder.specify "should respect the right row limit" <| t2 = table_builder [["X", [1, 2]]] t3 = table_builder [["X", [1, 2, 3]]] t100 = table_builder [["Y", 0.up_to 100 . to_vector]] @@ -76,7 +74,7 @@ spec setup = t2.cross_join t101 right_row_limit=Nothing . row_count . should_equal 202 t2.cross_join t3 right_row_limit=2 on_problems=Problem_Behavior.Report_Error . should_fail_with Cross_Join_Row_Limit_Exceeded - Test.specify "should ensure 1-1 mapping even with duplicate rows" <| + group_builder.specify "should ensure 1-1 mapping even with duplicate rows" <| t1 = table_builder [["X", [2, 1, 2, 2]], ["Y", [5, 4, 5, 5]]] t2 = table_builder [["Z", ['a', 'b', 'a', 'b']]] @@ -94,7 +92,7 @@ spec setup = True -> r.should_contain_the_same_elements_as expected_rows False -> r.should_equal expected_rows - Test.specify "should allow self-joins" <| + group_builder.specify "should allow self-joins" <| t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] t2 = t1.cross_join t1 @@ -111,7 +109,7 @@ spec setup = True -> r.should_contain_the_same_elements_as expected_rows False -> r.should_equal expected_rows - Test.specify "should rename columns of the right table to avoid duplicates" <| + group_builder.specify "should rename columns of the right table to avoid duplicates" <| t1 = table_builder [["X", [1]], ["Y", [5]], ["Right Y", [10]]] t2 = table_builder [["X", ['a']], ["Y", ['d']]] @@ -133,7 +131,7 @@ spec setup = expect_column_names ["X", "Y", "Right Y", "Right X 1", "Right X"] (t1.cross_join t4) expect_column_names ["X", "Right X", "Right X 1", "Y", "Right Y"] (t4.cross_join t1) - Test.specify "should respect the column ordering" <| + group_builder.specify "should respect the column ordering" <| t1 = table_builder [["X", [100, 2]], ["Y", [4, 5]]] t2 = table_builder [["Z", ['a', 'b', 'c']], ["W", ['x', 'd', 'd']]] @@ -157,12 +155,12 @@ spec setup = True -> r.should_contain_the_same_elements_as 
expected_rows False -> r.should_equal expected_rows - Test.specify "Cross join is not possible via call to .join" <| + group_builder.specify "Cross join is not possible via call to .join" <| t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]] Test.expect_panic_with (t1.join t2 join_kind=Join_Kind_Cross.Cross on=[]) Type_Error - Test.specify "should gracefully handle tables from different backends" <| + group_builder.specify "should gracefully handle tables from different backends" <| t1 = table_builder [["A", ["a", "b"]]] alternative_connection = Database.connect (SQLite In_Memory) t0 = (Table.new [["X", [1, 2, 4]], ["Z", [10, 20, 30]]]).select_into_database_table alternative_connection "T0" temporary=True diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso index 006ec3d395d4..c2e18d3c6e06 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso @@ -8,8 +8,7 @@ from Standard.Table.Errors import all from Standard.Database import all from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all from project.Common_Table_Operations.Util import expect_column_names, run_default_backend, within_table @@ -25,18 +24,16 @@ type My_Type_Comparator Comparable.from (_:My_Type) = My_Type_Comparator -main = run_default_backend spec - -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder materialize = setup.materialize db_todo = if setup.is_database.not then Nothing else "ToDo: handling NULLs in equality conditions." 
- Test.group prefix+"Table.join" <| + suite_builder.group prefix+"Table.join" group_builder-> t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t2 = table_builder [["Z", [2, 3, 2, 4]], ["W", [4, 5, 6, 7]]] - Test.specify "should by default do a Left Outer join on equality of first column in the left table, correlated with column of the same name in the right one" <| + group_builder.specify "should by default do a Left Outer join on equality of first column in the left table, correlated with column of the same name in the right one" <| t3 = table_builder [["Z", [4, 5, 6, 7]], ["X", [2, 3, 2, 4]]] t4 = t1.join t3 |> materialize |> _.order_by ["X", "Z"] expect_column_names ["X", "Y", "Z", "Right X"] t4 @@ -45,7 +42,7 @@ spec setup = t4.at "Right X" . to_vector . should_equal [Nothing, 2, 2, 3] t4.at "Z" . to_vector . should_equal [Nothing, 4, 6, 5] - Test.specify "should allow Inner join" <| + group_builder.specify "should allow Inner join" <| t3 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals 0 0) expect_column_names ["X", "Y", "Z", "W"] t3 t4 = t3 |> materialize |> _.order_by ["X", "W"] @@ -54,7 +51,7 @@ spec setup = t4.at "Y" . to_vector . should_equal [5, 5, 6] t4.at "W" . to_vector . should_equal [4, 6, 5] - Test.specify "should allow Full join" <| + group_builder.specify "should allow Full join" <| t3 = t1.join t2 join_kind=Join_Kind.Full on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X", "W"] expect_column_names ["X", "Y", "Z", "W"] t3 t3.at "X" . to_vector . should_equal [Nothing, 1, 2, 2, 3] @@ -62,7 +59,7 @@ spec setup = t3.at "Z" . to_vector . should_equal [4, Nothing, 2, 2, 3] t3.at "W" . to_vector . should_equal [7, Nothing, 4, 6, 5] - Test.specify "should allow Right Outer join" <| + group_builder.specify "should allow Right Outer join" <| t5 = t1.join t2 join_kind=Join_Kind.Right_Outer on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X", "W"] expect_column_names ["X", "Y", "Z", "W"] t5 t5.at "X" . 
to_vector . should_equal [Nothing, 2, 2, 3] @@ -70,7 +67,7 @@ spec setup = t5.at "Z" . to_vector . should_equal [4, 2, 2, 3] t5.at "W" . to_vector . should_equal [7, 4, 6, 5] - Test.specify "should allow to perform anti-joins" <| + group_builder.specify "should allow to perform anti-joins" <| t6 = t1.join t2 join_kind=Join_Kind.Left_Exclusive on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X"] t6.columns.map .name . should_equal ["X", "Y"] t6.at "X" . to_vector . should_equal [1] @@ -90,7 +87,7 @@ spec setup = r.at "Z" . to_vector . should_equal [2, 2, 3, 3, 4, 5] r.at "Right Z" . to_vector . should_equal [1, 2, 1, 2, 4, 4] - Test.specify "should allow to join on equality of multiple columns and drop redundant columns if Inner join" <| + group_builder.specify "should allow to join on equality of multiple columns and drop redundant columns if Inner join" <| conditions = [Join_Condition.Equals "Y" "Y", Join_Condition.Equals "X" "X"] r = t3.join t4 join_kind=Join_Kind.Inner on=conditions |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] check_xy_joined r @@ -99,11 +96,11 @@ spec setup = r2 = t3.join t4 join_kind=kind on=conditions expect_column_names ["X", "Y", "Z", "Right X", "Right Y", "Right Z"] r2 - Test.specify "should support same-name column join shorthand" <| + group_builder.specify "should support same-name column join shorthand" <| r = t3.join t4 join_kind=Join_Kind.Inner on=["X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] check_xy_joined r - Test.specify "should correctly handle duplicated rows in Equals" <| + group_builder.specify "should correctly handle duplicated rows in Equals" <| t1 = table_builder [["X", [1, 2, 2, 3]]] t2 = table_builder [["X", [1, 2, 2, 4]]] r1 = t1.join t2 join_kind=Join_Kind.Full on="X" . order_by "X" @@ -112,7 +109,7 @@ spec setup = r1.at "X" . to_vector . should_equal [Nothing, 1, 2, 2, 2, 2, 3] r1.at "Right X" . to_vector . 
should_equal [4, 1, 2, 2, 2, 2, Nothing] - Test.specify "should allow to join on text equality ignoring case" <| + group_builder.specify "should allow to join on text equality ignoring case" <| t1 = table_builder [["X", ["a", "B"]], ["Y", [1, 2]]] t2 = table_builder [["X", ["A", "a", "b"]], ["Z", [1, 2, 3]]] @@ -130,7 +127,7 @@ spec setup = r2 . at "Z" . to_vector . should_equal [1, 2, 3] if setup.test_selection.supports_unicode_normalization then - Test.specify "should correctly handle Unicode equality" <| + group_builder.specify "should correctly handle Unicode equality" <| t1 = table_builder [["X", ['s\u0301', 'S\u0301']], ["Y", [1, 2]]] t2 = table_builder [["X", ['s', 'S', 'ś']], ["Z", [1, 2, 3]]] @@ -148,7 +145,7 @@ spec setup = r2 . at "Z" . to_vector . should_equal [3, 3] # This may need a test_selection toggle in the future, depending on how well databases like coercing decimals and integers. - Test.specify "should correctly handle Enso Float-Integer equality" <| + group_builder.specify "should correctly handle Enso Float-Integer equality" <| t1 = table_builder [["X", [1, 2]], ["Y", [10, 20]]] t2 = table_builder [["X", [2.0, 2.1, 0.0]], ["Z", [1, 2, 3]]] @@ -159,7 +156,7 @@ spec setup = r1 . at "Z" . to_vector . should_equal [1] if setup.supports_custom_objects then - Test.specify "should allow equality joins for custom objects" <| + group_builder.specify "should allow equality joins for custom objects" <| t1 = table_builder [["X", [My_Type.Value 1 2, My_Type.Value 2 3]], ["Y", [1, 2]]] t2 = table_builder [["X", [My_Type.Value 5 0, My_Type.Value 2 1]], ["Z", [10, 20]]] @@ -174,7 +171,7 @@ spec setup = r1 . at "Y" . to_vector . should_equal [1, 2] r1 . at "Z" . to_vector . 
should_equal [20, 10] - Test.specify "should allow range-based joins (using Between) for numbers" <| + group_builder.specify "should allow range-based joins (using Between) for numbers" <| t1 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]] t2 = table_builder [["lower", [1, 10, 8, 12]], ["upper", [1, 12, 30, 0]], ["Z", [1, 2, 3, 4]]] @@ -186,7 +183,7 @@ spec setup = r1 . at "upper" . to_vector . should_equal [1, 12, 30, 12, 30] r1 . at "Z" . to_vector . should_equal [1, 2, 3, 2, 3] - Test.specify "should allow range-based joins (using Between) for text" <| + group_builder.specify "should allow range-based joins (using Between) for text" <| t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]]] t2 = table_builder [["lower", ["a", "b"]], ["upper", ["a", "ccc"]], ["Z", [10, 20]]] @@ -198,7 +195,7 @@ spec setup = r1 . at "upper" . to_vector . should_equal ["a", "ccc", "ccc"] r1 . at "Z" . to_vector . should_equal [10, 20, 20] - Test.specify "should correctly handle Between edge cases (1)" pending=(if prefix.contains "PostgreSQL" then "TODO: fix issue #8243") <| + group_builder.specify "should correctly handle Between edge cases (1)" pending=(if prefix.contains "PostgreSQL" then "TODO: fix issue #8243") <| # 1. multiple rows with the same key value on the left side # 2. fully duplicated rows (1, 7) on the left side # 3. empty bounds (lower > upper: 10 > 0) @@ -221,7 +218,7 @@ spec setup = rows.at 6 . should_equal [10, 2, 10, 10, 'b'] rows.at 7 . should_equal [Nothing, Nothing, 10, 0, 'c'] - Test.specify "should correctly handle Between edge cases (2)" <| + group_builder.specify "should correctly handle Between edge cases (2)" <| # 6. multiple Between conditions xs = [0, 0, 1, 1, 1, 1, 2, 2, 3, 3, 3, 4, 4, 4] ys = [1, 2, 3, 1, 9, 2, 3, 2, 4, 2, 1, 1, 1, 2] @@ -240,7 +237,7 @@ spec setup = r3.at "X" . to_vector . should_equal [2, 3, 3] r3.at "Y" . to_vector . 
should_equal [2, 1, 2] - Test.specify "should correctly handle Between edge cases (3)" <| + group_builder.specify "should correctly handle Between edge cases (3)" <| # 7. duplicated rows on both sides t1 = table_builder [["X", [10, 20, 20]]] t2 = table_builder [["low", [15, 15]], ["high", [30, 30]]] @@ -257,7 +254,7 @@ spec setup = r2.at "X" . to_vector . should_equal [10] if setup.test_selection.supports_unicode_normalization then - Test.specify "should allow range-based joins (using Between) for text with Unicode normalization" <| + group_builder.specify "should allow range-based joins (using Between) for text with Unicode normalization" <| t1 = table_builder [["X", ['s\u0301', 's']], ["Y", [1, 2]]] t2 = table_builder [["lower", ['s', 'ś']], ["upper", ['sa', 'ś']], ["Z", [10, 20]]] @@ -270,7 +267,7 @@ spec setup = r1 . at "Z" . to_vector . should_equal [20, 10] if setup.supports_custom_objects then - Test.specify "should allow range-based joins (using Between) for custom objects" <| + group_builder.specify "should allow range-based joins (using Between) for custom objects" <| t1 = table_builder [["X", [My_Type.Value 20 30, My_Type.Value 1 2]], ["Y", [1, 2]]] t2 = table_builder [["lower", [My_Type.Value 3 0, My_Type.Value 10 10]], ["upper", [My_Type.Value 2 1, My_Type.Value 100 0]], ["Z", [10, 20]]] @@ -282,7 +279,7 @@ spec setup = r1 . at "upper" . to_vector . to_text . should_equal "[(My_Type.Value 2 1), (My_Type.Value 100 0)]" r1 . at "Z" . to_vector . should_equal [10, 20] - Test.specify "should allow to mix join conditions of various kinds" <| + group_builder.specify "should allow to mix join conditions of various kinds" <| t1 = table_builder [["X", [1, 12, 12, 0]], ["Y", [1, 2, 3, 4]], ["Z", ["a", "A", "a", "ą"]], ["W", [1, 2, 3, 4]]] t2 = table_builder [["X", [12, 12, 1]], ["l", [0, 100, 100]], ["u", [10, 100, 200]], ["Z", ["A", "A", "A"]], ["W'", [10, 20, 30]]] @@ -316,7 +313,7 @@ spec setup = r3.at "Z" . to_vector . should_equal ["A", "A"] r3.at "W'" . 
to_vector . should_equal [20, 30] - Test.specify "should work fine if the same condition is specified multiple times" <| + group_builder.specify "should work fine if the same condition is specified multiple times" <| r = t3.join t4 join_kind=Join_Kind.Inner on=["X", "X", "Y", "X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] check_xy_joined r @@ -336,7 +333,7 @@ spec setup = r2 . at "Right X" . to_vector . should_equal ["A", "a", "b"] r2 . at "Z" . to_vector . should_equal [1, 2, 3] - Test.specify "should correctly handle joining a table with itself" <| + group_builder.specify "should correctly handle joining a table with itself" <| t1 = table_builder [["X", [0, 1, 2, 3, 2]], ["Y", [1, 2, 3, 4, 100]], ["A", ["B", "C", "D", "E", "X"]]] t2 = t1.join t1 join_kind=Join_Kind.Inner on=(Join_Condition.Equals left="X" right="Y") |> materialize |> _.order_by ["X", "Y"] @@ -371,7 +368,7 @@ spec setup = # t5.at "Right Y" . to_vector . should_equal ["ą", Nothing] # t5.at "Right Z" . to_vector . should_equal [1, 3] - Test.specify "should gracefully handle unmatched columns in Join_Conditions" <| + group_builder.specify "should gracefully handle unmatched columns in Join_Conditions" <| t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]] t2 = table_builder [["Z", [2, 1]], ["W", [5, 6]]] @@ -393,7 +390,7 @@ spec setup = r3.catch.criteria.should_equal ["baz", 42, -3] r3.catch.to_display_text.should_equal "The criteria 'baz', 42 (index), -3 (index) did not match any columns in the right table." 
- Test.specify "should report Invalid_Value_Type if non-text columns are provided to Equals_Ignore_Case" <| + group_builder.specify "should report Invalid_Value_Type if non-text columns are provided to Equals_Ignore_Case" <| t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]] t2 = table_builder [["Z", ["1", "2", "c"]], ["W", [1, 2, 3]]] @@ -406,14 +403,14 @@ spec setup = test <| t1.join t2 on=(Join_Condition.Equals_Ignore_Case "Y" "Z") on_problems=Problem_Behavior.Ignore - Test.specify "should report Invalid_Value_Type if incompatible types are correlated" <| + group_builder.specify "should report Invalid_Value_Type if incompatible types are correlated" <| t1 = table_builder [["X", ["1", "2", "c"]]] t2 = table_builder [["X", [1, 2, 3]]] r1 = t1.join t2 on_problems=Problem_Behavior.Ignore r1.should_fail_with Invalid_Value_Type - Test.specify "should report Invalid_Value_Type if incompatible columns types are correlated in Between" <| + group_builder.specify "should report Invalid_Value_Type if incompatible columns types are correlated in Between" <| t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]] t2 = table_builder [["Z", ["1", "2", "c"]], ["W", [1, 2, 3]]] @@ -421,7 +418,7 @@ spec setup = t1.join t2 on=(Join_Condition.Between "Y" "W" "Z") . should_fail_with Invalid_Value_Type t1.join t2 on=(Join_Condition.Between "Y" "Z" "W") . should_fail_with Invalid_Value_Type - Test.specify "should warn when joining on equality of Float columns" <| + group_builder.specify "should warn when joining on equality of Float columns" <| t1 = table_builder [["X", [1.5, 2.0, 2.00000000001]], ["Y", [10, 20, 30]]] t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]] @@ -464,7 +461,7 @@ spec setup = expected_problems = [Floating_Point_Equality.Error "Z", Floating_Point_Equality.Error "X"] Problems.get_attached_warnings r3 . 
should_contain_the_same_elements_as expected_problems - Test.specify "should correctly handle nulls in equality conditions" pending=db_todo <| + group_builder.specify "should correctly handle nulls in equality conditions" pending=db_todo <| t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]] t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]] @@ -474,7 +471,7 @@ spec setup = r1.at "Y" . to_vector . should_equal [1, 1, 2, 3, 3] r1.at "Z" . to_vector . should_equal [20, 30, 10, 20, 30] - Test.specify "should correctly handle nulls in case-insensitive equality conditions" pending=db_todo <| + group_builder.specify "should correctly handle nulls in case-insensitive equality conditions" pending=db_todo <| t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]] t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]] @@ -485,7 +482,7 @@ spec setup = r1.at "Y" . to_vector . should_equal [0, 1, 1, 2, 3, 3] r1.at "Z" . to_vector . should_equal [10, 20, 30, 10, 20, 30] - Test.specify "should correctly handle nulls in Between conditions" <| + group_builder.specify "should correctly handle nulls in Between conditions" <| t1 = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [0, 1, 2, 3]]] t2 = table_builder [["l", [Nothing, 0, 1]], ["u", [100, 10, Nothing]], ["Z", [10, 20, 30]]] @@ -497,7 +494,7 @@ spec setup = r1.at "u" . to_vector . should_equal [10, 10] r1.at "Z" . to_vector . 
should_equal [20, 20] - Test.specify "should rename columns of the right table to avoid duplicates" <| + group_builder.specify "should rename columns of the right table to avoid duplicates" <| t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]], ["Right Y", [5, 6]]] t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]]] @@ -533,7 +530,7 @@ spec setup = t8 = t1.join t2 join_kind=Join_Kind.Inner right_prefix="P" t8.column_names.should_equal ["X", "Y", "Right Y", "PY"] - Test.specify "should warn about renamed columns" <| + group_builder.specify "should warn about renamed columns" <| t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]] t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]], ["Right Y", [2, 44]]] @@ -549,12 +546,12 @@ spec setup = problems2 = [Duplicate_Output_Column_Names.Error ["Right Y"]] Problems.test_problem_handling action2 problems2 tester2 - Test.specify "should pass dataflow errors through" <| + group_builder.specify "should pass dataflow errors through" <| error = Error.throw (Illegal_State.Error "FOO") t1.join error . should_fail_with Illegal_State t1.join t2 on=[error, "X"] . should_fail_with Illegal_State - Test.specify "should correctly handle all null rows" pending=db_todo <| + group_builder.specify "should correctly handle all null rows" pending=db_todo <| t1 = table_builder [["A", [Nothing, 2, Nothing, 1]], ["B", [Nothing, 3, 4, 7]]] t2 = table_builder [["C", [Nothing, 2, Nothing, 4]], ["D", [Nothing, 5, 6, Nothing]]] @@ -661,7 +658,7 @@ spec setup = r12.length . should_equal 1 r12.at 0 . should_equal [3, 5] - Test.specify "should work correctly when composing multiple join operations" <| + group_builder.specify "should work correctly when composing multiple join operations" <| ta = table_builder [["id", [0, 1]], ["name", ["Foo", "X"]]] tb = table_builder [["id", [2, 0]], ["name", ["Bar", "Y"]]] tc = table_builder [["id_a", [0, 1]], ["id_b", [0, 2]]] @@ -673,7 +670,7 @@ spec setup = r.at 0 . should_equal ["Foo", "Y"] r.at 1 . 
should_equal ["X", "Bar"] - Test.specify "should work correctly when the join is performed on a transformed table" <| + group_builder.specify "should work correctly when the join is performed on a transformed table" <| t1 = table_builder [["X", [1, 2, 3]]] t1_2 = t1.set "10*[X]+1" new_name="A" @@ -696,7 +693,7 @@ spec setup = r2.at 0 . should_equal [2, 20, 5, 5, 100] r2.at 1 . should_equal [3, 30, 7, 7, 200] - Test.specify "should allow full joins with more complex join conditions" <| + group_builder.specify "should allow full joins with more complex join conditions" <| t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [10, 20, 30]]] t2 = table_builder [["X", ["Ć", "A", "b"]], ["Z", [100, 200, 300]]] @@ -746,7 +743,7 @@ spec setup = r3.at 2 . should_equal [2, 20, 5, 2, 20, 5] r3.at 3 . should_equal [3, 30, 7, Nothing, Nothing, Nothing] - Test.specify "should gracefully handle tables from different backends" <| + group_builder.specify "should gracefully handle tables from different backends" <| alternative_connection = Database.connect (SQLite In_Memory) t0 = (Table.new [["X", [1, 2, 4]], ["Z", [10, 20, 30]]]).select_into_database_table alternative_connection "T0" temporary=True diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso index 0aab6d8b8141..64138696fd70 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso @@ -7,20 +7,17 @@ from Standard.Table.Errors import all from Standard.Database import all from Standard.Database.Errors import Unsupported_Database_Operation, Invariant_Violation, Integrity_Error -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend, within_table import project.Util -main = run_default_backend spec - -spec setup = 
+add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder materialize = setup.materialize - Test.group prefix+"Table.merge" <| - Test.specify "should allow to simply update columns based on a lookup table" <| + suite_builder.group prefix+"Table.merge" group_builder-> + group_builder.specify "should allow to simply update columns based on a lookup table" <| lookup = table_builder [["Y", ["A", "B", "A"]], ["X", [1, 2, 3]]] my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] @@ -31,7 +28,7 @@ spec setup = m2.at "X" . to_vector . should_equal [1, 2, 3, 2] m2.at "Y" . to_vector . should_equal ["A", "B", "A", "B"] - Test.specify "should allow to add new columns from a lookup table" <| + group_builder.specify "should allow to add new columns from a lookup table" <| lookup = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]]] my_table = table_builder [["id", [1, 2, 3, 4]], ["code", ["a", "c", "c", "b"]], ["hmm", [10, 20, 30, 40]]] @@ -52,7 +49,7 @@ spec setup = m3.at "is_X" . to_vector . should_equal [True, True, False, True] m3.at "X" . to_vector . should_equal ["Yes", "Yes", "No", "Yes"] - Test.specify "will warn if extra columns are unexpected (add_new_columns=False) (default)" <| + group_builder.specify "will warn if extra columns are unexpected (add_new_columns=False) (default)" <| lookup = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]]] my_table = table_builder [["id", [1, 2, 3, 4]], ["code", ["a", "c", "c", "b"]], ["hmm", [10, 20, 30, 40]]] @@ -82,7 +79,7 @@ spec setup = w3 = Problems.expect_only_warning Unexpected_Extra_Columns t3 w3.columns . 
should_equal ["status"] - Test.specify "will only update rows that are matched and skip others (default - allow_unmatched_rows=True)" <| + group_builder.specify "will only update rows that are matched and skip others (default - allow_unmatched_rows=True)" <| lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] @@ -94,7 +91,7 @@ spec setup = m2.at "Y" . to_vector . should_equal ["A", "B", "ZZZ", "B"] m2.at "Y" . to_vector . should_equal ["A", "B", "ZZZ", "B"] - Test.specify "will fill new columns of unmatched rows with Nothing (allow_unmatched_rows=True)" <| + group_builder.specify "will fill new columns of unmatched rows with Nothing (allow_unmatched_rows=True)" <| lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]], ["W", [1.5, 2.0]]] my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] @@ -106,7 +103,7 @@ spec setup = m2.at "Y" . to_vector . should_equal ["A", "B", "ZZZ", "B"] m2.at "W" . to_vector . should_equal [1.5, 2.0, Nothing, 2.0] - Test.specify "will report unmatched rows (if allow_unmatched_rows=False)" <| + group_builder.specify "will report unmatched rows (if allow_unmatched_rows=False)" <| lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] @@ -124,7 +121,7 @@ spec setup = m3.at "Y" . to_vector . should_equal ["A", "B", "A", "A"] m3.at "Z" . to_vector . should_equal [10, 20, 30, 40] - Test.specify "will fail on missing key columns in either table" <| + group_builder.specify "will fail on missing key columns in either table" <| lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] my_table = table_builder [["X", [1, 2, 3, 2]], ["Z", [10, 20, 30, 40]]] @@ -137,7 +134,7 @@ spec setup = r3.catch.criteria . should_equal ["Z"] r3.catch.to_display_text . 
should_contain "in the lookup table" - Test.specify "should allow matching by multiple key columns" <| + group_builder.specify "should allow matching by multiple key columns" <| lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "B"]], ["Z", [100, 100, 200]]] my_table = table_builder [["X", [1, 1, 1, 2]], ["Y", ["A", "B", "A", "B"]], ["Z", [10, 20, 30, 40]], ["W", [1000, 2000, 3000, 4000]]] @@ -150,7 +147,7 @@ spec setup = m2.at "Y" . to_vector . should_equal ["A", "B", "A", "B"] m2.at "Z" . to_vector . should_equal [100, 200, 100, 100] - Test.specify "will fail on duplicate matches in the lookup table" <| + group_builder.specify "will fail on duplicate matches in the lookup table" <| lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "C"]]] my_table = table_builder [["X", [4, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] @@ -186,7 +183,7 @@ spec setup = r4.catch.clashing_example_key_values . should_equal [1, "A"] r4.catch.clashing_example_row_count . should_equal 2 - Test.specify "will preserve count of rows, even if there are duplicates" <| + group_builder.specify "will preserve count of rows, even if there are duplicates" <| lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] my_table = table_builder [["X", [1, 2, 2, 2, 1]], ["Z", [10, 20, 20, 20, 50]]] @@ -197,7 +194,7 @@ spec setup = m2.at "Y" . to_vector . should_equal ["A", "B", "B", "B", "A"] m2.at "Z" . to_vector . 
should_equal [10, 20, 20, 20, 50] - Test.specify "should correctly preserve types of original, merged and added columns" <| + group_builder.specify "should correctly preserve types of original, merged and added columns" <| table = table_builder [["key1", [0, 1]], ["key2", ["o", "?"]], ["X", [1, 10]], ["Y", ["A", "E"]], ["Z", [1.5, 2.0]], ["W", [True, False]], ["A", [2, 22]], ["B", ["1", "2"]], ["C", [2.0, 2.5]], ["D", [False, False]]] lookup = table_builder [["key1", [0, 2]], ["key2", ["o", "?"]], ["X2", [100, 1000]], ["Y2", ["foo", "bar"]], ["Z2", [0.5, 4.0]], ["W2", [False, True]], ["A", [3, 55]], ["B", ["F", "F"]], ["C", [3.0, 10.5]], ["D", [True, False]]] @@ -226,7 +223,7 @@ spec setup = t2.at "Z2" . value_type . should_equal Value_Type.Float t2.at "W2" . value_type . should_equal Value_Type.Boolean - if setup.test_selection.fixed_length_text_columns then Test.specify "should correctly preserve types of original, merged and added columns (various Char types test case)" <| + if setup.test_selection.fixed_length_text_columns then group_builder.specify "should correctly preserve types of original, merged and added columns (various Char types test case)" <| table2 = table_builder [["key", ["0"]], ["X", ["a"]], ["A", ["bbbbb"]]] . cast "key" (Value_Type.Char size=50) . cast "X" (Value_Type.Char size=1) . cast "A" (Value_Type.Char size=5 variable_length=False) lookup2 = table_builder [["key", ["0"]], ["X2", ["ccc"]], ["A", ["dddd"]]] . cast "key" (Value_Type.Char size=100) . cast "X2" (Value_Type.Char size=3 variable_length=False) . cast "A" (Value_Type.Char size=4 variable_length=False) @@ -255,7 +252,7 @@ spec setup = # If unmatched rows are not allowed, we can guarantee only _new_ values (from the lookup table) will be in the result, so instead of merging the type we inherit the type from the lookup table. t3.at "A" . value_type . 
should_equal (Value_Type.Char size=4 variable_length=False) - Test.specify "will report Floating_Point_Equality if floating-point columns are used as key" <| + group_builder.specify "will report Floating_Point_Equality if floating-point columns are used as key" <| lookup = table_builder [["X", [1.0, 2.0, 3.0]], ["Y", ["A", "B", "C"]]] my_table = table_builder [["X", [2.0, 3.0, 2.0, 3.0]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] lookup.at "X" . value_type . is_floating_point . should_be_true @@ -270,7 +267,7 @@ spec setup = w2 = Problems.expect_only_warning Floating_Point_Equality t2 w2.to_display_text . should_contain "X" - Test.specify "will fail with No_Common_Type if types of updated columns are not compatible" <| + group_builder.specify "will fail with No_Common_Type if types of updated columns are not compatible" <| lookup = table_builder [["X", [1, 2, 3]], ["Y", [1, 11, 111]]] my_table = table_builder [["X", [2, 3, 2, 3]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] @@ -280,7 +277,7 @@ spec setup = r2.catch.to_display_text . should_contain "Char" r2.catch.to_display_text . should_contain "when unifying column [Y]" - Test.specify "will allow incompatible types if allow_unmatched_rows=False" <| + group_builder.specify "will allow incompatible types if allow_unmatched_rows=False" <| lookup = table_builder [["X", [1, 2, 3]], ["Y", [1, 11, 111]]] my_table = table_builder [["X", [2, 3, 2, 3]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] my_table.at "Y" . value_type . is_text . should_be_true @@ -293,7 +290,7 @@ spec setup = m2.at "Y" . to_vector . should_equal [11, 111, 11, 111] m2.at "Z" . to_vector . 
should_equal [10, 20, 30, 40] - Test.specify "will fail if key columns of the lookup table contain Nothing" <| + group_builder.specify "will fail if key columns of the lookup table contain Nothing" <| lookup1 = table_builder [["X", [1, 2, Nothing]], ["Y", ["A", "B", "C"]]] my_table1 = table_builder [["X", [2, 3, 2, 3]], ["Z", [10, 20, 30, 40]]] r1 = my_table1.merge lookup1 key_columns="X" add_new_columns=True @@ -312,14 +309,14 @@ spec setup = r3 = my_table2.merge lookup2 key_columns="X" allow_unmatched_rows=False add_new_columns=True r3.should_fail_with Unmatched_Rows_In_Lookup - Test.specify "will not allow providing no key_columns" <| + group_builder.specify "will not allow providing no key_columns" <| lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] my_table = table_builder [["X", [2, 1]], ["Z", [10, 20]]] r2 = my_table.merge lookup key_columns=[] add_new_columns=True r2.should_fail_with Illegal_Argument - if setup.is_database.not then Test.specify "(in-memory only) will preserve the order of rows from the original table" <| + if setup.is_database.not then group_builder.specify "(in-memory only) will preserve the order of rows from the original table" <| lookup = table_builder [["Y", [1, 0]], ["V", ["TRUE", "FALSE"]]] xs = 0.up_to 50 . to_vector ys = xs.map x-> x%2 @@ -333,7 +330,7 @@ spec setup = vs = xs.map x-> if (x%2) == 1 then "TRUE" else "FALSE" t2.at "V" . to_vector . 
should_equal vs - if setup.is_database then Test.specify "(database-only) will fail if pre-checked invariants get invalidated between the query is constructed and then materialized" <| + if setup.is_database then group_builder.specify "(database-only) will fail if pre-checked invariants get invalidated between the query is constructed and then materialized" <| Test.with_clue "(lookup is unique check) " <| lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] @@ -419,7 +416,7 @@ spec setup = m2.should_fail_with Invariant_Violation # This does not seem useful really, but there is no reason to disallow it, so we should ensure it does not crash. - Test.specify "(edge-case) should allow lookup with itself" <| + group_builder.specify "(edge-case) should allow lookup with itself" <| table = table_builder [["X", [1, 2, 3]], ["Y", ["A", "B", "C"]]] t2 = table.merge table key_columns="X" t2.column_names . should_equal ["X", "Y"] @@ -428,7 +425,7 @@ spec setup = m2.at "X" . to_vector . should_equal [1, 2, 3] m2.at "Y" . to_vector . 
should_equal ["A", "B", "C"] - Test.specify "should gracefully handle tables from different backends" <| + group_builder.specify "should gracefully handle tables from different backends" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] alternative_connection = Database.connect (SQLite In_Memory) t0 = (Table.new [["A", [3, 2, 1]], ["B", ["x", "y", "z"]]]).select_into_database_table alternative_connection "T0" temporary=True diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso index c33796f8163c..44fd1b71610d 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso @@ -8,8 +8,7 @@ from Standard.Table.Errors import all from Standard.Database import all from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all from project.Common_Table_Operations.Util import expect_column_names, run_default_backend, within_table import project.Util @@ -17,13 +16,11 @@ import project.Util type My_Type Value x y -main = run_default_backend spec - -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder - Test.group prefix+"Table.union" <| - Test.specify "should merge columns from multiple tables" <| + suite_builder.group prefix+"Table.union" group_builder-> + group_builder.specify "should merge columns from multiple tables" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, True]]] t2 = table_builder [["A", [4, 5, 6]], ["B", ["d", "e", "f"]], ["C", [False, True, False]]] t3 = table_builder [["A", [7, 8, 9]], ["B", ["g", "h", "i"]], ["C", [True, False, False]]] @@ -40,7 +37,7 @@ spec setup = t5.at "B" . to_vector . 
should_equal ["g", "h", "i", "a", "b", "c", "d", "e", "f"] t5.at "C" . to_vector . should_equal [True, False, False, True, False, True, False, True, False] - Test.specify "should fill unmatched columns (by name matching) with nulls and report a warning by default" <| + group_builder.specify "should fill unmatched columns (by name matching) with nulls and report a warning by default" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] t3 = table_builder [["D", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] @@ -64,7 +61,7 @@ spec setup = problems2 = [Unmatched_Columns.Error ["A", "D"]] Problems.test_problem_handling action2 problems2 tester2 - Test.specify "should drop unmatched columns if asked to" <| + group_builder.specify "should drop unmatched columns if asked to" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] t3 = table_builder [["A", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] @@ -74,7 +71,7 @@ spec setup = expect_column_names ["A"] t4 t4.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6, Nothing, Nothing, 0] - Test.specify "should keep unmatched columns without errors if asked to" <| + group_builder.specify "should keep unmatched columns without errors if asked to" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] t3 = table_builder [["A", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] @@ -86,7 +83,7 @@ spec setup = t4.at "B" . to_vector . should_equal ["a", "b", "c", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing] t4.at "C" . to_vector . 
should_equal [Nothing, Nothing, Nothing, "d", "e", "f", "g", "h", "i"] - Test.specify "should fail if asked to drop unmatched columns but the set of common columns is empty" <| + group_builder.specify "should fail if asked to drop unmatched columns but the set of common columns is empty" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] t3 = table_builder [["D", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] @@ -95,7 +92,7 @@ spec setup = t4.should_fail_with No_Output_Columns t4.catch.to_display_text . should_equal "No columns in the result, because of another problem: Unmatched columns are set to be dropped, but no common column names were found." - Test.specify "should ignore column names when matching by position" <| + group_builder.specify "should ignore column names when matching by position" <| t1 = table_builder [["A", [1, 2, 3]], ["Y", ["a", "b", "c"]]] t2 = table_builder [["X", [4, 5, 6]], ["A", ["d", "e", "f"]]] @@ -104,7 +101,7 @@ spec setup = t3.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6] t3.at "Y" . to_vector . 
should_equal ["a", "b", "c", "d", "e", "f"] - Test.specify "should fill extra columns (positional matching) with nulls and report a warning by default" <| + group_builder.specify "should fill extra columns (positional matching) with nulls and report a warning by default" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] t3 = table_builder [["A2", [10, 20, 30]]] @@ -118,7 +115,7 @@ spec setup = problems = [Column_Count_Mismatch.Error 3 1] Problems.test_problem_handling action problems tester - Test.specify "should keep the least number of columns with positional matching if asked to drop unmatched ones" <| + group_builder.specify "should keep the least number of columns with positional matching if asked to drop unmatched ones" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] t3 = table_builder [["A2", [10, 20, 30]]] @@ -128,7 +125,7 @@ spec setup = expect_column_names ["A"] t4 t4.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6, 10, 20, 30] - Test.specify "should keep the greatest number of columns with positional matching if asked to keep unmatched ones, filling missing values with null and reporting no problems" <| + group_builder.specify "should keep the greatest number of columns with positional matching if asked to keep unmatched ones, filling missing values with null and reporting no problems" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] t3 = table_builder [["A2", [10, 20, 30]]] @@ -140,7 +137,7 @@ spec setup = t4.at "B1" . to_vector . should_equal ["a", "b", "c", "d", "e", "f", Nothing, Nothing, Nothing] t4.at "C" . to_vector . 
should_equal [Nothing, Nothing, Nothing, 7, 8, 9, Nothing, Nothing, Nothing] - Test.specify "should use column names from the first table that has enough columns in positional matching mode" <| + group_builder.specify "should use column names from the first table that has enough columns in positional matching mode" <| t1 = table_builder [["A", [1, 2, 3]]] t2 = table_builder [["X", [4, 5, 6]], ["A", ["a", "b", "c"]]] @@ -164,14 +161,14 @@ spec setup = t8 = t1.union [t2, t5, t6, t7] match_columns=Match_Columns.By_Position expect_column_names ["Y", "A", "Z"] t8 - Test.specify "should allow to merge a table with itself" <| + group_builder.specify "should allow to merge a table with itself" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] t2 = t1.union [t1, t1] expect_column_names ["A", "B"] t2 t2.at "A" . to_vector . should_equal [1, 2, 3, 1, 2, 3, 1, 2, 3] t2.at "B" . to_vector . should_equal ["a", "b", "c", "a", "b", "c", "a", "b", "c"] - Test.specify "should not de-duplicate rows" <| + group_builder.specify "should not de-duplicate rows" <| t1 = table_builder [["A", [1, 1, 3]], ["B", ["a", "a", "c"]]] t2 = table_builder [["A", [1, 2, 2]], ["B", ["a", "b", "b"]]] t3 = t1.union t2 @@ -179,7 +176,7 @@ spec setup = t3.at "A" . to_vector . should_equal [1, 1, 3, 1, 2, 2] t3.at "B" . to_vector . 
should_equal ["a", "a", "c", "a", "b", "b"] - Test.specify "should gracefully handle the case where no tables to union were provided" <| + group_builder.specify "should gracefully handle the case where no tables to union were provided" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] check_same table = @@ -196,7 +193,7 @@ spec setup = check_same <| t1.union [] keep_unmatched_columns=True check_same <| t1.union [] match_columns=Match_Columns.By_Position keep_unmatched_columns=True - Test.specify "should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <| + group_builder.specify "should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <| t1 = table_builder [["A", ["a", "b", "c"]]] . cast "A" (Value_Type.Char size=1 variable_length=False) t2 = table_builder [["A", ["xyz", "abc", "def"]]] . cast "A" (Value_Type.Char size=3 variable_length=False) @@ -210,7 +207,7 @@ spec setup = Test.with_clue "t3[A].value_type="+(t3.at "A").value_type.to_display_text+": " <| t3.at "A" . value_type . variable_length . should_be_true - Test.specify "should find a common type that will fit the merged columns" <| + group_builder.specify "should find a common type that will fit the merged columns" <| t1 = table_builder [["A", [0, 1, 2]]] t2 = table_builder [["A", [1.0, 2.0, 2.5]]] @@ -239,7 +236,7 @@ spec setup = # Database backends are not required to support Mixed types. 
if setup.is_database.not then - Test.specify "should resort to Mixed value type only if at least one column is already Mixed" <| + group_builder.specify "should resort to Mixed value type only if at least one column is already Mixed" <| ## TODO currently no way to retype a column to Mixed, so we are using a custom object t1 = table_builder [["A", [1, 2, 3]], ["mixed", ["a", My_Type.Value 1 2, Nothing]]] @@ -265,7 +262,7 @@ spec setup = t6.at "mixed" . to_vector . should_equal ["X", "y", "a", My_Type.Value 1 2, Nothing, 1, 2, 3, True, False] t6.at "mixed" . value_type . should_equal Value_Type.Mixed - Test.specify "if no common type can be found, should report error and drop the problematic column" <| + group_builder.specify "if no common type can be found, should report error and drop the problematic column" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, Nothing]]] t2 = table_builder [["C", ["x", "Y", "Z"]], ["A", [4, 5, 6]], ["B", [1, 2, 3]]] @@ -292,7 +289,7 @@ spec setup = r4 = t3.union t4 match_columns=Match_Columns.By_Position on_problems=Problem_Behavior.Report_Error r4.should_fail_with No_Common_Type - Test.specify "if type widening is not allowed, should use the type from first table that contained the given column" <| + group_builder.specify "if type widening is not allowed, should use the type from first table that contained the given column" <| t1 = table_builder [["A", [1, 2, 3]]] t2 = table_builder [["A", [4, 5, 6]], ["B", [1.2, 2.2, 3.1]]] @@ -306,7 +303,7 @@ spec setup = t2.at "B" . value_type . is_floating_point . should_be_true t3.at "B" . value_type . is_floating_point . 
should_be_true - Test.specify "if type widening is not allowed and types do not match, should report error and drop the problematic column" <| + group_builder.specify "if type widening is not allowed and types do not match, should report error and drop the problematic column" <| t1 = table_builder [["A", [1, 2, 3]], ["B", [1, 2, 3]], ["E", [1.1, 2.5, 3.2]]] t2 = table_builder [["A", [4, 5, 6]], ["B", [1.5, 2.5, 3.5]], ["E", [1, 2, 3]]] @@ -332,7 +329,7 @@ spec setup = # Database backends are not required to support Mixed types. if setup.is_database.not then - Test.specify "even if type widening is not allowed, if the first column is mixed, it should accept any column to be concatenated to it" <| + group_builder.specify "even if type widening is not allowed, if the first column is mixed, it should accept any column to be concatenated to it" <| t1 = table_builder [["X", ["a", 1, Nothing]]] t2 = table_builder [["X", [1]]] t3 = table_builder [["X", [1.2, 2.3, 3.4]]] @@ -347,7 +344,7 @@ spec setup = t6.at "X" . value_type . should_equal Value_Type.Mixed t6.at "X" . to_vector . should_equal ["a", 1, Nothing, 1, 1.2, 2.3, 3.4, "a", "b", True, False] - Test.specify "when finding a common type for numeric columns to be Float, any precision loss should be reported" <| + group_builder.specify "when finding a common type for numeric columns to be Float, any precision loss should be reported" <| t1 = table_builder [["X", [1, (2^62)-1, 3]]] t2 = table_builder [["X", [1.5, 2.5, 3.5]]] t3 = table_builder [["X", [(2^100)+1, 2^10, 2]]] @@ -365,7 +362,7 @@ spec setup = # Losing precision on (2^62)-1 and 2^100+1. w.affected_rows_count . 
should_equal 2 - Test.specify "if type mismatches cause all columns to be dropped, fail with No_Output_Columns" <| + group_builder.specify "if type mismatches cause all columns to be dropped, fail with No_Output_Columns" <| t1 = table_builder [["A", [1, 2, 3]]] t2 = table_builder [["A", ['x']]] @@ -380,7 +377,7 @@ spec setup = t2 = table_builder [["X", [3, 4, 5]], ["Y", ['x', 'y', 'z']]] . cast "X" (Value_Type.Integer Bits.Bits_32) . cast "Y" (Value_Type.Char size=1 variable_length=False) supports_complex_types = (t1.is_error || t2.is_error || Problems.get_attached_warnings t1 . not_empty).not if supports_complex_types then - Test.specify "should find a common type (2)" <| + group_builder.specify "should find a common type (2)" <| t12 = t1.union t2 Problems.assume_no_problems t12 t12.at "X" . value_type . should_equal (Value_Type.Integer Bits.Bits_32) @@ -389,7 +386,7 @@ spec setup = t12.at "X" . to_vector . should_equal [0, 1, 2, 3, 4, 5] t12.at "Y" . to_vector . should_equal ['aa', 'bb', 'cc', 'x', 'y', 'z'] - Test.specify "should fail to find a common type if widening is not allowed (2)" <| + group_builder.specify "should fail to find a common type if widening is not allowed (2)" <| r1 = t1.union t2 allow_type_widening=False r1.should_fail_with No_Output_Columns r1.catch.cause . should_be_a Column_Type_Mismatch @@ -398,7 +395,7 @@ spec setup = # And this should report Column_Type_Mismatch as the more important error too: t1.union t2 allow_type_widening=False on_problems=Problem_Behavior.Report_Error . 
should_fail_with Column_Type_Mismatch - Test.specify "should gracefully handle tables from different backends" <| + group_builder.specify "should gracefully handle tables from different backends" <| t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] alternative_connection = Database.connect (SQLite In_Memory) diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso index ebe88b0ecdfd..c933c745a69b 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso @@ -8,22 +8,19 @@ from Standard.Table.Errors import all from Standard.Database import all from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all from project.Common_Table_Operations.Util import expect_column_names, run_default_backend -main = run_default_backend spec - -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder materialize = setup.materialize db_todo = if setup.is_database.not then Nothing else "Table.zip is still WIP for the DB backend." 
- Test.group prefix+"Table.zip" pending=db_todo <| + suite_builder.group prefix+"Table.zip" pending=db_todo group_builder-> if setup.is_database.not then - Test.specify "should allow to zip two tables, preserving memory layout order" <| + group_builder.specify "should allow to zip two tables, preserving memory layout order" <| t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t2 = table_builder [["Z", ['a', 'b', 'c']], ["W", ['x', 'y', 'z']]] @@ -38,7 +35,7 @@ spec setup = expected_rows = [r0, r1, r2] r.should_equal expected_rows - Test.specify "should allow to zip two tables, preserving the order defined by `order_by`" <| + group_builder.specify "should allow to zip two tables, preserving the order defined by `order_by`" <| t1 = table_builder [["X", [100, 2]], ["Y", [4, 5]]] t2 = table_builder [["Z", ['a', 'b']], ["W", ['x', 'd']]] @@ -56,7 +53,7 @@ spec setup = expected_rows = [r0, r1] r.should_equal expected_rows - Test.specify "should report unmatched rows if the row counts do not match and pad them with nulls" <| + group_builder.specify "should report unmatched rows if the row counts do not match and pad them with nulls" <| t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t2 = table_builder [["Z", ['a', 'b']], ["W", ['x', 'd']]] @@ -80,7 +77,7 @@ spec setup = problems_2 = [Row_Count_Mismatch.Error 2 3] Problems.test_problem_handling action_2 problems_2 tester_2 - Test.specify "should allow to keep the unmatched rows padded with nulls without reporting problems" <| + group_builder.specify "should allow to keep the unmatched rows padded with nulls without reporting problems" <| t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t2 = table_builder [["Z", ['a']], ["W", ['x']]] @@ -92,7 +89,7 @@ spec setup = t3.at "Z" . to_vector . should_equal ['a', Nothing, Nothing] t3.at "W" . to_vector . 
should_equal ['x', Nothing, Nothing] - Test.specify "should allow to drop the unmatched rows" <| + group_builder.specify "should allow to drop the unmatched rows" <| t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t2 = table_builder [["Z", ['a']], ["W", ['x']]] @@ -104,7 +101,7 @@ spec setup = t3.at "Z" . to_vector . should_equal ['a'] t3.at "W" . to_vector . should_equal ['x'] - Test.specify "should work when zipping with an empty table" <| + group_builder.specify "should work when zipping with an empty table" <| t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] t2 = table_builder [["Z", ['a']], ["W", ['c']]] # Workaround to easily create empty table until table builder allows that directly. @@ -137,7 +134,7 @@ spec setup = t6.row_count . should_equal 0 t6.at "X" . to_vector . should_equal [] - Test.specify "should not report unmatched rows for rows that simply are all null" <| + group_builder.specify "should not report unmatched rows for rows that simply are all null" <| t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t2 = table_builder [["Z", ['a', Nothing, Nothing]], ["W", ['b', Nothing, Nothing]]] t3 = t1.zip t2 on_problems=Problem_Behavior.Report_Error @@ -148,7 +145,7 @@ spec setup = t3.at "Z" . to_vector . should_equal ['a', Nothing, Nothing] t3.at "W" . to_vector . 
should_equal ['b', Nothing, Nothing] - Test.specify "should rename columns of the right table to avoid duplicates" <| + group_builder.specify "should rename columns of the right table to avoid duplicates" <| t1 = table_builder [["X", [1, 2]], ["Y", [5, 6]], ["Right Y", [7, 8]]] t2 = table_builder [["X", ['a']], ["Y", ['d']]] @@ -170,7 +167,7 @@ spec setup = expect_column_names ["X", "Y", "Right Y", "Right X 1", "Right X"] (t1.zip t4) expect_column_names ["X", "Right X", "Right X 1", "Y", "Right Y"] (t4.zip t1) - Test.specify "should report both row count mismatch and duplicate column warnings at the same time" <| + group_builder.specify "should report both row count mismatch and duplicate column warnings at the same time" <| t1 = table_builder [["X", [1, 2]], ["Right X", [5, 6]]] t2 = table_builder [["X", ['a']], ["Z", ['d']]] @@ -178,7 +175,7 @@ spec setup = expected_problems = [Row_Count_Mismatch.Error 2 1, Duplicate_Output_Column_Names.Error ["Right X"]] Problems.get_attached_warnings t3 . should_contain_the_same_elements_as expected_problems - Test.specify "should allow to zip the table with itself" <| + group_builder.specify "should allow to zip the table with itself" <| ## Even though this does not seem very useful, we should verify that this edge case works correctly. It may especially be fragile in the Database backend. @@ -192,7 +189,7 @@ spec setup = t2.at "Right Y" . to_vector . 
should_equal [4, 5] if setup.is_database.not then - Test.specify "should correctly pad/truncate all kinds of column types" <| + group_builder.specify "should correctly pad/truncate all kinds of column types" <| primitives = [["ints", [1, 2, 3]], ["strs", ['a', 'b', 'c']], ["bools", [True, Nothing, False]]] times = [["dates", [Date.new 1999 1 1, Date.new 2000 4 1, Date.new 2001 1 2]], ["times", [Time_Of_Day.new 23 59, Time_Of_Day.new 0 0, Time_Of_Day.new 12 34]], ["datetimes", [Date_Time.new 1999 1 1 23 59, Date_Time.new 2000 4 1 0 0, Date_Time.new 2001 1 2 12 34]]] t = table_builder <| @@ -239,7 +236,7 @@ spec setup = padded.at "datetimes" . value_type . should_equal Value_Type.Date_Time padded.at "mixed" . value_type . should_equal Value_Type.Mixed - Test.specify "should gracefully handle tables from different backends" <| + group_builder.specify "should gracefully handle tables from different backends" <| t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] alternative_connection = Database.connect (SQLite In_Memory) diff --git a/test/Table_Tests/src/Common_Table_Operations/Main.enso b/test/Table_Tests/src/Common_Table_Operations/Main.enso index 183b38fca99f..cc5e225fee34 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Main.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Main.enso @@ -31,15 +31,17 @@ import project.Common_Table_Operations.Transpose_Spec from project.Common_Table_Operations.Util import run_default_backend type Test_Setup + # TODO: Add teardown_connection_fn? ## A common test configuration for tests of shared operations on the Table API. Arguments: - prefix: A name to prepend to test groups to identify the tested backend. - - table: A table using the tested backend containing data from - `data/data.csv`. - - empty_table: An empty table using the tested backend. + - table_fn: A function that takes Nothing and returns a table using the + tested backend containing data from `data/data.csv`. 
+ - empty_table_fn: A function that takes Nothing and returns an empty + table using the tested backend. - table_builder: A function used to build a table using the tested backend from a vector of columns represented as pairs of name and vector of values. @@ -53,9 +55,9 @@ type Test_Setup - aggregate_test_selection: A selection of which aggregate test suites should be run. Can be used to skip checks for backends which do not support particular features. - - connection: A related database connection or Nothing for in-memory - tests. - Config prefix table empty_table table_builder materialize is_database test_selection aggregate_test_selection connection + - create_connection_func: A function that takes Nothing and creates a related + database connection or Nothing for in-memory tests. + Config prefix table_fn empty_table_fn table_builder materialize is_database test_selection aggregate_test_selection create_connection_func ## Specifies if the given Table backend supports custom Enso types. @@ -118,31 +120,31 @@ type Test_Selection Replace_Params that a backend supports. 
Config supports_case_sensitive_columns=True order_by=True natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_and_nothing_distinct=True distinct_returns_first_row_from_group_if_ordered=True date_time=True fixed_length_text_columns=False length_restricted_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=False different_size_integer_types=True supports_8bit_integer=False supports_decimal_type=False supports_time_duration=False supports_nanoseconds_in_time=False supports_mixed_columns=False supported_replace_params=Nothing -spec setup = - Core_Spec.spec setup - Select_Columns_Spec.spec setup - Column_Name_Edge_Cases_Spec.spec setup - Column_Operations_Spec.spec setup - Derived_Columns_Spec.spec setup - Date_Time_Spec.spec setup - Conversion_Spec.spec setup - Aggregate_Spec.spec setup - Filter_Spec.spec setup - Map_Spec.spec setup - Missing_Values_Spec.spec setup - Order_By_Spec.spec setup - Take_Drop_Spec.spec setup - Expression_Spec.spec detailed=False setup - Join_Spec.spec setup - Cross_Join_Spec.spec setup - Zip_Spec.spec setup - Union_Spec.spec setup - Lookup_Spec.spec setup - Distinct_Spec.spec setup - Cross_Tab_Spec.spec setup - Transpose_Spec.spec setup - Add_Row_Number_Spec.spec setup - Integration_Tests.spec setup - Temp_Column_Spec.spec setup +add_specs suite_builder setup = + Core_Spec.add_specs suite_builder setup + Select_Columns_Spec.add_specs suite_builder setup + Column_Name_Edge_Cases_Spec.add_specs suite_builder setup + Column_Operations_Spec.add_specs suite_builder setup + Derived_Columns_Spec.add_specs suite_builder setup + Date_Time_Spec.add_specs suite_builder setup + Conversion_Spec.add_specs suite_builder setup + Aggregate_Spec.add_specs suite_builder setup + Filter_Spec.add_specs suite_builder setup + Map_Spec.add_specs suite_builder setup + 
Missing_Values_Spec.add_specs suite_builder setup + Order_By_Spec.add_specs suite_builder setup + Take_Drop_Spec.add_specs suite_builder setup + Expression_Spec.add_specs suite_builder detailed=False setup + Join_Spec.add_specs suite_builder setup + Cross_Join_Spec.add_specs suite_builder setup + Zip_Spec.add_specs suite_builder setup + Union_Spec.add_specs suite_builder setup + Lookup_Spec.add_specs suite_builder setup + Distinct_Spec.add_specs suite_builder setup + Cross_Tab_Spec.add_specs suite_builder setup + Transpose_Spec.add_specs suite_builder setup + Add_Row_Number_Spec.add_specs suite_builder setup + Integration_Tests.add_specs suite_builder setup + Temp_Column_Spec.add_specs suite_builder setup -main = run_default_backend spec +main = run_default_backend add_specs diff --git a/test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso index 9a46772568cd..27506d019678 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso @@ -7,42 +7,41 @@ import Standard.Table.Data.Type.Value_Type.Bits from Standard.Database.Errors import Unsupported_Database_Operation -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + from project.Common_Table_Operations.Util import run_default_backend -main = run_default_backend spec -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder - Test.group prefix+"Column.map" <| + suite_builder.group prefix+"Column.map" group_builder-> if setup.is_database then - Test.specify "should report unsupported error" <| + group_builder.specify "should report unsupported error" <| t = table_builder [["X", [1, 2, 3]]] t.at "X" . map (x-> x + 1) . 
should_fail_with Unsupported_Database_Operation if setup.is_database.not then - Test.specify "should allow to map a column with an arbitrary function" <| + group_builder.specify "should allow to map a column with an arbitrary function" <| t = table_builder [["X", [1, 2, 3]]] f x = 2*x + 1 t.at "X" . map f . to_vector . should_equal [3, 5, 7] - Test.specify "should forward dataflow errors that occur within the map" <| + group_builder.specify "should forward dataflow errors that occur within the map" <| t = table_builder [["X", [1, 2, 3]]] f x = if x == 2 then Error.throw (Illegal_State.Error "MY ERROR") else 2*x + 1 c1 = t.at "X" . map f c1.should_fail_with Illegal_State c1.catch.message . should_equal "MY ERROR" - Test.specify "should forward panics that occur within the map" <| + group_builder.specify "should forward panics that occur within the map" <| t = table_builder [["X", [1, 2, 3]]] f x = if x == 2 then Panic.throw (Illegal_State.Error "MY PANIC") else 2*x + 1 Test.expect_panic_with (t.at "X" . map f) Illegal_State - Test.specify "should forward warnings that are attached to results of function invocation" <| + group_builder.specify "should forward warnings that are attached to results of function invocation" <| t = table_builder [["X", [1, 2, 3, 4]]] f x = if (x % 2) == 0 then Warning.attach (Illegal_State.Error "MY WARNING "+x.to_text) (2*x + 1) else 2*x + 1 @@ -51,7 +50,7 @@ spec setup = warnings.map .message . should_contain_the_same_elements_as ["MY WARNING 2", "MY WARNING 4"] c1.to_vector . should_equal [3, 5, 7, 9] - Test.specify "should respect the expected_value_type" <| + group_builder.specify "should respect the expected_value_type" <| t = table_builder [["X", [1, 2, 3]]] f x = 2*x + 1 c2 = t.at "X" . map f expected_value_type=Value_Type.Float @@ -89,7 +88,7 @@ spec setup = c7.value_type . should_equal Value_Type.Mixed c7.to_vector . 
should_equal [2, "A", 4] - Test.specify "should fail with Invalid_Value_Type if some results do not match the expected_value_type" <| + group_builder.specify "should fail with Invalid_Value_Type if some results do not match the expected_value_type" <| t = table_builder [["X", [1, 2, 3]]] c1 = t.at "X" @@ -132,32 +131,32 @@ spec setup = r8.should_fail_with Invalid_Value_Type r8.catch.to_display_text . should_contain "Expected type Date, but got a value 42 of type Integer (16 bits)" - Test.group prefix+"Column.zip" <| + suite_builder.group prefix+"Column.zip" group_builder-> if setup.is_database then - Test.specify "should report unsupported error" <| + group_builder.specify "should report unsupported error" <| t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] f x y = 10*x + y (t.at "X") . zip (t.at "Y") f . should_fail_with Unsupported_Database_Operation if setup.is_database.not then - Test.specify "should allow to zip two columns with an arbitrary function" <| + group_builder.specify "should allow to zip two columns with an arbitrary function" <| t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] f x y = 10*x + y (t.at "X") . zip (t.at "Y") f . to_vector . should_equal [14, 25, 36] - Test.specify "should forward dataflow errors that occur within the zip" <| + group_builder.specify "should forward dataflow errors that occur within the zip" <| t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] f x y = if x == 2 then Error.throw (Illegal_State.Error "MY ERROR") else 10*x + y c1 = (t.at "X") . zip (t.at "Y") f c1.should_fail_with Illegal_State c1.catch.message . should_equal "MY ERROR" - Test.specify "should forward panics that occur within the zip" <| + group_builder.specify "should forward panics that occur within the zip" <| t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] f x y = if x == 2 then Panic.throw (Illegal_State.Error "MY PANIC") else 10*x + y Test.expect_panic_with ((t.at "X") . 
zip (t.at "Y") f) Illegal_State - Test.specify "should forward warnings that are attached to results of function invocation" <| + group_builder.specify "should forward warnings that are attached to results of function invocation" <| t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] f x y = if x == 2 then Warning.attach (Illegal_State.Error "MY WARNING") (10*x + y) else 10*x + y @@ -165,7 +164,7 @@ spec setup = w1 = Problems.expect_only_warning Illegal_State c1 w1.message . should_equal "MY WARNING" c1.to_vector . should_equal [14, 25, 36] - Test.specify "should respect the expected_value_type" <| + group_builder.specify "should respect the expected_value_type" <| t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] f x y = 10*x + y @@ -188,7 +187,7 @@ spec setup = c4.value_type . should_equal (Value_Type.Char size=2 variable_length=False) c4.to_vector . should_equal ["a4", "b5", "c6"] - Test.specify "should fail with Invalid_Value_Type if some results do not match the expected_value_type" <| + group_builder.specify "should fail with Invalid_Value_Type if some results do not match the expected_value_type" <| t = table_builder [["X", [100, 110, 120]], ["Y", [1, 3, 1]]] r1 = (t.at "X") . 
zip (t.at "Y") (*) expected_value_type=Value_Type.Byte diff --git a/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso index ccdca8cd3eaf..fd14817fa7de 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso @@ -6,18 +6,17 @@ from Standard.Table.Errors import all from Standard.Database.Errors import Unsupported_Database_Operation -from Standard.Test import Test -import Standard.Test.Extensions +from Standard.Test_New import all + from project.Common_Table_Operations.Util import run_default_backend -main = run_default_backend spec -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder test_selection = setup.test_selection - Test.group prefix+"Dropping Missing Values" <| + suite_builder.group prefix+"Dropping Missing Values" group_builder-> t0 = table_builder [["a", [0, 1, Nothing, 42, Nothing, 5]], ["b", [True, Nothing, True, False, Nothing, False]], ["c", ["", "foo", "bar", Nothing, Nothing, " "]]] t1 = a = ["a", [1, Nothing, 3, 4]] @@ -28,20 +27,20 @@ spec setup = f = ["f", [Nothing, "", Nothing, ""]] table_builder [a, b, c, d, e, f] - Test.specify "filter_blank_rows should drop rows that contain at least one missing cell" <| + group_builder.specify "filter_blank_rows should drop rows that contain at least one missing cell" <| d = t0.filter_blank_rows when=Blank_Selector.Any_Cell d.row_count . should_equal 1 d.at "a" . to_vector . should_equal [5] d.at "b" . to_vector . should_equal [False] d.at "c" . to_vector . should_equal [" "] - Test.specify "filter_blank_rows should drop rows that are all blank" <| + group_builder.specify "filter_blank_rows should drop rows that are all blank" <| d2 = t0.filter_blank_rows when=Blank_Selector.All_Cells d2.at "a" . to_vector . should_equal [0, 1, Nothing, 42, 5] d2.at "b" . to_vector . 
should_equal [True, Nothing, True, False, False] d2.at "c" . to_vector . should_equal ["", "foo", "bar", Nothing, " "] - Test.specify "filter_blank_rows should deal with edge cases" <| + group_builder.specify "filter_blank_rows should deal with edge cases" <| ## TODO currently our builder does not allow all-null tables, so we create one with a 0 and remove it by filter. See #6159. t0 = table_builder [["X", [0, Nothing, Nothing, Nothing]]] @@ -58,7 +57,7 @@ spec setup = t4.row_count . should_equal 0 t4.at "X" . to_vector . should_equal [] - Test.specify "filter_blank_rows should work with a table with many columns" <| + group_builder.specify "filter_blank_rows should work with a table with many columns" <| cols = Vector.new 60 i-> ["col_"+i.to_text, [i, Nothing]] t1 = table_builder cols @@ -67,7 +66,7 @@ spec setup = t2.row_count . should_equal 1 t2.at 42 . to_vector . should_equal [42] - Test.specify "should allow to select blank columns" <| + group_builder.specify "should allow to select blank columns" <| r1 = t1.select_blank_columns r1.columns.map .name . should_equal ["f"] r1.at "f" . to_vector . should_equal [Nothing, "", Nothing, ""] @@ -76,7 +75,7 @@ spec setup = r2.columns.map .name . should_equal ["a", "b", "d", "e", "f"] r2.at "d" . to_vector . should_equal [Nothing, True, False, True] - Test.specify "should allow to remove blank columns" <| + group_builder.specify "should allow to remove blank columns" <| r1 = t1.remove_blank_columns r1.columns.map .name . should_equal ["a", "b", "c", "d", "e"] r1.at "a" . to_vector . should_equal [1, Nothing, 3, 4] @@ -92,7 +91,7 @@ spec setup = h = ["h", [Number.nan, Nothing, Number.nan, Nothing]] table_builder [c, g, h] if test_selection.is_nan_and_nothing_distinct then - Test.specify "should not treat NaNs as blank by default" <| + group_builder.specify "should not treat NaNs as blank by default" <| r1 = t3.filter_blank_rows when=Blank_Selector.Any_Cell # We cannot use `Vector.==` because `NaN != NaN`. r1.at "X" . 
to_vector . to_text . should_equal "[1.5, NaN]" @@ -114,7 +113,7 @@ spec setup = r5.columns.map .name . should_equal ["h"] r5.at "h" . to_vector . to_text . should_equal "[NaN, Nothing, NaN, Nothing]" - Test.specify "should allow to treat NaNs as blank if asked" <| + group_builder.specify "should allow to treat NaNs as blank if asked" <| r1 = t3.filter_blank_rows when=Blank_Selector.Any_Cell treat_nans_as_blank=True # We cannot use `Vector.==` because `NaN != NaN`. r1.at "X" . to_vector . should_equal [1.5] @@ -141,11 +140,11 @@ spec setup = r6.at "h" . to_vector . to_text . should_equal "[NaN, Nothing, NaN, Nothing]" if test_selection.is_nan_and_nothing_distinct.not then - Test.specify "this backend treats NaN as Nothing" <| + group_builder.specify "this backend treats NaN as Nothing" <| t3.at "X" . to_vector . should_equal [2.0, 1.5, Nothing, Nothing] t3.at "X" . is_nan . to_vector . should_fail_with Unsupported_Database_Operation - Test.specify "select_blank_columns and remove_blank_columns should deal with edge cases" <| + group_builder.specify "select_blank_columns and remove_blank_columns should deal with edge cases" <| t = table_builder [["X", [1, 2, 3, 4]]] no_rows = t.filter "X" (Filter_Condition.Equal to=0) no_rows.row_count . should_equal 0 @@ -160,8 +159,8 @@ spec setup = r3.should_fail_with No_Output_Columns r3.catch.to_display_text . should_equal "No columns in the result, because of another problem: No columns were blank." 
- Test.group prefix+"Filling Missing Values" <| - Test.specify "should coerce long and double types to double" <| + suite_builder.group prefix+"Filling Missing Values" group_builder-> + group_builder.specify "should coerce long and double types to double" <| table = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [0.5, Nothing, Nothing, 0.25]]] ints = table.at "X" ints_filled = ints.fill_nothing 0.5 @@ -198,7 +197,7 @@ spec setup = Test.with_clue "r4.value_type="+vt4.to_display_text+": " <| vt4.is_floating_point.should_be_true - Test.specify "should keep String, Boolean, Long and Double type" <| + group_builder.specify "should keep String, Boolean, Long and Double type" <| table = table_builder [["X", ["a", Nothing, "b", Nothing]], ["Y", [True, False, Nothing, Nothing]], ["Z", [1, Nothing, 2, Nothing]], ["W", [0.5, Nothing, Nothing, 0.25]]] strs = table.at "X" strs_filled = strs.fill_nothing "X" @@ -220,7 +219,7 @@ spec setup = decimals_filled.to_vector . should_equal [0.5, 1.0, 1.0, 0.25] decimals_filled.value_type.is_floating_point.should_be_true - Test.specify "should not allow mixing types by default" <| + group_builder.specify "should not allow mixing types by default" <| table = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [True, False, Nothing, Nothing]], ["Z", [0.5, Nothing, Nothing, 0.25]]] ints = table.at "X" ints_filled = ints.fill_nothing False @@ -233,7 +232,7 @@ spec setup = table.at "Z" . fill_nothing True . should_fail_with No_Common_Type if setup.is_database.not then - Test.specify "may allow mixed types if explicitly retyped" <| + group_builder.specify "may allow mixed types if explicitly retyped" <| table = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [True, False, Nothing, Nothing]], ["Z", [0.5, Nothing, Nothing, 0.25]]] mix = table.at "X" . cast Value_Type.Mixed mix.value_type . should_equal Value_Type.Mixed @@ -249,14 +248,14 @@ spec setup = c3.to_vector . should_equal [0.5, Nothing, 2, 0.25] c3.value_type . 
should_equal Value_Type.Mixed - Test.specify "will keep the Mixed type if was explicitly retyped" <| + group_builder.specify "will keep the Mixed type if was explicitly retyped" <| table = table_builder [["X", [1, Nothing, 2, Nothing]]] mix = table.at "X" . cast Value_Type.Mixed mix_filled = mix.fill_nothing 0 mix_filled.to_vector . should_equal [1, 0, 2, 0] mix_filled.value_type . should_equal Value_Type.Mixed - Test.specify "should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <| + group_builder.specify "should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <| t0 = table_builder [["A", ["a", Nothing, "c"]], ["B", ["X", "Y", "Z"]], ["C", ["xyz", "abc", "def"]]] t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . cast "B" (Value_Type.Char size=1 variable_length=False) . cast "C" (Value_Type.Char size=3 variable_length=False) @@ -276,7 +275,7 @@ spec setup = Test.with_clue "e.value_type="+e.value_type.to_display_text+": " <| e.value_type.variable_length.should_be_true - Test.specify "should allow setting a default column by reference" <| + group_builder.specify "should allow setting a default column by reference" <| t = table_builder [["A", ["x", "", Nothing]], ["B", ["a", "b", "c"]], ["C", [Nothing, Nothing, "ZZZ"]], ["D", [Nothing, "2", "3"]]] t1 = t.fill_nothing "A" (Column_Ref.Name "B") @@ -291,7 +290,7 @@ spec setup = t3.at "C" . to_vector . should_equal [Nothing, "2", "ZZZ"] t3.at "D" . to_vector . 
should_equal [Nothing, "2", "3"] - if setup.is_database.not then Test.specify "should allow filling rows with previous value" <| + if setup.is_database.not then group_builder.specify "should allow filling rows with previous value" <| t = table_builder [["A", ["a", "", Nothing, Nothing, "", "b", "c", Nothing]]] t1 = t.fill_nothing "A" Previous_Value @@ -317,7 +316,7 @@ spec setup = # C is unchanged t5.at "C" . to_vector . should_equal ["", "foo", Nothing, "bar"] - if setup.is_database then Test.specify "will for now report that Previous_Value is not supported" <| + if setup.is_database then group_builder.specify "will for now report that Previous_Value is not supported" <| t = table_builder [["A", ["a", "", Nothing, Nothing, "", "b", "c", Nothing]]] t.fill_nothing "A" Previous_Value . should_fail_with Unsupported_Database_Operation t.fill_empty "A" Previous_Value . should_fail_with Unsupported_Database_Operation diff --git a/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso index d5ec2b6c9780..2eceb9652644 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso @@ -5,23 +5,21 @@ import Standard.Base.Errors.Common.Incomparable_Values from Standard.Table import Sort_Column from Standard.Table.Errors import all -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + from project.Common_Table_Operations.Util import run_default_backend type My_Type Foo x -main = run_default_backend spec - -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder test_selection = setup.test_selection order_by_pending = if test_selection.order_by.not then "ToDo: order_by is not yet supported by this backend." 
- Test.group prefix+"Table.order_by" pending=order_by_pending <| + suite_builder.group prefix+"Table.order_by" pending=order_by_pending group_builder-> mk_table = col1 = ["alpha", [3, 2, 1, 0]] col2 = ["beta", ["a", "b", "a", "b"]] @@ -36,7 +34,7 @@ spec setup = table_builder [col1, col2, col3, col4, col5, col6, col7, col8, col9, col10] table = mk_table - Test.specify "should work as shown in the doc examples" <| + group_builder.specify "should work as shown in the doc examples" <| t1 = table.order_by ["alpha"] t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] @@ -50,12 +48,12 @@ spec setup = t3 = table.order_by [Sort_Column.Select_By_Name "a.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive] t3.at "alpha" . to_vector . should_equal [0, 1, 2, 3] - Test.specify "should work with single column name" <| + group_builder.specify "should work with single column name" <| t1 = table.order_by "alpha" t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] - Test.specify "should work with single Sort_Column" <| + group_builder.specify "should work with single Sort_Column" <| t1 = table.order_by [Sort_Column.Name "alpha"] t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] @@ -72,17 +70,17 @@ spec setup = t4.at "alpha" . to_vector . should_equal [3, 2, 1, 0] t4.at "gamma" . to_vector . should_equal [1, 2, 3, 4] - Test.specify "should allow the selector to mix regex and case insensitive matching" <| + group_builder.specify "should allow the selector to mix regex and case insensitive matching" <| t4 = table.order_by [Sort_Column.Select_By_Name "A.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive] t4.at "alpha" . to_vector . 
should_equal [0, 1, 2, 3] - Test.specify "should correctly handle regexes matching multiple names" <| + group_builder.specify "should correctly handle regexes matching multiple names" <| t1 = table.order_by [Sort_Column.Select_By_Name ".*ta" Sort_Direction.Descending use_regex=True] t1.at "beta" . to_vector . should_equal ["b", "b", "a", "a"] t1.at "delta" . to_vector . should_equal ["a1", "a03", "a2", "a10"] t1.at "gamma" . to_vector . should_equal [2, 4, 3, 1] - Test.specify "should correctly handle problems: out of bounds indices" <| + group_builder.specify "should correctly handle problems: out of bounds indices" <| selector = [0, 100, Sort_Column.Index -200, Sort_Column.Index 300] expected_problem = Missing_Input_Columns.Error [100, -200, 300] t1 = table.order_by selector @@ -94,7 +92,7 @@ spec setup = table.at "alpha" . to_vector . should_equal [0, 1, 2, 3] Problems.test_problem_handling action [expected_problem] tester - Test.specify "should correctly handle edge-cases: duplicate selectors" <| + group_builder.specify "should correctly handle edge-cases: duplicate selectors" <| selector1 = ["alpha", Sort_Column.Name "alpha" Sort_Direction.Descending] t1 = table.order_by selector1 Problems.assume_no_problems t1 @@ -111,21 +109,21 @@ spec setup = t3.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t3.at "gamma" . to_vector . should_equal [4, 3, 2, 1] - Test.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <| + group_builder.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <| selector = [Sort_Column.Select_By_Name "ALPHA" case_sensitivity=Case_Sensitivity.Insensitive, Sort_Column.Select_By_Name "alpha" Sort_Direction.Descending] t1 = table.order_by selector Problems.assume_no_problems t1 t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . 
should_equal [4, 3, 2, 1] - Test.specify "should correctly handle edge-cases: duplicate matches due to regexes" <| + group_builder.specify "should correctly handle edge-cases: duplicate matches due to regexes" <| selector = [Sort_Column.Select_By_Name "a.*" use_regex=True, Sort_Column.Select_By_Name "alpha" Sort_Direction.Descending] t1 = table.order_by selector Problems.assume_no_problems t1 t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] - Test.specify "should correctly handle edge-cases: mixed selector types" <| + group_builder.specify "should correctly handle edge-cases: mixed selector types" <| t1 = table.order_by [Sort_Column.Name "alpha", Sort_Column.Index 1] t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "beta" . to_vector . should_equal ["b", "a", "b", "a"] @@ -136,7 +134,7 @@ spec setup = t2.at "beta" . to_vector . should_equal ["b", "a", "b", "a"] t2.at "gamma" . to_vector . should_equal [4, 3, 2, 1] - Test.specify "should correctly handle problems: unmatched names" <| + group_builder.specify "should correctly handle problems: unmatched names" <| weird_name = '.*?-!@#!"' selector = [Sort_Column.Name "alpha", "hmm", Sort_Column.Name weird_name] expected_problem = Missing_Input_Columns.Error ["hmm", weird_name] @@ -149,11 +147,11 @@ spec setup = table.at "alpha" . to_vector . should_equal [0, 1, 2, 3] Problems.test_problem_handling action [expected_problem] tester - Test.specify "should report a problem if no columns are selected for ordering" <| + group_builder.specify "should report a problem if no columns are selected for ordering" <| t2 = table.order_by [] t2.should_fail_with No_Input_Columns_Selected - Test.specify "should stack consecutive ordering operations" <| + group_builder.specify "should stack consecutive ordering operations" <| t1 = table.order_by [Sort_Column.Name "alpha"] t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "beta" . to_vector . 
should_equal ["b", "a", "b", "a"] @@ -172,7 +170,7 @@ spec setup = t4.at "beta" . to_vector . should_equal ["a", "a", "b", "b"] t4.at "alpha" . to_vector . should_equal [3, 1, 2, 0] - Test.specify "should give priority to the first selected column and use the next ones for breaking ties" <| + group_builder.specify "should give priority to the first selected column and use the next ones for breaking ties" <| t1 = table.order_by ["beta", Sort_Column.Name "alpha" Sort_Direction.Ascending] t1.at "beta" . to_vector . should_equal ["a", "a", "b", "b"] t1.at "alpha" . to_vector . should_equal [1, 3, 0, 2] @@ -198,13 +196,13 @@ spec setup = t4.at "alpha" . to_vector . should_equal [1, 3, 0, 2] t4.at "gamma" . to_vector . should_equal [3, 1, 4, 2] - Test.specify "should deal with real numbers, and not warn when ordering by floats" <| + group_builder.specify "should deal with real numbers, and not warn when ordering by floats" <| t1 = table.order_by ["tau"] t1.at "tau" . to_vector . should_equal [-0.1, 0.5, 1.6, 32.0] t1.at "alpha" . to_vector . should_equal [1, 2, 0, 3] Problems.assume_no_problems t1 - Test.specify "should deal with nulls" <| + group_builder.specify "should deal with nulls" <| t1 = table.order_by ["xi"] t1.at "xi" . to_vector . should_equal [Nothing, 0.5, 1.0, 1.5] t1.at "alpha" . to_vector . should_equal [1, 0, 3, 2] @@ -215,7 +213,7 @@ spec setup = t3 = table.order_by [Sort_Column.Name "rho" Sort_Direction.Descending] t3.at "rho" . to_vector . should_equal ["BB", "B", Nothing, Nothing] - Test.specify "should behave as expected with Unicode normalization, depending on the defaults settings" <| + group_builder.specify "should behave as expected with Unicode normalization, depending on the defaults settings" <| t1 = table.order_by [Sort_Column.Name "phi"] case test_selection.order_by_unicode_normalization_by_default of True -> @@ -225,7 +223,7 @@ spec setup = t1.at "phi" . to_vector . should_equal [Nothing, 's\u0301b', "śa", "śc"] t1.at "alpha" . to_vector . 
should_equal [2, 1, 0, 3] - Test.specify "should support natural ordering" pending=(if test_selection.natural_ordering.not then "Natural ordering is not supported.") <| + group_builder.specify "should support natural ordering" pending=(if test_selection.natural_ordering.not then "Natural ordering is not supported.") <| t1 = table.order_by [Sort_Column.Name "delta"] text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True) t1.at "delta" . to_vector . should_equal ["a1", "a2", "a03", "a10"] t1.at "alpha" . to_vector . should_equal [2, 1, 0, 3] @@ -234,7 +232,7 @@ spec setup = t2.at "delta" . to_vector . should_equal ["a03", "a1", "a10", "a2"] t2.at "alpha" . to_vector . should_equal [0, 2, 3, 1] - Test.specify "should support case insensitive ordering" pending=(if test_selection.case_insensitive_ordering.not then "Case insensitive ordering is not supported.") <| + group_builder.specify "should support case insensitive ordering" pending=(if test_selection.case_insensitive_ordering.not then "Case insensitive ordering is not supported.") <| t1 = table.order_by [Sort_Column.Name "eta"] text_ordering=(Text_Ordering.Case_Insensitive) expected = case test_selection.case_insensitive_ascii_only of True -> ["Aleph", "alpha", "Beta", "bądź"] @@ -250,7 +248,7 @@ spec setup = t4 = table.order_by [Sort_Column.Name "psi" Sort_Direction.Descending] text_ordering=(Text_Ordering.Case_Sensitive) t4.at "psi" . to_vector . 
should_equal ["c10", "c01", "C2", Nothing] - Test.specify "should support natural and case insensitive ordering at the same time" pending=(if (test_selection.natural_ordering.not || test_selection.case_insensitive_ordering.not) then "Natural ordering or case sensitive ordering is not supported.") <| + group_builder.specify "should support natural and case insensitive ordering at the same time" pending=(if (test_selection.natural_ordering.not || test_selection.case_insensitive_ordering.not) then "Natural ordering or case sensitive ordering is not supported.") <| t1 = table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True) t1.at "psi" . to_vector . should_equal [Nothing, "c01", "C2", "c10"] @@ -263,17 +261,17 @@ spec setup = t4 = table.order_by [Sort_Column.Name "psi"] t4.at "psi" . to_vector . should_equal [Nothing, "C2", "c01", "c10"] - Test.specify "text ordering settings should not affect numeric columns" <| + group_builder.specify "text ordering settings should not affect numeric columns" <| ordering = Text_Ordering.Case_Insensitive sort_digits_as_numbers=True t1 = table.order_by [Sort_Column.Name "alpha"] text_ordering=ordering t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] if setup.is_database.not then - Test.specify "should allow ordering enso objects with a comparator" <| + group_builder.specify "should allow ordering enso objects with a comparator" <| t = table_builder [["X", [Day_Of_Week.Friday, Day_Of_Week.Monday, Nothing, Nothing, Day_Of_Week.Wednesday]]] t.order_by "X" . at "X" . to_vector . 
should_equal [Nothing, Nothing, Day_Of_Week.Monday, Day_Of_Week.Wednesday, Day_Of_Week.Friday] - Test.specify "should raise Incomparable_Values if ordering by incomparable values" <| + group_builder.specify "should raise Incomparable_Values if ordering by incomparable values" <| t = table_builder [["X", [My_Type.Foo 42, My_Type.Foo "a"]]] t.order_by "X" . should_fail_with Incomparable_Values diff --git a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso index 8457dcf4fe4c..9e63ee73f859 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso @@ -3,14 +3,12 @@ from Standard.Base import all from Standard.Table import Position from Standard.Table.Errors import all -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all -from project.Common_Table_Operations.Util import expect_column_names, run_default_backend -main = run_default_backend spec +from project.Common_Table_Operations.Util import expect_column_names, run_default_backend -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder test_selection = setup.test_selection @@ -24,36 +22,36 @@ spec setup = col7 = ["abcd123", [19,20,21]] table_builder [col1, col2, col3, col4, col5, col6, col7] - Test.group prefix+"Table.select_columns" <| - Test.specify "should work as shown in the doc examples" <| + suite_builder.group prefix+"Table.select_columns" group_builder-> + group_builder.specify "should work as shown in the doc examples" <| expect_column_names ["foo", "bar"] <| table.select_columns ["bar", "foo"] expect_column_names ["bar", "Baz", "foo 1", "foo 2"] <| table.select_columns ["foo.+".to_regex, "b.*".to_regex True] expect_column_names ["abcd123", "foo", "bar"] <| table.select_columns [-1, 0, 1] reorder=True - Test.specify 
"should allow to reorder columns if asked to" <| + group_builder.specify "should allow to reorder columns if asked to" <| table_2 = table.select_columns ["bar", "foo"] reorder=True expect_column_names ["bar", "foo"] table_2 table_2 . at "bar" . to_vector . should_equal [4,5,6] table_2 . at "foo" . to_vector . should_equal [1,2,3] - Test.specify "should correctly handle regex matching" <| + group_builder.specify "should correctly handle regex matching" <| expect_column_names ["foo"] <| table.select_columns ["foo".to_regex] expect_column_names ["ab.+123", "abcd123"] <| table.select_columns ["a.*".to_regex] expect_column_names ["ab.+123", "abcd123"] <| table.select_columns ["ab.+123".to_regex] expect_column_names ["ab.+123"] <| table.select_columns ["ab.+123"] expect_column_names ["abcd123"] <| table.select_columns ["abcd123".to_regex] - Test.specify "should allow negative indices" <| + group_builder.specify "should allow negative indices" <| expect_column_names ["foo", "bar", "foo 2"] <| table.select_columns [-3, 0, 1] - Test.specify "should allow mixed names and indexes" <| + group_builder.specify "should allow mixed names and indexes" <| expect_column_names ["foo", "bar", "foo 2"] <| table.select_columns [-3, "bar", 0] expect_column_names ["foo 2", "bar", "foo"] <| table.select_columns [-3, "bar", 0] reorder=True expect_column_names ["foo", "bar", "foo 1", "foo 2", "abcd123"] <| table.select_columns [-1, "bar", "foo.*".to_regex] expect_column_names ["foo", "foo 1", "foo 2", "bar", "abcd123"] <| table.select_columns ["foo.*".to_regex, "bar", "foo", -1] reorder=True if test_selection.supports_case_sensitive_columns then - Test.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <| + group_builder.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <| table = col1 = ["foo", [1,2,3]] col2 = ["bar", [4,5,6]] @@ -61,11 +59,11 @@ spec setup = table_builder [col1, col2, col3] 
expect_column_names ["bar", "Bar"] <| table.select_columns ["bar"] case_sensitivity=Case_Sensitivity.Insensitive - Test.specify "should correctly handle regexes matching multiple names" <| + group_builder.specify "should correctly handle regexes matching multiple names" <| expect_column_names ["foo", "bar", "foo 1", "foo 2"] <| table.select_columns ["b.*".to_regex, "f.+".to_regex] expect_column_names ["bar", "foo", "foo 1", "foo 2"] <| table.select_columns ["b.*".to_regex, "f.+".to_regex] reorder=True - Test.specify "should correctly handle problems: out of bounds indices" <| + group_builder.specify "should correctly handle problems: out of bounds indices" <| selector = [1, 0, 100, -200, 300] action = table.select_columns selector error_on_missing_columns=False on_problems=_ tester = expect_column_names ["foo", "bar"] @@ -75,7 +73,7 @@ spec setup = err = table.select_columns selector err.should_fail_with Missing_Input_Columns - Test.specify "should correctly handle edge-cases: duplicate indices" <| + group_builder.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] t = table.select_columns selector on_problems=Problem_Behavior.Report_Error expect_column_names ["foo"] t @@ -83,12 +81,12 @@ spec setup = expect_column_names ["foo", "bar"] <| table.select_columns [0, 1, 0] - Test.specify "should correctly handle edge-cases: aliased indices" <| + group_builder.specify "should correctly handle edge-cases: aliased indices" <| selector = [0, -6, 1, -7] t = table.select_columns selector on_problems=Problem_Behavior.Report_Error expect_column_names ["foo", "bar"] t - Test.specify "should correctly handle edge-cases: duplicate names" <| + group_builder.specify "should correctly handle edge-cases: duplicate names" <| selector = ["foo", "foo"] t = table.select_columns selector on_problems=Problem_Behavior.Report_Error expect_column_names ["foo"] t @@ -102,7 +100,7 @@ spec setup = expect_column_names ["foo", "bar"] <| table.select_columns 
["bar", "foo", "foo", "bar"] reorder=False - Test.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <| + group_builder.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <| selector = ["FOO", "foo"] t = table.select_columns selector case_sensitivity=Case_Sensitivity.Insensitive on_problems=Problem_Behavior.Report_Error expect_column_names ["foo"] t @@ -110,7 +108,7 @@ spec setup = expect_column_names ["bar", "foo"] <| table.select_columns ["BAR", "foo", "bar"] reorder=True case_sensitivity=Case_Sensitivity.Insensitive - Test.specify "should correctly handle problems: unmatched names" <| + group_builder.specify "should correctly handle problems: unmatched names" <| weird_name = '.*?-!@#!"' selector = ["foo", "hmm", weird_name] action = table.select_columns selector error_on_missing_columns=False on_problems=_ @@ -122,12 +120,12 @@ spec setup = err.should_fail_with Missing_Input_Columns err.catch.criteria . should_equal ["hmm", weird_name] - Test.specify "should correctly handle problems in mixed case" <| + group_builder.specify "should correctly handle problems in mixed case" <| err = table.select_columns ["foo", "hmm", 99] on_problems=Problem_Behavior.Ignore err.should_fail_with Missing_Input_Columns err.catch.criteria . should_equal ["hmm", 99] - Test.specify "should correctly handle problems: no columns in the output" <| + group_builder.specify "should correctly handle problems: no columns in the output" <| [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> t = table.select_columns [] on_problems=pb t.should_fail_with No_Output_Columns @@ -141,13 +139,13 @@ spec setup = r2.catch.cause . should_be_a Missing_Input_Columns r2.catch.to_display_text . should_equal "No columns in the result, because of another problem: The criteria 'hmmm' did not match any columns." 
- Test.group prefix+"Table.remove_columns" <| - Test.specify "should work as shown in the doc examples" <| + suite_builder.group prefix+"Table.remove_columns" group_builder-> + group_builder.specify "should work as shown in the doc examples" <| expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] <| table.remove_columns ["bar", "foo"] expect_column_names ["foo", "ab.+123", "abcd123"] <| table.remove_columns ["foo.+".to_regex, "b.*".to_regex] Case_Sensitivity.Insensitive expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123"] <| table.remove_columns [-1, 0, 1] - Test.specify "should correctly handle regex matching" <| + group_builder.specify "should correctly handle regex matching" <| last_ones = table.columns.drop 1 . map .name expect_column_names last_ones <| table.remove_columns ["foo".to_regex] first_ones = ["foo", "bar", "Baz", "foo 1", "foo 2"] @@ -156,11 +154,11 @@ spec setup = expect_column_names first_ones+["abcd123"] <| table.remove_columns ["ab.+123"] Case_Sensitivity.Insensitive expect_column_names first_ones+["ab.+123"] <| table.remove_columns ["abcd123".to_regex] - Test.specify "should allow negative indices" <| + group_builder.specify "should allow negative indices" <| expect_column_names ["Baz", "foo 1", "ab.+123"] <| table.remove_columns [-1, -3, 0, 1] if test_selection.supports_case_sensitive_columns then - Test.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <| + group_builder.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <| table = col1 = ["foo", [1,2,3]] col2 = ["bar", [4,5,6]] @@ -168,10 +166,10 @@ spec setup = table_builder [col1, col2, col3] expect_column_names ["foo"] <| table.remove_columns "bar" Case_Sensitivity.Insensitive - Test.specify "should correctly handle regexes matching multiple names" <| + group_builder.specify "should correctly handle regexes matching multiple names" <| expect_column_names ["Baz", "ab.+123", 
"abcd123"] <| table.remove_columns ["f.+".to_regex, "b.*".to_regex] - Test.specify "should correctly handle problems: out of bounds indices" <| + group_builder.specify "should correctly handle problems: out of bounds indices" <| selector = [1, 0, 100, -200, 300] action = table.remove_columns selector on_problems=_ tester = expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] @@ -181,27 +179,27 @@ spec setup = err = table.remove_columns selector error_on_missing_columns=True err.should_fail_with Missing_Input_Columns - Test.specify "should correctly handle edge-cases: duplicate indices" <| + group_builder.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] t = table.remove_columns selector on_problems=Problem_Behavior.Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t - Test.specify "should correctly handle edge-cases: aliased indices" <| + group_builder.specify "should correctly handle edge-cases: aliased indices" <| selector = [0, -7, -6, 1] t = table.remove_columns selector on_problems=Problem_Behavior.Report_Error expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t - Test.specify "should correctly handle edge-cases: duplicate names" <| + group_builder.specify "should correctly handle edge-cases: duplicate names" <| selector = ["foo", "foo"] t = table.remove_columns selector on_problems=Problem_Behavior.Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t - Test.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <| + group_builder.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <| selector = ["FOO", "foo"] t = table.remove_columns selector Case_Sensitivity.Insensitive on_problems=Problem_Behavior.Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t - Test.specify "should correctly handle problems: 
unmatched names" <| + group_builder.specify "should correctly handle problems: unmatched names" <| weird_name = '.*?-!@#!"' selector = ["foo", "hmm", weird_name] action = table.remove_columns selector on_problems=_ @@ -212,7 +210,7 @@ spec setup = err = table.remove_columns selector error_on_missing_columns=True on_problems=Problem_Behavior.Ignore err.should_fail_with Missing_Input_Columns - Test.specify "should correctly handle problems: no columns in the output" <| + group_builder.specify "should correctly handle problems: no columns in the output" <| [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> selector = [".*".to_regex] t = table.remove_columns selector on_problems=pb @@ -224,15 +222,15 @@ spec setup = # No cause specified - even if some criteria were unmatched, that is not the reason for the No_Output_Columns (the reason is all other columns got deleted, by other criteria that _did_ match). t1.catch.cause . should_equal Nothing - Test.group prefix+"Table.reorder_columns" <| - Test.specify "should work as shown in the doc examples" <| + suite_builder.group prefix+"Table.reorder_columns" group_builder-> + group_builder.specify "should work as shown in the doc examples" <| expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| table.reorder_columns "foo" Position.After_Other_Columns expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo", "bar"] <| table.reorder_columns ["foo", "bar"] Position.After_Other_Columns expect_column_names ["foo 1", "foo 2", "bar", "Baz", "foo", "ab.+123", "abcd123"] <| table.reorder_columns ["foo.+".to_regex, "b.*".to_regex] case_sensitivity=Case_Sensitivity.Insensitive expect_column_names ["bar", "foo", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] <| table.reorder_columns [1, 0] Position.Before_Other_Columns expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| table.reorder_columns [0] 
Position.After_Other_Columns - Test.specify "should correctly handle regex matching" <| + group_builder.specify "should correctly handle regex matching" <| expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| table.reorder_columns ["foo".to_regex] Position.After_Other_Columns rest = ["foo", "bar", "Baz", "foo 1", "foo 2"] expect_column_names ["ab.+123", "abcd123"]+rest <| table.reorder_columns ["a.*".to_regex] @@ -240,11 +238,11 @@ spec setup = expect_column_names ["ab.+123"]+rest+["abcd123"] <| table.reorder_columns ["ab.+123"] expect_column_names ["abcd123"]+rest+["ab.+123"] <| table.reorder_columns ["abcd123".to_regex] - Test.specify "should allow negative indices" <| + group_builder.specify "should allow negative indices" <| expect_column_names ["abcd123", "foo 2", "foo", "bar", "Baz", "foo 1", "ab.+123"] <| table.reorder_columns [-1, -3, 0, 1] if test_selection.supports_case_sensitive_columns then - Test.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <| + group_builder.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <| table = col1 = ["foo", [1,2,3]] col2 = ["bar", [4,5,6]] @@ -252,10 +250,10 @@ spec setup = table_builder [col1, col2, col3] expect_column_names ["bar", "Bar", "foo"] <| table.reorder_columns ["bar"] case_sensitivity=Case_Sensitivity.Insensitive - Test.specify "should correctly handle regexes matching multiple names" <| + group_builder.specify "should correctly handle regexes matching multiple names" <| expect_column_names ["bar", "foo", "foo 1", "foo 2", "Baz", "ab.+123", "abcd123"] <| table.reorder_columns ["b.*".to_regex, "f.+".to_regex] - Test.specify "should correctly handle problems: out of bounds indices" <| + group_builder.specify "should correctly handle problems: out of bounds indices" <| selector = [1, 0, 100, -200, 300] action = table.reorder_columns selector on_problems=_ tester = expect_column_names 
["bar", "foo", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] @@ -265,22 +263,22 @@ spec setup = err = table.reorder_columns selector error_on_missing_columns=True err.should_fail_with Missing_Input_Columns - Test.specify "should correctly handle edge-cases: duplicate indices" <| + group_builder.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] t = table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] t - Test.specify "should correctly handle edge-cases: aliased indices" <| + group_builder.specify "should correctly handle edge-cases: aliased indices" <| selector = [0, -7, -6, 1] t = table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo", "bar"] t - Test.specify "should correctly handle edge-cases: duplicate names" <| + group_builder.specify "should correctly handle edge-cases: duplicate names" <| selector = ["foo", "foo"] t = table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] t - Test.specify "should correctly handle problems: unmatched names" <| + group_builder.specify "should correctly handle problems: unmatched names" <| weird_name = '.*?-!@#!"' selector = ["foo", "hmm", weird_name] action = table.reorder_columns selector Position.After_Other_Columns on_problems=_ @@ -291,7 +289,7 @@ spec setup = err = table.reorder_columns selector Position.After_Other_Columns error_on_missing_columns=True err.should_fail_with Missing_Input_Columns - Test.group prefix+"Table.sort_columns" <| + suite_builder.group prefix+"Table.sort_columns" group_builder-> table = col1 = ["foo 21", [1,2,3]] col2 = ["foo 100", [4,5,6]] @@ -302,7 +300,7 @@ spec setup = col7 = 
["bar", [19,20,21]] table_builder [col1, col2, col3, col4, col5, col6, col7] - Test.specify "should work as shown in the doc examples" <| + group_builder.specify "should work as shown in the doc examples" <| sorted = table.sort_columns expect_column_names ["Foo 2", "bar", "foo 001", "foo 1", "foo 100", "foo 21", "foo 3"] sorted sorted.columns.first.to_vector . should_equal [10,11,12] @@ -310,16 +308,16 @@ spec setup = expect_column_names ["bar", "foo 001", "foo 1", "Foo 2", "foo 3", "foo 21", "foo 100"] <| table.sort_columns text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True) expect_column_names ["foo 3", "foo 21", "foo 100", "foo 1", "foo 001", "bar", "Foo 2"] <| table.sort_columns Sort_Direction.Descending - Test.specify "should correctly handle case-insensitive sorting" <| + group_builder.specify "should correctly handle case-insensitive sorting" <| expect_column_names ["bar", "foo 001", "foo 1", "foo 100", "Foo 2", "foo 21", "foo 3"] <| table.sort_columns text_ordering=(Text_Ordering.Case_Insensitive) - Test.specify "should correctly handle natural order sorting" <| + group_builder.specify "should correctly handle natural order sorting" <| expect_column_names ["Foo 2", "bar", "foo 001", "foo 1", "foo 3", "foo 21", "foo 100"] <| table.sort_columns text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True) - Test.specify "should correctly handle various combinations of options" <| + group_builder.specify "should correctly handle various combinations of options" <| expect_column_names ["foo 100", "foo 21", "foo 3", "Foo 2", "foo 1", "foo 001", "bar"] <| table.sort_columns Sort_Direction.Descending text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True) - Test.group prefix+"Table.rename_columns" <| + suite_builder.group prefix+"Table.rename_columns" group_builder-> table = col1 = ["alpha", [1,2,3]] col2 = ["beta", [4,5,6]] @@ -327,7 +325,7 @@ spec setup = col4 = ["delta", [19,20,21]] table_builder [col1, col2, col3, 
col4] - Test.specify "should work as shown in the doc examples" <| + group_builder.specify "should work as shown in the doc examples" <| expect_column_names ["FirstColumn", "beta", "gamma", "delta"] <| table.rename_columns ["FirstColumn"] @@ -338,52 +336,52 @@ spec setup = expect_column_names ["alpha", "key:123", "key: foo bar"] <| t1.rename_columns (Map.from_vector [["name=(.*)".to_regex, "key:$1"]]) - Test.specify "should work by index" <| + group_builder.specify "should work by index" <| map = Map.from_vector [[0, "FirstColumn"], [-2, "Another"]] expect_column_names ["FirstColumn", "beta", "Another", "delta"] <| table.rename_columns map - Test.specify "should work by position" <| + group_builder.specify "should work by position" <| vec = ["one", "two", "three"] expect_column_names ["one", "two", "three", "delta"] <| table.rename_columns vec - Test.specify "should work by Vector" <| + group_builder.specify "should work by Vector" <| vec = ["one", "two", "three"] expect_column_names ["one", "two", "three", "delta"] <| table.rename_columns vec - Test.specify "should work by Vector of Pairs" <| + group_builder.specify "should work by Vector of Pairs" <| vec = [["beta", "one"], ["delta", "two"], ["alpha", "three"]] expect_column_names ["three", "one", "gamma", "two"] <| table.rename_columns vec - Test.specify "should work by name" <| + group_builder.specify "should work by name" <| map = Map.from_vector [["alpha", "FirstColumn"], ["delta", "Another"]] expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <| table.rename_columns map - Test.specify "should work by mixed Map" <| + group_builder.specify "should work by mixed Map" <| map = Map.from_vector [["alpha", "FirstColumn"], [-1, "Another"]] expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <| table.rename_columns map - Test.specify "should work by name case-insensitively" <| + group_builder.specify "should work by name case-insensitively" <| map = Map.from_vector [["ALPHA", "FirstColumn"], 
["DELTA", "Another"]] expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <| table.rename_columns map Case_Sensitivity.Insensitive - Test.specify "should work by name using regex" <| + group_builder.specify "should work by name using regex" <| map = Map.from_vector [["a.*".to_regex, "FirstColumn"]] expect_column_names ["FirstColumn", "beta", "gamma", "delta"] <| table.rename_columns map - Test.specify "should work by name using regex substitution" <| + group_builder.specify "should work by name using regex substitution" <| map = Map.from_vector [["a(.*)".to_regex, "$1"]] expect_column_names ["lpha", "beta", "gamma", "delta"] <| table.rename_columns map - Test.specify "should correctly handle problems: unmatched names" <| + group_builder.specify "should correctly handle problems: unmatched names" <| weird_name = '.*?-!@#!"' map = Map.from_vector [["alpha", "FirstColumn"], ["omicron", "Another"], [weird_name, "Fixed"]] action = table.rename_columns map error_on_missing_columns=False on_problems=_ @@ -396,7 +394,7 @@ spec setup = err = table.rename_columns map err.should_fail_with Missing_Input_Columns - Test.specify "should correctly handle problems: out of bounds indices" <| + group_builder.specify "should correctly handle problems: out of bounds indices" <| map = Map.from_vector [[0, "FirstColumn"], [-1, "Another"], [100, "Boo"], [-200, "Nothing"], [300, "Here"]] action = table.rename_columns map error_on_missing_columns=False on_problems=_ tester = expect_column_names ["FirstColumn", "beta", "gamma", "Another"] @@ -408,7 +406,7 @@ spec setup = err = table.rename_columns map err.should_fail_with Missing_Input_Columns - Test.specify "should correctly handle edge-cases: aliased indices" <| + group_builder.specify "should correctly handle edge-cases: aliased indices" <| map1 = Map.from_vector [[1, "FirstColumn"], [-3, "FirstColumn"]] t1 = table.rename_columns map1 on_problems=Problem_Behavior.Report_Error Problems.assume_no_problems t1 @@ -421,7 +419,7 @@ 
spec setup = err.column_name . should_equal "beta" err.new_names . should_equal ["FirstColumn", "DifferentName!"] - Test.specify "should correctly handle edge-cases: aliased selectors" <| + group_builder.specify "should correctly handle edge-cases: aliased selectors" <| t = table_builder [["alpha", [1,2,3]], ["bet", [4,5,6]]] map1 = Map.from_vector [["a.*".to_regex, "AA"], [".*a".to_regex, "AA"]] t1 = t.rename_columns map1 on_problems=Problem_Behavior.Report_Error @@ -446,32 +444,32 @@ spec setup = Problems.assume_no_problems t4 expect_column_names ["aaA", "bbb"] t4 - Test.specify "should correctly handle problems: invalid names ''" <| + group_builder.specify "should correctly handle problems: invalid names ''" <| map = Map.from_vector [[1, ""]] [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> r = table.rename_columns map on_problems=pb r.should_fail_with Invalid_Column_Names - Test.specify "should correctly handle problems: invalid names Nothing" <| + group_builder.specify "should correctly handle problems: invalid names Nothing" <| map = ["alpha", Nothing] [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> r = table.rename_columns map on_problems=pb r.should_fail_with Invalid_Column_Names - Test.specify "should correctly handle problems: invalid names null character" <| + group_builder.specify "should correctly handle problems: invalid names null character" <| map = ["alpha", 'a\0b'] [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> r = table.rename_columns map on_problems=pb r.should_fail_with Invalid_Column_Names - Test.specify "should correctly handle problems: duplicate names" <| + group_builder.specify "should correctly handle problems: duplicate names" <| map = ["Test", "Test", "Test", "Test"] action = table.rename_columns map on_problems=_ tester = expect_column_names ["Test 1", "Test 2", "Test 3", "Test"] 
problems = [Duplicate_Output_Column_Names.Error ["Test", "Test", "Test"]] Problems.test_problem_handling action problems tester - Test.specify "should correctly handle problems: new name is clashing with existing name of existing column" <| + group_builder.specify "should correctly handle problems: new name is clashing with existing name of existing column" <| map = Map.from_vector [["alpha", "beta"]] action = table.rename_columns map on_problems=_ tester = expect_column_names ["beta", "beta 1", "gamma", "delta"] @@ -484,7 +482,7 @@ spec setup = problems2 = [Duplicate_Output_Column_Names.Error ["alpha"]] Problems.test_problem_handling action2 problems2 tester2 - Test.specify "should correctly handle problems: too many input names" <| + group_builder.specify "should correctly handle problems: too many input names" <| map = ["A", "B", "C", "D", "E", "F"] action = table.rename_columns map on_problems=_ tester = expect_column_names ["A", "B", "C", "D"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso index 9c04d14eeb6d..f9b06b7661da 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso @@ -7,24 +7,23 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument from Standard.Table.Data.Aggregate_Column.Aggregate_Column import Group_By, Sum from Standard.Table.Errors import all -from Standard.Test import Test -import Standard.Test.Extensions +from Standard.Test_New import all from project.Util import all from project.Common_Table_Operations.Util import run_default_backend -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder - Test.group prefix+"Table.take/drop" <| + suite_builder.group prefix+"Table.take/drop" group_builder-> table = col1 = ["alpha", [1,2,3,4,5,6,7,8]] col2 = ["beta", ["A","B","C","D","E","F","G","H"]] 
table_builder [col1, col2] . order_by "alpha" empty = table.remove_all_rows - Test.specify "should allow selecting first or last N rows" <| + group_builder.specify "should allow selecting first or last N rows" <| table.take.at "alpha" . to_vector . should_equal [1] table.take.at "beta" . to_vector . should_equal ["A"] table.drop.at "alpha" . to_vector . should_equal [2,3,4,5,6,7,8] @@ -59,7 +58,7 @@ spec setup = table.drop (Last -1) . should_equal table table.drop (Last 100) . should_equal empty - Test.specify "should handle consecutive take/drops" <| + group_builder.specify "should handle consecutive take/drops" <| table.take 5 . order_by "alpha" . take 3 . at "alpha" . to_vector . should_equal [1, 2, 3] table.take 3 . order_by "alpha" . take 5 . at "alpha" . to_vector . should_equal [1, 2, 3] table.take 5 . order_by "alpha" . drop 3 . at "alpha" . to_vector . should_equal [4, 5] @@ -67,7 +66,7 @@ spec setup = table.drop 2 . order_by "alpha" . drop 3 . at "alpha" . to_vector . should_equal [6, 7, 8] table.drop 3 . order_by "alpha" . take 2 . at "alpha" . to_vector . should_equal [4, 5] - Test.specify "should allow selecting rows by ranges or indices" <| + group_builder.specify "should allow selecting rows by ranges or indices" <| table.take (2.up_to 4) . at "beta" . to_vector . should_equal ["C", "D"] table.take (0.up_to 0) . should_equal empty table.take (100.up_to 100) . should_fail_with Index_Out_Of_Bounds @@ -111,7 +110,7 @@ spec setup = table.drop (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds table.drop (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds - Test.specify "should allow selecting every Nth row" <| + group_builder.specify "should allow selecting every Nth row" <| table.take (Every 1) . should_equal table table.take (Every 3) . at "alpha" . to_vector . should_equal [1, 4, 7] table.take (Every 3 first=1) . at "alpha" . to_vector . 
should_equal [2, 5, 8] @@ -133,7 +132,7 @@ spec setup = empty.drop (Every 0) . should_fail_with Illegal_Argument if setup.is_database.not then - Test.specify "should allow sampling rows" <| + group_builder.specify "should allow sampling rows" <| one = table_builder [["X", ["a"]]] . order_by "X" two = table_builder [["X", ["a", "a"]]] . order_by "X" three = table_builder [["X", ["a", "a", "a"]]] . order_by "X" @@ -158,15 +157,15 @@ spec setup = rnd.at "alpha" . to_vector . should_equal alpha_sample rnd.at "beta" . to_vector . should_equal beta_sample - Test.specify "sampling should be deterministic when a seed is supplied" <| + group_builder.specify "sampling should be deterministic when a seed is supplied" <| table.take (Sample 3 seed=4200000) . should_equal (table.take (Sample 3 seed=4200000)) - Test.specify "sampling should be non-deterministic when a seed is not supplied" <| + group_builder.specify "sampling should be non-deterministic when a seed is not supplied" <| 0.up_to 3 . map _-> table.take (Sample 3) . should_not_equal (table.take (Sample 3)) if setup.is_database.not then - Test.specify "should allow selecting rows as long as they satisfy a predicate" <| + group_builder.specify "should allow selecting rows as long as they satisfy a predicate" <| t = table_builder [["a", [1, 2, 3, 4]], ["b", [5, 6, 7, 8]]] t2 = t.take (While (row -> row.at "a" < 3)) @@ -174,7 +173,7 @@ spec setup = t2.at "a" . to_vector . should_equal [1, 2] t2.at "b" . to_vector . should_equal [5, 6] - Test.specify "should gracefully handle missing constructor arguments" <| + group_builder.specify "should gracefully handle missing constructor arguments" <| t = table_builder [["X", [1, 2, 3]]] t.take "FOO" . should_fail_with Type_Error t.drop "FOO" . should_fail_with Type_Error @@ -191,7 +190,7 @@ spec setup = r3.should_fail_with Illegal_Argument r3.catch.to_display_text . should_contain "Got a Function instead of a range, is a constructor argument missing?" 
- Test.specify "unordered table" <| + group_builder.specify "unordered table" <| unordered_table = col1 = ["alpha", [1,2,3,4,5,6,7,8]] col2 = ["beta", ["A","B","C","D","E","F","G","H"]] @@ -200,7 +199,7 @@ spec setup = True -> unordered_table.take . should_fail_with Illegal_Argument False -> unordered_table.take . at "alpha" . to_vector . should_equal [1] - Test.specify "Should work correctly after aggregation" <| + group_builder.specify "Should work correctly after aggregation" <| t0 = table_builder [["X", ["a", "b", "a", "c"]], ["Y", [1, 2, 4, 8]]] t1 = t0.aggregate [Group_By "X", Sum "Y"] @@ -212,7 +211,7 @@ spec setup = t3.at "X" . to_vector . should_equal ['b', 'c'] t3.at "Sum Y" . to_vector . should_equal [2.0, 8.0] - Test.group prefix+"Column.take/drop" <| + suite_builder.group prefix+"Column.take/drop" group_builder-> table = col1 = ["alpha", [1,2,3,4,5,6,7,8]] col2 = ["beta", ["A","B","C","D","E","F","G","H"]] @@ -224,7 +223,7 @@ spec setup = empty_alpha = empty_table.at "alpha" empty_beta = empty_table.at "beta" - Test.specify "should allow selecting first or last N rows" <| + group_builder.specify "should allow selecting first or last N rows" <| alpha.take.to_vector . should_equal [1] beta.take.to_vector . should_equal ["A"] alpha.drop.to_vector . should_equal [2,3,4,5,6,7,8] @@ -259,7 +258,7 @@ spec setup = alpha.drop (Last -1) . should_equal alpha alpha.drop (Last 100) . should_equal empty_alpha - Test.specify "should handle consecutive take/drops" <| + group_builder.specify "should handle consecutive take/drops" <| alpha.take 5 . sort . take 3 . to_vector . should_equal [1, 2, 3] alpha.take 3 . sort . take 5 . to_vector . should_equal [1, 2, 3] alpha.take 5 . sort . drop 3 . to_vector . should_equal [4, 5] @@ -267,7 +266,7 @@ spec setup = alpha.drop 2 . sort . drop 3 . to_vector . should_equal [6, 7, 8] alpha.drop 3 . sort . take 2 . to_vector . 
should_equal [4, 5] - Test.specify "should allow selecting rows by ranges or indices" <| + group_builder.specify "should allow selecting rows by ranges or indices" <| beta.take (2.up_to 4) . to_vector . should_equal ["C", "D"] beta.take (0.up_to 0) . should_equal empty_beta beta.take (100.up_to 100) . should_fail_with Index_Out_Of_Bounds @@ -311,7 +310,7 @@ spec setup = alpha.drop (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds alpha.drop (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds - Test.specify "should allow selecting every Nth row" <| + group_builder.specify "should allow selecting every Nth row" <| alpha.take (Every 1) . should_equal alpha alpha.take (Every 3) . to_vector . should_equal [1, 4, 7] alpha.take (Every 3 first=1) . to_vector . should_equal [2, 5, 8] @@ -333,7 +332,7 @@ spec setup = empty_beta.drop (Every 0) . should_fail_with Illegal_Argument if setup.is_database.not then - Test.specify "should allow sampling rows" <| + group_builder.specify "should allow sampling rows" <| three = table_builder [["X", ["a", "a", "a"]]] . at "X" two = table_builder [["X", ["a", "a"]]] . at "X" one_table = table_builder [["X", ["a"]]] @@ -367,7 +366,7 @@ spec setup = rnd.should_equal sample if setup.is_database.not then - Test.specify "should allow selecting rows as long as they satisfy a predicate" <| + group_builder.specify "should allow selecting rows as long as they satisfy a predicate" <| col = table_builder [["X", [1, 3, 5, 6, 8, 9, 10, 11, 13]]] . at "X" col.take (While (x-> x%2 == 1)) . to_vector . should_equal [1, 3, 5] col.drop (While (x-> x%2 == 1)) . to_vector . should_equal [6, 8, 9, 10, 11, 13] @@ -381,7 +380,7 @@ spec setup = three.drop (While (_ > 10)) . should_equal three three.drop (While (_ < 10)) . 
should_equal empty - Test.specify "should gracefully handle missing constructor arguments" <| + group_builder.specify "should gracefully handle missing constructor arguments" <| c = table_builder [["X", [1, 2, 3]]] . at "X" c.take "FOO" . should_fail_with Type_Error c.drop "FOO" . should_fail_with Type_Error @@ -398,7 +397,7 @@ spec setup = r3.should_fail_with Illegal_Argument r3.catch.to_display_text . should_contain "Got a Function instead of a range, is a constructor argument missing?" - Test.specify "unordered table" <| + group_builder.specify "unordered table" <| unordered_table = col1 = ["alpha", [1,2,3,4,5,6,7,8]] col2 = ["beta", ["A","B","C","D","E","F","G","H"]] @@ -407,4 +406,3 @@ spec setup = True -> unordered_table.at "alpha" . take . should_fail_with Illegal_Argument False -> unordered_table.at "alpha" . take . to_vector . should_equal [1] -main = run_default_backend spec diff --git a/test/Table_Tests/src/Common_Table_Operations/Temp_Column_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Temp_Column_Spec.enso index e868c863d6d0..57823e5aa596 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Temp_Column_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Temp_Column_Spec.enso @@ -1,21 +1,20 @@ from Standard.Base import all -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + from project.Common_Table_Operations.Util import run_default_backend -main = run_default_backend spec -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder - Test.group prefix+"Temp column" <| - Test.specify "Can generate a temp column" <| + suite_builder.group prefix+"Temp column" group_builder-> + group_builder.specify "Can generate a temp column" <| t1 = table_builder [["X", ['a', 'b']], ["Y", ['c', 'd']], ["Z", [40, 20]]] t1.make_temp_column_name . 
should_equal "temp" - Test.specify "Can generate a temp column without name conflicts" <| + group_builder.specify "Can generate a temp column without name conflicts" <| t1 = table_builder [["X", ['a', 'b']], ["Y", ['c', 'd']], ["temp", [40, 20]]] t1.make_temp_column_name . should_equal "temp 1" diff --git a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso index cfcab8498c5f..494d941b3e04 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso @@ -2,19 +2,18 @@ from Standard.Base import all from Standard.Table.Errors import all -from Standard.Test import Test, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + from project.Common_Table_Operations.Util import run_default_backend -main = run_default_backend spec -spec setup = +add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder db_todo = if setup.is_database.not then Nothing else "Table.transpose is not implemented yet in Database." - Test.group prefix+"Table.transpose" pending=db_todo <| - Test.specify "should transpose all columns by default" <| + suite_builder.group prefix+"Table.transpose" pending=db_todo group_builder-> + group_builder.specify "should transpose all columns by default" <| t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]] t1 = t.transpose t1.row_count . should_equal 12 @@ -22,7 +21,7 @@ spec setup = t1.at "Name" . to_vector . should_equal ["Key", "Value", "Another", "Yet Another", "Key", "Value", "Another", "Yet Another", "Key", "Value", "Another", "Yet Another"] t1.at "Value" . to_vector . 
should_equal ["x", 1, 10, Nothing, "y", 2, Nothing, "Hello", "z", 3, 20, "World"] - Test.specify "should allow custom names" <| + group_builder.specify "should allow custom names" <| t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]] t1 = t.transpose attribute_column_name="Key" value_column_name="Object" t1.row_count . should_equal 12 @@ -30,7 +29,7 @@ spec setup = t1.at "Key" . to_vector . should_equal ["Key", "Value", "Another", "Yet Another", "Key", "Value", "Another", "Yet Another", "Key", "Value", "Another", "Yet Another"] t1.at "Object" . to_vector . should_equal ["x", 1, 10, Nothing, "y", 2, Nothing, "Hello", "z", 3, 20, "World"] - Test.specify "should allow id fields" <| + group_builder.specify "should allow id fields" <| t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]] t1 = t.transpose ["Key"] t1.row_count . should_equal 9 @@ -39,7 +38,7 @@ spec setup = t1.at "Name" . to_vector . should_equal ["Value", "Another", "Yet Another", "Value", "Another", "Yet Another", "Value", "Another", "Yet Another"] t1.at "Value" . to_vector . should_equal [1, 10, Nothing, 2, Nothing, "Hello", 3, 20, "World"] - Test.specify "should allow single id field" <| + group_builder.specify "should allow single id field" <| t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]] t1 = t.transpose "Key" t1.row_count . should_equal 9 @@ -48,7 +47,7 @@ spec setup = t1.at "Name" . to_vector . should_equal ["Value", "Another", "Yet Another", "Value", "Another", "Yet Another", "Value", "Another", "Yet Another"] t1.at "Value" . to_vector . 
should_equal [1, 10, Nothing, 2, Nothing, "Hello", 3, 20, "World"] - Test.specify "should allow fields selected by index" <| + group_builder.specify "should allow fields selected by index" <| t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]] t1 = t.transpose [0, -1] t1.column_names . should_equal ["Key", "Yet Another", "Name", "Value"] @@ -58,7 +57,7 @@ spec setup = t1.at "Name" . to_vector . should_equal ["Value", "Another", "Value", "Another", "Value", "Another"] t1.at "Value" . to_vector . should_equal [1, 10, 2, Nothing, 3, 20] - Test.specify "should allow all current columns to become id fields, without warning" <| + group_builder.specify "should allow all current columns to become id fields, without warning" <| t = table_builder [["Key", ["x", "y", "z"]], ["Foo", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]] t1 = t.transpose t.column_names t1.row_count . should_equal 3 @@ -70,7 +69,7 @@ spec setup = Problems.assume_no_problems t1 # ToDo: Verify the warnings and error handling within transpose. 
- Test.specify "should handle missing columns" <| + group_builder.specify "should handle missing columns" <| t1 = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]]] err1 = t1.transpose ["Key", "Missing", "Missing 2"] @@ -93,7 +92,7 @@ spec setup = problems2 = [Missing_Input_Columns.Error [42, -100]] Problems.test_problem_handling action2 problems2 tester2 - Test.specify "should warn on column name clashes" <| + group_builder.specify "should warn on column name clashes" <| t1 = table_builder [["X", ["x", "y", "z"]], ["Y", [1, 2, 3]], ["Z", [10, Nothing, 20]]] action1 = t1.transpose ["X", "Y", "Z"] attribute_column_name="Y" value_column_name="Z" on_problems=_ diff --git a/test/Table_Tests/src/Database/Common/Common_Spec.enso b/test/Table_Tests/src/Database/Common/Common_Spec.enso index 03cdfa4c3c98..a8248b61c428 100644 --- a/test/Table_Tests/src/Database/Common/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Common_Spec.enso @@ -9,7 +9,6 @@ from Standard.Database import all from Standard.Database.Errors import all from Standard.Test_New import all -import Standard.Test_New.Problems import Standard.Test_New.Suite.Suite_Builder import project.Database.Common.Default_Ordering_Spec @@ -17,7 +16,6 @@ import project.Database.Common.Names_Length_Limits_Spec import project.Util import project.Database.Helpers.Name_Generator -import project.Database.Common.Default_Ordering_Spec_New upload connection prefix data temporary=True = @@ -157,13 +155,13 @@ type Missing_Values_Data drop_table self.connection self.t4.name -## Docs +## Arguments: - create_connection_fn: A function that creates an appropriate Connection to the database backend. 
-add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : (Nothing -> Any)) = +add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : (Nothing -> Any)) = - Default_Ordering_Spec.add_default_ordering_specs suite_builder prefix create_connection_fn + Default_Ordering_Spec.add_specs suite_builder prefix create_connection_fn # TODO: # Names_Length_Limits_Spec.add_specs suite_builder prefix create_connection_fn diff --git a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec.enso b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec.enso index f5c972caed61..2b0bc5e2d95f 100644 --- a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec.enso @@ -35,7 +35,7 @@ type Data Arguments: - suite_builder: A Suite_Builder in which a new group will be created - create_connection_fn: A function that creates an appropriate Connection to the database backend. -add_default_ordering_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : Any) = +add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : Any) = group_name = prefix + "Table.default_ordering" suite_builder.group group_name group_builder-> diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 6a898c6b4600..4a8030aa0d34 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -1,8 +1,10 @@ from Standard.Base import all import Standard.Base.Runtime.Ref.Ref +from Standard.Base.Runtime import assert import Standard.Base.Errors.File_Error.File_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + import Standard.Table.Data.Type.Value_Type.Bits from Standard.Table import Table, Value_Type from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names @@ -12,8 +14,7 @@ import 
Standard.Database.Internal.Replace_Params.Replace_Params from Standard.Database import all from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all import project.Database.Common.Common_Spec import project.Database.Transaction_Spec @@ -22,25 +23,43 @@ import project.Database.Types.SQLite_Type_Mapping_Spec import project.Database.Helpers.Name_Generator import project.Common_Table_Operations -sqlite_specific_spec prefix connection setup = - table_builder = setup.table_builder +type Data + Value ~connection + + setup create_connection_func = + connection = create_connection_func Nothing + Data.Value connection + + +type Metadata_Data + Value ~data + + connection self = self.data.at 0 + tinfo self = self.data.at 1 + t self = self.data.at 2 - Test.group prefix+"Schemas and Databases" <| - Test.specify "should be able to get current database and list databases" <| - connection.database . should_equal Nothing - connection.databases . should_equal [Nothing] - Meta.is_same_object connection (connection.set_database Nothing) . should_be_true + setup create_connection_func = + connection = create_connection_func Nothing + tinfo = Name_Generator.random_name "Tinfo" + connection.execute_update 'CREATE TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' + t = connection.query (SQL_Query.Table_Name tinfo) + row1 = ["a", Nothing, False, 1.2] + row2 = ["abc", Nothing, Nothing, 1.3] + row3 = ["def", 42, True, 1.4] + Panic.rethrow <| + t.update_rows (Table.from_rows ["strs", "ints", "bools", "reals"] [row1, row2, row3]) update_action=Update_Action.Insert + Data.Value [connection, tinfo, t] - Test.specify "should be able to get current schema and list schemas" <| - connection.schema . should_equal Nothing - connection.schemas . 
should_equal [Nothing] - Meta.is_same_object connection (connection.set_schema Nothing) . should_be_true +type Tables_And_Table_Types_Data + Value ~data - Test.specify "does not allow changing schema or database" <| - connection.set_schema "foo" . should_fail_with SQL_Error - connection.set_database "foo" . should_fail_with SQL_Error + connection self = self.data.at 0 + tinfo self = self.data.at 1 + vinfo self = self.data.at 2 + temporary_table self = self.data.at 3 - Test.group prefix+"Tables and Table Types" <| + setup create_connection_func = Tables_And_Table_Types_Data.Value <| + connection = create_connection_func Nothing tinfo = Name_Generator.random_name "TestTable" connection.execute_update 'CREATE TABLE "'+tinfo+'" ("A" VARCHAR)' @@ -50,113 +69,137 @@ sqlite_specific_spec prefix connection setup = temporary_table = Name_Generator.random_name "TemporaryTable" (Table.new [["X", [1, 2, 3]]]).select_into_database_table connection temporary_table temporary=True - Test.specify "should be able to list table types" <| - table_types = connection.table_types + [connection, tinfo, vinfo, temporary_table] + + + +sqlite_specific_spec suite_builder prefix create_connection_func setup = + table_builder = setup.table_builder + + suite_builder.group prefix+"Schemas and Databases" group_builder-> + data = Data.setup create_connection_func + + group_builder.specify "should be able to get current database and list databases" <| + data.connection.database . should_equal Nothing + data.connection.databases . should_equal [Nothing] + Meta.is_same_object data.connection (data.connection.set_database Nothing) . should_be_true + + group_builder.specify "should be able to get current schema and list schemas" <| + data.connection.schema . should_equal Nothing + data.connection.schemas . should_equal [Nothing] + Meta.is_same_object data.connection (data.connection.set_schema Nothing) . 
should_be_true + + group_builder.specify "does not allow changing schema or database" <| + data.connection.set_schema "foo" . should_fail_with SQL_Error + data.connection.set_database "foo" . should_fail_with SQL_Error + + suite_builder.group prefix+"Tables and Table Types" group_builder-> + data = Tables_And_Table_Types_Data.setup create_connection_func + + group_builder.specify "should be able to list table types" <| + table_types = data.connection.table_types table_types.length . should_not_equal 0 table_types.contains "TABLE" . should_be_true table_types.contains "VIEW" . should_be_true - Test.specify "should be able to list tables" <| - tables = connection.tables + group_builder.specify "should be able to list tables" <| + tables = data.connection.tables tables.row_count . should_not_equal 0 tables.columns.map .name . should_equal ["Database", "Schema", "Name", "Type", "Description"] table_names = tables.at "Name" . to_vector - table_names.should_contain tinfo - table_names.should_contain vinfo - table_names.should_contain temporary_table + table_names.should_contain data.tinfo + table_names.should_contain data.vinfo + table_names.should_contain data.temporary_table - Test.specify "should be able to filter tables by name" <| - tables = connection.tables tinfo + group_builder.specify "should be able to filter tables by name" <| + tables = data.connection.tables data.tinfo tables.row_count . should_equal 1 tables.at "Database" . to_vector . at 0 . should_equal Nothing tables.at "Schema" . to_vector . at 0 . should_equal Nothing - tables.at "Name" . to_vector . at 0 . should_equal tinfo + tables.at "Name" . to_vector . at 0 . should_equal data.tinfo tables.at "Type" . to_vector . at 0 . should_equal "TABLE" - connection.tables "TestT_ble%" . row_count . should_equal 1 - connection.tables "Temporary%ble%" . row_count . should_equal 1 - connection.tables "Temporary%ble%" . at "Type" . to_vector . 
should_equal ["GLOBAL TEMPORARY"] - connection.tables "N_nexistent%" . row_count . should_equal 0 + data.connection.tables "TestT_ble%" . row_count . should_equal 1 + data.connection.tables "Temporary%ble%" . row_count . should_equal 1 + data.connection.tables "Temporary%ble%" . at "Type" . to_vector . should_equal ["GLOBAL TEMPORARY"] + data.connection.tables "N_nexistent%" . row_count . should_equal 0 - Test.specify "should be able to filter tables by type" <| - tables = connection.tables types=["VIEW"] + group_builder.specify "should be able to filter tables by type" <| + tables = data.connection.tables types=["VIEW"] tables.row_count . should_not_equal 0 - tables.at "Name" . to_vector . contains tinfo . should_be_false - tables.at "Name" . to_vector . contains vinfo . should_be_true + tables.at "Name" . to_vector . contains data.tinfo . should_be_false + tables.at "Name" . to_vector . contains data.vinfo . should_be_true - Test.group prefix+"Error Handling" <| - Test.specify "should wrap errors" <| - connection.read (SQL_Query.Raw_SQL "foobar") . should_fail_with SQL_Error - connection.execute_update "foobar" . should_fail_with SQL_Error + suite_builder.group prefix+"Error Handling" group_builder-> + data = Data.setup create_connection_func - action = connection.read (SQL_Query.Raw_SQL "SELECT A FROM undefined_table") + group_builder.specify "should wrap errors" <| + data.connection.read (SQL_Query.Raw_SQL "foobar") . should_fail_with SQL_Error + data.connection.execute_update "foobar" . should_fail_with SQL_Error + + action = data.connection.read (SQL_Query.Raw_SQL "SELECT A FROM undefined_table") action . should_fail_with SQL_Error action.catch.to_text . should_equal "There was an SQL error: [SQLITE_ERROR] SQL error or missing database (no such table: undefined_table). 
[Query was: SELECT A FROM undefined_table]" - Test.specify "is capable of handling weird tables" <| + group_builder.specify "is capable of handling weird tables" <| Problems.assume_no_problems <| - connection.execute_update 'CREATE TEMPORARY TABLE "empty-column-name" ("" VARCHAR)' - t1 = connection.query (SQL_Query.Table_Name "empty-column-name") + data.connection.execute_update 'CREATE TEMPORARY TABLE "empty-column-name" ("" VARCHAR)' + t1 = data.connection.query (SQL_Query.Table_Name "empty-column-name") Problems.expect_only_warning Invalid_Column_Names t1 t1.column_names . should_equal ["Column 1"] m1 = t1.read m1.at "Column 1" . to_vector . should_equal [] Problems.assume_no_problems <| - connection.execute_update 'CREATE TEMPORARY TABLE "clashing-unicode-names" ("ś" VARCHAR, "s\u0301" INTEGER)' + data.connection.execute_update 'CREATE TEMPORARY TABLE "clashing-unicode-names" ("ś" VARCHAR, "s\u0301" INTEGER)' Problems.assume_no_problems <| - connection.execute_update 'INSERT INTO "clashing-unicode-names" VALUES (\'A\', 2)' - t2 = connection.query (SQL_Query.Table_Name "clashing-unicode-names") + data.connection.execute_update 'INSERT INTO "clashing-unicode-names" VALUES (\'A\', 2)' + t2 = data.connection.query (SQL_Query.Table_Name "clashing-unicode-names") Problems.expect_only_warning Duplicate_Output_Column_Names t2 t2.column_names . should_equal ["ś", "ś 1"] m2 = t2.read m2.at "ś" . to_vector . should_equal ["A"] m2.at "ś 1" . to_vector . should_equal [2] - r3 = connection.query 'SELECT 1 AS "A", 2 AS "A"' + r3 = data.connection.query 'SELECT 1 AS "A", 2 AS "A"' r3.should_fail_with Illegal_Argument r3.catch.cause . should_be_a Duplicate_Output_Column_Names - r4 = connection.query 'SELECT 1 AS ""' + r4 = data.connection.query 'SELECT 1 AS ""' r4.should_fail_with Illegal_Argument r4.catch.cause . 
should_be_a Invalid_Column_Names - tinfo = Name_Generator.random_name "Tinfo" - connection.execute_update 'CREATE TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' - Test.group prefix+"Metadata" <| - t = connection.query (SQL_Query.Table_Name tinfo) - row1 = ["a", Nothing, False, 1.2] - row2 = ["abc", Nothing, Nothing, 1.3] - row3 = ["def", 42, True, 1.4] - Panic.rethrow <| - t.update_rows (Table.from_rows ["strs", "ints", "bools", "reals"] [row1, row2, row3]) update_action=Update_Action.Insert + suite_builder.group prefix+"Metadata" group_builder-> + data = Metadata_Data.setup create_connection_func - Test.specify "should return Table information" <| - i = t.info + group_builder.specify "should return Table information" <| + i = data.t.info i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "reals"] i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3] i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Boolean, Value_Type.Float] - Test.specify "should infer standard types correctly" <| - t.at "strs" . value_type . is_text . should_be_true - t.at "ints" . value_type . is_integer . should_be_true - t.at "bools" . value_type . is_boolean . should_be_true - t.at "reals" . value_type . is_floating_point . should_be_true - - t.at "ints" . value_type . is_text . should_be_false - t.at "strs" . value_type . is_integer . should_be_false - t.at "reals" . value_type . is_boolean . should_be_false - t.at "bools" . value_type . is_floating_point . should_be_false - - Test.group prefix+"Dialect-specific codegen" <| - Test.specify "should generate queries for the Distinct operation" <| - t = connection.query (SQL_Query.Table_Name tinfo) + group_builder.specify "should infer standard types correctly" <| + data.t.at "strs" . value_type . is_text . should_be_true + data.t.at "ints" . value_type . is_integer . should_be_true + data.t.at "bools" . value_type . is_boolean . 
should_be_true + data.t.at "reals" . value_type . is_floating_point . should_be_true + + data.t.at "ints" . value_type . is_text . should_be_false + data.t.at "strs" . value_type . is_integer . should_be_false + data.t.at "reals" . value_type . is_boolean . should_be_false + data.t.at "bools" . value_type . is_floating_point . should_be_false + + suite_builder.group prefix+"Dialect-specific codegen" group_builder-> + data = Metadata_Data.setup create_connection_func + + group_builder.specify "should generate queries for the Distinct operation" <| + t = data.connection.query (SQL_Query.Table_Name data.tinfo) code_template = 'SELECT "{Tinfo}"."strs" AS "strs", "{Tinfo}"."ints" AS "ints", "{Tinfo}"."bools" AS "bools", "{Tinfo}"."reals" AS "reals" FROM (SELECT "{Tinfo}_inner"."strs" AS "strs", "{Tinfo}_inner"."ints" AS "ints", "{Tinfo}_inner"."bools" AS "bools", "{Tinfo}_inner"."reals" AS "reals" FROM (SELECT "{Tinfo}"."strs" AS "strs", "{Tinfo}"."ints" AS "ints", "{Tinfo}"."bools" AS "bools", "{Tinfo}"."reals" AS "reals" FROM "{Tinfo}" AS "{Tinfo}") AS "{Tinfo}_inner" GROUP BY "{Tinfo}_inner"."strs") AS "{Tinfo}"' - expected_code = code_template.replace "{Tinfo}" tinfo + expected_code = code_template.replace "{Tinfo}" data.tinfo t.distinct ["strs"] . to_sql . prepare . should_equal [expected_code, []] - Test.group prefix+"math functions" <| - Test.specify "round, trunc, ceil, floor" <| + suite_builder.group prefix+"math functions" group_builder-> + group_builder.specify "round, trunc, ceil, floor" <| col = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] . at "x" col . cast Value_Type.Float . round . value_type . 
should_equal Value_Type.Float @@ -189,20 +232,20 @@ sqlite_specific_spec prefix connection setup = result.to_vector.at 0 do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers) - Test.specify "Can round correctly near the precision limit" <| + group_builder.specify "Can round correctly near the precision limit" <| # This value varies depending on the version of SQLite. do_round 1.2222222222222225 15 . should_equal 1.222222222222223 0.000000000000002 do_round -1.2222222222222225 15 . should_equal -1.222222222222223 0.000000000000002 do_round 1.2222222222222235 15 . should_equal 1.222222222222223 do_round -1.2222222222222235 15 . should_equal -1.222222222222223 - Test.specify "Can round correctly near the precision limit, using banker's rounding" <| + group_builder.specify "Can round correctly near the precision limit, using banker's rounding" <| do_round 1.2222222222222225 15 use_bankers=True . should_equal 1.222222222222222 do_round -1.2222222222222225 15 use_bankers=True . should_equal -1.222222222222222 do_round 1.2222222222222235 15 use_bankers=True . should_equal 1.222222222222224 do_round -1.2222222222222235 15 use_bankers=True . should_equal -1.222222222222224 - Test.specify "Can handle NaN/Infinity" <| + group_builder.specify "Can handle NaN/Infinity" <| nan_result = if setup.test_selection.is_nan_and_nothing_distinct then Number.nan else Nothing ops = [.round, .truncate, .ceil, .floor] ops.each op-> @@ -210,26 +253,27 @@ sqlite_specific_spec prefix connection setup = do_op Number.positive_infinity op . should_equal Number.positive_infinity do_op Number.negative_infinity op . should_equal Number.negative_infinity - Test.specify "round returns the correct type" <| + group_builder.specify "round returns the correct type" <| do_round 231.2 1 . should_be_a Float do_round 231.2 0 . should_be_a Float do_round 231.2 . should_be_a Float do_round 231.2 -1 . 
should_be_a Float - Test.specify "round returns the correct type" <| + group_builder.specify "round returns the correct type" <| do_round 231 1 . should_be_a Float do_round 231 0 . should_be_a Float do_round 231 . should_be_a Float do_round 231 -1 . should_be_a Float - Test.group prefix+"Column.const" <| - Test.specify "Does not support making a constant column from a Date" <| + suite_builder.group prefix+"Column.const" group_builder-> + group_builder.specify "Does not support making a constant column from a Date" <| t = table_builder [["x", ["1", "2", "3"]]] t.at "x" . const (Date.new 12 4 12) . should_fail_with Unsupported_Database_Operation -sqlite_spec connection prefix = +sqlite_spec suite_builder prefix create_connection_func = name_counter = Ref.new 0 table_builder columns = + connection = create_connection_func Nothing ix = name_counter.get name_counter . put ix+1 name = Name_Generator.random_name "table_"+ix.to_text @@ -238,7 +282,7 @@ sqlite_spec connection prefix = in_mem_table.select_into_database_table connection name primary_key=Nothing materialize = .read - Common_Spec.spec prefix connection + Common_Spec.add_specs suite_builder prefix create_connection_func common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=False order_by=True natural_ordering=False case_insensitive_ordering=True case_insensitive_ascii_only=True is_nan_and_nothing_distinct=False date_time=False supported_replace_params=supported_replace_params different_size_integer_types=False length_restricted_text_columns=False @@ -252,14 +296,19 @@ sqlite_spec connection prefix = the missing statistics. aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config advanced_stats=False text_shortest_longest=False first_last=False first_last_row_order=False multi_distinct=False aggregation_problems=False nan=False date_support=False agg_in_memory_table = (enso_project.data / "data.csv") . 
read - agg_table = agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True - empty_agg_table = (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True - setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection connection=connection - sqlite_specific_spec prefix connection setup - Common_Table_Operations.Main.spec setup + agg_table_fn = _ -> + connection = create_connection_func Nothing + agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True + + empty_agg_table_fn = _ -> + connection = create_connection_func Nothing + (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True + + setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func + sqlite_specific_spec suite_builder prefix create_connection_func setup + Common_Table_Operations.Main.add_specs suite_builder setup - connection.close ## PRIVATE supported_replace_params : Set Replace_Params @@ -267,38 +316,68 @@ supported_replace_params = e = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Sensitive False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive True] Set.from_vector e -spec = - enso_project.data.create_directory - file = enso_project.data / "transient" / 
"sqlite_test.db" - file.delete_if_exists +backing_file = + transient_dir = enso_project.data / "transient" + assert transient_dir.exists ("There should be .gitignore file in the " + transient_dir.path + " directory") + transient_dir / "sqlite_test.db" + + +create_inmem_connection = + Database.connect (SQLite In_Memory) + + +create_file_connection file = + connection = Database.connect (SQLite file) + connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' + connection + + +type File_Connection + Value ~file + + setup = File_Connection.Value <| + IO.println <| " SQLite_Spec_New.File_Connection.setup" + tmp_file = backing_file + con = create_file_connection backing_file + con.close + assert tmp_file.exists + tmp_file + + + teardown self = + IO.println <| " SQLite_Spec_New.File_Connection.teardown" + assert self.file.exists + self.file.delete + + +suite = Test.build suite_builder-> in_file_prefix = "[SQLite File] " - sqlite_spec (Database.connect (SQLite file)) in_file_prefix - Transaction_Spec.spec (Database.connect (SQLite file)) in_file_prefix - Upload_Spec.spec (_ -> Database.connect (SQLite file)) in_file_prefix - file.delete + + sqlite_spec suite_builder in_file_prefix (_ -> create_file_connection backing_file) + Transaction_Spec.add_specs suite_builder (_ -> create_file_connection backing_file) in_file_prefix + Upload_Spec.add_specs suite_builder (_ -> create_file_connection backing_file) in_file_prefix in_memory_prefix = "[SQLite In-Memory] " - sqlite_spec (Database.connect (SQLite In_Memory)) in_memory_prefix - Transaction_Spec.spec (Database.connect (SQLite In_Memory)) in_memory_prefix - Upload_Spec.spec (_ -> Database.connect (SQLite In_Memory)) in_memory_prefix persistent_connector=False + sqlite_spec suite_builder in_memory_prefix (_ -> create_inmem_connection) + Transaction_Spec.add_specs suite_builder (_ -> create_inmem_connection) in_memory_prefix + Upload_Spec.add_specs suite_builder (_ -> 
create_inmem_connection) in_memory_prefix persistent_connector=False - SQLite_Type_Mapping_Spec.spec + SQLite_Type_Mapping_Spec.add_specs suite_builder - Test.group "SQLite_Format should allow connecting to SQLite files" <| - file.delete_if_exists + suite_builder.group "SQLite_Format should allow connecting to SQLite files" group_builder-> + data = File_Connection.setup - connection = Database.connect (SQLite file) - connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' - connection.close + group_builder.teardown <| + data.teardown - Test.specify "should recognise a SQLite database file" <| - Auto_Detect.get_reading_format file . should_be_a SQLite_Format + group_builder.specify "should recognise a SQLite database file" <| + Auto_Detect.get_reading_format data.file . should_be_a SQLite_Format - Test.specify "should recognise a sqlite file by extension for writing" <| + group_builder.specify "should recognise a sqlite file by extension for writing" <| Auto_Detect.get_writing_format (enso_project.data / "nonexistent-data.db") . should_be_a SQLite_Format Auto_Detect.get_writing_format (enso_project.data / "nonexistent-data.sqlite") . should_be_a SQLite_Format - Test.specify "should not recognise nonexistent or empty files for reading" <| + group_builder.specify "should not recognise nonexistent or empty files for reading" <| r1 = Data.read (enso_project.data / "nonexistent-data.db") r1.should_fail_with File_Error r1.catch . should_be_a File_Error.Not_Found @@ -317,19 +396,20 @@ spec = r3.catch . should_be_a File_Error.Unsupported_Type broken.delete_if_exists - Test.specify "should connect to a db file" <| - connection = Data.read file + group_builder.specify "should connect to a db file" <| + connection = Data.read data.file tables = connection.tables tables.row_count . 
should_not_equal 0 connection.close - file.delete_if_exists - - Test.specify 'should not duplicate warnings' <| + group_builder.specify 'should not duplicate warnings' <| c = Database.connect (SQLite In_Memory) t0 = Table.new [["X", ["a", "bc", "def"]]] t1 = t0.select_into_database_table c "Tabela" t2 = t1.cast "X" (Value_Type.Char size=1) Warning.get_all t2 . length . should_equal 1 -main = Test_Suite.run_main spec +main = + IO.println <| "=========" + suite.print_all + IO.println <| "=========" diff --git a/test/Table_Tests/src/Database/Transaction_Spec.enso b/test/Table_Tests/src/Database/Transaction_Spec.enso index fae8527d5042..c9bbec2fd70f 100644 --- a/test/Table_Tests/src/Database/Transaction_Spec.enso +++ b/test/Table_Tests/src/Database/Transaction_Spec.enso @@ -7,65 +7,74 @@ from Standard.Database.Errors import all import Standard.Database.Internal.IR.Query.Query import Standard.Database.Internal.IR.SQL_Expression.SQL_Expression -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all import project.Database.Helpers.Name_Generator type My_Error Error -main = Test_Suite.run_main <| - spec (Database.connect (SQLite In_Memory)) "[SQLite] " +type Data + Value ~data -spec connection prefix = - Test.group prefix+"Transaction Support" <| + connection self = self.data.at 0 + simple_table_structure self = self.data.at 1 + + setup create_connection_func = Data.Value <| + connection = create_connection_func Nothing simple_table_structure = [Column_Description.Value "X" Value_Type.Integer] + [connection, simple_table_structure] + +add_specs suite_builder create_connection_func prefix = + suite_builder.group prefix+"Transaction Support" group_builder-> + data = Data.setup create_connection_func + insert_value table_name x = pairs = [["X", SQL_Expression.Constant x]] - sql = connection.dialect.generate_sql <| Query.Insert table_name pairs - connection.execute_update sql . 
should_succeed - Test.specify "should commit the changes after the transaction returns a regular value" <| + sql = data.connection.dialect.generate_sql <| Query.Insert table_name pairs + data.connection.execute_update sql . should_succeed + + group_builder.specify "should commit the changes after the transaction returns a regular value" <| table_name = Name_Generator.random_name "transaction-test-1" - t1 = connection.create_table table_name=table_name structure=simple_table_structure temporary=True + t1 = data.connection.create_table table_name=table_name structure=data.simple_table_structure temporary=True t1.should_succeed - r1 = connection.jdbc_connection.run_within_transaction <| + r1 = data.connection.jdbc_connection.run_within_transaction <| insert_value table_name 1 42 r1.should_equal 42 t1.at "X" . to_vector . should_equal [1] - connection.drop_table table_name + data.connection.drop_table table_name - Test.specify "should rollback the changes after the inner action panics" <| + group_builder.specify "should rollback the changes after the inner action panics" <| table_name = Name_Generator.random_name "transaction-test-2" - t1 = connection.create_table table_name=table_name structure=simple_table_structure temporary=True + t1 = data.connection.create_table table_name=table_name structure=data.simple_table_structure temporary=True t1.should_succeed Test.expect_panic_with matcher=My_Error <| - connection.jdbc_connection.run_within_transaction <| + data.connection.jdbc_connection.run_within_transaction <| insert_value table_name 1 Panic.throw My_Error.Error t1.at "X" . to_vector . 
should_equal [] - connection.drop_table table_name + data.connection.drop_table table_name - Test.specify "should rollback the changes if the inner action returns a dataflow error" <| + group_builder.specify "should rollback the changes if the inner action returns a dataflow error" <| table_name = Name_Generator.random_name "transaction-test-3" - t1 = connection.create_table table_name=table_name structure=simple_table_structure temporary=True + t1 = data.connection.create_table table_name=table_name structure=data.simple_table_structure temporary=True t1.should_succeed - r1 = connection.jdbc_connection.run_within_transaction <| + r1 = data.connection.jdbc_connection.run_within_transaction <| insert_value table_name 1 Error.throw My_Error.Error r1.should_fail_with My_Error t1.at "X" . to_vector . should_equal [] - connection.drop_table table_name + data.connection.drop_table table_name - Test.specify "should commit the changes even if the inner action return value has warnings attached" <| + group_builder.specify "should commit the changes even if the inner action return value has warnings attached" <| table_name = Name_Generator.random_name "transaction-test-4" - t1 = connection.create_table table_name=table_name structure=simple_table_structure temporary=True + t1 = data.connection.create_table table_name=table_name structure=data.simple_table_structure temporary=True t1.should_succeed - r1 = connection.jdbc_connection.run_within_transaction <| + r1 = data.connection.jdbc_connection.run_within_transaction <| insert_value table_name 1 result = 43 with_warnings = Warning.attach My_Error.Error result @@ -74,4 +83,4 @@ spec connection prefix = Problems.expect_only_warning My_Error r1 t1.at "X" . to_vector . 
should_equal [1] - connection.drop_table table_name + data.connection.drop_table table_name diff --git a/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso b/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso index e5dd68550a19..e07e48e0539a 100644 --- a/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso +++ b/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso @@ -11,23 +11,30 @@ import Standard.Database.Internal.SQLite.SQLite_Type_Mapping from Standard.Database import Database, SQLite, In_Memory, SQL_Query from Standard.Database.Errors import Unsupported_Database_Operation -from Standard.Test import Problems, Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all import project.Database.Helpers.Name_Generator -spec = - connection = Database.connect (SQLite In_Memory) - make_table prefix columns = - name = prefix - column_exprs = columns.map col_def-> - col_def.first + " " + col_def.second - stmt = 'CREATE TABLE "'+name+'" ('+(column_exprs.join ', ')+');' - Problems.assume_no_problems <| connection.execute_update stmt - connection.query (SQL_Query.Table_Name name) - - Test.group "[SQLite] Type Mapping" <| - Test.specify "should map its basic affinities" <| +type Data + Value ~connection + + setup = + connection = Database.connect (SQLite In_Memory) + Data.Value connection + +add_specs suite_builder = + suite_builder.group "[SQLite] Type Mapping" group_builder-> + data = Data.setup + + make_table prefix columns = + name = prefix + column_exprs = columns.map col_def-> + col_def.first + " " + col_def.second + stmt = 'CREATE TABLE "'+name+'" ('+(column_exprs.join ', ')+');' + Problems.assume_no_problems <| data.connection.execute_update stmt + data.connection.query (SQL_Query.Table_Name name) + + group_builder.specify "should map its basic affinities" <| t = make_table "basics" [["int", "INTEGER"], ["real", "REAL"], ["text", "TEXT"], ["blob", "BLOB"], ["numeric", "NUMERIC"]] t.at 
"int" . value_type . should_equal (Value_Type.Integer Bits.Bits_64) t.at "real" . value_type . should_equal (Value_Type.Float Bits.Bits_64) @@ -36,7 +43,7 @@ spec = # We treat numeric as a float, since that is what really sits in SQLite under the hood. t.at "numeric" . value_type . should_equal (Value_Type.Float Bits.Bits_64) - Test.specify "should map complex types to their closest approximation" <| + group_builder.specify "should map complex types to their closest approximation" <| t = make_table "complex" [["a", "VARCHAR(15)"], ["b", "CHAR(10)"], ["c", "BINARY(10)"], ["d", "BIGINT"], ["e", "SMALLINT"], ["f", "TINYINT"], ["g", "FLOAT"], ["h", "DOUBLE"]] t.at "a" . value_type . should_equal Value_Type.Char t.at "b" . value_type . should_equal Value_Type.Char @@ -47,7 +54,7 @@ spec = t.at "g" . value_type . should_equal (Value_Type.Float Bits.Bits_64) t.at "h" . value_type . should_equal (Value_Type.Float Bits.Bits_64) - Test.specify "should allow for special handling of booleans to support the Boolean type that does not have a builtin affinity" <| + group_builder.specify "should allow for special handling of booleans to support the Boolean type that does not have a builtin affinity" <| t = make_table "bools" [["a", "BOOLEAN"], ["b", "BOOLEAN"]] t.at "b" . value_type . should_equal Value_Type.Boolean @@ -60,7 +67,7 @@ spec = c3 = t.evaluate_expression "[a] && 10" c3.should_fail_with Invalid_Value_Type - Test.specify "should correctly handle types through operations" <| + group_builder.specify "should correctly handle types through operations" <| t = make_table "foo" [["a", "int"], ["b", "text"], ["c", "boolean"], ["d", "double precision"]] t.evaluate_expression 'starts_with([b], "1")' . value_type . should_equal Value_Type.Boolean @@ -86,7 +93,7 @@ spec = # First is not currently implemented in SQLite # t2.at "First c" . value_type . 
should_equal Value_Type.Boolean - Test.specify "should approximate types to the closest supported one" <| + group_builder.specify "should approximate types to the closest supported one" <| t = make_table "approx-table" [["a", "BINARY"], ["b", "INT"]] t1 = t.cast "a" (Value_Type.Binary 10 variable_length=True) @@ -97,23 +104,22 @@ spec = t2.at "b" . value_type . should_equal Value_Type.Integer Problems.expect_warning Inexact_Type_Coercion t2 - Test.specify "will coerce date/time columns to Text in existing tables" <| + group_builder.specify "will coerce date/time columns to Text in existing tables" <| t = make_table "datetime-table" [["a", "DATE"], ["b", "TIME"], ["c", "DATETIME"], ["d", "TIMESTAMP"]] t.at "a" . value_type . should_equal Value_Type.Char t.at "b" . value_type . should_equal Value_Type.Char t.at "c" . value_type . should_equal Value_Type.Char t.at "d" . value_type . should_equal Value_Type.Char - Test.specify "does not support creating tables with date/time values" <| + group_builder.specify "does not support creating tables with date/time values" <| t = Table.new [["a", [Date.today]], ["b", [Time_Of_Day.now]], ["c", [Date_Time.now]]] - r1 = t.select_into_database_table connection table_name=(Name_Generator.random_name "date-time-table") temporary=True + r1 = t.select_into_database_table data.connection table_name=(Name_Generator.random_name "date-time-table") temporary=True r1.should_fail_with Unsupported_Database_Operation - Test.specify "should be able to infer types for all supported operations" <| + group_builder.specify "should be able to infer types for all supported operations" <| dialect = Dialect.sqlite internal_mapping = dialect.internal_generator_dialect.operation_map operation_type_mapping = SQLite_Type_Mapping.operations_map operation_type_mapping.keys.sort . 
should_equal internal_mapping.keys.sort -main = Test_Suite.run_main spec diff --git a/test/Table_Tests/src/Database/Upload_Spec.enso b/test/Table_Tests/src/Database/Upload_Spec.enso index 6ad5f3b6ffb1..d4275e78cbb7 100644 --- a/test/Table_Tests/src/Database/Upload_Spec.enso +++ b/test/Table_Tests/src/Database/Upload_Spec.enso @@ -28,11 +28,30 @@ polyglot java import org.enso.table_test_helpers.ExplodingStorage polyglot java import org.enso.table_test_helpers.ExplodingStoragePayload polyglot java import java.lang.Thread -main = Test_Suite.run_main <| - spec (_ -> Database.connect (SQLite In_Memory)) "[SQLite] " persistent_connector=False +type Data + Value ~data + + connection self = self.data.at 0 + in_memory_table self = self.data.at 1 + + setup create_connection_func = Data.Value <| + connection = create_connection_func Nothing + in_memory_table = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] + [connection, in_memory_table] + + +in_memory_table_builder args primary_key=[] connection=Nothing = + _ = [primary_key, connection] + case args of + table : Table -> table + _ -> Table.new args + +database_table_builder name_prefix args primary_key=[] connection = + in_memory_table = in_memory_table_builder args + in_memory_table.select_into_database_table connection (Name_Generator.random_name name_prefix) temporary=True primary_key=primary_key ## PRIVATE - Tests uploading tables. + Adds uploading table specs to the suite builder. Arguments: - make_new_connection: a function that takes `Nothing` and returns a new @@ -42,11 +61,12 @@ main = Test_Suite.run_main <| connections. Should be `True` for all databases except SQLite in the `In_Memory` mode in which every re-connect creates a separate in-memory database, so features relying on persistence cannot really be tested. 
-spec make_new_connection prefix persistent_connector=True = - connection = make_new_connection Nothing - Test.group prefix+"Creating an empty table" <| - Test.specify "should allow to specify the column names and types" <| - t = connection.create_table (Name_Generator.random_name "creating-table") structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char] temporary=True +add_specs suite_builder make_new_connection prefix persistent_connector=True = + suite_builder.group prefix+"Creating an empty table" group_builder-> + data = Data.setup make_new_connection + + group_builder.specify "should allow to specify the column names and types" <| + t = data.connection.create_table (Name_Generator.random_name "creating-table") structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char] temporary=True t.column_names . should_equal ["X", "Y"] t.at "X" . to_vector . should_equal [] t.at "X" . value_type . is_integer . should_be_true @@ -55,9 +75,9 @@ spec make_new_connection prefix persistent_connector=True = t.row_count . should_equal 0 t.is_trivial_query . should_be_true - Test.specify "should allow to inherit the structure of an existing in-memory table" <| + group_builder.specify "should allow to inherit the structure of an existing in-memory table" <| t = Table.new [["X", [1, 2]], ["Y", ['a', 'b']]] - db_table = connection.create_table (Name_Generator.random_name "creating-table") structure=t temporary=True + db_table = data.connection.create_table (Name_Generator.random_name "creating-table") structure=t temporary=True db_table.column_names . should_equal ["X", "Y"] db_table.at "X" . to_vector . should_equal [] db_table.at "X" . value_type . is_integer . should_be_true @@ -65,12 +85,12 @@ spec make_new_connection prefix persistent_connector=True = db_table.at "Y" . value_type . is_text . should_be_true db_table.row_count . 
should_equal 0 - Test.specify "should allow to inherit the structure of an existing Database table" <| + group_builder.specify "should allow to inherit the structure of an existing Database table" <| t = Table.new [["X", [1, 2]], ["Y", ['a', 'b']]] - input_db_table = t.select_into_database_table connection (Name_Generator.random_name "input_table") temporary=True + input_db_table = t.select_into_database_table data.connection (Name_Generator.random_name "input_table") temporary=True input_db_table.at "X" . to_vector . should_equal [1, 2] - db_table = connection.create_table (Name_Generator.random_name "creating-table") structure=input_db_table temporary=True + db_table = data.connection.create_table (Name_Generator.random_name "creating-table") structure=input_db_table temporary=True db_table.column_names . should_equal ["X", "Y"] db_table.at "X" . to_vector . should_equal [] db_table.at "X" . value_type . is_integer . should_be_true @@ -78,28 +98,28 @@ spec make_new_connection prefix persistent_connector=True = db_table.at "Y" . value_type . is_text . should_be_true db_table.row_count . should_equal 0 - Test.specify "should fail if the table already exists" <| + group_builder.specify "should fail if the table already exists" <| name = Name_Generator.random_name "table-already-exists 1" structure = [Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char] - preexisting = connection.create_table name structure=structure temporary=True + preexisting = data.connection.create_table name structure=structure temporary=True preexisting.update_rows (Table.new [["X", [42]], ["Y", ["a"]]]) . should_succeed run_with_and_without_output <| - r1 = connection.create_table name structure=structure temporary=True + r1 = data.connection.create_table name structure=structure temporary=True r1.should_fail_with Table_Already_Exists - preexisting2 = connection.query name + preexisting2 = data.connection.query name preexisting2.column_names . 
should_equal ["X", "Y"] preexisting2.at "X" . to_vector . should_equal [42] preexisting2.at "Y" . to_vector . should_equal ["a"] - Test.specify "should not fail if the table exists, if `allow_existing=True`, even if the structure does not match" <| + group_builder.specify "should not fail if the table exists, if `allow_existing=True`, even if the structure does not match" <| name = Name_Generator.random_name "table-already-exists 2" - preexisting = connection.create_table name structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char] temporary=True + preexisting = data.connection.create_table name structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char] temporary=True preexisting.update_rows (Table.new [["X", [42]], ["Y", ["a"]]]) . should_succeed run_with_and_without_output <| - r1 = connection.create_table name structure=[Column_Description.Value "Z" Value_Type.Float] temporary=True allow_existing=True + r1 = data.connection.create_table name structure=[Column_Description.Value "Z" Value_Type.Float] temporary=True allow_existing=True ## Even in dry-run mode, if the table already exists - it is returned itself, not its temporary dry-run counterpart - as there is no need to create one. @@ -109,145 +129,145 @@ spec make_new_connection prefix persistent_connector=True = r1.at "X" . to_vector . should_equal [42] r1.at "Y" . to_vector . 
should_equal ["a"] - Test.specify "should fail if an unsupported type is specified" <| + group_builder.specify "should fail if an unsupported type is specified" <| run_with_and_without_output <| - r1 = connection.create_table (Name_Generator.random_name "creating-table") structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Mixed] temporary=True + r1 = data.connection.create_table (Name_Generator.random_name "creating-table") structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Mixed] temporary=True r1.should_fail_with Unsupported_Database_Operation - Test.specify "should fail if empty structure is provided" <| + group_builder.specify "should fail if empty structure is provided" <| run_with_and_without_output <| - r1 = connection.create_table (Name_Generator.random_name "creating-invalid-table") structure=[] temporary=True + r1 = data.connection.create_table (Name_Generator.random_name "creating-invalid-table") structure=[] temporary=True r1.should_fail_with Illegal_Argument - Test.specify "should not allow to create a table with duplicated column names" <| + group_builder.specify "should not allow to create a table with duplicated column names" <| run_with_and_without_output <| table_name = Name_Generator.random_name "creating-invalid-table" - r1 = connection.create_table table_name structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "X" Value_Type.Char] temporary=True + r1 = data.connection.create_table table_name structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "X" Value_Type.Char] temporary=True r1.should_fail_with Duplicate_Output_Column_Names # Ensure that the table was not created. - connection.tables . at "Name" . to_vector . should_not_contain table_name + data.connection.tables . at "Name" . to_vector . 
should_not_contain table_name - Test.specify "should not allow to create a table with invalid column names" <| + group_builder.specify "should not allow to create a table with invalid column names" <| Test.expect_panic_with (Column_Description.Value Nothing Value_Type.Char) Type_Error run_with_and_without_output <| table_name = Name_Generator.random_name "creating-invalid-table" - r1 = connection.create_table table_name structure=[Column_Description.Value "" Value_Type.Integer] temporary=True + r1 = data.connection.create_table table_name structure=[Column_Description.Value "" Value_Type.Integer] temporary=True r1.should_fail_with Invalid_Column_Names # Ensure that the table was not created. - connection.tables . at "Name" . to_vector . should_not_contain table_name + data.connection.tables . at "Name" . to_vector . should_not_contain table_name - r2 = connection.create_table table_name structure=[Column_Description.Value 'a\0b' Value_Type.Integer] temporary=True + r2 = data.connection.create_table table_name structure=[Column_Description.Value 'a\0b' Value_Type.Integer] temporary=True r2.should_fail_with Invalid_Column_Names - connection.tables . at "Name" . to_vector . should_not_contain table_name + data.connection.tables . at "Name" . to_vector . 
should_not_contain table_name - Test.specify "should not allow to create a table with duplicated column names, if the difference is just in Unicode normalization form" <| + group_builder.specify "should not allow to create a table with duplicated column names, if the difference is just in Unicode normalization form" <| run_with_and_without_output <| a = 'ś' b = 's\u0301' table_name = Name_Generator.random_name "creating-invalid-table" - r1 = connection.create_table table_name structure=[Column_Description.Value a Value_Type.Integer, Column_Description.Value b Value_Type.Char] temporary=True + r1 = data.connection.create_table table_name structure=[Column_Description.Value a Value_Type.Integer, Column_Description.Value b Value_Type.Char] temporary=True r1.should_fail_with Duplicate_Output_Column_Names - connection.tables . at "Name" . to_vector . should_not_contain table_name + data.connection.tables . at "Name" . to_vector . should_not_contain table_name - Test.specify "should include the created table in the tables directory" <| + group_builder.specify "should include the created table in the tables directory" <| name = Name_Generator.random_name "persistent_table 1" - db_table = connection.create_table name structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char] temporary=False - Panic.with_finalizer (connection.drop_table db_table.name) <| + db_table = data.connection.create_table name structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char] temporary=False + Panic.with_finalizer (data.connection.drop_table db_table.name) <| db_table.column_names . should_equal ["X", "Y"] db_table.at "X" . to_vector . should_equal [] - connection.tables.at "Name" . to_vector . should_contain name - connection.query name . column_names . should_equal ["X", "Y"] - connection.query name . at "X" . to_vector . should_equal [] + data.connection.tables.at "Name" . to_vector . 
should_contain name + data.connection.query name . column_names . should_equal ["X", "Y"] + data.connection.query name . at "X" . to_vector . should_equal [] - Test.specify "should include the temporary table in the tables directory" <| + group_builder.specify "should include the temporary table in the tables directory" <| name = Name_Generator.random_name "temporary_table 1" - db_table = connection.create_table name structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char] temporary=True + db_table = data.connection.create_table name structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char] temporary=True db_table.column_names . should_equal ["X", "Y"] db_table.at "X" . to_vector . should_equal [] - connection.tables.at "Name" . to_vector . should_contain name - connection.query name . column_names . should_equal ["X", "Y"] - connection.query name . at "X" . to_vector . should_equal [] + data.connection.tables.at "Name" . to_vector . should_contain name + data.connection.query name . column_names . should_equal ["X", "Y"] + data.connection.query name . at "X" . to_vector . should_equal [] if persistent_connector then - Test.specify "should drop the temporary table after the connection is closed" <| + group_builder.specify "should drop the temporary table after the connection is closed" <| name = Name_Generator.random_name "temporary_table 2" tmp_connection = make_new_connection Nothing tmp_connection.create_table name [Column_Description.Value "X" Value_Type.Integer] temporary=True tmp_connection.query (SQL_Query.Table_Name name) . column_names . should_equal ["X"] tmp_connection.close - wait_until_temporary_table_is_deleted_after_closing_connection connection name + wait_until_temporary_table_is_deleted_after_closing_connection data.connection name - connection.query (SQL_Query.Table_Name name) . 
should_fail_with Table_Not_Found + data.connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found - Test.specify "should preserve the regular table after the connection is closed" <| + group_builder.specify "should preserve the regular table after the connection is closed" <| name = Name_Generator.random_name "persistent_table 2" tmp_connection = make_new_connection Nothing tmp_connection.create_table name [Column_Description.Value "X" Value_Type.Integer] temporary=False - Panic.with_finalizer (connection.drop_table name) <| + Panic.with_finalizer (data.connection.drop_table name) <| t1 = tmp_connection.query (SQL_Query.Table_Name name) t1.column_names . should_equal ["X"] t1.at "X" . value_type . is_integer . should_be_true tmp_connection.close - t2 = connection.query (SQL_Query.Table_Name name) + t2 = data.connection.query (SQL_Query.Table_Name name) t2.column_names . should_equal ["X"] t2.at "X" . value_type . is_integer . should_be_true - Test.specify "should be able to specify a primary key" <| + group_builder.specify "should be able to specify a primary key" <| name = Name_Generator.random_name "primary_key 1" - db_table = connection.create_table table_name=name structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char, Column_Description.Value "Z" Value_Type.Integer, Column_Description.Value "W" Value_Type.Float] primary_key=["Y", "Z"] temporary=False - Panic.with_finalizer (connection.drop_table db_table.name) <| + db_table = data.connection.create_table table_name=name structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char, Column_Description.Value "Z" Value_Type.Integer, Column_Description.Value "W" Value_Type.Float] primary_key=["Y", "Z"] temporary=False + Panic.with_finalizer (data.connection.drop_table db_table.name) <| db_table.get_primary_key . 
should_equal ["Y", "Z"] - Test.specify "should ensure that primary key columns specified are valid" <| + group_builder.specify "should ensure that primary key columns specified are valid" <| run_with_and_without_output <| - r1 = connection.create_table (Name_Generator.random_name "creating-table") structure=[Column_Description.Value "X" Value_Type.Integer] primary_key=["Y"] temporary=True + r1 = data.connection.create_table (Name_Generator.random_name "creating-table") structure=[Column_Description.Value "X" Value_Type.Integer] primary_key=["Y"] temporary=True r1.should_fail_with Missing_Input_Columns t = Table.new [["X", [1, 2, 3]]] - r2 = connection.create_table (Name_Generator.random_name "creating-table") structure=t primary_key=["Y"] temporary=True + r2 = data.connection.create_table (Name_Generator.random_name "creating-table") structure=t primary_key=["Y"] temporary=True r2.should_fail_with Missing_Input_Columns - Test.specify "should check types of primary key" <| + group_builder.specify "should check types of primary key" <| run_with_and_without_output <| - r1 = connection.create_table (Name_Generator.random_name "creating-table") structure=[Column_Description.Value "X" Value_Type.Integer] primary_key=[0] temporary=True + r1 = data.connection.create_table (Name_Generator.random_name "creating-table") structure=[Column_Description.Value "X" Value_Type.Integer] primary_key=[0] temporary=True r1.should_fail_with Illegal_Argument - Test.specify "should not issue a DELETE statement for the original table name in dry run mode, even if the table does not exist" <| + group_builder.specify "should not issue a DELETE statement for the original table name in dry run mode, even if the table does not exist" <| original_table_name = Name_Generator.random_name "no-delete-test" log_file = enso_project.data / "transient" / "sql.log" log_file.delete_if_exists Test_Environment.unsafe_with_environment_override "ENSO_SQL_LOG_PATH" log_file.absolute.path <| 
Context.Output.with_disabled <| - t1 = connection.create_table original_table_name structure=[Column_Description.Value "X" Value_Type.Integer] temporary=True + t1 = data.connection.create_table original_table_name structure=[Column_Description.Value "X" Value_Type.Integer] temporary=True t1.column_names . should_equal ["X"] logs = log_file.read Plain_Text . lines deletes = logs.filter (_.contains "DROP") - wrapped_name = connection.dialect.wrap_identifier original_table_name + wrapped_name = data.connection.dialect.wrap_identifier original_table_name deletes.each line-> if line.contains wrapped_name then Test.fail "The log file contains a dangerous DELETE statement for the original table: "+line log_file.delete_if_exists - Test.specify "should re-use the same temporary table name across dry runs but re-create it each time" <| + group_builder.specify "should re-use the same temporary table name across dry runs but re-create it each time" <| base_table_name = Name_Generator.random_name "multi-dry-create" Context.Output.with_disabled <| - t1 = connection.create_table base_table_name structure=[Column_Description.Value "X" Value_Type.Integer] temporary=True + t1 = data.connection.create_table base_table_name structure=[Column_Description.Value "X" Value_Type.Integer] temporary=True Problems.expect_only_warning Dry_Run_Operation t1 tmp_name = t1.name tmp_name . should_not_equal base_table_name - connection.query base_table_name . should_fail_with Table_Not_Found + data.connection.query base_table_name . should_fail_with Table_Not_Found - t2 = connection.create_table base_table_name structure=[Column_Description.Value "Y" Value_Type.Char] temporary=True + t2 = data.connection.create_table base_table_name structure=[Column_Description.Value "Y" Value_Type.Char] temporary=True Problems.expect_only_warning Dry_Run_Operation t2 t2.name . should_equal tmp_name t2.at "Y" . to_vector . 
should_equal [] @@ -256,7 +276,7 @@ spec make_new_connection prefix persistent_connector=True = Problems.assume_no_problems <| t2.remove_warnings.update_rows (Table.new [["Y", ['a']]]) t2.at "Y" . to_vector . should_equal ['a'] - t3 = connection.create_table base_table_name structure=[Column_Description.Value "Y" Value_Type.Char] temporary=True + t3 = data.connection.create_table base_table_name structure=[Column_Description.Value "Y" Value_Type.Char] temporary=True Problems.expect_only_warning Dry_Run_Operation t3 t3.name . should_equal tmp_name t3.at "Y" . to_vector . should_equal [] @@ -266,10 +286,12 @@ spec make_new_connection prefix persistent_connector=True = # The old dry run table will not work due to different structure: t1.at "X" . to_vector . should_fail_with SQL_Error - Test.group prefix+"Uploading an in-memory Table" <| - in_memory_table = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] - Test.specify "should create a database table with the same contents as the source" <| - db_table = in_memory_table.select_into_database_table connection (Name_Generator.random_name "creating-table") temporary=True + + suite_builder.group prefix+"Uploading an in-memory Table" group_builder-> + data = Data.setup make_new_connection + + group_builder.specify "should create a database table with the same contents as the source" <| + db_table = data.in_memory_table.select_into_database_table data.connection (Name_Generator.random_name "creating-table") temporary=True db_table.column_names . should_equal ["X", "Y"] db_table.at "X" . to_vector . should_equal [1, 2, 3] db_table.at "Y" . to_vector . should_equal ['a', 'b', 'c'] @@ -278,93 +300,93 @@ spec make_new_connection prefix persistent_connector=True = db_table.row_count . should_equal 3 db_table.is_trivial_query . 
should_be_true - Test.specify "should include the created table in the tables directory" <| - db_table = in_memory_table.select_into_database_table connection (Name_Generator.random_name "permanent_table 1") temporary=False - Panic.with_finalizer (connection.drop_table db_table.name) <| + group_builder.specify "should include the created table in the tables directory" <| + db_table = data.in_memory_table.select_into_database_table data.connection (Name_Generator.random_name "permanent_table 1") temporary=False + Panic.with_finalizer (data.connection.drop_table db_table.name) <| db_table.at "X" . to_vector . should_equal [1, 2, 3] - connection.tables.at "Name" . to_vector . should_contain db_table.name - connection.query db_table.name . at "X" . to_vector . should_equal [1, 2, 3] + data.connection.tables.at "Name" . to_vector . should_contain db_table.name + data.connection.query db_table.name . at "X" . to_vector . should_equal [1, 2, 3] - Test.specify "should include the temporary table in the tables directory" <| - db_table = in_memory_table.select_into_database_table connection (Name_Generator.random_name "temporary_table 1") temporary=True + group_builder.specify "should include the temporary table in the tables directory" <| + db_table = data.in_memory_table.select_into_database_table data.connection (Name_Generator.random_name "temporary_table 1") temporary=True db_table.at "X" . to_vector . should_equal [1, 2, 3] - connection.tables.at "Name" . to_vector . should_contain db_table.name - connection.query db_table.name . at "X" . to_vector . should_equal [1, 2, 3] + data.connection.tables.at "Name" . to_vector . should_contain db_table.name + data.connection.query db_table.name . at "X" . to_vector . 
should_equal [1, 2, 3] if persistent_connector then - Test.specify "should drop the temporary table after the connection is closed" <| + group_builder.specify "should drop the temporary table after the connection is closed" <| tmp_connection = make_new_connection Nothing - db_table = in_memory_table.select_into_database_table tmp_connection (Name_Generator.random_name "temporary_table 2") temporary=True + db_table = data.in_memory_table.select_into_database_table tmp_connection (Name_Generator.random_name "temporary_table 2") temporary=True name = db_table.name tmp_connection.query (SQL_Query.Table_Name name) . at "X" . to_vector . should_equal [1, 2, 3] tmp_connection.close - wait_until_temporary_table_is_deleted_after_closing_connection connection name + wait_until_temporary_table_is_deleted_after_closing_connection data.connection name - connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found + data.connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found - Test.specify "should preserve the regular table after the connection is closed" <| + group_builder.specify "should preserve the regular table after the connection is closed" <| tmp_connection = make_new_connection Nothing - db_table = in_memory_table.select_into_database_table tmp_connection (Name_Generator.random_name "permanent_table 1") temporary=False + db_table = data.in_memory_table.select_into_database_table tmp_connection (Name_Generator.random_name "permanent_table 1") temporary=False name = db_table.name - Panic.with_finalizer (connection.drop_table name) <| + Panic.with_finalizer (data.connection.drop_table name) <| tmp_connection.query (SQL_Query.Table_Name name) . at "X" . to_vector . should_equal [1, 2, 3] tmp_connection.close - connection.query (SQL_Query.Table_Name name) . at "X" . to_vector . should_equal [1, 2, 3] + data.connection.query (SQL_Query.Table_Name name) . at "X" . to_vector . 
should_equal [1, 2, 3] - Test.specify "should not create any table if upload fails" <| + group_builder.specify "should not create any table if upload fails" <| normal_column = Column.from_vector "Y" ((100+0).up_to (100+1000)).to_vector exploding_column = make_mock_column "X" (0.up_to 1000).to_vector 512 exploding_table = Table.new [normal_column, exploding_column] name = Name_Generator.random_name "rolling-back-table" - connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found + data.connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found Test.expect_panic_with matcher=ExplodingStoragePayload <| - exploding_table.select_into_database_table connection name temporary=False primary_key=Nothing - connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found + exploding_table.select_into_database_table data.connection name temporary=False primary_key=Nothing + data.connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found - Test.specify "should set a primary key for the table" <| + group_builder.specify "should set a primary key for the table" <| t1 = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [1.0, 2.0, 3.0]]] - db_table_1 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-1") primary_key=["Y", "X"] - Panic.with_finalizer (connection.drop_table db_table_1.name) <| + db_table_1 = t1.select_into_database_table data.connection (Name_Generator.random_name "primary-key-1") primary_key=["Y", "X"] + Panic.with_finalizer (data.connection.drop_table db_table_1.name) <| db_table_1.at "X" . to_vector . should_equal [1, 2, 3] db_table_1.get_primary_key . 
should_equal ["Y", "X"] - db_table_2 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-2") - Panic.with_finalizer (connection.drop_table db_table_2.name) <| + db_table_2 = t1.select_into_database_table data.connection (Name_Generator.random_name "primary-key-2") + Panic.with_finalizer (data.connection.drop_table db_table_2.name) <| db_table_2.at "X" . to_vector . should_equal [1, 2, 3] db_table_2.get_primary_key . should_equal ["X"] - db_table_3 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-3") primary_key=Nothing - Panic.with_finalizer (connection.drop_table db_table_3.name) <| + db_table_3 = t1.select_into_database_table data.connection (Name_Generator.random_name "primary-key-3") primary_key=Nothing + Panic.with_finalizer (data.connection.drop_table db_table_3.name) <| db_table_3.at "X" . to_vector . should_equal [1, 2, 3] db_table_3.get_primary_key . should_equal Nothing - Test.specify "should ensure that primary key columns are valid" <| + group_builder.specify "should ensure that primary key columns are valid" <| run_with_and_without_output <| - r1 = in_memory_table.select_into_database_table connection (Name_Generator.random_name "primary-key-4") primary_key=["X", "nonexistent"] + r1 = data.in_memory_table.select_into_database_table data.connection (Name_Generator.random_name "primary-key-4") primary_key=["X", "nonexistent"] r1.should_fail_with Missing_Input_Columns - Test.specify "should fail if the primary key is not unique" <| + group_builder.specify "should fail if the primary key is not unique" <| t1 = Table.new [["X", [1, 2, 1]], ["Y", ['b', 'b', 'a']]] run_with_and_without_output <| - r1 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-6") temporary=True primary_key=["X"] + r1 = t1.select_into_database_table data.connection (Name_Generator.random_name "primary-key-6") temporary=True primary_key=["X"] r1.should_fail_with Non_Unique_Key e1 = 
r1.catch e1.clashing_example_key_values . should_equal [1] e1.clashing_example_row_count . should_equal 2 e1.to_display_text . should_equal "The key [X] is not unique, for example key [1] corresponds to 2 rows." - r2 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-6") temporary=True primary_key=["Y"] + r2 = t1.select_into_database_table data.connection (Name_Generator.random_name "primary-key-6") temporary=True primary_key=["Y"] r2.should_fail_with Non_Unique_Key r2.catch . clashing_example_key_values . should_equal ['b'] - r3 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-7") temporary=True primary_key=["X", "Y"] + r3 = t1.select_into_database_table data.connection (Name_Generator.random_name "primary-key-7") temporary=True primary_key=["X", "Y"] r3.at "X" . to_vector . should_equal [1, 2, 1] t2 = Table.new [["X", [1, 2, 1]], ["Y", ['a', 'b', 'a']]] - r4 = t2.select_into_database_table connection (Name_Generator.random_name "primary-key-7") temporary=True primary_key=["X", "Y"] + r4 = t2.select_into_database_table data.connection (Name_Generator.random_name "primary-key-7") temporary=True primary_key=["X", "Y"] r4.should_fail_with Non_Unique_Key r4.catch . clashing_example_key_values . should_equal [1, 'a'] @@ -372,49 +394,49 @@ spec make_new_connection prefix persistent_connector=True = vec = (0.up_to 1010).to_vector t3 = Table.new [["X", vec+vec]] Context.Output.with_disabled <| - r5 = t3.select_into_database_table connection (Name_Generator.random_name "primary-key-8") temporary=True primary_key=["X"] + r5 = t3.select_into_database_table data.connection (Name_Generator.random_name "primary-key-8") temporary=True primary_key=["X"] r5.column_names . should_equal ["X"] # Only a sample of rows was uploaded. r5.row_count . 
should_equal 1000 Context.Output.with_enabled <| - r5 = t3.select_into_database_table connection (Name_Generator.random_name "primary-key-8") temporary=True primary_key=["X"] + r5 = t3.select_into_database_table data.connection (Name_Generator.random_name "primary-key-8") temporary=True primary_key=["X"] r5.should_fail_with Non_Unique_Key - Test.specify "should fail if the target table already exists" <| + group_builder.specify "should fail if the target table already exists" <| name = Name_Generator.random_name "table-already-exists" - preexisting = connection.create_table name [Column_Description.Value "X" Value_Type.Integer] temporary=True + preexisting = data.connection.create_table name [Column_Description.Value "X" Value_Type.Integer] temporary=True preexisting.update_rows (Table.new [["X", [42]]]) . should_succeed t = Table.new [["Y", ['a', 'b']]] run_with_and_without_output <| - r1 = t.select_into_database_table connection name temporary=True + r1 = t.select_into_database_table data.connection name temporary=True r1.should_fail_with Table_Already_Exists - r2 = t.select_into_database_table connection name temporary=False + r2 = t.select_into_database_table data.connection name temporary=False r2.should_fail_with Table_Already_Exists # Ensure that the table was not modified. - preexisting2 = connection.query name + preexisting2 = data.connection.query name preexisting2.column_names . should_equal ["X"] preexisting2.at "X" . to_vector . 
should_equal [42] - Test.specify "should re-use the same temporary table name across dry runs but re-create it each time" <| + group_builder.specify "should re-use the same temporary table name across dry runs but re-create it each time" <| base_table_name = Name_Generator.random_name "multi-dry-select" Context.Output.with_disabled <| - t1 = (Table.new [["X", [1, 2, 3]]]).select_into_database_table connection base_table_name temporary=True primary_key=[] + t1 = (Table.new [["X", [1, 2, 3]]]).select_into_database_table data.connection base_table_name temporary=True primary_key=[] Problems.expect_only_warning Dry_Run_Operation t1 tmp_name = t1.name tmp_name . should_not_equal base_table_name - connection.query base_table_name . should_fail_with Table_Not_Found + data.connection.query base_table_name . should_fail_with Table_Not_Found t1.at "X" . to_vector . should_contain_the_same_elements_as [1, 2, 3] - t2 = (Table.new [["Y", [44]]]).select_into_database_table connection base_table_name temporary=False primary_key=[] + t2 = (Table.new [["Y", [44]]]).select_into_database_table data.connection base_table_name temporary=False primary_key=[] Problems.expect_only_warning Dry_Run_Operation t2 t2.name . should_equal tmp_name t2.at "Y" . to_vector . should_equal [44] - t3 = (Table.new [["Y", [55]]]).select_into_database_table connection base_table_name temporary=True primary_key=[] + t3 = (Table.new [["Y", [55]]]).select_into_database_table data.connection base_table_name temporary=True primary_key=[] Problems.expect_only_warning Dry_Run_Operation t3 t3.name . should_equal tmp_name t3.at "Y" . to_vector . should_equal [55] @@ -422,8 +444,10 @@ spec make_new_connection prefix persistent_connector=True = # The old dry run table has been invalidated due to overwrite t1.at "X" . to_vector . 
should_fail_with SQL_Error - Test.group prefix+"Persisting a Database Table (query)" <| - Test.specify "should be able to create a persistent copy of a DB table" <| + suite_builder.group prefix+"Persisting a Database Table (query)" group_builder-> + data = Data.setup make_new_connection + + group_builder.specify "should be able to create a persistent copy of a DB table" <| t = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [1.0, 2.0, 3.0]]] tmp_connection = make_new_connection Nothing db_table = t.select_into_database_table tmp_connection (Name_Generator.random_name "source-table") temporary=True @@ -431,7 +455,7 @@ spec make_new_connection prefix persistent_connector=True = copied_table = db_table.select_into_database_table tmp_connection (Name_Generator.random_name "copied-table") temporary=False copied_table.is_trivial_query . should_be_true name = copied_table.name - Panic.with_finalizer (connection.drop_table name) <| + Panic.with_finalizer (data.connection.drop_table name) <| copied_table.at "X" . value_type . is_integer . should_be_true copied_table.at "Y" . value_type . is_text . should_be_true copied_table.at "Z" . value_type . is_floating_point . should_be_true @@ -440,12 +464,12 @@ spec make_new_connection prefix persistent_connector=True = tmp_connection.close if persistent_connector then - connection.query name . at "X" . to_vector . should_equal [1, 2, 3] + data.connection.query name . at "X" . to_vector . should_equal [1, 2, 3] - Test.specify "should be able to persist a complex query with generated columns, joins etc." <| + group_builder.specify "should be able to persist a complex query with generated columns, joins etc." 
<| t1 = Table.new [["X", [1, 1, 2]], ["Y", [1, 2, 3]]] - db_table_1 = t1.select_into_database_table connection (Name_Generator.random_name "source-table-1") temporary=True primary_key=Nothing + db_table_1 = t1.select_into_database_table data.connection (Name_Generator.random_name "source-table-1") temporary=True primary_key=Nothing db_table_2 = db_table_1.set "[Y] + 100 * [X]" "C1" . set '"constant_text"' "C2" db_table_3 = db_table_1.aggregate [Aggregate_Column.Group_By "X", Aggregate_Column.Sum "[Y]*[Y]" "C3"] . set "[X] + 1" "X" @@ -453,7 +477,7 @@ spec make_new_connection prefix persistent_connector=True = db_table_4 = db_table_2.join db_table_3 join_kind=Join_Kind.Left_Outer db_table_4.is_trivial_query . should_fail_with Table_Not_Found - copied_table = db_table_4.select_into_database_table connection (Name_Generator.random_name "copied-table") temporary=True primary_key=Nothing + copied_table = db_table_4.select_into_database_table data.connection (Name_Generator.random_name "copied-table") temporary=True primary_key=Nothing copied_table.column_names . should_equal ["X", "Y", "C1", "C2", "Right X", "C3"] copied_table.at "X" . to_vector . should_equal [1, 1, 2] copied_table.at "C1" . to_vector . should_equal [101, 102, 203] @@ -469,7 +493,7 @@ spec make_new_connection prefix persistent_connector=True = sql.contains "JOIN" . should_be_false sql.contains "GROUP" . should_be_false - Test.specify "should be able to create a temporary copy of a query" <| + group_builder.specify "should be able to create a temporary copy of a query" <| tmp_connection = make_new_connection Nothing t = Table.new [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] db_table = t.select_into_database_table tmp_connection (Name_Generator.random_name "source-table") temporary=True @@ -483,40 +507,40 @@ spec make_new_connection prefix persistent_connector=True = copied_table_accessed.at "computed" . to_vector . should_equal [401, 502, 603] tmp_connection.close - connection.query name . 
should_fail_with Table_Not_Found + data.connection.query name . should_fail_with Table_Not_Found - Test.specify "should be able to specify a primary key" <| + group_builder.specify "should be able to specify a primary key" <| t = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] - db_table = t.select_into_database_table connection (Name_Generator.random_name "source-table") temporary=True - db_table_2 = db_table.select_into_database_table connection (Name_Generator.random_name "copied-table") primary_key=["X"] - Panic.with_finalizer (connection.drop_table db_table_2.name) <| + db_table = t.select_into_database_table data.connection (Name_Generator.random_name "source-table") temporary=True + db_table_2 = db_table.select_into_database_table data.connection (Name_Generator.random_name "copied-table") primary_key=["X"] + Panic.with_finalizer (data.connection.drop_table db_table_2.name) <| db_table_2.get_primary_key . should_equal ["X"] - Test.specify "should ensure that primary key columns are valid" <| + group_builder.specify "should ensure that primary key columns are valid" <| t = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] - db_table = t.select_into_database_table connection (Name_Generator.random_name "source-table") temporary=True + db_table = t.select_into_database_table data.connection (Name_Generator.random_name "source-table") temporary=True run_with_and_without_output <| - r1 = db_table.select_into_database_table connection (Name_Generator.random_name "copied-table") temporary=True primary_key=["nonexistent"] + r1 = db_table.select_into_database_table data.connection (Name_Generator.random_name "copied-table") temporary=True primary_key=["nonexistent"] r1.should_fail_with Missing_Input_Columns - Test.specify "should fail when the primary key is not unique" <| + group_builder.specify "should fail when the primary key is not unique" <| t = Table.new [["X", [1, 2, 1]], ["Y", ['b', 'b', 'a']]] - db_table = t.select_into_database_table connection 
(Name_Generator.random_name "source-table") temporary=True primary_key=Nothing + db_table = t.select_into_database_table data.connection (Name_Generator.random_name "source-table") temporary=True primary_key=Nothing Problems.assume_no_problems db_table run_with_and_without_output <| - r1 = db_table.select_into_database_table connection (Name_Generator.random_name "copied-table") temporary=True primary_key=["X"] + r1 = db_table.select_into_database_table data.connection (Name_Generator.random_name "copied-table") temporary=True primary_key=["X"] r1.should_fail_with Non_Unique_Key e1 = r1.catch e1.clashing_example_key_values . should_equal [1] e1.clashing_example_row_count . should_equal 2 t2 = Table.new [["X", [1, 3, 1, 2, 3, 2, 2, 2, 0]], ["Y", ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i']]] - db_table_2 = t2.select_into_database_table connection (Name_Generator.random_name "source-table-2") temporary=True primary_key=Nothing + db_table_2 = t2.select_into_database_table data.connection (Name_Generator.random_name "source-table-2") temporary=True primary_key=Nothing Problems.assume_no_problems db_table_2 run_with_and_without_output <| - r2 = db_table_2.select_into_database_table connection (Name_Generator.random_name "copied-table-2") temporary=True primary_key=["X"] + r2 = db_table_2.select_into_database_table data.connection (Name_Generator.random_name "copied-table-2") temporary=True primary_key=["X"] r2.should_fail_with Non_Unique_Key e2 = r2.catch e2.clashing_example_key_values.length . should_equal 1 @@ -528,19 +552,19 @@ spec make_new_connection prefix persistent_connector=True = # Will not find clashes if they are not in the first 1000 rows, in Output disabled mode. 
vec = (0.up_to 1010).to_vector t3 = Table.new [["X", vec+vec]] - db_table_3 = t3.select_into_database_table connection (Name_Generator.random_name "source-table-3") temporary=True primary_key=Nothing + db_table_3 = t3.select_into_database_table data.connection (Name_Generator.random_name "source-table-3") temporary=True primary_key=Nothing Context.Output.with_disabled <| - r5 = db_table_3.select_into_database_table connection (Name_Generator.random_name "primary-key-8") temporary=True primary_key=["X"] + r5 = db_table_3.select_into_database_table data.connection (Name_Generator.random_name "primary-key-8") temporary=True primary_key=["X"] r5.column_names . should_equal ["X"] # Only a sample of rows was uploaded. r5.row_count . should_equal 1000 Context.Output.with_enabled <| - r5 = db_table_3.select_into_database_table connection (Name_Generator.random_name "primary-key-8") temporary=True primary_key=["X"] + r5 = db_table_3.select_into_database_table data.connection (Name_Generator.random_name "primary-key-8") temporary=True primary_key=["X"] r5.should_fail_with Non_Unique_Key - Test.specify "will not allow to upload tables across connections" <| + group_builder.specify "will not allow to upload tables across connections" <| t = Table.new [["X", [1, 2, 1]], ["Y", ['b', 'b', 'a']]] - db_table = t.select_into_database_table connection (Name_Generator.random_name "source-table") temporary=True primary_key=Nothing + db_table = t.select_into_database_table data.connection (Name_Generator.random_name "source-table") temporary=True primary_key=Nothing connection_2 = make_new_connection Nothing run_with_and_without_output <| @@ -548,25 +572,27 @@ spec make_new_connection prefix persistent_connector=True = r1.should_fail_with Unsupported_Database_Operation r1.catch.message . 
should_contain "same connection" - Test.specify "should fail if the target table already exists" <| + group_builder.specify "should fail if the target table already exists" <| name = Name_Generator.random_name "table-already-exists" - preexisting = connection.create_table name [Column_Description.Value "X" Value_Type.Integer] temporary=True + preexisting = data.connection.create_table name [Column_Description.Value "X" Value_Type.Integer] temporary=True preexisting.update_rows (Table.new [["X", [42]]]) . should_succeed t = Table.new [["Y", ['a', 'b']]] - db_table_2 = t.select_into_database_table connection (Name_Generator.random_name "source-table") temporary=True + db_table_2 = t.select_into_database_table data.connection (Name_Generator.random_name "source-table") temporary=True run_with_and_without_output <| - r1 = db_table_2.select_into_database_table connection name temporary=True + r1 = db_table_2.select_into_database_table data.connection name temporary=True r1.should_fail_with Table_Already_Exists - r2 = db_table_2.select_into_database_table connection name temporary=False + r2 = db_table_2.select_into_database_table data.connection name temporary=False r2.should_fail_with Table_Already_Exists - preexisting2 = connection.query name + preexisting2 = data.connection.query name preexisting2.column_names . should_equal ["X"] preexisting2.at "X" . to_vector . 
should_equal [42] - test_table_append source_table_builder target_table_builder = - Test.specify "should be able to append new rows to a table" <| + test_table_append group_builder source_table_builder target_table_builder = + data = Data.setup make_new_connection + + group_builder.specify "should be able to append new rows to a table" <| dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] src = source_table_builder [["X", [4, 5, 6]], ["Y", ['d', 'e', 'f']]] @@ -580,7 +606,7 @@ spec make_new_connection prefix persistent_connector=True = rows1 = result.rows.to_vector.map .to_vector rows1.should_contain_the_same_elements_as expected_rows - Test.specify "should error if new rows clash with existing ones and mode is Insert, target table should remain unchanged" <| + group_builder.specify "should error if new rows clash with existing ones and mode is Insert, target table should remain unchanged" <| dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] src = source_table_builder [["X", [1, 5, 6]], ["Y", ['d', 'e', 'f']]] @@ -589,7 +615,7 @@ spec make_new_connection prefix persistent_connector=True = r1 = dest.update_rows src update_action=Update_Action.Insert key_columns=["X"] r1.should_fail_with Rows_Already_Present - Test.specify "should use the target table primary key for the key by default" <| + group_builder.specify "should use the target table primary key for the key by default" <| dest1 = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [4, 5, 6]]] primary_key=["Y", "Z"] default_key_columns dest1 . 
should_equal ["Y", "Z"] @@ -600,7 +626,7 @@ spec make_new_connection prefix persistent_connector=True = rows = r1.rows.to_vector.map .to_vector rows.should_contain_the_same_elements_as [[1, 'a'], [4, 'b'], [3, 'c'], [5, 'e']] - Test.specify "should be able to Update existing rows in a table" <| + group_builder.specify "should be able to Update existing rows in a table" <| dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] src = source_table_builder [["X", [2]], ["Y", ['ZZZ']]] @@ -611,7 +637,7 @@ spec make_new_connection prefix persistent_connector=True = rows = dest.rows.to_vector.map .to_vector rows.should_contain_the_same_elements_as [[1, 'a'], [2, 'ZZZ'], [3, 'c']] - Test.specify "should fail on unmatched rows in Update mode" <| + group_builder.specify "should fail on unmatched rows in Update mode" <| dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] src = source_table_builder [["X", [2, 100]], ["Y", ['d', 'e']]] @@ -624,7 +650,7 @@ spec make_new_connection prefix persistent_connector=True = rows = dest.rows.to_vector.map .to_vector rows.should_contain_the_same_elements_as [[1, 'a'], [2, 'b'], [3, 'c']] - Test.specify "should upsert by default (update existing rows, insert new rows)" <| + group_builder.specify "should upsert by default (update existing rows, insert new rows)" <| dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] src = source_table_builder [["X", [2, 100]], ["Y", ['D', 'E']]] r1 = dest.update_rows src key_columns=["X"] @@ -638,7 +664,7 @@ spec make_new_connection prefix persistent_connector=True = rows2 = dest.rows.to_vector.map .to_vector rows2.should_contain_the_same_elements_as expected_rows - Test.specify "should allow to align an existing table with a source (upsert + delete rows missing from source)" <| + group_builder.specify "should allow to align an existing table with a source (upsert + delete rows missing from source)" <| dest = target_table_builder [["X", 
[1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] src = source_table_builder [["X", [2, 100]], ["Y", ['D', 'E']]] r1 = dest.update_rows src update_action=Update_Action.Align_Records key_columns=["X"] @@ -650,7 +676,7 @@ spec make_new_connection prefix persistent_connector=True = rows2 = dest.rows.to_vector.map .to_vector rows2.should_contain_the_same_elements_as expected_rows - Test.specify "should match columns by name, reordering to destination order if needed (Insert)" <| + group_builder.specify "should match columns by name, reordering to destination order if needed (Insert)" <| dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [4, 5, 6]]] result = dest.update_rows src update_action=Update_Action.Insert key_columns=["X"] @@ -660,7 +686,7 @@ spec make_new_connection prefix persistent_connector=True = rows1 = result.rows.to_vector.map .to_vector rows1.should_contain_the_same_elements_as expected_rows - Test.specify "should match columns by name, reordering to destination order if needed (Upsert)" <| + group_builder.specify "should match columns by name, reordering to destination order if needed (Upsert)" <| dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [1, 5, 6]]] result = dest.update_rows src key_columns=["X"] @@ -670,7 +696,7 @@ spec make_new_connection prefix persistent_connector=True = rows1 = result.rows.to_vector.map .to_vector rows1.should_contain_the_same_elements_as expected_rows - Test.specify "should match columns by name, reordering to destination order if needed (Update)" <| + group_builder.specify "should match columns by name, reordering to destination order if needed (Update)" <| dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [3, 2, 1]]] result = 
dest.update_rows src update_action=Update_Action.Update key_columns=["X"] @@ -680,7 +706,7 @@ spec make_new_connection prefix persistent_connector=True = rows1 = result.rows.to_vector.map .to_vector rows1.should_contain_the_same_elements_as expected_rows - Test.specify "should match columns by name, reordering to destination order if needed (Align)" <| + group_builder.specify "should match columns by name, reordering to destination order if needed (Align)" <| dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [2, 1, 6]]] result = dest.update_rows src update_action=Update_Action.Align_Records key_columns=["X"] @@ -690,7 +716,7 @@ spec make_new_connection prefix persistent_connector=True = rows1 = result.rows.to_vector.map .to_vector rows1.should_contain_the_same_elements_as expected_rows - Test.specify "should allow to use a transformed table, with computed fields, as a source" <| + group_builder.specify "should allow to use a transformed table, with computed fields, as a source" <| dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] t1 = source_table_builder [["Z", [10, 20]], ["Y", ['D', 'E']]] t2 = source_table_builder [["Z", [20, 10]], ["X", [-99, 10]]] @@ -704,7 +730,7 @@ spec make_new_connection prefix persistent_connector=True = rows1 = r1.rows.to_vector.map .to_vector rows1.should_contain_the_same_elements_as expected_rows - Test.specify "should allow specifying no key in Insert mode" <| + group_builder.specify "should allow specifying no key in Insert mode" <| dest = target_table_builder [["X", [1, 10, 100]]] src = source_table_builder [["X", [1, 2, 3]]] result = dest.update_rows src update_action=Update_Action.Insert key_columns=[] @@ -722,7 +748,7 @@ spec make_new_connection prefix persistent_connector=True = r3.column_names . should_equal ["X"] r3.at "X" . to_vector . 
should_contain_the_same_elements_as expected - Test.specify "should fail if no key is specified in other modes" <| + group_builder.specify "should fail if no key is specified in other modes" <| dest = target_table_builder [["X", [1, 10, 100]]] src = source_table_builder [["X", [1, 2, 3]]] @@ -745,7 +771,7 @@ spec make_new_connection prefix persistent_connector=True = r5 = dest.update_rows src update_action=Update_Action.Align_Records key_columns=[] r5.should_fail_with Illegal_Argument - Test.specify "should fail if the key is not unique in the input table" <| + group_builder.specify "should fail if the key is not unique in the input table" <| d1 = target_table_builder [["X", [0, 10, 100]]] primary_key=["X"] d2 = target_table_builder [["X", [0, 10, 100]]] src = source_table_builder [["X", [1, 1, 3]]] @@ -759,7 +785,7 @@ spec make_new_connection prefix persistent_connector=True = r2 = d2.update_rows src key_columns=["X"] update_action=Update_Action.Insert r2.should_fail_with Non_Unique_Key - Test.specify "will fail if source table contains null keys, unless only Inserting" <| + group_builder.specify "will fail if source table contains null keys, unless only Inserting" <| t1 = target_table_builder [["X", [0, 10, 100]], ["Y", ["a", "b", "c"]]] primary_key=[] s1 = source_table_builder [["X", [10, Nothing]], ["Y", ["x", "y"]]] run_with_and_without_output <| @@ -772,7 +798,7 @@ spec make_new_connection prefix persistent_connector=True = m2.at "Y" . to_vector . should_equal ["a", "b", "c", "x", "y"] m2.at "X" . to_vector . 
should_equal [0, 10, 100, 10, Nothing] - Test.specify "should fail if the key causes update of multiple values (it's not unique in the target table)" <| + group_builder.specify "should fail if the key causes update of multiple values (it's not unique in the target table)" <| dest = target_table_builder [["X", [1, 1, 2]], ["Y", ['a', 'b', 'c']]] src = source_table_builder [["X", [1, 2, 3]], ["Y", ['d', 'e', 'f']]] @@ -799,7 +825,7 @@ spec make_new_connection prefix persistent_connector=True = Problems.assume_no_problems <| dest.update_rows src3 key_columns=["X"] - Test.specify "should fail if the source table contains columns not present in the target (data loss)" <| + group_builder.specify "should fail if the source table contains columns not present in the target (data loss)" <| dest = target_table_builder [["X", [0, 10, 100]]] primary_key=["X"] src = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] run_with_and_without_output <| @@ -809,9 +835,9 @@ spec make_new_connection prefix persistent_connector=True = r1.catch.to_display_text . should_contain "columns were not present" r1.catch.to_display_text . should_contain "Y" - Test.specify "should use defaults when inserting" <| + group_builder.specify "should use defaults when inserting" <| dest_name = Name_Generator.random_name "table-defaults" - dest = connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "X" Value_Type.Integer] temporary=True primary_key=[] . should_succeed + dest = data.connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "X" Value_Type.Integer] temporary=True primary_key=[] . 
should_succeed src = source_table_builder [["X", [1, 2, 3]]] r1 = dest.update_rows src key_columns=[] update_action=Update_Action.Insert Problems.assume_no_problems r1 @@ -819,11 +845,11 @@ spec make_new_connection prefix persistent_connector=True = expected_rows = [[42, 1], [42, 2], [42, 3]] rows1 = r1.rows.to_vector.map .to_vector rows1.should_contain_the_same_elements_as expected_rows - connection.drop_table dest_name + data.connection.drop_table dest_name - Test.specify "should use defaults when inserting new values in upsert, but retain existing values" <| + group_builder.specify "should use defaults when inserting new values in upsert, but retain existing values" <| dest_name = Name_Generator.random_name "table-defaults" - dest = connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Z" Value_Type.Integer] temporary=True primary_key=[] . should_succeed + dest = data.connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Z" Value_Type.Integer] temporary=True primary_key=[] . 
should_succeed Problems.assume_no_problems <| dest.update_rows (Table.from_rows ["X", "Y", "Z"] [[1, 1000, 10]]) key_columns=[] update_action=Update_Action.Insert @@ -834,11 +860,11 @@ spec make_new_connection prefix persistent_connector=True = expected_rows = [[1000, 1, 100], [42, 2, 200], [42, 3, 300]] rows1 = r1.rows.to_vector.map .to_vector rows1.should_contain_the_same_elements_as expected_rows - connection.drop_table dest_name + data.connection.drop_table dest_name - Test.specify "should use defaults for missing input columns for newly inserted rows when Aligning the tables, but keep existing values for existing rows" <| + group_builder.specify "should use defaults for missing input columns for newly inserted rows when Aligning the tables, but keep existing values for existing rows" <| dest_name = Name_Generator.random_name "table-defaults-align" - dest = connection.create_table dest_name [Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "Z" Value_Type.Integer] temporary=True . should_succeed + dest = data.connection.create_table dest_name [Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "Z" Value_Type.Integer] temporary=True . should_succeed initial_data = Table.new [["X", [10, 20]], ["Y", [100, 200]], ["Z", [1000, 2000]]] dest.update_rows initial_data key_columns=[] update_action=Update_Action.Insert . 
should_succeed src = source_table_builder [["X", [10, 2, 3]], ["Z", [-1, -2, -3]]] @@ -849,18 +875,18 @@ spec make_new_connection prefix persistent_connector=True = expected_rows = [[10, 100, -1], [2, 42, -2], [3, 42, -3]] rows1 = r1.rows.to_vector.map .to_vector rows1.should_contain_the_same_elements_as expected_rows - connection.drop_table dest_name + data.connection.drop_table dest_name - Test.specify "should fail if the source table is missing some columns and the column in the target has no default value" <| + group_builder.specify "should fail if the source table is missing some columns and the column in the target has no default value" <| dest_name = Name_Generator.random_name "table-notnull" - dest = connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Not_Null], Column_Description.Value "X" Value_Type.Integer] temporary=True primary_key=[] . should_succeed + dest = data.connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Not_Null], Column_Description.Value "X" Value_Type.Integer] temporary=True primary_key=[] . should_succeed src = source_table_builder [["X", [1, 2, 3]]] r1 = dest.update_rows src key_columns=[] update_action=Update_Action.Insert # We may want a more specific error for missing columns without defaults, but for now it's just a SQL error. r1.should_fail_with SQL_Error - connection.drop_table dest_name + data.connection.drop_table dest_name - Test.specify "should fail if the source table is missing some columns, if asked to" <| + group_builder.specify "should fail if the source table is missing some columns, if asked to" <| dest = target_table_builder [["X", [0, 10, 100]], ["Y", ['a', 'b', 'c']]] src = source_table_builder [["X", [1, 2, 3]]] run_with_and_without_output <| @@ -868,7 +894,7 @@ spec make_new_connection prefix persistent_connector=True = r1.should_fail_with Missing_Input_Columns r1.catch.criteria . 
should_equal ["Y"] - Test.specify "should fail if some of key_columns do not exist in either table" <| + group_builder.specify "should fail if some of key_columns do not exist in either table" <| d1 = target_table_builder [["X", [0, 10, 100]]] d2 = target_table_builder [["X", [0, 10, 100]], ["Y", ['a', 'b', 'c']]] s1 = source_table_builder [["X", [1, 3]]] @@ -886,12 +912,12 @@ spec make_new_connection prefix persistent_connector=True = r3.should_fail_with Any ((r3.catch.is_a Missing_Input_Columns) || (r3.catch.is_a Unmatched_Columns)).should_be_true - Test.specify "should fail if the target table does not exist" <| + group_builder.specify "should fail if the target table does not exist" <| t = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] nonexistent_name = Name_Generator.random_name "nonexistent-table" - nonexistent_ref = connection.create_table nonexistent_name t + nonexistent_ref = data.connection.create_table nonexistent_name t # Dropping the table to make it not exist. 
- connection.drop_table nonexistent_ref.name + data.connection.drop_table nonexistent_ref.name run_with_and_without_output <| r1 = nonexistent_ref.update_rows t key_columns=[] @@ -901,7 +927,7 @@ spec make_new_connection prefix persistent_connector=True = r2 = nonexistent_ref.update_rows t r2.should_fail_with Table_Not_Found - Test.specify "should fail if the target table is in-memory" <| + group_builder.specify "should fail if the target table is in-memory" <| t = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] in_memory_table = Table.new [["X", [0]], ["Y", ['_']]] run_with_and_without_output <| @@ -911,7 +937,7 @@ spec make_new_connection prefix persistent_connector=True = r2 = in_memory_table.update_rows t r2.should_fail_with Illegal_Argument - Test.specify "should warn if type widening occurs" <| + group_builder.specify "should warn if type widening occurs" <| dest = target_table_builder [["X", [3.25, 4.25, 10.0]]] src = source_table_builder [["X", [1, 2, 0]]] @@ -931,7 +957,7 @@ spec make_new_connection prefix persistent_connector=True = result.column_names . should_equal ["X"] result.at "X" . to_vector . should_contain_the_same_elements_as [3.25, 4.25, 10.0, 1, 2, 0] - Test.specify "should fail if types of columns are not compatible" <| + group_builder.specify "should fail if types of columns are not compatible" <| dest = target_table_builder [["X", ["a", "B", "c"]]] src = source_table_builder [["X", [1, 2, 3]]] @@ -949,27 +975,27 @@ spec make_new_connection prefix persistent_connector=True = y = Column_Description.Value "Y" (Value_Type.Char size=4 variable_length=True) z = Column_Description.Value "Z" (Value_Type.Char size=5 variable_length=False) [x, y, z] - dest = connection.create_table dest_name structure temporary=True primary_key=[] + dest = data.connection.create_table dest_name structure temporary=True primary_key=[] non_trivial_types_supported = has_warning_or_error = dest.is_error || (Problems.get_attached_warnings dest . 
not_empty) has_warning_or_error.not if non_trivial_types_supported then src = source_table_builder [["X", [1, 2, 3]], ["Y", ["a", "xyz", "abcdefghijkl"]], ["Z", ["a", "pqrst", "abcdefghijkl"]]] - Test.specify "fails if the target type is more restrictive than source" <| + group_builder.specify "fails if the target type is more restrictive than source" <| result = dest.update_rows src update_action=Update_Action.Insert key_columns=[] result.should_fail_with Column_Type_Mismatch - Test.specify "should not leave behind any garbage temporary tables if the upload fails" <| + group_builder.specify "should not leave behind any garbage temporary tables if the upload fails" <| dest_name = Name_Generator.random_name "dest-table" # We will make the upload fail by violating the NOT NULL constraint. - dest = connection.create_table dest_name [Column_Description.Value "X" Value_Type.Integer [Column_Constraint.Not_Null]] temporary=True primary_key=[] . should_succeed + dest = data.connection.create_table dest_name [Column_Description.Value "X" Value_Type.Integer [Column_Constraint.Not_Null]] temporary=True primary_key=[] . should_succeed src = source_table_builder [["X", [1, Nothing, 3]]] - existing_tables = connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector + existing_tables = data.connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector res = dest.update_rows src update_action=Update_Action.Insert key_columns=[] res.should_fail_with SQL_Error - tables_immediately_after = connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector + tables_immediately_after = data.connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector ## If there are some additional tables, we add some timeout to allow the database to do the cleaning up. 
@@ -977,37 +1003,30 @@ spec make_new_connection prefix persistent_connector=True = if additional_tables.is_empty then Nothing else additional_table = additional_tables.to_vector.first - wait_until_temporary_table_is_deleted_after_closing_connection connection additional_table + wait_until_temporary_table_is_deleted_after_closing_connection data.connection additional_table # After the wait we check again and now there should be no additional tables. - tables_after_wait = connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector + tables_after_wait = data.connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector additional_tables_2 = (Set.from_vector tables_after_wait).difference (Set.from_vector existing_tables) additional_tables_2.to_vector . should_equal [] - in_memory_table_builder args primary_key=[] connection=connection = - _ = [primary_key, connection] - case args of - table : Table -> table - _ -> Table.new args - - database_table_builder name_prefix args primary_key=[] connection=connection = - in_memory_table = in_memory_table_builder args - in_memory_table.select_into_database_table connection (Name_Generator.random_name name_prefix) temporary=True primary_key=primary_key - Test.group prefix+"Appending an in-memory table to a Database table" <| - test_table_append in_memory_table_builder (database_table_builder "target-table") + suite_builder.group prefix+"Appending an in-memory table to a Database table" group_builder-> + test_table_append group_builder in_memory_table_builder (database_table_builder "target-table") - Test.specify "will issue a friendly error if using in-memory table as target" <| + group_builder.specify "will issue a friendly error if using in-memory table as target" <| t1 = Table.new [["X", [1, 2, 3]]] t2 = Table.new [["X", [1, 2, 3]]] r1 = t1.update_rows t2 r1.should_fail_with Illegal_Argument r1.to_display_text.should_contain "in-memory 
tables are immutable" - Test.group prefix+"Appending a Database table to a Database table" <| - test_table_append (database_table_builder "source-table") (database_table_builder "target-table") + suite_builder.group prefix+"Appending a Database table to a Database table" group_builder-> + test_table_append group_builder (database_table_builder "source-table") (database_table_builder "target-table") - test_table_delete source_table_builder target_table_builder = - Test.specify "should remove rows matching by key_columns" <| + test_table_delete group_builder source_table_builder target_table_builder = + data = Data.setup make_new_connection + + group_builder.specify "should remove rows matching by key_columns" <| table = target_table_builder [["student_id", [1, 2, 44, 100, 120]], ["first_name", ["Alice", "Bob", "Charlie", "David", "Eve"]], ["score", [100, 100, 44, 100, 120]]] primary_key=["student_id"] key_values_to_delete = source_table_builder [["student_id", [44, 100]]] # key columns should automatically be discovered by the primary key @@ -1015,7 +1034,7 @@ spec make_new_connection prefix persistent_connector=True = affected_rows . should_equal 2 table.rows.map .to_vector . should_equal [[1, "Alice", 100], [2, "Bob", 100], [120, "Eve", 120]] - Test.specify "will require key_columns if no default can be used as no primary key is set" <| + group_builder.specify "will require key_columns if no default can be used as no primary key is set" <| table = target_table_builder [["X", [1, 2, 3]]] primary_key=[] key_values_to_delete = source_table_builder [["X", [1, 2]]] @@ -1029,7 +1048,7 @@ spec make_new_connection prefix persistent_connector=True = r2.should_equal 2 table.at "X" . to_vector . 
should_equal [3] - Test.specify "does not fail if no rows matching the key_values_to_delete are found" <| + group_builder.specify "does not fail if no rows matching the key_values_to_delete are found" <| table = target_table_builder [["X", [1, 2, 3]]] primary_key=[] key_values_to_delete = source_table_builder [["X", [4, 5]]] r1 = table.delete_rows key_values_to_delete key_columns=["X"] @@ -1043,14 +1062,14 @@ spec make_new_connection prefix persistent_connector=True = Problems.assume_no_problems r2 table.at "X" . to_vector . should_equal [1, 2] - Test.specify "should allow to use multiple columns as key" <| + group_builder.specify "should allow to use multiple columns as key" <| table = target_table_builder [["X", [1, 2, 2, 3, 4, 4]], ["Y", ['a', 'b', 'c', 'd', 'e', 'f']]] primary_key=[] keys = source_table_builder [["X", [2, 4]], ["Y", ['b', 'f']]] affected_rows = table.delete_rows keys key_columns=["X", "Y"] affected_rows . should_equal 2 table.rows.map .to_vector . should_equal [[1, "a"], [2, "c"], [3, "d"], [4, "e"]] - Test.specify "should fail if key_columns are missing in source or target tables" <| + group_builder.specify "should fail if key_columns are missing in source or target tables" <| table = target_table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] primary_key=[] keys = source_table_builder [["Z", [7, 8]]] @@ -1069,13 +1088,13 @@ spec make_new_connection prefix persistent_connector=True = r3.should_fail_with Missing_Input_Columns r3.catch.criteria . 
should_equal ["neither"] - Test.specify "should fail if empty key_columns were provided" <| + group_builder.specify "should fail if empty key_columns were provided" <| table = target_table_builder [["X", [1, 2, 3]]] primary_key=["X"] keys = source_table_builder [["X", [1, 2]]] r1 = table.delete_rows keys key_columns=[] r1.should_fail_with Illegal_Argument - Test.specify "should fail if multiple rows match a single key_values_to_delete row, unless allow_duplicate_matches is set to True" <| + group_builder.specify "should fail if multiple rows match a single key_values_to_delete row, unless allow_duplicate_matches is set to True" <| table = target_table_builder [["X", [1, 2, 2, 3, 2]], ["Y", ['a', 'b', 'c', 'd', 'e']]] primary_key=[] keys = source_table_builder [["X", [2]], ["Y", ['b']]] @@ -1091,16 +1110,16 @@ spec make_new_connection prefix persistent_connector=True = r2.should_equal 3 table.rows.map .to_vector . should_equal [[1, "a"], [3, "d"]] - Test.specify "should fail if the target table does not exist" <| + group_builder.specify "should fail if the target table does not exist" <| table = target_table_builder [["X", [1, 2, 3]]] keys = source_table_builder [["X", [1, 2]]] - connection.drop_table table.name + data.connection.drop_table table.name run_with_and_without_output <| table.delete_rows keys key_columns=["X"] . should_fail_with Table_Not_Found table.delete_rows keys . should_fail_with Table_Not_Found - Test.specify "will warn if not all input rows were checked as part of a dry run" <| + group_builder.specify "will warn if not all input rows were checked as part of a dry run" <| target = target_table_builder [["X", [0, 1, 500, 1500, 3500]]] primary_key=["X"] source = source_table_builder [["X", (1.up_to 2000).to_vector]] primary_key=["X"] @@ -1122,7 +1141,7 @@ spec make_new_connection prefix persistent_connector=True = r2.should_equal 3 target.at "X" . to_vector . 
should_equal [0, 3500] - Test.specify "will work fine if the target table contains NULL keys" <| + group_builder.specify "will work fine if the target table contains NULL keys" <| t1 = target_table_builder [["X", ["a", "b", Nothing, "c"]], ["Y", [1, 2, 3, Nothing]]] s1 = source_table_builder [["X", ["b", "c"]]] t1.delete_rows s1 key_columns=["X"] . should_equal 2 @@ -1130,7 +1149,7 @@ spec make_new_connection prefix persistent_connector=True = m1.at "X" . to_vector . should_equal [Nothing, "a"] m1.at "Y" . to_vector . should_equal [3, 1] - Test.specify "will raise an error if they source table contains NULL keys" <| + group_builder.specify "will raise an error if they source table contains NULL keys" <| t2 = target_table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, Nothing]]] s2 = source_table_builder [["X", ["b", Nothing]], ["f", [10, 20]]] r1 = t2.delete_rows s2 key_columns=["X"] @@ -1138,70 +1157,74 @@ spec make_new_connection prefix persistent_connector=True = r1.catch.to_display_text . should_contain "Nothing values in key columns" r1.catch.to_display_text . 
should_contain "[Nothing, 20]" - Test.group prefix+"Deleting rows from a Database table (source=in-memory)" <| - test_table_delete in_memory_table_builder (database_table_builder "target-table") + suite_builder.group prefix+"Deleting rows from a Database table (source=in-memory)" group_builder-> + test_table_delete group_builder in_memory_table_builder (database_table_builder "target-table") + + suite_builder.group prefix+"Deleting rows from a Database table (source=Database)" group_builder-> + test_table_delete group_builder (database_table_builder "source-table") (database_table_builder "target-table") - Test.group prefix+"Deleting rows from a Database table (source=Database)" <| - test_table_delete (database_table_builder "source-table") (database_table_builder "target-table") + suite_builder.group prefix+"Deleting rows from a Database table" group_builder-> + data = Data.setup make_new_connection - Test.group prefix+"Deleting rows from a Database table" <| - Test.specify "[ADVANCED] it should be possible to truncate the whole table" <| + group_builder.specify "[ADVANCED] it should be possible to truncate the whole table" <| name = Name_Generator.random_name "table-to-truncate" - t = (Table.new [["X", [1, 2, 3]]]).select_into_database_table connection name temporary=True primary_key=[] + t = (Table.new [["X", [1, 2, 3]]]).select_into_database_table data.connection name temporary=True primary_key=[] t.at "X" . to_vector . should_equal [1, 2, 3] - connection.truncate_table name + data.connection.truncate_table name t.at "X" . to_vector . should_equal [] # The table still exists - t2 = connection.query (SQL_Query.Table_Name name) + t2 = data.connection.query (SQL_Query.Table_Name name) t2.at "X" . to_vector . 
should_equal [] - Test.specify "should allow to delete rows based on another query" <| + group_builder.specify "should allow to delete rows based on another query" <| table = database_table_builder "target-table" [["student_id", [1, 2, 44, 100, 120]], ["first_name", ["Alice", "Bob", "Charlie", "David", "Eve"]], ["graduation_year", [2023, 2024, 2021, 2020, 2019]]] primary_key=["student_id"] graduates = table.filter "graduation_year" (Filter_Condition.Less 2023) affected_rows = table.delete_rows graduates # uses the primary key by default affected_rows . should_equal 3 table.rows.map .to_vector . should_equal [[1, "Alice", 2023], [2, "Bob", 2024]] - Test.specify "will issue a friendly error if using in-memory table as target" <| + group_builder.specify "will issue a friendly error if using in-memory table as target" <| t1 = Table.new [["X", [1, 2, 3]]] t2 = Table.new [["X", [1, 2, 3]]] r1 = t1.delete_rows t2 r1.should_fail_with Illegal_Argument r1.to_display_text.should_contain "in-memory tables are immutable" - Test.specify "can use itself as source of rows to delete (even if that's an anti-pattern)" <| + group_builder.specify "can use itself as source of rows to delete (even if that's an anti-pattern)" <| t1 = database_table_builder "target-table" [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] affected_rows = t1.delete_rows t1 affected_rows . 
should_equal 3 t1.rows.should_equal [] execution_context_group_name = prefix+"Output Execution Context for Database operations" - Test.group execution_context_group_name <| - Test.specify "should forbid executing updates" <| + suite_builder.group execution_context_group_name group_builder-> + data = Data.setup make_new_connection + + group_builder.specify "should forbid executing updates" <| Context.Output.with_disabled <| - r1 = connection.execute_update "CREATE TEMPORARY TABLE foo (x INTEGER)" + r1 = data.connection.execute_update "CREATE TEMPORARY TABLE foo (x INTEGER)" r1.should_fail_with Forbidden_Operation - Test.specify "should return a temporary table for Connection.create_table" <| + group_builder.specify "should return a temporary table for Connection.create_table" <| Context.Output.with_disabled <| name = Name_Generator.random_name "table-foo" - r1 = connection.create_table name [Column_Description.Value "x" Value_Type.Integer] temporary=False primary_key=[] + r1 = data.connection.create_table name [Column_Description.Value "x" Value_Type.Integer] temporary=False primary_key=[] Problems.expect_only_warning Dry_Run_Operation r1 r1.column_names . should_equal ["x"] r1.name . should_not_equal name r1.is_trivial_query . should_be_true - Test.specify "will not show dry-run tables in the list by default" <| + group_builder.specify "will not show dry-run tables in the list by default" <| src = Table.new [["X", [1, 2, 3]]] name = Name_Generator.random_name "dry-run-list-test" table = Context.Output.with_disabled <| - src.select_into_database_table connection name primary_key=[] + src.select_into_database_table data.connection name primary_key=[] table.column_names . should_equal ["X"] # Workaround for bug #7093 dry_run_name = Warning.clear table.name - connection.tables . at "Name" . to_vector . should_not_contain dry_run_name - connection.base_connection.get_tables_advanced include_hidden=True . at "Name" . to_vector . 
should_contain dry_run_name + data.connection.tables . at "Name" . to_vector . should_not_contain dry_run_name + data.connection.base_connection.get_tables_advanced include_hidden=True . at "Name" . to_vector . should_contain dry_run_name table.at "X" . to_vector . should_contain_the_same_elements_as [1, 2, 3] @@ -1210,7 +1233,7 @@ spec make_new_connection prefix persistent_connector=True = due to GC nondeterminism, it may need to be disabled - currently we do not have a more robust way to test finalizers. gc_test_pending = "Relying on GC seems not stable enough. Keeping this test so it can be checked manually. In the future we may improve it with better instrumentation of Managed_Resource." - Test.specify "will drop a dry run table once it is garbage collected" pending=gc_test_pending <| + group_builder.specify "will drop a dry run table once it is garbage collected" pending=gc_test_pending <| src = Table.new [["X", [1, 2, 3]]] name = Name_Generator.random_name "dry-run-list-test" @@ -1218,10 +1241,10 @@ spec make_new_connection prefix persistent_connector=True = # `Warning.clear` is added as a workaround for bug #7093 dry_run_name = Warning.clear <| Context.Output.with_disabled <| - table = src.select_into_database_table connection name primary_key=[] + table = src.select_into_database_table data.connection name primary_key=[] sentinel = Managed_Resource.register "payload" (cleanup_sentinel was_cleanup_performed) table.column_names . should_equal ["X"] - connection.base_connection.get_tables_advanced include_hidden=True . at "Name" . to_vector . should_contain table.name + data.connection.base_connection.get_tables_advanced include_hidden=True . at "Name" . to_vector . should_contain table.name table.at "X" . to_vector . 
should_contain_the_same_elements_as [1, 2, 3] name = table.name payload = sentinel.with x-> "transformed_"+x @@ -1229,7 +1252,7 @@ spec make_new_connection prefix persistent_connector=True = name Runtime.gc - tables_after_potential_gc = connection.base_connection.get_tables_advanced include_hidden=True . at "Name" . to_vector + tables_after_potential_gc = data.connection.base_connection.get_tables_advanced include_hidden=True . at "Name" . to_vector case was_cleanup_performed.get of True -> @@ -1241,7 +1264,7 @@ spec make_new_connection prefix persistent_connector=True = IO.println "[WARNING] The GC was not performed on time in the "+execution_context_group_name+" test. The test did not check the invariants to avoid spurious failures." if persistent_connector then - Test.specify "will not overwrite an existing table with a dry-run table if the name is clashing (create_table)" <| + group_builder.specify "will not overwrite an existing table with a dry-run table if the name is clashing (create_table)" <| target_name = Name_Generator.random_name "test-table" dry_run_name = Context.Output.with_disabled <| tmp_connection1 = make_new_connection Nothing @@ -1252,14 +1275,14 @@ spec make_new_connection prefix persistent_connector=True = tmp_connection1.close name - wait_until_temporary_table_is_deleted_after_closing_connection connection dry_run_name + wait_until_temporary_table_is_deleted_after_closing_connection data.connection dry_run_name src = Table.new [["X", [1, 2, 3]]] # Create a table that has the same name as the dry run table normally would have. - pre_existing_table = src.select_into_database_table connection dry_run_name temporary=False . should_succeed + pre_existing_table = src.select_into_database_table data.connection dry_run_name temporary=False . should_succeed pre_existing_table.column_names . should_equal ["X"] pre_existing_table.at "X" . to_vector . 
should_contain_the_same_elements_as [1, 2, 3] - Panic.with_finalizer (connection.drop_table pre_existing_table.name if_exists=True) <| + Panic.with_finalizer (data.connection.drop_table pre_existing_table.name if_exists=True) <| new_dry_run_name = Context.Output.with_disabled <| tmp_connection2 = make_new_connection Nothing # Create a dry run table that is supposed to clash with pre_existing_table @@ -1276,12 +1299,14 @@ spec make_new_connection prefix persistent_connector=True = # The pre-existing table should not have been overwritten. pre_existing_table.at "X" . to_vector . should_contain_the_same_elements_as [1, 2, 3] - tests source_table_builder suffix = - Test.specify "should return a temporary table with a sample of the data for select_into_database_table"+suffix <| + tests group_builder source_table_builder suffix = + data = Data.setup make_new_connection + + group_builder.specify "should return a temporary table with a sample of the data for select_into_database_table"+suffix <| Context.Output.with_disabled <| src1 = source_table_builder [["X", [1, 2, 3]]] name = (Name_Generator.random_name "table-foo2") - r1 = src1.select_into_database_table connection name + r1 = src1.select_into_database_table data.connection name Problems.expect_only_warning Dry_Run_Operation r1 r1.column_names . should_equal ["X"] r1.name . should_not_equal name @@ -1294,16 +1319,16 @@ spec make_new_connection prefix persistent_connector=True = n = 2000 src2 = source_table_builder [["X", (0.up_to n).to_vector]] # We re-use the name - multiple dry-runs for the same table name should be allowed without issues. - r2 = src2.select_into_database_table connection name + r2 = src2.select_into_database_table data.connection name Problems.expect_only_warning Dry_Run_Operation r2 r2.column_names . should_equal ["X"] # Only a sample is uploaded. r2.row_count . should_equal 1000 r2.is_trivial_query . 
should_be_true - Test.specify "should return the target table unchanged for update_rows"+suffix <| + group_builder.specify "should return the target table unchanged for update_rows"+suffix <| dest_data = Table.new [["X", [1, 2, 3]]] - dest = dest_data.select_into_database_table connection (Name_Generator.random_name "target-table") temporary=True primary_key=[] + dest = dest_data.select_into_database_table data.connection (Name_Generator.random_name "target-table") temporary=True primary_key=[] Context.Output.with_disabled <| src = source_table_builder [["X", [4, 5, 6]]] r1 = dest.update_rows src update_action=Update_Action.Insert key_columns=[] @@ -1315,10 +1340,10 @@ spec make_new_connection prefix persistent_connector=True = r1.at "X" . to_vector . should_contain_the_same_elements_as [1, 2, 3] r1.is_trivial_query . should_be_true - Test.specify "should return the count of rows that would be deleted for delete_rows, but keep the table unchanged" <| + group_builder.specify "should return the count of rows that would be deleted for delete_rows, but keep the table unchanged" <| v = [1, 2, 3, 4, 4, 4, 1] dest_data = Table.new [["X", v]] - dest = dest_data.select_into_database_table connection (Name_Generator.random_name "table-delete-rows-dry") temporary=True primary_key=[] + dest = dest_data.select_into_database_table data.connection (Name_Generator.random_name "table-delete-rows-dry") temporary=True primary_key=[] Context.Output.with_disabled <| src = source_table_builder [["X", [2, 3]]] r1 = dest.delete_rows src key_columns=["X"] @@ -1337,7 +1362,7 @@ spec make_new_connection prefix persistent_connector=True = dest.at "X" . to_vector . 
should_equal v if persistent_connector then - Test.specify "will not overwrite an existing table with a dry-run table if the name is clashing (select_into_database_table)"+suffix <| + group_builder.specify "will not overwrite an existing table with a dry-run table if the name is clashing (select_into_database_table)"+suffix <| target_name = Name_Generator.random_name "test-table" dry_run_name = Context.Output.with_disabled <| tmp_connection1 = make_new_connection Nothing @@ -1349,14 +1374,14 @@ spec make_new_connection prefix persistent_connector=True = tmp_connection1.close name - wait_until_temporary_table_is_deleted_after_closing_connection connection dry_run_name + wait_until_temporary_table_is_deleted_after_closing_connection data.connection dry_run_name pre_existing_src = Table.new [["X", [4, 5, 6]]] # Create a table that has the same name as the dry run table normally would have. - pre_existing_table = pre_existing_src.select_into_database_table connection dry_run_name temporary=False . should_succeed + pre_existing_table = pre_existing_src.select_into_database_table data.connection dry_run_name temporary=False . should_succeed pre_existing_table.column_names . should_equal ["X"] pre_existing_table.at "X" . to_vector . should_contain_the_same_elements_as [4, 5, 6] - Panic.with_finalizer (connection.drop_table pre_existing_table.name if_exists=True) <| + Panic.with_finalizer (data.connection.drop_table pre_existing_table.name if_exists=True) <| new_dry_run_name = Context.Output.with_disabled <| tmp_connection2 = make_new_connection Nothing src3 = source_table_builder [["B", [7, 8, 9]]] connection=tmp_connection2 @@ -1375,8 +1400,8 @@ spec make_new_connection prefix persistent_connector=True = # The pre-existing table should not have been overwritten. pre_existing_table.at "X" . to_vector . 
should_contain_the_same_elements_as [4, 5, 6] - tests (in_memory_table_builder) " (from memory)" - tests (database_table_builder "ec-tests-table") " (from Database table)" + tests group_builder (in_memory_table_builder) " (from memory)" + tests group_builder (database_table_builder "ec-tests-table") " (from Database table)" ## PRIVATE Creates a mock column containing `values`. @@ -1407,4 +1432,3 @@ wait_until_temporary_table_is_deleted_after_closing_connection connection table_ go 0 -## TODO TEST DRY RUN TWICE! diff --git a/test/Tests/src/Data/Round_Spec.enso b/test/Tests/src/Data/Round_Spec.enso index 4e030bd3d9ce..2650746a3895 100644 --- a/test/Tests/src/Data/Round_Spec.enso +++ b/test/Tests/src/Data/Round_Spec.enso @@ -8,9 +8,9 @@ from Standard.Test import Test, Test_Suite polyglot java import java.math.BigInteger ## PRIVATE -spec prefix round_fun = - Test.group prefix+"Rounding numeric tests" <| - Test.specify "Can round positive decimals correctly" <| +add_specs suite_builder prefix round_fun = + suite_builder.group prefix+"Rounding numeric tests" group_builder-> + group_builder.specify "Can round positive decimals correctly" <| round_fun 3.0 . should_equal 3 round_fun 3.00001 . should_equal 3 round_fun 3.3 . should_equal 3 @@ -19,7 +19,7 @@ spec prefix round_fun = round_fun 3.50001 . should_equal 4 round_fun 3.99999 . should_equal 4 - Test.specify "Can round negative decimals correctly" <| + group_builder.specify "Can round negative decimals correctly" <| round_fun -3.0 . should_equal -3 round_fun -3.00001 . should_equal -3 round_fun -3.3 . should_equal -3 @@ -28,18 +28,18 @@ spec prefix round_fun = round_fun -3.50001 . should_equal -4 round_fun -3.99999 . should_equal -4 - Test.specify "Explicit and implicit 0 decimal places work the same" <| + group_builder.specify "Explicit and implicit 0 decimal places work the same" <| round_fun 3.00001 0 . should_equal 3 round_fun 3.3 0 . should_equal 3 round_fun 3.00001 . should_equal 3 round_fun 3.3 . 
should_equal 3 - Test.specify "Can round zero and small decimals correctly" <| + group_builder.specify "Can round zero and small decimals correctly" <| round_fun 0.0 . should_equal 0 round_fun 0.00001 . should_equal 0 round_fun -0.00001 . should_equal 0 - Test.specify "Can round positive decimals to a specified number of decimal places" <| + group_builder.specify "Can round positive decimals to a specified number of decimal places" <| round_fun 3.0001 2 . should_equal 3.0 round_fun 3.1414 2 . should_equal 3.14 round_fun 3.1415 2 . should_equal 3.14 @@ -52,7 +52,7 @@ spec prefix round_fun = round_fun 3.1416 3 . should_equal 3.142 round_fun 3.9999 3 . should_equal 4.0 - Test.specify "Can round negative decimals to a specified number of decimal places" <| + group_builder.specify "Can round negative decimals to a specified number of decimal places" <| round_fun -3.0001 2 . should_equal -3.0 round_fun -3.1414 2 . should_equal -3.14 round_fun -3.1415 2 . should_equal -3.14 @@ -65,7 +65,7 @@ spec prefix round_fun = round_fun -3.1416 3 . should_equal -3.142 round_fun -3.9999 3 . should_equal -4.0 - Test.specify "Can round positive decimals to a specified negative number of decimal places" <| + group_builder.specify "Can round positive decimals to a specified negative number of decimal places" <| round_fun 1234.0 -1 . should_equal 1230 round_fun 1234.0 -2 . should_equal 1200 round_fun 1234.0 -3 . should_equal 1000 @@ -80,7 +80,7 @@ spec prefix round_fun = round_fun 1495.0 -2 . should_equal 1500 round_fun 1494.0 -2 . should_equal 1500 - Test.specify "Can round negative decimals to a specified negative number of decimal places" <| + group_builder.specify "Can round negative decimals to a specified negative number of decimal places" <| round_fun -1234.0 -1 . should_equal -1230 round_fun -1234.0 -2 . should_equal -1200 round_fun -1234.0 -3 . should_equal -1000 @@ -95,7 +95,7 @@ spec prefix round_fun = round_fun -1495.0 -2 . should_equal -1500 round_fun -1494.0 -2 . 
should_equal -1500 - Test.specify "Banker's rounding handles half-way values correctly" <| + group_builder.specify "Banker's rounding handles half-way values correctly" <| round_fun -3.5 use_bankers=True . should_equal -4 round_fun -2.5 use_bankers=True . should_equal -2 round_fun -1.5 use_bankers=True . should_equal -2 @@ -115,7 +115,7 @@ spec prefix round_fun = round_fun -12350.0 -2 use_bankers=True . should_equal -12400 round_fun -12250.0 -2 use_bankers=True . should_equal -12200 - Test.specify "Banker's rounding handles non-half-way values just like normal rounding" <| + group_builder.specify "Banker's rounding handles non-half-way values just like normal rounding" <| round_fun 3.0 use_bankers=True . should_equal 3 round_fun 3.00001 use_bankers=True . should_equal 3 round_fun 3.3 use_bankers=True . should_equal 3 @@ -130,7 +130,7 @@ spec prefix round_fun = round_fun -3.50001 . should_equal -4 round_fun -3.99999 . should_equal -4 - Test.specify "Can round correctly near the precision limit" <| + group_builder.specify "Can round correctly near the precision limit" <| round_fun 1.22222222225 10 . should_equal 1.2222222223 round_fun 1.222222222225 11 . should_equal 1.22222222223 round_fun 1.2222222222225 12 . should_equal 1.222222222223 @@ -155,7 +155,7 @@ spec prefix round_fun = round_fun -1.22222222222235 13 . should_equal -1.2222222222224 round_fun -1.222222222222235 14 . should_equal -1.22222222222224 - Test.specify "Can round correctly near the precision limit, using banker's rounding" <| + group_builder.specify "Can round correctly near the precision limit, using banker's rounding" <| round_fun 1.22222222225 10 use_bankers=True . should_equal 1.2222222222 round_fun 1.222222222225 11 use_bankers=True . should_equal 1.22222222222 round_fun 1.2222222222225 12 use_bankers=True . should_equal 1.222222222222 @@ -180,15 +180,15 @@ spec prefix round_fun = round_fun -1.22222222222235 13 use_bankers=True . 
should_equal -1.2222222222224 round_fun -1.222222222222235 14 use_bankers=True . should_equal -1.22222222222224 - Test.specify "Decimal places out of range" <| + group_builder.specify "Decimal places out of range" <| round_fun 3.1 16 . should_fail_with Illegal_Argument round_fun 3.1 -16 . should_fail_with Illegal_Argument - Test.specify "Floating point imperfect representation counter-examples" <| + group_builder.specify "Floating point imperfect representation counter-examples" <| round_fun 1.225 2 use_bankers=True . should_equal 1.22 # Actual result 1.23 round_fun 37.785 2 . should_equal 37.79 - Test.specify "Can round small integers to a specified number of decimal places correctly (value is unchanged)" + group_builder.specify "Can round small integers to a specified number of decimal places correctly (value is unchanged)" round_fun 0 . should_equal 0 round_fun 3 . should_equal 3 round_fun -3 . should_equal -3 @@ -197,7 +197,7 @@ spec prefix round_fun = round_fun 3 1 . should_equal 3 round_fun -3 1 . should_equal -3 - Test.specify "Can round integers to a specified number of negative places correctly" + group_builder.specify "Can round integers to a specified number of negative places correctly" round_fun 0 -1 . should_equal 0 round_fun 4 -1 . should_equal 0 round_fun 5 -1 . should_equal 10 @@ -225,7 +225,7 @@ spec prefix round_fun = round_fun 3098 -3 . should_equal 3000 round_fun 3101 -3 . should_equal 3000 - Test.specify "Can round negative integers to a specified number of negative places correctly" + group_builder.specify "Can round negative integers to a specified number of negative places correctly" round_fun -4 -1 . should_equal 0 round_fun -5 -1 . should_equal -10 round_fun -6 -1 . should_equal -10 @@ -252,7 +252,7 @@ spec prefix round_fun = round_fun -3098 -3 . should_equal -3000 round_fun -3101 -3 . 
should_equal -3000 - Test.specify "Can round negative integers to a specified number of negative places with banker's rounding correctly" <| + group_builder.specify "Can round negative integers to a specified number of negative places with banker's rounding correctly" <| round_fun 12300 -2 use_bankers=True . should_equal 12300 round_fun 12301 -2 use_bankers=True . should_equal 12300 round_fun 12330 -2 use_bankers=True . should_equal 12300 @@ -281,10 +281,8 @@ spec prefix round_fun = round_fun -12250 -2 use_bankers=True . should_equal -12200 round_fun -12251 -2 use_bankers=True . should_equal -12300 - Test.specify "Handles incorrect argument types" <| + group_builder.specify "Handles incorrect argument types" <| Test.expect_panic_with (round_fun 123 "two") Type_Error Test.expect_panic_with (round_fun 123 use_bankers="no") Type_Error Test.expect_panic_with (round_fun 123 use_bankers=0) Type_Error -## PRIVATE -main = Test_Suite.run_main (spec "Number " .round) From d152c22c1250a0dab9a78c15cca2572fc2b50782 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Wed, 17 Jan 2024 10:40:13 +0100 Subject: [PATCH 32/93] Remove SQLite_Spec_New --- .../src/Database/SQLite_Spec_New.enso | 140 ------------------ 1 file changed, 140 deletions(-) delete mode 100644 test/Table_Tests/src/Database/SQLite_Spec_New.enso diff --git a/test/Table_Tests/src/Database/SQLite_Spec_New.enso b/test/Table_Tests/src/Database/SQLite_Spec_New.enso deleted file mode 100644 index 49cc9d184072..000000000000 --- a/test/Table_Tests/src/Database/SQLite_Spec_New.enso +++ /dev/null @@ -1,140 +0,0 @@ -from Standard.Base import all -import Standard.Base.Runtime.Ref.Ref -from Standard.Base.Runtime import assert -import Standard.Base.Errors.File_Error.File_Error -import Standard.Base.Errors.Illegal_Argument.Illegal_Argument - -import Standard.Table.Data.Type.Value_Type.Bits -from Standard.Table import Table, Value_Type -from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names - -import 
Standard.Database.Data.Column.Column -import Standard.Database.Internal.Replace_Params.Replace_Params -from Standard.Database import all -from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation - -from Standard.Test_New import all - -import project.Database.Common.Common_Spec_New -import project.Database.Helpers.Name_Generator - -## - sqlite_specific_spec suite_builder prefix connection setup = - table_builder = setup.table_builder - - suite_builder.group prefix+"Schemas and Databases" group_builder-> - group_builder.specify "should be able to get current database and list databases" <| - connection.database . should_equal Nothing - connection.databases . should_equal [Nothing] - Meta.is_same_object connection (connection.set_database Nothing) . should_be_true - - group_builder.specify "should be able to get current schema and list schemas" <| - connection.schema . should_equal Nothing - connection.schemas . should_equal [Nothing] - Meta.is_same_object connection (connection.set_schema Nothing) . should_be_true - - group_builder.specify "does not allow changing schema or database" <| - connection.set_schema "foo" . should_fail_with SQL_Error - connection.set_database "foo" . should_fail_with SQL_Error - - Test.group prefix+"Tables and Table Types" <| - tinfo = Name_Generator.random_name "TestTable" - connection.execute_update 'CREATE TABLE "'+tinfo+'" ("A" VARCHAR)' - - vinfo = Name_Generator.random_name "TestView" - connection.execute_update 'CREATE VIEW "'+vinfo+'" AS SELECT "A" FROM "'+tinfo+'";' - - temporary_table = Name_Generator.random_name "TemporaryTable" - (Table.new [["X", [1, 2, 3]]]).select_into_database_table connection temporary_table temporary=True - - Test.specify "should be able to list table types" <| - table_types = connection.table_types - table_types.length . should_not_equal 0 - table_types.contains "TABLE" . should_be_true - table_types.contains "VIEW" . 
should_be_true - - - Test.group prefix+"math functions" <| - do_op n op = - table = table_builder [["x", [n]]] - result = table.at "x" |> op - result.to_vector.at 0 - do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers) - - Test.specify "Can round correctly near the precision limit" <| - # This value varies depending on the version of SQLite. - do_round 1.2222222222222225 15 . should_equal 1.222222222222223 0.000000000000002 - do_round -1.2222222222222225 15 . should_equal -1.222222222222223 0.000000000000002 - do_round 1.2222222222222235 15 . should_equal 1.222222222222223 - do_round -1.2222222222222235 15 . should_equal -1.222222222222223 - - -sqlite_spec suite_builder prefix connection_provider = - Common_Spec_New.add_common_specs suite_builder prefix connection_provider - - -backing_file = - transient_dir = enso_project.data / "transient" - assert transient_dir.exists ("There should be .gitignore file in the " + transient_dir.path + " directory") - transient_dir / "sqlite_test.db" - - -create_inmem_connection = - Database.connect (SQLite In_Memory) - - -create_file_connection file = - connection = Database.connect (SQLite file) - connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' - connection - - -type File_Connection - Value ~file - - setup = File_Connection.Value <| - IO.println <| " SQLite_Spec_New.File_Connection.setup" - tmp_file = backing_file - con = create_file_connection backing_file - con.close - assert tmp_file.exists - tmp_file - - - teardown self = - IO.println <| " SQLite_Spec_New.File_Connection.teardown" - assert self.file.exists - self.file.delete - - -suite = - Test.build suite_builder-> - in_file_prefix = "[SQLite File] " - sqlite_spec suite_builder in_file_prefix (_ -> create_file_connection backing_file) - - in_memory_prefix = "[SQLite In-Memory] " - sqlite_spec suite_builder in_memory_prefix (_ -> create_inmem_connection) - - suite_builder.group "SQLite_Format 
should allow connecting to SQLite files" group_builder-> - data = File_Connection.setup - - group_builder.teardown <| - data.teardown - - group_builder.specify "should recognise a SQLite database file" <| - Auto_Detect.get_reading_format data.file . should_be_a SQLite_Format - - group_builder.specify "should not duplicate warnings" <| - c = Database.connect (SQLite In_Memory) - t0 = Table.new [["X", ["a", "bc", "def"]]] - t1 = t0.select_into_database_table c "Tabela" - t2 = t1.cast "X" (Value_Type.Char size=1) - Warning.get_all t2 . length . should_equal 1 - -main = - IO.println <| "==============" - suite.print_all - IO.println <| "==============" - group_filter = Nothing - spec_filter = Nothing - suite.run_with_filter group_filter spec_filter From ce8df7b2627d6743614383dbd6d32ba6f6e311c9 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Wed, 17 Jan 2024 13:01:33 +0100 Subject: [PATCH 33/93] Fix JUnit xml output --- .../Test_New/0.0.0-dev/src/Suite.enso | 12 ++++++--- .../Test_New/0.0.0-dev/src/Suite_Config.enso | 25 ++++++++----------- .../Test_New/0.0.0-dev/src/Test_Reporter.enso | 4 +-- 3 files changed, 20 insertions(+), 21 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index 06b12976a7fc..716a00e09082 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -10,6 +10,8 @@ import project.Phases.Running_Tests import project.Suite_Config.Suite_Config import project.Test_Reporter +polyglot java import java.lang.StringBuilder + type Suite_Builder ## PRIVATE Impl builder @@ -63,10 +65,12 @@ type Suite case group_name_matches of False -> False True -> group.pending == Nothing - filtered_groups.each group-> - State.run Running_Tests True <| - results = Helpers.run_group_with_filter group spec_filter_conv - Test_Reporter.print_report results config Nothing + junit_sb_builder = if 
config.should_output_junit then StringBuilder.new else Nothing + Test_Reporter.wrap_junit_testsuites config junit_sb_builder <| + filtered_groups.each group-> + State.run Running_Tests True <| + results = Helpers.run_group_with_filter group spec_filter_conv + Test_Reporter.print_report results config junit_sb_builder group_names self = self.groups.map (_.name) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite_Config.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite_Config.enso index a9ff17a956b0..e53451a6926c 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite_Config.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite_Config.enso @@ -19,7 +19,7 @@ find_project_root path = find_caller_script : Vector Stack_Trace_Element -> File find_caller_script stack = find_main idx = - if stack.at idx . name == "Test_Suite.type.run_main" then idx else + if stack.at idx . name . split "." . last == "main" then idx else @Tail_Call find_main (idx + 1) main_index = find_main 0 @@ -31,10 +31,18 @@ find_caller_script stack = if (idx + 1 == stack.length) then Nothing else @Tail_Call find_caller (idx + 1) - find_caller (main_index + 1) + find_caller main_index ## Holds configuration for a Test_Suite type Suite_Config + ## PRIVATE + Construct a configuration + + Arguments: + - output_path: The path to the JUnit XML file to write to. If Nothing, no JUnit XML file + will be written. 
+ Value (print_only_failures : Boolean) (output_path : (File | Nothing)) (use_ansi_colors : Boolean) + ## Creates an Suite_Config based off environment and caller location from_environment : Suite_Config from_environment = @@ -44,7 +52,7 @@ type Suite_Config results_path = if junit_folder.is_nothing then Nothing else caller_script = find_caller_script Runtime.get_stack_trace project_root = find_project_root caller_script - case project_root.is_nothing of + case project_root.is_error || project_root.is_nothing of True -> IO.println "Unable to determine root project path. JUnit output disabled." Nothing @@ -53,17 +61,6 @@ type Suite_Config Suite_Config.Value print_only_failures results_path use_ansi_colors - ## PRIVATE - Construct a configuration - Value (print_only_failures : Boolean) (output_path : (File | Nothing)) (use_ansi_colors : Boolean) - - ## Should a specific group be run. - should_run_group self name = - regexp = self.only_group_regexp - case regexp of - _ : Text -> name.match regexp . catch Any (_->True) - _ -> True - ## Should the results be written to JUnit XML file. should_output_junit self = self.output_path.is_nothing.not diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Reporter.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Reporter.enso index 04886cf86834..fd9cc9321059 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Reporter.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Reporter.enso @@ -30,9 +30,6 @@ wrap_junit_testsuites config builder ~action = result -## PRIVATE -use_ansi_colors : Boolean -use_ansi_colors = Environment.get "ENSO_TEST_ANSI_COLORS" . is_nothing . 
not red text = '\u001b[31;1m' + text + '\u001b[0m' @@ -100,6 +97,7 @@ print_group_report group_name test_results config builder = total_time = test_results.fold Duration.zero acc-> res-> acc + res.time_taken if config.should_output_junit then + assert builder.is_nothing.not "Builder must be specified when JUnit output is enabled" builder.append (' Date: Wed, 17 Jan 2024 18:30:31 +0100 Subject: [PATCH 34/93] Fix Upload_Spec --- .../Table_Tests/src/Database/Upload_Spec.enso | 1348 +++++++++-------- 1 file changed, 681 insertions(+), 667 deletions(-) diff --git a/test/Table_Tests/src/Database/Upload_Spec.enso b/test/Table_Tests/src/Database/Upload_Spec.enso index d4275e78cbb7..77cd5d789a3f 100644 --- a/test/Table_Tests/src/Database/Upload_Spec.enso +++ b/test/Table_Tests/src/Database/Upload_Spec.enso @@ -17,10 +17,9 @@ from Standard.Database.Errors import all from Standard.Database.Internal.Upload_Table import default_key_columns import Standard.Database.Data.Column_Constraint.Column_Constraint -import Standard.Test.Extensions -import Standard.Test.Test_Environment -from Standard.Test import Test, Test_Suite, Problems -from Standard.Test.Execution_Context_Helpers import run_with_and_without_output +from Standard.Test_New import all +import Standard.Test_New.Test_Environment +from Standard.Test_New.Execution_Context_Helpers import run_with_and_without_output import project.Database.Helpers.Name_Generator @@ -28,6 +27,12 @@ polyglot java import org.enso.table_test_helpers.ExplodingStorage polyglot java import org.enso.table_test_helpers.ExplodingStoragePayload polyglot java import java.lang.Thread +main = + create_connection_func _ = (Database.connect (SQLite In_Memory)) + suite = Test.build suite_builder-> + add_specs suite_builder create_connection_func "SQLite In-Memory" persistent_connector=False + suite.run_with_filter + type Data Value ~data @@ -40,15 +45,15 @@ type Data [connection, in_memory_table] -in_memory_table_builder args primary_key=[] connection=Nothing 
= - _ = [primary_key, connection] +in_memory_table_builder args primary_key=[] data=Nothing = + _ = [primary_key, data] case args of table : Table -> table _ -> Table.new args -database_table_builder name_prefix args primary_key=[] connection = +database_table_builder name_prefix args primary_key=[] (data : Data) = in_memory_table = in_memory_table_builder args - in_memory_table.select_into_database_table connection (Name_Generator.random_name name_prefix) temporary=True primary_key=primary_key + in_memory_table.select_into_database_table data.connection (Name_Generator.random_name name_prefix) temporary=True primary_key=primary_key ## PRIVATE Adds uploading table specs to the suite builder. @@ -589,429 +594,10 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = preexisting2.column_names . should_equal ["X"] preexisting2.at "X" . to_vector . should_equal [42] - test_table_append group_builder source_table_builder target_table_builder = + suite_builder.group prefix+"Appending an in-memory table to a Database table" group_builder-> data = Data.setup make_new_connection - group_builder.specify "should be able to append new rows to a table" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["X", [4, 5, 6]], ["Y", ['d', 'e', 'f']]] - - result = dest.update_rows src key_columns=["X"] - result.column_names . should_equal ["X", "Y"] - - result.is_trivial_query . should_be_true - (result == dest) . 
should_be_true - - expected_rows = [[1, 'a'], [2, 'b'], [3, 'c'], [4, 'd'], [5, 'e'], [6, 'f']] - rows1 = result.rows.to_vector.map .to_vector - rows1.should_contain_the_same_elements_as expected_rows - - group_builder.specify "should error if new rows clash with existing ones and mode is Insert, target table should remain unchanged" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["X", [1, 5, 6]], ["Y", ['d', 'e', 'f']]] - - # This is checked in dry-run mode but only for the first 1000 rows. - run_with_and_without_output <| - r1 = dest.update_rows src update_action=Update_Action.Insert key_columns=["X"] - r1.should_fail_with Rows_Already_Present - - group_builder.specify "should use the target table primary key for the key by default" <| - dest1 = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [4, 5, 6]]] primary_key=["Y", "Z"] - default_key_columns dest1 . should_equal ["Y", "Z"] - - dest2 = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["Y"] - src = source_table_builder [["X", [4, 5]], ["Y", ['b', 'e']]] - # Not specifying `key_columns`, rely on `default_key_columns` inferring Y as default based on the primary key. - r1 = dest2.update_rows src - rows = r1.rows.to_vector.map .to_vector - rows.should_contain_the_same_elements_as [[1, 'a'], [4, 'b'], [3, 'c'], [5, 'e']] - - group_builder.specify "should be able to Update existing rows in a table" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] - src = source_table_builder [["X", [2]], ["Y", ['ZZZ']]] - - r1 = dest.update_rows src update_action=Update_Action.Update key_columns=["X"] - r1.column_names . 
should_equal ["X", "Y"] - r1.should_succeed - - rows = dest.rows.to_vector.map .to_vector - rows.should_contain_the_same_elements_as [[1, 'a'], [2, 'ZZZ'], [3, 'c']] - - group_builder.specify "should fail on unmatched rows in Update mode" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] - src = source_table_builder [["X", [2, 100]], ["Y", ['d', 'e']]] - - # In dry run mode this will only check first 1000 rows. - run_with_and_without_output <| - r1 = dest.update_rows src update_action=Update_Action.Update key_columns=["X"] - r1.should_fail_with Unmatched_Rows - - # The table should remain unchanged. - rows = dest.rows.to_vector.map .to_vector - rows.should_contain_the_same_elements_as [[1, 'a'], [2, 'b'], [3, 'c']] - - group_builder.specify "should upsert by default (update existing rows, insert new rows)" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["X", [2, 100]], ["Y", ['D', 'E']]] - r1 = dest.update_rows src key_columns=["X"] - Problems.assume_no_problems r1 - r1.column_names . should_equal ["X", "Y"] - expected_rows = [[1, 'a'], [2, 'D'], [3, 'c'], [100, 'E']] - rows1 = r1.rows.to_vector.map .to_vector - rows1.should_contain_the_same_elements_as expected_rows - - # The original table is updated too. - rows2 = dest.rows.to_vector.map .to_vector - rows2.should_contain_the_same_elements_as expected_rows - - group_builder.specify "should allow to align an existing table with a source (upsert + delete rows missing from source)" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["X", [2, 100]], ["Y", ['D', 'E']]] - r1 = dest.update_rows src update_action=Update_Action.Align_Records key_columns=["X"] - Problems.assume_no_problems r1 - r1.column_names . 
should_equal ["X", "Y"] - expected_rows = [[2, 'D'], [100, 'E']] - rows1 = r1.rows.to_vector.map .to_vector - rows1.should_contain_the_same_elements_as expected_rows - rows2 = dest.rows.to_vector.map .to_vector - rows2.should_contain_the_same_elements_as expected_rows - - group_builder.specify "should match columns by name, reordering to destination order if needed (Insert)" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [4, 5, 6]]] - result = dest.update_rows src update_action=Update_Action.Insert key_columns=["X"] - result.column_names . should_equal ["X", "Y"] - src.column_names . should_equal ["Y", "X"] - expected_rows = [[1, 'a'], [2, 'b'], [3, 'c'], [4, 'd'], [5, 'e'], [6, 'f']] - rows1 = result.rows.to_vector.map .to_vector - rows1.should_contain_the_same_elements_as expected_rows - - group_builder.specify "should match columns by name, reordering to destination order if needed (Upsert)" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [1, 5, 6]]] - result = dest.update_rows src key_columns=["X"] - result.column_names . should_equal ["X", "Y"] - src.column_names . should_equal ["Y", "X"] - expected_rows = [[1, 'd'], [2, 'b'], [3, 'c'], [5, 'e'], [6, 'f']] - rows1 = result.rows.to_vector.map .to_vector - rows1.should_contain_the_same_elements_as expected_rows - - group_builder.specify "should match columns by name, reordering to destination order if needed (Update)" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [3, 2, 1]]] - result = dest.update_rows src update_action=Update_Action.Update key_columns=["X"] - result.column_names . should_equal ["X", "Y"] - src.column_names . 
should_equal ["Y", "X"] - expected_rows = [[1, 'f'], [2, 'e'], [3, 'd']] - rows1 = result.rows.to_vector.map .to_vector - rows1.should_contain_the_same_elements_as expected_rows - - group_builder.specify "should match columns by name, reordering to destination order if needed (Align)" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [2, 1, 6]]] - result = dest.update_rows src update_action=Update_Action.Align_Records key_columns=["X"] - result.column_names . should_equal ["X", "Y"] - src.column_names . should_equal ["Y", "X"] - expected_rows = [[1, 'e'], [2, 'd'], [6, 'f']] - rows1 = result.rows.to_vector.map .to_vector - rows1.should_contain_the_same_elements_as expected_rows - - group_builder.specify "should allow to use a transformed table, with computed fields, as a source" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - t1 = source_table_builder [["Z", [10, 20]], ["Y", ['D', 'E']]] - t2 = source_table_builder [["Z", [20, 10]], ["X", [-99, 10]]] - src = t1.join t2 on=["Z"] join_kind=Join_Kind.Inner . remove_columns "Z" . set "[X] + 100" "X" - src.at "X" . to_vector . should_contain_the_same_elements_as [1, 110] - - r1 = dest.update_rows src key_columns=["X"] - Problems.assume_no_problems r1 - r1.column_names . should_equal ["X", "Y"] - expected_rows = [[1, 'E'], [110, 'D'], [2, 'b'], [3, 'c']] - rows1 = r1.rows.to_vector.map .to_vector - rows1.should_contain_the_same_elements_as expected_rows - - group_builder.specify "should allow specifying no key in Insert mode" <| - dest = target_table_builder [["X", [1, 10, 100]]] - src = source_table_builder [["X", [1, 2, 3]]] - result = dest.update_rows src update_action=Update_Action.Insert key_columns=[] - - expected = [1, 10, 100, 1, 2, 3] - result.column_names . should_equal ["X"] - result.at "X" . to_vector . 
should_contain_the_same_elements_as expected - - r2 = dest.update_rows src update_action=Update_Action.Insert key_columns=Nothing - r2.column_names . should_equal ["X"] - r2.at "X" . to_vector . should_contain_the_same_elements_as expected - - default_key_columns dest . should_equal Nothing - r3 = dest.update_rows src update_action=Update_Action.Insert - r3.column_names . should_equal ["X"] - r3.at "X" . to_vector . should_contain_the_same_elements_as expected - - group_builder.specify "should fail if no key is specified in other modes" <| - dest = target_table_builder [["X", [1, 10, 100]]] - src = source_table_builder [["X", [1, 2, 3]]] - - run_with_and_without_output <| - r1 = dest.update_rows src update_action=Update_Action.Update key_columns=[] - r1.should_fail_with Illegal_Argument - r1.catch.to_display_text.should_contain "`key_columns` must be specified" - - # The default will also fail because no primary key is detected in the DB. - default_key_columns dest . should_equal Nothing - r2 = dest.update_rows src update_action=Update_Action.Update - r2.should_fail_with Illegal_Argument - - r3 = dest.update_rows src update_action=Update_Action.Update_Or_Insert key_columns=[] - r3.should_fail_with Illegal_Argument - - r4 = dest.update_rows src key_columns=[] - r4.should_fail_with Illegal_Argument - - r5 = dest.update_rows src update_action=Update_Action.Align_Records key_columns=[] - r5.should_fail_with Illegal_Argument - - group_builder.specify "should fail if the key is not unique in the input table" <| - d1 = target_table_builder [["X", [0, 10, 100]]] primary_key=["X"] - d2 = target_table_builder [["X", [0, 10, 100]]] - src = source_table_builder [["X", [1, 1, 3]]] - - # Only checks 1000 rows in dry run mode. - run_with_and_without_output <| - # Relying on the default key based on primary key. 
- r1 = d1.update_rows src update_action=Update_Action.Insert - r1.should_fail_with Non_Unique_Key - - r2 = d2.update_rows src key_columns=["X"] update_action=Update_Action.Insert - r2.should_fail_with Non_Unique_Key - - group_builder.specify "will fail if source table contains null keys, unless only Inserting" <| - t1 = target_table_builder [["X", [0, 10, 100]], ["Y", ["a", "b", "c"]]] primary_key=[] - s1 = source_table_builder [["X", [10, Nothing]], ["Y", ["x", "y"]]] - run_with_and_without_output <| - r1 = t1.update_rows s1 key_columns=["X"] update_action=Update_Action.Update_Or_Insert - r1.should_fail_with Null_Values_In_Key_Columns - - r2 = t1.update_rows s1 update_action=Update_Action.Insert key_columns=[] - Problems.assume_no_problems r2 - m2 = r2.read . order_by "Y" - m2.at "Y" . to_vector . should_equal ["a", "b", "c", "x", "y"] - m2.at "X" . to_vector . should_equal [0, 10, 100, 10, Nothing] - - group_builder.specify "should fail if the key causes update of multiple values (it's not unique in the target table)" <| - dest = target_table_builder [["X", [1, 1, 2]], ["Y", ['a', 'b', 'c']]] - src = source_table_builder [["X", [1, 2, 3]], ["Y", ['d', 'e', 'f']]] - - run_with_and_without_output <| - r1 = dest.update_rows src key_columns=["X"] - r1.should_fail_with Multiple_Target_Rows_Matched_For_Update - r1.catch.to_display_text . should_contain "key [1] matched 2 rows" - - src2 = source_table_builder [["X", [1]], ["Y", ['d']]] - run_with_and_without_output <| - r2 = dest.update_rows src2 key_columns=["X"] update_action=Update_Action.Update - r2.should_fail_with Multiple_Target_Rows_Matched_For_Update - - ## In the future we may consider `Align_Records` to remove the - duplicated rows and keep just one of them. But that probably - should not be a default, so maybe only if we introduce a - parameter like `multi_row_update`. 
- r3 = dest.update_rows src key_columns=["X"] update_action=Update_Action.Align_Records - r3.should_fail_with Multiple_Target_Rows_Matched_For_Update - - ## BUT the check should not throw an error if the duplicated key is on an unaffected row! - (here key 1 is duplicated, but we are NOT updating it) - src3 = source_table_builder [["X", [2]], ["Y", ['f']]] - Problems.assume_no_problems <| - dest.update_rows src3 key_columns=["X"] - - group_builder.specify "should fail if the source table contains columns not present in the target (data loss)" <| - dest = target_table_builder [["X", [0, 10, 100]]] primary_key=["X"] - src = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] - run_with_and_without_output <| - r1 = dest.update_rows src key_columns=["X"] - r1.should_fail_with Unmatched_Columns - r1.catch.column_names . should_equal ["Y"] - r1.catch.to_display_text . should_contain "columns were not present" - r1.catch.to_display_text . should_contain "Y" - - group_builder.specify "should use defaults when inserting" <| - dest_name = Name_Generator.random_name "table-defaults" - dest = data.connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "X" Value_Type.Integer] temporary=True primary_key=[] . should_succeed - src = source_table_builder [["X", [1, 2, 3]]] - r1 = dest.update_rows src key_columns=[] update_action=Update_Action.Insert - Problems.assume_no_problems r1 - r1.column_names . 
should_equal ["Y", "X"] - expected_rows = [[42, 1], [42, 2], [42, 3]] - rows1 = r1.rows.to_vector.map .to_vector - rows1.should_contain_the_same_elements_as expected_rows - data.connection.drop_table dest_name - - group_builder.specify "should use defaults when inserting new values in upsert, but retain existing values" <| - dest_name = Name_Generator.random_name "table-defaults" - dest = data.connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Z" Value_Type.Integer] temporary=True primary_key=[] . should_succeed - Problems.assume_no_problems <| - dest.update_rows (Table.from_rows ["X", "Y", "Z"] [[1, 1000, 10]]) key_columns=[] update_action=Update_Action.Insert - - src = source_table_builder [["X", [1, 2, 3]], ["Z", [100, 200, 300]]] - r1 = dest.update_rows src key_columns=["X"] update_action=Update_Action.Update_Or_Insert - Problems.assume_no_problems r1 - r1.column_names . should_equal ["Y", "X", "Z"] - expected_rows = [[1000, 1, 100], [42, 2, 200], [42, 3, 300]] - rows1 = r1.rows.to_vector.map .to_vector - rows1.should_contain_the_same_elements_as expected_rows - data.connection.drop_table dest_name - - group_builder.specify "should use defaults for missing input columns for newly inserted rows when Aligning the tables, but keep existing values for existing rows" <| - dest_name = Name_Generator.random_name "table-defaults-align" - dest = data.connection.create_table dest_name [Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "Z" Value_Type.Integer] temporary=True . should_succeed - initial_data = Table.new [["X", [10, 20]], ["Y", [100, 200]], ["Z", [1000, 2000]]] - dest.update_rows initial_data key_columns=[] update_action=Update_Action.Insert . 
should_succeed - src = source_table_builder [["X", [10, 2, 3]], ["Z", [-1, -2, -3]]] - r1 = dest.update_rows src update_action=Update_Action.Align_Records key_columns=["X"] - Problems.assume_no_problems r1 - r1.column_names . should_equal ["X", "Y", "Z"] - # The X=10 stays with Y=100, but the X=2 is inserted with the default Y=42 - expected_rows = [[10, 100, -1], [2, 42, -2], [3, 42, -3]] - rows1 = r1.rows.to_vector.map .to_vector - rows1.should_contain_the_same_elements_as expected_rows - data.connection.drop_table dest_name - - group_builder.specify "should fail if the source table is missing some columns and the column in the target has no default value" <| - dest_name = Name_Generator.random_name "table-notnull" - dest = data.connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Not_Null], Column_Description.Value "X" Value_Type.Integer] temporary=True primary_key=[] . should_succeed - src = source_table_builder [["X", [1, 2, 3]]] - r1 = dest.update_rows src key_columns=[] update_action=Update_Action.Insert - # We may want a more specific error for missing columns without defaults, but for now it's just a SQL error. - r1.should_fail_with SQL_Error - data.connection.drop_table dest_name - - group_builder.specify "should fail if the source table is missing some columns, if asked to" <| - dest = target_table_builder [["X", [0, 10, 100]], ["Y", ['a', 'b', 'c']]] - src = source_table_builder [["X", [1, 2, 3]]] - run_with_and_without_output <| - r1 = dest.update_rows src error_on_missing_columns=True update_action=Update_Action.Insert key_columns=[] - r1.should_fail_with Missing_Input_Columns - r1.catch.criteria . 
should_equal ["Y"] - - group_builder.specify "should fail if some of key_columns do not exist in either table" <| - d1 = target_table_builder [["X", [0, 10, 100]]] - d2 = target_table_builder [["X", [0, 10, 100]], ["Y", ['a', 'b', 'c']]] - s1 = source_table_builder [["X", [1, 3]]] - s2 = source_table_builder [["X", [1, 3]], ["Y", ['e', 'f']]] - - run_with_and_without_output <| - r1 = d1.update_rows s1 key_columns=["Y"] - r1.should_fail_with Missing_Input_Columns - - r2 = d2.update_rows s1 key_columns=["Y"] - r2.should_fail_with Missing_Input_Columns - - # This may be Missing_Input_Columns or Unmatched_Columns - r3 = d1.update_rows s2 key_columns=["Y"] - r3.should_fail_with Any - ((r3.catch.is_a Missing_Input_Columns) || (r3.catch.is_a Unmatched_Columns)).should_be_true - - group_builder.specify "should fail if the target table does not exist" <| - t = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] - nonexistent_name = Name_Generator.random_name "nonexistent-table" - nonexistent_ref = data.connection.create_table nonexistent_name t - # Dropping the table to make it not exist. - data.connection.drop_table nonexistent_ref.name - - run_with_and_without_output <| - r1 = nonexistent_ref.update_rows t key_columns=[] - r1.should_fail_with Table_Not_Found - - default_key_columns nonexistent_ref . 
should_fail_with Table_Not_Found - r2 = nonexistent_ref.update_rows t - r2.should_fail_with Table_Not_Found - - group_builder.specify "should fail if the target table is in-memory" <| - t = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] - in_memory_table = Table.new [["X", [0]], ["Y", ['_']]] - run_with_and_without_output <| - r1 = in_memory_table.update_rows t key_columns=[] - r1.should_fail_with Illegal_Argument - - r2 = in_memory_table.update_rows t - r2.should_fail_with Illegal_Argument - - group_builder.specify "should warn if type widening occurs" <| - dest = target_table_builder [["X", [3.25, 4.25, 10.0]]] - src = source_table_builder [["X", [1, 2, 0]]] - - # Warning should be present in dry-run mode too! - Context.Output.with_disabled <| - r2 = dest.update_rows src update_action=Update_Action.Insert key_columns=[] - Problems.expect_warning Inexact_Type_Coercion r2 - - # But in dry run the update is not actually performed: - r2.at "X" . to_vector . should_contain_the_same_elements_as [3.25, 4.25, 10.0] - - result = dest.update_rows src update_action=Update_Action.Insert key_columns=[] - warning = Problems.expect_warning Inexact_Type_Coercion result - warning.requested_type.is_integer . should_be_true - warning.actual_type.is_floating_point . should_be_true - - result.column_names . should_equal ["X"] - result.at "X" . to_vector . should_contain_the_same_elements_as [3.25, 4.25, 10.0, 1, 2, 0] - - group_builder.specify "should fail if types of columns are not compatible" <| - dest = target_table_builder [["X", ["a", "B", "c"]]] - src = source_table_builder [["X", [1, 2, 3]]] - - run_with_and_without_output <| - result = dest.update_rows src update_action=Update_Action.Insert key_columns=[] - result.should_fail_with Column_Type_Mismatch - err = result.catch - err.column_name.should_equal "X" - err.expected_type.is_text . should_be_true - err.got_type.is_numeric . 
should_be_true - - dest_name = Name_Generator.random_name "dest-table-test-types" - structure = - x = Column_Description.Value "X" (Value_Type.Integer Bits.Bits_16) - y = Column_Description.Value "Y" (Value_Type.Char size=4 variable_length=True) - z = Column_Description.Value "Z" (Value_Type.Char size=5 variable_length=False) - [x, y, z] - dest = data.connection.create_table dest_name structure temporary=True primary_key=[] - non_trivial_types_supported = - has_warning_or_error = dest.is_error || (Problems.get_attached_warnings dest . not_empty) - has_warning_or_error.not - if non_trivial_types_supported then - src = source_table_builder [["X", [1, 2, 3]], ["Y", ["a", "xyz", "abcdefghijkl"]], ["Z", ["a", "pqrst", "abcdefghijkl"]]] - group_builder.specify "fails if the target type is more restrictive than source" <| - result = dest.update_rows src update_action=Update_Action.Insert key_columns=[] - result.should_fail_with Column_Type_Mismatch - - group_builder.specify "should not leave behind any garbage temporary tables if the upload fails" <| - dest_name = Name_Generator.random_name "dest-table" - # We will make the upload fail by violating the NOT NULL constraint. - dest = data.connection.create_table dest_name [Column_Description.Value "X" Value_Type.Integer [Column_Constraint.Not_Null]] temporary=True primary_key=[] . should_succeed - src = source_table_builder [["X", [1, Nothing, 3]]] - - existing_tables = data.connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector - res = dest.update_rows src update_action=Update_Action.Insert key_columns=[] - res.should_fail_with SQL_Error - - tables_immediately_after = data.connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector - - ## If there are some additional tables, we add some timeout to allow - the database to do the cleaning up. 
- additional_tables = (Set.from_vector tables_immediately_after).difference (Set.from_vector existing_tables) - if additional_tables.is_empty then Nothing else - additional_table = additional_tables.to_vector.first - - wait_until_temporary_table_is_deleted_after_closing_connection data.connection additional_table - # After the wait we check again and now there should be no additional tables. - tables_after_wait = data.connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector - additional_tables_2 = (Set.from_vector tables_after_wait).difference (Set.from_vector existing_tables) - additional_tables_2.to_vector . should_equal [] - - - suite_builder.group prefix+"Appending an in-memory table to a Database table" group_builder-> - test_table_append group_builder in_memory_table_builder (database_table_builder "target-table") + test_table_append group_builder data in_memory_table_builder (database_table_builder "target-table" data=data) group_builder.specify "will issue a friendly error if using in-memory table as target" <| t1 = Table.new [["X", [1, 2, 3]]] @@ -1021,147 +607,19 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = r1.to_display_text.should_contain "in-memory tables are immutable" suite_builder.group prefix+"Appending a Database table to a Database table" group_builder-> - test_table_append group_builder (database_table_builder "source-table") (database_table_builder "target-table") - - test_table_delete group_builder source_table_builder target_table_builder = data = Data.setup make_new_connection - group_builder.specify "should remove rows matching by key_columns" <| - table = target_table_builder [["student_id", [1, 2, 44, 100, 120]], ["first_name", ["Alice", "Bob", "Charlie", "David", "Eve"]], ["score", [100, 100, 44, 100, 120]]] primary_key=["student_id"] - key_values_to_delete = source_table_builder [["student_id", [44, 100]]] - # key columns should automatically be 
discovered by the primary key - affected_rows = table.delete_rows key_values_to_delete - affected_rows . should_equal 2 - table.rows.map .to_vector . should_equal [[1, "Alice", 100], [2, "Bob", 100], [120, "Eve", 120]] - - group_builder.specify "will require key_columns if no default can be used as no primary key is set" <| - table = target_table_builder [["X", [1, 2, 3]]] primary_key=[] - key_values_to_delete = source_table_builder [["X", [1, 2]]] - - run_with_and_without_output <| - r1 = table.delete_rows key_values_to_delete - r1.should_fail_with Illegal_Argument - r1.to_display_text.should_contain "default value" - - r2 = table.delete_rows key_values_to_delete key_columns=["X"] - Problems.assume_no_problems r2 - r2.should_equal 2 - table.at "X" . to_vector . should_equal [3] - - group_builder.specify "does not fail if no rows matching the key_values_to_delete are found" <| - table = target_table_builder [["X", [1, 2, 3]]] primary_key=[] - key_values_to_delete = source_table_builder [["X", [4, 5]]] - r1 = table.delete_rows key_values_to_delete key_columns=["X"] - r1.should_equal 0 - Problems.assume_no_problems r1 - table.at "X" . to_vector . should_equal [1, 2, 3] - - key_values_2 = source_table_builder [["X", [4, 3, 5]]] - r2 = table.delete_rows key_values_2 key_columns=["X"] - r2.should_equal 1 - Problems.assume_no_problems r2 - table.at "X" . to_vector . should_equal [1, 2] - - group_builder.specify "should allow to use multiple columns as key" <| - table = target_table_builder [["X", [1, 2, 2, 3, 4, 4]], ["Y", ['a', 'b', 'c', 'd', 'e', 'f']]] primary_key=[] - keys = source_table_builder [["X", [2, 4]], ["Y", ['b', 'f']]] - affected_rows = table.delete_rows keys key_columns=["X", "Y"] - affected_rows . should_equal 2 - table.rows.map .to_vector . 
should_equal [[1, "a"], [2, "c"], [3, "d"], [4, "e"]] - - group_builder.specify "should fail if key_columns are missing in source or target tables" <| - table = target_table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] primary_key=[] - keys = source_table_builder [["Z", [7, 8]]] - - run_with_and_without_output <| - r1 = table.delete_rows keys key_columns=["Y"] - r1.should_fail_with Missing_Input_Columns - r1.catch.criteria . should_equal ["Y"] - r1.catch.where . should_contain "key values to delete" - - r2 = table.delete_rows keys key_columns=["Z"] - r2.should_fail_with Missing_Input_Columns - r2.catch.criteria . should_equal ["Z"] - r2.catch.where . should_contain "target" - - r3 = table.delete_rows keys key_columns=["neither"] - r3.should_fail_with Missing_Input_Columns - r3.catch.criteria . should_equal ["neither"] - - group_builder.specify "should fail if empty key_columns were provided" <| - table = target_table_builder [["X", [1, 2, 3]]] primary_key=["X"] - keys = source_table_builder [["X", [1, 2]]] - r1 = table.delete_rows keys key_columns=[] - r1.should_fail_with Illegal_Argument - - group_builder.specify "should fail if multiple rows match a single key_values_to_delete row, unless allow_duplicate_matches is set to True" <| - table = target_table_builder [["X", [1, 2, 2, 3, 2]], ["Y", ['a', 'b', 'c', 'd', 'e']]] primary_key=[] - keys = source_table_builder [["X", [2]], ["Y", ['b']]] - - run_with_and_without_output <| - r1 = table.delete_rows keys key_columns=["X"] - r1.should_fail_with Multiple_Target_Rows_Matched_For_Update - r1.catch.example_key . should_equal [2] - r1.catch.example_count . should_equal 3 - # no changes - table.at "X" . to_vector . should_equal [1, 2, 2, 3, 2] - - r2 = table.delete_rows keys key_columns=["X"] allow_duplicate_matches=True - r2.should_equal 3 - table.rows.map .to_vector . 
should_equal [[1, "a"], [3, "d"]] - - group_builder.specify "should fail if the target table does not exist" <| - table = target_table_builder [["X", [1, 2, 3]]] - keys = source_table_builder [["X", [1, 2]]] - data.connection.drop_table table.name - - run_with_and_without_output <| - table.delete_rows keys key_columns=["X"] . should_fail_with Table_Not_Found - table.delete_rows keys . should_fail_with Table_Not_Found - - group_builder.specify "will warn if not all input rows were checked as part of a dry run" <| - target = target_table_builder [["X", [0, 1, 500, 1500, 3500]]] primary_key=["X"] - source = source_table_builder [["X", (1.up_to 2000).to_vector]] primary_key=["X"] - - Context.Output.with_disabled <| - r1 = target.delete_rows source - # Values 1 and 500 are always checked; 1500 may exceed the dry run limit. - [2, 3].should_contain r1 - w1 = Problems.expect_warning Dry_Run_Operation r1 - # If not all rows were checked, a warning is expected: - if r1 == 2 then - w1.to_display_text . should_contain "Only the first 1000 distinct rows out of 1999 were used for the dry run" - - # Target remains unchanged - target.at "X" . to_vector . should_equal [0, 1, 500, 1500, 3500] - - r2 = target.delete_rows source - Problems.assume_no_problems r2 - # All 3 rows were deleted - r2.should_equal 3 - target.at "X" . to_vector . should_equal [0, 3500] - - group_builder.specify "will work fine if the target table contains NULL keys" <| - t1 = target_table_builder [["X", ["a", "b", Nothing, "c"]], ["Y", [1, 2, 3, Nothing]]] - s1 = source_table_builder [["X", ["b", "c"]]] - t1.delete_rows s1 key_columns=["X"] . should_equal 2 - m1 = t1.read . order_by "X" - m1.at "X" . to_vector . should_equal [Nothing, "a"] - m1.at "Y" . to_vector . 
should_equal [3, 1] - - group_builder.specify "will raise an error if they source table contains NULL keys" <| - t2 = target_table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, Nothing]]] - s2 = source_table_builder [["X", ["b", Nothing]], ["f", [10, 20]]] - r1 = t2.delete_rows s2 key_columns=["X"] - r1.should_fail_with Null_Values_In_Key_Columns - r1.catch.to_display_text . should_contain "Nothing values in key columns" - r1.catch.to_display_text . should_contain "[Nothing, 20]" + test_table_append group_builder data (database_table_builder "source-table" data=data) (database_table_builder "target-table" data=data) suite_builder.group prefix+"Deleting rows from a Database table (source=in-memory)" group_builder-> - test_table_delete group_builder in_memory_table_builder (database_table_builder "target-table") + data = Data.setup make_new_connection + + test_table_delete group_builder data in_memory_table_builder (database_table_builder "target-table" data=data) suite_builder.group prefix+"Deleting rows from a Database table (source=Database)" group_builder-> - test_table_delete group_builder (database_table_builder "source-table") (database_table_builder "target-table") + data = Data.setup make_new_connection + + test_table_delete group_builder data (database_table_builder "source-table" data=data) (database_table_builder "target-table" data=data) suite_builder.group prefix+"Deleting rows from a Database table" group_builder-> data = Data.setup make_new_connection @@ -1177,7 +635,7 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = t2.at "X" . to_vector . 
should_equal [] group_builder.specify "should allow to delete rows based on another query" <| - table = database_table_builder "target-table" [["student_id", [1, 2, 44, 100, 120]], ["first_name", ["Alice", "Bob", "Charlie", "David", "Eve"]], ["graduation_year", [2023, 2024, 2021, 2020, 2019]]] primary_key=["student_id"] + table = database_table_builder "target-table" [["student_id", [1, 2, 44, 100, 120]], ["first_name", ["Alice", "Bob", "Charlie", "David", "Eve"]], ["graduation_year", [2023, 2024, 2021, 2020, 2019]]] primary_key=["student_id"] data=data graduates = table.filter "graduation_year" (Filter_Condition.Less 2023) affected_rows = table.delete_rows graduates # uses the primary key by default affected_rows . should_equal 3 @@ -1191,7 +649,7 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = r1.to_display_text.should_contain "in-memory tables are immutable" group_builder.specify "can use itself as source of rows to delete (even if that's an anti-pattern)" <| - t1 = database_table_builder "target-table" [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] + t1 = database_table_builder "target-table" [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] data=data affected_rows = t1.delete_rows t1 affected_rows . should_equal 3 t1.rows.should_equal [] @@ -1299,109 +757,664 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = # The pre-existing table should not have been overwritten. pre_existing_table.at "X" . to_vector . 
should_contain_the_same_elements_as [1, 2, 3] - tests group_builder source_table_builder suffix = - data = Data.setup make_new_connection - group_builder.specify "should return a temporary table with a sample of the data for select_into_database_table"+suffix <| - Context.Output.with_disabled <| - src1 = source_table_builder [["X", [1, 2, 3]]] - name = (Name_Generator.random_name "table-foo2") - r1 = src1.select_into_database_table data.connection name - Problems.expect_only_warning Dry_Run_Operation r1 - r1.column_names . should_equal ["X"] - r1.name . should_not_equal name - # A small table is uploaded whole. - r1.at "X" . to_vector . should_contain_the_same_elements_as [1, 2, 3] - r1.row_count . should_equal 3 - r1.is_trivial_query . should_be_true - - # But a big one will be sampled. - n = 2000 - src2 = source_table_builder [["X", (0.up_to n).to_vector]] - # We re-use the name - multiple dry-runs for the same table name should be allowed without issues. - r2 = src2.select_into_database_table data.connection name - Problems.expect_only_warning Dry_Run_Operation r2 - r2.column_names . should_equal ["X"] - # Only a sample is uploaded. - r2.row_count . should_equal 1000 - r2.is_trivial_query . should_be_true - - group_builder.specify "should return the target table unchanged for update_rows"+suffix <| - dest_data = Table.new [["X", [1, 2, 3]]] - dest = dest_data.select_into_database_table data.connection (Name_Generator.random_name "target-table") temporary=True primary_key=[] - Context.Output.with_disabled <| - src = source_table_builder [["X", [4, 5, 6]]] - r1 = dest.update_rows src update_action=Update_Action.Insert key_columns=[] - Problems.expect_only_warning Dry_Run_Operation r1 - r1.column_names . should_equal ["X"] - # The target table is returned, as usually. - r1.name . should_equal dest.name - # But the data is not appended due to the dry-run - the table is unmodified. - r1.at "X" . to_vector . 
should_contain_the_same_elements_as [1, 2, 3] - r1.is_trivial_query . should_be_true - - group_builder.specify "should return the count of rows that would be deleted for delete_rows, but keep the table unchanged" <| - v = [1, 2, 3, 4, 4, 4, 1] - dest_data = Table.new [["X", v]] - dest = dest_data.select_into_database_table data.connection (Name_Generator.random_name "table-delete-rows-dry") temporary=True primary_key=[] - Context.Output.with_disabled <| - src = source_table_builder [["X", [2, 3]]] - r1 = dest.delete_rows src key_columns=["X"] - # 2 rows would be deleted - r1.should_equal 2 - Problems.expect_only_warning Dry_Run_Operation r1 - - # The target table is unaffected. - dest.at "X" . to_vector . should_equal v - - src2 = source_table_builder [["X", [4]]] - r2 = dest.delete_rows src2 key_columns=["X"] allow_duplicate_matches=True - # 3 rows would be deleted - r2.should_equal 3 - Problems.expect_only_warning Dry_Run_Operation r2 - dest.at "X" . to_vector . should_equal v - - if persistent_connector then - group_builder.specify "will not overwrite an existing table with a dry-run table if the name is clashing (select_into_database_table)"+suffix <| - target_name = Name_Generator.random_name "test-table" - dry_run_name = Context.Output.with_disabled <| - tmp_connection1 = make_new_connection Nothing - src1 = source_table_builder [["A", [1, 2, 3]]] connection=tmp_connection1 - dry_run_table = src1.select_into_database_table tmp_connection1 target_name temporary=True . should_succeed - Problems.expect_only_warning Dry_Run_Operation dry_run_table - dry_run_table.column_names . 
should_equal ["A"] - name = Warning.clear dry_run_table.name - tmp_connection1.close - name + tests group_builder data make_new_connection (in_memory_table_builder) " (from memory)" persistent_connector + tests group_builder data make_new_connection (database_table_builder "ec-tests-table" data=data) " (from Database table)" persistent_connector + + +test_table_append group_builder (data : Data) source_table_builder target_table_builder = + group_builder.specify "should be able to append new rows to a table" <| + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] + src = source_table_builder [["X", [4, 5, 6]], ["Y", ['d', 'e', 'f']]] + + result = dest.update_rows src key_columns=["X"] + result.column_names . should_equal ["X", "Y"] + + result.is_trivial_query . should_be_true + (result == dest) . should_be_true + + expected_rows = [[1, 'a'], [2, 'b'], [3, 'c'], [4, 'd'], [5, 'e'], [6, 'f']] + rows1 = result.rows.to_vector.map .to_vector + rows1.should_contain_the_same_elements_as expected_rows + + group_builder.specify "should error if new rows clash with existing ones and mode is Insert, target table should remain unchanged" <| + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] + src = source_table_builder [["X", [1, 5, 6]], ["Y", ['d', 'e', 'f']]] + + # This is checked in dry-run mode but only for the first 1000 rows. + run_with_and_without_output <| + r1 = dest.update_rows src update_action=Update_Action.Insert key_columns=["X"] + r1.should_fail_with Rows_Already_Present + + group_builder.specify "should use the target table primary key for the key by default" <| + dest1 = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [4, 5, 6]]] primary_key=["Y", "Z"] + default_key_columns dest1 . 
should_equal ["Y", "Z"] + + dest2 = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["Y"] + src = source_table_builder [["X", [4, 5]], ["Y", ['b', 'e']]] + # Not specifying `key_columns`, rely on `default_key_columns` inferring Y as default based on the primary key. + r1 = dest2.update_rows src + rows = r1.rows.to_vector.map .to_vector + rows.should_contain_the_same_elements_as [[1, 'a'], [4, 'b'], [3, 'c'], [5, 'e']] + + group_builder.specify "should be able to Update existing rows in a table" <| + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] + src = source_table_builder [["X", [2]], ["Y", ['ZZZ']]] + + r1 = dest.update_rows src update_action=Update_Action.Update key_columns=["X"] + r1.column_names . should_equal ["X", "Y"] + r1.should_succeed + + rows = dest.rows.to_vector.map .to_vector + rows.should_contain_the_same_elements_as [[1, 'a'], [2, 'ZZZ'], [3, 'c']] + + group_builder.specify "should fail on unmatched rows in Update mode" <| + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] + src = source_table_builder [["X", [2, 100]], ["Y", ['d', 'e']]] + + # In dry run mode this will only check first 1000 rows. + run_with_and_without_output <| + r1 = dest.update_rows src update_action=Update_Action.Update key_columns=["X"] + r1.should_fail_with Unmatched_Rows + + # The table should remain unchanged. + rows = dest.rows.to_vector.map .to_vector + rows.should_contain_the_same_elements_as [[1, 'a'], [2, 'b'], [3, 'c']] + + group_builder.specify "should upsert by default (update existing rows, insert new rows)" <| + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] + src = source_table_builder [["X", [2, 100]], ["Y", ['D', 'E']]] + r1 = dest.update_rows src key_columns=["X"] + Problems.assume_no_problems r1 + r1.column_names . 
should_equal ["X", "Y"] + expected_rows = [[1, 'a'], [2, 'D'], [3, 'c'], [100, 'E']] + rows1 = r1.rows.to_vector.map .to_vector + rows1.should_contain_the_same_elements_as expected_rows + + # The original table is updated too. + rows2 = dest.rows.to_vector.map .to_vector + rows2.should_contain_the_same_elements_as expected_rows + + group_builder.specify "should allow to align an existing table with a source (upsert + delete rows missing from source)" <| + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] + src = source_table_builder [["X", [2, 100]], ["Y", ['D', 'E']]] + r1 = dest.update_rows src update_action=Update_Action.Align_Records key_columns=["X"] + Problems.assume_no_problems r1 + r1.column_names . should_equal ["X", "Y"] + expected_rows = [[2, 'D'], [100, 'E']] + rows1 = r1.rows.to_vector.map .to_vector + rows1.should_contain_the_same_elements_as expected_rows + rows2 = dest.rows.to_vector.map .to_vector + rows2.should_contain_the_same_elements_as expected_rows + + group_builder.specify "should match columns by name, reordering to destination order if needed (Insert)" <| + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] + src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [4, 5, 6]]] + result = dest.update_rows src update_action=Update_Action.Insert key_columns=["X"] + result.column_names . should_equal ["X", "Y"] + src.column_names . 
should_equal ["Y", "X"] + expected_rows = [[1, 'a'], [2, 'b'], [3, 'c'], [4, 'd'], [5, 'e'], [6, 'f']] + rows1 = result.rows.to_vector.map .to_vector + rows1.should_contain_the_same_elements_as expected_rows + + group_builder.specify "should match columns by name, reordering to destination order if needed (Upsert)" <| + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] + src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [1, 5, 6]]] + result = dest.update_rows src key_columns=["X"] + result.column_names . should_equal ["X", "Y"] + src.column_names . should_equal ["Y", "X"] + expected_rows = [[1, 'd'], [2, 'b'], [3, 'c'], [5, 'e'], [6, 'f']] + rows1 = result.rows.to_vector.map .to_vector + rows1.should_contain_the_same_elements_as expected_rows + + group_builder.specify "should match columns by name, reordering to destination order if needed (Update)" <| + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] + src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [3, 2, 1]]] + result = dest.update_rows src update_action=Update_Action.Update key_columns=["X"] + result.column_names . should_equal ["X", "Y"] + src.column_names . should_equal ["Y", "X"] + expected_rows = [[1, 'f'], [2, 'e'], [3, 'd']] + rows1 = result.rows.to_vector.map .to_vector + rows1.should_contain_the_same_elements_as expected_rows + + group_builder.specify "should match columns by name, reordering to destination order if needed (Align)" <| + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] + src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [2, 1, 6]]] + result = dest.update_rows src update_action=Update_Action.Align_Records key_columns=["X"] + result.column_names . should_equal ["X", "Y"] + src.column_names . 
should_equal ["Y", "X"] + expected_rows = [[1, 'e'], [2, 'd'], [6, 'f']] + rows1 = result.rows.to_vector.map .to_vector + rows1.should_contain_the_same_elements_as expected_rows + + group_builder.specify "should allow to use a transformed table, with computed fields, as a source" <| + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] + t1 = source_table_builder [["Z", [10, 20]], ["Y", ['D', 'E']]] + t2 = source_table_builder [["Z", [20, 10]], ["X", [-99, 10]]] + src = t1.join t2 on=["Z"] join_kind=Join_Kind.Inner . remove_columns "Z" . set "[X] + 100" "X" + src.at "X" . to_vector . should_contain_the_same_elements_as [1, 110] + + r1 = dest.update_rows src key_columns=["X"] + Problems.assume_no_problems r1 + r1.column_names . should_equal ["X", "Y"] + expected_rows = [[1, 'E'], [110, 'D'], [2, 'b'], [3, 'c']] + rows1 = r1.rows.to_vector.map .to_vector + rows1.should_contain_the_same_elements_as expected_rows + + group_builder.specify "should allow specifying no key in Insert mode" <| + dest = target_table_builder [["X", [1, 10, 100]]] + src = source_table_builder [["X", [1, 2, 3]]] + result = dest.update_rows src update_action=Update_Action.Insert key_columns=[] + + expected = [1, 10, 100, 1, 2, 3] + result.column_names . should_equal ["X"] + result.at "X" . to_vector . should_contain_the_same_elements_as expected + + r2 = dest.update_rows src update_action=Update_Action.Insert key_columns=Nothing + r2.column_names . should_equal ["X"] + r2.at "X" . to_vector . should_contain_the_same_elements_as expected + + default_key_columns dest . should_equal Nothing + r3 = dest.update_rows src update_action=Update_Action.Insert + r3.column_names . should_equal ["X"] + r3.at "X" . to_vector . 
should_contain_the_same_elements_as expected + + group_builder.specify "should fail if no key is specified in other modes" <| + dest = target_table_builder [["X", [1, 10, 100]]] + src = source_table_builder [["X", [1, 2, 3]]] + + run_with_and_without_output <| + r1 = dest.update_rows src update_action=Update_Action.Update key_columns=[] + r1.should_fail_with Illegal_Argument + r1.catch.to_display_text.should_contain "`key_columns` must be specified" + + # The default will also fail because no primary key is detected in the DB. + default_key_columns dest . should_equal Nothing + r2 = dest.update_rows src update_action=Update_Action.Update + r2.should_fail_with Illegal_Argument + + r3 = dest.update_rows src update_action=Update_Action.Update_Or_Insert key_columns=[] + r3.should_fail_with Illegal_Argument + + r4 = dest.update_rows src key_columns=[] + r4.should_fail_with Illegal_Argument + + r5 = dest.update_rows src update_action=Update_Action.Align_Records key_columns=[] + r5.should_fail_with Illegal_Argument + + group_builder.specify "should fail if the key is not unique in the input table" <| + d1 = target_table_builder [["X", [0, 10, 100]]] primary_key=["X"] + d2 = target_table_builder [["X", [0, 10, 100]]] + src = source_table_builder [["X", [1, 1, 3]]] + + # Only checks 1000 rows in dry run mode. + run_with_and_without_output <| + # Relying on the default key based on primary key. 
+ r1 = d1.update_rows src update_action=Update_Action.Insert + r1.should_fail_with Non_Unique_Key + + r2 = d2.update_rows src key_columns=["X"] update_action=Update_Action.Insert + r2.should_fail_with Non_Unique_Key + + group_builder.specify "will fail if source table contains null keys, unless only Inserting" <| + t1 = target_table_builder [["X", [0, 10, 100]], ["Y", ["a", "b", "c"]]] primary_key=[] + s1 = source_table_builder [["X", [10, Nothing]], ["Y", ["x", "y"]]] + run_with_and_without_output <| + r1 = t1.update_rows s1 key_columns=["X"] update_action=Update_Action.Update_Or_Insert + r1.should_fail_with Null_Values_In_Key_Columns + + r2 = t1.update_rows s1 update_action=Update_Action.Insert key_columns=[] + Problems.assume_no_problems r2 + m2 = r2.read . order_by "Y" + m2.at "Y" . to_vector . should_equal ["a", "b", "c", "x", "y"] + m2.at "X" . to_vector . should_equal [0, 10, 100, 10, Nothing] + + group_builder.specify "should fail if the key causes update of multiple values (it's not unique in the target table)" <| + dest = target_table_builder [["X", [1, 1, 2]], ["Y", ['a', 'b', 'c']]] + src = source_table_builder [["X", [1, 2, 3]], ["Y", ['d', 'e', 'f']]] + + run_with_and_without_output <| + r1 = dest.update_rows src key_columns=["X"] + r1.should_fail_with Multiple_Target_Rows_Matched_For_Update + r1.catch.to_display_text . should_contain "key [1] matched 2 rows" + + src2 = source_table_builder [["X", [1]], ["Y", ['d']]] + run_with_and_without_output <| + r2 = dest.update_rows src2 key_columns=["X"] update_action=Update_Action.Update + r2.should_fail_with Multiple_Target_Rows_Matched_For_Update + + ## In the future we may consider `Align_Records` to remove the + duplicated rows and keep just one of them. But that probably + should not be a default, so maybe only if we introduce a + parameter like `multi_row_update`. 
+ r3 = dest.update_rows src key_columns=["X"] update_action=Update_Action.Align_Records + r3.should_fail_with Multiple_Target_Rows_Matched_For_Update + + ## BUT the check should not throw an error if the duplicated key is on an unaffected row! + (here key 1 is duplicated, but we are NOT updating it) + src3 = source_table_builder [["X", [2]], ["Y", ['f']]] + Problems.assume_no_problems <| + dest.update_rows src3 key_columns=["X"] + + group_builder.specify "should fail if the source table contains columns not present in the target (data loss)" <| + dest = target_table_builder [["X", [0, 10, 100]]] primary_key=["X"] + src = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] + run_with_and_without_output <| + r1 = dest.update_rows src key_columns=["X"] + r1.should_fail_with Unmatched_Columns + r1.catch.column_names . should_equal ["Y"] + r1.catch.to_display_text . should_contain "columns were not present" + r1.catch.to_display_text . should_contain "Y" + + group_builder.specify "should use defaults when inserting" <| + dest_name = Name_Generator.random_name "table-defaults" + dest = data.connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "X" Value_Type.Integer] temporary=True primary_key=[] . should_succeed + src = source_table_builder [["X", [1, 2, 3]]] + r1 = dest.update_rows src key_columns=[] update_action=Update_Action.Insert + Problems.assume_no_problems r1 + r1.column_names . 
should_equal ["Y", "X"] + expected_rows = [[42, 1], [42, 2], [42, 3]] + rows1 = r1.rows.to_vector.map .to_vector + rows1.should_contain_the_same_elements_as expected_rows + data.connection.drop_table dest_name + + group_builder.specify "should use defaults when inserting new values in upsert, but retain existing values" <| + dest_name = Name_Generator.random_name "table-defaults" + dest = data.connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Z" Value_Type.Integer] temporary=True primary_key=[] . should_succeed + Problems.assume_no_problems <| + dest.update_rows (Table.from_rows ["X", "Y", "Z"] [[1, 1000, 10]]) key_columns=[] update_action=Update_Action.Insert + + src = source_table_builder [["X", [1, 2, 3]], ["Z", [100, 200, 300]]] + r1 = dest.update_rows src key_columns=["X"] update_action=Update_Action.Update_Or_Insert + Problems.assume_no_problems r1 + r1.column_names . should_equal ["Y", "X", "Z"] + expected_rows = [[1000, 1, 100], [42, 2, 200], [42, 3, 300]] + rows1 = r1.rows.to_vector.map .to_vector + rows1.should_contain_the_same_elements_as expected_rows + data.connection.drop_table dest_name + + group_builder.specify "should use defaults for missing input columns for newly inserted rows when Aligning the tables, but keep existing values for existing rows" <| + dest_name = Name_Generator.random_name "table-defaults-align" + dest = data.connection.create_table dest_name [Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "Z" Value_Type.Integer] temporary=True . should_succeed + initial_data = Table.new [["X", [10, 20]], ["Y", [100, 200]], ["Z", [1000, 2000]]] + dest.update_rows initial_data key_columns=[] update_action=Update_Action.Insert . 
should_succeed + src = source_table_builder [["X", [10, 2, 3]], ["Z", [-1, -2, -3]]] + r1 = dest.update_rows src update_action=Update_Action.Align_Records key_columns=["X"] + Problems.assume_no_problems r1 + r1.column_names . should_equal ["X", "Y", "Z"] + # The X=10 stays with Y=100, but the X=2 is inserted with the default Y=42 + expected_rows = [[10, 100, -1], [2, 42, -2], [3, 42, -3]] + rows1 = r1.rows.to_vector.map .to_vector + rows1.should_contain_the_same_elements_as expected_rows + data.connection.drop_table dest_name + + group_builder.specify "should fail if the source table is missing some columns and the column in the target has no default value" <| + dest_name = Name_Generator.random_name "table-notnull" + dest = data.connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Not_Null], Column_Description.Value "X" Value_Type.Integer] temporary=True primary_key=[] . should_succeed + src = source_table_builder [["X", [1, 2, 3]]] + r1 = dest.update_rows src key_columns=[] update_action=Update_Action.Insert + # We may want a more specific error for missing columns without defaults, but for now it's just a SQL error. + r1.should_fail_with SQL_Error + data.connection.drop_table dest_name + + group_builder.specify "should fail if the source table is missing some columns, if asked to" <| + dest = target_table_builder [["X", [0, 10, 100]], ["Y", ['a', 'b', 'c']]] + src = source_table_builder [["X", [1, 2, 3]]] + run_with_and_without_output <| + r1 = dest.update_rows src error_on_missing_columns=True update_action=Update_Action.Insert key_columns=[] + r1.should_fail_with Missing_Input_Columns + r1.catch.criteria . 
should_equal ["Y"] + + group_builder.specify "should fail if some of key_columns do not exist in either table" <| + d1 = target_table_builder [["X", [0, 10, 100]]] + d2 = target_table_builder [["X", [0, 10, 100]], ["Y", ['a', 'b', 'c']]] + s1 = source_table_builder [["X", [1, 3]]] + s2 = source_table_builder [["X", [1, 3]], ["Y", ['e', 'f']]] + + run_with_and_without_output <| + r1 = d1.update_rows s1 key_columns=["Y"] + r1.should_fail_with Missing_Input_Columns + + r2 = d2.update_rows s1 key_columns=["Y"] + r2.should_fail_with Missing_Input_Columns + + # This may be Missing_Input_Columns or Unmatched_Columns + r3 = d1.update_rows s2 key_columns=["Y"] + r3.should_fail_with Any + ((r3.catch.is_a Missing_Input_Columns) || (r3.catch.is_a Unmatched_Columns)).should_be_true + + group_builder.specify "should fail if the target table does not exist" <| + t = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] + nonexistent_name = Name_Generator.random_name "nonexistent-table" + nonexistent_ref = data.connection.create_table nonexistent_name t + # Dropping the table to make it not exist. + data.connection.drop_table nonexistent_ref.name + + run_with_and_without_output <| + r1 = nonexistent_ref.update_rows t key_columns=[] + r1.should_fail_with Table_Not_Found + + default_key_columns nonexistent_ref . 
should_fail_with Table_Not_Found + r2 = nonexistent_ref.update_rows t + r2.should_fail_with Table_Not_Found + + group_builder.specify "should fail if the target table is in-memory" <| + t = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] + in_memory_table = Table.new [["X", [0]], ["Y", ['_']]] + run_with_and_without_output <| + r1 = in_memory_table.update_rows t key_columns=[] + r1.should_fail_with Illegal_Argument + + r2 = in_memory_table.update_rows t + r2.should_fail_with Illegal_Argument + + group_builder.specify "should warn if type widening occurs" <| + dest = target_table_builder [["X", [3.25, 4.25, 10.0]]] + src = source_table_builder [["X", [1, 2, 0]]] + + # Warning should be present in dry-run mode too! + Context.Output.with_disabled <| + r2 = dest.update_rows src update_action=Update_Action.Insert key_columns=[] + Problems.expect_warning Inexact_Type_Coercion r2 + + # But in dry run the update is not actually performed: + r2.at "X" . to_vector . should_contain_the_same_elements_as [3.25, 4.25, 10.0] + + result = dest.update_rows src update_action=Update_Action.Insert key_columns=[] + warning = Problems.expect_warning Inexact_Type_Coercion result + warning.requested_type.is_integer . should_be_true + warning.actual_type.is_floating_point . should_be_true + + result.column_names . should_equal ["X"] + result.at "X" . to_vector . should_contain_the_same_elements_as [3.25, 4.25, 10.0, 1, 2, 0] + + group_builder.specify "should fail if types of columns are not compatible" <| + dest = target_table_builder [["X", ["a", "B", "c"]]] + src = source_table_builder [["X", [1, 2, 3]]] + + run_with_and_without_output <| + result = dest.update_rows src update_action=Update_Action.Insert key_columns=[] + result.should_fail_with Column_Type_Mismatch + err = result.catch + err.column_name.should_equal "X" + err.expected_type.is_text . should_be_true + err.got_type.is_numeric . 
should_be_true + + group_builder.specify "fails if the target type is more restrictive than source" <| + src = source_table_builder [["X", [1, 2, 3]], ["Y", ["a", "xyz", "abcdefghijkl"]], ["Z", ["a", "pqrst", "abcdefghijkl"]]] + dest_name = Name_Generator.random_name "dest-table-test-types" + structure = + x = Column_Description.Value "X" (Value_Type.Integer Bits.Bits_16) + y = Column_Description.Value "Y" (Value_Type.Char size=4 variable_length=True) + z = Column_Description.Value "Z" (Value_Type.Char size=5 variable_length=False) + [x, y, z] + dest = data.connection.create_table dest_name structure temporary=True primary_key=[] + non_trivial_types_supported = + has_warning_or_error = dest.is_error || (Problems.get_attached_warnings dest . not_empty) + has_warning_or_error.not + case non_trivial_types_supported of + False -> Nothing # Skip the test + True -> + result = dest.update_rows src update_action=Update_Action.Insert key_columns=[] + result.should_fail_with Column_Type_Mismatch + + group_builder.specify "should not leave behind any garbage temporary tables if the upload fails" <| + dest_name = Name_Generator.random_name "dest-table" + # We will make the upload fail by violating the NOT NULL constraint. + dest = data.connection.create_table dest_name [Column_Description.Value "X" Value_Type.Integer [Column_Constraint.Not_Null]] temporary=True primary_key=[] . should_succeed + src = source_table_builder [["X", [1, Nothing, 3]]] + + existing_tables = data.connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector + res = dest.update_rows src update_action=Update_Action.Insert key_columns=[] + res.should_fail_with SQL_Error + + tables_immediately_after = data.connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector + + ## If there are some additional tables, we add some timeout to allow + the database to do the cleaning up. 
+ additional_tables = (Set.from_vector tables_immediately_after).difference (Set.from_vector existing_tables) + if additional_tables.is_empty then Nothing else + additional_table = additional_tables.to_vector.first + + wait_until_temporary_table_is_deleted_after_closing_connection data.connection additional_table + # After the wait we check again and now there should be no additional tables. + tables_after_wait = data.connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector + additional_tables_2 = (Set.from_vector tables_after_wait).difference (Set.from_vector existing_tables) + additional_tables_2.to_vector . should_equal [] + + +test_table_delete group_builder (data : Data) source_table_builder target_table_builder = + group_builder.specify "should remove rows matching by key_columns" <| + table = target_table_builder [["student_id", [1, 2, 44, 100, 120]], ["first_name", ["Alice", "Bob", "Charlie", "David", "Eve"]], ["score", [100, 100, 44, 100, 120]]] primary_key=["student_id"] + key_values_to_delete = source_table_builder [["student_id", [44, 100]]] + # key columns should automatically be discovered by the primary key + affected_rows = table.delete_rows key_values_to_delete + affected_rows . should_equal 2 + table.rows.map .to_vector . should_equal [[1, "Alice", 100], [2, "Bob", 100], [120, "Eve", 120]] + + group_builder.specify "will require key_columns if no default can be used as no primary key is set" <| + table = target_table_builder [["X", [1, 2, 3]]] primary_key=[] + key_values_to_delete = source_table_builder [["X", [1, 2]]] + + run_with_and_without_output <| + r1 = table.delete_rows key_values_to_delete + r1.should_fail_with Illegal_Argument + r1.to_display_text.should_contain "default value" + + r2 = table.delete_rows key_values_to_delete key_columns=["X"] + Problems.assume_no_problems r2 + r2.should_equal 2 + table.at "X" . to_vector . 
should_equal [3] + + group_builder.specify "does not fail if no rows matching the key_values_to_delete are found" <| + table = target_table_builder [["X", [1, 2, 3]]] primary_key=[] + key_values_to_delete = source_table_builder [["X", [4, 5]]] + r1 = table.delete_rows key_values_to_delete key_columns=["X"] # Error: Unresolved method `delete_rows` + r1.should_equal 0 + Problems.assume_no_problems r1 + table.at "X" . to_vector . should_equal [1, 2, 3] + + key_values_2 = source_table_builder [["X", [4, 3, 5]]] + r2 = table.delete_rows key_values_2 key_columns=["X"] + r2.should_equal 1 + Problems.assume_no_problems r2 + table.at "X" . to_vector . should_equal [1, 2] + + group_builder.specify "should allow to use multiple columns as key" <| + table = target_table_builder [["X", [1, 2, 2, 3, 4, 4]], ["Y", ['a', 'b', 'c', 'd', 'e', 'f']]] primary_key=[] + keys = source_table_builder [["X", [2, 4]], ["Y", ['b', 'f']]] + affected_rows = table.delete_rows keys key_columns=["X", "Y"] + affected_rows . should_equal 2 + table.rows.map .to_vector . should_equal [[1, "a"], [2, "c"], [3, "d"], [4, "e"]] + + group_builder.specify "should fail if key_columns are missing in source or target tables" <| + table = target_table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] primary_key=[] + keys = source_table_builder [["Z", [7, 8]]] + + run_with_and_without_output <| + r1 = table.delete_rows keys key_columns=["Y"] + r1.should_fail_with Missing_Input_Columns + r1.catch.criteria . should_equal ["Y"] + r1.catch.where . should_contain "key values to delete" + + r2 = table.delete_rows keys key_columns=["Z"] + r2.should_fail_with Missing_Input_Columns + r2.catch.criteria . should_equal ["Z"] + r2.catch.where . should_contain "target" + + r3 = table.delete_rows keys key_columns=["neither"] + r3.should_fail_with Missing_Input_Columns + r3.catch.criteria . 
should_equal ["neither"] + + group_builder.specify "should fail if empty key_columns were provided" <| + table = target_table_builder [["X", [1, 2, 3]]] primary_key=["X"] + keys = source_table_builder [["X", [1, 2]]] + r1 = table.delete_rows keys key_columns=[] + r1.should_fail_with Illegal_Argument + + group_builder.specify "should fail if multiple rows match a single key_values_to_delete row, unless allow_duplicate_matches is set to True" <| + table = target_table_builder [["X", [1, 2, 2, 3, 2]], ["Y", ['a', 'b', 'c', 'd', 'e']]] primary_key=[] + keys = source_table_builder [["X", [2]], ["Y", ['b']]] + + run_with_and_without_output <| + r1 = table.delete_rows keys key_columns=["X"] + r1.should_fail_with Multiple_Target_Rows_Matched_For_Update + r1.catch.example_key . should_equal [2] + r1.catch.example_count . should_equal 3 + # no changes + table.at "X" . to_vector . should_equal [1, 2, 2, 3, 2] + + r2 = table.delete_rows keys key_columns=["X"] allow_duplicate_matches=True + r2.should_equal 3 + table.rows.map .to_vector . should_equal [[1, "a"], [3, "d"]] + + group_builder.specify "should fail if the target table does not exist" <| + table = target_table_builder [["X", [1, 2, 3]]] + keys = source_table_builder [["X", [1, 2]]] + data.connection.drop_table table.name + + run_with_and_without_output <| + table.delete_rows keys key_columns=["X"] . should_fail_with Table_Not_Found + table.delete_rows keys . should_fail_with Table_Not_Found + + group_builder.specify "will warn if not all input rows were checked as part of a dry run" <| + target = target_table_builder [["X", [0, 1, 500, 1500, 3500]]] primary_key=["X"] + source = source_table_builder [["X", (1.up_to 2000).to_vector]] primary_key=["X"] + + Context.Output.with_disabled <| + r1 = target.delete_rows source + # Values 1 and 500 are always checked; 1500 may exceed the dry run limit. 
+ [2, 3].should_contain r1 + w1 = Problems.expect_warning Dry_Run_Operation r1 + # If not all rows were checked, a warning is expected: + if r1 == 2 then + w1.to_display_text . should_contain "Only the first 1000 distinct rows out of 1999 were used for the dry run" + + # Target remains unchanged + target.at "X" . to_vector . should_equal [0, 1, 500, 1500, 3500] + + r2 = target.delete_rows source + Problems.assume_no_problems r2 + # All 3 rows were deleted + r2.should_equal 3 + target.at "X" . to_vector . should_equal [0, 3500] + + group_builder.specify "will work fine if the target table contains NULL keys" <| + t1 = target_table_builder [["X", ["a", "b", Nothing, "c"]], ["Y", [1, 2, 3, Nothing]]] + s1 = source_table_builder [["X", ["b", "c"]]] + t1.delete_rows s1 key_columns=["X"] . should_equal 2 + m1 = t1.read . order_by "X" + m1.at "X" . to_vector . should_equal [Nothing, "a"] + m1.at "Y" . to_vector . should_equal [3, 1] + + group_builder.specify "will raise an error if they source table contains NULL keys" <| + t2 = target_table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, Nothing]]] + s2 = source_table_builder [["X", ["b", Nothing]], ["f", [10, 20]]] + r1 = t2.delete_rows s2 key_columns=["X"] + r1.should_fail_with Null_Values_In_Key_Columns + r1.catch.to_display_text . should_contain "Nothing values in key columns" + r1.catch.to_display_text . should_contain "[Nothing, 20]" + + +tests group_builder (data : Data) make_new_connection source_table_builder (suffix : Text) persistent_connector = + group_builder.specify "should return a temporary table with a sample of the data for select_into_database_table"+suffix <| + Context.Output.with_disabled <| + src1 = source_table_builder [["X", [1, 2, 3]]] + name = (Name_Generator.random_name "table-foo2") + r1 = src1.select_into_database_table data.connection name + Problems.expect_only_warning Dry_Run_Operation r1 + r1.column_names . should_equal ["X"] + r1.name . 
should_not_equal name + # A small table is uploaded whole. + r1.at "X" . to_vector . should_contain_the_same_elements_as [1, 2, 3] + r1.row_count . should_equal 3 + r1.is_trivial_query . should_be_true + + # But a big one will be sampled. + n = 2000 + src2 = source_table_builder [["X", (0.up_to n).to_vector]] + # We re-use the name - multiple dry-runs for the same table name should be allowed without issues. + r2 = src2.select_into_database_table data.connection name + Problems.expect_only_warning Dry_Run_Operation r2 + r2.column_names . should_equal ["X"] + # Only a sample is uploaded. + r2.row_count . should_equal 1000 + r2.is_trivial_query . should_be_true + + group_builder.specify "should return the target table unchanged for update_rows"+suffix <| + dest_data = Table.new [["X", [1, 2, 3]]] + dest = dest_data.select_into_database_table data.connection (Name_Generator.random_name "target-table") temporary=True primary_key=[] + Context.Output.with_disabled <| + src = source_table_builder [["X", [4, 5, 6]]] + r1 = dest.update_rows src update_action=Update_Action.Insert key_columns=[] + Problems.expect_only_warning Dry_Run_Operation r1 + r1.column_names . should_equal ["X"] + # The target table is returned, as usually. + r1.name . should_equal dest.name + # But the data is not appended due to the dry-run - the table is unmodified. + r1.at "X" . to_vector . should_contain_the_same_elements_as [1, 2, 3] + r1.is_trivial_query . 
should_be_true + + group_builder.specify "should return the count of rows that would be deleted for delete_rows, but keep the table unchanged"+suffix <| + v = [1, 2, 3, 4, 4, 4, 1] + dest_data = Table.new [["X", v]] + dest = dest_data.select_into_database_table data.connection (Name_Generator.random_name "table-delete-rows-dry") temporary=True primary_key=[] + Context.Output.with_disabled <| + src = source_table_builder [["X", [2, 3]]] + r1 = dest.delete_rows src key_columns=["X"] + # 2 rows would be deleted + r1.should_equal 2 + Problems.expect_only_warning Dry_Run_Operation r1 + + # The target table is unaffected. + dest.at "X" . to_vector . should_equal v + + src2 = source_table_builder [["X", [4]]] + r2 = dest.delete_rows src2 key_columns=["X"] allow_duplicate_matches=True + # 3 rows would be deleted + r2.should_equal 3 + Problems.expect_only_warning Dry_Run_Operation r2 + dest.at "X" . to_vector . should_equal v + + if persistent_connector then + group_builder.specify "will not overwrite an existing table with a dry-run table if the name is clashing (select_into_database_table)"+suffix <| + target_name = Name_Generator.random_name "test-table" + dry_run_name = Context.Output.with_disabled <| + tmp_connection1 = make_new_connection Nothing + src1 = source_table_builder [["A", [1, 2, 3]]] connection=tmp_connection1 + dry_run_table = src1.select_into_database_table tmp_connection1 target_name temporary=True . should_succeed + Problems.expect_only_warning Dry_Run_Operation dry_run_table + dry_run_table.column_names . should_equal ["A"] + name = Warning.clear dry_run_table.name + tmp_connection1.close + name + + wait_until_temporary_table_is_deleted_after_closing_connection data.connection dry_run_name + + pre_existing_src = Table.new [["X", [4, 5, 6]]] + # Create a table that has the same name as the dry run table normally would have. + pre_existing_table = pre_existing_src.select_into_database_table data.connection dry_run_name temporary=False . 
should_succeed + pre_existing_table.column_names . should_equal ["X"] + pre_existing_table.at "X" . to_vector . should_contain_the_same_elements_as [4, 5, 6] + Panic.with_finalizer (data.connection.drop_table pre_existing_table.name if_exists=True) <| + new_dry_run_name = Context.Output.with_disabled <| + tmp_connection2 = make_new_connection Nothing + src3 = source_table_builder [["B", [7, 8, 9]]] connection=tmp_connection2 + # Create a dry run table that is supposed to clash with pre_existing_table + dry_run_table = src3.select_into_database_table tmp_connection2 target_name temporary=True . should_succeed + Problems.expect_warning Dry_Run_Operation dry_run_table + dry_run_table.column_names . should_equal ["B"] + dry_run_table.at "B" . to_vector . should_contain_the_same_elements_as [7, 8, 9] + name = Warning.clear dry_run_table.name + tmp_connection2.close + name + + # Ensure that the created dry run table changed the name to avoid clash. + new_dry_run_name . should_not_equal dry_run_name + + # The pre-existing table should not have been overwritten. + pre_existing_table.at "X" . to_vector . should_contain_the_same_elements_as [4, 5, 6] - wait_until_temporary_table_is_deleted_after_closing_connection data.connection dry_run_name - - pre_existing_src = Table.new [["X", [4, 5, 6]]] - # Create a table that has the same name as the dry run table normally would have. - pre_existing_table = pre_existing_src.select_into_database_table data.connection dry_run_name temporary=False . should_succeed - pre_existing_table.column_names . should_equal ["X"] - pre_existing_table.at "X" . to_vector . 
should_contain_the_same_elements_as [4, 5, 6] - Panic.with_finalizer (data.connection.drop_table pre_existing_table.name if_exists=True) <| - new_dry_run_name = Context.Output.with_disabled <| - tmp_connection2 = make_new_connection Nothing - src3 = source_table_builder [["B", [7, 8, 9]]] connection=tmp_connection2 - # Create a dry run table that is supposed to clash with pre_existing_table - dry_run_table = src3.select_into_database_table tmp_connection2 target_name temporary=True . should_succeed - Problems.expect_warning Dry_Run_Operation dry_run_table - dry_run_table.column_names . should_equal ["B"] - dry_run_table.at "B" . to_vector . should_contain_the_same_elements_as [7, 8, 9] - name = Warning.clear dry_run_table.name - tmp_connection2.close - name - - # Ensure that the created dry run table changed the name to avoid clash. - new_dry_run_name . should_not_equal dry_run_name - - # The pre-existing table should not have been overwritten. - pre_existing_table.at "X" . to_vector . should_contain_the_same_elements_as [4, 5, 6] - - tests group_builder (in_memory_table_builder) " (from memory)" - tests group_builder (database_table_builder "ec-tests-table") " (from Database table)" ## PRIVATE Creates a mock column containing `values`. 
@@ -1411,6 +1424,7 @@ make_mock_column name values exploding_index = storage = ExplodingStorage.new values exploding_index Column.from_storage name storage + ## PRIVATE cleanup_sentinel ref _ = ref.put True From 9f0da9dd0ba409f10c118e37e4868c65111a8d29 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 10:40:42 +0100 Subject: [PATCH 35/93] Copy more sources from Test into Test_New --- .../src/Execution_Context_Helpers.enso | 15 +++++++++++++ .../0.0.0-dev/src/Test_Environment.enso | 22 +++++++++++++++++++ 2 files changed, 37 insertions(+) create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Execution_Context_Helpers.enso create mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Environment.enso diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Execution_Context_Helpers.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Execution_Context_Helpers.enso new file mode 100644 index 000000000000..9bdd633eef76 --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Execution_Context_Helpers.enso @@ -0,0 +1,15 @@ +from Standard.Base import all +import Standard.Base.Runtime.Context + +import project.Test.Test + +## PRIVATE + Runs the action twice, once with the Output context enabled and once with it + disabled, to check that the behaviour is the same regardless of context. 
+run_with_and_without_output ~action = + Context.Output.with_enabled <| + Test.with_clue "(normal mode - Output context enabled) " <| + action + Context.Output.with_disabled <| + Test.with_clue "(dry run - Output context disabled) " <| + action diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Environment.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Environment.enso new file mode 100644 index 000000000000..369605e4ad62 --- /dev/null +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Environment.enso @@ -0,0 +1,22 @@ +from Standard.Base import all + +polyglot java import org.enso.base.Environment_Utils + +## ADVANCED + UNSTABLE + + Runs a given action with an environment variable modified to a given value. + The environment variable is restored to its original value after the action. + The environment variable override is only visible to the Enso + `Environment.get` method, the environment as seen from a direct + `System.getenv` Java call remains unchanged. +unsafe_with_environment_override : Text -> Text -> Any -> Any +unsafe_with_environment_override key value ~action = + ## This has to be done in Enso, not in Java, due to the bug: https://github.com/enso-org/enso/issues/7117 + If done in Java, Enso test functions do not work correctly, because they cannot access State. 
+ old_value = Environment_Utils.getOverride key + restore_previous = + if old_value.is_nothing then Environment_Utils.removeOverride key else Environment_Utils.setOverride key old_value + Panic.with_finalizer restore_previous <| + Environment_Utils.setOverride key value + action From a0a0a3a4e37078fa20b9a389e854ddf704908120 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 10:41:39 +0100 Subject: [PATCH 36/93] Fix some typos causing runtime errors --- .../src/Common_Table_Operations/Core_Spec.enso | 2 +- test/Table_Tests/src/Common_Table_Operations/Util.enso | 7 +++---- test/Table_Tests/src/Database/SQLite_Spec.enso | 10 +++++----- 3 files changed, 9 insertions(+), 10 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso index f79c0e6cc3bd..3c4c82a8c614 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso @@ -348,7 +348,7 @@ add_specs suite_builder setup = Problems.assume_no_problems r5 if setup.is_database then group_builder.specify "should allow similar API on Connection.read" <| - connection = setup.connection + connection = setup.connection_fn Nothing connection.query t_big.name . row_count . 
should_equal 1500 t1 = connection.read t_big.name diff --git a/test/Table_Tests/src/Common_Table_Operations/Util.enso b/test/Table_Tests/src/Common_Table_Operations/Util.enso index 027f4fd2f278..2848feb15bb6 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Util.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Util.enso @@ -1,7 +1,6 @@ from Standard.Base import all -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all import project.In_Memory.Common_Spec as In_Memory_Table_Spec @@ -11,8 +10,8 @@ expect_column_names names table = ## These tests are parametrized by various backends and so they should be run in context of a specific backend. However, for the purpose of testing we provide a shortcut that allows to run these tests with the in-memory backend. -run_default_backend spec = - Test_Suite.run_main (In_Memory_Table_Spec.run_common_spec spec) +run_default_backend suite = + Panic.throw "Unimplemented" ## Adds a clue which will display the provided table next to the failed test description. diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 4a8030aa0d34..066ebc4aa5df 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -48,7 +48,7 @@ type Metadata_Data row3 = ["def", 42, True, 1.4] Panic.rethrow <| t.update_rows (Table.from_rows ["strs", "ints", "bools", "reals"] [row1, row2, row3]) update_action=Update_Action.Insert - Data.Value [connection, tinfo, t] + Metadata_Data.Value [connection, tinfo, t] type Tables_And_Table_Types_Data Value ~data @@ -378,26 +378,26 @@ suite = Test.build suite_builder-> Auto_Detect.get_writing_format (enso_project.data / "nonexistent-data.sqlite") . 
should_be_a SQLite_Format group_builder.specify "should not recognise nonexistent or empty files for reading" <| - r1 = Data.read (enso_project.data / "nonexistent-data.db") + r1 = data.read (enso_project.data / "nonexistent-data.db") r1.should_fail_with File_Error r1.catch . should_be_a File_Error.Not_Found empty = enso_project.data / "transient" / "empty-data.db" "".write empty on_existing_file=Existing_File_Behavior.Overwrite . should_succeed - r2 = Data.read empty + r2 = data.read empty r2.should_fail_with File_Error r2.catch . should_be_a File_Error.Unsupported_Type empty.delete_if_exists broken = enso_project.data / "transient" / "empty-data.db" "SOME_RANDOM_DATA".write empty on_existing_file=Existing_File_Behavior.Overwrite . should_succeed - r3 = Data.read broken + r3 = data.read broken r3.should_fail_with File_Error r3.catch . should_be_a File_Error.Unsupported_Type broken.delete_if_exists group_builder.specify "should connect to a db file" <| - connection = Data.read data.file + connection = data.read data.file tables = connection.tables tables.row_count . should_not_equal 0 connection.close From 8789d7b31626cffd84383b0df1335a0c37e2c75d Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 12:09:44 +0100 Subject: [PATCH 37/93] Test_Setup.Config.table_builder accepts connection parameter. 
This fixes a lot of errors with "Tables coming from different connections" --- .../Aggregate_Spec.enso | 73 ++++--- .../Column_Name_Edge_Cases_Spec.enso | 27 ++- .../Derived_Columns_Spec.enso | 60 +++--- .../Common_Table_Operations/Filter_Spec.enso | 77 ++++--- .../Join/Cross_Join_Spec.enso | 46 ++-- .../Join/Join_Spec.enso | 199 ++++++++++-------- .../src/Common_Table_Operations/Main.enso | 5 +- .../src/Common_Table_Operations/Util.enso | 1 + .../Table_Tests/src/Database/SQLite_Spec.enso | 6 +- 9 files changed, 283 insertions(+), 211 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso index d28cca666d4d..bbfabcedae0f 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso @@ -20,18 +20,21 @@ type Test_Selection type Data Value ~data - table self = self.data.at 0 - empty_table self = self.data.at 1 + connection self = self.data.at 0 + table self = self.data.at 1 + empty_table self = self.data.at 2 - setup table_fn empty_table_fn = Data.Value <| + setup create_connection_fn table_fn empty_table_fn = Data.Value <| + connection = create_connection_fn Nothing table = table_fn Nothing empty_table = empty_table_fn Nothing - [table, empty_table] + [connection, table, empty_table] ## Runs the common aggregate tests. add_specs suite_builder setup = prefix = setup.prefix + create_connection_fn = setup.create_connection_func table_fn = setup.table_fn empty_table_fn = setup.empty_table_fn table_builder = setup.table_builder @@ -51,7 +54,7 @@ add_specs suite_builder setup = if enabled_flag.not then "Not supported." 
else Nothing suite_builder.group prefix+"Table.aggregate should summarize whole table" group_builder-> - data = Data.setup table_fn empty_table_fn + data = Data.setup create_connection_fn table_fn empty_table_fn group_builder.specify "should be able to count" <| grouped = data.table.aggregate [Count] @@ -217,7 +220,7 @@ add_specs suite_builder setup = materialized.columns.at 0 . at 0 . length . should_equal 7500 suite_builder.group prefix+"Table.aggregate should summarize empty table" group_builder-> - data = Data.setup table_fn empty_table_fn + data = Data.setup create_connection_fn table_fn empty_table_fn group_builder.specify "should be able to count" <| grouped = data.empty_table.aggregate [Count] @@ -342,7 +345,7 @@ add_specs suite_builder setup = materialized.columns.at 0 . at 0 . should_equal Nothing suite_builder.group prefix+"Table.aggregate should not summarize empty table when grouped" group_builder-> - data = Data.setup table_fn empty_table_fn + data = Data.setup create_connection_fn table_fn empty_table_fn group_builder.specify "should be able to count" <| grouped = data.empty_table.aggregate [Group_By 0, Count] @@ -455,7 +458,7 @@ add_specs suite_builder setup = materialized.columns.at 1 . name . should_equal "Concatenate Code" suite_builder.group prefix+"Table.aggregate should be able to group on single field" group_builder-> - data = Data.setup table_fn empty_table_fn + data = Data.setup create_connection_fn table_fn empty_table_fn group_builder.specify "should be able to count" <| grouped = data.table.aggregate [Group_By "Index", Count] @@ -659,7 +662,7 @@ add_specs suite_builder setup = materialized.columns.at 1 . at idx . length . 
should_equal 783 suite_builder.group prefix+"Table.aggregate should be able to group on multiple fields not in left columns" group_builder-> - data = Data.setup table_fn empty_table_fn + data = Data.setup create_connection_fn table_fn empty_table_fn group_builder.specify "should be able to count" <| grouped = data.table.aggregate [Group_By "Flag", Count, Group_By "Index"] @@ -889,8 +892,10 @@ add_specs suite_builder setup = materialized.columns.at 1 . to_vector . should_equal ["f"] suite_builder.group prefix+"Table.aggregate Concatenate" (pending = resolve_pending test_selection.text_concat) group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + group_builder.specify "should insert the separator, add prefix and suffix" <| - table = table_builder [["A", ["foo", "bar", "foo", "foo"]], ["B", ["a", "b", "c", "d"]]] + table = table_builder [["A", ["foo", "bar", "foo", "foo"]], ["B", ["a", "b", "c", "d"]]] connection=data.connection result = table.aggregate [Group_By "A", (Concatenate "B" prefix="[[" suffix="]]" separator="; ")] result.row_count . should_equal 2 materialized = materialize result . order_by ([Sort_Column.Name "A"]) @@ -902,7 +907,7 @@ add_specs suite_builder setup = materialized.columns.at 1 . to_vector . should_equal ["[[b]]", "[[a; c; d]]"] group_builder.specify "should correctly escape separator and quote characters but only if necessary" <| - table = table_builder [["A", ["1,0", "b", "'c", "''", ","]]] + table = table_builder [["A", ["1,0", "b", "'c", "''", ","]]] connection=data.connection result = table.aggregate [(Concatenate "A" prefix="[[" suffix="]]" separator="," quote_char="'")] result.row_count . should_equal 1 materialized = materialize result @@ -912,7 +917,7 @@ add_specs suite_builder setup = materialized.columns.at 0 . to_vector . 
should_equal ["[['1,0',b,'''c','''''',',']]"] group_builder.specify "should correctly handle missing values and empty values with quote character" <| - table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] + table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] connection=data.connection result = table.aggregate [(Concatenate "A" prefix="[[" suffix="]]" separator="," quote_char="'")] result.row_count . should_equal 1 materialized = materialize result @@ -922,7 +927,7 @@ add_specs suite_builder setup = materialized.columns.at 0 . to_vector . should_equal ["[['1,0',A,'','',B,,,C]]"] group_builder.specify "will not be able to distinguish missing values from empty values without quote character" <| - table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] + table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] connection=data.connection result = table.aggregate [(Concatenate "A" prefix="[[" suffix="]]" separator=",")] result.row_count . should_equal 1 materialized = materialize result @@ -937,7 +942,7 @@ add_specs suite_builder setup = materialized.columns.at 0 . to_vector . should_equal ["[[1,0,A,,,B,,,C]]"] group_builder.specify "should work with empty separator" <| - table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] + table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] connection=data.connection result = table.aggregate [(Concatenate "A")] result.row_count . should_equal 1 materialized = materialize result @@ -947,7 +952,7 @@ add_specs suite_builder setup = materialized.columns.at 0 . to_vector . 
should_equal ["1,0ABC"] group_builder.specify "should work with empty separator but non-empty quote" <| - table = table_builder [["A", ["1'0", "A", "", "", "B", Nothing, Nothing, "C"]]] + table = table_builder [["A", ["1'0", "A", "", "", "B", Nothing, Nothing, "C"]]] connection=data.connection result = table.aggregate [(Concatenate "A" quote_char="'")] result.row_count . should_equal 1 materialized = materialize result @@ -957,6 +962,8 @@ add_specs suite_builder setup = materialized.columns.at 0 . to_vector . should_equal ["'1''0'A''''BC"] suite_builder.group prefix+"Table.aggregate Count_Distinct" group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + group_builder.specify "should correctly count missing values" <| get_value t = columns = materialize t . columns @@ -966,7 +973,7 @@ add_specs suite_builder setup = ## TODO currently our builder does not allow all-null tables, so we create one with a 0 and remove it by filter. See #6159. - t0 = table_builder [["A", [0]]] + t0 = table_builder [["A", [0]]] connection=data.connection t1 = t0.filter "A" (Filter_Condition.Is_Nothing) t1.row_count . should_equal 0 t1.at "A" . to_vector . should_equal [] @@ -976,7 +983,7 @@ add_specs suite_builder setup = ## TODO currently our builder does not allow all-null tables, so we create one with a 0 and remove it by filter. See #6159. - t0_2 = table_builder [["A", [0, Nothing, Nothing]]] + t0_2 = table_builder [["A", [0, Nothing, Nothing]]] connection=data.connection t2 = t0_2.filter "A" (Filter_Condition.Is_Nothing) t2.row_count . should_equal 2 t2.at "A" . to_vector . should_equal [Nothing, Nothing] @@ -984,15 +991,15 @@ add_specs suite_builder setup = get_value (t2.aggregate [Count_Distinct "A" (ignore_nothing=True)]) . should_equal 0 get_value (t2.aggregate [Count_Distinct "A" (ignore_nothing=False)]) . 
should_equal 1 - t3 = table_builder [["A", [1, 2]]] + t3 = table_builder [["A", [1, 2]]] connection=data.connection get_value (t3.aggregate [Count_Distinct "A" (ignore_nothing=True)]) . should_equal 2 get_value (t3.aggregate [Count_Distinct "A" (ignore_nothing=False)]) . should_equal 2 - t4 = table_builder [["A", [1, 2, Nothing, Nothing]]] + t4 = table_builder [["A", [1, 2, Nothing, Nothing]]] connection=data.connection get_value (t4.aggregate [Count_Distinct "A" (ignore_nothing=True)]) . should_equal 2 get_value (t4.aggregate [Count_Distinct "A" (ignore_nothing=False)]) . should_equal 3 - t5 = table_builder [["G", ["foo", "foo", "bar", "foo"]], ["A", [Nothing, 0, Nothing, Nothing]]] + t5 = table_builder [["G", ["foo", "foo", "bar", "foo"]], ["A", [Nothing, 0, Nothing, Nothing]]] connection=data.connection r1 = t5.aggregate [Group_By "G", Count_Distinct "A" (ignore_nothing=True)] r1.row_count . should_equal 2 @@ -1009,7 +1016,7 @@ add_specs suite_builder setup = m2.columns.second.to_vector . should_equal [1, 2] group_builder.specify "should correctly count all-null keys in multi-column mode" (pending = resolve_pending test_selection.multi_distinct) <| - table = table_builder [["A", ["foo", "foo", Nothing, Nothing, Nothing]], ["B", ["baz", Nothing, Nothing, Nothing, "baz"]], ["C", [1, 2, 3, Nothing, 5]]] + table = table_builder [["A", ["foo", "foo", Nothing, Nothing, Nothing]], ["B", ["baz", Nothing, Nothing, Nothing, "baz"]], ["C", [1, 2, 3, Nothing, 5]]] connection=data.connection r2 = table.aggregate [Count_Distinct ["A", "B"] (ignore_nothing=False)] r2.row_count.should_equal 1 @@ -1085,17 +1092,19 @@ add_specs suite_builder setup = Test.fail "Expected a Nothing or NaN but got: "+value.to_text+" (at "+loc+")." 
suite_builder.group prefix+"Table.aggregate should correctly handle infinities" group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn pos_inf = 1/0 neg_inf = -1/0 + group_builder.specify "on Average" <| - t1 = table_builder [["X", [Nothing, pos_inf, pos_inf, 0]]] + t1 = table_builder [["X", [Nothing, pos_inf, pos_inf, 0]]] connection=data.connection r1 = t1.aggregate [Average "X"] r1.row_count.should_equal 1 m1 = materialize r1 m1.column_count . should_equal 1 m1.columns.first.at 0 . should_equal pos_inf - t2 = table_builder [["X", [Nothing, pos_inf, neg_inf, 0]]] + t2 = table_builder [["X", [Nothing, pos_inf, neg_inf, 0]]] connection=data.connection r2 = t2.aggregate [Average "X"] r2.row_count.should_equal 1 m2 = materialize r2 @@ -1103,28 +1112,28 @@ add_specs suite_builder setup = expect_null_or_nan <| m2.columns.first.at 0 group_builder.specify "on Median" (pending = resolve_pending test_selection.advanced_stats) <| - t1 = table_builder [["X", [Nothing, neg_inf, pos_inf, 0, pos_inf, pos_inf]]] + t1 = table_builder [["X", [Nothing, neg_inf, pos_inf, 0, pos_inf, pos_inf]]] connection=data.connection r1 = t1.aggregate [Median "X"] r1.row_count.should_equal 1 m1 = materialize r1 m1.column_count . should_equal 1 m1.columns.first.at 0 . should_equal pos_inf - t2 = table_builder [["X", [pos_inf, pos_inf, neg_inf, neg_inf]]] + t2 = table_builder [["X", [pos_inf, pos_inf, neg_inf, neg_inf]]] connection=data.connection r2 = t2.aggregate [Median "X"] r2.row_count.should_equal 1 m2 = materialize r2 m2.column_count . should_equal 1 expect_null_or_nan <| m2.columns.first.at 0 - t3 = table_builder [["X", [pos_inf, pos_inf, Nothing, 0, 10, 20, neg_inf, neg_inf]]] + t3 = table_builder [["X", [pos_inf, pos_inf, Nothing, 0, 10, 20, neg_inf, neg_inf]]] connection=data.connection r3 = t3.aggregate [Median "X"] r3.row_count.should_equal 1 m3 = materialize r3 m3.column_count . should_equal 1 m3.columns.first.at 0 . 
should_equal 10 - t4 = table_builder [["X", [Nothing, pos_inf, pos_inf, 10, 12]]] + t4 = table_builder [["X", [Nothing, pos_inf, pos_inf, 10, 12]]] connection=data.connection r4 = t4.aggregate [Median "X"] r4.row_count.should_equal 1 m4 = materialize r4 @@ -1132,21 +1141,21 @@ add_specs suite_builder setup = m4.columns.first.at 0 . should_equal pos_inf group_builder.specify "on Percentile" (pending = resolve_pending test_selection.advanced_stats) <| - t1 = table_builder [["X", [Nothing, neg_inf, 2, 3, 4, pos_inf]]] + t1 = table_builder [["X", [Nothing, neg_inf, 2, 3, 4, pos_inf]]] connection=data.connection r1 = t1.aggregate [Percentile 0.3 "X"] r1.row_count.should_equal 1 m1 = materialize r1 m1.column_count . should_equal 1 m1.columns.first.at 0 . should_equal 2.2 - t2 = table_builder [["X", [Nothing, neg_inf, neg_inf, 3, 4, pos_inf]]] + t2 = table_builder [["X", [Nothing, neg_inf, neg_inf, 3, 4, pos_inf]]] connection=data.connection r2 = t2.aggregate [Percentile 0.25 "X"] r2.row_count.should_equal 1 m2 = materialize r2 m2.column_count . should_equal 1 m2.columns.first.at 0 . should_equal neg_inf - t3 = table_builder [["X", [Nothing, neg_inf, neg_inf, pos_inf, pos_inf, pos_inf]]] + t3 = table_builder [["X", [Nothing, neg_inf, neg_inf, pos_inf, pos_inf, pos_inf]]] connection=data.connection r3 = t3.aggregate [Percentile 0.3 "X"] r3.row_count.should_equal 1 m3 = materialize r3 @@ -1154,7 +1163,7 @@ add_specs suite_builder setup = expect_null_or_nan <| m3.columns.first.at 0 group_builder.specify "on Standard_Deviation" (pending = resolve_pending test_selection.std_dev) <| - t1 = table_builder [["X", [neg_inf, 1]]] + t1 = table_builder [["X", [neg_inf, 1]]] connection=data.connection r1 = t1.aggregate [Standard_Deviation "X" (population=True), Standard_Deviation "X" (population=False)] r1.row_count.should_equal 1 m1 = materialize r1 @@ -1301,7 +1310,7 @@ add_specs suite_builder setup = r1.to_display_text . 
should_contain "`First`" suite_builder.group prefix+"Table.aggregate+Expressions" group_builder-> - data = Data.setup table_fn empty_table_fn + data = Data.setup create_connection_fn table_fn empty_table_fn ## TODO we probably should check all kinds of aggregate columns to verify that all of them correctly support expressions. diff --git a/test/Table_Tests/src/Common_Table_Operations/Column_Name_Edge_Cases_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Column_Name_Edge_Cases_Spec.enso index 8909f635a37b..93e01998d0ce 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Column_Name_Edge_Cases_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Column_Name_Edge_Cases_Spec.enso @@ -12,14 +12,23 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend from project.Common_Table_Operations.Core_Spec import weird_names +type Data + Value ~connection + + setup create_connection_fn = + Data.Value (create_connection_fn Nothing) + add_specs suite_builder setup = table_builder = setup.table_builder materialize = setup.materialize + create_connection_fn = setup.create_connection_func is_case_sensitive = setup.test_selection.supports_case_sensitive_columns suite_builder.group setup.prefix+"Column Naming edge cases" group_builder-> + data = Data.setup create_connection_fn + group_builder.specify "case insensitive name collisions - set" <| - t1 = table_builder [["X", [1]]] + t1 = table_builder [["X", [1]]] connection=data.connection Problems.assume_no_problems (t1.at "X" . rename "x") t2 = t1.set "[X] + 100" "x" case is_case_sensitive of @@ -51,7 +60,7 @@ add_specs suite_builder setup = t6.at "Right x" . to_vector . 
should_equal [101, 101, 101, 101] group_builder.specify "case insensitive name collisions - rename" <| - t1 = table_builder [["X", [1]], ["Y", [2]]] + t1 = table_builder [["X", [1]], ["Y", [2]]] connection=data.connection t2 = t1.rename_columns [["X", "A"], ["Y", "a"]] case is_case_sensitive of True -> @@ -71,7 +80,7 @@ add_specs suite_builder setup = Problems.expect_only_warning Duplicate_Output_Column_Names t3 group_builder.specify "case insensitive name collisions - aggregate" <| - t1 = table_builder [["X", [2, 1, 3, 2]]] + t1 = table_builder [["X", [2, 1, 3, 2]]] connection=data.connection t2 = t1.aggregate [Aggregate_Column.Maximum "X" "A", Aggregate_Column.Minimum "X" "a"] case is_case_sensitive of @@ -91,8 +100,8 @@ add_specs suite_builder setup = t3.at 1 . to_vector . should_equal [1] group_builder.specify "case insensitive name collisions - joins" <| - t1 = table_builder [["X", [1, 2]], ["a", [3, 4]]] - t2 = table_builder [["X", [2, 1]], ["A", [5, 6]]] + t1 = table_builder [["X", [1, 2]], ["a", [3, 4]]] connection=data.connection + t2 = table_builder [["X", [2, 1]], ["A", [5, 6]]] connection=data.connection t3 = t1.join t2 on="X" join_kind=Join_Kind.Inner case is_case_sensitive of @@ -118,7 +127,7 @@ add_specs suite_builder setup = t5.column_names . should_equal ["X", "a"] group_builder.specify "case insensitive name collisions - cross_tab" <| - t0 = table_builder [["X", ["a", "A", "b"]], ["Y", [4, 5, 6]]] + t0 = table_builder [["X", ["a", "A", "b"]], ["Y", [4, 5, 6]]] connection=data.connection t1 = t0.cross_tab group_by=[] name_column="X" values=[Aggregate_Column.First "Y"] . 
sort_columns case setup.is_database of # TODO remove this check once implemented @@ -136,7 +145,7 @@ add_specs suite_builder setup = t1.should_fail_with Clashing_Column_Name group_builder.specify "case insensitive name collisions - transpose" <| - t0 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] + t0 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] connection=data.connection t1 = t0.transpose attribute_column_name="a" value_column_name="A" case setup.is_database of # TODO remove this check once implemented @@ -154,8 +163,8 @@ add_specs suite_builder setup = group_builder.specify "unicode-normalized-equality vs selecting columns" <| ## In Enso column 'ś' and 's\u0301' are the same entity. But in Databases, quite not necessarily. - t1 = table_builder [['ś', [1, 2]], ['X', ['a', 'b']]] - t2 = table_builder [['s\u0301', [2, 1]], ['Y', ['x', 'y']]] + t1 = table_builder [['ś', [1, 2]], ['X', ['a', 'b']]] connection=data.connection + t2 = table_builder [['s\u0301', [2, 1]], ['Y', ['x', 'y']]] connection=data.connection # The two representations of the same string just address the same column: t1.at 'ś' . to_vector . should_equal [1, 2] diff --git a/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso index 988a5f2991ab..5afdc64fe847 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso @@ -11,13 +11,23 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import all +type Data + Value ~connection + + setup create_connection_fn = + Data.Value (create_connection_fn Nothing) + + add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend." 
suite_builder.group prefix+"Table.set with Column_Operation" group_builder-> + data = Data.setup create_connection_fn + group_builder.specify "arithmetics" <| - t = table_builder [["A", [1, 2]], ["B", [10, 40]]] + t = table_builder [["A", [1, 2]], ["B", [10, 40]]] connection=data.connection t.set (Column_Operation.Add (Column_Ref.Name "A") (Column_Ref.Name "B")) "C" . at "C" . to_vector . should_equal [11, 42] t.set (Column_Operation.Add 100 (Column_Ref.Name "B")) "C" . at "C" . to_vector . should_equal [110, 140] t.set (Column_Operation.Add (Column_Ref.Name "A") 100) "C" . at "C" . to_vector . should_equal [101, 102] @@ -38,7 +48,7 @@ add_specs suite_builder setup = t.set (Column_Operation.Divide 1 (Column_Ref.Name "A")) "C" . at "C" . to_vector . should_equal [1, 0.5] t.set (Column_Operation.Divide 1 2) "C" . at "C" . to_vector . should_equal [0.5, 0.5] - t2 = table_builder [["A", [23, 42]], ["B", [10, 3]]] + t2 = table_builder [["A", [23, 42]], ["B", [10, 3]]] connection=data.connection t2.set (Column_Operation.Mod (Column_Ref.Name "A") (Column_Ref.Name "B")) "C" . at "C" . to_vector . should_equal [3, 0] t2.set (Column_Operation.Mod (Column_Ref.Name "A") 10) "C" . at "C" . to_vector . should_equal [3, 2] t2.set (Column_Operation.Mod 7 5) "C" . at "C" . to_vector . should_equal [2, 2] @@ -52,7 +62,7 @@ add_specs suite_builder setup = t.set (Column_Operation.Add 42 "y") . should_fail_with Illegal_Argument group_builder.specify "rounding" <| - t = table_builder [["A", [1.13333, 122.74463, 32.52424, -12.7]]] + t = table_builder [["A", [1.13333, 122.74463, 32.52424, -12.7]]] connection=data.connection t.set (Column_Operation.Round (Column_Ref.Name "A")) "Z" . at "Z" . to_vector . should_equal [1, 123, 33, -13] t.set (Column_Operation.Round (Column_Ref.Name "A") precision=1) "Z" . at "Z" . to_vector . should_equal [1.1, 122.7, 32.5, -12.7] t.set (Column_Operation.Round (Column_Ref.Name "A") precision=-1) "Z" . at "Z" . to_vector . 
should_equal [0, 120, 30, -10] @@ -65,7 +75,7 @@ add_specs suite_builder setup = Test.expect_panic Type_Error <| t.set (Column_Operation.Truncate "1.23") group_builder.specify "date/time" pending=pending_datetime <| - t = table_builder [["A", [Date_Time.new 2023 1 12 12 45, Date_Time.new 2020 5 12 1 45]], ["B", [Date_Time.new 2023 1 15 18 45, Date_Time.new 2020 6 12 22 20]], ["x", [1, 3]]] + t = table_builder [["A", [Date_Time.new 2023 1 12 12 45, Date_Time.new 2020 5 12 1 45]], ["B", [Date_Time.new 2023 1 15 18 45, Date_Time.new 2020 6 12 22 20]], ["x", [1, 3]]] connection=data.connection # TODO ticket for truncate for DB if setup.is_database.not then @@ -80,7 +90,7 @@ add_specs suite_builder setup = t.set (Column_Operation.Date_Part (Column_Ref.Name "A") Date_Period.Year) "Z" . at "Z" . to_vector . should_equal [2023, 2020] t.set (Column_Operation.Date_Part (Column_Ref.Name "A") Time_Period.Minute) "Z" . at "Z" . to_vector . should_equal [45, 45] - t2 = table_builder [["C", [Date.new 2002 12 10, Date.new 2005 01 01]], ["D", [Time_Of_Day.new 12 45, Time_Of_Day.new 01 01]]] + t2 = table_builder [["C", [Date.new 2002 12 10, Date.new 2005 01 01]], ["D", [Time_Of_Day.new 12 45, Time_Of_Day.new 01 01]]] connection=data.connection t2.set (Column_Operation.Date_Add (Column_Ref.Name "C") 5 Date_Period.Month) "Z" . at "Z" . to_vector . should_equal [Date.new 2003 5 10, Date.new 2005 6 01] t2.set (Column_Operation.Date_Add (Column_Ref.Name "D") 15 Time_Period.Hour) "Z" . at "Z" . to_vector . should_equal [Time_Of_Day.new 03 45, Time_Of_Day.new 16 01] @@ -97,7 +107,7 @@ add_specs suite_builder setup = Test.expect_panic Type_Error <| t2.set (Column_Operation.Date_Diff 42 "x" Date_Period.Year) group_builder.specify "boolean" <| - t = table_builder [["A", [True, False]], ["T", [True, True]]] + t = table_builder [["A", [True, False]], ["T", [True, True]]] connection=data.connection t.set (Column_Operation.And (Column_Ref.Name "A") (Column_Ref.Name "T")) "Z" . at "Z" . 
to_vector . should_equal [True, False] t.set (Column_Operation.And (Column_Ref.Name "A") False) "Z" . at "Z" . to_vector . should_equal [False, False] @@ -114,14 +124,14 @@ add_specs suite_builder setup = Test.expect_panic_with (t.set (Column_Operation.Or (Column_Ref.Name "A") "x")) Type_Error group_builder.specify "if" <| - t = table_builder [["A", [1, 100]], ["B", [10, 40]], ["C", [23, 55]]] + t = table_builder [["A", [1, 100]], ["B", [10, 40]], ["C", [23, 55]]] connection=data.connection t.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than=(Column_Ref.Name "B"))) "Z" . at "Z" . to_vector . should_equal [False, True] t.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than=20) "T" "F") "Z" . at "Z" . to_vector . should_equal ["F", "T"] t.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Less than=20) (Column_Ref.Name "B") (Column_Ref.Name "C")) "Z" . at "Z" . to_vector . should_equal [10, 55] t.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than="X") "T" "F") . should_fail_with Invalid_Value_Type - t2 = table_builder [["A", ["a", "c"]], ["B", ["c", "b"]], ["C", [23, 55]]] + t2 = table_builder [["A", ["a", "c"]], ["B", ["c", "b"]], ["C", [23, 55]]] connection=data.connection t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than=(Column_Ref.Name "B"))) "Z" . at "Z" . to_vector . should_equal [False, True] t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than=(Column_Ref.Name "B")) (Column_Ref.Name "C") 0) "Z" . at "Z" . to_vector . should_equal [0, 55] t2.set (Column_Operation.If "A" (Filter_Condition.Greater than="B") (Column_Ref.Name "C") 0) "Z" . at "Z" . to_vector . should_equal [0, 0] @@ -134,7 +144,7 @@ add_specs suite_builder setup = # Passing a column does not work row-by-row, but looks at whole column contents. t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Is_In (t2.at "B")) "TT" "FF") "Z" . 
at "Z" . to_vector . should_equal ["FF", "TT"] - t3 = table_builder [["x", ["e", "e", "a"]]] + t3 = table_builder [["x", ["e", "e", "a"]]] connection=data.connection t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Is_In (t3.at "x")) "TT" "FF") "Z" . at "Z" . to_vector . should_equal ["TT", "FF"] # Thus, passing a Column_Ref into Is_In/Not_In is not allowed as it would be confusing. @@ -143,7 +153,7 @@ add_specs suite_builder setup = t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Not_In [Column_Ref.Name "B", "X"]) "TT" "FF") . should_fail_with Illegal_Argument group_builder.specify "text" <| - t = table_builder [["A", [" a ", "b"]], ["B", ["c", " d "]]] + t = table_builder [["A", [" a ", "b"]], ["B", ["c", " d "]]] connection=data.connection t.set (Column_Operation.Trim (Column_Ref.Name "A")) "Z" . at "Z" . to_vector . should_equal ["a", "b"] t.set (Column_Operation.Trim (Column_Ref.Name "A") Location.End) "Z" . at "Z" . to_vector . should_equal [" a", "b"] @@ -156,11 +166,11 @@ add_specs suite_builder setup = t.set (Column_Operation.Add (Column_Ref.Name "A") "!") "Z" . at "Z" . to_vector . should_equal [" a !", "b!"] t.set (Column_Operation.Add "O" "!") "Z" . at "Z" . to_vector . should_equal ["O!", "O!"] - t2 = table_builder [["A", [42]]] + t2 = table_builder [["A", [42]]] connection=data.connection t2.set (Column_Operation.Trim (Column_Ref.Name "A")) . should_fail_with Invalid_Value_Type group_builder.specify "min/max" <| - t = table_builder [["A", [1, 20]], ["B", [10, 2]]] + t = table_builder [["A", [1, 20]], ["B", [10, 2]]] connection=data.connection t.set (Column_Operation.Min (Column_Ref.Name "A") (Column_Ref.Name "B")) "Z" . at "Z" . to_vector . should_equal [1, 2] t.set (Column_Operation.Min (Column_Ref.Name "A") 5) "Z" . at "Z" . to_vector . should_equal [1, 5] @@ -171,7 +181,7 @@ add_specs suite_builder setup = t.set (Column_Operation.Max 2 5) "Z" . at "Z" . to_vector . 
should_equal [5, 5] t.set (Column_Operation.Min 2 5) "Z" . at "Z" . to_vector . should_equal [2, 2] - t2 = table_builder [["A", ["aardvark", "zebra"]], ["B", ["cat", "dog"]], ["x", [1, 20]]] + t2 = table_builder [["A", ["aardvark", "zebra"]], ["B", ["cat", "dog"]], ["x", [1, 20]]] connection=data.connection t2.set (Column_Operation.Min (Column_Ref.Name "A") (Column_Ref.Name "B")) "Z" . at "Z" . to_vector . should_equal ["aardvark", "dog"] t2.set (Column_Operation.Max (Column_Ref.Name "A") (Column_Ref.Name "B")) "Z" . at "Z" . to_vector . should_equal ["cat", "zebra"] t2.set (Column_Operation.Min (Column_Ref.Name "A") "animal") "Z" . at "Z" . to_vector . should_equal ["aardvark", "animal"] @@ -183,23 +193,25 @@ add_specs suite_builder setup = t2.set (Column_Operation.Min (Column_Ref.Name "x") (Column_Ref.Name "A")) . should_fail_with Invalid_Value_Type if pending_datetime.is_nothing then - t3 = table_builder [["A", [Date.new 2002 12 10, Date.new 2005 01 01]]] + t3 = table_builder [["A", [Date.new 2002 12 10, Date.new 2005 01 01]]] connection=data.connection t3.set (Column_Operation.Min (Column_Ref.Name "A") (Date.new 2003)) "Z" . at "Z" . to_vector . should_equal [Date.new 2002 12 10, Date.new 2003 01 01] t3.set (Column_Operation.Max (Column_Ref.Name "A") (Date.new 2003)) "Z" . at "Z" . to_vector . should_equal [Date.new 2003 01 01, Date.new 2005 01 01] group_builder.specify "allows also indexing columns numerically" <| - t = table_builder [["X", [1, 2]], ["Y", [3, 4]]] + t = table_builder [["X", [1, 2]], ["Y", [3, 4]]] connection=data.connection t.set (Column_Operation.Add (Column_Ref.Index 0) (Column_Ref.Index 1)) "Z" . at "Z" . to_vector . should_equal [4, 6] group_builder.specify "will forward column resolution errors" <| - t = table_builder [["X", [1, 2]], ["Y", [3, 4]]] + t = table_builder [["X", [1, 2]], ["Y", [3, 4]]] connection=data.connection t.set (Column_Operation.Add (Column_Ref.Name "X") (Column_Ref.Name "Z")) . 
should_fail_with No_Such_Column t.set (Column_Operation.Not (Column_Ref.Name "zzz")) . should_fail_with No_Such_Column t.set (Column_Operation.Not (Column_Ref.Index 42)) . should_fail_with Index_Out_Of_Bounds suite_builder.group prefix+"Unique derived column names" group_builder-> + data = Data.setup create_connection_fn + group_builder.specify "Should not disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add_Or_Update" <| - t = table_builder [["X", [1, 2, 3]]] + t = table_builder [["X", [1, 2, 3]]] connection=data.connection column_op = Column_Operation.Power 2 (Column_Ref.Name "X") t2 = t.set column_op . set column_op t2.column_names . should_equal ["X", "[2] ^ [X]"] @@ -207,7 +219,7 @@ add_specs suite_builder setup = t2.at "[2] ^ [X]" . to_vector . should_equal [2, 4, 8] group_builder.specify "Should disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add" <| - t = table_builder [["X", [1, 2, 3]]] + t = table_builder [["X", [1, 2, 3]]] connection=data.connection column_op = Column_Operation.Power 2 (Column_Ref.Name "X") t2 = t.set column_op set_mode=Set_Mode.Add . set column_op set_mode=Set_Mode.Add t2.column_names . should_equal ["X", "[2] ^ [X]", "[2] ^ [X] 1"] @@ -216,7 +228,7 @@ add_specs suite_builder setup = t2.at "[2] ^ [X] 1" . to_vector . should_equal [2, 4, 8] group_builder.specify "Should disambiguate two derived columns that would otherwise have had the same name, within the same expression" <| - t = table_builder [["X", [1, 2, 3]]] + t = table_builder [["X", [1, 2, 3]]] connection=data.connection expression = "2 + (2 * 2) + (2 ^ [X])" t2 = t.set expression t2.column_names . should_equal ["X", expression] @@ -224,7 +236,7 @@ add_specs suite_builder setup = t2.at expression . to_vector . 
should_equal [8, 10, 14] group_builder.specify "Should use .pretty to distinguish string constants from regular column names" <| - t = table_builder [["X", ["a", "b", "c"]]] + t = table_builder [["X", ["a", "b", "c"]]] connection=data.connection expression = '"foo" + [X] + "bar"' t2 = t.set expression t2.column_names . should_equal ["X", expression] @@ -232,7 +244,7 @@ add_specs suite_builder setup = t2.at expression . to_vector . should_equal ["fooabar", "foobbar", "foocbar"] group_builder.specify "Should disambiguate between a column reference and a literal string" <| - t = table_builder [["X", ["a", "b", "c"]]] + t = table_builder [["X", ["a", "b", "c"]]] connection=data.connection t2 = t.set (Column_Operation.Add "prefix" (Column_Ref.Name "X")) t3 = t2.set (Column_Operation.Add "prefix" "X") @@ -241,13 +253,13 @@ add_specs suite_builder setup = t3.at "['prefix'] + 'X'" . to_vector . should_equal ["prefixX", "prefixX", "prefixX"] group_builder.specify "Should not disambiguate if set_mode is Update" <| - t = table_builder [["X", [1, 2, 3]]] + t = table_builder [["X", [1, 2, 3]]] connection=data.connection t2 = t.set (Column_Operation.Add (Column_Ref.Name "X") 1) set_mode=Set_Mode.Update t2.column_names . should_equal ["X"] t2.at "X" . to_vector . should_equal [2, 3, 4] group_builder.specify "Should not disambiguate if set_mode is Add_Or_Update" <| - t = table_builder [["X", [1, 2, 3]], ["[X] + 1", [10, 20, 30]]] + t = table_builder [["X", [1, 2, 3]], ["[X] + 1", [10, 20, 30]]] connection=data.connection # set_mode=Set_Mode.Add_Or_Update is the default t2 = t.set (Column_Operation.Add (Column_Ref.Name "X") 1) t2.column_names . should_equal ["X", "[X] + 1"] @@ -255,7 +267,7 @@ add_specs suite_builder setup = t2.at "[X] + 1" . to_vector . 
should_equal [2, 3, 4] group_builder.specify "Should not disambiguate if the new name is explicitly set" <| - t = table_builder [["X", [1, 2, 3]]] + t = table_builder [["X", [1, 2, 3]]] connection=data.connection t2 = t.set (Column_Operation.Add (Column_Ref.Name "X") 1) new_name="X" t2.column_names . should_equal ["X"] t2.at "X" . to_vector . should_equal [2, 3, 4] diff --git a/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso index ff0d3a6599ab..44bc2575734f 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso @@ -17,6 +17,14 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend + +type Data + Value ~connection + + setup create_connection_fn = + Data.Value (create_connection_fn Nothing) + + ## Currently these tests rely on filtering preserving the insertion ordering within tables. This is not necessarily guaranteed by RDBMS, so we may adapt this in the future. For now we implicitly assume the ordering is preserved, @@ -25,10 +33,13 @@ add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder test_selection = setup.test_selection + create_connection_fn = setup.create_connection_func suite_builder.group prefix+"Table.filter" group_builder-> + data = Data.setup create_connection_fn + group_builder.specify "by integer comparisons" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]] connection=data.connection t1 = t.filter "X" (Filter_Condition.Less than=10) t1.at "ix" . to_vector . should_equal [2, 4] t1.at "X" . to_vector . should_equal [3, 4] @@ -71,7 +82,7 @@ add_specs suite_builder setup = t.filter "X" (Filter_Condition.Is_Nan) . at "ix" . 
to_vector . should_equal [] group_builder.specify "by float operations" <| - t = table_builder [["ix", [1, 2, 3, 4, 5, 6]], ["X", [100.0, 2.5, Nothing, Number.nan, Number.positive_infinity, Number.negative_infinity]]] + t = table_builder [["ix", [1, 2, 3, 4, 5, 6]], ["X", [100.0, 2.5, Nothing, Number.nan, Number.positive_infinity, Number.negative_infinity]]] connection=data.connection t.filter "X" (Filter_Condition.Less than=10.0) . at "X" . to_vector . should_equal [2.5, Number.negative_infinity] @@ -90,7 +101,7 @@ add_specs suite_builder setup = t.filter "X" Filter_Condition.Is_Nan . at "ix" . to_vector . should_equal [4] group_builder.specify "Not_Equal test cases" pending="Specification needs clarifying, see: https://github.com/enso-org/enso/issues/5241#issuecomment-1480167927" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]] connection=data.connection t3 = t.filter "X" (Filter_Condition.Not_Equal to=100) t3 . at "X" . to_vector . should_equal [3, Nothing, 4, 12] t3 . at "ix" . to_vector . should_equal [2, 3, 4, 5] @@ -98,7 +109,7 @@ add_specs suite_builder setup = t.filter "X" (Filter_Condition.Not_Equal to=(Column_Ref.Name "Y")) . at "X" . to_vector . should_equal [3, Nothing, 4, 12] group_builder.specify "by text comparisons" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "baca", "b", Nothing, "c"]], ["Y", ["a", "b", "b", "c", "c"]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "baca", "b", Nothing, "c"]], ["Y", ["a", "b", "b", "c", "c"]]] connection=data.connection t1 = t.filter "X" (Filter_Condition.Less than="c") t1.at "ix" . to_vector . should_equal [1, 2, 3] t1.at "X" . to_vector . should_equal ["abb", "baca", "b"] @@ -126,13 +137,13 @@ add_specs suite_builder setup = t.filter "X" (Filter_Condition.Equal to=(Column_Ref.Name "Y")) . at "X" . 
to_vector . should_equal ["b", "c"] t.filter "X" (Filter_Condition.Between (Column_Ref.Name "Y") "bzzzz") . at "X" . to_vector . should_equal ["abb", "baca", "b"] - t2 = table_builder [["X", ["A", "a", "b"]], ["Y", ["a", "B", "b"]]] + t2 = table_builder [["X", ["A", "a", "b"]], ["Y", ["a", "B", "b"]]] connection=data.connection t2.filter "X" (Filter_Condition.Equal to="a") . at "X" . to_vector . should_equal ["a"] t2.filter "X" (Filter_Condition.Equal_Ignore_Case to="a") . at "X" . to_vector . should_equal ["A", "a"] t2.filter "X" (Filter_Condition.Equal_Ignore_Case to=(Column_Ref.Name "Y")) . at "X" . to_vector . should_equal ["A", "b"] group_builder.specify "by text search (contains, starts_with, ends_with, not_contains)" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "bacb", "banana", Nothing, "nana"]], ["Y", ["a", "B", "d", "c", "a"]], ["Z", ["aaaaa", "bbbbb", "[ab]", "[ab]aaaa", "[ab]ccc"]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "bacb", "banana", Nothing, "nana"]], ["Y", ["a", "B", "d", "c", "a"]], ["Z", ["aaaaa", "bbbbb", "[ab]", "[ab]aaaa", "[ab]ccc"]]] connection=data.connection t.filter "X" (Filter_Condition.Starts_With "ba") . at "X" . to_vector . should_equal ["bacb", "banana"] t.filter "X" (Filter_Condition.Starts_With "BA" Case_Sensitivity.Sensitive) . at "X" . to_vector . should_equal [] @@ -166,7 +177,7 @@ add_specs suite_builder setup = t.filter "X" (Filter_Condition.Not_Contains (Column_Ref.Name "Y") Case_Sensitivity.Insensitive) . at "X" . to_vector . 
should_equal ["banana"] group_builder.specify "by text search (like, not_like)" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "bacb", "banana", Nothing, "nana"]], ["Y", ["a", "B", "d", "c", "a"]], ["Z", ["aaaaa", "bbbbb", "[ab]", "[ab]aaaa", "[ab]ccc"]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "bacb", "banana", Nothing, "nana"]], ["Y", ["a", "B", "d", "c", "a"]], ["Z", ["aaaaa", "bbbbb", "[ab]", "[ab]aaaa", "[ab]ccc"]]] connection=data.connection t.filter "X" (Filter_Condition.Like "%an%") . at "X" . to_vector . should_equal ["banana", "nana"] t.filter "X" (Filter_Condition.Like "_a%") . at "X" . to_vector . should_equal ["bacb", "banana", "nana"] @@ -178,7 +189,7 @@ add_specs suite_builder setup = t.filter "Z" (Filter_Condition.Not_Like "[ab]%") . at "Z" . to_vector . should_equal ["aaaaa", "bbbbb"] group_builder.specify "text operations should also match newlines" <| - t = table_builder [["X", ['a\n\n\n', 'a\n', 'a\n\n\nb', 'a\nb', 'caa\nbb']]] + t = table_builder [["X", ['a\n\n\n', 'a\n', 'a\n\n\nb', 'a\nb', 'caa\nbb']]] connection=data.connection t.filter "X" (Filter_Condition.Like 'a_') . at "X" . to_vector . should_equal ['a\n'] t.filter "X" (Filter_Condition.Like 'a%') . at "X" . to_vector . should_equal ['a\n\n\n', 'a\n', 'a\n\n\nb', 'a\nb'] t.filter "X" (Filter_Condition.Like 'a_b') . at "X" . to_vector . should_equal ['a\nb'] @@ -191,7 +202,7 @@ add_specs suite_builder setup = t.filter "X" (Filter_Condition.Not_Contains '\nb') . at "X" . to_vector . should_equal ['a\n\n\n', 'a\n'] if test_selection.supports_unicode_normalization then - t = table_builder [["X", ['śnieg', 's\u0301nieg', 'X', Nothing, 'połać', 'połac\u0301']]] + t = table_builder [["X", ['śnieg', 's\u0301nieg', 'X', Nothing, 'połać', 'połac\u0301']]] connection=data.connection group_builder.specify "text operations should support Unicode normalization" <| t.filter "X" (Filter_Condition.Starts_With 'ś') . at "X" . to_vector . 
should_equal ['śnieg', 's\u0301nieg'] t.filter "X" (Filter_Condition.Contains 'ś') . at "X" . to_vector . should_equal ['śnieg', 's\u0301nieg'] @@ -207,12 +218,12 @@ add_specs suite_builder setup = t.filter "X" (Filter_Condition.Like 'ś%') . at "X" . to_vector . should_equal ['śnieg', 's\u0301nieg'] group_builder.specify "by empty text" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "", " ", Nothing, "nana"]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "", " ", Nothing, "nana"]]] connection=data.connection t.filter "X" Filter_Condition.Is_Empty . at "X" . to_vector . should_equal ["", Nothing] t.filter "X" Filter_Condition.Not_Empty . at "X" . to_vector . should_equal ["abb", " ", "nana"] group_builder.specify "should check types for text operations" <| - t = table_builder [["ix", [1, 2, 3, 4]], ["X", [Nothing, "A", "", " "]]] + t = table_builder [["ix", [1, 2, 3, 4]], ["X", [Nothing, "A", "", " "]]] connection=data.connection check_problem result = result.should_fail_with Invalid_Value_Type result.catch.expected . should_equal "Char" @@ -255,7 +266,7 @@ add_specs suite_builder setup = check_scalar_type_error_handling (t.filter "X" (Filter_Condition.Not_Contains 42)) group_builder.specify "by nulls" <| - t = table_builder [["ix", [1, 2, 3, 4]], ["X", [Nothing, 1, Nothing, 4]]] + t = table_builder [["ix", [1, 2, 3, 4]], ["X", [Nothing, 1, Nothing, 4]]] connection=data.connection t1 = t.filter "X" Filter_Condition.Is_Nothing t1.at "ix" . to_vector . should_equal [1, 3] t1.at "X" . to_vector . should_equal [Nothing, Nothing] @@ -265,8 +276,8 @@ add_specs suite_builder setup = t2.at "X" . to_vector . 
should_equal [1, 4] group_builder.specify "by an Is_In check" <| - t = table_builder [["ix", [1, 2, 3, Nothing, 5, 6]], ["X", ["a", "b", "ccc", "X", "f", "2"]]] - t1 = table_builder [["txt", ["X", "a", "c", Nothing]], ["int", [Nothing, 2, 5, 4]], ["bool", [True, Nothing, Nothing, True]]] + t = table_builder [["ix", [1, 2, 3, Nothing, 5, 6]], ["X", ["a", "b", "ccc", "X", "f", "2"]]] connection=data.connection + t1 = table_builder [["txt", ["X", "a", "c", Nothing]], ["int", [Nothing, 2, 5, 4]], ["bool", [True, Nothing, Nothing, True]]] connection=data.connection t.filter "X" (Filter_Condition.Is_In (t1.at "txt")) . at "X" . to_vector . should_equal ["a", "X"] t.filter "X" (Filter_Condition.Is_In (t1.at "txt" . to_vector)) . at "X" . to_vector . should_equal ["a", "X"] @@ -297,7 +308,7 @@ add_specs suite_builder setup = True -> v2.should_equal [2, 3] False -> v2.should_fail_with SQL_Error - t2 = table_builder [["A", [True, False, True]], ["B", [False, False, False]], ["C", [True, False, Nothing]]] + t2 = table_builder [["A", [True, False, True]], ["B", [False, False, False]], ["C", [True, False, Nothing]]] connection=data.connection t2.filter "A" (Filter_Condition.Is_In (t1.at "bool")) . at "A" . to_vector . should_equal [True, True] t2.filter "A" (Filter_Condition.Is_In (t1.at "bool" . to_vector)) . at "A" . to_vector . should_equal [True, True] t2.filter "B" (Filter_Condition.Is_In [True, Nothing]) . at "B" . to_vector . should_equal [] @@ -311,7 +322,7 @@ add_specs suite_builder setup = NOT on a row-by-row basis like all other operations. Column_Ref is used with row-by-row ops, so this would only cause confusion. Very rarely someone wants to filter a column by Is_In within the same table - and that's the only approach Column_Ref would support. - t = table_builder [["A", [1, 2, 3]], ["B", [2, 3, 4]]] + t = table_builder [["A", [1, 2, 3]], ["B", [2, 3, 4]]] connection=data.connection t.filter "A" (Filter_Condition.Is_In (Column_Ref.Name "B")) . 
should_fail_with Illegal_Argument @@ -319,12 +330,12 @@ add_specs suite_builder setup = t.filter "A" (Filter_Condition.Is_In (t.at "B")) . at "A" . to_vector . should_equal [2, 3] group_builder.specify "by a boolean mask" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] connection=data.connection t.filter "b" . at "ix" . to_vector . should_equal [1, 4, 5] t.filter "b" Filter_Condition.Is_False . at "ix" . to_vector . should_equal [2] group_builder.specify "should correctly mask all kinds of columns" <| - t = table_builder [["ints", [1, 2, 3, Nothing, 4]], ["floats", [4.0, Nothing, 3.0, 2.0, 1.0]], ["bools", [False, False, True, Nothing, False]], ["strings", ["a", Nothing, "b", "c", "d"]], ["mask", [False, True, True, True, Nothing]]] + t = table_builder [["ints", [1, 2, 3, Nothing, 4]], ["floats", [4.0, Nothing, 3.0, 2.0, 1.0]], ["bools", [False, False, True, Nothing, False]], ["strings", ["a", Nothing, "b", "c", "d"]], ["mask", [False, True, True, True, Nothing]]] connection=data.connection t2 = t.filter "mask" t2.at "ints" . to_vector . should_equal [2, 3, Nothing] t2.at "floats" . to_vector . should_equal [Nothing, 3.0, 2.0] @@ -333,7 +344,7 @@ add_specs suite_builder setup = t2.at "mask" . to_vector . should_equal [True, True, True] group_builder.specify "should check types of boolean operations" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] connection=data.connection check_problem result = result.should_fail_with Invalid_Value_Type result.catch.expected . 
should_equal Value_Type.Boolean @@ -342,7 +353,7 @@ add_specs suite_builder setup = check_problem <| t.filter "ix" Filter_Condition.Is_False group_builder.specify "should check types of numeric operations" <| - t = table_builder [["a", ["a", "b"]]] + t = table_builder [["a", ["a", "b"]]] connection=data.connection check_problem result = result.should_fail_with Invalid_Value_Type result.catch.expected . should_equal "a numeric" @@ -354,32 +365,32 @@ add_specs suite_builder setup = check_problem <| t.filter "a" Filter_Condition.Is_Nan group_builder.specify "by a custom expression built from table's columns" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10, 20, 13, 4, 5]], ["Y", [0, -100, 8, 2, 5]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10, 20, 13, 4, 5]], ["Y", [0, -100, 8, 2, 5]]] connection=data.connection t.filter (t.at "X" + t.at "Y" > 9) . at "ix" . to_vector . should_equal [1, 3, 5] group_builder.specify "should handle selection errors: unknown column name" <| - t = table_builder [["X", [10, 20, 13, 4, 5]]] + t = table_builder [["X", [10, 20, 13, 4, 5]]] connection=data.connection t.filter "unknown column" . should_fail_with No_Such_Column t.filter "unknown column" . catch . should_equal (No_Such_Column.Error "unknown column") group_builder.specify "should handle selection errors: out of bounds index" <| - t = table_builder [["X", [10, 20, 13, 4, 5]]] + t = table_builder [["X", [10, 20, 13, 4, 5]]] connection=data.connection t.filter 4 . should_fail_with Index_Out_Of_Bounds t.filter 4 . catch . 
should_equal (Index_Out_Of_Bounds.Error 4 1) group_builder.specify "should handle illegal arguments" <| - t = table_builder [["X", [10, 20, 13, 4, 5]]] + t = table_builder [["X", [10, 20, 13, 4, 5]]] connection=data.connection Test.expect_panic_with (t.filter "X" "NOT A CONDITION") Type_Error group_builder.specify "should nicely handle Filter_Condition with unapplied arguments" <| - t = table_builder [["X", [10, 20, 13, 4, 5]]] + t = table_builder [["X", [10, 20, 13, 4, 5]]] connection=data.connection t.filter "X" (Filter_Condition.Equal) . should_fail_with Illegal_Argument t.filter "X" (Filter_Condition.Starts_With) . should_fail_with Illegal_Argument t.filter "X" (Filter_Condition.Between) . should_fail_with Illegal_Argument t.filter "X" (Filter_Condition.Between 1) . should_fail_with Illegal_Argument group_builder.specify "should report issues: floating point equality" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, 2.0]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, 2.0]]] connection=data.connection r1 = t.filter "X" (Filter_Condition.Equal 2) on_problems=Problem_Behavior.Ignore r1.at "ix" . to_vector . 
should_equal [3, 5] @@ -397,7 +408,7 @@ add_specs suite_builder setup = Problems.expect_warning Floating_Point_Equality r4 group_builder.specify "already existing warnings should not be escalated to errors in error handling mode" <| - t1 = table_builder [["X", [1.5, 2.0, 0.0]], ["ix", [1, 2, 3]]] + t1 = table_builder [["X", [1.5, 2.0, 0.0]], ["ix", [1, 2, 3]]] connection=data.connection c1 = Warning.attach (Illegal_State.Error "FOO") (t1.evaluate_expression "3.0 + [X]") Problems.expect_warning Illegal_State c1 @@ -431,18 +442,20 @@ add_specs suite_builder setup = err2.should_fail_with Floating_Point_Equality suite_builder.group prefix+"Table.filter_by_expression" group_builder-> + data = Data.setup create_connection_fn + group_builder.specify "by a boolean column" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] connection=data.connection t.filter_by_expression "[b]" . at "ix" . to_vector . should_equal [1, 4, 5] t.filter_by_expression "![b]" . at "ix" . to_vector . should_equal [2] group_builder.specify "by an integer comparison" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] connection=data.connection t.filter_by_expression "[ix]==3" . at "ix" . to_vector . should_equal [3] t.filter_by_expression "[ix]>2" . at "ix" . to_vector . should_equal [3, 4, 5] group_builder.specify "fail gracefully" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] connection=data.connection t.filter_by_expression "[ix" . should_fail_with Expression_Error t.filter_by_expression "[ix" . catch . should_be_a Expression_Error.Syntax_Error t.filter_by_expression "Starts_With([b])" . 
should_fail_with Expression_Error @@ -465,7 +478,7 @@ add_specs suite_builder setup = t.filter_by_expression "is_empty('', 42)" . catch . should_be_a Expression_Error.Argument_Mismatch group_builder.specify "should report issues: floating point equality" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, -2.0]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, -2.0]]] connection=data.connection r1 = t.filter_by_expression "[X] * [X] == 4.0" on_problems=Problem_Behavior.Ignore Problems.assume_no_problems r1 r1.at "ix" . to_vector . should_equal [3, 5] @@ -484,7 +497,7 @@ add_specs suite_builder setup = db_pending = if setup.is_database then "Arithmetic error handling is currently not implemented for the Database backend." group_builder.specify "should report issues: arithmetic error" pending=db_pending <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [2.0, 2.0, 0.0, 1.0, 2.0]]] + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [2.0, 2.0, 0.0, 1.0, 2.0]]] connection=data.connection action = t.filter_by_expression "8.0 / [X] <= 4.0" on_problems=_ tester table = table . at "ix" . to_vector . 
should_equal [1, 2, 5] diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso index b1b4fc1b58ec..cc0ef5f64225 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso @@ -14,15 +14,23 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import expect_column_names, run_default_backend +type Data + Value ~connection + + setup create_connection_fn = + Data.Value (create_connection_fn Nothing) add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func materialize = setup.materialize suite_builder.group prefix+"Table.cross_join" group_builder-> + data = Data.setup create_connection_fn + group_builder.specify "should allow to create a cross product of two tables in the right order" <| - t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] - t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]] + t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] connection=data.connection + t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]] connection=data.connection t3 = t1.cross_join t2 expect_column_names ["X", "Y", "Z", "W"] t3 @@ -41,8 +49,8 @@ add_specs suite_builder setup = False -> r.should_equal expected_rows group_builder.specify "should work correctly with empty tables" <| - t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] - t2 = table_builder [["Z", ['a']], ["W", ['c']]] + t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] connection=data.connection + t2 = table_builder [["Z", ['a']], ["W", ['c']]] connection=data.connection # Workaround to easily create empty table until table builder allows that directly. empty = t2.filter "Z" Filter_Condition.Is_Nothing empty.row_count . should_equal 0 @@ -58,10 +66,10 @@ add_specs suite_builder setup = t4.at "X" . 
to_vector . should_equal [] group_builder.specify "should respect the right row limit" <| - t2 = table_builder [["X", [1, 2]]] - t3 = table_builder [["X", [1, 2, 3]]] - t100 = table_builder [["Y", 0.up_to 100 . to_vector]] - t101 = table_builder [["Y", 0.up_to 101 . to_vector]] + t2 = table_builder [["X", [1, 2]]] connection=data.connection + t3 = table_builder [["X", [1, 2, 3]]] connection=data.connection + t100 = table_builder [["Y", 0.up_to 100 . to_vector]] connection=data.connection + t101 = table_builder [["Y", 0.up_to 101 . to_vector]] connection=data.connection t2.cross_join t100 . row_count . should_equal 200 t101.cross_join t2 . row_count . should_equal 202 @@ -75,8 +83,8 @@ add_specs suite_builder setup = t2.cross_join t3 right_row_limit=2 on_problems=Problem_Behavior.Report_Error . should_fail_with Cross_Join_Row_Limit_Exceeded group_builder.specify "should ensure 1-1 mapping even with duplicate rows" <| - t1 = table_builder [["X", [2, 1, 2, 2]], ["Y", [5, 4, 5, 5]]] - t2 = table_builder [["Z", ['a', 'b', 'a', 'b']]] + t1 = table_builder [["X", [2, 1, 2, 2]], ["Y", [5, 4, 5, 5]]] connection=data.connection + t2 = table_builder [["Z", ['a', 'b', 'a', 'b']]] connection=data.connection t3 = t1.cross_join t2 expect_column_names ["X", "Y", "Z"] t3 @@ -93,7 +101,7 @@ add_specs suite_builder setup = False -> r.should_equal expected_rows group_builder.specify "should allow self-joins" <| - t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] + t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] connection=data.connection t2 = t1.cross_join t1 expect_column_names ["X", "Y", "Right X", "Right Y"] t2 @@ -110,8 +118,8 @@ add_specs suite_builder setup = False -> r.should_equal expected_rows group_builder.specify "should rename columns of the right table to avoid duplicates" <| - t1 = table_builder [["X", [1]], ["Y", [5]], ["Right Y", [10]]] - t2 = table_builder [["X", ['a']], ["Y", ['d']]] + t1 = table_builder [["X", [1]], ["Y", [5]], ["Right Y", [10]]] 
connection=data.connection + t2 = table_builder [["X", ['a']], ["Y", ['d']]] connection=data.connection t3 = t1.cross_join t2 expect_column_names ["X", "Y", "Right Y", "Right X", "Right Y 1"] t3 @@ -127,13 +135,13 @@ add_specs suite_builder setup = expect_column_names ["X", "Y", "Right Y", "X 1", "Y 1"] (t1.cross_join t2 right_prefix="") - t4 = table_builder [["X", [1]], ["Right X", [5]]] + t4 = table_builder [["X", [1]], ["Right X", [5]]] connection=data.connection expect_column_names ["X", "Y", "Right Y", "Right X 1", "Right X"] (t1.cross_join t4) expect_column_names ["X", "Right X", "Right X 1", "Y", "Right Y"] (t4.cross_join t1) group_builder.specify "should respect the column ordering" <| - t1 = table_builder [["X", [100, 2]], ["Y", [4, 5]]] - t2 = table_builder [["Z", ['a', 'b', 'c']], ["W", ['x', 'd', 'd']]] + t1 = table_builder [["X", [100, 2]], ["Y", [4, 5]]] connection=data.connection + t2 = table_builder [["Z", ['a', 'b', 'c']], ["W", ['x', 'd', 'd']]] connection=data.connection t3 = t1.order_by "X" t4 = t2.order_by ([Sort_Column.Name "Z" Sort_Direction.Descending]) @@ -156,12 +164,12 @@ add_specs suite_builder setup = False -> r.should_equal expected_rows group_builder.specify "Cross join is not possible via call to .join" <| - t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] - t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]] + t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] connection=data.connection + t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]] connection=data.connection Test.expect_panic_with (t1.join t2 join_kind=Join_Kind_Cross.Cross on=[]) Type_Error group_builder.specify "should gracefully handle tables from different backends" <| - t1 = table_builder [["A", ["a", "b"]]] + t1 = table_builder [["A", ["a", "b"]]] connection=data.connection alternative_connection = Database.connect (SQLite In_Memory) t0 = (Table.new [["X", [1, 2, 4]], ["Z", [10, 20, 30]]]).select_into_database_table alternative_connection "T0" 
temporary=True diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso index c2e18d3c6e06..c2af9307e06e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso @@ -24,18 +24,37 @@ type My_Type_Comparator Comparable.from (_:My_Type) = My_Type_Comparator + +type Data + Value ~data + + connection self = self.data.at 0 + t1 self = self.data.at 1 + t2 self = self.data.at 2 + t3 self = self.data.at 3 + t4 self = self.data.at 4 + + setup create_connection_fn table_builder = Data.Value <| + connection = create_connection_fn Nothing + t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] connection=connection + t2 = table_builder [["Z", [2, 3, 2, 4]], ["W", [4, 5, 6, 7]]] connection=connection + t3 = table_builder [["X", [1, 1, 1, 2, 2, 2]], ["Y", ["A", "B", "B", "C", "C", "A"]], ["Z", [1, 2, 3, 4, 5, 6]]] connection=connection + t4 = table_builder [["X", [1, 1, 3, 2, 2, 4]], ["Y", ["B", "B", "C", "C", "D", "A"]], ["Z", [1, 2, 3, 4, 5, 6]]] connection=connection + [connection, t1, t2, t3, t4] + + add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func materialize = setup.materialize db_todo = if setup.is_database.not then Nothing else "ToDo: handling NULLs in equality conditions." 
suite_builder.group prefix+"Table.join" group_builder-> - t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] - t2 = table_builder [["Z", [2, 3, 2, 4]], ["W", [4, 5, 6, 7]]] + data = Data.setup create_connection_fn table_builder group_builder.specify "should by default do a Left Outer join on equality of first column in the left table, correlated with column of the same name in the right one" <| - t3 = table_builder [["Z", [4, 5, 6, 7]], ["X", [2, 3, 2, 4]]] - t4 = t1.join t3 |> materialize |> _.order_by ["X", "Z"] + t3 = table_builder [["Z", [4, 5, 6, 7]], ["X", [2, 3, 2, 4]]] connection=data.connection + t4 = data.t1.join t3 |> materialize |> _.order_by ["X", "Z"] expect_column_names ["X", "Y", "Z", "Right X"] t4 t4.at "X" . to_vector . should_equal [1, 2, 2, 3] t4.at "Y" . to_vector . should_equal [4, 5, 5, 6] @@ -43,7 +62,7 @@ add_specs suite_builder setup = t4.at "Z" . to_vector . should_equal [Nothing, 4, 6, 5] group_builder.specify "should allow Inner join" <| - t3 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals 0 0) + t3 = data.t1.join data.t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals 0 0) expect_column_names ["X", "Y", "Z", "W"] t3 t4 = t3 |> materialize |> _.order_by ["X", "W"] t4.at "X" . to_vector . should_equal [2, 2, 3] @@ -52,7 +71,7 @@ add_specs suite_builder setup = t4.at "W" . to_vector . should_equal [4, 6, 5] group_builder.specify "should allow Full join" <| - t3 = t1.join t2 join_kind=Join_Kind.Full on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X", "W"] + t3 = data.t1.join data.t2 join_kind=Join_Kind.Full on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X", "W"] expect_column_names ["X", "Y", "Z", "W"] t3 t3.at "X" . to_vector . should_equal [Nothing, 1, 2, 2, 3] t3.at "Y" . to_vector . should_equal [Nothing, 4, 5, 5, 6] @@ -60,7 +79,7 @@ add_specs suite_builder setup = t3.at "W" . to_vector . 
should_equal [7, Nothing, 4, 6, 5] group_builder.specify "should allow Right Outer join" <| - t5 = t1.join t2 join_kind=Join_Kind.Right_Outer on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X", "W"] + t5 = data.t1.join data.t2 join_kind=Join_Kind.Right_Outer on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X", "W"] expect_column_names ["X", "Y", "Z", "W"] t5 t5.at "X" . to_vector . should_equal [Nothing, 2, 2, 3] t5.at "Y" . to_vector . should_equal [Nothing, 5, 5, 6] @@ -68,18 +87,16 @@ add_specs suite_builder setup = t5.at "W" . to_vector . should_equal [7, 4, 6, 5] group_builder.specify "should allow to perform anti-joins" <| - t6 = t1.join t2 join_kind=Join_Kind.Left_Exclusive on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X"] + t6 = data.t1.join data.t2 join_kind=Join_Kind.Left_Exclusive on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X"] t6.columns.map .name . should_equal ["X", "Y"] t6.at "X" . to_vector . should_equal [1] t6.at "Y" . to_vector . should_equal [4] - t7 = t1.join t2 join_kind=Join_Kind.Right_Exclusive on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["Z"] + t7 = data.t1.join data.t2 join_kind=Join_Kind.Right_Exclusive on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["Z"] t7.columns.map .name . should_equal ["Z", "W"] t7.at "Z" . to_vector . should_equal [4] t7.at "W" . to_vector . should_equal [7] - t3 = table_builder [["X", [1, 1, 1, 2, 2, 2]], ["Y", ["A", "B", "B", "C", "C", "A"]], ["Z", [1, 2, 3, 4, 5, 6]]] - t4 = table_builder [["X", [1, 1, 3, 2, 2, 4]], ["Y", ["B", "B", "C", "C", "D", "A"]], ["Z", [1, 2, 3, 4, 5, 6]]] check_xy_joined r = expect_column_names ["X", "Y", "Z", "Right Z"] r r.at "X" . to_vector . 
should_equal [1, 1, 1, 1, 2, 2] @@ -89,20 +106,20 @@ add_specs suite_builder setup = group_builder.specify "should allow to join on equality of multiple columns and drop redundant columns if Inner join" <| conditions = [Join_Condition.Equals "Y" "Y", Join_Condition.Equals "X" "X"] - r = t3.join t4 join_kind=Join_Kind.Inner on=conditions |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] + r = data.t3.join data.t4 join_kind=Join_Kind.Inner on=conditions |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] check_xy_joined r [Join_Kind.Full, Join_Kind.Left_Outer, Join_Kind.Right_Outer].each kind-> - r2 = t3.join t4 join_kind=kind on=conditions + r2 = data.t3.join data.t4 join_kind=kind on=conditions expect_column_names ["X", "Y", "Z", "Right X", "Right Y", "Right Z"] r2 group_builder.specify "should support same-name column join shorthand" <| - r = t3.join t4 join_kind=Join_Kind.Inner on=["X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] + r = data.t3.join data.t4 join_kind=Join_Kind.Inner on=["X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] check_xy_joined r group_builder.specify "should correctly handle duplicated rows in Equals" <| - t1 = table_builder [["X", [1, 2, 2, 3]]] - t2 = table_builder [["X", [1, 2, 2, 4]]] + t1 = table_builder [["X", [1, 2, 2, 3]]] connection=data.connection + t2 = table_builder [["X", [1, 2, 2, 4]]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Full on="X" . order_by "X" within_table r1 <| # Both 2's from t1 match with _both_ ones from t2 _each_, so in total we get 4 `2` pairs: @@ -110,8 +127,8 @@ add_specs suite_builder setup = r1.at "Right X" . to_vector . 
should_equal [4, 1, 2, 2, 2, 2, Nothing] group_builder.specify "should allow to join on text equality ignoring case" <| - t1 = table_builder [["X", ["a", "B"]], ["Y", [1, 2]]] - t2 = table_builder [["X", ["A", "a", "b"]], ["Z", [1, 2, 3]]] + t1 = table_builder [["X", ["a", "B"]], ["Y", [1, 2]]] connection=data.connection + t2 = table_builder [["X", ["A", "a", "b"]], ["Z", [1, 2, 3]]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Inner expect_column_names ["X", "Y", "Z"] r1 @@ -128,8 +145,8 @@ add_specs suite_builder setup = if setup.test_selection.supports_unicode_normalization then group_builder.specify "should correctly handle Unicode equality" <| - t1 = table_builder [["X", ['s\u0301', 'S\u0301']], ["Y", [1, 2]]] - t2 = table_builder [["X", ['s', 'S', 'ś']], ["Z", [1, 2, 3]]] + t1 = table_builder [["X", ['s\u0301', 'S\u0301']], ["Y", [1, 2]]] connection=data.connection + t2 = table_builder [["X", ['s', 'S', 'ś']], ["Z", [1, 2, 3]]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Inner expect_column_names ["X", "Y", "Z"] r1 @@ -146,8 +163,8 @@ add_specs suite_builder setup = # This may need a test_selection toggle in the future, depending on how well databases like coercing decimals and integers. 
group_builder.specify "should correctly handle Enso Float-Integer equality" <| - t1 = table_builder [["X", [1, 2]], ["Y", [10, 20]]] - t2 = table_builder [["X", [2.0, 2.1, 0.0]], ["Z", [1, 2, 3]]] + t1 = table_builder [["X", [1, 2]], ["Y", [10, 20]]] connection=data.connection + t2 = table_builder [["X", [2.0, 2.1, 0.0]], ["Z", [1, 2, 3]]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Inner expect_column_names ["X", "Y", "Z"] r1 @@ -157,8 +174,8 @@ add_specs suite_builder setup = if setup.supports_custom_objects then group_builder.specify "should allow equality joins for custom objects" <| - t1 = table_builder [["X", [My_Type.Value 1 2, My_Type.Value 2 3]], ["Y", [1, 2]]] - t2 = table_builder [["X", [My_Type.Value 5 0, My_Type.Value 2 1]], ["Z", [10, 20]]] + t1 = table_builder [["X", [My_Type.Value 1 2, My_Type.Value 2 3]], ["Y", [1, 2]]] connection=data.connection + t2 = table_builder [["X", [My_Type.Value 5 0, My_Type.Value 2 1]], ["Z", [10, 20]]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Inner |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "Z"] r1 @@ -172,8 +189,8 @@ add_specs suite_builder setup = r1 . at "Z" . to_vector . should_equal [20, 10] group_builder.specify "should allow range-based joins (using Between) for numbers" <| - t1 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]] - t2 = table_builder [["lower", [1, 10, 8, 12]], ["upper", [1, 12, 30, 0]], ["Z", [1, 2, 3, 4]]] + t1 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]] connection=data.connection + t2 = table_builder [["lower", [1, 10, 8, 12]], ["upper", [1, 12, 30, 0]], ["Z", [1, 2, 3, 4]]] connection=data.connection r1 = t1.join join_kind=Join_Kind.Inner t2 on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["X", "Z"] r1.column_names . should_equal ["X", "Y", "lower", "upper", "Z"] @@ -184,8 +201,8 @@ add_specs suite_builder setup = r1 . at "Z" . to_vector . 
should_equal [1, 2, 3, 2, 3] group_builder.specify "should allow range-based joins (using Between) for text" <| - t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]]] - t2 = table_builder [["lower", ["a", "b"]], ["upper", ["a", "ccc"]], ["Z", [10, 20]]] + t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]]] connection=data.connection + t2 = table_builder [["lower", ["a", "b"]], ["upper", ["a", "ccc"]], ["Z", [10, 20]]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["X", "Z"] r1.column_names . should_equal ["X", "Y", "lower", "upper", "Z"] @@ -201,8 +218,8 @@ add_specs suite_builder setup = # 3. empty bounds (lower > upper: 10 > 0) # 4. equal bounds (10 = 10) # 5. unmatched rows on both sides - Full join - t1 = table_builder [["X", [1, 10, 20, 1, 2, 1, 1]], ["id", [1, 2, 3, 4, 5, 7, 7]]] - t2 = table_builder [["lower", [0, 10, 10]], ["upper", [3, 10, 0]], ["Z", ['a', 'b', 'c']]] + t1 = table_builder [["X", [1, 10, 20, 1, 2, 1, 1]], ["id", [1, 2, 3, 4, 5, 7, 7]]] connection=data.connection + t2 = table_builder [["lower", [0, 10, 10]], ["upper", [3, 10, 0]], ["Z", ['a', 'b', 'c']]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Full on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["Z", "id"] within_table r1 <| r1.column_names . should_equal ["X", "id", "lower", "upper", "Z"] @@ -223,15 +240,15 @@ add_specs suite_builder setup = xs = [0, 0, 1, 1, 1, 1, 2, 2, 3, 3, 3, 4, 4, 4] ys = [1, 2, 3, 1, 9, 2, 3, 2, 4, 2, 1, 1, 1, 2] pts = xs.zip ys . 
take (Index_Sub_Range.Sample xs.length seed=42) - t1 = table_builder [["X", pts.map .first], ["Y", pts.map .second]] + t1 = table_builder [["X", pts.map .first], ["Y", pts.map .second]] connection=data.connection - t2 = table_builder [["lx", [1]], ["ux", [3]], ["ly", [1]], ["uy", [2]]] + t2 = table_builder [["lx", [1]], ["ux", [3]], ["ly", [1]], ["uy", [2]]] connection=data.connection r2 = t1.join t2 join_kind=Join_Kind.Inner on=[Join_Condition.Between "X" "lx" "ux", Join_Condition.Between "Y" "ly" "uy"] |> materialize |> _.order_by ["X", "Y"] within_table r2 <| r2.at "X" . to_vector . should_equal [1, 1, 2, 3, 3] r2.at "Y" . to_vector . should_equal [1, 2, 2, 1, 2] - t3 = table_builder [["lx", [1.9]], ["ux", [3]], ["ly", [1]], ["uy", [2]]] + t3 = table_builder [["lx", [1.9]], ["ux", [3]], ["ly", [1]], ["uy", [2]]] connection=data.connection r3 = t1.join t3 join_kind=Join_Kind.Inner on=[Join_Condition.Between "X" "lx" "ux", Join_Condition.Between "Y" "ly" "uy"] |> materialize |> _.order_by ["X", "Y"] within_table r3 <| r3.at "X" . to_vector . should_equal [2, 3, 3] @@ -239,8 +256,8 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle Between edge cases (3)" <| # 7. duplicated rows on both sides - t1 = table_builder [["X", [10, 20, 20]]] - t2 = table_builder [["low", [15, 15]], ["high", [30, 30]]] + t1 = table_builder [["X", [10, 20, 20]]] connection=data.connection + t2 = table_builder [["low", [15, 15]], ["high", [30, 30]]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Right_Outer on=(Join_Condition.Between "X" "low" "high") within_table r1 <| r1.at "X" . to_vector . 
should_equal [20, 20, 20, 20] @@ -255,8 +272,8 @@ add_specs suite_builder setup = if setup.test_selection.supports_unicode_normalization then group_builder.specify "should allow range-based joins (using Between) for text with Unicode normalization" <| - t1 = table_builder [["X", ['s\u0301', 's']], ["Y", [1, 2]]] - t2 = table_builder [["lower", ['s', 'ś']], ["upper", ['sa', 'ś']], ["Z", [10, 20]]] + t1 = table_builder [["X", ['s\u0301', 's']], ["Y", [1, 2]]] connection=data.connection + t2 = table_builder [["lower", ['s', 'ś']], ["upper", ['sa', 'ś']], ["Z", [10, 20]]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "lower", "upper", "Z"] r1 @@ -268,8 +285,8 @@ add_specs suite_builder setup = if setup.supports_custom_objects then group_builder.specify "should allow range-based joins (using Between) for custom objects" <| - t1 = table_builder [["X", [My_Type.Value 20 30, My_Type.Value 1 2]], ["Y", [1, 2]]] - t2 = table_builder [["lower", [My_Type.Value 3 0, My_Type.Value 10 10]], ["upper", [My_Type.Value 2 1, My_Type.Value 100 0]], ["Z", [10, 20]]] + t1 = table_builder [["X", [My_Type.Value 20 30, My_Type.Value 1 2]], ["Y", [1, 2]]] connection=data.connection + t2 = table_builder [["lower", [My_Type.Value 3 0, My_Type.Value 10 10]], ["upper", [My_Type.Value 2 1, My_Type.Value 100 0]], ["Z", [10, 20]]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["Z"] expect_column_names ["X", "Y", "lower", "upper", "Z"] r1 @@ -280,8 +297,8 @@ add_specs suite_builder setup = r1 . at "Z" . to_vector . 
should_equal [10, 20] group_builder.specify "should allow to mix join conditions of various kinds" <| - t1 = table_builder [["X", [1, 12, 12, 0]], ["Y", [1, 2, 3, 4]], ["Z", ["a", "A", "a", "ą"]], ["W", [1, 2, 3, 4]]] - t2 = table_builder [["X", [12, 12, 1]], ["l", [0, 100, 100]], ["u", [10, 100, 200]], ["Z", ["A", "A", "A"]], ["W'", [10, 20, 30]]] + t1 = table_builder [["X", [1, 12, 12, 0]], ["Y", [1, 2, 3, 4]], ["Z", ["a", "A", "a", "ą"]], ["W", [1, 2, 3, 4]]] connection=data.connection + t2 = table_builder [["X", [12, 12, 1]], ["l", [0, 100, 100]], ["u", [10, 100, 200]], ["Z", ["A", "A", "A"]], ["W'", [10, 20, 30]]] connection=data.connection conditions = [Join_Condition.Between "Y" "l" "u", Join_Condition.Equals_Ignore_Case "Z" "Z", Join_Condition.Equals "X" "X"] r1 = t1.join t2 join_kind=Join_Kind.Inner on=conditions |> materialize |> _.order_by ["Y"] @@ -314,19 +331,19 @@ add_specs suite_builder setup = r3.at "W'" . to_vector . should_equal [20, 30] group_builder.specify "should work fine if the same condition is specified multiple times" <| - r = t3.join t4 join_kind=Join_Kind.Inner on=["X", "X", "Y", "X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] + r = data.t3.join data.t4 join_kind=Join_Kind.Inner on=["X", "X", "Y", "X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] check_xy_joined r - t5 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]] - t6 = table_builder [["lower", [1, 10, 8, 12]], ["upper", [1, 12, 30, 0]], ["Z", [1, 2, 3, 4]]] + t5 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]] connection=data.connection + t6 = table_builder [["lower", [1, 10, 8, 12]], ["upper", [1, 12, 30, 0]], ["Z", [1, 2, 3, 4]]] connection=data.connection r1 = t5.join t6 join_kind=Join_Kind.Inner on=[Join_Condition.Between "X" "lower" "upper", Join_Condition.Between "X" "lower" "upper", Join_Condition.Between "X" "lower" "upper"] |> materialize |> _.order_by ["X", "Z"] r1 . at "X" . to_vector . should_equal [1, 10, 10, 12, 12] r1 . 
at "Y" . to_vector . should_equal [1, 2, 2, 3, 3] r1 . at "Z" . to_vector . should_equal [1, 2, 3, 2, 3] - t7 = table_builder [["X", ["a", "B"]], ["Y", [1, 2]]] - t8 = table_builder [["X", ["A", "a", "b"]], ["Z", [1, 2, 3]]] + t7 = table_builder [["X", ["a", "B"]], ["Y", [1, 2]]] connection=data.connection + t8 = table_builder [["X", ["A", "a", "b"]], ["Z", [1, 2, 3]]] connection=data.connection r2 = t7.join t8 join_kind=Join_Kind.Inner on=[Join_Condition.Equals_Ignore_Case "X", Join_Condition.Equals_Ignore_Case "X", Join_Condition.Equals_Ignore_Case "X" "X"] |> materialize |> _.order_by ["Z"] r2 . at "X" . to_vector . should_equal ["a", "a", "B"] @@ -334,7 +351,7 @@ add_specs suite_builder setup = r2 . at "Z" . to_vector . should_equal [1, 2, 3] group_builder.specify "should correctly handle joining a table with itself" <| - t1 = table_builder [["X", [0, 1, 2, 3, 2]], ["Y", [1, 2, 3, 4, 100]], ["A", ["B", "C", "D", "E", "X"]]] + t1 = table_builder [["X", [0, 1, 2, 3, 2]], ["Y", [1, 2, 3, 4, 100]], ["A", ["B", "C", "D", "E", "X"]]] connection=data.connection t2 = t1.join t1 join_kind=Join_Kind.Inner on=(Join_Condition.Equals left="X" right="Y") |> materialize |> _.order_by ["X", "Y"] expect_column_names ["X", "Y", "A", "Right X", "Right Y", "Right A"] t2 @@ -356,7 +373,7 @@ add_specs suite_builder setup = t3.at "Right X" . to_vector . should_equal [2, 3, Nothing, 0, 1, 1, 2] t3.at "Right A" . to_vector . 
should_equal ["X", "E", Nothing, "B", "C", "C", "D"] - t4 = table_builder [["X", [Nothing, "a", "B"]], ["Y", ["ą", "b", Nothing]], ["Z", [1, 2, 3]]] + t4 = table_builder [["X", [Nothing, "a", "B"]], ["Y", ["ą", "b", Nothing]], ["Z", [1, 2, 3]]] connection=data.connection t5 = t4.join t4 join_kind=Join_Kind.Inner on=(Join_Condition.Equals_Ignore_Case left="Y" right="X") |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "Z", "Right X", "Right Y", "Right Z"] t5 # TODO enable once we handle nothing properly @@ -369,8 +386,8 @@ add_specs suite_builder setup = # t5.at "Right Z" . to_vector . should_equal [1, 3] group_builder.specify "should gracefully handle unmatched columns in Join_Conditions" <| - t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]] - t2 = table_builder [["Z", [2, 1]], ["W", [5, 6]]] + t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]] connection=data.connection + t2 = table_builder [["Z", [2, 1]], ["W", [5, 6]]] connection=data.connection # Report error if the default fails - the right table does not have a column with same name as first column of left one: r1 = t1.join t2 @@ -391,8 +408,8 @@ add_specs suite_builder setup = r3.catch.to_display_text.should_equal "The criteria 'baz', 42 (index), -3 (index) did not match any columns in the right table." 
group_builder.specify "should report Invalid_Value_Type if non-text columns are provided to Equals_Ignore_Case" <| - t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]] - t2 = table_builder [["Z", ["1", "2", "c"]], ["W", [1, 2, 3]]] + t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]] connection=data.connection + t2 = table_builder [["Z", ["1", "2", "c"]], ["W", [1, 2, 3]]] connection=data.connection test result = result.should_fail_with Invalid_Value_Type @@ -404,23 +421,23 @@ add_specs suite_builder setup = t1.join t2 on=(Join_Condition.Equals_Ignore_Case "Y" "Z") on_problems=Problem_Behavior.Ignore group_builder.specify "should report Invalid_Value_Type if incompatible types are correlated" <| - t1 = table_builder [["X", ["1", "2", "c"]]] - t2 = table_builder [["X", [1, 2, 3]]] + t1 = table_builder [["X", ["1", "2", "c"]]] connection=data.connection + t2 = table_builder [["X", [1, 2, 3]]] connection=data.connection r1 = t1.join t2 on_problems=Problem_Behavior.Ignore r1.should_fail_with Invalid_Value_Type group_builder.specify "should report Invalid_Value_Type if incompatible columns types are correlated in Between" <| - t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]] - t2 = table_builder [["Z", ["1", "2", "c"]], ["W", [1, 2, 3]]] + t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]] connection=data.connection + t2 = table_builder [["Z", ["1", "2", "c"]], ["W", [1, 2, 3]]] connection=data.connection t1.join t2 on=(Join_Condition.Between "X" "W" "W") . should_fail_with Invalid_Value_Type t1.join t2 on=(Join_Condition.Between "Y" "W" "Z") . should_fail_with Invalid_Value_Type t1.join t2 on=(Join_Condition.Between "Y" "Z" "W") . 
should_fail_with Invalid_Value_Type group_builder.specify "should warn when joining on equality of Float columns" <| - t1 = table_builder [["X", [1.5, 2.0, 2.00000000001]], ["Y", [10, 20, 30]]] - t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]] + t1 = table_builder [["X", [1.5, 2.0, 2.00000000001]], ["Y", [10, 20, 30]]] connection=data.connection + t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]] connection=data.connection action1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Z") on_problems=_ tester1 table = @@ -449,8 +466,8 @@ add_specs suite_builder setup = Problems.assume_no_problems r2 if setup.supports_custom_objects then - t1 = table_builder [["X", [My_Type.Value 1 2, 2.0, 2]], ["Y", [10, 20, 30]]] - t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]] + t1 = table_builder [["X", [My_Type.Value 1 2, 2.0, 2]], ["Y", [10, 20, 30]]] connection=data.connection + t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]] connection=data.connection r3 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Z") on_problems=Problem_Behavior.Report_Warning r3.column_names.should_equal ["X", "Y", "Z", "W"] r4 = r3.order_by ["Y", "W"] @@ -462,8 +479,8 @@ add_specs suite_builder setup = Problems.get_attached_warnings r3 . 
should_contain_the_same_elements_as expected_problems group_builder.specify "should correctly handle nulls in equality conditions" pending=db_todo <| - t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]] - t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]] + t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]] connection=data.connection + t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Inner |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "Z"] r1 @@ -472,8 +489,8 @@ add_specs suite_builder setup = r1.at "Z" . to_vector . should_equal [20, 30, 10, 20, 30] group_builder.specify "should correctly handle nulls in case-insensitive equality conditions" pending=db_todo <| - t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]] - t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]] + t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]] connection=data.connection + t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals_Ignore_Case "X") |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "Right X", "Z"] r1 @@ -483,8 +500,8 @@ add_specs suite_builder setup = r1.at "Z" . to_vector . 
should_equal [10, 20, 30, 10, 20, 30] group_builder.specify "should correctly handle nulls in Between conditions" <| - t1 = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [0, 1, 2, 3]]] - t2 = table_builder [["l", [Nothing, 0, 1]], ["u", [100, 10, Nothing]], ["Z", [10, 20, 30]]] + t1 = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [0, 1, 2, 3]]] connection=data.connection + t2 = table_builder [["l", [Nothing, 0, 1]], ["u", [100, 10, Nothing]], ["Z", [10, 20, 30]]] connection=data.connection r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Between "X" "l" "u") |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "l", "u", "Z"] r1 @@ -495,8 +512,8 @@ add_specs suite_builder setup = r1.at "Z" . to_vector . should_equal [20, 20] group_builder.specify "should rename columns of the right table to avoid duplicates" <| - t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]], ["Right Y", [5, 6]]] - t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]]] + t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]], ["Right Y", [5, 6]]] connection=data.connection + t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]]] connection=data.connection t3 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Y") |> materialize |> _.order_by ["Right X"] Problems.get_attached_warnings t3 . should_equal [Duplicate_Output_Column_Names.Error ["Right Y"]] @@ -511,8 +528,8 @@ add_specs suite_builder setup = err1.should_fail_with Duplicate_Output_Column_Names err1.catch.column_names . 
should_equal ["Right Y"] - t4 = table_builder [["Right X", [1, 1]], ["X", [1, 2]], ["Y", [3, 4]], ["Right Y 2", [2, 2]]] - t5 = table_builder [["Right X", [2, 1]], ["X", [2, 2]], ["Y", [2, 2]], ["Right Y", [2, 2]], ["Right Y 1", [2, 2]], ["Right Y 4", [2, 2]]] + t4 = table_builder [["Right X", [1, 1]], ["X", [1, 2]], ["Y", [3, 4]], ["Right Y 2", [2, 2]]] connection=data.connection + t5 = table_builder [["Right X", [2, 1]], ["X", [2, 2]], ["Y", [2, 2]], ["Right Y", [2, 2]], ["Right Y 1", [2, 2]], ["Right Y 4", [2, 2]]] connection=data.connection t6 = t4.join t5 on=(Join_Condition.Equals "X" "Y") t6.column_names.should_equal ["Right X", "X", "Y", "Right Y 2"]+["Right Right X", "Right X 1", "Right Y 3", "Right Y", "Right Y 1", "Right Y 4"] @@ -531,8 +548,8 @@ add_specs suite_builder setup = t8.column_names.should_equal ["X", "Y", "Right Y", "PY"] group_builder.specify "should warn about renamed columns" <| - t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]] - t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]], ["Right Y", [2, 44]]] + t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]] connection=data.connection + t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]], ["Right Y", [2, 44]]] connection=data.connection action1 = t1.join t2 on=(Join_Condition.Equals "X" "Y") on_problems=_ tester1 table = @@ -548,12 +565,12 @@ add_specs suite_builder setup = group_builder.specify "should pass dataflow errors through" <| error = Error.throw (Illegal_State.Error "FOO") - t1.join error . should_fail_with Illegal_State - t1.join t2 on=[error, "X"] . should_fail_with Illegal_State + data.t1.join error . should_fail_with Illegal_State + data.t1.join data.t2 on=[error, "X"] . 
should_fail_with Illegal_State group_builder.specify "should correctly handle all null rows" pending=db_todo <| - t1 = table_builder [["A", [Nothing, 2, Nothing, 1]], ["B", [Nothing, 3, 4, 7]]] - t2 = table_builder [["C", [Nothing, 2, Nothing, 4]], ["D", [Nothing, 5, 6, Nothing]]] + t1 = table_builder [["A", [Nothing, 2, Nothing, 1]], ["B", [Nothing, 3, 4, 7]]] connection=data.connection + t2 = table_builder [["C", [Nothing, 2, Nothing, 4]], ["D", [Nothing, 5, 6, Nothing]]] connection=data.connection t3 = t1.join t2 on=[Join_Condition.Equals "A" "C"] join_kind=Join_Kind.Inner expect_column_names ["A", "B", "C", "D"] t3 @@ -615,8 +632,8 @@ add_specs suite_builder setup = t6.at "C" . to_vector . should_equal [4] t6.at "D" . to_vector . should_equal [Nothing] - t7 = table_builder [["A", [Nothing, 2]], ["B", [Nothing, 3]]] - t8 = table_builder [["C", [2, 3]], ["D", [4, 5]]] + t7 = table_builder [["A", [Nothing, 2]], ["B", [Nothing, 3]]] connection=data.connection + t8 = table_builder [["C", [2, 3]], ["D", [4, 5]]] connection=data.connection t9 = t7.join t8 on=[Join_Condition.Equals "A" "C"] join_kind=Join_Kind.Inner r9 = materialize t9 . order_by ["A", "B", "D"] . rows . map .to_vector @@ -659,9 +676,9 @@ add_specs suite_builder setup = r12.at 0 . should_equal [3, 5] group_builder.specify "should work correctly when composing multiple join operations" <| - ta = table_builder [["id", [0, 1]], ["name", ["Foo", "X"]]] - tb = table_builder [["id", [2, 0]], ["name", ["Bar", "Y"]]] - tc = table_builder [["id_a", [0, 1]], ["id_b", [0, 2]]] + ta = table_builder [["id", [0, 1]], ["name", ["Foo", "X"]]] connection=data.connection + tb = table_builder [["id", [2, 0]], ["name", ["Bar", "Y"]]] connection=data.connection + tc = table_builder [["id_a", [0, 1]], ["id_b", [0, 2]]] connection=data.connection res = (tc.join ta on=(Join_Condition.Equals "id_a" "id")) . 
join tb on=(Join_Condition.Equals "id_b" "id") right_prefix="b_" sel = res.select_columns ["name", "b_name"] @@ -671,7 +688,7 @@ add_specs suite_builder setup = r.at 1 . should_equal ["X", "Bar"] group_builder.specify "should work correctly when the join is performed on a transformed table" <| - t1 = table_builder [["X", [1, 2, 3]]] + t1 = table_builder [["X", [1, 2, 3]]] connection=data.connection t1_2 = t1.set "10*[X]+1" new_name="A" t1_3 = t1.set "[X]+20" new_name="B" @@ -682,8 +699,8 @@ add_specs suite_builder setup = t2.at "B" . to_vector . should_equal [21] t2.at "Right X" . to_vector . should_equal [1] - t4 = table_builder [["X", [1, 2, 3]], ["Y", [10, 20, 30]]] - t5 = table_builder [["X", [5, 7, 1]], ["Z", [100, 200, 300]]] + t4 = table_builder [["X", [1, 2, 3]], ["Y", [10, 20, 30]]] connection=data.connection + t5 = table_builder [["X", [5, 7, 1]], ["Z", [100, 200, 300]]] connection=data.connection t4_2 = t4.set "2*[X]+1" new_name="C" t6 = t4_2.join t5 on=(Join_Condition.Equals "C" "X") join_kind=Join_Kind.Inner @@ -694,8 +711,8 @@ add_specs suite_builder setup = r2.at 1 . should_equal [3, 30, 7, 7, 200] group_builder.specify "should allow full joins with more complex join conditions" <| - t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [10, 20, 30]]] - t2 = table_builder [["X", ["Ć", "A", "b"]], ["Z", [100, 200, 300]]] + t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [10, 20, 30]]] connection=data.connection + t2 = table_builder [["X", ["Ć", "A", "b"]], ["Z", [100, 200, 300]]] connection=data.connection t3 = t1.join t2 on=(Join_Condition.Equals_Ignore_Case "X") join_kind=Join_Kind.Full expect_column_names ["X", "Y", "Right X", "Z"] t3 @@ -706,8 +723,8 @@ add_specs suite_builder setup = r.at 2 . should_equal ["b", 20, "b", 300] r.at 3 . 
should_equal ["c", 30, Nothing, Nothing] - t4 = table_builder [["X", [1, 2, 3]], ["Y", [10, 20, 30]]] - t5 = table_builder [["X", [5, 7, 1]], ["Z", [100, 200, 300]]] + t4 = table_builder [["X", [1, 2, 3]], ["Y", [10, 20, 30]]] connection=data.connection + t5 = table_builder [["X", [5, 7, 1]], ["Z", [100, 200, 300]]] connection=data.connection t4_2 = t4.set "2*[X]+1" new_name="C" t6 = t4_2.join t5 on=(Join_Condition.Equals "C" "X") join_kind=Join_Kind.Full @@ -719,7 +736,7 @@ add_specs suite_builder setup = r2.at 2 . should_equal [2, 20, 5, 5, 100] r2.at 3 . should_equal [3, 30, 7, 7, 200] - t4_3 = table_builder [["X", [Nothing, 2, 3]], ["Y", [10, 20, 30]]] + t4_3 = table_builder [["X", [Nothing, 2, 3]], ["Y", [10, 20, 30]]] connection=data.connection t4_4 = t4_3.set (t4_3.at "X" . fill_nothing 7) new_name="C" t7 = t4_4.join t5 on=(Join_Condition.Equals "C" "X") join_kind=Join_Kind.Full within_table t7 <| @@ -732,7 +749,7 @@ add_specs suite_builder setup = r3.at 3 . should_equal [2, 20, 2, Nothing, Nothing] r3.at 4 . should_equal [3, 30, 3, Nothing, Nothing] - t8 = table_builder [["X", [2, 99]], ["Y", [20, 99]], ["C", [5, 99]]] + t8 = table_builder [["X", [2, 99]], ["Y", [20, 99]], ["C", [5, 99]]] connection=data.connection t9 = t4_2.join t8 join_kind=Join_Kind.Full on=["X", "Y", "C"] within_table t9 <| t9.column_names . 
should_equal ["X", "Y", "C", "Right X", "Right Y", "Right C"] @@ -747,5 +764,5 @@ add_specs suite_builder setup = alternative_connection = Database.connect (SQLite In_Memory) t0 = (Table.new [["X", [1, 2, 4]], ["Z", [10, 20, 30]]]).select_into_database_table alternative_connection "T0" temporary=True - r1 = t1.join t0 + r1 = data.t1.join t0 r1.should_fail_with Illegal_Argument diff --git a/test/Table_Tests/src/Common_Table_Operations/Main.enso b/test/Table_Tests/src/Common_Table_Operations/Main.enso index cc5e225fee34..a44e299822ce 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Main.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Main.enso @@ -44,7 +44,8 @@ type Test_Setup table using the tested backend. - table_builder: A function used to build a table using the tested backend from a vector of columns represented as pairs of name and - vector of values. + vector of values. The second optional argument is connection or Nothing. + An example signature of the method is `table_builder cols connection=Nothing`. - materialize: A helper function which materializes a table from the tested backend as an in-memory table. Used to easily inspect results of a particular query/operation. @@ -57,7 +58,7 @@ type Test_Setup support particular features. - create_connection_func: A function that takes Nothing and creates a related database connection or Nothing for in-memory tests. - Config prefix table_fn empty_table_fn table_builder materialize is_database test_selection aggregate_test_selection create_connection_func + Config prefix table_fn empty_table_fn (table_builder : (Vector Any -> (Any|Nothing)) -> Any) materialize is_database test_selection aggregate_test_selection create_connection_func ## Specifies if the given Table backend supports custom Enso types. 
diff --git a/test/Table_Tests/src/Common_Table_Operations/Util.enso b/test/Table_Tests/src/Common_Table_Operations/Util.enso index 2848feb15bb6..8f863a75feda 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Util.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Util.enso @@ -11,6 +11,7 @@ expect_column_names names table = context of a specific backend. However, for the purpose of testing we provide a shortcut that allows to run these tests with the in-memory backend. run_default_backend suite = + _ = suite Panic.throw "Unimplemented" ## Adds a clue which will display the provided table next to the failed test diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 066ebc4aa5df..896d7c2baf18 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -272,8 +272,10 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup = sqlite_spec suite_builder prefix create_connection_func = name_counter = Ref.new 0 - table_builder columns = - connection = create_connection_func Nothing + # The default `connection` parameter always create a new connection. + # In some tests, for example, where we are joining tables, we have to specify + # exactly the same connection. + table_builder columns connection=(create_connection_func Nothing) = ix = name_counter.get name_counter . 
put ix+1 name = Name_Generator.random_name "table_"+ix.to_text From 43b22a023c46214c866dfb4678e993381b8ac719 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 13:15:28 +0100 Subject: [PATCH 38/93] Implement Util.run_default_backend Inspired by the previous version --- .../Column_Operations_Spec.enso | 2 ++ .../src/Common_Table_Operations/Util.enso | 34 +++++++++++++++++-- 2 files changed, 33 insertions(+), 3 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso index 06f632df1a4e..5de441273ec5 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso @@ -20,6 +20,8 @@ import enso_dev.Tests.Data.Round_Spec from project.Common_Table_Operations.Util import run_default_backend +main = run_default_backend add_specs + add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder diff --git a/test/Table_Tests/src/Common_Table_Operations/Util.enso b/test/Table_Tests/src/Common_Table_Operations/Util.enso index 8f863a75feda..d7d51485f420 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Util.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Util.enso @@ -1,8 +1,12 @@ from Standard.Base import all +from Standard.Table import Table from Standard.Test_New import all import project.In_Memory.Common_Spec as In_Memory_Table_Spec +import project.Common_Table_Operations.Main.Test_Setup +import project.Common_Table_Operations.Main.Test_Selection +import project.Common_Table_Operations.Aggregate_Spec expect_column_names names table = table.columns . map .name . should_equal names frames_to_skip=2 @@ -10,9 +14,33 @@ expect_column_names names table = ## These tests are parametrized by various backends and so they should be run in context of a specific backend. 
However, for the purpose of testing we provide a shortcut that allows to run these tests with the in-memory backend. -run_default_backend suite = - _ = suite - Panic.throw "Unimplemented" + + Arguments: + - add_specs: A function that takes two parameters: a suite builder and Test_Setup. + And adds test specs to the suite builder. +run_default_backend add_specs = + selection = Test_Selection.Config supports_case_sensitive_columns=True order_by=True natural_ordering=True case_insensitive_ordering=True order_by_unicode_normalization_by_default=True supports_unicode_normalization=True supports_time_duration=True supports_nanoseconds_in_time=True supports_mixed_columns=True fixed_length_text_columns=True supports_8bit_integer=True + aggregate_selection = Aggregate_Spec.Test_Selection.Config + + table_fn _ = (enso_project.data / "data.csv") . read + + empty_table_fn _ = + table = table_fn Nothing + table.take 0 + + materialize = x->x + + table_builder cols connection=Nothing = + _ = connection + Table.new cols + + create_connection_func _ = Nothing + + setup = Test_Setup.Config "[In-Memory] " table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func + suite = Test.build suite_builder-> + add_specs suite_builder setup + suite.run_with_filter + ## Adds a clue which will display the provided table next to the failed test description. 
From 602d3820ace20e9f88af2136a1b8383e51b752d9 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 13:26:25 +0100 Subject: [PATCH 39/93] Fix Date_Time_Spec --- .../Date_Time_Spec.enso | 84 +++++++++++-------- 1 file changed, 50 insertions(+), 34 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso index 1b7f0b0c8299..3f1b3945c22f 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso @@ -8,13 +8,30 @@ from Standard.Database.Errors import Unsupported_Database_Operation from Standard.Test_New import all - from project.Common_Table_Operations.Util import all +main = run_default_backend add_specs + +type Data + Value ~data + + connection self = self.data.at 0 + dates self = self.data.at 1 + times self = self.data.at 2 + datetimes self = self.data.at 3 + + setup create_connection_fn table_builder = Data.Value <| + connection = create_connection_fn Nothing + dates = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing]], ["X", [2020, 29, 1, 100]]] connection=connection + times = table_builder [["A", [Time_Of_Day.new 23 59 59 millisecond=567 nanosecond=123, Time_Of_Day.new 2 30 44 nanosecond=1002000, Time_Of_Day.new 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]] connection=connection + datetimes = table_builder [["A", [Date_Time.new 2020 12 31 23 59 59 millisecond=567 nanosecond=123, Date_Time.new 2024 2 29 2 30 44 nanosecond=1002000, Date_Time.new 1990 1 1 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]] connection=connection + [connection, dates, times, datetimes] + add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend." 
@@ -49,11 +66,10 @@ add_specs suite_builder setup = table.at "X" . to_vector . should_equal xs suite_builder.group prefix+"Date-Time operations" pending=pending_datetime group_builder-> - dates = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing]], ["X", [2020, 29, 1, 100]]] - times = table_builder [["A", [Time_Of_Day.new 23 59 59 millisecond=567 nanosecond=123, Time_Of_Day.new 2 30 44 nanosecond=1002000, Time_Of_Day.new 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]] - datetimes = table_builder [["A", [Date_Time.new 2020 12 31 23 59 59 millisecond=567 nanosecond=123, Date_Time.new 2024 2 29 2 30 44 nanosecond=1002000, Date_Time.new 1990 1 1 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]] + data = Data.setup create_connection_fn table_builder + group_builder.specify "should allow to get the year/month/day of a Date" <| - t = dates + t = data.dates a = t.at "A" a.year . to_vector . should_equal [2020, 2024, 1990, Nothing] a.month . to_vector . should_equal [12, 2, 1, Nothing] @@ -68,7 +84,7 @@ add_specs suite_builder setup = ((a.day) == (t.at "X")).to_vector . should_equal [False, True, True, Nothing] group_builder.specify "should allow to get the year/month/day of a Date_Time" <| - t = datetimes + t = data.datetimes a = t.at "A" a.year . to_vector . should_equal [2020, 2024, 1990, Nothing] a.month . to_vector . should_equal [12, 2, 1, Nothing] @@ -82,14 +98,14 @@ add_specs suite_builder setup = ((a.day) == (t.at "X")).to_vector . 
should_equal [False, True, True, Nothing] group_builder.specify "should allow to evaluate expressions with year/month/day" <| - t = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing]], ["X", [0, 2, 1, 100]], ["B", [Date_Time.new 2020 10 31 23 59 59, Date_Time.new 2024 4 29 2 30 44, Date_Time.new 1990 10 1 0 0 0, Nothing]]] + t = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing]], ["X", [0, 2, 1, 100]], ["B", [Date_Time.new 2020 10 31 23 59 59, Date_Time.new 2024 4 29 2 30 44, Date_Time.new 1990 10 1 0 0 0, Nothing]]] connection=data.connection c = t.evaluate_expression "year([A]) + [X] + day([A]) * month([B])" Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <| c.value_type.is_integer.should_be_true c.to_vector . should_equal [(2020 + 0 + 31 * 10), (2024 + 2 + 29 * 4), (1990 + 1 + 1 * 10), Nothing] group_builder.specify "should allow to get hour/minute/second of a Time_Of_Day" <| - a = times.at "A" + a = data.times.at "A" a.hour . to_vector . should_equal [23, 2, 0, Nothing] a.minute . to_vector . should_equal [59, 30, 0, Nothing] a.second . to_vector . should_equal [59, 44, 0, Nothing] @@ -103,7 +119,7 @@ add_specs suite_builder setup = c.value_type.is_integer.should_be_true group_builder.specify "should allow to get hour/minute/second of a Date_Time" <| - a = datetimes.at "A" + a = data.datetimes.at "A" a.hour . to_vector . should_equal [23, 2, 0, Nothing] a.minute . to_vector . should_equal [59, 30, 0, Nothing] a.second . to_vector . should_equal [59, 44, 0, Nothing] @@ -117,7 +133,7 @@ add_specs suite_builder setup = c.value_type.is_integer.should_be_true group_builder.specify "should allow to get millisecond/nanosecond of Time_Of_Day through date_part" <| - a = times.at "A" + a = data.times.at "A" a.date_part Time_Period.Second . to_vector . should_equal [59, 44, 0, Nothing] a.date_part Time_Period.Millisecond . 
to_vector . should_equal [567, 1, 0, Nothing] a.date_part Time_Period.Microsecond . to_vector . should_equal [0, 2, 0, Nothing] @@ -132,7 +148,7 @@ add_specs suite_builder setup = c.value_type.is_integer.should_be_true group_builder.specify "should allow to get week/quarter of Date through date_part" <| - a = dates.at "A" + a = data.dates.at "A" a.date_part Date_Period.Quarter . to_vector . should_equal [4, 1, 1, Nothing] a.date_part Date_Period.Week . to_vector . should_equal [53, 9, 1, Nothing] @@ -141,7 +157,7 @@ add_specs suite_builder setup = c.value_type.is_integer.should_be_true group_builder.specify "should allow to get various date_part of Date_Time" <| - a = datetimes.at "A" + a = data.datetimes.at "A" a.date_part Date_Period.Quarter . to_vector . should_equal [4, 1, 1, Nothing] a.date_part Date_Period.Week . to_vector . should_equal [53, 9, 1, Nothing] a.date_part Time_Period.Millisecond . to_vector . should_equal [567, 1, 0, Nothing] @@ -158,7 +174,7 @@ add_specs suite_builder setup = group_builder.specify "should allow to compare dates" <| - t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date.new 2021 12 5]]] + t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date.new 2021 12 5]]] connection=data.connection [(<), (<=), (>), (>=), (==), (!=)].each op-> op (t.at "X") (t.at "Y") . value_type . should_equal Value_Type.Boolean @@ -166,7 +182,7 @@ add_specs suite_builder setup = op (t.at "X") (Date.new 2021 12 4) . to_vector . should_succeed group_builder.specify "should allow to compare date-times" <| - t = table_builder [["X", [Date_Time.new 2021 12 3 12 30 0]], ["Y", [Date_Time.new 2021 12 5 12 30 0]]] + t = table_builder [["X", [Date_Time.new 2021 12 3 12 30 0]], ["Y", [Date_Time.new 2021 12 5 12 30 0]]] connection=data.connection [(<), (<=), (>), (>=), (==), (!=)].each op-> op (t.at "X") (t.at "Y") . value_type . 
should_equal Value_Type.Boolean @@ -174,7 +190,7 @@ add_specs suite_builder setup = op (t.at "X") (Date_Time.new 2021 12 4 12 30 0) . to_vector . should_succeed group_builder.specify "should allow to compare time-of-day" <| - t = table_builder [["X", [Time_Of_Day.new 12 30 0]], ["Y", [Time_Of_Day.new 12 30 1]]] + t = table_builder [["X", [Time_Of_Day.new 12 30 0]], ["Y", [Time_Of_Day.new 12 30 1]]] connection=data.connection [(<), (<=), (>), (>=), (==), (!=)].each op-> op (t.at "X") (t.at "Y") . value_type . should_equal Value_Type.Boolean @@ -182,7 +198,7 @@ add_specs suite_builder setup = op (t.at "X") (Time_Of_Day.new 12 30 0) . to_vector . should_succeed group_builder.specify "should not allow to mix types in ordering comparisons" <| - t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]] + t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]] connection=data.connection [(<), (<=), (>), (>=)].each op-> op (t.at "X") (t.at "Y") . should_fail_with Invalid_Value_Type @@ -190,14 +206,14 @@ add_specs suite_builder setup = if setup.test_selection.supports_time_duration then group_builder.specify "should allow to subtract two Dates" <| - t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] + t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] connection=data.connection ((t.at "Y") - (t.at "X")) . to_vector . should_equal [Period.new months=1 days=2] ((t.at "Y") - (Date.new 2020 12 5)) . to_vector . 
should_equal [Period.new years=1] group_builder.specify "should allow to subtract two Date_Times" <| dx = Date_Time.new 2021 11 30 10 15 0 - t = table_builder [["X", [dx]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]] + t = table_builder [["X", [dx]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]] connection=data.connection hours = 2 + 24 * 5 diff = Duration.new hours=hours minutes=15 seconds=20 @@ -205,16 +221,16 @@ add_specs suite_builder setup = ((t.at "Y") - dx) . to_vector . should_equal [diff] group_builder.specify "should allow to subtract two Time_Of_Days" <| - t = table_builder [["X", [Time_Of_Day.new 10 15 0, Time_Of_Day.new 1 0 0]], ["Y", [Time_Of_Day.new 12 30 20, Time_Of_Day.new 0 0 0]]] + t = table_builder [["X", [Time_Of_Day.new 10 15 0, Time_Of_Day.new 1 0 0]], ["Y", [Time_Of_Day.new 12 30 20, Time_Of_Day.new 0 0 0]]] connection=data.connection ((t.at "Y") - (t.at "X")) . to_vector . should_equal [Duration.new hours=2 minutes=15 seconds=20, Duration.new hours=(-1) minutes=0 seconds=0] ((t.at "Y") - (Time_Of_Day.new 0 0 0)) . to_vector . 
should_equal [Duration.new hours=12 minutes=30 seconds=20, Duration.zero] if setup.test_selection.supports_time_duration.not then group_builder.specify "should report unsupported operation for subtracting date/time" <| - t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] - t2 = table_builder [["X", [Date_Time.new 2021 11 3 10 15 0]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]] - t3 = table_builder [["X", [Time_Of_Day.new 10 15 0, Time_Of_Day.new 1 0 0]], ["Y", [Time_Of_Day.new 12 30 20, Time_Of_Day.new 0 0 0]]] + t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] connection=data.connection + t2 = table_builder [["X", [Date_Time.new 2021 11 3 10 15 0]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]] connection=data.connection + t3 = table_builder [["X", [Time_Of_Day.new 10 15 0, Time_Of_Day.new 1 0 0]], ["Y", [Time_Of_Day.new 12 30 20, Time_Of_Day.new 0 0 0]]] connection=data.connection ((t1.at "Y") - (t1.at "X")) . should_fail_with Unsupported_Database_Operation ((t1.at "Y") - (Date.new 2020 12 5)) . should_fail_with Unsupported_Database_Operation @@ -224,7 +240,7 @@ add_specs suite_builder setup = ((t3.at "Y") - (Time_Of_Day.new 0 0 0)) . should_fail_with Unsupported_Database_Operation group_builder.specify "should report an Invalid_Value_Type error when subtracting mixed date/time types" <| - t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]] + t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]] connection=data.connection ((t.at "Y") - (t.at "X")) . should_fail_with Invalid_Value_Type ((t.at "Y") - (Time_Of_Day.new 12 30 0)) . should_fail_with Invalid_Value_Type @@ -234,7 +250,7 @@ add_specs suite_builder setup = ((t.at "Z") - (Date.new 2021 11 3)) . 
should_fail_with Invalid_Value_Type group_builder.specify "should allow computing a SQL-like difference" <| - t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] + t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] connection=data.connection (t1.at "X").date_diff (t1.at "Y") Date_Period.Day . to_vector . should_equal [32] (t1.at "Y").date_diff (t1.at "X") Date_Period.Day . to_vector . should_equal [-32] @@ -264,7 +280,7 @@ add_specs suite_builder setup = (t1.at "X").date_diff (t1.at "Y") Time_Period.Hour . should_fail_with Illegal_Argument zone = Time_Zone.parse "Europe/Warsaw" - t2 = table_builder [["X", [Date_Time.new 2021 11 3 10 15 0 zone=zone]], ["Y", [Date_Time.new 2021 12 5 12 30 20 zone=zone]]] + t2 = table_builder [["X", [Date_Time.new 2021 11 3 10 15 0 zone=zone]], ["Y", [Date_Time.new 2021 12 5 12 30 20 zone=zone]]] connection=data.connection (t2.at "X").date_diff (t2.at "Y") Date_Period.Day . to_vector . should_equal [32] (t2.at "Y").date_diff (t2.at "X") Date_Period.Day . to_vector . should_equal [-32] @@ -301,7 +317,7 @@ add_specs suite_builder setup = (t2.at "X").date_diff (t2.at "Y") Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation (t2.at "X").date_diff (Date_Time.new 2021 11 3 10 15 30 123 456 789 zone=zone) Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation - t3 = table_builder [["X", [Time_Of_Day.new 10 15 0]], ["Y", [Time_Of_Day.new 12 30 20]]] + t3 = table_builder [["X", [Time_Of_Day.new 10 15 0]], ["Y", [Time_Of_Day.new 12 30 20]]] connection=data.connection # There is no default period: (t3.at "X").date_diff (t3.at "Y") . should_be_a Function @@ -334,7 +350,7 @@ add_specs suite_builder setup = (t3.at "X").date_diff (Time_Of_Day.new 10 15 12 34 56 78) Time_Period.Nanosecond . 
should_fail_with Unsupported_Database_Operation group_builder.specify "date_diff should return integers" <| - t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]] + t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]] connection=data.connection time_periods = [Time_Period.Hour, Time_Period.Minute, Time_Period.Second] date_periods = [Date_Period.Day, Date_Period.Week, Date_Period.Month, Date_Period.Quarter, Date_Period.Year] @@ -349,7 +365,7 @@ add_specs suite_builder setup = (t.at "Z").date_diff (Date_Time.new 2021 12 05 01 02) p . value_type . is_integer . should_be_true group_builder.specify "should not allow mixing types in date_diff" <| - t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]] + t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]] connection=data.connection (t.at "X").date_diff (t.at "Y") Date_Period.Day . should_fail_with Invalid_Value_Type (t.at "Z").date_diff (t.at "X") Date_Period.Day . should_fail_with Invalid_Value_Type (t.at "Y").date_diff (t.at "Z") Time_Period.Hour . should_fail_with Invalid_Value_Type @@ -361,7 +377,7 @@ add_specs suite_builder setup = (t.at "Z").date_diff (Time_Of_Day.new 12 30 20) Time_Period.Hour . should_fail_with Invalid_Value_Type group_builder.specify "should allow an SQL-like shift" <| - t1 = table_builder [["X", [Date.new 2021 01 31, Date.new 2021 01 01, Date.new 2021 12 31]], ["Y", [5, -1, 0]]] + t1 = table_builder [["X", [Date.new 2021 01 31, Date.new 2021 01 01, Date.new 2021 12 31]], ["Y", [5, -1, 0]]] connection=data.connection (t1.at "X").date_add (t1.at "Y") Date_Period.Day . to_vector . should_equal [Date.new 2021 02 05, Date.new 2020 12 31, Date.new 2021 12 31] (t1.at "X").date_add -1 Date_Period.Day . 
to_vector . should_equal [Date.new 2021 01 30, Date.new 2020 12 31, Date.new 2021 12 30] (t1.at "X").date_add (t1.at "Y") Date_Period.Month . to_vector . should_equal [Date.new 2021 06 30, Date.new 2020 12 01, Date.new 2021 12 31] @@ -378,7 +394,7 @@ add_specs suite_builder setup = # Will accept Time_Period.Day as alias of Date_Period.Day (t1.at "X").date_add 1 Time_Period.Day . to_vector . should_equal [Date.new 2021 02 01, Date.new 2021 01 02, Date.new 2022 01 01] - t2 = table_builder [["X", [Date_Time.new 2021 01 31 12 30 0, Date_Time.new 2021 01 01 12 30 0, Date_Time.new 2021 12 31 12 30 0]], ["Y", [5, -1, 0]]] + t2 = table_builder [["X", [Date_Time.new 2021 01 31 12 30 0, Date_Time.new 2021 01 01 12 30 0, Date_Time.new 2021 12 31 12 30 0]], ["Y", [5, -1, 0]]] connection=data.connection (t2.at "X").date_add (t2.at "Y") Date_Period.Day . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 02 05 12 30 0, Date_Time.new 2020 12 31 12 30 0, Date_Time.new 2021 12 31 12 30 0] (t2.at "X").date_add -1 Time_Period.Day . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 30 12 30 0, Date_Time.new 2020 12 31 12 30 0, Date_Time.new 2021 12 30 12 30 0] (t2.at "X").date_add (t2.at "Y") Date_Period.Month . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 06 30 12 30 0, Date_Time.new 2020 12 01 12 30 0, Date_Time.new 2021 12 31 12 30 0] @@ -400,7 +416,7 @@ add_specs suite_builder setup = False -> (t2.at "X").date_add 1 Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation - t3 = table_builder [["X", [Time_Of_Day.new 12 30 0, Time_Of_Day.new 23 45 0, Time_Of_Day.new 1 30 0]], ["Y", [5, -1, 0]]] + t3 = table_builder [["X", [Time_Of_Day.new 12 30 0, Time_Of_Day.new 23 45 0, Time_Of_Day.new 1 30 0]], ["Y", [5, -1, 0]]] connection=data.connection (t3.at "X").date_add (t3.at "Y") Time_Period.Hour . to_vector . should_equal [Time_Of_Day.new 17 30 0, Time_Of_Day.new 22 45 0, Time_Of_Day.new 1 30 0] (t3.at "X").date_add 1 Time_Period.Hour . 
to_vector . should_equal [Time_Of_Day.new 13 30 0, Time_Of_Day.new 0 45 0, Time_Of_Day.new 2 30 0] @@ -427,13 +443,13 @@ add_specs suite_builder setup = (t3.at "X").date_add (t3.at "Y") . to_vector . should_equal [Time_Of_Day.new 17 30 0, Time_Of_Day.new 22 45 0, Time_Of_Day.new 1 30 0] group_builder.specify "should check shift_amount type in date_add" <| - t = table_builder [["X", [Date.new 2021 01 31]]] + t = table_builder [["X", [Date.new 2021 01 31]]] connection=data.connection t.at "X" . date_add "text" Date_Period.Day . should_fail_with Invalid_Value_Type group_builder.specify "date_diff and date_add should work correctly with DST" pending="May be failing on some Database configurations. ToDo: investigate - https://github.com/enso-org/enso/issues/7326" <| zone = Time_Zone.parse "Europe/Warsaw" dt1 = Date_Time.new 2023 03 26 00 30 00 zone=zone - t = table_builder [["X", [dt1]]] + t = table_builder [["X", [dt1]]] connection=data.connection x = t.at "X" # +24h will shift 1 day and 1 hour, because they 26th of March has only 23 hours within it @@ -461,7 +477,7 @@ add_specs suite_builder setup = dt3 = Date_Time.new 2023 03 28 01 30 00 zone=zone dt4 = Date_Time.new 2023 03 29 00 30 00 zone=zone - t2 = table_builder [["X", [dt3]]] + t2 = table_builder [["X", [dt3]]] connection=data.connection # No DST switch here, so all backends agree that 0 days elapsed in the 23 hours. (t2.at "X").date_diff dt4 Date_Period.Day . to_vector . should_equal [0] (t2.at "X").date_diff dt4 Time_Period.Day . to_vector . should_equal [0] From 1946e306cde7431b8556d63d8db78c443c232661 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 15:23:46 +0100 Subject: [PATCH 40/93] Add main method to some tests. 
--- .../src/Common_Table_Operations/Derived_Columns_Spec.enso | 2 ++ test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso | 1 + .../src/Common_Table_Operations/Expression_Spec.enso | 1 + test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso | 1 + .../src/Common_Table_Operations/Integration_Tests.enso | 2 ++ test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso | 1 + 6 files changed, 8 insertions(+) diff --git a/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso index 5afdc64fe847..b9cccc09b105 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso @@ -11,6 +11,8 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import all +main = run_default_backend add_specs + type Data Value ~connection diff --git a/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso index ea12d2f65d1e..1f07e7f41521 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso @@ -8,6 +8,7 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend +main = run_default_backend add_specs add_specs suite_builder setup = table_builder = setup.table_builder diff --git a/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso index a19442e018d9..ce948b914d08 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso @@ -15,6 +15,7 @@ from Standard.Test_New import all from project.Util import all from project.Common_Table_Operations.Util import run_default_backend +main = 
run_default_backend (add_specs detailed=True) add_specs suite_builder detailed setup = prefix = setup.prefix diff --git a/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso index 44bc2575734f..29a3f0020ae8 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso @@ -17,6 +17,7 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend +main = run_default_backend add_specs type Data Value ~connection diff --git a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso index b32a2d196943..2f0b26c13a6b 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso @@ -11,6 +11,8 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend from project.Common_Table_Operations.Core_Spec import weird_names +main = run_default_backend add_specs + add_specs suite_builder setup = table_builder = setup.table_builder materialize = setup.materialize diff --git a/test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso index 27506d019678..01cd0426a554 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso @@ -12,6 +12,7 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend +main = run_default_backend add_specs add_specs suite_builder setup = prefix = setup.prefix From 9ccbdc0523b7baf585a5c2d985c8bcacc6a7e262 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 15:23:57 +0100 Subject: [PATCH 41/93] Fix Missing_Values_Spec --- .../Missing_Values_Spec.enso 
| 87 +++++++++++-------- 1 file changed, 52 insertions(+), 35 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso index fd14817fa7de..f7bb0a56e61b 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso @@ -11,13 +11,20 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend +main = run_default_backend add_specs -add_specs suite_builder setup = - prefix = setup.prefix - table_builder = setup.table_builder - test_selection = setup.test_selection - suite_builder.group prefix+"Dropping Missing Values" group_builder-> - t0 = table_builder [["a", [0, 1, Nothing, 42, Nothing, 5]], ["b", [True, Nothing, True, False, Nothing, False]], ["c", ["", "foo", "bar", Nothing, Nothing, " "]]] +type Data + Value ~data + + connection self = self.data.at 0 + t0 self = self.data.at 1 + t1 self = self.data.at 2 + t3 self = self.data.at 3 + t4 self = self.data.at 4 + + setup create_connection_fn table_builder = + connection = create_connection_fn Nothing + t0 = table_builder [["a", [0, 1, Nothing, 42, Nothing, 5]], ["b", [True, Nothing, True, False, Nothing, False]], ["c", ["", "foo", "bar", Nothing, Nothing, " "]]] connection=connection t1 = a = ["a", [1, Nothing, 3, 4]] b = ["b", ["a", "b", Nothing, " "]] @@ -25,17 +32,33 @@ add_specs suite_builder setup = d = ["d", [Nothing, True, False, True]] e = ["e", ["", "", "foo", "bar"]] f = ["f", [Nothing, "", Nothing, ""]] - table_builder [a, b, c, d, e, f] + table_builder [a, b, c, d, e, f] connection=connection + t3 = table_builder [["X", [2.0, 1.5, Number.nan, Number.nan]], ["Y", [Nothing, 2.0, Nothing, 5.0]]] + t4 = + c = ["c", [10, 20, 40, 30]] + g = ["g", [Number.nan, 1, 2, 3.4]] + h = ["h", [Number.nan, Nothing, Number.nan, Nothing]] + table_builder [c, g, h] + Data.Value 
[connection, t0, t1, t3, t4] + + +add_specs suite_builder setup = + prefix = setup.prefix + table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func + test_selection = setup.test_selection + suite_builder.group prefix+"Dropping Missing Values" group_builder-> + data = Data.setup create_connection_fn table_builder group_builder.specify "filter_blank_rows should drop rows that contain at least one missing cell" <| - d = t0.filter_blank_rows when=Blank_Selector.Any_Cell + d = data.t0.filter_blank_rows when=Blank_Selector.Any_Cell d.row_count . should_equal 1 d.at "a" . to_vector . should_equal [5] d.at "b" . to_vector . should_equal [False] d.at "c" . to_vector . should_equal [" "] group_builder.specify "filter_blank_rows should drop rows that are all blank" <| - d2 = t0.filter_blank_rows when=Blank_Selector.All_Cells + d2 = data.t0.filter_blank_rows when=Blank_Selector.All_Cells d2.at "a" . to_vector . should_equal [0, 1, Nothing, 42, 5] d2.at "b" . to_vector . should_equal [True, Nothing, True, False, False] d2.at "c" . to_vector . should_equal ["", "foo", "bar", Nothing, " "] @@ -43,7 +66,7 @@ add_specs suite_builder setup = group_builder.specify "filter_blank_rows should deal with edge cases" <| ## TODO currently our builder does not allow all-null tables, so we create one with a 0 and remove it by filter. See #6159. - t0 = table_builder [["X", [0, Nothing, Nothing, Nothing]]] + t0 = table_builder [["X", [0, Nothing, Nothing, Nothing]]] connection=data.connection t1 = t0.filter "X" (Filter_Condition.Is_Nothing) t1.row_count . should_equal 3 t1.at "X" . to_vector . should_equal [Nothing, Nothing, Nothing] @@ -52,7 +75,7 @@ add_specs suite_builder setup = t2.row_count . should_equal 0 t2.at "X" . to_vector . should_equal [] - t3 = table_builder [["X", ["", "", Nothing]]] + t3 = table_builder [["X", ["", "", Nothing]]] connection=data.connection t4 = t3.filter_blank_rows when=Blank_Selector.All_Cells t4.row_count . 
should_equal 0 t4.at "X" . to_vector . should_equal [] @@ -60,89 +83,83 @@ add_specs suite_builder setup = group_builder.specify "filter_blank_rows should work with a table with many columns" <| cols = Vector.new 60 i-> ["col_"+i.to_text, [i, Nothing]] - t1 = table_builder cols + t1 = table_builder cols connection=data.connection t2 = t1.filter_blank_rows t2.columns.length . should_equal 60 t2.row_count . should_equal 1 t2.at 42 . to_vector . should_equal [42] group_builder.specify "should allow to select blank columns" <| - r1 = t1.select_blank_columns + r1 = data.t1.select_blank_columns r1.columns.map .name . should_equal ["f"] r1.at "f" . to_vector . should_equal [Nothing, "", Nothing, ""] - r2 = t1.select_blank_columns when=Blank_Selector.Any_Cell + r2 = data.t1.select_blank_columns when=Blank_Selector.Any_Cell r2.columns.map .name . should_equal ["a", "b", "d", "e", "f"] r2.at "d" . to_vector . should_equal [Nothing, True, False, True] group_builder.specify "should allow to remove blank columns" <| - r1 = t1.remove_blank_columns + r1 = data.t1.remove_blank_columns r1.columns.map .name . should_equal ["a", "b", "c", "d", "e"] r1.at "a" . to_vector . should_equal [1, Nothing, 3, 4] - r2 = t1.remove_blank_columns when=Blank_Selector.Any_Cell + r2 = data.t1.remove_blank_columns when=Blank_Selector.Any_Cell r2.columns.map .name . should_equal ["c"] r2.at "c" . to_vector . should_equal [10, 20, 30, 40] - t3 = table_builder [["X", [2.0, 1.5, Number.nan, Number.nan]], ["Y", [Nothing, 2.0, Nothing, 5.0]]] - t4 = - c = ["c", [10, 20, 40, 30]] - g = ["g", [Number.nan, 1, 2, 3.4]] - h = ["h", [Number.nan, Nothing, Number.nan, Nothing]] - table_builder [c, g, h] if test_selection.is_nan_and_nothing_distinct then group_builder.specify "should not treat NaNs as blank by default" <| - r1 = t3.filter_blank_rows when=Blank_Selector.Any_Cell + r1 = data.t3.filter_blank_rows when=Blank_Selector.Any_Cell # We cannot use `Vector.==` because `NaN != NaN`. r1.at "X" . to_vector . 
to_text . should_equal "[1.5, NaN]" r1.at "Y" . to_vector . should_equal [2.0, 5.0] - r2 = t3.filter_blank_rows when=Blank_Selector.All_Cells + r2 = data.t3.filter_blank_rows when=Blank_Selector.All_Cells r2.at "X" . to_vector . to_text . should_equal "[2.0, 1.5, NaN, NaN]" r2.at "Y" . to_vector . should_equal [Nothing, 2.0, Nothing, 5.0] - r3 = t4.remove_blank_columns + r3 = data.t4.remove_blank_columns r3.columns.map .name . should_equal ["c", "g", "h"] r3.at "g" . to_vector . to_text . should_equal "[NaN, 1.0, 2.0, 3.4]" - r4 = t4.remove_blank_columns when=Blank_Selector.Any_Cell + r4 = data.t4.remove_blank_columns when=Blank_Selector.Any_Cell r4.columns.map .name . should_equal ["c", "g"] r4.at "g" . to_vector . to_text . should_equal "[NaN, 1.0, 2.0, 3.4]" - r5 = t4.select_blank_columns when=Blank_Selector.Any_Cell + r5 = data.t4.select_blank_columns when=Blank_Selector.Any_Cell r5.columns.map .name . should_equal ["h"] r5.at "h" . to_vector . to_text . should_equal "[NaN, Nothing, NaN, Nothing]" group_builder.specify "should allow to treat NaNs as blank if asked" <| - r1 = t3.filter_blank_rows when=Blank_Selector.Any_Cell treat_nans_as_blank=True + r1 = data.t3.filter_blank_rows when=Blank_Selector.Any_Cell treat_nans_as_blank=True # We cannot use `Vector.==` because `NaN != NaN`. r1.at "X" . to_vector . should_equal [1.5] r1.at "Y" . to_vector . should_equal [2.0] - r2 = t3.filter_blank_rows when=Blank_Selector.All_Cells treat_nans_as_blank=True + r2 = data.t3.filter_blank_rows when=Blank_Selector.All_Cells treat_nans_as_blank=True r2.at "X" . to_vector . to_text . should_equal "[2.0, 1.5, NaN]" r2.at "Y" . to_vector . should_equal [Nothing, 2.0, 5.0] - r3 = t4.remove_blank_columns when=Blank_Selector.All_Cells treat_nans_as_blank=True + r3 = data.t4.remove_blank_columns when=Blank_Selector.All_Cells treat_nans_as_blank=True r3.columns.map .name . should_equal ["c", "g"] r3.at "g" . to_vector . to_text . 
should_equal "[NaN, 1.0, 2.0, 3.4]" - r4 = t4.select_blank_columns when=Blank_Selector.All_Cells treat_nans_as_blank=True + r4 = data.t4.select_blank_columns when=Blank_Selector.All_Cells treat_nans_as_blank=True r4.columns.map .name . should_equal ["h"] r4.at "h" . to_vector . to_text . should_equal "[NaN, Nothing, NaN, Nothing]" - r5 = t4.remove_blank_columns when=Blank_Selector.Any_Cell treat_nans_as_blank=True + r5 = data.t4.remove_blank_columns when=Blank_Selector.Any_Cell treat_nans_as_blank=True r5.columns.map .name . should_equal ["c"] r5.at "c" . to_vector . should_equal [10, 20, 40, 30] - r6 = t4.select_blank_columns when=Blank_Selector.Any_Cell treat_nans_as_blank=True + r6 = data.t4.select_blank_columns when=Blank_Selector.Any_Cell treat_nans_as_blank=True r6.columns.map .name . should_equal ["g", "h"] r6.at "h" . to_vector . to_text . should_equal "[NaN, Nothing, NaN, Nothing]" if test_selection.is_nan_and_nothing_distinct.not then group_builder.specify "this backend treats NaN as Nothing" <| - t3.at "X" . to_vector . should_equal [2.0, 1.5, Nothing, Nothing] - t3.at "X" . is_nan . to_vector . should_fail_with Unsupported_Database_Operation + data.t3.at "X" . to_vector . should_equal [2.0, 1.5, Nothing, Nothing] + data.t3.at "X" . is_nan . to_vector . 
should_fail_with Unsupported_Database_Operation group_builder.specify "select_blank_columns and remove_blank_columns should deal with edge cases" <| t = table_builder [["X", [1, 2, 3, 4]]] From 3ae3a7f98fcd864a8fdfb5bcfbf38792ed01e624 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 15:28:30 +0100 Subject: [PATCH 42/93] Fix Order_By_Spec --- .../Order_By_Spec.enso | 118 ++++++++++-------- 1 file changed, 65 insertions(+), 53 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso index 2eceb9652644..b0fa3d272986 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso @@ -10,16 +10,13 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend -type My_Type - Foo x +main = run_default_backend add_specs -add_specs suite_builder setup = - prefix = setup.prefix - table_builder = setup.table_builder - test_selection = setup.test_selection - order_by_pending = if test_selection.order_by.not then - "ToDo: order_by is not yet supported by this backend." 
- suite_builder.group prefix+"Table.order_by" pending=order_by_pending group_builder-> +type Data + Value ~table + + setup create_connection_fn table_builder = + connection = create_connection_fn Nothing mk_table = col1 = ["alpha", [3, 2, 1, 0]] col2 = ["beta", ["a", "b", "a", "b"]] @@ -31,30 +28,45 @@ add_specs suite_builder setup = col8 = ["phi", ["śc", Nothing, 's\u0301b', "śa"]] col9 = ["tau", [32.0, 0.5, -0.1, 1.6]] col10 = ["rho", ["BB", Nothing, Nothing, "B"]] - table_builder [col1, col2, col3, col4, col5, col6, col7, col8, col9, col10] - table = mk_table + table_builder [col1, col2, col3, col4, col5, col6, col7, col8, col9, col10] connection=connection + Data.Value mk_table + + +type My_Type + Foo x + + +add_specs suite_builder setup = + prefix = setup.prefix + table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func + test_selection = setup.test_selection + order_by_pending = if test_selection.order_by.not then + "ToDo: order_by is not yet supported by this backend." + suite_builder.group prefix+"Table.order_by" pending=order_by_pending group_builder-> + data = Data.setup create_connection_fn table_builder group_builder.specify "should work as shown in the doc examples" <| - t1 = table.order_by ["alpha"] + t1 = data.table.order_by ["alpha"] t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] ## Assumes stable sorting on database engine. - t2 = table.order_by [1, Sort_Column.Index -8 Sort_Direction.Descending] + t2 = data.table.order_by [1, Sort_Column.Index -8 Sort_Direction.Descending] t2.at "beta" . to_vector . should_equal ["a", "a", "b", "b"] t2.at "gamma" . to_vector . should_equal [3, 1, 4, 2] t2.at "alpha" . to_vector . 
should_equal [1, 3, 0, 2] - t3 = table.order_by [Sort_Column.Select_By_Name "a.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive] + t3 = data.table.order_by [Sort_Column.Select_By_Name "a.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive] t3.at "alpha" . to_vector . should_equal [0, 1, 2, 3] group_builder.specify "should work with single column name" <| - t1 = table.order_by "alpha" + t1 = data.table.order_by "alpha" t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] group_builder.specify "should work with single Sort_Column" <| - t1 = table.order_by [Sort_Column.Name "alpha"] + t1 = data.table.order_by [Sort_Column.Name "alpha"] t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] @@ -62,7 +74,7 @@ add_specs suite_builder setup = t2.at "alpha" . to_vector . should_equal [3, 2, 1, 0] t2.at "gamma" . to_vector . should_equal [1, 2, 3, 4] - t3 = table.order_by [Sort_Column.Index 0] + t3 = data.table.order_by [Sort_Column.Index 0] t3.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t3.at "gamma" . to_vector . should_equal [4, 3, 2, 1] @@ -71,11 +83,11 @@ add_specs suite_builder setup = t4.at "gamma" . to_vector . should_equal [1, 2, 3, 4] group_builder.specify "should allow the selector to mix regex and case insensitive matching" <| - t4 = table.order_by [Sort_Column.Select_By_Name "A.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive] + t4 = data.table.order_by [Sort_Column.Select_By_Name "A.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive] t4.at "alpha" . to_vector . should_equal [0, 1, 2, 3] group_builder.specify "should correctly handle regexes matching multiple names" <| - t1 = table.order_by [Sort_Column.Select_By_Name ".*ta" Sort_Direction.Descending use_regex=True] + t1 = data.table.order_by [Sort_Column.Select_By_Name ".*ta" Sort_Direction.Descending use_regex=True] t1.at "beta" . to_vector . 
should_equal ["b", "b", "a", "a"] t1.at "delta" . to_vector . should_equal ["a1", "a03", "a2", "a10"] t1.at "gamma" . to_vector . should_equal [2, 4, 3, 1] @@ -83,53 +95,53 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle problems: out of bounds indices" <| selector = [0, 100, Sort_Column.Index -200, Sort_Column.Index 300] expected_problem = Missing_Input_Columns.Error [100, -200, 300] - t1 = table.order_by selector + t1 = data.table.order_by selector t1.should_fail_with Missing_Input_Columns t1.catch . should_equal expected_problem - action = table.order_by selector error_on_missing_columns=False on_problems=_ + action = data.table.order_by selector error_on_missing_columns=False on_problems=_ tester table = table.at "alpha" . to_vector . should_equal [0, 1, 2, 3] Problems.test_problem_handling action [expected_problem] tester group_builder.specify "should correctly handle edge-cases: duplicate selectors" <| selector1 = ["alpha", Sort_Column.Name "alpha" Sort_Direction.Descending] - t1 = table.order_by selector1 + t1 = data.table.order_by selector1 Problems.assume_no_problems t1 t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] - t2 = table.order_by [Sort_Column.Index 0, Sort_Column.Index 0 Sort_Direction.Descending] + t2 = data.table.order_by [Sort_Column.Index 0, Sort_Column.Index 0 Sort_Direction.Descending] Problems.assume_no_problems t2 t2.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t2.at "gamma" . to_vector . should_equal [4, 3, 2, 1] - t3 = table.order_by [Sort_Column.Index 0, Sort_Column.Name "alpha" Sort_Direction.Descending] + t3 = data.table.order_by [Sort_Column.Index 0, Sort_Column.Name "alpha" Sort_Direction.Descending] Problems.assume_no_problems t3 t3.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t3.at "gamma" . to_vector . 
should_equal [4, 3, 2, 1] group_builder.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <| selector = [Sort_Column.Select_By_Name "ALPHA" case_sensitivity=Case_Sensitivity.Insensitive, Sort_Column.Select_By_Name "alpha" Sort_Direction.Descending] - t1 = table.order_by selector + t1 = data.table.order_by selector Problems.assume_no_problems t1 t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] group_builder.specify "should correctly handle edge-cases: duplicate matches due to regexes" <| selector = [Sort_Column.Select_By_Name "a.*" use_regex=True, Sort_Column.Select_By_Name "alpha" Sort_Direction.Descending] - t1 = table.order_by selector + t1 = data.table.order_by selector Problems.assume_no_problems t1 t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] group_builder.specify "should correctly handle edge-cases: mixed selector types" <| - t1 = table.order_by [Sort_Column.Name "alpha", Sort_Column.Index 1] + t1 = data.table.order_by [Sort_Column.Name "alpha", Sort_Column.Index 1] t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "beta" . to_vector . should_equal ["b", "a", "b", "a"] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] - t2 = table.order_by [Sort_Column.Select_By_Name "a.*a" use_regex=True, Sort_Column.Index 1] + t2 = data.table.order_by [Sort_Column.Select_By_Name "a.*a" use_regex=True, Sort_Column.Index 1] t2.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t2.at "beta" . to_vector . should_equal ["b", "a", "b", "a"] t2.at "gamma" . to_vector . 
should_equal [4, 3, 2, 1] @@ -138,21 +150,21 @@ add_specs suite_builder setup = weird_name = '.*?-!@#!"' selector = [Sort_Column.Name "alpha", "hmm", Sort_Column.Name weird_name] expected_problem = Missing_Input_Columns.Error ["hmm", weird_name] - t1 = table.order_by selector + t1 = data.table.order_by selector t1.should_fail_with Missing_Input_Columns t1.catch . should_equal expected_problem - action = table.order_by selector error_on_missing_columns=False on_problems=_ + action = data.table.order_by selector error_on_missing_columns=False on_problems=_ tester table = table.at "alpha" . to_vector . should_equal [0, 1, 2, 3] Problems.test_problem_handling action [expected_problem] tester group_builder.specify "should report a problem if no columns are selected for ordering" <| - t2 = table.order_by [] + t2 = data.table.order_by [] t2.should_fail_with No_Input_Columns_Selected group_builder.specify "should stack consecutive ordering operations" <| - t1 = table.order_by [Sort_Column.Name "alpha"] + t1 = data.table.order_by [Sort_Column.Name "alpha"] t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "beta" . to_vector . should_equal ["b", "a", "b", "a"] @@ -171,50 +183,50 @@ add_specs suite_builder setup = t4.at "alpha" . to_vector . should_equal [3, 1, 2, 0] group_builder.specify "should give priority to the first selected column and use the next ones for breaking ties" <| - t1 = table.order_by ["beta", Sort_Column.Name "alpha" Sort_Direction.Ascending] + t1 = data.table.order_by ["beta", Sort_Column.Name "alpha" Sort_Direction.Ascending] t1.at "beta" . to_vector . should_equal ["a", "a", "b", "b"] t1.at "alpha" . to_vector . should_equal [1, 3, 0, 2] t1.at "gamma" . to_vector . should_equal [3, 1, 4, 2] - t1a = table.order_by ["beta", Sort_Column.Name "alpha" Sort_Direction.Ascending] + t1a = data.table.order_by ["beta", Sort_Column.Name "alpha" Sort_Direction.Ascending] t1a.at "beta" . to_vector . should_equal ["a", "a", "b", "b"] t1a.at "alpha" . 
to_vector . should_equal [1, 3, 0, 2] t1a.at "gamma" . to_vector . should_equal [3, 1, 4, 2] - t2 = table.order_by [Sort_Column.Name "beta", Sort_Column.Name "alpha" Sort_Direction.Descending] + t2 = data.table.order_by [Sort_Column.Name "beta", Sort_Column.Name "alpha" Sort_Direction.Descending] t2.at "beta" . to_vector . should_equal ["a", "a", "b", "b"] t2.at "alpha" . to_vector . should_equal [3, 1, 2, 0] t2.at "gamma" . to_vector . should_equal [1, 3, 2, 4] - t3 = table.order_by [Sort_Column.Name "alpha", Sort_Column.Name "beta"] + t3 = data.table.order_by [Sort_Column.Name "alpha", Sort_Column.Name "beta"] t3.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t3.at "beta" . to_vector . should_equal ["b", "a", "b", "a"] t3.at "gamma" . to_vector . should_equal [4, 3, 2, 1] - t4 = table.order_by [Sort_Column.Index 1, Sort_Column.Index 0 Sort_Direction.Ascending] + t4 = data.table.order_by [Sort_Column.Index 1, Sort_Column.Index 0 Sort_Direction.Ascending] t4.at "beta" . to_vector . should_equal ["a", "a", "b", "b"] t4.at "alpha" . to_vector . should_equal [1, 3, 0, 2] t4.at "gamma" . to_vector . should_equal [3, 1, 4, 2] group_builder.specify "should deal with real numbers, and not warn when ordering by floats" <| - t1 = table.order_by ["tau"] + t1 = data.table.order_by ["tau"] t1.at "tau" . to_vector . should_equal [-0.1, 0.5, 1.6, 32.0] t1.at "alpha" . to_vector . should_equal [1, 2, 0, 3] Problems.assume_no_problems t1 group_builder.specify "should deal with nulls" <| - t1 = table.order_by ["xi"] + t1 = data.table.order_by ["xi"] t1.at "xi" . to_vector . should_equal [Nothing, 0.5, 1.0, 1.5] t1.at "alpha" . to_vector . should_equal [1, 0, 3, 2] - t2 = table.order_by [Sort_Column.Name "rho"] + t2 = data.table.order_by [Sort_Column.Name "rho"] t2.at "rho" . to_vector . 
should_equal [Nothing, Nothing, "B", "BB"] - t3 = table.order_by [Sort_Column.Name "rho" Sort_Direction.Descending] + t3 = data.table.order_by [Sort_Column.Name "rho" Sort_Direction.Descending] t3.at "rho" . to_vector . should_equal ["BB", "B", Nothing, Nothing] group_builder.specify "should behave as expected with Unicode normalization, depending on the defaults settings" <| - t1 = table.order_by [Sort_Column.Name "phi"] + t1 = data.table.order_by [Sort_Column.Name "phi"] case test_selection.order_by_unicode_normalization_by_default of True -> t1.at "phi" . to_vector . should_equal [Nothing, "śa", 's\u0301b', "śc"] @@ -224,46 +236,46 @@ add_specs suite_builder setup = t1.at "alpha" . to_vector . should_equal [2, 1, 0, 3] group_builder.specify "should support natural ordering" pending=(if test_selection.natural_ordering.not then "Natural ordering is not supported.") <| - t1 = table.order_by [Sort_Column.Name "delta"] text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True) + t1 = data.table.order_by [Sort_Column.Name "delta"] text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True) t1.at "delta" . to_vector . should_equal ["a1", "a2", "a03", "a10"] t1.at "alpha" . to_vector . should_equal [2, 1, 0, 3] - t2 = table.order_by ["delta"] text_ordering=(Text_Ordering.Default sort_digits_as_numbers=False) + t2 = data.table.order_by ["delta"] text_ordering=(Text_Ordering.Default sort_digits_as_numbers=False) t2.at "delta" . to_vector . should_equal ["a03", "a1", "a10", "a2"] t2.at "alpha" . to_vector . 
should_equal [0, 2, 3, 1] group_builder.specify "should support case insensitive ordering" pending=(if test_selection.case_insensitive_ordering.not then "Case insensitive ordering is not supported.") <| - t1 = table.order_by [Sort_Column.Name "eta"] text_ordering=(Text_Ordering.Case_Insensitive) + t1 = data.table.order_by [Sort_Column.Name "eta"] text_ordering=(Text_Ordering.Case_Insensitive) expected = case test_selection.case_insensitive_ascii_only of True -> ["Aleph", "alpha", "Beta", "bądź"] False -> ["Aleph", "alpha", "bądź", "Beta"] t1.at "eta" . to_vector . should_equal expected - t2 = table.order_by [Sort_Column.Name "eta"] text_ordering=(Text_Ordering.Case_Sensitive) + t2 = data.table.order_by [Sort_Column.Name "eta"] text_ordering=(Text_Ordering.Case_Sensitive) t2.at "eta" . to_vector . should_equal ["Aleph", "Beta", "alpha", "bądź"] - t3 = table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Case_Insensitive) + t3 = data.table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Case_Insensitive) t3.at "psi" . to_vector . should_equal [Nothing, "c01", "c10", "C2"] - t4 = table.order_by [Sort_Column.Name "psi" Sort_Direction.Descending] text_ordering=(Text_Ordering.Case_Sensitive) + t4 = data.table.order_by [Sort_Column.Name "psi" Sort_Direction.Descending] text_ordering=(Text_Ordering.Case_Sensitive) t4.at "psi" . to_vector . should_equal ["c10", "c01", "C2", Nothing] group_builder.specify "should support natural and case insensitive ordering at the same time" pending=(if (test_selection.natural_ordering.not || test_selection.case_insensitive_ordering.not) then "Natural ordering or case sensitive ordering is not supported.") <| - t1 = table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True) + t1 = data.table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True) t1.at "psi" . to_vector . 
should_equal [Nothing, "c01", "C2", "c10"] - t2 = table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True) + t2 = data.table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True) t2.at "psi" . to_vector . should_equal [Nothing, "C2", "c01", "c10"] - t3 = table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Case_Insensitive) + t3 = data.table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Case_Insensitive) t3.at "psi" . to_vector . should_equal [Nothing, "c01", "c10", "C2"] - t4 = table.order_by [Sort_Column.Name "psi"] + t4 = data.table.order_by [Sort_Column.Name "psi"] t4.at "psi" . to_vector . should_equal [Nothing, "C2", "c01", "c10"] group_builder.specify "text ordering settings should not affect numeric columns" <| ordering = Text_Ordering.Case_Insensitive sort_digits_as_numbers=True - t1 = table.order_by [Sort_Column.Name "alpha"] text_ordering=ordering + t1 = data.table.order_by [Sort_Column.Name "alpha"] text_ordering=ordering t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . 
should_equal [4, 3, 2, 1] From 4a6afefe591873d676f94237b8518626c3fbe941 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 15:36:38 +0100 Subject: [PATCH 43/93] Fix Select_Columns_Spec --- .../Select_Columns_Spec.enso | 289 ++++++++++-------- 1 file changed, 162 insertions(+), 127 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso index 9e63ee73f859..2787da2eb232 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso @@ -8,47 +8,91 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import expect_column_names, run_default_backend +main = run_default_backend add_specs + +type Select_Columns_Data + Value ~table + + setup create_connection_fn table_builder = + connection = create_connection_fn Nothing + table = + col1 = ["foo", [1,2,3]] + col2 = ["bar", [4,5,6]] + col3 = ["Baz", [7,8,9]] + col4 = ["foo 1", [10,11,12]] + col5 = ["foo 2", [13,14,15]] + col6 = ["ab.+123", [16,17,18]] + col7 = ["abcd123", [19,20,21]] + table_builder [col1, col2, col3, col4, col5, col6, col7] connection=connection + Select_Columns_Data.Value table + + +type Sort_Columns_Data + Value ~table + + setup create_connection_fn table_builder = + connection = create_connection_fn Nothing + table = + col1 = ["foo 21", [1,2,3]] + col2 = ["foo 100", [4,5,6]] + col3 = ["foo 1", [7,8,9]] + col4 = ["Foo 2", [10,11,12]] + col5 = ["foo 3", [13,14,15]] + col6 = ["foo 001", [16,17,18]] + col7 = ["bar", [19,20,21]] + table_builder [col1, col2, col3, col4, col5, col6, col7] connection=connection + Sort_Columns_Data.Value table + + +type Rename_Columns_Data + Value ~table + + setup create_connection_fn table_builder = + connection = create_connection_fn Nothing + table = + col1 = ["alpha", [1,2,3]] + col2 = ["beta", [4,5,6]] + col3 = ["gamma", 
[16,17,18]] + col4 = ["delta", [19,20,21]] + table_builder [col1, col2, col3, col4] connection=connection + Rename_Columns_Data.Value table + + add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func test_selection = setup.test_selection - table = - col1 = ["foo", [1,2,3]] - col2 = ["bar", [4,5,6]] - col3 = ["Baz", [7,8,9]] - col4 = ["foo 1", [10,11,12]] - col5 = ["foo 2", [13,14,15]] - col6 = ["ab.+123", [16,17,18]] - col7 = ["abcd123", [19,20,21]] - table_builder [col1, col2, col3, col4, col5, col6, col7] suite_builder.group prefix+"Table.select_columns" group_builder-> + data = Select_Columns_Data.setup create_connection_fn table_builder + group_builder.specify "should work as shown in the doc examples" <| - expect_column_names ["foo", "bar"] <| table.select_columns ["bar", "foo"] - expect_column_names ["bar", "Baz", "foo 1", "foo 2"] <| table.select_columns ["foo.+".to_regex, "b.*".to_regex True] - expect_column_names ["abcd123", "foo", "bar"] <| table.select_columns [-1, 0, 1] reorder=True + expect_column_names ["foo", "bar"] <| data.table.select_columns ["bar", "foo"] + expect_column_names ["bar", "Baz", "foo 1", "foo 2"] <| data.table.select_columns ["foo.+".to_regex, "b.*".to_regex True] + expect_column_names ["abcd123", "foo", "bar"] <| data.table.select_columns [-1, 0, 1] reorder=True group_builder.specify "should allow to reorder columns if asked to" <| - table_2 = table.select_columns ["bar", "foo"] reorder=True + table_2 = data.table.select_columns ["bar", "foo"] reorder=True expect_column_names ["bar", "foo"] table_2 table_2 . at "bar" . to_vector . should_equal [4,5,6] table_2 . at "foo" . to_vector . 
should_equal [1,2,3] group_builder.specify "should correctly handle regex matching" <| - expect_column_names ["foo"] <| table.select_columns ["foo".to_regex] - expect_column_names ["ab.+123", "abcd123"] <| table.select_columns ["a.*".to_regex] - expect_column_names ["ab.+123", "abcd123"] <| table.select_columns ["ab.+123".to_regex] - expect_column_names ["ab.+123"] <| table.select_columns ["ab.+123"] - expect_column_names ["abcd123"] <| table.select_columns ["abcd123".to_regex] + expect_column_names ["foo"] <| data.table.select_columns ["foo".to_regex] + expect_column_names ["ab.+123", "abcd123"] <| data.table.select_columns ["a.*".to_regex] + expect_column_names ["ab.+123", "abcd123"] <| data.table.select_columns ["ab.+123".to_regex] + expect_column_names ["ab.+123"] <| data.table.select_columns ["ab.+123"] + expect_column_names ["abcd123"] <| data.table.select_columns ["abcd123".to_regex] group_builder.specify "should allow negative indices" <| - expect_column_names ["foo", "bar", "foo 2"] <| table.select_columns [-3, 0, 1] + expect_column_names ["foo", "bar", "foo 2"] <| data.table.select_columns [-3, 0, 1] group_builder.specify "should allow mixed names and indexes" <| - expect_column_names ["foo", "bar", "foo 2"] <| table.select_columns [-3, "bar", 0] - expect_column_names ["foo 2", "bar", "foo"] <| table.select_columns [-3, "bar", 0] reorder=True - expect_column_names ["foo", "bar", "foo 1", "foo 2", "abcd123"] <| table.select_columns [-1, "bar", "foo.*".to_regex] - expect_column_names ["foo", "foo 1", "foo 2", "bar", "abcd123"] <| table.select_columns ["foo.*".to_regex, "bar", "foo", -1] reorder=True + expect_column_names ["foo", "bar", "foo 2"] <| data.table.select_columns [-3, "bar", 0] + expect_column_names ["foo 2", "bar", "foo"] <| data.table.select_columns [-3, "bar", 0] reorder=True + expect_column_names ["foo", "bar", "foo 1", "foo 2", "abcd123"] <| data.table.select_columns [-1, "bar", "foo.*".to_regex] + expect_column_names ["foo", "foo 1", "foo 
2", "bar", "abcd123"] <| data.table.select_columns ["foo.*".to_regex, "bar", "foo", -1] reorder=True if test_selection.supports_case_sensitive_columns then group_builder.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <| @@ -60,102 +104,104 @@ add_specs suite_builder setup = expect_column_names ["bar", "Bar"] <| table.select_columns ["bar"] case_sensitivity=Case_Sensitivity.Insensitive group_builder.specify "should correctly handle regexes matching multiple names" <| - expect_column_names ["foo", "bar", "foo 1", "foo 2"] <| table.select_columns ["b.*".to_regex, "f.+".to_regex] - expect_column_names ["bar", "foo", "foo 1", "foo 2"] <| table.select_columns ["b.*".to_regex, "f.+".to_regex] reorder=True + expect_column_names ["foo", "bar", "foo 1", "foo 2"] <| data.table.select_columns ["b.*".to_regex, "f.+".to_regex] + expect_column_names ["bar", "foo", "foo 1", "foo 2"] <| data.table.select_columns ["b.*".to_regex, "f.+".to_regex] reorder=True group_builder.specify "should correctly handle problems: out of bounds indices" <| selector = [1, 0, 100, -200, 300] - action = table.select_columns selector error_on_missing_columns=False on_problems=_ + action = data.table.select_columns selector error_on_missing_columns=False on_problems=_ tester = expect_column_names ["foo", "bar"] problems = [Missing_Input_Columns.Error [100, -200, 300]] Problems.test_problem_handling action problems tester - err = table.select_columns selector + err = data.table.select_columns selector err.should_fail_with Missing_Input_Columns group_builder.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] - t = table.select_columns selector on_problems=Problem_Behavior.Report_Error + t = data.table.select_columns selector on_problems=Problem_Behavior.Report_Error expect_column_names ["foo"] t expect_column_names ["foo", "bar"] <| - table.select_columns [0, 1, 0] + data.table.select_columns [0, 1, 0] 
group_builder.specify "should correctly handle edge-cases: aliased indices" <| selector = [0, -6, 1, -7] - t = table.select_columns selector on_problems=Problem_Behavior.Report_Error + t = data.table.select_columns selector on_problems=Problem_Behavior.Report_Error expect_column_names ["foo", "bar"] t group_builder.specify "should correctly handle edge-cases: duplicate names" <| selector = ["foo", "foo"] - t = table.select_columns selector on_problems=Problem_Behavior.Report_Error + t = data.table.select_columns selector on_problems=Problem_Behavior.Report_Error expect_column_names ["foo"] t expect_column_names ["foo", "bar"] <| - table.select_columns ["foo", "bar", "foo", "foo", "bar"] reorder=True + data.table.select_columns ["foo", "bar", "foo", "foo", "bar"] reorder=True expect_column_names ["bar", "foo"] <| - table.select_columns ["bar", "foo", "bar", "foo", "foo", "bar"] reorder=True + data.table.select_columns ["bar", "foo", "bar", "foo", "foo", "bar"] reorder=True expect_column_names ["foo", "bar"] <| - table.select_columns ["bar", "foo", "foo", "bar"] reorder=False + data.table.select_columns ["bar", "foo", "foo", "bar"] reorder=False group_builder.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <| selector = ["FOO", "foo"] - t = table.select_columns selector case_sensitivity=Case_Sensitivity.Insensitive on_problems=Problem_Behavior.Report_Error + t = data.table.select_columns selector case_sensitivity=Case_Sensitivity.Insensitive on_problems=Problem_Behavior.Report_Error expect_column_names ["foo"] t expect_column_names ["bar", "foo"] <| - table.select_columns ["BAR", "foo", "bar"] reorder=True case_sensitivity=Case_Sensitivity.Insensitive + data.table.select_columns ["BAR", "foo", "bar"] reorder=True case_sensitivity=Case_Sensitivity.Insensitive group_builder.specify "should correctly handle problems: unmatched names" <| weird_name = '.*?-!@#!"' selector = ["foo", "hmm", weird_name] - action = 
table.select_columns selector error_on_missing_columns=False on_problems=_ + action = data.table.select_columns selector error_on_missing_columns=False on_problems=_ tester = expect_column_names ["foo"] problems = [Missing_Input_Columns.Error ["hmm", weird_name]] Problems.test_problem_handling action problems tester - err = table.select_columns selector on_problems=Problem_Behavior.Ignore + err = data.table.select_columns selector on_problems=Problem_Behavior.Ignore err.should_fail_with Missing_Input_Columns err.catch.criteria . should_equal ["hmm", weird_name] group_builder.specify "should correctly handle problems in mixed case" <| - err = table.select_columns ["foo", "hmm", 99] on_problems=Problem_Behavior.Ignore + err = data.table.select_columns ["foo", "hmm", 99] on_problems=Problem_Behavior.Ignore err.should_fail_with Missing_Input_Columns err.catch.criteria . should_equal ["hmm", 99] group_builder.specify "should correctly handle problems: no columns in the output" <| [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> - t = table.select_columns [] on_problems=pb + t = data.table.select_columns [] on_problems=pb t.should_fail_with No_Output_Columns # Just selecting [] means the No_Output_Columns does not have an additional cause. t.catch.cause . should_equal Nothing t.catch.to_display_text . should_equal "The result would contain no columns." - table.select_columns ["hmmm"] . should_fail_with Missing_Input_Columns - r2 = table.select_columns ["hmmm"] error_on_missing_columns=False + data.table.select_columns ["hmmm"] . should_fail_with Missing_Input_Columns + r2 = data.table.select_columns ["hmmm"] error_on_missing_columns=False r2.should_fail_with No_Output_Columns r2.catch.cause . should_be_a Missing_Input_Columns r2.catch.to_display_text . should_equal "No columns in the result, because of another problem: The criteria 'hmmm' did not match any columns." 
suite_builder.group prefix+"Table.remove_columns" group_builder-> + data = Select_Columns_Data.setup create_connection_fn table_builder + group_builder.specify "should work as shown in the doc examples" <| - expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] <| table.remove_columns ["bar", "foo"] - expect_column_names ["foo", "ab.+123", "abcd123"] <| table.remove_columns ["foo.+".to_regex, "b.*".to_regex] Case_Sensitivity.Insensitive - expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123"] <| table.remove_columns [-1, 0, 1] + expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] <| data.table.remove_columns ["bar", "foo"] + expect_column_names ["foo", "ab.+123", "abcd123"] <| data.table.remove_columns ["foo.+".to_regex, "b.*".to_regex] Case_Sensitivity.Insensitive + expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123"] <| data.table.remove_columns [-1, 0, 1] group_builder.specify "should correctly handle regex matching" <| - last_ones = table.columns.drop 1 . map .name - expect_column_names last_ones <| table.remove_columns ["foo".to_regex] + last_ones = data.table.columns.drop 1 . 
map .name + expect_column_names last_ones <| data.table.remove_columns ["foo".to_regex] first_ones = ["foo", "bar", "Baz", "foo 1", "foo 2"] - expect_column_names first_ones <| table.remove_columns ["a.*".to_regex] - expect_column_names first_ones <| table.remove_columns ["ab.+123".to_regex] - expect_column_names first_ones+["abcd123"] <| table.remove_columns ["ab.+123"] Case_Sensitivity.Insensitive - expect_column_names first_ones+["ab.+123"] <| table.remove_columns ["abcd123".to_regex] + expect_column_names first_ones <| data.table.remove_columns ["a.*".to_regex] + expect_column_names first_ones <| data.table.remove_columns ["ab.+123".to_regex] + expect_column_names first_ones+["abcd123"] <| data.table.remove_columns ["ab.+123"] Case_Sensitivity.Insensitive + expect_column_names first_ones+["ab.+123"] <| data.table.remove_columns ["abcd123".to_regex] group_builder.specify "should allow negative indices" <| - expect_column_names ["Baz", "foo 1", "ab.+123"] <| table.remove_columns [-1, -3, 0, 1] + expect_column_names ["Baz", "foo 1", "ab.+123"] <| data.table.remove_columns [-1, -3, 0, 1] if test_selection.supports_case_sensitive_columns then group_builder.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <| @@ -167,79 +213,81 @@ add_specs suite_builder setup = expect_column_names ["foo"] <| table.remove_columns "bar" Case_Sensitivity.Insensitive group_builder.specify "should correctly handle regexes matching multiple names" <| - expect_column_names ["Baz", "ab.+123", "abcd123"] <| table.remove_columns ["f.+".to_regex, "b.*".to_regex] + expect_column_names ["Baz", "ab.+123", "abcd123"] <| data.table.remove_columns ["f.+".to_regex, "b.*".to_regex] group_builder.specify "should correctly handle problems: out of bounds indices" <| selector = [1, 0, 100, -200, 300] - action = table.remove_columns selector on_problems=_ + action = data.table.remove_columns selector on_problems=_ tester = expect_column_names ["Baz", "foo 1", 
"foo 2", "ab.+123", "abcd123"] problems = [Missing_Input_Columns.Error [100, -200, 300]] Problems.test_problem_handling action problems tester - err = table.remove_columns selector error_on_missing_columns=True + err = data.table.remove_columns selector error_on_missing_columns=True err.should_fail_with Missing_Input_Columns group_builder.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] - t = table.remove_columns selector on_problems=Problem_Behavior.Report_Error + t = data.table.remove_columns selector on_problems=Problem_Behavior.Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t group_builder.specify "should correctly handle edge-cases: aliased indices" <| selector = [0, -7, -6, 1] - t = table.remove_columns selector on_problems=Problem_Behavior.Report_Error + t = data.table.remove_columns selector on_problems=Problem_Behavior.Report_Error expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t group_builder.specify "should correctly handle edge-cases: duplicate names" <| selector = ["foo", "foo"] - t = table.remove_columns selector on_problems=Problem_Behavior.Report_Error + t = data.table.remove_columns selector on_problems=Problem_Behavior.Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t group_builder.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <| selector = ["FOO", "foo"] - t = table.remove_columns selector Case_Sensitivity.Insensitive on_problems=Problem_Behavior.Report_Error + t = data.table.remove_columns selector Case_Sensitivity.Insensitive on_problems=Problem_Behavior.Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t group_builder.specify "should correctly handle problems: unmatched names" <| weird_name = '.*?-!@#!"' selector = ["foo", "hmm", weird_name] - action = table.remove_columns selector on_problems=_ + action = 
data.table.remove_columns selector on_problems=_ tester = expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] problems = [Missing_Input_Columns.Error ["hmm", weird_name]] Problems.test_problem_handling action problems tester - err = table.remove_columns selector error_on_missing_columns=True on_problems=Problem_Behavior.Ignore + err = data.table.remove_columns selector error_on_missing_columns=True on_problems=Problem_Behavior.Ignore err.should_fail_with Missing_Input_Columns group_builder.specify "should correctly handle problems: no columns in the output" <| [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> selector = [".*".to_regex] - t = table.remove_columns selector on_problems=pb + t = data.table.remove_columns selector on_problems=pb t.should_fail_with No_Output_Columns selector_2 = [".*".to_regex, "hmmm".to_regex] - t1 = table.remove_columns selector_2 + t1 = data.table.remove_columns selector_2 t1.should_fail_with No_Output_Columns # No cause specified - even if some criteria were unmatched, that is not the reason for the No_Output_Columns (the reason is all other columns got deleted, by other criteria that _did_ match). t1.catch.cause . 
should_equal Nothing suite_builder.group prefix+"Table.reorder_columns" group_builder-> + data = Select_Columns_Data.setup create_connection_fn table_builder + group_builder.specify "should work as shown in the doc examples" <| - expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| table.reorder_columns "foo" Position.After_Other_Columns - expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo", "bar"] <| table.reorder_columns ["foo", "bar"] Position.After_Other_Columns - expect_column_names ["foo 1", "foo 2", "bar", "Baz", "foo", "ab.+123", "abcd123"] <| table.reorder_columns ["foo.+".to_regex, "b.*".to_regex] case_sensitivity=Case_Sensitivity.Insensitive - expect_column_names ["bar", "foo", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] <| table.reorder_columns [1, 0] Position.Before_Other_Columns - expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| table.reorder_columns [0] Position.After_Other_Columns + expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| data.table.reorder_columns "foo" Position.After_Other_Columns + expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo", "bar"] <| data.table.reorder_columns ["foo", "bar"] Position.After_Other_Columns + expect_column_names ["foo 1", "foo 2", "bar", "Baz", "foo", "ab.+123", "abcd123"] <| data.table.reorder_columns ["foo.+".to_regex, "b.*".to_regex] case_sensitivity=Case_Sensitivity.Insensitive + expect_column_names ["bar", "foo", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] <| data.table.reorder_columns [1, 0] Position.Before_Other_Columns + expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| data.table.reorder_columns [0] Position.After_Other_Columns group_builder.specify "should correctly handle regex matching" <| - expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| table.reorder_columns ["foo".to_regex] 
Position.After_Other_Columns + expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| data.table.reorder_columns ["foo".to_regex] Position.After_Other_Columns rest = ["foo", "bar", "Baz", "foo 1", "foo 2"] - expect_column_names ["ab.+123", "abcd123"]+rest <| table.reorder_columns ["a.*".to_regex] - expect_column_names ["ab.+123", "abcd123"]+rest <| table.reorder_columns ["ab.+123".to_regex] - expect_column_names ["ab.+123"]+rest+["abcd123"] <| table.reorder_columns ["ab.+123"] - expect_column_names ["abcd123"]+rest+["ab.+123"] <| table.reorder_columns ["abcd123".to_regex] + expect_column_names ["ab.+123", "abcd123"]+rest <| data.table.reorder_columns ["a.*".to_regex] + expect_column_names ["ab.+123", "abcd123"]+rest <| data.table.reorder_columns ["ab.+123".to_regex] + expect_column_names ["ab.+123"]+rest+["abcd123"] <| data.table.reorder_columns ["ab.+123"] + expect_column_names ["abcd123"]+rest+["ab.+123"] <| data.table.reorder_columns ["abcd123".to_regex] group_builder.specify "should allow negative indices" <| - expect_column_names ["abcd123", "foo 2", "foo", "bar", "Baz", "foo 1", "ab.+123"] <| table.reorder_columns [-1, -3, 0, 1] + expect_column_names ["abcd123", "foo 2", "foo", "bar", "Baz", "foo 1", "ab.+123"] <| data.table.reorder_columns [-1, -3, 0, 1] if test_selection.supports_case_sensitive_columns then group_builder.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <| @@ -251,86 +299,73 @@ add_specs suite_builder setup = expect_column_names ["bar", "Bar", "foo"] <| table.reorder_columns ["bar"] case_sensitivity=Case_Sensitivity.Insensitive group_builder.specify "should correctly handle regexes matching multiple names" <| - expect_column_names ["bar", "foo", "foo 1", "foo 2", "Baz", "ab.+123", "abcd123"] <| table.reorder_columns ["b.*".to_regex, "f.+".to_regex] + expect_column_names ["bar", "foo", "foo 1", "foo 2", "Baz", "ab.+123", "abcd123"] <| data.table.reorder_columns 
["b.*".to_regex, "f.+".to_regex] group_builder.specify "should correctly handle problems: out of bounds indices" <| selector = [1, 0, 100, -200, 300] - action = table.reorder_columns selector on_problems=_ + action = data.table.reorder_columns selector on_problems=_ tester = expect_column_names ["bar", "foo", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] problems = [Missing_Input_Columns.Error [100, -200, 300]] Problems.test_problem_handling action problems tester - err = table.reorder_columns selector error_on_missing_columns=True + err = data.table.reorder_columns selector error_on_missing_columns=True err.should_fail_with Missing_Input_Columns group_builder.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] - t = table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error + t = data.table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] t group_builder.specify "should correctly handle edge-cases: aliased indices" <| selector = [0, -7, -6, 1] - t = table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error + t = data.table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo", "bar"] t group_builder.specify "should correctly handle edge-cases: duplicate names" <| selector = ["foo", "foo"] - t = table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error + t = data.table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] t group_builder.specify "should correctly handle problems: unmatched names" <| weird_name = '.*?-!@#!"' selector = 
["foo", "hmm", weird_name] - action = table.reorder_columns selector Position.After_Other_Columns on_problems=_ + action = data.table.reorder_columns selector Position.After_Other_Columns on_problems=_ tester = expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] problems = [Missing_Input_Columns.Error ["hmm", weird_name]] Problems.test_problem_handling action problems tester - err = table.reorder_columns selector Position.After_Other_Columns error_on_missing_columns=True + err = data.table.reorder_columns selector Position.After_Other_Columns error_on_missing_columns=True err.should_fail_with Missing_Input_Columns suite_builder.group prefix+"Table.sort_columns" group_builder-> - table = - col1 = ["foo 21", [1,2,3]] - col2 = ["foo 100", [4,5,6]] - col3 = ["foo 1", [7,8,9]] - col4 = ["Foo 2", [10,11,12]] - col5 = ["foo 3", [13,14,15]] - col6 = ["foo 001", [16,17,18]] - col7 = ["bar", [19,20,21]] - table_builder [col1, col2, col3, col4, col5, col6, col7] + data = Sort_Columns_Data.setup create_connection_fn table_builder group_builder.specify "should work as shown in the doc examples" <| - sorted = table.sort_columns + sorted = data.table.sort_columns expect_column_names ["Foo 2", "bar", "foo 001", "foo 1", "foo 100", "foo 21", "foo 3"] sorted sorted.columns.first.to_vector . 
should_equal [10,11,12] - expect_column_names ["bar", "foo 001", "foo 1", "Foo 2", "foo 3", "foo 21", "foo 100"] <| table.sort_columns text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True) - expect_column_names ["foo 3", "foo 21", "foo 100", "foo 1", "foo 001", "bar", "Foo 2"] <| table.sort_columns Sort_Direction.Descending + expect_column_names ["bar", "foo 001", "foo 1", "Foo 2", "foo 3", "foo 21", "foo 100"] <| data.table.sort_columns text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True) + expect_column_names ["foo 3", "foo 21", "foo 100", "foo 1", "foo 001", "bar", "Foo 2"] <| data.table.sort_columns Sort_Direction.Descending group_builder.specify "should correctly handle case-insensitive sorting" <| - expect_column_names ["bar", "foo 001", "foo 1", "foo 100", "Foo 2", "foo 21", "foo 3"] <| table.sort_columns text_ordering=(Text_Ordering.Case_Insensitive) + expect_column_names ["bar", "foo 001", "foo 1", "foo 100", "Foo 2", "foo 21", "foo 3"] <| data.table.sort_columns text_ordering=(Text_Ordering.Case_Insensitive) group_builder.specify "should correctly handle natural order sorting" <| - expect_column_names ["Foo 2", "bar", "foo 001", "foo 1", "foo 3", "foo 21", "foo 100"] <| table.sort_columns text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True) + expect_column_names ["Foo 2", "bar", "foo 001", "foo 1", "foo 3", "foo 21", "foo 100"] <| data.table.sort_columns text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True) group_builder.specify "should correctly handle various combinations of options" <| - expect_column_names ["foo 100", "foo 21", "foo 3", "Foo 2", "foo 1", "foo 001", "bar"] <| table.sort_columns Sort_Direction.Descending text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True) + expect_column_names ["foo 100", "foo 21", "foo 3", "Foo 2", "foo 1", "foo 001", "bar"] <| data.table.sort_columns Sort_Direction.Descending text_ordering=(Text_Ordering.Case_Insensitive 
sort_digits_as_numbers=True) suite_builder.group prefix+"Table.rename_columns" group_builder-> - table = - col1 = ["alpha", [1,2,3]] - col2 = ["beta", [4,5,6]] - col3 = ["gamma", [16,17,18]] - col4 = ["delta", [19,20,21]] - table_builder [col1, col2, col3, col4] + data = Rename_Columns_Data.setup create_connection_fn table_builder group_builder.specify "should work as shown in the doc examples" <| expect_column_names ["FirstColumn", "beta", "gamma", "delta"] <| - table.rename_columns ["FirstColumn"] + data.table.rename_columns ["FirstColumn"] expect_column_names ["prefix_alpha", "prefix_beta", "prefix_gamma", "prefix_delta"] <| - table.rename_columns (table.columns.map c-> "prefix_" + c.name) + data.table.rename_columns (data.table.columns.map c-> "prefix_" + c.name) t1 = table_builder [["alpha", [1]], ["name=123", [2]], ["name= foo bar", [3]]] expect_column_names ["alpha", "key:123", "key: foo bar"] <| @@ -339,81 +374,81 @@ add_specs suite_builder setup = group_builder.specify "should work by index" <| map = Map.from_vector [[0, "FirstColumn"], [-2, "Another"]] expect_column_names ["FirstColumn", "beta", "Another", "delta"] <| - table.rename_columns map + data.table.rename_columns map group_builder.specify "should work by position" <| vec = ["one", "two", "three"] expect_column_names ["one", "two", "three", "delta"] <| - table.rename_columns vec + data.table.rename_columns vec group_builder.specify "should work by Vector" <| vec = ["one", "two", "three"] expect_column_names ["one", "two", "three", "delta"] <| - table.rename_columns vec + data.table.rename_columns vec group_builder.specify "should work by Vector of Pairs" <| vec = [["beta", "one"], ["delta", "two"], ["alpha", "three"]] expect_column_names ["three", "one", "gamma", "two"] <| - table.rename_columns vec + data.table.rename_columns vec group_builder.specify "should work by name" <| map = Map.from_vector [["alpha", "FirstColumn"], ["delta", "Another"]] expect_column_names ["FirstColumn", "beta", 
"gamma", "Another"] <| - table.rename_columns map + data.table.rename_columns map group_builder.specify "should work by mixed Map" <| map = Map.from_vector [["alpha", "FirstColumn"], [-1, "Another"]] expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <| - table.rename_columns map + data.table.rename_columns map group_builder.specify "should work by name case-insensitively" <| map = Map.from_vector [["ALPHA", "FirstColumn"], ["DELTA", "Another"]] expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <| - table.rename_columns map Case_Sensitivity.Insensitive + data.table.rename_columns map Case_Sensitivity.Insensitive group_builder.specify "should work by name using regex" <| map = Map.from_vector [["a.*".to_regex, "FirstColumn"]] expect_column_names ["FirstColumn", "beta", "gamma", "delta"] <| - table.rename_columns map + data.table.rename_columns map group_builder.specify "should work by name using regex substitution" <| map = Map.from_vector [["a(.*)".to_regex, "$1"]] expect_column_names ["lpha", "beta", "gamma", "delta"] <| - table.rename_columns map + data.table.rename_columns map group_builder.specify "should correctly handle problems: unmatched names" <| weird_name = '.*?-!@#!"' map = Map.from_vector [["alpha", "FirstColumn"], ["omicron", "Another"], [weird_name, "Fixed"]] - action = table.rename_columns map error_on_missing_columns=False on_problems=_ + action = data.table.rename_columns map error_on_missing_columns=False on_problems=_ tester = expect_column_names ["FirstColumn", "beta", "gamma", "delta"] err_checker err = err.catch.should_be_a Missing_Input_Columns.Error err.catch.criteria.should_contain_the_same_elements_as ["omicron", weird_name] Problems.test_advanced_problem_handling action err_checker (x-> x) tester - err = table.rename_columns map + err = data.table.rename_columns map err.should_fail_with Missing_Input_Columns group_builder.specify "should correctly handle problems: out of bounds indices" <| map = Map.from_vector 
[[0, "FirstColumn"], [-1, "Another"], [100, "Boo"], [-200, "Nothing"], [300, "Here"]] - action = table.rename_columns map error_on_missing_columns=False on_problems=_ + action = data.table.rename_columns map error_on_missing_columns=False on_problems=_ tester = expect_column_names ["FirstColumn", "beta", "gamma", "Another"] err_checker err = err.catch.should_be_a Missing_Input_Columns.Error err.catch.criteria.should_contain_the_same_elements_as [-200, 100, 300] Problems.test_advanced_problem_handling action err_checker (x-> x) tester - err = table.rename_columns map + err = data.table.rename_columns map err.should_fail_with Missing_Input_Columns group_builder.specify "should correctly handle edge-cases: aliased indices" <| map1 = Map.from_vector [[1, "FirstColumn"], [-3, "FirstColumn"]] - t1 = table.rename_columns map1 on_problems=Problem_Behavior.Report_Error + t1 = data.table.rename_columns map1 on_problems=Problem_Behavior.Report_Error Problems.assume_no_problems t1 expect_column_names ["alpha", "FirstColumn", "gamma", "delta"] t1 map2 = Map.from_vector [[1, "FirstColumn"], [-3, "DifferentName!"]] - t2 = table.rename_columns map2 on_problems=Problem_Behavior.Report_Error + t2 = data.table.rename_columns map2 on_problems=Problem_Behavior.Report_Error t2.should_fail_with Ambiguous_Column_Rename err = t2.catch err.column_name . 
should_equal "beta" @@ -447,44 +482,44 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle problems: invalid names ''" <| map = Map.from_vector [[1, ""]] [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> - r = table.rename_columns map on_problems=pb + r = data.table.rename_columns map on_problems=pb r.should_fail_with Invalid_Column_Names group_builder.specify "should correctly handle problems: invalid names Nothing" <| map = ["alpha", Nothing] [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> - r = table.rename_columns map on_problems=pb + r = data.table.rename_columns map on_problems=pb r.should_fail_with Invalid_Column_Names group_builder.specify "should correctly handle problems: invalid names null character" <| map = ["alpha", 'a\0b'] [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> - r = table.rename_columns map on_problems=pb + r = data.table.rename_columns map on_problems=pb r.should_fail_with Invalid_Column_Names group_builder.specify "should correctly handle problems: duplicate names" <| map = ["Test", "Test", "Test", "Test"] - action = table.rename_columns map on_problems=_ + action = data.table.rename_columns map on_problems=_ tester = expect_column_names ["Test 1", "Test 2", "Test 3", "Test"] problems = [Duplicate_Output_Column_Names.Error ["Test", "Test", "Test"]] Problems.test_problem_handling action problems tester group_builder.specify "should correctly handle problems: new name is clashing with existing name of existing column" <| map = Map.from_vector [["alpha", "beta"]] - action = table.rename_columns map on_problems=_ + action = data.table.rename_columns map on_problems=_ tester = expect_column_names ["beta", "beta 1", "gamma", "delta"] problems = [Duplicate_Output_Column_Names.Error ["beta"]] Problems.test_problem_handling action problems tester map2 = 
Map.from_vector [["beta", "alpha"]] - action2 = table.rename_columns map2 on_problems=_ + action2 = data.table.rename_columns map2 on_problems=_ tester2 = expect_column_names ["alpha 1", "alpha", "gamma", "delta"] problems2 = [Duplicate_Output_Column_Names.Error ["alpha"]] Problems.test_problem_handling action2 problems2 tester2 group_builder.specify "should correctly handle problems: too many input names" <| map = ["A", "B", "C", "D", "E", "F"] - action = table.rename_columns map on_problems=_ + action = data.table.rename_columns map on_problems=_ tester = expect_column_names ["A", "B", "C", "D"] problem_checker problem = problem.should_be_a Too_Many_Column_Names_Provided.Error From b152643d4c311f92c56b6eaf39daf6d85c88cd52 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 15:47:06 +0100 Subject: [PATCH 44/93] Fix Take_Drop_Spec --- .../Take_Drop_Spec.enso | 481 ++++++++++-------- 1 file changed, 257 insertions(+), 224 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso index f9b06b7661da..4fc2650488d8 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso @@ -12,124 +12,166 @@ from Standard.Test_New import all from project.Util import all from project.Common_Table_Operations.Util import run_default_backend +main = run_default_backend add_specs + +type Table_Take_Drop_Data + Value ~data + + connection self = self.data.at 0 + table self = self.data.at 1 + empty self = self.data.at 2 + + setup create_connection_fn table_builder = Table_Take_Drop_Data.Value <| + connection = create_connection_fn Nothing + table = + col1 = ["alpha", [1,2,3,4,5,6,7,8]] + col2 = ["beta", ["A","B","C","D","E","F","G","H"]] + (table_builder [col1, col2] connection=connection) . 
order_by "alpha" + empty = table.remove_all_rows + [connection, table, empty] + + +type Column_Take_Drop_Data + Value ~data + + connection self = self.data.at 0 + table self = self.data.at 1 + alpha self = self.data.at 2 + beta self = self.data.at 3 + empty_alpha self = self.data.at 4 + empty_beta self = self.data.at 5 + + setup create_connection_fn table_builder = Column_Take_Drop_Data.Value <| + connection = create_connection_fn Nothing + table = + col1 = ["alpha", [1,2,3,4,5,6,7,8]] + col2 = ["beta", ["A","B","C","D","E","F","G","H"]] + (table_builder [col1, col2] connection=connection) . order_by "alpha" + alpha = table.at "alpha" + beta = table.at "beta" + + empty_table = table.remove_all_rows + empty_alpha = empty_table.at "alpha" + empty_beta = empty_table.at "beta" + + [connection, table, alpha, beta, empty_alpha, empty_beta] + + add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func suite_builder.group prefix+"Table.take/drop" group_builder-> - table = - col1 = ["alpha", [1,2,3,4,5,6,7,8]] - col2 = ["beta", ["A","B","C","D","E","F","G","H"]] - table_builder [col1, col2] . order_by "alpha" - empty = table.remove_all_rows + data = Table_Take_Drop_Data.setup create_connection_fn table_builder group_builder.specify "should allow selecting first or last N rows" <| - table.take.at "alpha" . to_vector . should_equal [1] - table.take.at "beta" . to_vector . should_equal ["A"] - table.drop.at "alpha" . to_vector . should_equal [2,3,4,5,6,7,8] - - table.take (First 4) . at "alpha" . to_vector . should_equal [1,2,3,4] - table.take (First 0) . at "alpha" . to_vector . should_equal [] - table.take (First -1) . at "alpha" . to_vector . should_equal [] - table.take (First 100) . should_equal table - - table.drop (First 2) . at "beta" . to_vector . should_equal ["C","D","E","F","G","H"] - table.drop (First 0) . should_equal table - table.drop (First -1) . 
should_equal table - table.drop (First 100) . should_equal empty - - table.take 4 . at "alpha" . to_vector . should_equal [1,2,3,4] - table.take 0 . at "alpha" . to_vector . should_equal [] - table.take -1 . at "alpha" . to_vector . should_equal [] - table.take 100 . should_equal table - - table.drop 2 . at "beta" . to_vector . should_equal ["C","D","E","F","G","H"] - table.drop 0 . should_equal table - table.drop -1 . should_equal table - table.drop 100 . should_equal empty - - table.take (Last 4) . at "beta" . to_vector . should_equal ["E","F","G","H"] - table.take (Last 0) . should_equal empty - table.take (Last -1) . should_equal empty - table.take (Last 100) . should_equal table - - table.drop (Last 2) . at "alpha" . to_vector . should_equal [1,2,3,4,5,6] - table.drop (Last 0) . should_equal table - table.drop (Last -1) . should_equal table - table.drop (Last 100) . should_equal empty + data.table.take.at "alpha" . to_vector . should_equal [1] + data.table.take.at "beta" . to_vector . should_equal ["A"] + data.table.drop.at "alpha" . to_vector . should_equal [2,3,4,5,6,7,8] + + data.table.take (First 4) . at "alpha" . to_vector . should_equal [1,2,3,4] + data.table.take (First 0) . at "alpha" . to_vector . should_equal [] + data.table.take (First -1) . at "alpha" . to_vector . should_equal [] + data.table.take (First 100) . should_equal data.table + + data.table.drop (First 2) . at "beta" . to_vector . should_equal ["C","D","E","F","G","H"] + data.table.drop (First 0) . should_equal data.table + data.table.drop (First -1) . should_equal data.table + data.table.drop (First 100) . should_equal data.empty + + data.table.take 4 . at "alpha" . to_vector . should_equal [1,2,3,4] + data.table.take 0 . at "alpha" . to_vector . should_equal [] + data.table.take -1 . at "alpha" . to_vector . should_equal [] + data.table.take 100 . should_equal data.table + + data.table.drop 2 . at "beta" . to_vector . should_equal ["C","D","E","F","G","H"] + data.table.drop 0 . 
should_equal data.table + data.table.drop -1 . should_equal data.table + data.table.drop 100 . should_equal data.empty + + data.table.take (Last 4) . at "beta" . to_vector . should_equal ["E","F","G","H"] + data.table.take (Last 0) . should_equal data.empty + data.table.take (Last -1) . should_equal data.empty + data.table.take (Last 100) . should_equal data.table + + data.table.drop (Last 2) . at "alpha" . to_vector . should_equal [1,2,3,4,5,6] + data.table.drop (Last 0) . should_equal data.table + data.table.drop (Last -1) . should_equal data.table + data.table.drop (Last 100) . should_equal data.empty group_builder.specify "should handle consecutive take/drops" <| - table.take 5 . order_by "alpha" . take 3 . at "alpha" . to_vector . should_equal [1, 2, 3] - table.take 3 . order_by "alpha" . take 5 . at "alpha" . to_vector . should_equal [1, 2, 3] - table.take 5 . order_by "alpha" . drop 3 . at "alpha" . to_vector . should_equal [4, 5] - table.drop 3 . order_by "alpha" . drop 2 . at "alpha" . to_vector . should_equal [6, 7, 8] - table.drop 2 . order_by "alpha" . drop 3 . at "alpha" . to_vector . should_equal [6, 7, 8] - table.drop 3 . order_by "alpha" . take 2 . at "alpha" . to_vector . should_equal [4, 5] + data.table.take 5 . order_by "alpha" . take 3 . at "alpha" . to_vector . should_equal [1, 2, 3] + data.table.take 3 . order_by "alpha" . take 5 . at "alpha" . to_vector . should_equal [1, 2, 3] + data.table.take 5 . order_by "alpha" . drop 3 . at "alpha" . to_vector . should_equal [4, 5] + data.table.drop 3 . order_by "alpha" . drop 2 . at "alpha" . to_vector . should_equal [6, 7, 8] + data.table.drop 2 . order_by "alpha" . drop 3 . at "alpha" . to_vector . should_equal [6, 7, 8] + data.table.drop 3 . order_by "alpha" . take 2 . at "alpha" . to_vector . should_equal [4, 5] group_builder.specify "should allow selecting rows by ranges or indices" <| - table.take (2.up_to 4) . at "beta" . to_vector . should_equal ["C", "D"] - table.take (0.up_to 0) . 
should_equal empty - table.take (100.up_to 100) . should_fail_with Index_Out_Of_Bounds - table.take (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8) - table.take (0.up_to 100) . should_equal table - table.take (0.up_to table.row_count) . should_equal table - empty.take (0.up_to 0) . should_fail_with Index_Out_Of_Bounds - empty.take (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0) - table.take (100.up_to 99) . should_fail_with Index_Out_Of_Bounds - - table.drop (2.up_to 4) . at "alpha" . to_vector . should_equal [1, 2, 5, 6, 7, 8] - table.drop (0.up_to 0) . should_equal table - table.drop (100.up_to 100) . should_fail_with Index_Out_Of_Bounds - table.drop (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8) - table.drop (0.up_to 100) . should_equal empty - table.drop (0.up_to table.row_count) . should_equal empty - empty.drop (0.up_to 0) . should_fail_with Index_Out_Of_Bounds - empty.drop (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0) - table.drop (100.up_to 99) . should_fail_with Index_Out_Of_Bounds - - table.take (Index_Sub_Range.By_Index 0) . at "beta" . to_vector . should_equal ["A"] - empty.take (Index_Sub_Range.By_Index 0) . should_fail_with Index_Out_Of_Bounds - table.take (Index_Sub_Range.By_Index []) . should_equal empty - table.take (Index_Sub_Range.By_Index [-1, -1]) . at "beta" . to_vector . should_equal ["H", "H"] - table.take (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . at "alpha" . to_vector . should_equal [1, 1, 4, 5, 6, 7, 8] - table.take (0.up_to 100 . with_step 2) . at "alpha" . to_vector . should_equal [1, 3, 5, 7] - table.take (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . at "alpha" . to_vector . should_equal [1, 3, 5, 7, 2, 4, 6] - table.take (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . at "alpha" . to_vector . should_equal [2, 3, 3, 4, 5] - table.take (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . at "alpha" . 
to_vector . should_equal [3, 4, 5, 2, 3] - table.take (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds - table.take (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds - - table.drop (Index_Sub_Range.By_Index 0) . at "alpha" . to_vector . should_equal [2, 3, 4, 5, 6, 7, 8] - table.drop (Index_Sub_Range.By_Index []) . should_equal table - table.drop (Index_Sub_Range.By_Index [-1, -1]) . at "alpha" . to_vector . should_equal [1, 2, 3, 4, 5, 6, 7] - table.drop (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . at "alpha" . to_vector . should_equal [2, 3] - table.drop (0.up_to 100 . with_step 2) . at "alpha" . to_vector . should_equal [2, 4, 6, 8] - table.drop (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . at "alpha" . to_vector . should_equal [8] - table.drop (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . at "alpha" . to_vector . should_equal [1, 6, 7, 8] - table.drop (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . at "alpha" . to_vector . should_equal [1, 6, 7, 8] - table.drop (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds - table.drop (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds + data.table.take (2.up_to 4) . at "beta" . to_vector . should_equal ["C", "D"] + data.table.take (0.up_to 0) . should_equal data.empty + data.table.take (100.up_to 100) . should_fail_with Index_Out_Of_Bounds + data.table.take (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8) + data.table.take (0.up_to 100) . should_equal data.table + data.table.take (0.up_to data.table.row_count) . should_equal data.table + data.empty.take (0.up_to 0) . should_fail_with Index_Out_Of_Bounds + data.empty.take (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0) + data.table.take (100.up_to 99) . should_fail_with Index_Out_Of_Bounds + + data.table.drop (2.up_to 4) . at "alpha" . to_vector . 
should_equal [1, 2, 5, 6, 7, 8] + data.table.drop (0.up_to 0) . should_equal data.table + data.table.drop (100.up_to 100) . should_fail_with Index_Out_Of_Bounds + data.table.drop (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8) + data.table.drop (0.up_to 100) . should_equal data.empty + data.table.drop (0.up_to data.table.row_count) . should_equal data.empty + data.empty.drop (0.up_to 0) . should_fail_with Index_Out_Of_Bounds + data.empty.drop (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0) + data.table.drop (100.up_to 99) . should_fail_with Index_Out_Of_Bounds + + data.table.take (Index_Sub_Range.By_Index 0) . at "beta" . to_vector . should_equal ["A"] + data.empty.take (Index_Sub_Range.By_Index 0) . should_fail_with Index_Out_Of_Bounds + data.table.take (Index_Sub_Range.By_Index []) . should_equal data.empty + data.table.take (Index_Sub_Range.By_Index [-1, -1]) . at "beta" . to_vector . should_equal ["H", "H"] + data.table.take (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . at "alpha" . to_vector . should_equal [1, 1, 4, 5, 6, 7, 8] + data.table.take (0.up_to 100 . with_step 2) . at "alpha" . to_vector . should_equal [1, 3, 5, 7] + data.table.take (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . at "alpha" . to_vector . should_equal [1, 3, 5, 7, 2, 4, 6] + data.table.take (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . at "alpha" . to_vector . should_equal [2, 3, 3, 4, 5] + data.table.take (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . at "alpha" . to_vector . should_equal [3, 4, 5, 2, 3] + data.table.take (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds + data.table.take (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds + + data.table.drop (Index_Sub_Range.By_Index 0) . at "alpha" . to_vector . should_equal [2, 3, 4, 5, 6, 7, 8] + data.table.drop (Index_Sub_Range.By_Index []) . 
should_equal data.table + data.table.drop (Index_Sub_Range.By_Index [-1, -1]) . at "alpha" . to_vector . should_equal [1, 2, 3, 4, 5, 6, 7] + data.table.drop (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . at "alpha" . to_vector . should_equal [2, 3] + data.table.drop (0.up_to 100 . with_step 2) . at "alpha" . to_vector . should_equal [2, 4, 6, 8] + data.table.drop (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . at "alpha" . to_vector . should_equal [8] + data.table.drop (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . at "alpha" . to_vector . should_equal [1, 6, 7, 8] + data.table.drop (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . at "alpha" . to_vector . should_equal [1, 6, 7, 8] + data.table.drop (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds + data.table.drop (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds group_builder.specify "should allow selecting every Nth row" <| - table.take (Every 1) . should_equal table - table.take (Every 3) . at "alpha" . to_vector . should_equal [1, 4, 7] - table.take (Every 3 first=1) . at "alpha" . to_vector . should_equal [2, 5, 8] - table.take (Every 2 first=1) . at "beta" . to_vector . should_equal ["B", "D", "F", "H"] - table.take (Every 2 first=100) . at "alpha" . to_vector . should_equal [] - table.take (Every 200) . at "alpha" . to_vector . should_equal [1] - empty.take (Every 2) . should_equal empty - table.take (Every 0) . should_fail_with Illegal_Argument - empty.take (Every 0) . should_fail_with Illegal_Argument - - table.drop (Every 1) . should_equal empty - table.drop (Every 3) . at "alpha" . to_vector . should_equal [2, 3, 5, 6, 8] - table.drop (Every 3 first=1) . at "alpha" . to_vector . should_equal [1, 3, 4, 6, 7] - table.drop (Every 2 first=1) . at "alpha" . to_vector . should_equal [1, 3, 5, 7] - table.drop (Every 2 first=100) . should_equal table - table.drop (Every 200) . at "beta" . to_vector . 
should_equal ["B", "C", "D", "E", "F", "G", "H"] - empty.drop (Every 2) . should_equal empty - table.drop (Every 0) . should_fail_with Illegal_Argument - empty.drop (Every 0) . should_fail_with Illegal_Argument + data.table.take (Every 1) . should_equal data.table + data.table.take (Every 3) . at "alpha" . to_vector . should_equal [1, 4, 7] + data.table.take (Every 3 first=1) . at "alpha" . to_vector . should_equal [2, 5, 8] + data.table.take (Every 2 first=1) . at "beta" . to_vector . should_equal ["B", "D", "F", "H"] + data.table.take (Every 2 first=100) . at "alpha" . to_vector . should_equal [] + data.table.take (Every 200) . at "alpha" . to_vector . should_equal [1] + data.empty.take (Every 2) . should_equal data.empty + data.table.take (Every 0) . should_fail_with Illegal_Argument + data.empty.take (Every 0) . should_fail_with Illegal_Argument + + data.table.drop (Every 1) . should_equal data.empty + data.table.drop (Every 3) . at "alpha" . to_vector . should_equal [2, 3, 5, 6, 8] + data.table.drop (Every 3 first=1) . at "alpha" . to_vector . should_equal [1, 3, 4, 6, 7] + data.table.drop (Every 2 first=1) . at "alpha" . to_vector . should_equal [1, 3, 5, 7] + data.table.drop (Every 2 first=100) . should_equal data.table + data.table.drop (Every 200) . at "beta" . to_vector . should_equal ["B", "C", "D", "E", "F", "G", "H"] + data.empty.drop (Every 2) . should_equal data.empty + data.table.drop (Every 0) . should_fail_with Illegal_Argument + data.empty.drop (Every 0) . should_fail_with Illegal_Argument if setup.is_database.not then group_builder.specify "should allow sampling rows" <| @@ -150,19 +192,19 @@ add_specs suite_builder setup = three.drop (Sample 1) . should_equal two three.drop (Sample 100) . should_equal empty - rnd = table.take (Sample 3 seed=42) + rnd = data.table.take (Sample 3 seed=42) random_indices = [5, 6, 2] - alpha_sample = random_indices.map (table.at "alpha" . to_vector . at) - beta_sample = random_indices.map (table.at "beta" . 
to_vector . at) + alpha_sample = random_indices.map (data.table.at "alpha" . to_vector . at) + beta_sample = random_indices.map (data.table.at "beta" . to_vector . at) rnd.at "alpha" . to_vector . should_equal alpha_sample rnd.at "beta" . to_vector . should_equal beta_sample group_builder.specify "sampling should be deterministic when a seed is supplied" <| - table.take (Sample 3 seed=4200000) . should_equal (table.take (Sample 3 seed=4200000)) + data.table.take (Sample 3 seed=4200000) . should_equal (data.table.take (Sample 3 seed=4200000)) group_builder.specify "sampling should be non-deterministic when a seed is not supplied" <| 0.up_to 3 . map _-> - table.take (Sample 3) . should_not_equal (table.take (Sample 3)) + data.table.take (Sample 3) . should_not_equal (data.table.take (Sample 3)) if setup.is_database.not then group_builder.specify "should allow selecting rows as long as they satisfy a predicate" <| @@ -200,7 +242,7 @@ add_specs suite_builder setup = False -> unordered_table.take . at "alpha" . to_vector . should_equal [1] group_builder.specify "Should work correctly after aggregation" <| - t0 = table_builder [["X", ["a", "b", "a", "c"]], ["Y", [1, 2, 4, 8]]] + t0 = table_builder [["X", ["a", "b", "a", "c"]], ["Y", [1, 2, 4, 8]]] connection=data.connection t1 = t0.aggregate [Group_By "X", Sum "Y"] t2 = t1.order_by "X" . take 2 @@ -212,124 +254,115 @@ add_specs suite_builder setup = t3.at "Sum Y" . to_vector . should_equal [2.0, 8.0] suite_builder.group prefix+"Column.take/drop" group_builder-> - table = - col1 = ["alpha", [1,2,3,4,5,6,7,8]] - col2 = ["beta", ["A","B","C","D","E","F","G","H"]] - table_builder [col1, col2] . 
order_by "alpha" - alpha = table.at "alpha" - beta = table.at "beta" - - empty_table = table.remove_all_rows - empty_alpha = empty_table.at "alpha" - empty_beta = empty_table.at "beta" - + data = Column_Take_Drop_Data.setup create_connection_fn table_builder + group_builder.specify "should allow selecting first or last N rows" <| - alpha.take.to_vector . should_equal [1] - beta.take.to_vector . should_equal ["A"] - alpha.drop.to_vector . should_equal [2,3,4,5,6,7,8] - - alpha.take (First 4) . to_vector . should_equal [1,2,3,4] - alpha.take (First 0) . should_equal empty_alpha - alpha.take (First -1) . should_equal empty_alpha - alpha.take (First 100) . should_equal alpha - - alpha.take 4 . to_vector . should_equal [1,2,3,4] - alpha.take 0 . should_equal empty_alpha - alpha.take -1 . should_equal empty_alpha - alpha.take 100 . should_equal alpha - - beta.drop (First 2) . to_vector . should_equal ["C","D","E","F","G","H"] - alpha.drop (First 0) . should_equal alpha - alpha.drop (First -1) . should_equal alpha - alpha.drop (First 100) . should_equal empty_alpha - - beta.drop 2 . to_vector . should_equal ["C","D","E","F","G","H"] - alpha.drop 0 . should_equal alpha - alpha.drop -1 . should_equal alpha - alpha.drop 100 . should_equal empty_alpha - - beta.take (Last 4) . to_vector . should_equal ["E","F","G","H"] - beta.take (Last 0) . should_equal empty_beta - beta.take (Last -1) . should_equal empty_beta - beta.take (Last 100) . should_equal beta - - alpha.drop (Last 2) . to_vector . should_equal [1,2,3,4,5,6] - alpha.drop (Last 0) . should_equal alpha - alpha.drop (Last -1) . should_equal alpha - alpha.drop (Last 100) . should_equal empty_alpha + data.alpha.take.to_vector . should_equal [1] + data.beta.take.to_vector . should_equal ["A"] + data.alpha.drop.to_vector . should_equal [2,3,4,5,6,7,8] + + data.alpha.take (First 4) . to_vector . should_equal [1,2,3,4] + data.alpha.take (First 0) . should_equal data.empty_alpha + data.alpha.take (First -1) . 
should_equal data.empty_alpha + data.alpha.take (First 100) . should_equal data.alpha + + data.alpha.take 4 . to_vector . should_equal [1,2,3,4] + data.alpha.take 0 . should_equal data.empty_alpha + data.alpha.take -1 . should_equal data.empty_alpha + data.alpha.take 100 . should_equal data.alpha + + data.beta.drop (First 2) . to_vector . should_equal ["C","D","E","F","G","H"] + data.alpha.drop (First 0) . should_equal data.alpha + data.alpha.drop (First -1) . should_equal data.alpha + data.alpha.drop (First 100) . should_equal data.empty_alpha + + data.beta.drop 2 . to_vector . should_equal ["C","D","E","F","G","H"] + data.alpha.drop 0 . should_equal data.alpha + data.alpha.drop -1 . should_equal data.alpha + data.alpha.drop 100 . should_equal data.empty_alpha + + data.beta.take (Last 4) . to_vector . should_equal ["E","F","G","H"] + data.beta.take (Last 0) . should_equal data.empty_beta + data.beta.take (Last -1) . should_equal data.empty_beta + data.beta.take (Last 100) . should_equal data.beta + + data.alpha.drop (Last 2) . to_vector . should_equal [1,2,3,4,5,6] + data.alpha.drop (Last 0) . should_equal data.alpha + data.alpha.drop (Last -1) . should_equal data.alpha + data.alpha.drop (Last 100) . should_equal data.empty_alpha group_builder.specify "should handle consecutive take/drops" <| - alpha.take 5 . sort . take 3 . to_vector . should_equal [1, 2, 3] - alpha.take 3 . sort . take 5 . to_vector . should_equal [1, 2, 3] - alpha.take 5 . sort . drop 3 . to_vector . should_equal [4, 5] - alpha.drop 3 . sort . drop 2 . to_vector . should_equal [6, 7, 8] - alpha.drop 2 . sort . drop 3 . to_vector . should_equal [6, 7, 8] - alpha.drop 3 . sort . take 2 . to_vector . should_equal [4, 5] + data.alpha.take 5 . sort . take 3 . to_vector . should_equal [1, 2, 3] + data.alpha.take 3 . sort . take 5 . to_vector . should_equal [1, 2, 3] + data.alpha.take 5 . sort . drop 3 . to_vector . should_equal [4, 5] + data.alpha.drop 3 . sort . drop 2 . to_vector . 
should_equal [6, 7, 8] + data.alpha.drop 2 . sort . drop 3 . to_vector . should_equal [6, 7, 8] + data.alpha.drop 3 . sort . take 2 . to_vector . should_equal [4, 5] group_builder.specify "should allow selecting rows by ranges or indices" <| - beta.take (2.up_to 4) . to_vector . should_equal ["C", "D"] - beta.take (0.up_to 0) . should_equal empty_beta - beta.take (100.up_to 100) . should_fail_with Index_Out_Of_Bounds - beta.take (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8) - beta.take (0.up_to 100) . should_equal beta - beta.take (0.up_to table.row_count) . should_equal beta - empty_beta.take (0.up_to 0) . should_fail_with Index_Out_Of_Bounds - empty_beta.take (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0) - beta.take (100.up_to 99) . should_fail_with Index_Out_Of_Bounds - - alpha.drop (2.up_to 4) . to_vector . should_equal [1, 2, 5, 6, 7, 8] - alpha.drop (0.up_to 0) . should_equal alpha - alpha.drop (100.up_to 100) . should_fail_with Index_Out_Of_Bounds - alpha.drop (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8) - alpha.drop (0.up_to 100) . should_equal empty_alpha - alpha.drop (0.up_to table.row_count) . should_equal empty_alpha - empty_alpha.drop (0.up_to 0) . should_fail_with Index_Out_Of_Bounds - empty_alpha.drop (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0) - alpha.drop (100.up_to 99) . should_fail_with Index_Out_Of_Bounds - - beta.take (Index_Sub_Range.By_Index 0) . to_vector . should_equal ["A"] - empty_beta.take (Index_Sub_Range.By_Index 0) . should_fail_with Index_Out_Of_Bounds - beta.take (Index_Sub_Range.By_Index []) . should_equal empty_beta - beta.take (Index_Sub_Range.By_Index [-1, -1]) . to_vector . should_equal ["H", "H"] - alpha.take (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . to_vector . should_equal [1, 1, 4, 5, 6, 7, 8] - alpha.take (0.up_to 100 . with_step 2) . to_vector . 
should_equal [1, 3, 5, 7] - alpha.take (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . to_vector . should_equal [1, 3, 5, 7, 2, 4, 6] - alpha.take (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . to_vector . should_equal [2, 3, 3, 4, 5] - alpha.take (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . to_vector . should_equal [3, 4, 5, 2, 3] - alpha.take (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds - alpha.take (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds - - alpha.drop (Index_Sub_Range.By_Index 0) . to_vector . should_equal [2, 3, 4, 5, 6, 7, 8] - alpha.drop (Index_Sub_Range.By_Index []) . should_equal alpha - alpha.drop (Index_Sub_Range.By_Index [-1, -1]) . to_vector . should_equal [1, 2, 3, 4, 5, 6, 7] - alpha.drop (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . to_vector . should_equal [2, 3] - alpha.drop (0.up_to 100 . with_step 2) . to_vector . should_equal [2, 4, 6, 8] - alpha.drop (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . to_vector . should_equal [8] - alpha.drop (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . to_vector . should_equal [1, 6, 7, 8] - alpha.drop (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . to_vector . should_equal [1, 6, 7, 8] - alpha.drop (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds - alpha.drop (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds + data.beta.take (2.up_to 4) . to_vector . should_equal ["C", "D"] + data.beta.take (0.up_to 0) . should_equal data.empty_beta + data.beta.take (100.up_to 100) . should_fail_with Index_Out_Of_Bounds + data.beta.take (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8) + data.beta.take (0.up_to 100) . should_equal data.beta + data.beta.take (0.up_to data.table.row_count) . should_equal data.beta + data.empty_beta.take (0.up_to 0) . 
should_fail_with Index_Out_Of_Bounds + data.empty_beta.take (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0) + data.beta.take (100.up_to 99) . should_fail_with Index_Out_Of_Bounds + + data.alpha.drop (2.up_to 4) . to_vector . should_equal [1, 2, 5, 6, 7, 8] + data.alpha.drop (0.up_to 0) . should_equal data.alpha + data.alpha.drop (100.up_to 100) . should_fail_with Index_Out_Of_Bounds + data.alpha.drop (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8) + data.alpha.drop (0.up_to 100) . should_equal data.empty_alpha + data.alpha.drop (0.up_to data.table.row_count) . should_equal data.empty_alpha + data.empty_alpha.drop (0.up_to 0) . should_fail_with Index_Out_Of_Bounds + data.empty_alpha.drop (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0) + data.alpha.drop (100.up_to 99) . should_fail_with Index_Out_Of_Bounds + + data.beta.take (Index_Sub_Range.By_Index 0) . to_vector . should_equal ["A"] + data.empty_beta.take (Index_Sub_Range.By_Index 0) . should_fail_with Index_Out_Of_Bounds + data.beta.take (Index_Sub_Range.By_Index []) . should_equal data.empty_beta + data.beta.take (Index_Sub_Range.By_Index [-1, -1]) . to_vector . should_equal ["H", "H"] + data.alpha.take (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . to_vector . should_equal [1, 1, 4, 5, 6, 7, 8] + data.alpha.take (0.up_to 100 . with_step 2) . to_vector . should_equal [1, 3, 5, 7] + data.alpha.take (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . to_vector . should_equal [1, 3, 5, 7, 2, 4, 6] + data.alpha.take (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . to_vector . should_equal [2, 3, 3, 4, 5] + data.alpha.take (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . to_vector . should_equal [3, 4, 5, 2, 3] + data.alpha.take (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds + data.alpha.take (Index_Sub_Range.By_Index 100) . 
should_fail_with Index_Out_Of_Bounds + + data.alpha.drop (Index_Sub_Range.By_Index 0) . to_vector . should_equal [2, 3, 4, 5, 6, 7, 8] + data.alpha.drop (Index_Sub_Range.By_Index []) . should_equal data.alpha + data.alpha.drop (Index_Sub_Range.By_Index [-1, -1]) . to_vector . should_equal [1, 2, 3, 4, 5, 6, 7] + data.alpha.drop (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . to_vector . should_equal [2, 3] + data.alpha.drop (0.up_to 100 . with_step 2) . to_vector . should_equal [2, 4, 6, 8] + data.alpha.drop (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . to_vector . should_equal [8] + data.alpha.drop (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . to_vector . should_equal [1, 6, 7, 8] + data.alpha.drop (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . to_vector . should_equal [1, 6, 7, 8] + data.alpha.drop (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds + data.alpha.drop (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds group_builder.specify "should allow selecting every Nth row" <| - alpha.take (Every 1) . should_equal alpha - alpha.take (Every 3) . to_vector . should_equal [1, 4, 7] - alpha.take (Every 3 first=1) . to_vector . should_equal [2, 5, 8] - beta.take (Every 2 first=1) . to_vector . should_equal ["B", "D", "F", "H"] - alpha.take (Every 2 first=100) . to_vector . should_equal [] - alpha.take (Every 200) . to_vector . should_equal [1] - empty_beta.take (Every 2) . should_equal empty_beta - beta.take (Every 0) . should_fail_with Illegal_Argument - empty_beta.take (Every 0) . should_fail_with Illegal_Argument - - alpha.drop (Every 1) . should_equal empty_alpha - alpha.drop (Every 3) . to_vector . should_equal [2, 3, 5, 6, 8] - alpha.drop (Every 3 first=1) . to_vector . should_equal [1, 3, 4, 6, 7] - alpha.drop (Every 2 first=1) . to_vector . should_equal [1, 3, 5, 7] - alpha.drop (Every 2 first=100) . should_equal alpha - beta.drop (Every 200) . to_vector . 
should_equal ["B", "C", "D", "E", "F", "G", "H"] - empty_beta.drop (Every 2) . should_equal empty_beta - beta.drop (Every 0) . should_fail_with Illegal_Argument - empty_beta.drop (Every 0) . should_fail_with Illegal_Argument + data.alpha.take (Every 1) . should_equal data.alpha + data.alpha.take (Every 3) . to_vector . should_equal [1, 4, 7] + data.alpha.take (Every 3 first=1) . to_vector . should_equal [2, 5, 8] + data.beta.take (Every 2 first=1) . to_vector . should_equal ["B", "D", "F", "H"] + data.alpha.take (Every 2 first=100) . to_vector . should_equal [] + data.alpha.take (Every 200) . to_vector . should_equal [1] + data.empty_beta.take (Every 2) . should_equal data.empty_beta + data.beta.take (Every 0) . should_fail_with Illegal_Argument + data.empty_beta.take (Every 0) . should_fail_with Illegal_Argument + + data.alpha.drop (Every 1) . should_equal data.empty_alpha + data.alpha.drop (Every 3) . to_vector . should_equal [2, 3, 5, 6, 8] + data.alpha.drop (Every 3 first=1) . to_vector . should_equal [1, 3, 4, 6, 7] + data.alpha.drop (Every 2 first=1) . to_vector . should_equal [1, 3, 5, 7] + data.alpha.drop (Every 2 first=100) . should_equal data.alpha + data.beta.drop (Every 200) . to_vector . should_equal ["B", "C", "D", "E", "F", "G", "H"] + data.empty_beta.drop (Every 2) . should_equal data.empty_beta + data.beta.drop (Every 0) . should_fail_with Illegal_Argument + data.empty_beta.drop (Every 0) . should_fail_with Illegal_Argument if setup.is_database.not then group_builder.specify "should allow sampling rows" <| @@ -360,9 +393,9 @@ add_specs suite_builder setup = three.drop (Sample 1) . should_equal two three.drop (Sample 100) . 
should_equal empty - rnd = alpha.take (Sample 3 seed=42) + rnd = data.alpha.take (Sample 3 seed=42) random_indices = [5, 6, 2] - sample = alpha.take (Index_Sub_Range.By_Index random_indices) + sample = data.alpha.take (Index_Sub_Range.By_Index random_indices) rnd.should_equal sample if setup.is_database.not then From a07754dba80dbf04e5db5856473585c30f256859 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 15:50:12 +0100 Subject: [PATCH 45/93] Add some more main methods to tests --- .../src/Common_Table_Operations/Join/Lookup_Spec.enso | 2 ++ .../src/Common_Table_Operations/Join/Union_Spec.enso | 2 ++ 2 files changed, 4 insertions(+) diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso index 64138696fd70..d05d56b0f706 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso @@ -12,6 +12,8 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend, within_table import project.Util +main = run_default_backend add_specs + add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso index 44fd1b71610d..8b50e3300065 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso @@ -13,6 +13,8 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import expect_column_names, run_default_backend, within_table import project.Util +main = run_default_backend add_specs + type My_Type Value x y From bc4ba75215f36ef2896e919c7119a31dde9c08f9 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 16:06:30 +0100 Subject: [PATCH 46/93] 
Use the same connection for table_builder in Integration_Tests --- .../Integration_Tests.enso | 36 ++++++++++++------- 1 file changed, 23 insertions(+), 13 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso index 2f0b26c13a6b..058767096634 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso @@ -13,10 +13,20 @@ from project.Common_Table_Operations.Core_Spec import weird_names main = run_default_backend add_specs +type Data + Value ~connection + + setup create_connection_fn = + Data.Value (create_connection_fn Nothing) + + add_specs suite_builder setup = table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func materialize = setup.materialize suite_builder.group setup.prefix+" Interactions Between various operations" group_builder-> + data = Data.setup create_connection_fn + group_builder.specify "using weird column names with operations and filtering" <| columns = weird_names.map_with_index ix-> name-> [name, [100+ix, 200+ix, 300+ix]] @@ -33,8 +43,8 @@ add_specs suite_builder setup = result.at name . to_vector . should_equal [200+ix] group_builder.specify "aggregates and joins" <| - t1 = table_builder [["Count", [1, 2, 3]], ["Class", ["X", "Y", "Z"]]] - t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "A", "C", "D", "D", "B", "B"]]] + t1 = table_builder [["Count", [1, 2, 3]], ["Class", ["X", "Y", "Z"]]] connection=data.connection + t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "A", "C", "D", "D", "B", "B"]]] connection=data.connection t3 = t2.aggregate [Group_By "Letter", Count] t4 = t3.join t1 on="Count" join_kind=Join_Kind.Left_Outer |> materialize |> _.order_by "Letter" @@ -46,7 +56,7 @@ add_specs suite_builder setup = rows.at 3 . 
should_equal ["D", 2, 2, "Y"] group_builder.specify "aggregates and distinct" <| - t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C"]], ["Points", [2, 5, 2, 1, 10, 3]]] + t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C"]], ["Points", [2, 5, 2, 1, 10, 3]]] connection=data.connection t3 = t2.aggregate [Group_By "Letter", Sum "Points"] t4 = t3.distinct "Sum Points" |> materialize |> _.order_by "Sum Points" @@ -61,7 +71,7 @@ add_specs suite_builder setup = rows.at 1 . should_equal ["C", 13] group_builder.specify "aggregates and filtering" <| - t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C", "B"]], ["Points", [2, 5, 2, 1, 10, 3, 0]]] + t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C", "B"]], ["Points", [2, 5, 2, 1, 10, 3, 0]]] connection=data.connection t3 = t2.aggregate [Group_By "Letter", Sum "Points"] t4 = t3.filter "Sum Points" (Filter_Condition.Equal 5) |> materialize |> _.order_by "Letter" @@ -71,7 +81,7 @@ add_specs suite_builder setup = rows.at 1 . should_equal ["B", 5] group_builder.specify "aggregates and ordering" <| - t1 = table_builder [["Letter", ["C", "A", "B", "A", "A", "C", "C", "B"]], ["Points", [0, -100, 5, 2, 1, 10, 3, 0]]] + t1 = table_builder [["Letter", ["C", "A", "B", "A", "A", "C", "C", "B"]], ["Points", [0, -100, 5, 2, 1, 10, 3, 0]]] connection=data.connection t2 = t1.aggregate [Group_By "Letter", Sum "Points"] t3 = t2.order_by "Sum Points" |> materialize t3.columns.map .name . should_equal ["Letter", "Sum Points"] @@ -79,7 +89,7 @@ add_specs suite_builder setup = t3.at "Sum Points" . to_vector . 
should_equal [-97, 5, 13] group_builder.specify "distinct and ordering" <| - t1 = table_builder [["X", [1, 2, 2, 1]], ["Y", ["a", "b", "b", "a"]], ["Z", [1, 2, 3, 4]]] + t1 = table_builder [["X", [1, 2, 2, 1]], ["Y", ["a", "b", "b", "a"]], ["Z", [1, 2, 3, 4]]] connection=data.connection # These are 'adversarial' white-box examples constructed knowing that Postgres' DISTINCT ON does not play too well with ORDER BY and it needs to be handled carefully. t2 = t1.order_by "X" . distinct "X" |> materialize @@ -94,7 +104,7 @@ add_specs suite_builder setup = a = ["A", ["a", "a", "a", "a", "a", "a"]] b = ["B", [1, 1, 2, 2, 1, 2]] c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]] - t = table_builder [a, b, c] . order_by ([(Sort_Column.Name "C" Sort_Direction.Descending)]) + t = (table_builder [a, b, c] connection=data.connection) . order_by ([(Sort_Column.Name "C" Sort_Direction.Descending)]) t2 = t.distinct ["A", "B"] on_problems=Report_Error # Now, reverse the order! @@ -138,8 +148,8 @@ add_specs suite_builder setup = ## This mostly checks that various operations handle all kinds of Integer storage implementations (add_row_number may use a different storage than regular columns) if setup.is_database.not then group_builder.specify "add_row_number and other operations" <| - t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]], ["Z", [0.25, 0.5, 0.75]]] - t2 = table_builder [["X", ["ddd", "eee", "fff"]]] + t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]], ["Z", [0.25, 0.5, 0.75]]] connection=data.connection + t2 = table_builder [["X", ["ddd", "eee", "fff"]]] connection=data.connection t11 = t1.add_row_number (t11.at "Row" + 2) . to_vector . should_equal [3, 4, 5] @@ -158,7 +168,7 @@ add_specs suite_builder setup = r12.at "X" . to_vector . should_equal ["a", "b", "c", "ddd", "eee", "fff"] r12.at "Row" . to_vector . 
should_equal [1, 2, 3, 100, 101, 102] - t3 = table_builder [["X", ["a", "b", "c"]], ["Row", [1.5, 2.5, 3.5]]] + t3 = table_builder [["X", ["a", "b", "c"]], ["Row", [1.5, 2.5, 3.5]]] connection=data.connection t123 = ((t1.add_row_number).union [(t2.add_row_number), t3]) r123 = t123 |> materialize @@ -167,8 +177,8 @@ add_specs suite_builder setup = if setup.test_selection.fixed_length_text_columns then group_builder.specify "types of unioned fixed-length columns should be correctly inferred after passing through other operations that infer types from Database, like aggregate Shortest" <| - t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 0, 2]]] . cast "X" (Value_Type.Char 1 False) - t2 = table_builder [["X", ["ddd", "eee", "fff"]], ["Y", [0, 1, 0]]] . cast "X" (Value_Type.Char 3 False) + t1 = (table_builder [["X", ["a", "b", "c"]], ["Y", [1, 0, 2]]] connection=data.connection) . cast "X" (Value_Type.Char 1 False) + t2 = (table_builder [["X", ["ddd", "eee", "fff"]], ["Y", [0, 1, 0]]] connection=data.connection) . cast "X" (Value_Type.Char 3 False) t3 = t1.union t2 vt1 = t3.at "X" . value_type @@ -186,7 +196,7 @@ add_specs suite_builder setup = t5.at "Shortest X" . to_vector . should_equal ["b", "a", "c"] group_builder.specify "types should be correctly preserved after aggregation after iif" <| - t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]] + t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]] connection=data.connection t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . 
cast "B" (Value_Type.Char size=3 variable_length=False) x = t1.at "x" From af23b6536980a9405e349e987ff462fc18904aa4 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 16:06:40 +0100 Subject: [PATCH 47/93] Fix setup in Core_Spec --- .../Common_Table_Operations/Core_Spec.enso | 192 +++++++++++------- 1 file changed, 116 insertions(+), 76 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso index 3c4c82a8c614..224ab36094d2 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso @@ -14,92 +14,129 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend +main = run_default_backend add_specs + +type Data + Value ~table + + setup create_connection_fn table_builder = + connection = create_connection_fn Nothing + table = + col1 = ["foo", [1,2,3]] + col2 = ["bar", [4,5,6]] + col3 = ["Baz", [7,8,9]] + col4 = ["foo 1", [10,11,12]] + col5 = ["foo 2", [13,14,15]] + col6 = ["ab.+123", [16,17,18]] + col7 = ["abcd123", [19,20,21]] + table_builder [col1, col2, col3, col4, col5, col6, col7] connection=connection + Data.Value table + +type Rows_Data + Value ~table + + setup create_connection_fn table_builder = + connection = create_connection_fn Nothing + table = table_builder [["X", [1, 2, 3, 4]], ["Y", [5, 6, 7, 8]], ["Z", ["A", "B", "C", "D"]]] connection=connection + Rows_Data.Value table + +type Read_Data + Value ~data + + connection self = self.data.at 0 + t_big self = self.data.at 1 + t_small self = self.data.at 2 + + setup create_connection_fn table_builder = Read_Data.Value <| + connection = create_connection_fn Nothing + t_big = table_builder [["X", (0.up_to 1500)]] connection=connection + t_small = table_builder [["X", (0.up_to 10)]] connection=connection + [connection, t_big, t_small] + + add_specs suite_builder setup = prefix = 
setup.prefix table_builder = setup.table_builder - table_fn = - col1 = ["foo", [1,2,3]] - col2 = ["bar", [4,5,6]] - col3 = ["Baz", [7,8,9]] - col4 = ["foo 1", [10,11,12]] - col5 = ["foo 2", [13,14,15]] - col6 = ["ab.+123", [16,17,18]] - col7 = ["abcd123", [19,20,21]] - table_builder [col1, col2, col3, col4, col5, col6, col7] - table = table_fn + create_connection_fn = setup.create_connection_func suite_builder.group prefix+"Table.at" group_builder-> + data = Data.setup create_connection_fn table_builder + group_builder.specify "should allow selecting columns by name" <| - column_1 = table.at "bar" + column_1 = data.table.at "bar" column_1.name . should_equal "bar" column_1.to_vector . should_equal [4, 5, 6] - table.at "nonexistent column name" . should_fail_with No_Such_Column + data.table.at "nonexistent column name" . should_fail_with No_Such_Column group_builder.specify "should allow selecting columns by index" <| - column_1 = table.at + column_1 = data.table.at column_1.name . should_equal "foo" column_1.to_vector . should_equal [1, 2, 3] - column_2 = table.at 2 + column_2 = data.table.at 2 column_2.name . should_equal "Baz" column_2.to_vector . should_equal [7, 8, 9] - column_3 = table.at -1 + column_3 = data.table.at -1 column_3.name . should_equal "abcd123" column_3.to_vector . should_equal [19, 20, 21] - column_4 = table.first_column + column_4 = data.table.first_column column_4.name . should_equal "foo" column_4.to_vector . should_equal [1, 2, 3] - column_5 = table.second_column + column_5 = data.table.second_column column_5.name . should_equal "bar" column_5.to_vector . should_equal [4, 5, 6] - column_6 = table.last_column + column_6 = data.table.last_column column_6.name . should_equal "abcd123" column_6.to_vector . should_equal [19, 20, 21] - table.at 100 . should_fail_with Index_Out_Of_Bounds + data.table.at 100 . 
should_fail_with Index_Out_Of_Bounds group_builder.specify "should fail with Type Error is not an Integer or Text" <| - table.at (Pair.new 1 2) . should_fail_with Illegal_Argument - table.at (Pair.new 1 2) . catch . to_display_text . should_equal "Illegal Argument: expected 'selector' to be either a Text or an Integer, but got Pair." + data.table.at (Pair.new 1 2) . should_fail_with Illegal_Argument + data.table.at (Pair.new 1 2) . catch . to_display_text . should_equal "Illegal Argument: expected 'selector' to be either a Text or an Integer, but got Pair." suite_builder.group prefix+"Table.get" group_builder-> + data = Data.setup create_connection_fn table_builder + group_builder.specify "should allow selecting columns by name" <| - column_1 = table.get "bar" + column_1 = data.table.get "bar" column_1.name . should_equal "bar" column_1.to_vector . should_equal [4, 5, 6] - table.get "nonexistent column name" . should_equal Nothing - table.get "nonexistent column name" column_1 . name . should_equal "bar" + data.table.get "nonexistent column name" . should_equal Nothing + data.table.get "nonexistent column name" column_1 . name . should_equal "bar" group_builder.specify "should allow selecting columns by index" <| - column_1 = table.get + column_1 = data.table.get column_1.name . should_equal "foo" column_1.to_vector . should_equal [1, 2, 3] - column_2 = table.get 2 + column_2 = data.table.get 2 column_2.name . should_equal "Baz" column_2.to_vector . should_equal [7, 8, 9] - column_3 = table.get -1 + column_3 = data.table.get -1 column_3.name . should_equal "abcd123" column_3.to_vector . should_equal [19, 20, 21] - table.get 100 . should_equal Nothing - table.get 100 column_1 . name . should_equal "foo" + data.table.get 100 . should_equal Nothing + data.table.get 100 column_1 . name . should_equal "foo" group_builder.specify "should fail with Type Error is not an Integer or Text" <| - table.get (Pair.new 1 2) . 
should_fail_with Illegal_Argument - table.get (Pair.new 1 2) . catch . to_display_text . should_equal "Illegal Argument: expected 'selector' to be either a Text or an Integer, but got Pair." + data.table.get (Pair.new 1 2) . should_fail_with Illegal_Argument + data.table.get (Pair.new 1 2) . catch . to_display_text . should_equal "Illegal Argument: expected 'selector' to be either a Text or an Integer, but got Pair." suite_builder.group prefix+"Table.set" group_builder-> + data = Data.setup create_connection_fn table_builder + group_builder.specify "should allow adding a column" <| - bar2 = table.get "bar" . rename "bar2" - t2 = table.set bar2 + bar2 = data.table.get "bar" . rename "bar2" + t2 = data.table.set bar2 t2.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "bar2"] t2.get "bar2" . to_vector . should_equal [4, 5, 6] @@ -107,11 +144,11 @@ add_specs suite_builder setup = t3.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "bar2", "bar3"] group_builder.specify "should not allow illegal column names" <| - table.set (table.get "bar") new_name='a\0b' . should_fail_with Invalid_Column_Names + data.table.set (data.table.get "bar") new_name='a\0b' . should_fail_with Invalid_Column_Names group_builder.specify "should allow replacing a column" <| - foo = table.get "bar" . rename "foo" - t2 = table.set foo + foo = data.table.get "bar" . rename "foo" + t2 = data.table.set foo t2.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t2.get "foo" . to_vector . should_equal [4, 5, 6] @@ -119,11 +156,11 @@ add_specs suite_builder setup = t3.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "bar3"] group_builder.specify "should allow adding a column" <| - bar2 = table.get "bar" . rename "bar2" - table.set bar2 set_mode=Set_Mode.Update . should_fail_with Missing_Column + bar2 = data.table.get "bar" . 
rename "bar2"
+            data.table.set bar2 set_mode=Set_Mode.Update . should_fail_with Missing_Column
 
-            foo = table.get "bar" . rename "foo"
-            table.set foo set_mode=Set_Mode.Add . should_fail_with Existing_Column
+            foo = data.table.get "bar" . rename "foo"
+            data.table.set foo set_mode=Set_Mode.Add . should_fail_with Existing_Column
 
         group_builder.specify "should not affect existing columns that depended on the old column being replaced" <|
             t1 = table_builder [["X", [1,2,3]]]
@@ -175,26 +212,31 @@ add_specs suite_builder setup =
                 False -> r1.should_fail_with Illegal_Argument
 
     suite_builder.group prefix+"Table.column_names" group_builder->
+        data = Data.setup create_connection_fn table_builder
+
         group_builder.specify "should return the names of all columns" <|
-            table.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"]
+            data.table.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"]
 
         group_builder.specify "should allow weird column names in all backends" <|
             columns = weird_names.map_with_index ix-> name->
                 [name, [100+ix, 2, 3]]
-            table = table_builder columns
-            table.column_names . should_equal weird_names
+            table = table_builder columns
+            table.column_names . should_equal weird_names
 
             weird_names.map_with_index ix-> name->
-                table.at name . to_vector . should_equal [100+ix, 2, 3]
+                table.at name . to_vector . should_equal [100+ix, 2, 3]
 
     suite_builder.group prefix+"Table.column_count" group_builder->
+        data = Data.setup create_connection_fn table_builder
+
         group_builder.specify "should allow getting the column count" <|
-            table.column_count . should_equal 7
+            data.table.column_count . 
should_equal 7 suite_builder.group prefix+"Table.rows" group_builder-> - table = table_builder [["X", [1, 2, 3, 4]], ["Y", [5, 6, 7, 8]], ["Z", ["A", "B", "C", "D"]]] + data = Rows_Data.setup create_connection_fn table_builder + group_builder.specify "should allow to get a Vector of Table rows" <| - rows = table.rows + rows = data.table.rows rows.length . should_equal 4 first_row = rows.first @@ -226,51 +268,50 @@ add_specs suite_builder setup = rows.map .to_vector . should_equal [[1, 5, "A"], [2, 6, "B"], [3, 7, "C"], [4, 8, "D"]] group_builder.specify "should let you get the first row" <| - first_row = table.first_row + first_row = data.table.first_row first_row . length . should_equal 3 first_row.at "X" . should_equal 1 first_row.at "Y" . should_equal 5 first_row.at "Z" . should_equal "A" group_builder.specify "should let you get the second row" <| - second_row = table.second_row + second_row = data.table.second_row second_row . length . should_equal 3 second_row.at "X" . should_equal 2 second_row.at "Y" . should_equal 6 second_row.at "Z" . should_equal "B" group_builder.specify "should let you get the last row" <| - last_row = table.last_row + last_row = data.table.last_row last_row . length . should_equal 3 last_row.at "X" . should_equal 4 last_row.at "Y" . should_equal 8 last_row.at "Z" . should_equal "D" group_builder.specify "should fetch rows up to the specified limit" <| - table.rows max_rows=2 . map .to_vector . should_equal [[1, 5, "A"], [2, 6, "B"]] + data.table.rows max_rows=2 . map .to_vector . should_equal [[1, 5, "A"], [2, 6, "B"]] group_builder.specify "should correctly handle errors" <| - table.rows.at 5 . should_fail_with Index_Out_Of_Bounds - err = table.rows.at -6 + data.table.rows.at 5 . should_fail_with Index_Out_Of_Bounds + err = data.table.rows.at -6 err.should_fail_with Index_Out_Of_Bounds err.catch . should_equal (Index_Out_Of_Bounds.Error -6 4) - table.rows (max_rows=2) . at 2 . should_fail_with Index_Out_Of_Bounds - table.rows . at 0 . 
at -4 . should_fail_with Index_Out_Of_Bounds - table.rows . at 0 . at "unknown" . should_fail_with No_Such_Column + data.table.rows (max_rows=2) . at 2 . should_fail_with Index_Out_Of_Bounds + data.table.rows . at 0 . at -4 . should_fail_with Index_Out_Of_Bounds + data.table.rows . at 0 . at "unknown" . should_fail_with No_Such_Column suite_builder.group prefix+"Table.read" group_builder-> - t_big = table_builder [["X", (0.up_to 1500)]] - t_small = table_builder [["X", (0.up_to 10)]] + data = Read_Data.setup create_connection_fn table_builder has_default_row_limit = setup.is_database group_builder.specify "should have a row limit by default and warn about it" <| - t_big.row_count . should_equal 1500 - t_small.row_count . should_equal 10 + data.t_big.row_count . should_equal 1500 + data.t_small.row_count . should_equal 10 - t1 = t_big.read + t1 = data.t_big.read case has_default_row_limit of True -> t1.row_count . should_equal 1000 @@ -280,46 +321,46 @@ add_specs suite_builder setup = t1.row_count . should_equal 1500 Problems.assume_no_problems t1 - t2 = t_small.read + t2 = data.t_small.read t2.row_count . should_equal 10 Problems.assume_no_problems t2 group_builder.specify "should allow to set the row limit" <| - t1 = t_big.read max_rows=23 + t1 = data.t_big.read max_rows=23 t1.row_count . should_equal 23 w1 = Problems.expect_only_warning Not_All_Rows_Downloaded t1 w1.max_rows . should_equal 23 w1.to_display_text . should_contain "some rows have been dropped" - t2 = t_big.read max_rows=1500 + t2 = data.t_big.read max_rows=1500 t2.row_count . should_equal 1500 Problems.assume_no_problems t2 - t3 = t_small.read max_rows=1 + t3 = data.t_small.read max_rows=1 t3.row_count . should_equal 1 w3 = Problems.expect_only_warning Not_All_Rows_Downloaded t3 w3.max_rows . should_equal 1 group_builder.specify "should allow to have no row limit" <| - t1 = t_big.read max_rows=Nothing + t1 = data.t_big.read max_rows=Nothing t1.row_count . 
should_equal 1500 Problems.assume_no_problems t1 group_builder.specify "should allow to turn off the warning" <| - t1 = t_big.read warn_if_more_rows=False + t1 = data.t_big.read warn_if_more_rows=False t1.row_count . should_equal (if has_default_row_limit then 1000 else 1500) Problems.assume_no_problems t1 - t2 = t_big.read max_rows=123 warn_if_more_rows=False + t2 = data.t_big.read max_rows=123 warn_if_more_rows=False t2.row_count . should_equal 123 Problems.assume_no_problems t2 - t3 = t_big.read max_rows=12300 warn_if_more_rows=False + t3 = data.t_big.read max_rows=12300 warn_if_more_rows=False t3.row_count . should_equal 1500 Problems.assume_no_problems t3 group_builder.specify "should also work as Column.read" <| - c1 = t_big.at "X" + c1 = data.t_big.at "X" c1.length . should_equal 1500 r2 = c1.read @@ -348,24 +389,23 @@ add_specs suite_builder setup = Problems.assume_no_problems r5 if setup.is_database then group_builder.specify "should allow similar API on Connection.read" <| - connection = setup.connection_fn Nothing - connection.query t_big.name . row_count . should_equal 1500 + data.connection.query data.t_big.name . row_count . should_equal 1500 - t1 = connection.read t_big.name + t1 = data.connection.read data.t_big.name t1.row_count . should_equal 1000 w1 = Problems.expect_only_warning Not_All_Rows_Downloaded t1 w1.max_rows . should_equal 1000 - t2 = connection.read t_big.name limit=42 + t2 = data.connection.read data.t_big.name limit=42 t2.row_count . should_equal 42 w2 = Problems.expect_only_warning Not_All_Rows_Downloaded t2 w2.max_rows . should_equal 42 - t3 = connection.read t_big.name limit=Nothing + t3 = data.connection.read data.t_big.name limit=Nothing t3.row_count . should_equal 1500 Problems.assume_no_problems t3 - t4 = connection.read t_big.name warn_if_more_rows=False + t4 = data.connection.read data.t_big.name warn_if_more_rows=False t4.row_count . 
should_equal 1000 Problems.assume_no_problems t4 From 10d1f3e856c1e65071ed24cd08a9723531a6f7ea Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 17:59:11 +0100 Subject: [PATCH 48/93] Fix setup in Upload_Spec --- .../Table_Tests/src/Database/Upload_Spec.enso | 200 +++++++++--------- 1 file changed, 100 insertions(+), 100 deletions(-) diff --git a/test/Table_Tests/src/Database/Upload_Spec.enso b/test/Table_Tests/src/Database/Upload_Spec.enso index 77cd5d789a3f..af2d1775bea9 100644 --- a/test/Table_Tests/src/Database/Upload_Spec.enso +++ b/test/Table_Tests/src/Database/Upload_Spec.enso @@ -45,15 +45,15 @@ type Data [connection, in_memory_table] -in_memory_table_builder args primary_key=[] data=Nothing = - _ = [primary_key, data] +in_memory_table_builder args primary_key=[] connection = + _ = [primary_key, connection] case args of table : Table -> table _ -> Table.new args -database_table_builder name_prefix args primary_key=[] (data : Data) = - in_memory_table = in_memory_table_builder args - in_memory_table.select_into_database_table data.connection (Name_Generator.random_name name_prefix) temporary=True primary_key=primary_key +database_table_builder name_prefix args primary_key=[] connection = + in_memory_table = in_memory_table_builder args connection=connection + in_memory_table.select_into_database_table connection (Name_Generator.random_name name_prefix) temporary=True primary_key=primary_key ## PRIVATE Adds uploading table specs to the suite builder. 
@@ -597,7 +597,7 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = suite_builder.group prefix+"Appending an in-memory table to a Database table" group_builder-> data = Data.setup make_new_connection - test_table_append group_builder data in_memory_table_builder (database_table_builder "target-table" data=data) + test_table_append group_builder data in_memory_table_builder (database_table_builder "target-table") group_builder.specify "will issue a friendly error if using in-memory table as target" <| t1 = Table.new [["X", [1, 2, 3]]] @@ -609,17 +609,17 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = suite_builder.group prefix+"Appending a Database table to a Database table" group_builder-> data = Data.setup make_new_connection - test_table_append group_builder data (database_table_builder "source-table" data=data) (database_table_builder "target-table" data=data) + test_table_append group_builder data (database_table_builder "source-table") (database_table_builder "target-table") suite_builder.group prefix+"Deleting rows from a Database table (source=in-memory)" group_builder-> data = Data.setup make_new_connection - test_table_delete group_builder data in_memory_table_builder (database_table_builder "target-table" data=data) + test_table_delete group_builder data in_memory_table_builder (database_table_builder "target-table") suite_builder.group prefix+"Deleting rows from a Database table (source=Database)" group_builder-> data = Data.setup make_new_connection - test_table_delete group_builder data (database_table_builder "source-table" data=data) (database_table_builder "target-table" data=data) + test_table_delete group_builder data (database_table_builder "source-table") (database_table_builder "target-table") suite_builder.group prefix+"Deleting rows from a Database table" group_builder-> data = Data.setup make_new_connection @@ -635,7 +635,7 @@ add_specs suite_builder make_new_connection prefix 
persistent_connector=True = t2.at "X" . to_vector . should_equal [] group_builder.specify "should allow to delete rows based on another query" <| - table = database_table_builder "target-table" [["student_id", [1, 2, 44, 100, 120]], ["first_name", ["Alice", "Bob", "Charlie", "David", "Eve"]], ["graduation_year", [2023, 2024, 2021, 2020, 2019]]] primary_key=["student_id"] data=data + table = database_table_builder "target-table" [["student_id", [1, 2, 44, 100, 120]], ["first_name", ["Alice", "Bob", "Charlie", "David", "Eve"]], ["graduation_year", [2023, 2024, 2021, 2020, 2019]]] primary_key=["student_id"] connection=data.connection graduates = table.filter "graduation_year" (Filter_Condition.Less 2023) affected_rows = table.delete_rows graduates # uses the primary key by default affected_rows . should_equal 3 @@ -649,7 +649,7 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = r1.to_display_text.should_contain "in-memory tables are immutable" group_builder.specify "can use itself as source of rows to delete (even if that's an anti-pattern)" <| - t1 = database_table_builder "target-table" [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] data=data + t1 = database_table_builder "target-table" [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] connection=data.connection affected_rows = t1.delete_rows t1 affected_rows . should_equal 3 t1.rows.should_equal [] @@ -758,14 +758,14 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = pre_existing_table.at "X" . to_vector . 
should_contain_the_same_elements_as [1, 2, 3] - tests group_builder data make_new_connection (in_memory_table_builder) " (from memory)" persistent_connector - tests group_builder data make_new_connection (database_table_builder "ec-tests-table" data=data) " (from Database table)" persistent_connector + tests group_builder data make_new_connection in_memory_table_builder " (from memory)" persistent_connector + tests group_builder data make_new_connection (database_table_builder "ec-tests-table") " (from Database table)" persistent_connector test_table_append group_builder (data : Data) source_table_builder target_table_builder = group_builder.specify "should be able to append new rows to a table" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["X", [4, 5, 6]], ["Y", ['d', 'e', 'f']]] + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] connection=data.connection + src = source_table_builder [["X", [4, 5, 6]], ["Y", ['d', 'e', 'f']]] connection=data.connection result = dest.update_rows src key_columns=["X"] result.column_names . should_equal ["X", "Y"] @@ -778,8 +778,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ rows1.should_contain_the_same_elements_as expected_rows group_builder.specify "should error if new rows clash with existing ones and mode is Insert, target table should remain unchanged" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["X", [1, 5, 6]], ["Y", ['d', 'e', 'f']]] + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] connection=data.connection + src = source_table_builder [["X", [1, 5, 6]], ["Y", ['d', 'e', 'f']]] connection=data.connection # This is checked in dry-run mode but only for the first 1000 rows. 
run_with_and_without_output <| @@ -787,19 +787,19 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ r1.should_fail_with Rows_Already_Present group_builder.specify "should use the target table primary key for the key by default" <| - dest1 = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [4, 5, 6]]] primary_key=["Y", "Z"] + dest1 = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [4, 5, 6]]] primary_key=["Y", "Z"] connection=data.connection default_key_columns dest1 . should_equal ["Y", "Z"] - dest2 = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["Y"] - src = source_table_builder [["X", [4, 5]], ["Y", ['b', 'e']]] + dest2 = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["Y"] connection=data.connection + src = source_table_builder [["X", [4, 5]], ["Y", ['b', 'e']]] connection=data.connection # Not specifying `key_columns`, rely on `default_key_columns` inferring Y as default based on the primary key. r1 = dest2.update_rows src rows = r1.rows.to_vector.map .to_vector rows.should_contain_the_same_elements_as [[1, 'a'], [4, 'b'], [3, 'c'], [5, 'e']] group_builder.specify "should be able to Update existing rows in a table" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] - src = source_table_builder [["X", [2]], ["Y", ['ZZZ']]] + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] connection=data.connection + src = source_table_builder [["X", [2]], ["Y", ['ZZZ']]] connection=data.connection r1 = dest.update_rows src update_action=Update_Action.Update key_columns=["X"] r1.column_names . 
should_equal ["X", "Y"] @@ -809,8 +809,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ rows.should_contain_the_same_elements_as [[1, 'a'], [2, 'ZZZ'], [3, 'c']] group_builder.specify "should fail on unmatched rows in Update mode" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] - src = source_table_builder [["X", [2, 100]], ["Y", ['d', 'e']]] + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] connection=data.connection + src = source_table_builder [["X", [2, 100]], ["Y", ['d', 'e']]] connection=data.connection # In dry run mode this will only check first 1000 rows. run_with_and_without_output <| @@ -822,8 +822,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ rows.should_contain_the_same_elements_as [[1, 'a'], [2, 'b'], [3, 'c']] group_builder.specify "should upsert by default (update existing rows, insert new rows)" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["X", [2, 100]], ["Y", ['D', 'E']]] + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] connection=data.connection + src = source_table_builder [["X", [2, 100]], ["Y", ['D', 'E']]] connection=data.connection r1 = dest.update_rows src key_columns=["X"] Problems.assume_no_problems r1 r1.column_names . 
should_equal ["X", "Y"] @@ -836,8 +836,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ rows2.should_contain_the_same_elements_as expected_rows group_builder.specify "should allow to align an existing table with a source (upsert + delete rows missing from source)" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["X", [2, 100]], ["Y", ['D', 'E']]] + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] connection=data.connection + src = source_table_builder [["X", [2, 100]], ["Y", ['D', 'E']]] connection=data.connection r1 = dest.update_rows src update_action=Update_Action.Align_Records key_columns=["X"] Problems.assume_no_problems r1 r1.column_names . should_equal ["X", "Y"] @@ -848,8 +848,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ rows2.should_contain_the_same_elements_as expected_rows group_builder.specify "should match columns by name, reordering to destination order if needed (Insert)" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [4, 5, 6]]] + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] connection=data.connection + src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [4, 5, 6]]] connection=data.connection result = dest.update_rows src update_action=Update_Action.Insert key_columns=["X"] result.column_names . should_equal ["X", "Y"] src.column_names . 
should_equal ["Y", "X"] @@ -858,8 +858,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ rows1.should_contain_the_same_elements_as expected_rows group_builder.specify "should match columns by name, reordering to destination order if needed (Upsert)" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [1, 5, 6]]] + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] connection=data.connection + src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [1, 5, 6]]] connection=data.connection result = dest.update_rows src key_columns=["X"] result.column_names . should_equal ["X", "Y"] src.column_names . should_equal ["Y", "X"] @@ -868,8 +868,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ rows1.should_contain_the_same_elements_as expected_rows group_builder.specify "should match columns by name, reordering to destination order if needed (Update)" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [3, 2, 1]]] + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] connection=data.connection + src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [3, 2, 1]]] connection=data.connection result = dest.update_rows src update_action=Update_Action.Update key_columns=["X"] result.column_names . should_equal ["X", "Y"] src.column_names . 
should_equal ["Y", "X"] @@ -878,8 +878,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ rows1.should_contain_the_same_elements_as expected_rows group_builder.specify "should match columns by name, reordering to destination order if needed (Align)" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [2, 1, 6]]] + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] connection=data.connection + src = source_table_builder [["Y", ['d', 'e', 'f']], ["X", [2, 1, 6]]] connection=data.connection result = dest.update_rows src update_action=Update_Action.Align_Records key_columns=["X"] result.column_names . should_equal ["X", "Y"] src.column_names . should_equal ["Y", "X"] @@ -888,9 +888,9 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ rows1.should_contain_the_same_elements_as expected_rows group_builder.specify "should allow to use a transformed table, with computed fields, as a source" <| - dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] - t1 = source_table_builder [["Z", [10, 20]], ["Y", ['D', 'E']]] - t2 = source_table_builder [["Z", [20, 10]], ["X", [-99, 10]]] + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] connection=data.connection + t1 = source_table_builder [["Z", [10, 20]], ["Y", ['D', 'E']]] connection=data.connection + t2 = source_table_builder [["Z", [20, 10]], ["X", [-99, 10]]] connection=data.connection src = t1.join t2 on=["Z"] join_kind=Join_Kind.Inner . remove_columns "Z" . set "[X] + 100" "X" src.at "X" . to_vector . 
should_contain_the_same_elements_as [1, 110] @@ -902,8 +902,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ rows1.should_contain_the_same_elements_as expected_rows group_builder.specify "should allow specifying no key in Insert mode" <| - dest = target_table_builder [["X", [1, 10, 100]]] - src = source_table_builder [["X", [1, 2, 3]]] + dest = target_table_builder [["X", [1, 10, 100]]] connection=data.connection + src = source_table_builder [["X", [1, 2, 3]]] connection=data.connection result = dest.update_rows src update_action=Update_Action.Insert key_columns=[] expected = [1, 10, 100, 1, 2, 3] @@ -920,8 +920,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ r3.at "X" . to_vector . should_contain_the_same_elements_as expected group_builder.specify "should fail if no key is specified in other modes" <| - dest = target_table_builder [["X", [1, 10, 100]]] - src = source_table_builder [["X", [1, 2, 3]]] + dest = target_table_builder [["X", [1, 10, 100]]] connection=data.connection + src = source_table_builder [["X", [1, 2, 3]]] connection=data.connection run_with_and_without_output <| r1 = dest.update_rows src update_action=Update_Action.Update key_columns=[] @@ -943,9 +943,9 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ r5.should_fail_with Illegal_Argument group_builder.specify "should fail if the key is not unique in the input table" <| - d1 = target_table_builder [["X", [0, 10, 100]]] primary_key=["X"] - d2 = target_table_builder [["X", [0, 10, 100]]] - src = source_table_builder [["X", [1, 1, 3]]] + d1 = target_table_builder [["X", [0, 10, 100]]] primary_key=["X"] connection=data.connection + d2 = target_table_builder [["X", [0, 10, 100]]] connection=data.connection + src = source_table_builder [["X", [1, 1, 3]]] connection=data.connection # Only checks 1000 rows in dry run mode. 
run_with_and_without_output <| @@ -957,8 +957,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ r2.should_fail_with Non_Unique_Key group_builder.specify "will fail if source table contains null keys, unless only Inserting" <| - t1 = target_table_builder [["X", [0, 10, 100]], ["Y", ["a", "b", "c"]]] primary_key=[] - s1 = source_table_builder [["X", [10, Nothing]], ["Y", ["x", "y"]]] + t1 = target_table_builder [["X", [0, 10, 100]], ["Y", ["a", "b", "c"]]] primary_key=[] connection=data.connection + s1 = source_table_builder [["X", [10, Nothing]], ["Y", ["x", "y"]]] connection=data.connection run_with_and_without_output <| r1 = t1.update_rows s1 key_columns=["X"] update_action=Update_Action.Update_Or_Insert r1.should_fail_with Null_Values_In_Key_Columns @@ -970,15 +970,15 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ m2.at "X" . to_vector . should_equal [0, 10, 100, 10, Nothing] group_builder.specify "should fail if the key causes update of multiple values (it's not unique in the target table)" <| - dest = target_table_builder [["X", [1, 1, 2]], ["Y", ['a', 'b', 'c']]] - src = source_table_builder [["X", [1, 2, 3]], ["Y", ['d', 'e', 'f']]] + dest = target_table_builder [["X", [1, 1, 2]], ["Y", ['a', 'b', 'c']]] connection=data.connection + src = source_table_builder [["X", [1, 2, 3]], ["Y", ['d', 'e', 'f']]] connection=data.connection run_with_and_without_output <| r1 = dest.update_rows src key_columns=["X"] r1.should_fail_with Multiple_Target_Rows_Matched_For_Update r1.catch.to_display_text . 
should_contain "key [1] matched 2 rows" - src2 = source_table_builder [["X", [1]], ["Y", ['d']]] + src2 = source_table_builder [["X", [1]], ["Y", ['d']]] connection=data.connection run_with_and_without_output <| r2 = dest.update_rows src2 key_columns=["X"] update_action=Update_Action.Update r2.should_fail_with Multiple_Target_Rows_Matched_For_Update @@ -992,13 +992,13 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ ## BUT the check should not throw an error if the duplicated key is on an unaffected row! (here key 1 is duplicated, but we are NOT updating it) - src3 = source_table_builder [["X", [2]], ["Y", ['f']]] + src3 = source_table_builder [["X", [2]], ["Y", ['f']]] connection=data.connection Problems.assume_no_problems <| dest.update_rows src3 key_columns=["X"] group_builder.specify "should fail if the source table contains columns not present in the target (data loss)" <| - dest = target_table_builder [["X", [0, 10, 100]]] primary_key=["X"] - src = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] + dest = target_table_builder [["X", [0, 10, 100]]] primary_key=["X"] connection=data.connection + src = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] connection=data.connection run_with_and_without_output <| r1 = dest.update_rows src key_columns=["X"] r1.should_fail_with Unmatched_Columns @@ -1009,7 +1009,7 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ group_builder.specify "should use defaults when inserting" <| dest_name = Name_Generator.random_name "table-defaults" dest = data.connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "X" Value_Type.Integer] temporary=True primary_key=[] . 
should_succeed - src = source_table_builder [["X", [1, 2, 3]]] + src = source_table_builder [["X", [1, 2, 3]]] connection=data.connection r1 = dest.update_rows src key_columns=[] update_action=Update_Action.Insert Problems.assume_no_problems r1 r1.column_names . should_equal ["Y", "X"] @@ -1024,7 +1024,7 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ Problems.assume_no_problems <| dest.update_rows (Table.from_rows ["X", "Y", "Z"] [[1, 1000, 10]]) key_columns=[] update_action=Update_Action.Insert - src = source_table_builder [["X", [1, 2, 3]], ["Z", [100, 200, 300]]] + src = source_table_builder [["X", [1, 2, 3]], ["Z", [100, 200, 300]]] connection=data.connection r1 = dest.update_rows src key_columns=["X"] update_action=Update_Action.Update_Or_Insert Problems.assume_no_problems r1 r1.column_names . should_equal ["Y", "X", "Z"] @@ -1038,7 +1038,7 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ dest = data.connection.create_table dest_name [Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Default_Expression "42"], Column_Description.Value "Z" Value_Type.Integer] temporary=True . should_succeed initial_data = Table.new [["X", [10, 20]], ["Y", [100, 200]], ["Z", [1000, 2000]]] dest.update_rows initial_data key_columns=[] update_action=Update_Action.Insert . should_succeed - src = source_table_builder [["X", [10, 2, 3]], ["Z", [-1, -2, -3]]] + src = source_table_builder [["X", [10, 2, 3]], ["Z", [-1, -2, -3]]] connection=data.connection r1 = dest.update_rows src update_action=Update_Action.Align_Records key_columns=["X"] Problems.assume_no_problems r1 r1.column_names . 
should_equal ["X", "Y", "Z"] @@ -1051,25 +1051,25 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ group_builder.specify "should fail if the source table is missing some columns and the column in the target has no default value" <| dest_name = Name_Generator.random_name "table-notnull" dest = data.connection.create_table dest_name [Column_Description.Value "Y" Value_Type.Integer [Column_Constraint.Not_Null], Column_Description.Value "X" Value_Type.Integer] temporary=True primary_key=[] . should_succeed - src = source_table_builder [["X", [1, 2, 3]]] + src = source_table_builder [["X", [1, 2, 3]]] connection=data.connection r1 = dest.update_rows src key_columns=[] update_action=Update_Action.Insert # We may want a more specific error for missing columns without defaults, but for now it's just a SQL error. r1.should_fail_with SQL_Error data.connection.drop_table dest_name group_builder.specify "should fail if the source table is missing some columns, if asked to" <| - dest = target_table_builder [["X", [0, 10, 100]], ["Y", ['a', 'b', 'c']]] - src = source_table_builder [["X", [1, 2, 3]]] + dest = target_table_builder [["X", [0, 10, 100]], ["Y", ['a', 'b', 'c']]] connection=data.connection + src = source_table_builder [["X", [1, 2, 3]]] connection=data.connection run_with_and_without_output <| r1 = dest.update_rows src error_on_missing_columns=True update_action=Update_Action.Insert key_columns=[] r1.should_fail_with Missing_Input_Columns r1.catch.criteria . 
should_equal ["Y"] group_builder.specify "should fail if some of key_columns do not exist in either table" <| - d1 = target_table_builder [["X", [0, 10, 100]]] - d2 = target_table_builder [["X", [0, 10, 100]], ["Y", ['a', 'b', 'c']]] - s1 = source_table_builder [["X", [1, 3]]] - s2 = source_table_builder [["X", [1, 3]], ["Y", ['e', 'f']]] + d1 = target_table_builder [["X", [0, 10, 100]]] connection=data.connection + d2 = target_table_builder [["X", [0, 10, 100]], ["Y", ['a', 'b', 'c']]] connection=data.connection + s1 = source_table_builder [["X", [1, 3]]] connection=data.connection + s2 = source_table_builder [["X", [1, 3]], ["Y", ['e', 'f']]] connection=data.connection run_with_and_without_output <| r1 = d1.update_rows s1 key_columns=["Y"] @@ -1084,7 +1084,7 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ ((r3.catch.is_a Missing_Input_Columns) || (r3.catch.is_a Unmatched_Columns)).should_be_true group_builder.specify "should fail if the target table does not exist" <| - t = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] + t = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] connection=data.connection nonexistent_name = Name_Generator.random_name "nonexistent-table" nonexistent_ref = data.connection.create_table nonexistent_name t # Dropping the table to make it not exist. 
@@ -1099,7 +1099,7 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ r2.should_fail_with Table_Not_Found group_builder.specify "should fail if the target table is in-memory" <| - t = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] + t = source_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] connection=data.connection in_memory_table = Table.new [["X", [0]], ["Y", ['_']]] run_with_and_without_output <| r1 = in_memory_table.update_rows t key_columns=[] @@ -1109,8 +1109,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ r2.should_fail_with Illegal_Argument group_builder.specify "should warn if type widening occurs" <| - dest = target_table_builder [["X", [3.25, 4.25, 10.0]]] - src = source_table_builder [["X", [1, 2, 0]]] + dest = target_table_builder [["X", [3.25, 4.25, 10.0]]] connection=data.connection + src = source_table_builder [["X", [1, 2, 0]]] connection=data.connection # Warning should be present in dry-run mode too! Context.Output.with_disabled <| @@ -1129,8 +1129,8 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ result.at "X" . to_vector . should_contain_the_same_elements_as [3.25, 4.25, 10.0, 1, 2, 0] group_builder.specify "should fail if types of columns are not compatible" <| - dest = target_table_builder [["X", ["a", "B", "c"]]] - src = source_table_builder [["X", [1, 2, 3]]] + dest = target_table_builder [["X", ["a", "B", "c"]]] connection=data.connection + src = source_table_builder [["X", [1, 2, 3]]] connection=data.connection run_with_and_without_output <| result = dest.update_rows src update_action=Update_Action.Insert key_columns=[] @@ -1141,7 +1141,7 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ err.got_type.is_numeric . 
should_be_true group_builder.specify "fails if the target type is more restrictive than source" <| - src = source_table_builder [["X", [1, 2, 3]], ["Y", ["a", "xyz", "abcdefghijkl"]], ["Z", ["a", "pqrst", "abcdefghijkl"]]] + src = source_table_builder [["X", [1, 2, 3]], ["Y", ["a", "xyz", "abcdefghijkl"]], ["Z", ["a", "pqrst", "abcdefghijkl"]]] connection=data.connection dest_name = Name_Generator.random_name "dest-table-test-types" structure = x = Column_Description.Value "X" (Value_Type.Integer Bits.Bits_16) @@ -1162,7 +1162,7 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ dest_name = Name_Generator.random_name "dest-table" # We will make the upload fail by violating the NOT NULL constraint. dest = data.connection.create_table dest_name [Column_Description.Value "X" Value_Type.Integer [Column_Constraint.Not_Null]] temporary=True primary_key=[] . should_succeed - src = source_table_builder [["X", [1, Nothing, 3]]] + src = source_table_builder [["X", [1, Nothing, 3]]] connection=data.connection existing_tables = data.connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . 
to_vector res = dest.update_rows src update_action=Update_Action.Insert key_columns=[] @@ -1185,16 +1185,16 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ test_table_delete group_builder (data : Data) source_table_builder target_table_builder = group_builder.specify "should remove rows matching by key_columns" <| - table = target_table_builder [["student_id", [1, 2, 44, 100, 120]], ["first_name", ["Alice", "Bob", "Charlie", "David", "Eve"]], ["score", [100, 100, 44, 100, 120]]] primary_key=["student_id"] - key_values_to_delete = source_table_builder [["student_id", [44, 100]]] + table = target_table_builder [["student_id", [1, 2, 44, 100, 120]], ["first_name", ["Alice", "Bob", "Charlie", "David", "Eve"]], ["score", [100, 100, 44, 100, 120]]] primary_key=["student_id"] connection=data.connection + key_values_to_delete = source_table_builder [["student_id", [44, 100]]] connection=data.connection # key columns should automatically be discovered by the primary key affected_rows = table.delete_rows key_values_to_delete affected_rows . should_equal 2 table.rows.map .to_vector . should_equal [[1, "Alice", 100], [2, "Bob", 100], [120, "Eve", 120]] group_builder.specify "will require key_columns if no default can be used as no primary key is set" <| - table = target_table_builder [["X", [1, 2, 3]]] primary_key=[] - key_values_to_delete = source_table_builder [["X", [1, 2]]] + table = target_table_builder [["X", [1, 2, 3]]] primary_key=[] connection=data.connection + key_values_to_delete = source_table_builder [["X", [1, 2]]] connection=data.connection run_with_and_without_output <| r1 = table.delete_rows key_values_to_delete @@ -1207,29 +1207,29 @@ test_table_delete group_builder (data : Data) source_table_builder target_table_ table.at "X" . to_vector . 
should_equal [3] group_builder.specify "does not fail if no rows matching the key_values_to_delete are found" <| - table = target_table_builder [["X", [1, 2, 3]]] primary_key=[] - key_values_to_delete = source_table_builder [["X", [4, 5]]] + table = target_table_builder [["X", [1, 2, 3]]] primary_key=[] connection=data.connection + key_values_to_delete = source_table_builder [["X", [4, 5]]] connection=data.connection r1 = table.delete_rows key_values_to_delete key_columns=["X"] # Error: Unresolved method `delete_rows` r1.should_equal 0 Problems.assume_no_problems r1 table.at "X" . to_vector . should_equal [1, 2, 3] - key_values_2 = source_table_builder [["X", [4, 3, 5]]] + key_values_2 = source_table_builder [["X", [4, 3, 5]]] connection=data.connection r2 = table.delete_rows key_values_2 key_columns=["X"] r2.should_equal 1 Problems.assume_no_problems r2 table.at "X" . to_vector . should_equal [1, 2] group_builder.specify "should allow to use multiple columns as key" <| - table = target_table_builder [["X", [1, 2, 2, 3, 4, 4]], ["Y", ['a', 'b', 'c', 'd', 'e', 'f']]] primary_key=[] - keys = source_table_builder [["X", [2, 4]], ["Y", ['b', 'f']]] + table = target_table_builder [["X", [1, 2, 2, 3, 4, 4]], ["Y", ['a', 'b', 'c', 'd', 'e', 'f']]] primary_key=[] connection=data.connection + keys = source_table_builder [["X", [2, 4]], ["Y", ['b', 'f']]] connection=data.connection affected_rows = table.delete_rows keys key_columns=["X", "Y"] affected_rows . should_equal 2 table.rows.map .to_vector . 
should_equal [[1, "a"], [2, "c"], [3, "d"], [4, "e"]] group_builder.specify "should fail if key_columns are missing in source or target tables" <| - table = target_table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] primary_key=[] - keys = source_table_builder [["Z", [7, 8]]] + table = target_table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] primary_key=[] connection=data.connection + keys = source_table_builder [["Z", [7, 8]]] connection=data.connection run_with_and_without_output <| r1 = table.delete_rows keys key_columns=["Y"] @@ -1247,14 +1247,14 @@ test_table_delete group_builder (data : Data) source_table_builder target_table_ r3.catch.criteria . should_equal ["neither"] group_builder.specify "should fail if empty key_columns were provided" <| - table = target_table_builder [["X", [1, 2, 3]]] primary_key=["X"] - keys = source_table_builder [["X", [1, 2]]] + table = target_table_builder [["X", [1, 2, 3]]] primary_key=["X"] connection=data.connection + keys = source_table_builder [["X", [1, 2]]] connection=data.connection r1 = table.delete_rows keys key_columns=[] r1.should_fail_with Illegal_Argument group_builder.specify "should fail if multiple rows match a single key_values_to_delete row, unless allow_duplicate_matches is set to True" <| - table = target_table_builder [["X", [1, 2, 2, 3, 2]], ["Y", ['a', 'b', 'c', 'd', 'e']]] primary_key=[] - keys = source_table_builder [["X", [2]], ["Y", ['b']]] + table = target_table_builder [["X", [1, 2, 2, 3, 2]], ["Y", ['a', 'b', 'c', 'd', 'e']]] primary_key=[] connection=data.connection + keys = source_table_builder [["X", [2]], ["Y", ['b']]] connection=data.connection run_with_and_without_output <| r1 = table.delete_rows keys key_columns=["X"] @@ -1269,8 +1269,8 @@ test_table_delete group_builder (data : Data) source_table_builder target_table_ table.rows.map .to_vector . 
should_equal [[1, "a"], [3, "d"]] group_builder.specify "should fail if the target table does not exist" <| - table = target_table_builder [["X", [1, 2, 3]]] - keys = source_table_builder [["X", [1, 2]]] + table = target_table_builder [["X", [1, 2, 3]]] connection=data.connection + keys = source_table_builder [["X", [1, 2]]] connection=data.connection data.connection.drop_table table.name run_with_and_without_output <| @@ -1278,8 +1278,8 @@ test_table_delete group_builder (data : Data) source_table_builder target_table_ table.delete_rows keys . should_fail_with Table_Not_Found group_builder.specify "will warn if not all input rows were checked as part of a dry run" <| - target = target_table_builder [["X", [0, 1, 500, 1500, 3500]]] primary_key=["X"] - source = source_table_builder [["X", (1.up_to 2000).to_vector]] primary_key=["X"] + target = target_table_builder [["X", [0, 1, 500, 1500, 3500]]] primary_key=["X"] connection=data.connection + source = source_table_builder [["X", (1.up_to 2000).to_vector]] primary_key=["X"] connection=data.connection Context.Output.with_disabled <| r1 = target.delete_rows source @@ -1300,16 +1300,16 @@ test_table_delete group_builder (data : Data) source_table_builder target_table_ target.at "X" . to_vector . should_equal [0, 3500] group_builder.specify "will work fine if the target table contains NULL keys" <| - t1 = target_table_builder [["X", ["a", "b", Nothing, "c"]], ["Y", [1, 2, 3, Nothing]]] - s1 = source_table_builder [["X", ["b", "c"]]] + t1 = target_table_builder [["X", ["a", "b", Nothing, "c"]], ["Y", [1, 2, 3, Nothing]]] connection=data.connection + s1 = source_table_builder [["X", ["b", "c"]]] connection=data.connection t1.delete_rows s1 key_columns=["X"] . should_equal 2 m1 = t1.read . order_by "X" m1.at "X" . to_vector . should_equal [Nothing, "a"] m1.at "Y" . to_vector . 
should_equal [3, 1] group_builder.specify "will raise an error if they source table contains NULL keys" <| - t2 = target_table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, Nothing]]] - s2 = source_table_builder [["X", ["b", Nothing]], ["f", [10, 20]]] + t2 = target_table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, Nothing]]] connection=data.connection + s2 = source_table_builder [["X", ["b", Nothing]], ["f", [10, 20]]] connection=data.connection r1 = t2.delete_rows s2 key_columns=["X"] r1.should_fail_with Null_Values_In_Key_Columns r1.catch.to_display_text . should_contain "Nothing values in key columns" @@ -1319,7 +1319,7 @@ test_table_delete group_builder (data : Data) source_table_builder target_table_ tests group_builder (data : Data) make_new_connection source_table_builder (suffix : Text) persistent_connector = group_builder.specify "should return a temporary table with a sample of the data for select_into_database_table"+suffix <| Context.Output.with_disabled <| - src1 = source_table_builder [["X", [1, 2, 3]]] + src1 = source_table_builder [["X", [1, 2, 3]]] connection=data.connection name = (Name_Generator.random_name "table-foo2") r1 = src1.select_into_database_table data.connection name Problems.expect_only_warning Dry_Run_Operation r1 @@ -1332,7 +1332,7 @@ tests group_builder (data : Data) make_new_connection source_table_builder (suff # But a big one will be sampled. n = 2000 - src2 = source_table_builder [["X", (0.up_to n).to_vector]] + src2 = source_table_builder [["X", (0.up_to n).to_vector]] connection=data.connection # We re-use the name - multiple dry-runs for the same table name should be allowed without issues. 
r2 = src2.select_into_database_table data.connection name Problems.expect_only_warning Dry_Run_Operation r2 @@ -1345,7 +1345,7 @@ tests group_builder (data : Data) make_new_connection source_table_builder (suff dest_data = Table.new [["X", [1, 2, 3]]] dest = dest_data.select_into_database_table data.connection (Name_Generator.random_name "target-table") temporary=True primary_key=[] Context.Output.with_disabled <| - src = source_table_builder [["X", [4, 5, 6]]] + src = source_table_builder [["X", [4, 5, 6]]] connection=data.connection r1 = dest.update_rows src update_action=Update_Action.Insert key_columns=[] Problems.expect_only_warning Dry_Run_Operation r1 r1.column_names . should_equal ["X"] @@ -1360,7 +1360,7 @@ tests group_builder (data : Data) make_new_connection source_table_builder (suff dest_data = Table.new [["X", v]] dest = dest_data.select_into_database_table data.connection (Name_Generator.random_name "table-delete-rows-dry") temporary=True primary_key=[] Context.Output.with_disabled <| - src = source_table_builder [["X", [2, 3]]] + src = source_table_builder [["X", [2, 3]]] connection=data.connection r1 = dest.delete_rows src key_columns=["X"] # 2 rows would be deleted r1.should_equal 2 @@ -1369,7 +1369,7 @@ tests group_builder (data : Data) make_new_connection source_table_builder (suff # The target table is unaffected. dest.at "X" . to_vector . 
should_equal v - src2 = source_table_builder [["X", [4]]] + src2 = source_table_builder [["X", [4]]] connection=data.connection r2 = dest.delete_rows src2 key_columns=["X"] allow_duplicate_matches=True # 3 rows would be deleted r2.should_equal 3 From 30760787be64ca612455ce6ae9329e07229a8390 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 18:04:28 +0100 Subject: [PATCH 49/93] Fix setup in Union_Spec --- .../Join/Union_Spec.enso | 185 ++++++++++-------- 1 file changed, 102 insertions(+), 83 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso index 8b50e3300065..6c74ef8f6014 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso @@ -18,14 +18,25 @@ main = run_default_backend add_specs type My_Type Value x y +type Data + Value ~connection + + setup create_connection_fn = + connection = create_connection_fn Nothing + Data.Value connection + + add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func suite_builder.group prefix+"Table.union" group_builder-> + data = Data.setup create_connection_fn + group_builder.specify "should merge columns from multiple tables" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, True]]] - t2 = table_builder [["A", [4, 5, 6]], ["B", ["d", "e", "f"]], ["C", [False, True, False]]] - t3 = table_builder [["A", [7, 8, 9]], ["B", ["g", "h", "i"]], ["C", [True, False, False]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, True]]] connection=data.connection + t2 = table_builder [["A", [4, 5, 6]], ["B", ["d", "e", "f"]], ["C", [False, True, False]]] connection=data.connection + t3 = table_builder [["A", [7, 8, 9]], ["B", ["g", "h", "i"]], ["C", [True, False, False]]] 
connection=data.connection t4 = t1.union t2 expect_column_names ["A", "B", "C"] t4 @@ -40,9 +51,9 @@ add_specs suite_builder setup = t5.at "C" . to_vector . should_equal [True, False, False, True, False, True, False, True, False] group_builder.specify "should fill unmatched columns (by name matching) with nulls and report a warning by default" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] - t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] - t3 = table_builder [["D", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection + t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] connection=data.connection + t3 = table_builder [["D", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] connection=data.connection action = t1.union [t2, t3] on_problems=_ tester table = @@ -64,9 +75,9 @@ add_specs suite_builder setup = Problems.test_problem_handling action2 problems2 tester2 group_builder.specify "should drop unmatched columns if asked to" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] - t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] - t3 = table_builder [["A", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection + t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] connection=data.connection + t3 = table_builder [["A", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] connection=data.connection t4 = t1.union [t2, t3] keep_unmatched_columns=False on_problems=Problem_Behavior.Report_Error Problems.assume_no_problems t4 @@ -74,9 +85,9 @@ add_specs suite_builder setup = t4.at "A" . to_vector . 
should_equal [1, 2, 3, 4, 5, 6, Nothing, Nothing, 0] group_builder.specify "should keep unmatched columns without errors if asked to" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] - t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] - t3 = table_builder [["A", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection + t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] connection=data.connection + t3 = table_builder [["A", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] connection=data.connection t4 = t1.union [t2, t3] keep_unmatched_columns=True on_problems=Problem_Behavior.Report_Error Problems.assume_no_problems t4 @@ -86,17 +97,17 @@ add_specs suite_builder setup = t4.at "C" . to_vector . should_equal [Nothing, Nothing, Nothing, "d", "e", "f", "g", "h", "i"] group_builder.specify "should fail if asked to drop unmatched columns but the set of common columns is empty" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] - t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] - t3 = table_builder [["D", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection + t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] connection=data.connection + t3 = table_builder [["D", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] connection=data.connection t4 = t1.union [t2, t3] keep_unmatched_columns=False on_problems=Problem_Behavior.Ignore t4.should_fail_with No_Output_Columns t4.catch.to_display_text . should_equal "No columns in the result, because of another problem: Unmatched columns are set to be dropped, but no common column names were found." 
group_builder.specify "should ignore column names when matching by position" <| - t1 = table_builder [["A", [1, 2, 3]], ["Y", ["a", "b", "c"]]] - t2 = table_builder [["X", [4, 5, 6]], ["A", ["d", "e", "f"]]] + t1 = table_builder [["A", [1, 2, 3]], ["Y", ["a", "b", "c"]]] connection=data.connection + t2 = table_builder [["X", [4, 5, 6]], ["A", ["d", "e", "f"]]] connection=data.connection t3 = t1.union t2 match_columns=Match_Columns.By_Position expect_column_names ["A", "Y"] t3 @@ -104,9 +115,9 @@ add_specs suite_builder setup = t3.at "Y" . to_vector . should_equal ["a", "b", "c", "d", "e", "f"] group_builder.specify "should fill extra columns (positional matching) with nulls and report a warning by default" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] - t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] - t3 = table_builder [["A2", [10, 20, 30]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection + t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] connection=data.connection + t3 = table_builder [["A2", [10, 20, 30]]] connection=data.connection action = t1.union [t2, t3] match_columns=Match_Columns.By_Position on_problems=_ tester table = @@ -118,9 +129,9 @@ add_specs suite_builder setup = Problems.test_problem_handling action problems tester group_builder.specify "should keep the least number of columns with positional matching if asked to drop unmatched ones" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] - t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] - t3 = table_builder [["A2", [10, 20, 30]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection + t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] connection=data.connection + t3 = table_builder [["A2", [10, 20, 30]]] connection=data.connection t4 = t1.union [t2, t3] 
keep_unmatched_columns=False match_columns=Match_Columns.By_Position on_problems=Problem_Behavior.Report_Error Problems.assume_no_problems t4 @@ -128,9 +139,9 @@ add_specs suite_builder setup = t4.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6, 10, 20, 30] group_builder.specify "should keep the greatest number of columns with positional matching if asked to keep unmatched ones, filling missing values with null and reporting no problems" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] - t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] - t3 = table_builder [["A2", [10, 20, 30]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection + t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] connection=data.connection + t3 = table_builder [["A2", [10, 20, 30]]] connection=data.connection t4 = t1.union [t2, t3] match_columns=Match_Columns.By_Position keep_unmatched_columns=True on_problems=Problem_Behavior.Ignore Problems.assume_no_problems t4 @@ -140,8 +151,8 @@ add_specs suite_builder setup = t4.at "C" . to_vector . 
should_equal [Nothing, Nothing, Nothing, 7, 8, 9, Nothing, Nothing, Nothing] group_builder.specify "should use column names from the first table that has enough columns in positional matching mode" <| - t1 = table_builder [["A", [1, 2, 3]]] - t2 = table_builder [["X", [4, 5, 6]], ["A", ["a", "b", "c"]]] + t1 = table_builder [["A", [1, 2, 3]]] connection=data.connection + t2 = table_builder [["X", [4, 5, 6]], ["A", ["a", "b", "c"]]] connection=data.connection check table = expect_column_names ["X", "A"] table @@ -157,29 +168,29 @@ add_specs suite_builder setup = within_table t4 <| check t4 - t5 = table_builder [["Y", [7, 8, 9]], ["A", ["d", "e", "f"]], ["Z", [10, 11, 12]]] - t6 = table_builder [["W", [0]]] - t7 = table_builder [["X", [7, 8, 9]], ["Y", ["d", "e", "f"]], ["Z", [10, 11, 12]]] + t5 = table_builder [["Y", [7, 8, 9]], ["A", ["d", "e", "f"]], ["Z", [10, 11, 12]]] connection=data.connection + t6 = table_builder [["W", [0]]] connection=data.connection + t7 = table_builder [["X", [7, 8, 9]], ["Y", ["d", "e", "f"]], ["Z", [10, 11, 12]]] connection=data.connection t8 = t1.union [t2, t5, t6, t7] match_columns=Match_Columns.By_Position expect_column_names ["Y", "A", "Z"] t8 group_builder.specify "should allow to merge a table with itself" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection t2 = t1.union [t1, t1] expect_column_names ["A", "B"] t2 t2.at "A" . to_vector . should_equal [1, 2, 3, 1, 2, 3, 1, 2, 3] t2.at "B" . to_vector . 
should_equal ["a", "b", "c", "a", "b", "c", "a", "b", "c"] group_builder.specify "should not de-duplicate rows" <| - t1 = table_builder [["A", [1, 1, 3]], ["B", ["a", "a", "c"]]] - t2 = table_builder [["A", [1, 2, 2]], ["B", ["a", "b", "b"]]] + t1 = table_builder [["A", [1, 1, 3]], ["B", ["a", "a", "c"]]] connection=data.connection + t2 = table_builder [["A", [1, 2, 2]], ["B", ["a", "b", "b"]]] connection=data.connection t3 = t1.union t2 expect_column_names ["A", "B"] t3 t3.at "A" . to_vector . should_equal [1, 1, 3, 1, 2, 2] t3.at "B" . to_vector . should_equal ["a", "a", "c", "a", "b", "b"] group_builder.specify "should gracefully handle the case where no tables to union were provided" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection check_same table = expect_column_names ["A", "B"] table @@ -196,8 +207,8 @@ add_specs suite_builder setup = check_same <| t1.union [] match_columns=Match_Columns.By_Position keep_unmatched_columns=True group_builder.specify "should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <| - t1 = table_builder [["A", ["a", "b", "c"]]] . cast "A" (Value_Type.Char size=1 variable_length=False) - t2 = table_builder [["A", ["xyz", "abc", "def"]]] . cast "A" (Value_Type.Char size=3 variable_length=False) + t1 = (table_builder [["A", ["a", "b", "c"]]] connection=data.connection) . cast "A" (Value_Type.Char size=1 variable_length=False) + t2 = (table_builder [["A", ["xyz", "abc", "def"]]] connection=data.connection) . cast "A" (Value_Type.Char size=3 variable_length=False) t1.at "A" . value_type . should_equal (Value_Type.Char size=1 variable_length=False) t2.at "A" . value_type . should_equal (Value_Type.Char size=3 variable_length=False) @@ -210,8 +221,8 @@ add_specs suite_builder setup = t3.at "A" . 
value_type . variable_length . should_be_true group_builder.specify "should find a common type that will fit the merged columns" <| - t1 = table_builder [["A", [0, 1, 2]]] - t2 = table_builder [["A", [1.0, 2.0, 2.5]]] + t1 = table_builder [["A", [0, 1, 2]]] connection=data.connection + t2 = table_builder [["A", [1.0, 2.0, 2.5]]] connection=data.connection t1.at "A" . value_type . is_integer . should_be_true t2.at "A" . value_type . is_floating_point . should_be_true @@ -223,7 +234,7 @@ add_specs suite_builder setup = # Specific type tests that apply to in-memory. Database behaviour is up to implementation. if setup.is_database.not then - t4 = table_builder [["A", [2^100, 2^10, 2]]] + t4 = table_builder [["A", [2^100, 2^10, 2]]] connection=data.connection t4.at "A" . value_type . should_be_a (Value_Type.Decimal ...) t5 = t2.union t4 @@ -241,8 +252,8 @@ add_specs suite_builder setup = group_builder.specify "should resort to Mixed value type only if at least one column is already Mixed" <| ## TODO currently no way to retype a column to Mixed, so we are using a custom object - t1 = table_builder [["A", [1, 2, 3]], ["mixed", ["a", My_Type.Value 1 2, Nothing]]] - t2 = table_builder [["A", [4, 5, 6]], ["mixed", [1, 2, 3]]] + t1 = table_builder [["A", [1, 2, 3]], ["mixed", ["a", My_Type.Value 1 2, Nothing]]] connection=data.connection + t2 = table_builder [["A", [4, 5, 6]], ["mixed", [1, 2, 3]]] connection=data.connection t1.at "mixed" . value_type . should_equal Value_Type.Mixed t2.at "mixed" . value_type . should_equal Value_Type.Integer @@ -252,8 +263,8 @@ add_specs suite_builder setup = t3.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6] t3.at "mixed" . to_vector . 
should_equal ["a", My_Type.Value 1 2, Nothing, 1, 2, 3] - t4 = table_builder [["A", [1, 3]], ["mixed", [True, False]]] - t5 = table_builder [["A", [4, 5]], ["mixed", ["X", "y"]]] + t4 = table_builder [["A", [1, 3]], ["mixed", [True, False]]] connection=data.connection + t5 = table_builder [["A", [4, 5]], ["mixed", ["X", "y"]]] connection=data.connection t4.at "mixed" . value_type . should_equal Value_Type.Boolean t5.at "mixed" . value_type . should_equal Value_Type.Char @@ -265,8 +276,8 @@ add_specs suite_builder setup = t6.at "mixed" . value_type . should_equal Value_Type.Mixed group_builder.specify "if no common type can be found, should report error and drop the problematic column" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, Nothing]]] - t2 = table_builder [["C", ["x", "Y", "Z"]], ["A", [4, 5, 6]], ["B", [1, 2, 3]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, Nothing]]] connection=data.connection + t2 = table_builder [["C", ["x", "Y", "Z"]], ["A", [4, 5, 6]], ["B", [1, 2, 3]]] connection=data.connection r1 = t1.union t2 on_problems=Problem_Behavior.Report_Error r1.should_fail_with No_Common_Type @@ -292,8 +303,8 @@ add_specs suite_builder setup = r4.should_fail_with No_Common_Type group_builder.specify "if type widening is not allowed, should use the type from first table that contained the given column" <| - t1 = table_builder [["A", [1, 2, 3]]] - t2 = table_builder [["A", [4, 5, 6]], ["B", [1.2, 2.2, 3.1]]] + t1 = table_builder [["A", [1, 2, 3]]] connection=data.connection + t2 = table_builder [["A", [4, 5, 6]], ["B", [1.2, 2.2, 3.1]]] connection=data.connection t3 = t1.union t2 allow_type_widening=False keep_unmatched_columns=True within_table t3 <| @@ -306,8 +317,8 @@ add_specs suite_builder setup = t3.at "B" . value_type . is_floating_point . 
should_be_true group_builder.specify "if type widening is not allowed and types do not match, should report error and drop the problematic column" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", [1, 2, 3]], ["E", [1.1, 2.5, 3.2]]] - t2 = table_builder [["A", [4, 5, 6]], ["B", [1.5, 2.5, 3.5]], ["E", [1, 2, 3]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", [1, 2, 3]], ["E", [1.1, 2.5, 3.2]]] connection=data.connection + t2 = table_builder [["A", [4, 5, 6]], ["B", [1.5, 2.5, 3.5]], ["E", [1, 2, 3]]] connection=data.connection t1.at "B" . value_type . is_integer . should_be_true t1.at "E" . value_type . is_floating_point . should_be_true @@ -332,11 +343,11 @@ add_specs suite_builder setup = # Database backends are not required to support Mixed types. if setup.is_database.not then group_builder.specify "even if type widening is not allowed, if the first column is mixed, it should accept any column to be concatenated to it" <| - t1 = table_builder [["X", ["a", 1, Nothing]]] - t2 = table_builder [["X", [1]]] - t3 = table_builder [["X", [1.2, 2.3, 3.4]]] - t4 = table_builder [["X", ["a", "b"]]] - t5 = table_builder [["X", [True, False]]] + t1 = table_builder [["X", ["a", 1, Nothing]]] connection=data.connection + t2 = table_builder [["X", [1]]] connection=data.connection + t3 = table_builder [["X", [1.2, 2.3, 3.4]]] connection=data.connection + t4 = table_builder [["X", ["a", "b"]]] connection=data.connection + t5 = table_builder [["X", [True, False]]] connection=data.connection t1.at "X" . value_type . should_equal Value_Type.Mixed t2.at "X" . value_type . should_equal Value_Type.Integer @@ -347,9 +358,9 @@ add_specs suite_builder setup = t6.at "X" . to_vector . 
should_equal ["a", 1, Nothing, 1, 1.2, 2.3, 3.4, "a", "b", True, False] group_builder.specify "when finding a common type for numeric columns to be Float, any precision loss should be reported" <| - t1 = table_builder [["X", [1, (2^62)-1, 3]]] - t2 = table_builder [["X", [1.5, 2.5, 3.5]]] - t3 = table_builder [["X", [(2^100)+1, 2^10, 2]]] + t1 = table_builder [["X", [1, (2^62)-1, 3]]] connection=data.connection + t2 = table_builder [["X", [1.5, 2.5, 3.5]]] connection=data.connection + t3 = table_builder [["X", [(2^100)+1, 2^10, 2]]] connection=data.connection t1.at "X" . value_type . should_equal Value_Type.Integer t2.at "X" . value_type . should_equal Value_Type.Float @@ -365,40 +376,48 @@ add_specs suite_builder setup = w.affected_rows_count . should_equal 2 group_builder.specify "if type mismatches cause all columns to be dropped, fail with No_Output_Columns" <| - t1 = table_builder [["A", [1, 2, 3]]] - t2 = table_builder [["A", ['x']]] + t1 = table_builder [["A", [1, 2, 3]]] connection=data.connection + t2 = table_builder [["A", ['x']]] connection=data.connection e3 = t1.union t2 allow_type_widening=True on_problems=Problem_Behavior.Ignore e3.should_fail_with No_Output_Columns - t4 = table_builder [["A", [1.5]]] + t4 = table_builder [["A", [1.5]]] connection=data.connection e5 = t1.union t4 allow_type_widening=False on_problems=Problem_Behavior.Ignore e5.should_fail_with No_Output_Columns - t1 = table_builder [["X", [0, 1, 2]], ["Y", ['aa', 'bb', 'cc']]] . cast "X" (Value_Type.Integer Bits.Bits_16) . cast "Y" (Value_Type.Char size=2 variable_length=False) - t2 = table_builder [["X", [3, 4, 5]], ["Y", ['x', 'y', 'z']]] . cast "X" (Value_Type.Integer Bits.Bits_32) . cast "Y" (Value_Type.Char size=1 variable_length=False) - supports_complex_types = (t1.is_error || t2.is_error || Problems.get_attached_warnings t1 . 
not_empty).not - if supports_complex_types then - group_builder.specify "should find a common type (2)" <| - t12 = t1.union t2 - Problems.assume_no_problems t12 - t12.at "X" . value_type . should_equal (Value_Type.Integer Bits.Bits_32) - t12.at "Y" . value_type . should_equal (Value_Type.Char size=2 variable_length=True) - - t12.at "X" . to_vector . should_equal [0, 1, 2, 3, 4, 5] - t12.at "Y" . to_vector . should_equal ['aa', 'bb', 'cc', 'x', 'y', 'z'] - - group_builder.specify "should fail to find a common type if widening is not allowed (2)" <| - r1 = t1.union t2 allow_type_widening=False - r1.should_fail_with No_Output_Columns - r1.catch.cause . should_be_a Column_Type_Mismatch - r1.catch.to_display_text . should_equal "No columns in the result, because of another problem: The column [X] expects type Integer (16 bits) but one of the provided tables had type Integer (32 bits) which is not compatible with it." - - # And this should report Column_Type_Mismatch as the more important error too: - t1.union t2 allow_type_widening=False on_problems=Problem_Behavior.Report_Error . should_fail_with Column_Type_Mismatch + group_builder.specify "should find a common type (2)" <| + t1 = (table_builder [["X", [0, 1, 2]], ["Y", ['aa', 'bb', 'cc']]] connection=data.connection) . cast "X" (Value_Type.Integer Bits.Bits_16) . cast "Y" (Value_Type.Char size=2 variable_length=False) + t2 = (table_builder [["X", [3, 4, 5]], ["Y", ['x', 'y', 'z']]] connection=data.connection) . cast "X" (Value_Type.Integer Bits.Bits_32) . cast "Y" (Value_Type.Char size=1 variable_length=False) + supports_complex_types = (t1.is_error || t2.is_error || Problems.get_attached_warnings t1 . not_empty).not + case supports_complex_types of + False -> Nothing + True -> + t12 = t1.union t2 + Problems.assume_no_problems t12 + t12.at "X" . value_type . should_equal (Value_Type.Integer Bits.Bits_32) + t12.at "Y" . value_type . should_equal (Value_Type.Char size=2 variable_length=True) + + t12.at "X" . 
to_vector . should_equal [0, 1, 2, 3, 4, 5] + t12.at "Y" . to_vector . should_equal ['aa', 'bb', 'cc', 'x', 'y', 'z'] + + group_builder.specify "should fail to find a common type if widening is not allowed (2)" <| + t1 = (table_builder [["X", [0, 1, 2]], ["Y", ['aa', 'bb', 'cc']]] connection=data.connection) . cast "X" (Value_Type.Integer Bits.Bits_16) . cast "Y" (Value_Type.Char size=2 variable_length=False) + t2 = (table_builder [["X", [3, 4, 5]], ["Y", ['x', 'y', 'z']]] connection=data.connection) . cast "X" (Value_Type.Integer Bits.Bits_32) . cast "Y" (Value_Type.Char size=1 variable_length=False) + supports_complex_types = (t1.is_error || t2.is_error || Problems.get_attached_warnings t1 . not_empty).not + case supports_complex_types of + False -> Nothing + True -> + r1 = t1.union t2 allow_type_widening=False + r1.should_fail_with No_Output_Columns + r1.catch.cause . should_be_a Column_Type_Mismatch + r1.catch.to_display_text . should_equal "No columns in the result, because of another problem: The column [X] expects type Integer (16 bits) but one of the provided tables had type Integer (32 bits) which is not compatible with it." + + # And this should report Column_Type_Mismatch as the more important error too: + t1.union t2 allow_type_widening=False on_problems=Problem_Behavior.Report_Error . 
should_fail_with Column_Type_Mismatch group_builder.specify "should gracefully handle tables from different backends" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection alternative_connection = Database.connect (SQLite In_Memory) t0 = (Table.new [["A", [1, 2, 4]], ["B", ["10", "20", "30"]]]).select_into_database_table alternative_connection "T0" temporary=True From 25662a4b07eacee8a02f0725dc010cc4f67bc2a3 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 18:04:45 +0100 Subject: [PATCH 50/93] Fix setup in Lookup_Spec --- .../Join/Lookup_Spec.enso | 119 ++++++++++-------- 1 file changed, 65 insertions(+), 54 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso index d05d56b0f706..5c62579533c0 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso @@ -14,14 +14,25 @@ import project.Util main = run_default_backend add_specs +type Data + Value ~connection + + setup create_connection_fn = + connection = create_connection_fn Nothing + Data.Value connection + + add_specs suite_builder setup = prefix = setup.prefix table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func materialize = setup.materialize suite_builder.group prefix+"Table.merge" group_builder-> + data = Data.setup create_connection_fn + group_builder.specify "should allow to simply update columns based on a lookup table" <| - lookup = table_builder [["Y", ["A", "B", "A"]], ["X", [1, 2, 3]]] - my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] + lookup = table_builder [["Y", ["A", "B", "A"]], ["X", [1, 2, 3]]] connection=data.connection + my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", 
"ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection t2 = my_table.merge lookup key_columns="X" t2.column_names . should_equal ["X", "Y", "Z"] @@ -31,8 +42,8 @@ add_specs suite_builder setup = m2.at "Y" . to_vector . should_equal ["A", "B", "A", "B"] group_builder.specify "should allow to add new columns from a lookup table" <| - lookup = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]]] - my_table = table_builder [["id", [1, 2, 3, 4]], ["code", ["a", "c", "c", "b"]], ["hmm", [10, 20, 30, 40]]] + lookup = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]]] connection=data.connection + my_table = table_builder [["id", [1, 2, 3, 4]], ["code", ["a", "c", "c", "b"]], ["hmm", [10, 20, 30, 40]]] connection=data.connection t2 = my_table.merge lookup key_columns="code" add_new_columns=True t2.column_names . should_equal ["id", "code", "hmm", "status"] @@ -42,8 +53,8 @@ add_specs suite_builder setup = m2.at "hmm" . to_vector . should_equal [10, 20, 30, 40] m2.at "status" . to_vector . should_equal ["new", "changed", "changed", "old"] - lookup2 = table_builder [["is_X", [True, False]], ["X", ["Yes", "No"]]] - my_table2 = table_builder [["A", [1, 2, 3, 4]], ["is_X", [True, True, False, True]]] + lookup2 = table_builder [["is_X", [True, False]], ["X", ["Yes", "No"]]] connection=data.connection + my_table2 = table_builder [["A", [1, 2, 3, 4]], ["is_X", [True, True, False, True]]] connection=data.connection t3 = my_table2.merge lookup2 key_columns="is_X" add_new_columns=True t3.column_names . should_equal ["A", "is_X", "X"] m3 = t3 |> materialize |> _.order_by "A" @@ -52,8 +63,8 @@ add_specs suite_builder setup = m3.at "X" . to_vector . 
should_equal ["Yes", "Yes", "No", "Yes"] group_builder.specify "will warn if extra columns are unexpected (add_new_columns=False) (default)" <| - lookup = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]]] - my_table = table_builder [["id", [1, 2, 3, 4]], ["code", ["a", "c", "c", "b"]], ["hmm", [10, 20, 30, 40]]] + lookup = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]]] connection=data.connection + my_table = table_builder [["id", [1, 2, 3, 4]], ["code", ["a", "c", "c", "b"]], ["hmm", [10, 20, 30, 40]]] connection=data.connection t2 = my_table.merge lookup key_columns="code" t2.column_names . should_equal ["id", "code", "hmm"] @@ -71,7 +82,7 @@ add_specs suite_builder setup = err2.should_fail_with Unexpected_Extra_Columns err2.catch.columns . should_equal ["status"] - lookup2 = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]], ["hmm", [111, 222, 333]]] + lookup2 = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]], ["hmm", [111, 222, 333]]] connection=data.connection t3 = my_table.merge lookup2 key_columns=["code"] add_new_columns=False t3.column_names . should_equal ["id", "code", "hmm"] m3 = t3 |> materialize |> _.order_by "id" @@ -82,8 +93,8 @@ add_specs suite_builder setup = w3.columns . should_equal ["status"] group_builder.specify "will only update rows that are matched and skip others (default - allow_unmatched_rows=True)" <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] - my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection + my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection t2 = my_table.merge lookup key_columns=["X"] t2.column_names . 
should_equal ["X", "Y", "Z"] @@ -94,8 +105,8 @@ add_specs suite_builder setup = m2.at "Y" . to_vector . should_equal ["A", "B", "ZZZ", "B"] group_builder.specify "will fill new columns of unmatched rows with Nothing (allow_unmatched_rows=True)" <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]], ["W", [1.5, 2.0]]] - my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]], ["W", [1.5, 2.0]]] connection=data.connection + my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection t2 = my_table.merge lookup key_columns=["X"] allow_unmatched_rows=True add_new_columns=True t2.column_names . should_equal ["X", "Y", "Z", "W"] @@ -106,8 +117,8 @@ add_specs suite_builder setup = m2.at "W" . to_vector . should_equal [1.5, 2.0, Nothing, 2.0] group_builder.specify "will report unmatched rows (if allow_unmatched_rows=False)" <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] - my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection + my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection r2 = my_table.merge lookup key_columns=["X"] allow_unmatched_rows=False add_new_columns=True r2.should_fail_with Unmatched_Rows_In_Lookup @@ -115,8 +126,8 @@ add_specs suite_builder setup = r2.catch.to_display_text . should_contain "[3]" # But lookup table containing other keys that are not present in source is NOT a problem. 
- lookup2 = table_builder [["X", [1, 2, 3]], ["Y", ["A", "B", "C"]]] - my_table2 = table_builder [["X", [1, 2, 1, 1]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] + lookup2 = table_builder [["X", [1, 2, 3]], ["Y", ["A", "B", "C"]]] connection=data.connection + my_table2 = table_builder [["X", [1, 2, 1, 1]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection t3 = my_table2.merge lookup2 key_columns=["X"] add_new_columns=True m3 = t3 |> materialize |> _.order_by "Z" m3.at "X" . to_vector . should_equal [1, 2, 1, 1] @@ -124,8 +135,8 @@ add_specs suite_builder setup = m3.at "Z" . to_vector . should_equal [10, 20, 30, 40] group_builder.specify "will fail on missing key columns in either table" <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] - my_table = table_builder [["X", [1, 2, 3, 2]], ["Z", [10, 20, 30, 40]]] + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection + my_table = table_builder [["X", [1, 2, 3, 2]], ["Z", [10, 20, 30, 40]]] connection=data.connection r2 = my_table.merge lookup key_columns=["Y"] r2.should_fail_with Missing_Input_Columns @@ -137,8 +148,8 @@ add_specs suite_builder setup = r3.catch.to_display_text . should_contain "in the lookup table" group_builder.specify "should allow matching by multiple key columns" <| - lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "B"]], ["Z", [100, 100, 200]]] - my_table = table_builder [["X", [1, 1, 1, 2]], ["Y", ["A", "B", "A", "B"]], ["Z", [10, 20, 30, 40]], ["W", [1000, 2000, 3000, 4000]]] + lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "B"]], ["Z", [100, 100, 200]]] connection=data.connection + my_table = table_builder [["X", [1, 1, 1, 2]], ["Y", ["A", "B", "A", "B"]], ["Z", [10, 20, 30, 40]], ["W", [1000, 2000, 3000, 4000]]] connection=data.connection t2 = my_table.merge lookup key_columns=["X", "Y"] t2.column_names . 
should_equal ["X", "Y", "Z", "W"] @@ -150,8 +161,8 @@ add_specs suite_builder setup = m2.at "Z" . to_vector . should_equal [100, 200, 100, 100] group_builder.specify "will fail on duplicate matches in the lookup table" <| - lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "C"]]] - my_table = table_builder [["X", [4, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] + lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "C"]]] connection=data.connection + my_table = table_builder [["X", [4, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection # If the duplicates do not show up in result - it is accepted. t2 = my_table.merge lookup key_columns=["X"] @@ -161,14 +172,14 @@ add_specs suite_builder setup = m2.at "X" . to_vector . should_equal [4, 2, 3, 2] m2.at "Y" . to_vector . should_equal ["Z", "B", "ZZZ", "B"] - my_table2 = table_builder [["X", [1, 2]], ["Y", ["Z", "ZZ"]], ["Z", [10, 20]]] + my_table2 = table_builder [["X", [1, 2]], ["Y", ["Z", "ZZ"]], ["Z", [10, 20]]] connection=data.connection r2 = my_table2.merge lookup key_columns=["X"] r2.should_fail_with Non_Unique_Key r2.catch.key_column_names . should_equal ["X"] r2.catch.clashing_example_key_values . should_equal [1] r2.catch.clashing_example_row_count . should_equal 2 - lookup2 = table_builder [["X", [1, 1]], ["Y", ["A", "A"]], ["Z", [100, 100]]] + lookup2 = table_builder [["X", [1, 1]], ["Y", ["A", "A"]], ["Z", [100, 100]]] connection=data.connection Problems.assume_no_problems <| my_table.merge lookup2 key_columns=["X", "Y"] r3 = my_table2.merge lookup2 key_columns=["X"] r3.should_fail_with Non_Unique_Key @@ -178,7 +189,7 @@ add_specs suite_builder setup = m3.at "X" . to_vector . should_equal [1, 2] m3.at "Y" . to_vector . 
should_equal ["Z", "ZZ"] - my_table3 = table_builder [["X", [1, 1, 2]], ["Y", ["A", "Z", "ZZ"]], ["Z", [10, 20, 30]]] + my_table3 = table_builder [["X", [1, 1, 2]], ["Y", ["A", "Z", "ZZ"]], ["Z", [10, 20, 30]]] connection=data.connection r4 = my_table3.merge lookup2 key_columns=["X", "Y"] r4.should_fail_with Non_Unique_Key r4.catch.key_column_names . should_equal ["X", "Y"] @@ -186,8 +197,8 @@ add_specs suite_builder setup = r4.catch.clashing_example_row_count . should_equal 2 group_builder.specify "will preserve count of rows, even if there are duplicates" <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] - my_table = table_builder [["X", [1, 2, 2, 2, 1]], ["Z", [10, 20, 20, 20, 50]]] + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection + my_table = table_builder [["X", [1, 2, 2, 2, 1]], ["Z", [10, 20, 20, 20, 50]]] connection=data.connection t2 = my_table.merge lookup key_columns=["X"] add_new_columns=True t2.column_names . should_equal ["X", "Z", "Y"] @@ -197,8 +208,8 @@ add_specs suite_builder setup = m2.at "Z" . to_vector . 
should_equal [10, 20, 20, 20, 50] group_builder.specify "should correctly preserve types of original, merged and added columns" <| - table = table_builder [["key1", [0, 1]], ["key2", ["o", "?"]], ["X", [1, 10]], ["Y", ["A", "E"]], ["Z", [1.5, 2.0]], ["W", [True, False]], ["A", [2, 22]], ["B", ["1", "2"]], ["C", [2.0, 2.5]], ["D", [False, False]]] - lookup = table_builder [["key1", [0, 2]], ["key2", ["o", "?"]], ["X2", [100, 1000]], ["Y2", ["foo", "bar"]], ["Z2", [0.5, 4.0]], ["W2", [False, True]], ["A", [3, 55]], ["B", ["F", "F"]], ["C", [3.0, 10.5]], ["D", [True, False]]] + table = table_builder [["key1", [0, 1]], ["key2", ["o", "?"]], ["X", [1, 10]], ["Y", ["A", "E"]], ["Z", [1.5, 2.0]], ["W", [True, False]], ["A", [2, 22]], ["B", ["1", "2"]], ["C", [2.0, 2.5]], ["D", [False, False]]] connection=data.connection + lookup = table_builder [["key1", [0, 2]], ["key2", ["o", "?"]], ["X2", [100, 1000]], ["Y2", ["foo", "bar"]], ["Z2", [0.5, 4.0]], ["W2", [False, True]], ["A", [3, 55]], ["B", ["F", "F"]], ["C", [3.0, 10.5]], ["D", [True, False]]] connection=data.connection [True, False].each allow_unmatched_rows-> table_prepared = if allow_unmatched_rows then table else @@ -226,8 +237,8 @@ add_specs suite_builder setup = t2.at "W2" . value_type . should_equal Value_Type.Boolean if setup.test_selection.fixed_length_text_columns then group_builder.specify "should correctly preserve types of original, merged and added columns (various Char types test case)" <| - table2 = table_builder [["key", ["0"]], ["X", ["a"]], ["A", ["bbbbb"]]] . cast "key" (Value_Type.Char size=50) . cast "X" (Value_Type.Char size=1) . cast "A" (Value_Type.Char size=5 variable_length=False) - lookup2 = table_builder [["key", ["0"]], ["X2", ["ccc"]], ["A", ["dddd"]]] . cast "key" (Value_Type.Char size=100) . cast "X2" (Value_Type.Char size=3 variable_length=False) . 
cast "A" (Value_Type.Char size=4 variable_length=False) + table2 = (table_builder [["key", ["0"]], ["X", ["a"]], ["A", ["bbbbb"]]] connection=data.connection) . cast "key" (Value_Type.Char size=50) . cast "X" (Value_Type.Char size=1) . cast "A" (Value_Type.Char size=5 variable_length=False) + lookup2 = (table_builder [["key", ["0"]], ["X2", ["ccc"]], ["A", ["dddd"]]] connection=data.connection) . cast "key" (Value_Type.Char size=100) . cast "X2" (Value_Type.Char size=3 variable_length=False) . cast "A" (Value_Type.Char size=4 variable_length=False) table2.at "key" . value_type . should_equal (Value_Type.Char size=50 variable_length=True) table2.at "X" . value_type . should_equal (Value_Type.Char size=1 variable_length=True) @@ -255,8 +266,8 @@ add_specs suite_builder setup = t3.at "A" . value_type . should_equal (Value_Type.Char size=4 variable_length=False) group_builder.specify "will report Floating_Point_Equality if floating-point columns are used as key" <| - lookup = table_builder [["X", [1.0, 2.0, 3.0]], ["Y", ["A", "B", "C"]]] - my_table = table_builder [["X", [2.0, 3.0, 2.0, 3.0]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] + lookup = table_builder [["X", [1.0, 2.0, 3.0]], ["Y", ["A", "B", "C"]]] connection=data.connection + my_table = table_builder [["X", [2.0, 3.0, 2.0, 3.0]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection lookup.at "X" . value_type . is_floating_point . should_be_true t2 = my_table.merge lookup key_columns="X" @@ -270,8 +281,8 @@ add_specs suite_builder setup = w2.to_display_text . 
should_contain "X" group_builder.specify "will fail with No_Common_Type if types of updated columns are not compatible" <| - lookup = table_builder [["X", [1, 2, 3]], ["Y", [1, 11, 111]]] - my_table = table_builder [["X", [2, 3, 2, 3]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] + lookup = table_builder [["X", [1, 2, 3]], ["Y", [1, 11, 111]]] connection=data.connection + my_table = table_builder [["X", [2, 3, 2, 3]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection r2 = my_table.merge lookup key_columns="X" r2.should_fail_with No_Common_Type @@ -280,8 +291,8 @@ add_specs suite_builder setup = r2.catch.to_display_text . should_contain "when unifying column [Y]" group_builder.specify "will allow incompatible types if allow_unmatched_rows=False" <| - lookup = table_builder [["X", [1, 2, 3]], ["Y", [1, 11, 111]]] - my_table = table_builder [["X", [2, 3, 2, 3]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] + lookup = table_builder [["X", [1, 2, 3]], ["Y", [1, 11, 111]]] connection=data.connection + my_table = table_builder [["X", [2, 3, 2, 3]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection my_table.at "Y" . value_type . is_text . should_be_true t2 = my_table.merge lookup key_columns="X" allow_unmatched_rows=False @@ -293,14 +304,14 @@ add_specs suite_builder setup = m2.at "Z" . to_vector . 
should_equal [10, 20, 30, 40] group_builder.specify "will fail if key columns of the lookup table contain Nothing" <| - lookup1 = table_builder [["X", [1, 2, Nothing]], ["Y", ["A", "B", "C"]]] - my_table1 = table_builder [["X", [2, 3, 2, 3]], ["Z", [10, 20, 30, 40]]] + lookup1 = table_builder [["X", [1, 2, Nothing]], ["Y", ["A", "B", "C"]]] connection=data.connection + my_table1 = table_builder [["X", [2, 3, 2, 3]], ["Z", [10, 20, 30, 40]]] connection=data.connection r1 = my_table1.merge lookup1 key_columns="X" add_new_columns=True r1.should_fail_with Null_Values_In_Key_Columns # But NULLs in source table key are OK - lookup2 = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] - my_table2 = table_builder [["X", [2, 3, Nothing, 3]], ["Z", [10, 20, 30, 40]]] + lookup2 = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection + my_table2 = table_builder [["X", [2, 3, Nothing, 3]], ["Z", [10, 20, 30, 40]]] connection=data.connection t2 = my_table2.merge lookup2 key_columns="X" allow_unmatched_rows=True add_new_columns=True m2 = t2 |> materialize |> _.order_by "Z" m2.at "X" . to_vector . 
should_equal [2, 3, Nothing, 3] @@ -312,17 +323,17 @@ add_specs suite_builder setup = r3.should_fail_with Unmatched_Rows_In_Lookup group_builder.specify "will not allow providing no key_columns" <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] - my_table = table_builder [["X", [2, 1]], ["Z", [10, 20]]] + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection + my_table = table_builder [["X", [2, 1]], ["Z", [10, 20]]] connection=data.connection r2 = my_table.merge lookup key_columns=[] add_new_columns=True r2.should_fail_with Illegal_Argument if setup.is_database.not then group_builder.specify "(in-memory only) will preserve the order of rows from the original table" <| - lookup = table_builder [["Y", [1, 0]], ["V", ["TRUE", "FALSE"]]] + lookup = table_builder [["Y", [1, 0]], ["V", ["TRUE", "FALSE"]]] connection=data.connection xs = 0.up_to 50 . to_vector ys = xs.map x-> x%2 - my_table = table_builder [["X", xs], ["Y", ys]] + my_table = table_builder [["X", xs], ["Y", ys]] connection=data.connection t2 = my_table.merge lookup key_columns="Y" add_new_columns=True t2.column_names . should_equal ["X", "Y", "V"] @@ -334,8 +345,8 @@ add_specs suite_builder setup = if setup.is_database then group_builder.specify "(database-only) will fail if pre-checked invariants get invalidated between the query is constructed and then materialized" <| Test.with_clue "(lookup is unique check) " <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] - table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection + table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] connection=data.connection r1 = table.merge lookup key_columns="X" # Immediately, the query is all good. @@ -374,8 +385,8 @@ add_specs suite_builder setup = r3.at "Z" . to_vector . 
should_fail_with Invariant_Violation Test.with_clue "(no unmatched rows check - added a row in source) " <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] - table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection + table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] connection=data.connection r1 = table.merge lookup key_columns="X" allow_unmatched_rows=False # Immediately, the query is all good. @@ -396,8 +407,8 @@ add_specs suite_builder setup = m2.should_fail_with Invariant_Violation Test.with_clue "(no unmatched rows check - removed a row in lookup) " <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] - table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection + table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] connection=data.connection r1 = table.merge lookup key_columns="X" allow_unmatched_rows=False # Immediately, the query is all good. @@ -419,7 +430,7 @@ add_specs suite_builder setup = # This does not seem useful really, but there is no reason to disallow it, so we should ensure it does not crash. group_builder.specify "(edge-case) should allow lookup with itself" <| - table = table_builder [["X", [1, 2, 3]], ["Y", ["A", "B", "C"]]] + table = table_builder [["X", [1, 2, 3]], ["Y", ["A", "B", "C"]]] connection=data.connection t2 = table.merge table key_columns="X" t2.column_names . should_equal ["X", "Y"] @@ -428,7 +439,7 @@ add_specs suite_builder setup = m2.at "Y" . to_vector . 
should_equal ["A", "B", "C"] group_builder.specify "should gracefully handle tables from different backends" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection alternative_connection = Database.connect (SQLite In_Memory) t0 = (Table.new [["A", [3, 2, 1]], ["B", ["x", "y", "z"]]]).select_into_database_table alternative_connection "T0" temporary=True From ddb97e2d132b810255dbdf322a2b55223a8e81aa Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 18:04:55 +0100 Subject: [PATCH 51/93] Fix some typos --- .../src/Common_Table_Operations/Add_Row_Number_Spec.enso | 5 +++-- test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso | 6 +++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso index 5e5e715706fe..1879254490e8 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso @@ -196,8 +196,9 @@ add_specs suite_builder setup = if setup.is_database then suite_builder.group prefix+"Table.add_row_number (Database specific)" group_builder-> group_builder.specify "will use the primary key by default" <| - src = table_builder [["X", [500, 400, 30, 1, 2]], ["Y", [10, 20, 30, 40, 50]]] - db_table = src.select_into_database_table setup.connection (Name_Generator.random_name "add-row-number-test-1") temporary=True primary_key=["X"] + connection = setup.create_connection_func Nothing + src = table_builder [["X", [500, 400, 30, 1, 2]], ["Y", [10, 20, 30, 40, 50]]] connection=connection + db_table = src.select_into_database_table connection (Name_Generator.random_name "add-row-number-test-1") temporary=True primary_key=["X"] t2 = db_table.add_row_number |> materialize |> _.order_by ["Y"] t2.at "Y" . to_vector . 
should_equal [10, 20, 30, 40, 50] diff --git a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso index 224ab36094d2..3ef01fa124b2 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso @@ -220,11 +220,11 @@ add_specs suite_builder setup = group_builder.specify "should allow weird column names in all backends" <| columns = weird_names.map_with_index ix-> name-> [name, [100+ix, 2, 3]] - data.table = table_builder columns - data.table.column_names . should_equal weird_names + table = table_builder columns + table.column_names . should_equal weird_names weird_names.map_with_index ix-> name-> - data.table.at name . to_vector . should_equal [100+ix, 2, 3] + table.at name . to_vector . should_equal [100+ix, 2, 3] suite_builder.group prefix+"Table.column_count" group_builder-> data = Data.setup create_connection_fn table_builder From 45be0655d10030e4106a968231049c5fc2a28374 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 18:05:50 +0100 Subject: [PATCH 52/93] Fix typos in SQLite_Spec --- .../Table_Tests/src/Database/SQLite_Spec.enso | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 896d7c2baf18..16252a605867 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -23,12 +23,12 @@ import project.Database.Types.SQLite_Type_Mapping_Spec import project.Database.Helpers.Name_Generator import project.Common_Table_Operations -type Data +type Test_Data Value ~connection setup create_connection_func = connection = create_connection_func Nothing - Data.Value connection + Test_Data.Value connection type Metadata_Data @@ -77,7 +77,7 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup = 
table_builder = setup.table_builder suite_builder.group prefix+"Schemas and Databases" group_builder-> - data = Data.setup create_connection_func + data = Test_Data.setup create_connection_func group_builder.specify "should be able to get current database and list databases" <| data.connection.database . should_equal Nothing @@ -132,7 +132,7 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup = tables.at "Name" . to_vector . contains data.vinfo . should_be_true suite_builder.group prefix+"Error Handling" group_builder-> - data = Data.setup create_connection_func + data = Test_Data.setup create_connection_func group_builder.specify "should wrap errors" <| data.connection.read (SQL_Query.Raw_SQL "foobar") . should_fail_with SQL_Error @@ -380,26 +380,26 @@ suite = Test.build suite_builder-> Auto_Detect.get_writing_format (enso_project.data / "nonexistent-data.sqlite") . should_be_a SQLite_Format group_builder.specify "should not recognise nonexistent or empty files for reading" <| - r1 = data.read (enso_project.data / "nonexistent-data.db") + r1 = Data.read (enso_project.data / "nonexistent-data.db") r1.should_fail_with File_Error r1.catch . should_be_a File_Error.Not_Found empty = enso_project.data / "transient" / "empty-data.db" "".write empty on_existing_file=Existing_File_Behavior.Overwrite . should_succeed - r2 = data.read empty + r2 = Data.read empty r2.should_fail_with File_Error r2.catch . should_be_a File_Error.Unsupported_Type empty.delete_if_exists broken = enso_project.data / "transient" / "empty-data.db" "SOME_RANDOM_DATA".write empty on_existing_file=Existing_File_Behavior.Overwrite . should_succeed - r3 = data.read broken + r3 = Data.read broken r3.should_fail_with File_Error r3.catch . should_be_a File_Error.Unsupported_Type broken.delete_if_exists group_builder.specify "should connect to a db file" <| - connection = data.read data.file + connection = Data.read data.file tables = connection.tables tables.row_count . 
should_not_equal 0 connection.close @@ -412,6 +412,6 @@ suite = Test.build suite_builder-> Warning.get_all t2 . length . should_equal 1 main = - IO.println <| "=========" - suite.print_all - IO.println <| "=========" + group_filter = Regex.compile ".*" + spec_filter = Regex.compile ".*" + suite.run_with_filter group_filter spec_filter From 9574b3a84e395509814bab2610ceef2f1db11107 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 18:07:31 +0100 Subject: [PATCH 53/93] Remove Bool_Spec_New from Base_Test --- test/Tests/src/Data/Bool_Spec_New.enso | 52 -------------------------- 1 file changed, 52 deletions(-) delete mode 100644 test/Tests/src/Data/Bool_Spec_New.enso diff --git a/test/Tests/src/Data/Bool_Spec_New.enso b/test/Tests/src/Data/Bool_Spec_New.enso deleted file mode 100644 index bffe69c14fb3..000000000000 --- a/test/Tests/src/Data/Bool_Spec_New.enso +++ /dev/null @@ -1,52 +0,0 @@ -from Standard.Base import all -from Standard.Test_New import all - -Boolean.method self = self - -type My_Error - Value a - -crash = - Error.throw (My_Error.Value "foo") - -suite = Test.build builder-> - builder.group "Booleans" group_builder-> - group_builder.specify "should allow converting Bools to Text values" <| - True.to_text . should_equal "True" - False.to_text . should_equal "False" - - group_builder.specify "should allow for comparing Bools" <| - (True == True) . should_be_true - (False == False) . should_be_true - (True > False) . should_be_true - (False < True) . should_be_true - - group_builder.specify "should allow == operator" <| - True.should_equal True - False.should_equal False - True.should_not_equal False - False.should_not_equal True - (1 == 1).should_equal True - - group_builder.specify "should allow for extending Bools in a local module" <| - test = 1 == 2 - test.method . should_equal test - - group_builder.specify "should short-circuit ||" <| - (1 == 1) || (crash) . should_equal True - (1 == 0) || (1 == 1) . 
should_equal True - (1 == 0) || (crash) . should_fail_with My_Error - (1 == 1) || "foo" . should_equal True - (1 == 0) || "foo" . should_equal "foo" - - group_builder.specify "should short-circuit &&" <| - (1 == 0) && (crash) . should_equal False - (1 == 1) && (1 == 0) . should_equal False - (1 == 1) && (1 == 1) . should_equal True - (1 == 1) && (crash) . should_fail_with My_Error - (1 == 0) && "foo" . should_equal False - (1 == 1) && "foo" . should_equal "foo" - - -main = - suite.run_all From 6b42315726401f4cbc107ea93dd644722c3a0de5 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 18 Jan 2024 18:16:28 +0100 Subject: [PATCH 54/93] Refactor Table_Tests/src/Helpers to Test_New --- test/Table_Tests/src/Helpers/Main.enso | 14 ++-- .../src/Helpers/Sorted_List_Index_Spec.enso | 79 +++++++++++-------- .../Helpers/Unique_Naming_Strategy_Spec.enso | 50 ++++++------ .../src/Helpers/Value_Type_Spec.enso | 17 ++-- 4 files changed, 87 insertions(+), 73 deletions(-) diff --git a/test/Table_Tests/src/Helpers/Main.enso b/test/Table_Tests/src/Helpers/Main.enso index f2f3f68f06bc..19a9f9de1ca4 100644 --- a/test/Table_Tests/src/Helpers/Main.enso +++ b/test/Table_Tests/src/Helpers/Main.enso @@ -1,14 +1,14 @@ from Standard.Base import all -from Standard.Test import Test_Suite +from Standard.Test_New import all import project.Helpers.Sorted_List_Index_Spec import project.Helpers.Unique_Naming_Strategy_Spec import project.Helpers.Value_Type_Spec -spec = - Unique_Naming_Strategy_Spec.spec - Sorted_List_Index_Spec.spec - Value_Type_Spec.spec - -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + Unique_Naming_Strategy_Spec.add_specs suite_builder + Sorted_List_Index_Spec.add_specs suite_builder + Value_Type_Spec.add_specs suite_builder + suite.run_with_filter diff --git a/test/Table_Tests/src/Helpers/Sorted_List_Index_Spec.enso b/test/Table_Tests/src/Helpers/Sorted_List_Index_Spec.enso index 0433f397d86a..e72e4da01470 100644 --- 
a/test/Table_Tests/src/Helpers/Sorted_List_Index_Spec.enso +++ b/test/Table_Tests/src/Helpers/Sorted_List_Index_Spec.enso @@ -3,59 +3,68 @@ from Standard.Base import all # We need this import, to ensure that we depend on `Standard.Table`, so that the Java import of `org.enso.table` is valid. from Standard.Table import all -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all polyglot java import java.util.Comparator polyglot java import org.enso.table.data.table.join.between.SortedListIndex -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + +make_index vec = SortedListIndex.build vec Comparator.naturalOrder + +type Data + Value ~index1 + + setup = + v1 = [0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3, 4, 5, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 10, 10, 10, 10, 11, 14, 17, 19] + v1_shuffled = v1.take (Index_Sub_Range.Sample v1.length) + Data.Value (make_index v1_shuffled) + ## White-box tests for the SortedListIndex, ensuring correctness of the implementation - these are additional tests apart from the `Join_Condition.Between` test cases, to ensure no off-by-one errors or other bugs are present in the implementation. -spec = Test.group "SortedListIndex (used for SortJoin)" <| - make_index vec = SortedListIndex.build vec Comparator.naturalOrder - - v1 = [0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3, 4, 5, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 10, 10, 10, 10, 11, 14, 17, 19] - v1_shuffled = v1.take (Index_Sub_Range.Sample v1.length) - index1 = make_index v1_shuffled - - Test.specify "should correctly handle empty matches" <| - Vector.from_polyglot_array (index1.findSubRange 9 9) . should_equal [] - Vector.from_polyglot_array (index1.findSubRange -10 -2) . should_equal [] - Vector.from_polyglot_array (index1.findSubRange 200 300) . should_equal [] - Vector.from_polyglot_array (index1.findSubRange 20 0) . 
should_equal [] +add_specs suite_builder = suite_builder.group "SortedListIndex (used for SortJoin)" group_builder-> + data = Data.setup + + group_builder.specify "should correctly handle empty matches" <| + Vector.from_polyglot_array (data.index1.findSubRange 9 9) . should_equal [] + Vector.from_polyglot_array (data.index1.findSubRange -10 -2) . should_equal [] + Vector.from_polyglot_array (data.index1.findSubRange 200 300) . should_equal [] + Vector.from_polyglot_array (data.index1.findSubRange 20 0) . should_equal [] - Test.specify "should correctly handle single-element matches" <| - Vector.from_polyglot_array (index1.findSubRange 8 8) . should_equal [8] - Vector.from_polyglot_array (index1.findSubRange 12 16) . should_equal [14] - Vector.from_polyglot_array (index1.findSubRange 18 100) . should_equal [19] - Vector.from_polyglot_array (index1.findSubRange 19 100) . should_equal [19] - Vector.from_polyglot_array (index1.findSubRange 19 19) . should_equal [19] + group_builder.specify "should correctly handle single-element matches" <| + Vector.from_polyglot_array (data.index1.findSubRange 8 8) . should_equal [8] + Vector.from_polyglot_array (data.index1.findSubRange 12 16) . should_equal [14] + Vector.from_polyglot_array (data.index1.findSubRange 18 100) . should_equal [19] + Vector.from_polyglot_array (data.index1.findSubRange 19 100) . should_equal [19] + Vector.from_polyglot_array (data.index1.findSubRange 19 19) . should_equal [19] - Test.specify "should correctly handle matches" <| - Vector.from_polyglot_array (index1.findSubRange 4 6) . should_equal [4, 5, 6] - Vector.from_polyglot_array (index1.findSubRange 3 5) . should_equal [3, 3, 4, 5] + group_builder.specify "should correctly handle matches" <| + Vector.from_polyglot_array (data.index1.findSubRange 4 6) . should_equal [4, 5, 6] + Vector.from_polyglot_array (data.index1.findSubRange 3 5) . should_equal [3, 3, 4, 5] - Vector.from_polyglot_array (index1.findSubRange 0 3) . 
should_equal [0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3] - Vector.from_polyglot_array (index1.findSubRange 2 4) . should_equal [2, 2, 2, 3, 3, 4] - Vector.from_polyglot_array (index1.findSubRange 8 10) . should_equal [8, 10, 10, 10, 10] - Vector.from_polyglot_array (index1.findSubRange 8 11) . should_equal [8, 10, 10, 10, 10, 11] - Vector.from_polyglot_array (index1.findSubRange 8 12) . should_equal [8, 10, 10, 10, 10, 11] - Vector.from_polyglot_array (index1.findSubRange 9 12) . should_equal [10, 10, 10, 10, 11] + Vector.from_polyglot_array (data.index1.findSubRange 0 3) . should_equal [0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3] + Vector.from_polyglot_array (data.index1.findSubRange 2 4) . should_equal [2, 2, 2, 3, 3, 4] + Vector.from_polyglot_array (data.index1.findSubRange 8 10) . should_equal [8, 10, 10, 10, 10] + Vector.from_polyglot_array (data.index1.findSubRange 8 11) . should_equal [8, 10, 10, 10, 10, 11] + Vector.from_polyglot_array (data.index1.findSubRange 8 12) . should_equal [8, 10, 10, 10, 10, 11] + Vector.from_polyglot_array (data.index1.findSubRange 9 12) . should_equal [10, 10, 10, 10, 11] - Test.specify "should correctly handle big all-equal ranges" <| - Vector.from_polyglot_array (index1.findSubRange 1 1) . should_equal [1, 1, 1, 1] - Vector.from_polyglot_array (index1.findSubRange 7 7) . should_equal [7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7] + group_builder.specify "should correctly handle big all-equal ranges" <| + Vector.from_polyglot_array (data.index1.findSubRange 1 1) . should_equal [1, 1, 1, 1] + Vector.from_polyglot_array (data.index1.findSubRange 7 7) . should_equal [7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7] - Test.specify "other cases: empty index" <| + group_builder.specify "other cases: empty index" <| index2 = make_index [] Vector.from_polyglot_array (index2.findSubRange 1 5) . 
should_equal [] - Test.specify "other cases: single element index" <| + group_builder.specify "other cases: single element index" <| index2 = make_index [5] Vector.from_polyglot_array (index2.findSubRange 1 5) . should_equal [5] Vector.from_polyglot_array (index2.findSubRange 5 5) . should_equal [5] diff --git a/test/Table_Tests/src/Helpers/Unique_Naming_Strategy_Spec.enso b/test/Table_Tests/src/Helpers/Unique_Naming_Strategy_Spec.enso index e449ecef3a02..c878dfba5517 100644 --- a/test/Table_Tests/src/Helpers/Unique_Naming_Strategy_Spec.enso +++ b/test/Table_Tests/src/Helpers/Unique_Naming_Strategy_Spec.enso @@ -6,25 +6,24 @@ import Standard.Table.Internal.Unique_Name_Strategy.Unique_Name_Strategy import Standard.Database.Internal.Common.Encoding_Limited_Naming_Properties.Encoding_Limited_Naming_Properties -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all -spec = +add_specs suite_builder = no_limit = Unlimited_Naming_Properties.Instance - Test.group 'Unique_Name_Strategy Helper' <| - Test.specify 'should change an empty name to "Column"' <| + suite_builder.group 'Unique_Name_Strategy Helper' group_builder-> + group_builder.specify 'should change an empty name to "Column"' <| strategy = Unique_Name_Strategy.new no_limit strategy.make_valid_name "" . should_equal "Column" strategy.make_valid_name "FOO" . should_equal "FOO" strategy.make_valid_name "Column" . should_equal "Column" strategy.invalid_names.length . should_equal 1 - Test.specify 'should change Nothing to "Column"' <| + group_builder.specify 'should change Nothing to "Column"' <| strategy = Unique_Name_Strategy.new no_limit strategy.make_valid_name Nothing . should_equal "Column" strategy.invalid_names.length . should_equal 1 - Test.specify 'should not rename unique names' <| + group_builder.specify 'should not rename unique names' <| strategy = Unique_Name_Strategy.new no_limit strategy.make_unique "A" . 
should_equal "A" strategy.make_unique "B" . should_equal "B" @@ -32,7 +31,7 @@ spec = strategy.renames.length . should_equal 0 strategy.invalid_names.length . should_equal 0 - Test.specify 'should rename duplicates names' <| + group_builder.specify 'should rename duplicates names' <| strategy = Unique_Name_Strategy.new no_limit strategy.make_unique "A" . should_equal "A" strategy.make_unique "A" . should_equal "A 1" @@ -40,7 +39,7 @@ spec = strategy.renames.length . should_equal 2 strategy.invalid_names.length . should_equal 0 - Test.specify 'should preserve existing suffix' <| + group_builder.specify 'should preserve existing suffix' <| strategy = Unique_Name_Strategy.new no_limit strategy.make_unique "A" . should_equal "A" strategy.make_unique "A 1" . should_equal "A 1" @@ -49,7 +48,7 @@ spec = strategy.renames.length . should_equal 2 strategy.invalid_names.length . should_equal 0 - Test.specify "should always add a counter when renaming invalid names" <| + group_builder.specify "should always add a counter when renaming invalid names" <| strategy = Unique_Name_Strategy.new no_limit strategy.make_unique "" . should_equal "Column 1" strategy.make_unique "" . should_equal "Column 2" @@ -58,7 +57,7 @@ spec = strategy.make_unique "Column" . should_equal "Column" strategy.make_unique "" . should_equal "Column 4" - Test.specify 'should work as in examples' <| + group_builder.specify 'should work as in examples' <| unique_name_strategy = Unique_Name_Strategy.new no_limit unique_names = ["A", "B", "A", ""] . map unique_name_strategy.make_unique duplicates = unique_name_strategy.renames @@ -71,7 +70,7 @@ spec = strategy_1.make_unique "A" . should_equal "A" strategy_1.make_unique "A" . 
should_equal "A 1" - Test.specify "should treat string equality consistently with Enso" <| + group_builder.specify "should treat string equality consistently with Enso" <| s1 = 'ś' s2 = 's\u0301' # Enso makes these values equal @@ -91,7 +90,7 @@ spec = # But the underlying representation should remain unchanged. r2.codepoints . should_equal [115, 769, 32, 49] - Test.specify "should work with a string size limit" <| + group_builder.specify "should work with a string size limit" <| limit = Encoding_Limited_Naming_Properties.Instance encoding=Encoding.utf_8 limit=5 strategy = Unique_Name_Strategy.new limit @@ -125,7 +124,7 @@ spec = strategy.make_valid_name "abc" . should_equal "abc" strategy.make_valid_name "123456789" . should_equal "12345" - Test.specify "should handle too small limits gracefully" <| + group_builder.specify "should handle too small limits gracefully" <| limit = Encoding_Limited_Naming_Properties.Instance encoding=Encoding.utf_8 limit=1 strategy = Unique_Name_Strategy.new limit strategy.make_unique "A" . should_equal "A" @@ -148,7 +147,7 @@ spec = strategy2.make_unique "B" . should_equal "B" strategy2.make_unique "B" . should_fail_with Illegal_Argument - Test.specify "should correctly handle graphemes spanning multiple units with size limit" <| + group_builder.specify "should correctly handle graphemes spanning multiple units with size limit" <| limit = Encoding_Limited_Naming_Properties.Instance encoding=Encoding.utf_8 limit=3 strategy = Unique_Name_Strategy.new limit @@ -175,7 +174,7 @@ spec = strategy2.make_unique facepalm+facepalm . should_equal facepalm+" 1" strategy2.make_unique facepalm+facepalm . 
should_equal facepalm+" 2" - Test.specify "should fail gracefully when encountering not-encodable characters" <| + group_builder.specify "should fail gracefully when encountering not-encodable characters" <| limit = Encoding_Limited_Naming_Properties.Instance encoding=Encoding.ascii limit=5 strategy = Unique_Name_Strategy.new limit strategy.make_valid_name "ąęś" . should_fail_with Illegal_Argument @@ -183,8 +182,8 @@ spec = strategy.make_unique "ABC" . should_equal "ABC" strategy.combine_with_prefix ["A"] ["ą"] "P_" . should_fail_with Illegal_Argument - Test.group "Unique_Name_Strategy.combine_with_prefix" <| - Test.specify "should work as in examples" <| + suite_builder.group "Unique_Name_Strategy.combine_with_prefix" group_builder-> + group_builder.specify "should work as in examples" <| strategy = Unique_Name_Strategy.new no_limit first = ["A", "B", "second_A"] second = ["A", "B", "second_A 1", "C"] @@ -193,19 +192,19 @@ spec = strategy.invalid_names . should_equal [] strategy.renames . should_equal ["second_A"] - Test.specify "should work with no prefix" <| + group_builder.specify "should work with no prefix" <| first = ["A", "B"] second = ["B", "A", "C"] strategy = Unique_Name_Strategy.new no_limit r = strategy.combine_with_prefix first second "" r . should_equal ["B 1", "A 1", "C"] - Test.specify "should work for empty input" <| + group_builder.specify "should work for empty input" <| Unique_Name_Strategy.new no_limit . combine_with_prefix [] [] "" . should_equal [] Unique_Name_Strategy.new no_limit . combine_with_prefix ["a"] [] "" . should_equal [] Unique_Name_Strategy.new no_limit . combine_with_prefix [] ["a"] "" . should_equal ["a"] - Test.specify "should find the first free spot" <| + group_builder.specify "should find the first free spot" <| Unique_Name_Strategy.new no_limit . combine_with_prefix ["A", "A 1", "A 2"] ["A"] "" . should_equal ["A 3"] Unique_Name_Strategy.new no_limit . 
combine_with_prefix ["A", "A 1", "A 2"] ["A 4", "A 6", "A 100", "A", "A 3"] "" . should_equal ["A 4", "A 6", "A 100", "A 5", "A 3"] @@ -214,13 +213,13 @@ spec = Unique_Name_Strategy.new no_limit . combine_with_prefix ["A", "A 1", "A 2", "P_A 1"] ["A"] "P_" . should_equal ["P_A"] Unique_Name_Strategy.new no_limit . combine_with_prefix ["A", "A 1", "A 2", "P_A 1"] ["A", "P_A", "P_A 2"] "P_" . should_equal ["P_A 3", "P_A", "P_A 2"] - Test.specify "will add a prefix/suffix, not increment an existing counter" <| + group_builder.specify "will add a prefix/suffix, not increment an existing counter" <| first = ["A", "A 1", "A 2", "A 3"] Unique_Name_Strategy.new no_limit . combine_with_prefix first ["A 2"] "P_" . should_equal ["P_A 2"] Unique_Name_Strategy.new no_limit . combine_with_prefix first ["A 2"] "" . should_equal ["A 2 1"] Unique_Name_Strategy.new no_limit . combine_with_prefix first+["P_A 2"] ["A 2"] "P_" . should_equal ["P_A 2 1"] - Test.specify "should prioritize existing names when renaming conflicts and rename only ones that are clashing with the other list" <| + group_builder.specify "should prioritize existing names when renaming conflicts and rename only ones that are clashing with the other list" <| first = ["A", "B"] second = ["B", "A", "B 1", "C", "B 2", "B_4"] strategy = Unique_Name_Strategy.new no_limit @@ -236,4 +235,7 @@ spec = r3 = Unique_Name_Strategy.new no_limit . combine_with_prefix first third "P_" r3 . 
should_equal ["P_B 3", "P_A", "P_B", "X", "P_B 1", "P_B 2"] -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter diff --git a/test/Table_Tests/src/Helpers/Value_Type_Spec.enso b/test/Table_Tests/src/Helpers/Value_Type_Spec.enso index 294dc0d80192..9df0152ae14f 100644 --- a/test/Table_Tests/src/Helpers/Value_Type_Spec.enso +++ b/test/Table_Tests/src/Helpers/Value_Type_Spec.enso @@ -4,12 +4,11 @@ import Standard.Table.Data.Type.Value_Type.Bits import Standard.Table.Data.Type.Value_Type.Value_Type import Standard.Table.Data.Type.Value_Type_Helpers -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all -spec = - Test.group "Value_Type" <| - Test.specify "should have a nice display text representation" <| +add_specs suite_builder = + suite_builder.group "Value_Type" group_builder-> + group_builder.specify "should have a nice display text representation" <| Value_Type.Boolean.to_display_text . should_equal "Boolean" Value_Type.Byte.to_display_text . should_equal "Byte" @@ -28,7 +27,7 @@ spec = Value_Type.Unsupported_Data_Type.to_display_text . should_equal "Unsupported_Data_Type" (Value_Type.Unsupported_Data_Type "FOO-BAR").to_display_text . should_equal "Unsupported_Data_Type (FOO-BAR)" - Test.specify "should use correct in-memory logic to reconcile pairs of types for operations like union/iif" <| + group_builder.specify "should use correct in-memory logic to reconcile pairs of types for operations like union/iif" <| Value_Type_Helpers.reconcile_types Value_Type.Boolean Value_Type.Boolean . should_equal Value_Type.Boolean Value_Type_Helpers.reconcile_types (Value_Type.Integer Bits.Bits_16) (Value_Type.Integer Bits.Bits_32) . should_equal (Value_Type.Integer Bits.Bits_32) @@ -62,4 +61,8 @@ spec = Value_Type_Helpers.reconcile_types Value_Type.Boolean Value_Type.Byte . 
should_equal Value_Type.Mixed Value_Type_Helpers.reconcile_types (Value_Type.Float Bits.Bits_32) Value_Type.Boolean . should_equal Value_Type.Mixed -main = Test_Suite.run_main spec + +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter From a16aa29e7e8bc5eba1578a051f8f20c924ce0a32 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 19 Jan 2024 12:33:28 +0100 Subject: [PATCH 55/93] Refactor Table_Tests/src/In_Memory to Test_New --- .../src/In_Memory/Aggregate_Column_Spec.enso | 271 +++++++++-------- .../src/In_Memory/Builders_Spec.enso | 14 +- .../src/In_Memory/Column_Format_Spec.enso | 149 +++++----- .../src/In_Memory/Column_Spec.enso | 98 ++++--- .../src/In_Memory/Fan_Out_Spec.enso | 17 +- .../src/In_Memory/Integer_Overflow_Spec.enso | 43 +-- .../src/In_Memory/Lossy_Conversions_Spec.enso | 23 +- test/Table_Tests/src/In_Memory/Main.enso | 34 +-- .../src/In_Memory/Parse_To_Table_Spec.enso | 42 +-- .../src/In_Memory/Split_Tokenize_Spec.enso | 114 ++++---- .../src/In_Memory/Table_Conversion_Spec.enso | 254 ++++++++-------- .../src/In_Memory/Table_Date_Spec.enso | 83 ++++-- .../src/In_Memory/Table_Date_Time_Spec.enso | 70 +++-- .../src/In_Memory/Table_Format_Spec.enso | 238 +++++++-------- .../Table_Tests/src/In_Memory/Table_Spec.enso | 274 +++++++++--------- .../src/In_Memory/Table_Time_Of_Day_Spec.enso | 71 +++-- test/Table_Tests/src/Util.enso | 12 +- 17 files changed, 969 insertions(+), 838 deletions(-) diff --git a/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso b/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso index 19c6de7dcba9..e5b250bf9b95 100644 --- a/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso @@ -8,12 +8,21 @@ import Standard.Table.Internal.Aggregate_Column_Helper import Standard.Table.Internal.Java_Problems import Standard.Table.Internal.Problem_Builder.Problem_Builder -from Standard.Test import Test, Test_Suite 
-import Standard.Test.Extensions +from Standard.Test_New import all -spec = Test.group "Aggregate Columns" <| - simple_table = Table.new [["count", [1, 2, Nothing, 3, Nothing]], ["is_valid", [Nothing, False, True, False, Nothing]], ["float", [3.4, 1, 5.6, 2.1, Nothing]], ["text", ["A", "", Nothing, "B,C", Nothing]]] - empty_table = Table.new [["count", []], ["is_valid", []], ["text", []]] +type Data + Value ~data + + simple_table self = self.data.at 0 + empty_table self = self.data.at 1 + + setup = Data.Value <| + simple_table = Table.new [["count", [1, 2, Nothing, 3, Nothing]], ["is_valid", [Nothing, False, True, False, Nothing]], ["float", [3.4, 1, 5.6, 2.1, Nothing]], ["text", ["A", "", Nothing, "B,C", Nothing]]] + empty_table = Table.new [["count", []], ["is_valid", []], ["text", []]] + [simple_table, empty_table] + + +add_specs suite_builder = suite_builder.group "Aggregate Columns" group_builder-> test_name = "Test Column" @@ -34,155 +43,161 @@ spec = Test.group "Aggregate Columns" <| if epsilon != False then ((result - expected_result).abs < epsilon).should_be_true else result.should_equal expected_result - Test.specify "should be able to count a set" <| - test_aggregator simple_table (Count) "Count" simple_table.row_count - test_aggregator simple_table (Count test_name) test_name simple_table.row_count - test_aggregator empty_table (Count test_name) test_name empty_table.row_count - - Test.specify "should be able to count missing values in a set" <| - test_aggregator simple_table (Count_Nothing 0) "Count Nothing count" 2 - test_aggregator simple_table (Count_Nothing 0 test_name) test_name 2 - test_aggregator simple_table (Count_Nothing "text" test_name) test_name 2 - test_aggregator empty_table (Count_Nothing 0 test_name) test_name empty_table.row_count - - Test.specify "should be able to count non missing values in a set" <| - test_aggregator simple_table (Count_Not_Nothing 0) "Count Not Nothing count" 3 - test_aggregator simple_table (Count_Not_Nothing 0 
test_name) test_name 3 - test_aggregator simple_table (Count_Not_Nothing "text" test_name) test_name 3 - test_aggregator empty_table (Count_Not_Nothing 0 test_name) test_name empty_table.row_count - - Test.specify "should be able to count empties in a set of Texts" <| - test_aggregator simple_table (Count_Empty -1) "Count Empty text" 3 - test_aggregator simple_table (Count_Empty -1 test_name) test_name 3 - test_aggregator simple_table (Count_Empty "text" test_name) test_name 3 + data = Data.setup + + group_builder.specify "should be able to count a set" <| + test_aggregator data.simple_table (Count) "Count" data.simple_table.row_count + test_aggregator data.simple_table (Count test_name) test_name data.simple_table.row_count + test_aggregator data.empty_table (Count test_name) test_name data.empty_table.row_count + + group_builder.specify "should be able to count missing values in a set" <| + test_aggregator data.simple_table (Count_Nothing 0) "Count Nothing count" 2 + test_aggregator data.simple_table (Count_Nothing 0 test_name) test_name 2 + test_aggregator data.simple_table (Count_Nothing "text" test_name) test_name 2 + test_aggregator data.empty_table (Count_Nothing 0 test_name) test_name data.empty_table.row_count + + group_builder.specify "should be able to count non missing values in a set" <| + test_aggregator data.simple_table (Count_Not_Nothing 0) "Count Not Nothing count" 3 + test_aggregator data.simple_table (Count_Not_Nothing 0 test_name) test_name 3 + test_aggregator data.simple_table (Count_Not_Nothing "text" test_name) test_name 3 + test_aggregator data.empty_table (Count_Not_Nothing 0 test_name) test_name data.empty_table.row_count + + group_builder.specify "should be able to count empties in a set of Texts" <| + test_aggregator data.simple_table (Count_Empty -1) "Count Empty text" 3 + test_aggregator data.simple_table (Count_Empty -1 test_name) test_name 3 + test_aggregator data.simple_table (Count_Empty "text" test_name) test_name 3 # TODO [RW] 
Re-enable this once #6281 is implemented. - # test_aggregator empty_table (Count_Empty 0 test_name) test_name empty_table.row_count + # test_aggregator data.empty_table (Count_Empty 0 test_name) test_name data.empty_table.row_count - Test.specify "should be able to count non empties in a set of Texts" <| - test_aggregator simple_table (Count_Not_Empty -1) "Count Not Empty text" 2 - test_aggregator simple_table (Count_Not_Empty -1 test_name) test_name 2 - test_aggregator simple_table (Count_Not_Empty "text" test_name) test_name 2 + group_builder.specify "should be able to count non empties in a set of Texts" <| + test_aggregator data.simple_table (Count_Not_Empty -1) "Count Not Empty text" 2 + test_aggregator data.simple_table (Count_Not_Empty -1 test_name) test_name 2 + test_aggregator data.simple_table (Count_Not_Empty "text" test_name) test_name 2 # TODO [RW] Re-enable this once #6281 is implemented. - # test_aggregator empty_table (Count_Not_Empty 0 test_name) test_name empty_table.row_count + # test_aggregator data.empty_table (Count_Not_Empty 0 test_name) test_name data.empty_table.row_count - Test.specify "should be able to total a set of values" <| - test_aggregator simple_table (Sum -2) "Sum float" 12.1 - test_aggregator simple_table (Sum -2 test_name) test_name 12.1 - test_aggregator simple_table (Sum "float" test_name) test_name 12.1 + group_builder.specify "should be able to total a set of values" <| + test_aggregator data.simple_table (Sum -2) "Sum float" 12.1 + test_aggregator data.simple_table (Sum -2 test_name) test_name 12.1 + test_aggregator data.simple_table (Sum "float" test_name) test_name 12.1 # TODO [RW] Re-enable this once #6281 is implemented. 
- # test_aggregator empty_table (Sum 0 test_name) test_name Nothing + # test_aggregator data.empty_table (Sum 0 test_name) test_name Nothing - Test.specify "should be able to average a set of values" <| - test_aggregator simple_table (Average -2) "Average float" 3.025 0.000001 - test_aggregator simple_table (Average -2 test_name) test_name 3.025 0.000001 - test_aggregator simple_table (Average "float" test_name) test_name 3.025 0.000001 + group_builder.specify "should be able to average a set of values" <| + test_aggregator data.simple_table (Average -2) "Average float" 3.025 0.000001 + test_aggregator data.simple_table (Average -2 test_name) test_name 3.025 0.000001 + test_aggregator data.simple_table (Average "float" test_name) test_name 3.025 0.000001 # TODO [RW] Re-enable this once #6281 is implemented. - # test_aggregator empty_table (Average 0 test_name) test_name Nothing + # test_aggregator data.empty_table (Average 0 test_name) test_name Nothing - Test.specify "should be able to compute standard deviation a set of values" <| - test_aggregator simple_table (Standard_Deviation -2) "Standard Deviation float" 1.977161 0.000001 - test_aggregator simple_table (Standard_Deviation -2 test_name) test_name 1.977161 0.000001 - test_aggregator simple_table (Standard_Deviation "float" test_name) test_name 1.977161 0.000001 + group_builder.specify "should be able to compute standard deviation a set of values" <| + test_aggregator data.simple_table (Standard_Deviation -2) "Standard Deviation float" 1.977161 0.000001 + test_aggregator data.simple_table (Standard_Deviation -2 test_name) test_name 1.977161 0.000001 + test_aggregator data.simple_table (Standard_Deviation "float" test_name) test_name 1.977161 0.000001 # TODO [RW] Re-enable this once #6281 is implemented. 
- # test_aggregator empty_table (Standard_Deviation 0 test_name) test_name Nothing + # test_aggregator data.empty_table (Standard_Deviation 0 test_name) test_name Nothing - Test.specify "should be able to compute standard deviation of a population a set of values" <| - test_aggregator simple_table (Standard_Deviation -2 population=True) "Standard Deviation float" 1.712271 0.000001 - test_aggregator simple_table (Standard_Deviation -2 test_name population=True) test_name 1.712271 0.000001 - test_aggregator simple_table (Standard_Deviation "float" test_name population=True) test_name 1.712271 0.000001 + group_builder.specify "should be able to compute standard deviation of a population a set of values" <| + test_aggregator data.simple_table (Standard_Deviation -2 population=True) "Standard Deviation float" 1.712271 0.000001 + test_aggregator data.simple_table (Standard_Deviation -2 test_name population=True) test_name 1.712271 0.000001 + test_aggregator data.simple_table (Standard_Deviation "float" test_name population=True) test_name 1.712271 0.000001 # TODO [RW] Re-enable this once #6281 is implemented. 
- # test_aggregator empty_table (Standard_Deviation 0 test_name population=True) test_name Nothing + # test_aggregator data.empty_table (Standard_Deviation 0 test_name population=True) test_name Nothing - Test.specify "should be able to compute median a set of values" <| - test_aggregator simple_table (Median -2) "Median float" 2.75 0.000001 - test_aggregator simple_table (Median -2 test_name) test_name 2.75 0.000001 - test_aggregator simple_table (Median "float" test_name) test_name 2.75 0.000001 + group_builder.specify "should be able to compute median a set of values" <| + test_aggregator data.simple_table (Median -2) "Median float" 2.75 0.000001 + test_aggregator data.simple_table (Median -2 test_name) test_name 2.75 0.000001 + test_aggregator data.simple_table (Median "float" test_name) test_name 2.75 0.000001 # TODO [RW] Re-enable this once #6281 is implemented. - # test_aggregator empty_table (Median 0 test_name) test_name Nothing - - Test.specify "should be able to compute first of a set of values including missing" <| - test_aggregator simple_table (First 1 ignore_nothing=False) "First is_valid" Nothing - test_aggregator simple_table (First 1 test_name ignore_nothing=False) test_name Nothing - test_aggregator simple_table (First "is_valid" test_name ignore_nothing=False) test_name Nothing - test_aggregator empty_table (First 0 test_name ignore_nothing=False) test_name Nothing - - Test.specify "should be able to compute first of a set of values excluding missing" <| - test_aggregator simple_table (First 1) "First is_valid" False - test_aggregator simple_table (First 1 test_name) test_name False - test_aggregator simple_table (First "is_valid" test_name) test_name False - test_aggregator empty_table (First 0 test_name) test_name Nothing - - Test.specify "should be able to compute last of a set of values including missing" <| - test_aggregator simple_table (Last 1 ignore_nothing=False) "Last is_valid" Nothing - test_aggregator simple_table (Last 1 test_name 
ignore_nothing=False) test_name Nothing - test_aggregator simple_table (Last "is_valid" test_name ignore_nothing=False) test_name Nothing - test_aggregator empty_table (Last 0 test_name ignore_nothing=False) test_name Nothing - - Test.specify "should be able to compute last of a set of values excluding missing" <| - test_aggregator simple_table (Last 1) "Last is_valid" False - test_aggregator simple_table (Last 1 test_name) test_name False - test_aggregator simple_table (Last "is_valid" test_name) test_name False - test_aggregator empty_table (Last 0 test_name) test_name Nothing - - Test.specify "should be able to concatenate a set of values excluding missing" <| - test_aggregator simple_table (Concatenate -1 "" ',' '[' ']' '"') "Concatenate text" '[A,"",,"B,C",]' - test_aggregator simple_table (Concatenate -1 test_name) test_name 'AB,C' - test_aggregator simple_table (Concatenate "text" test_name ',') test_name 'A,,,B,C,' + # test_aggregator data.empty_table (Median 0 test_name) test_name Nothing + + group_builder.specify "should be able to compute first of a set of values including missing" <| + test_aggregator data.simple_table (First 1 ignore_nothing=False) "First is_valid" Nothing + test_aggregator data.simple_table (First 1 test_name ignore_nothing=False) test_name Nothing + test_aggregator data.simple_table (First "is_valid" test_name ignore_nothing=False) test_name Nothing + test_aggregator data.empty_table (First 0 test_name ignore_nothing=False) test_name Nothing + + group_builder.specify "should be able to compute first of a set of values excluding missing" <| + test_aggregator data.simple_table (First 1) "First is_valid" False + test_aggregator data.simple_table (First 1 test_name) test_name False + test_aggregator data.simple_table (First "is_valid" test_name) test_name False + test_aggregator data.empty_table (First 0 test_name) test_name Nothing + + group_builder.specify "should be able to compute last of a set of values including missing" <| + 
test_aggregator data.simple_table (Last 1 ignore_nothing=False) "Last is_valid" Nothing + test_aggregator data.simple_table (Last 1 test_name ignore_nothing=False) test_name Nothing + test_aggregator data.simple_table (Last "is_valid" test_name ignore_nothing=False) test_name Nothing + test_aggregator data.empty_table (Last 0 test_name ignore_nothing=False) test_name Nothing + + group_builder.specify "should be able to compute last of a set of values excluding missing" <| + test_aggregator data.simple_table (Last 1) "Last is_valid" False + test_aggregator data.simple_table (Last 1 test_name) test_name False + test_aggregator data.simple_table (Last "is_valid" test_name) test_name False + test_aggregator data.empty_table (Last 0 test_name) test_name Nothing + + group_builder.specify "should be able to concatenate a set of values excluding missing" <| + test_aggregator data.simple_table (Concatenate -1 "" ',' '[' ']' '"') "Concatenate text" '[A,"",,"B,C",]' + test_aggregator data.simple_table (Concatenate -1 test_name) test_name 'AB,C' + test_aggregator data.simple_table (Concatenate "text" test_name ',') test_name 'A,,,B,C,' # TODO [RW] Re-enable this once #6281 is implemented. 
- # test_aggregator empty_table (Concatenate 0 test_name) test_name Nothing - - Test.specify "should be able to count distinct items on a single set of values" <| - test_aggregator simple_table (Count_Distinct 0) "Count Distinct count" 4 - test_aggregator simple_table (Count_Distinct 0 test_name) test_name 4 - test_aggregator simple_table (Count_Distinct "count" test_name) test_name 4 - test_aggregator empty_table (Count_Distinct 0 test_name) test_name 0 - test_aggregator simple_table (Count_Distinct "float" test_name ignore_nothing=False) test_name 5 - test_aggregator simple_table (Count_Distinct "float" test_name ignore_nothing=True) test_name 4 - - Test.specify "should be able to count distinct items on a multiple sets of values" <| - test_aggregator simple_table (Count_Distinct [0, 1]) "Count Distinct count is_valid" 5 - test_aggregator simple_table (Count_Distinct ["is_valid", "float"]) "Count Distinct is_valid float" 5 - test_aggregator simple_table (Count_Distinct ["is_valid", "float"] ignore_nothing=True) "Count Distinct is_valid float" 4 - - Test.specify "should be able to get the minimum of a set of values" <| - test_aggregator simple_table (Minimum -2) "Minimum float" 1 - test_aggregator simple_table (Minimum -2 test_name) test_name 1 - test_aggregator simple_table (Minimum "float" test_name) test_name 1 - test_aggregator empty_table (Minimum 0 test_name) test_name Nothing - - Test.specify "should be able to get the maximum of a set of values" <| - test_aggregator simple_table (Maximum -2) "Maximum float" 5.6 - test_aggregator simple_table (Maximum -2 test_name) test_name 5.6 - test_aggregator simple_table (Maximum "float" test_name) test_name 5.6 - test_aggregator empty_table (Maximum 0 test_name) test_name Nothing - - Test.specify "should be able to get the shortest of a set of texts" <| - test_aggregator simple_table (Shortest -1) "Shortest text" "" - test_aggregator simple_table (Shortest -1 test_name) test_name "" - test_aggregator simple_table 
(Shortest "text" test_name) test_name "" + # test_aggregator data.empty_table (Concatenate 0 test_name) test_name Nothing + + group_builder.specify "should be able to count distinct items on a single set of values" <| + test_aggregator data.simple_table (Count_Distinct 0) "Count Distinct count" 4 + test_aggregator data.simple_table (Count_Distinct 0 test_name) test_name 4 + test_aggregator data.simple_table (Count_Distinct "count" test_name) test_name 4 + test_aggregator data.empty_table (Count_Distinct 0 test_name) test_name 0 + test_aggregator data.simple_table (Count_Distinct "float" test_name ignore_nothing=False) test_name 5 + test_aggregator data.simple_table (Count_Distinct "float" test_name ignore_nothing=True) test_name 4 + + group_builder.specify "should be able to count distinct items on a multiple sets of values" <| + test_aggregator data.simple_table (Count_Distinct [0, 1]) "Count Distinct count is_valid" 5 + test_aggregator data.simple_table (Count_Distinct ["is_valid", "float"]) "Count Distinct is_valid float" 5 + test_aggregator data.simple_table (Count_Distinct ["is_valid", "float"] ignore_nothing=True) "Count Distinct is_valid float" 4 + + group_builder.specify "should be able to get the minimum of a set of values" <| + test_aggregator data.simple_table (Minimum -2) "Minimum float" 1 + test_aggregator data.simple_table (Minimum -2 test_name) test_name 1 + test_aggregator data.simple_table (Minimum "float" test_name) test_name 1 + test_aggregator data.empty_table (Minimum 0 test_name) test_name Nothing + + group_builder.specify "should be able to get the maximum of a set of values" <| + test_aggregator data.simple_table (Maximum -2) "Maximum float" 5.6 + test_aggregator data.simple_table (Maximum -2 test_name) test_name 5.6 + test_aggregator data.simple_table (Maximum "float" test_name) test_name 5.6 + test_aggregator data.empty_table (Maximum 0 test_name) test_name Nothing + + group_builder.specify "should be able to get the shortest of a set of 
texts" <| + test_aggregator data.simple_table (Shortest -1) "Shortest text" "" + test_aggregator data.simple_table (Shortest -1 test_name) test_name "" + test_aggregator data.simple_table (Shortest "text" test_name) test_name "" # TODO [RW] Re-enable this once #6281 is implemented. - # test_aggregator empty_table (Shortest 0 test_name) test_name Nothing + # test_aggregator data.empty_table (Shortest 0 test_name) test_name Nothing - Test.specify "should be able to get the longest of a set of texts" <| - test_aggregator simple_table (Longest -1) "Longest text" "B,C" - test_aggregator simple_table (Longest -1 test_name) test_name "B,C" - test_aggregator simple_table (Longest "text" test_name) test_name "B,C" + group_builder.specify "should be able to get the longest of a set of texts" <| + test_aggregator data.simple_table (Longest -1) "Longest text" "B,C" + test_aggregator data.simple_table (Longest -1 test_name) test_name "B,C" + test_aggregator data.simple_table (Longest "text" test_name) test_name "B,C" # TODO [RW] Re-enable this once #6281 is implemented. 
- # test_aggregator empty_table (Longest 0 test_name) test_name Nothing + # test_aggregator data.empty_table (Longest 0 test_name) test_name Nothing - Test.specify "should be able to get the mode of a set of numbers" <| + group_builder.specify "should be able to get the mode of a set of numbers" <| mode_table = Table.new [["tests", [1,2,3,4,2,4,1,2,3,4,2,1,3,5,2,1,2,4,5,2,1,2,3,5,6,1,2,2]]] test_aggregator mode_table (Mode -1) "Mode tests" 2 test_aggregator mode_table (Mode -1 test_name) test_name 2 - test_aggregator empty_table (Mode 0 test_name) test_name Nothing + test_aggregator data.empty_table (Mode 0 test_name) test_name Nothing - Test.specify "should be able to get the percentile of a set of numbers" <| + group_builder.specify "should be able to get the percentile of a set of numbers" <| percentile_table = Table.new [["tests", [67,23,56,93,36,47,45,1,88,44,49,13,74,76,4,97,49,81,81,37]]] test_aggregator percentile_table (Percentile 0 0) "0%-ile tests" 1 test_aggregator percentile_table (Percentile 0 -1 test_name) test_name 1 test_aggregator percentile_table (Percentile 0.15 0) "15%-ile tests" 21.5 test_aggregator percentile_table (Percentile 0.25 0) "25%-ile tests" 36.75 test_aggregator percentile_table (Percentile 0.66 0) "66%-ile tests" 70.78 - test_aggregator empty_table (Mode 0 test_name) test_name Nothing + test_aggregator data.empty_table (Mode 0 test_name) test_name Nothing + +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter -main = Test_Suite.run_main spec diff --git a/test/Table_Tests/src/In_Memory/Builders_Spec.enso b/test/Table_Tests/src/In_Memory/Builders_Spec.enso index dc142981ba5e..7590a5c6344b 100644 --- a/test/Table_Tests/src/In_Memory/Builders_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Builders_Spec.enso @@ -4,16 +4,18 @@ from Standard.Table import Column from Standard.Table.Internal.Java_Exports import make_inferred_builder import Standard.Table.Internal.Java_Problems -from Standard.Test 
import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all type My Data x y -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter -spec = Test.group "[In-Memory] Storage Builders" <| - Test.specify "should correctly incrementally build a table in presence of lots of null values" <| +add_specs suite_builder = suite_builder.group "[In-Memory] Storage Builders" group_builder-> + group_builder.specify "should correctly incrementally build a table in presence of lots of null values" <| ## This test is introduced to avoid regressions related to a bug where the underlying array was not resized to big enough size after many null values were inserted to a builder. @@ -34,7 +36,7 @@ spec = Test.group "[In-Memory] Storage Builders" <| column.to_vector . should_equal vector Problems.assume_no_problems r - Test.specify "Inferred Builder should correctly resize when retyping to a mixed column, with an underestimated initial size" <| + group_builder.specify "Inferred Builder should correctly resize when retyping to a mixed column, with an underestimated initial size" <| mixed_values = [10, 11, 22, 23, 24, 25, '2020-02-28'] r = Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator-> builder = make_inferred_builder 3 java_problem_aggregator diff --git a/test/Table_Tests/src/In_Memory/Column_Format_Spec.enso b/test/Table_Tests/src/In_Memory/Column_Format_Spec.enso index f9b838d356dd..7812ad66eccb 100644 --- a/test/Table_Tests/src/In_Memory/Column_Format_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Column_Format_Spec.enso @@ -10,20 +10,19 @@ from Standard.Table import Column, Value_Type from Standard.Table.Errors import Invalid_Value_Type from Standard.Table.Internal.Column_Format import all -import Standard.Test.Extensions -from Standard.Test import Test, Test_Suite +from Standard.Test_New import all from project.Util import 
all -spec = - Test.group "Date Column.format, with format string" <| - Test.specify "Date column" <| +add_specs suite_builder = + suite_builder.group "Date Column.format, with format string" group_builder-> + group_builder.specify "Date column" <| input = Column.from_vector "values" [Date.new 2020 12 21, Date.new 2023 4 25] expected = Column.from_vector "values" ["20201221", "20230425"] actual = input.format "yyyyMMdd" actual . should_equal expected - Test.specify "Date with locale" <| + group_builder.specify "Date with locale" <| input = Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25] expected_default = Column.from_vector "values" ["21. June 2020", "25. April 2023"] expected_gb = Column.from_vector "values" ["21. June 2020", "25. April 2023"] @@ -33,61 +32,61 @@ spec = input.format (Date_Time_Formatter.from "d. MMMM yyyy" Locale.uk) . should_equal expected_gb input.format (Date_Time_Formatter.from "d. MMMM yyyy" Locale.france) . should_equal expected_fr - Test.specify "Empty/Nothing format" <| + group_builder.specify "Empty/Nothing format" <| input = Column.from_vector "values" [Date.new 2020 12 21, Date.new 2023 4 25] expected = Column.from_vector "values" ['2020-12-21', '2023-04-25'] input.format . should_equal expected input.format "" . should_equal expected input.format Nothing . should_equal expected - Test.specify "Bad format" <| + group_builder.specify "Bad format" <| input = Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25] input.format "jjjjjj" . 
should_fail_with Date_Time_Format_Parse_Error - Test.group "Date Column.format, with format Column" <| - Test.specify "Date column" <| + suite_builder.group "Date Column.format, with format Column" group_builder-> + group_builder.specify "Date column" <| input = Column.from_vector "values" [Date.new 2020 12 21, Date.new 2023 4 25] formats = Column.from_vector "formats" ["yyyyMMdd", "dd-MM-yyyy"] expected = Column.from_vector "values" ["20201221", "25-04-2023"] actual = input.format formats actual . should_equal expected - Test.specify "Date with locale" <| + group_builder.specify "Date with locale" <| input = Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25] formats = Column.from_vector "formats" ["d. MMMM yyyy", "d-MMMM-yyyy"] expected = Column.from_vector "values" ["21. juin 2020", "25-avril-2023"] input.format formats (Locale.new "fr") . should_equal expected - Test.specify "Empty/Nothing format, with format Column" <| + group_builder.specify "Empty/Nothing format, with format Column" <| input = Column.from_vector "values" [Date.new 2020 12 21, Date.new 2023 4 25] formats = Column.from_vector "formats" ["", Nothing] expected = Column.from_vector "values" ["2020-12-21", "2023-04-25"] actual = input.format formats actual . should_equal expected - Test.specify "Bad format" <| + group_builder.specify "Bad format" <| input = Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25, Date.new 2023 4 26] formats = Column.from_vector "formats" ["yyyyMMdd", "jjjjj", "FFF"] input.format formats . should_fail_with Date_Time_Format_Parse_Error - Test.specify "Bad format column type" <| + group_builder.specify "Bad format column type" <| input = Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25, Date.new 2023 4 26] formats = Column.from_vector "formats" [3, 4, 5] input.format formats . 
should_fail_with Invalid_Value_Type - Test.specify "column length mismatch" <| + group_builder.specify "column length mismatch" <| input = Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25] formats = Column.from_vector "formats" ["yyyyMMdd", "DDDDD", "w"] input.format formats . should_fail_with Illegal_Argument - Test.group "Date_Time Column.format, with format string" <| - Test.specify "Date_Time column" <| + suite_builder.group "Date_Time Column.format, with format string" group_builder-> + group_builder.specify "Date_Time column" <| input = Column.from_vector "values" [Date_Time.new 2020 12 21 8 10 20, Date_Time.new 2023 4 25 14 25 2] expected = Column.from_vector "values" ["20201221 08.10.20", "20230425 14.25.02"] actual = input.format "yyyyMMdd HH.mm.ss" actual . should_equal expected - Test.specify "Date_Time with locale" <| + group_builder.specify "Date_Time with locale" <| input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2] expected_default = Column.from_vector "values" ["21. June 2020 08.10.20", "25. April 2023 14.25.02"] expected_gb = Column.from_vector "values" ["21. June 2020 08.10.20", "25. April 2023 14.25.02"] @@ -97,7 +96,7 @@ spec = input.format (Date_Time_Formatter.from "d. MMMM yyyy HH.mm.ss" Locale.uk) . should_equal expected_gb input.format (Date_Time_Formatter.from "d. MMMM yyyy HH.mm.ss" Locale.france) . should_equal expected_fr - Test.specify "overriding the Locale with `format` argument" <| + group_builder.specify "overriding the Locale with `format` argument" <| formatter = Date_Time_Formatter.from "d. MMMM yyyy HH.mm.ss" Locale.france input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2] expected_fr = Column.from_vector "values" ["21. juin 2020 08.10.20", "25. 
avril 2023 14.25.02"] @@ -107,7 +106,7 @@ spec = # If I provide a locale argument, it overrides what is already in the formatter: input.format formatter Locale.poland . should_equal expected_pl - Test.specify "Empty/Nothing format" <| + group_builder.specify "Empty/Nothing format" <| zone = Time_Zone.parse "US/Hawaii" input = Column.from_vector "values" [Date_Time.new 2020 12 21 8 10 20 zone=zone, Date_Time.new 2023 4 25 14 25 2 zone=zone] expected = Column.from_vector "values" ['2020-12-21 08:10:20-10:00[US/Hawaii]', '2023-04-25 14:25:02-10:00[US/Hawaii]'] @@ -115,25 +114,25 @@ spec = input.format "" . should_equal expected input.format Nothing . should_equal expected - Test.specify "Bad format" <| + group_builder.specify "Bad format" <| input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2] input.format "jjjjjjjj" . should_fail_with Date_Time_Format_Parse_Error - Test.group "Date_Time Column.format, with format Column" <| - Test.specify "Date_Time column" <| + suite_builder.group "Date_Time Column.format, with format Column" group_builder-> + group_builder.specify "Date_Time column" <| input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2] formats = Column.from_vector "formats" ["yyyyMMdd HH.mm.ss", "dd-MM-yyyy HH.mm.ss"] expected = Column.from_vector "values" ["20200621 08.10.20", "25-04-2023 14.25.02"] actual = input.format formats actual . should_equal expected - Test.specify "Date_Time with locale" <| + group_builder.specify "Date_Time with locale" <| input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2] formats = Column.from_vector "formats" ["d. MMMM yyyy HH.mm.ss", "d-MMMM-yyyy HH.mm.ss"] expected = Column.from_vector "values" ["21. juin 2020 08.10.20", "25-avril-2023 14.25.02"] input.format formats (Locale.new "fr") . 
should_equal expected - Test.specify "Empty/Nothing format, with format Column" <| + group_builder.specify "Empty/Nothing format, with format Column" <| zone = Time_Zone.parse "US/Hawaii" input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20 zone=zone, Date_Time.new 2023 4 25 14 25 2 zone=zone] formats = Column.from_vector "formats" ["", Nothing] @@ -141,29 +140,29 @@ spec = actual = input.format formats actual . should_equal expected - Test.specify "Bad format" <| + group_builder.specify "Bad format" <| input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2, Date_Time.new 2023 4 26 3 4 5] formats = Column.from_vector "formats" ["yyyyMMdd HH.mm.ss", "jjjjj", "FFF"] input.format formats . should_fail_with Date_Time_Format_Parse_Error - Test.specify "Bad format column type" <| + group_builder.specify "Bad format column type" <| input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2] formats = Column.from_vector "formats" [3, 4, 5] input.format formats . should_fail_with Invalid_Value_Type - Test.specify "column length mismatch" <| + group_builder.specify "column length mismatch" <| input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2] formats = Column.from_vector "formats" ["yyyyMMdd", "jjjj", "w"] input.format formats . should_fail_with Illegal_Argument - Test.group "Time_Of_Day Column.format, with format string" <| - Test.specify "Time_Of_Day column" <| + suite_builder.group "Time_Of_Day Column.format, with format string" group_builder-> + group_builder.specify "Time_Of_Day column" <| input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2] expected = Column.from_vector "values" ["08.10.20", "14.25.02"] actual = input.format "HH.mm.ss" actual . 
should_equal expected - Test.specify "Time_Of_Day with locale" <| + group_builder.specify "Time_Of_Day with locale" <| input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2] # Note that the results are all the same. expected = Column.from_vector "values" ["08.10.20", "14.25.02"] @@ -171,178 +170,182 @@ spec = input.format "HH.mm.ss" (Locale.default) . should_equal expected input.format "HH.mm.ss" (Locale.new "gb") . should_equal expected - Test.specify "Empty/Nothing format" <| + group_builder.specify "Empty/Nothing format" <| input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2] expected = Column.from_vector "values" ['08:10:20', '14:25:02'] input.format . should_equal expected input.format "" . should_equal expected input.format Nothing . should_equal expected - Test.specify "Bad format" <| + group_builder.specify "Bad format" <| input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2] input.format "jjjj" . should_fail_with Date_Time_Format_Parse_Error - Test.specify "Format for wrong date type" <| + group_builder.specify "Format for wrong date type" <| input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2] input.format "yyyyMMdd HH.mm.ss" . should_fail_with Time_Error - Test.group "Time_Of_Day Column.format, with format Column" <| - Test.specify "Time_Of_Day column" <| + suite_builder.group "Time_Of_Day Column.format, with format Column" group_builder-> + group_builder.specify "Time_Of_Day column" <| input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2] formats = Column.from_vector "formats" ["HH.mm.ss", "ss mm HH"] expected = Column.from_vector "values" ["08.10.20", "02 25 14"] actual = input.format formats actual . 
should_equal expected - Test.specify "Time_Of_Day with locale" <| + group_builder.specify "Time_Of_Day with locale" <| input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2] formats = Column.from_vector "formats" ["HH.mm.ss", "ss mm HH"] expected = Column.from_vector "values" ["08.10.20", "02 25 14"] input.format formats (Locale.new "fr") . should_equal expected - Test.specify "Empty/Nothing format, with format Column" <| + group_builder.specify "Empty/Nothing format, with format Column" <| input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2] formats = Column.from_vector "formats" ["", Nothing] expected = Column.from_vector "values" ["08:10:20", "14:25:02"] actual = input.format formats actual . should_equal expected - Test.specify "Bad format" <| + group_builder.specify "Bad format" <| input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2, Time_Of_Day.new 3 4 5] formats = Column.from_vector "formats" ["HH.mm.ss", "jjjjj", "FFF"] input.format formats . should_fail_with Date_Time_Format_Parse_Error - Test.specify "Bad format column type" <| + group_builder.specify "Bad format column type" <| input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2] formats = Column.from_vector "formats" [3, 4, 5] input.format formats . should_fail_with Invalid_Value_Type - Test.specify "column length mismatch" <| + group_builder.specify "column length mismatch" <| input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2] formats = Column.from_vector "formats" ["yyyyMMdd", "jjjjj", "w"] input.format formats . 
should_fail_with Illegal_Argument - Test.group "Boolean Column.format, with format string" <| - Test.specify "Boolean column" <| + suite_builder.group "Boolean Column.format, with format string" group_builder-> + group_builder.specify "Boolean column" <| input = Column.from_vector "values" [True, False] expected = Column.from_vector "values" ["t", "f"] actual = input.format "t|f" actual . should_equal expected - Test.specify "Empty/Nothing format" <| + group_builder.specify "Empty/Nothing format" <| input = Column.from_vector "values" [True, False] expected = Column.from_vector "values" ["True", "False"] input.format . should_equal expected input.format "" . should_equal expected input.format Nothing . should_equal expected - Test.specify "Bad format" <| + group_builder.specify "Bad format" <| input = Column.from_vector "values" [True, False] input.format "x|y|z" . should_fail_with Illegal_Argument - Test.group "Boolean Column.format, with format Column" <| - Test.specify "Time_Of_Day column" <| + suite_builder.group "Boolean Column.format, with format Column" group_builder-> + group_builder.specify "Time_Of_Day column" <| input = Column.from_vector "values" [True, False, True, False] formats = Column.from_vector "formats" ["True|False", "True|False", "troo|valz", "troo|valz"] expected = Column.from_vector "values" ["True", "False", "troo", "valz"] actual = input.format formats actual . should_equal expected - Test.specify "Empty/Nothing format, with format Column" <| + group_builder.specify "Empty/Nothing format, with format Column" <| input = Column.from_vector "values" [True, False] formats = Column.from_vector "formats" ["", Nothing] expected = Column.from_vector "values" ["True", "False"] input.format formats . should_equal expected - Test.specify "Bad format" <| + group_builder.specify "Bad format" <| input = Column.from_vector "values" [True, False] formats = Column.from_vector "formats" ["True|False", "xyzzy"] input.format formats . 
should_fail_with Illegal_Argument - spec_with_numeric_type "Integer" (Value_Type.Integer Bits.Bits_64) - spec_with_numeric_type "Float" (Value_Type.Float Bits.Bits_64) + spec_with_numeric_type suite_builder "Integer" (Value_Type.Integer Bits.Bits_64) + spec_with_numeric_type suite_builder "Float" (Value_Type.Float Bits.Bits_64) - Test.group "Integer" <| - Test.specify "Integer Column (constructing the column directly from Integers)" <| + suite_builder.group "Integer" group_builder-> + group_builder.specify "Integer Column (constructing the column directly from Integers)" <| input = Column.from_vector "values" [100000000, 2222, 3] expected = Column.from_vector "values" ["100,000,000.00", "2,222.00", "3.00"] input.format "#,##0.00" . should_equal expected - Test.group "Numeric, empty/Nothing" <| - Test.specify "Integer" <| + suite_builder.group "Numeric, empty/Nothing" group_builder-> + group_builder.specify "Integer" <| input = Column.from_vector "values" ["100000000", "2222", "3"] . parse (Value_Type.Integer Bits.Bits_64) expected = Column.from_vector "values" ["100000000", "2222", "3"] input.format . should_equal expected input.format "" . should_equal expected input.format Nothing . should_equal expected - Test.specify "Float" <| + group_builder.specify "Float" <| input = Column.from_vector "values" ["100000000", "2222", "3"] . parse (Value_Type.Float Bits.Bits_64) expected = Column.from_vector "values" ['1.0E8', '2222.0', '3.0'] input.format . should_equal expected input.format "" . should_equal expected input.format Nothing . should_equal expected - Test.specify "Integer, with format Column" <| + group_builder.specify "Integer, with format Column" <| input = Column.from_vector "values" ["100000000", "2222", "3"] . parse (Value_Type.Integer Bits.Bits_64) formats = Column.from_vector "formats" ["", Nothing, Nothing] expected = Column.from_vector "values" ["100000000", "2222", "3"] input.format formats . 
should_equal expected - Test.specify "Float, with format Column" <| + group_builder.specify "Float, with format Column" <| input = Column.from_vector "values" ["100000000", "2222", "3"] . parse (Value_Type.Float Bits.Bits_64) formats = Column.from_vector "formats" ["", Nothing, Nothing] expected = Column.from_vector "values" ['1.0E8', '2222.0', '3.0'] input.format formats . should_equal expected - Test.group "Errors" <| - Test.specify "Unsupported column type" <| + suite_builder.group "Errors" group_builder-> + group_builder.specify "Unsupported column type" <| input = Column.from_vector "values" ["100000000", "hey", "3"] input.format "xyz" . should_fail_with Illegal_Argument - Test.specify "Format is not text" <| + group_builder.specify "Format is not text" <| input = Column.from_vector "values" [Date.new 2020 12 21, Date.new 2023 4 25] Test.expect_panic_with (input.format 73) Type_Error - Test.group "Edge cases" <| - Test.specify "empty table is ok" <| + suite_builder.group "Edge cases" group_builder-> + group_builder.specify "empty table is ok" <| input = Column.from_vector "values" [Date.new 2020 12 21, Date.new 2023 4 25] . take 0 expected = Column.from_vector "values" [] actual = input.format "yyyyMMdd" actual . should_equal expected -spec_with_numeric_type name numeric_type = - Test.group name <| - Test.specify "Column" <| +spec_with_numeric_type suite_builder name numeric_type = + suite_builder.group name group_builder-> + group_builder.specify "Column" <| input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type expected = Column.from_vector "values" ["100,000,000.00", "2,222.00", "3.00"] input.format "#,##0.00" . should_equal expected - Test.specify "Column with locale" <| + group_builder.specify "Column with locale" <| input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type expected = Column.from_vector "values" ["100 000 000,00", "2 222,00", "3,00"] input.format "#,##0.00" locale=(Locale.new "fr") . 
should_equal expected - Test.specify "Bad format" <| + group_builder.specify "Bad format" <| input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type input.format "#.##0,00" . should_fail_with Illegal_Argument - Test.group name+", with format Column" <| - Test.specify "Column" <| + suite_builder.group name+", with format Column" group_builder-> + group_builder.specify "Column" <| input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type formats = Column.from_vector "formats" ["#,##0.00", "0.00", "0"] expected = Column.from_vector "values" ["100,000,000.00", "2222.00", "3"] input.format formats . should_equal expected - Test.specify "Column with locale" <| + group_builder.specify "Column with locale" <| input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type formats = Column.from_vector "formats" ["#,##0.00", "0.00", "0"] expected = Column.from_vector "values" ["100 000 000,00", "2222,00", "3"] input.format formats locale=(Locale.new "fr") . should_equal expected - Test.specify "Bad format" <| + group_builder.specify "Bad format" <| input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type formats = Column.from_vector "formats" ["#,##0.00", "#.##0,00", "0"] input.format formats . 
should_fail_with Illegal_Argument -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/In_Memory/Column_Spec.enso b/test/Table_Tests/src/In_Memory/Column_Spec.enso index ef2f67492f43..ee6cb9b2da7a 100644 --- a/test/Table_Tests/src/In_Memory/Column_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Column_Spec.enso @@ -12,23 +12,27 @@ from Standard.Table import Column, Value_Type, Auto import Standard.Table.Data.Type.Value_Type.Bits from Standard.Table.Errors import Invalid_Value_Type, Invalid_Column_Names -from Standard.Test import Test, Test_Suite, Problems +from Standard.Test_New import all -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter -spec = - Test.group "Columns" <| + +add_specs suite_builder = + suite_builder.group "Columns" group_builder-> test_column = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] empty_column = Column.from_vector "Test" [] - Test.specify "should allow getting specific elements" <| + group_builder.specify "should allow getting specific elements" <| test_column.at 0 . should_equal 1 test_column.at 2 . should_equal 5 test_column.at 5 . should_equal 6 test_column.at 6 . should_fail_with Index_Out_Of_Bounds empty_column.at 0 . should_fail_with Index_Out_Of_Bounds - Test.specify "should be able to take the first n elements" <| + group_builder.specify "should be able to take the first n elements" <| expected_1 = Column.from_vector "Test" [1, 3, 5] expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] expected_3 = Column.from_vector "Test" [] @@ -36,7 +40,7 @@ spec = test_column.take (First 7) . to_vector . should_equal expected_2.to_vector test_column.take (First 0) . to_vector . 
should_equal expected_3.to_vector - Test.specify "should be able to take the first n elements by Integer" <| + group_builder.specify "should be able to take the first n elements by Integer" <| expected_1 = Column.from_vector "Test" [1, 3, 5] expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] expected_3 = Column.from_vector "Test" [] @@ -44,7 +48,7 @@ spec = test_column.take 7 . to_vector . should_equal expected_2.to_vector test_column.take 0 . to_vector . should_equal expected_3.to_vector - Test.specify "should be able to take the last n elements" <| + group_builder.specify "should be able to take the last n elements" <| expected_1 = Column.from_vector "Test" [2, 4, 6] expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] expected_3 = Column.from_vector "Test" [] @@ -52,27 +56,27 @@ spec = test_column.take (Last 7) . to_vector . should_equal expected_2.to_vector test_column.take (Last 0) . to_vector . should_equal expected_3.to_vector - Test.specify "should be able to get the first element" <| + group_builder.specify "should be able to get the first element" <| test_column.first . should_equal 1 empty_column.first.should_fail_with Index_Out_Of_Bounds - Test.specify "should be able to get the last element" <| + group_builder.specify "should be able to get the last element" <| test_column.last . should_equal 6 empty_column.last.should_fail_with Index_Out_Of_Bounds - Test.specify "should be able to be reversed" <| + group_builder.specify "should be able to be reversed" <| expected_1 = Column.from_vector "Test" [6, 4, 2, 5, 3, 1] test_column.reverse.to_vector . should_equal expected_1.to_vector empty_column.reverse.to_vector . 
should_equal empty_column.to_vector - Test.specify "should allow to count duplicate value occurrences" <| + group_builder.specify "should allow to count duplicate value occurrences" <| c_1 = Column.from_vector "c 1" [0, 1, 2, 2, 1, 0, 2] c_1.duplicate_count.to_vector.should_equal [0, 0, 0, 1, 1, 1, 2] c_2 = Column.from_vector "c 2" ["foo", "bar", "foo", "baz", "bar"] c_2.duplicate_count.to_vector.should_equal [0, 0, 1, 0, 1] - Test.specify "should result in correct Storage if operation allows it" <| + group_builder.specify "should result in correct Storage if operation allows it" <| another = Column.from_vector "Test" [10, 20, 30, 40, 50, 60] (test_column + 1).value_type . should_equal Value_Type.Integer (test_column - 1).value_type . should_equal Value_Type.Integer @@ -80,14 +84,14 @@ spec = (test_column * 1.5).value_type . should_equal Value_Type.Float (test_column + another).value_type . should_equal Value_Type.Integer - Test.specify "should forward dataflow error if constructed from one" <| + group_builder.specify "should forward dataflow error if constructed from one" <| foo x = if x == 1 then Error.throw "X" else x col = Column.from_vector "Test" [foo 0, foo 1, foo 2] col . should_fail_with Text col.catch . should_equal "X" - Test.specify "should not allow invalid column names" <| + group_builder.specify "should not allow invalid column names" <| c1 = Column.from_vector "" [1, 2, 3] c1.should_fail_with Invalid_Column_Names @@ -99,14 +103,14 @@ spec = c4 = Column.from_vector 'foo\0bar' [1, 2, 3] c4.should_fail_with Invalid_Column_Names - Test.specify "will coerce integers to decimals by default, to get a numeric column" <| + group_builder.specify "will coerce integers to decimals by default, to get a numeric column" <| c1 = Column.from_vector "X" [1, 2.0] c1.value_type . should_equal Value_Type.Float c1.at 0 . should_be_a Float c1.at 1 . should_be_a Float c1.at 0 . is_a Integer . 
should_be_false - Test.specify "will preserve the types if the column is Mixed, regardless of ordering" <| + group_builder.specify "will preserve the types if the column is Mixed, regardless of ordering" <| run_test vector = Test.with_clue vector.pretty+": " <| c = Column.from_vector "X" vector @@ -149,7 +153,7 @@ spec = run_test (big_test_vector medium big) run_test (big_test_vector 123 456) - Test.specify "should allow to set a specific type at construction" <| + group_builder.specify "should allow to set a specific type at construction" <| c1 = Column.from_vector "X" [1, 2] Value_Type.Float c1.value_type . should_equal Value_Type.Float c1.at 0 . should_be_a Float @@ -199,7 +203,7 @@ spec = c9.value_type . should_equal Value_Type.Time c9.to_vector . should_equal [Time_Of_Day.new 10 11 12, Time_Of_Day.new 11 30] - Test.specify "will fail if unexpected values are encountered for the requested type" <| + group_builder.specify "will fail if unexpected values are encountered for the requested type" <| r1 = Column.from_vector "X" ["a", 2] Value_Type.Char r1.should_fail_with Invalid_Value_Type r1.catch.to_display_text.should_contain "Expected type Char (variable length, max_size=unlimited), but got a value 2 of type Integer (16 bits)" @@ -239,15 +243,15 @@ spec = r9.should_fail_with Invalid_Value_Type r9.catch.to_display_text.should_contain "Expected type Integer (64 bits), but got a value 1.5 of type Float" - Test.specify "will not allow to construct a column with Char size=0" <| + group_builder.specify "will not allow to construct a column with Char size=0" <| r1 = Column.from_vector "X" [] (Value_Type.Char size=0 variable_length=False) r1.should_fail_with Illegal_Argument r2 = Column.from_vector "X" [] (Value_Type.Char size=0 variable_length=True) r2.should_fail_with Illegal_Argument - Test.group "Rounding" <| - Test.specify "should be able to round a column of decimals" <| + suite_builder.group "Rounding" group_builder-> + group_builder.specify "should be able to 
round a column of decimals" <| Column.from_vector "foo" [1.2, 2.3, 2.5, 3.6] . round . should_equal (Column.from_vector "round([foo])" [1, 2, 3, 4]) Column.from_vector "foo" [1.25, 2.33, 3.57] . round 1 . should_equal <| Column.from_vector "round([foo])" [1.3, 2.3, 3.6] Column.from_vector "foo" [12.0, 24.0, 25.0, 29.0] . round -1 . should_equal <| Column.from_vector "round([foo])" [10, 20, 30, 30] @@ -257,98 +261,98 @@ spec = Column.from_vector "foo" [-1.25, -2.33, -2.45, -3.57] . round 1 . should_equal <| Column.from_vector "round([foo])" [-1.3, -2.3, -2.5, -3.6] Column.from_vector "foo" [-12.0, -24.0, -25.0, -29.0] . round -1 . should_equal <| Column.from_vector "round([foo])" [-10, -20, -30, -30] - Test.specify "decimal rounding should return the correct column type" <| + group_builder.specify "decimal rounding should return the correct column type" <| col = Column.from_vector "foo" [1.21, 2.34, 3.68] col . round -1 . value_type . should_equal Value_Type.Integer col . round . value_type . should_equal Value_Type.Integer col . round 1 . value_type . should_equal Value_Type.Float - Test.specify "should be able to round a column of integers" <| + group_builder.specify "should be able to round a column of integers" <| Column.from_vector "foo" [12, 24, 25, 29] . round . should_equal <| Column.from_vector "round([foo])" [12, 24, 25, 29] Column.from_vector "foo" [12, 24, 25, 29] . round -1 . should_equal <| Column.from_vector "round([foo])" [10, 20, 30, 30] Column.from_vector "foo" [15, 25, 35] . round -1 use_bankers=True . should_equal <| Column.from_vector "round([foo])" [20, 20, 40] - Test.specify "integer rounding should return the correct column type" <| + group_builder.specify "integer rounding should return the correct column type" <| col = Column.from_vector "foo" [12, 24, 25, 29] col . round 1 . value_type . should_equal Value_Type.Integer col . round 0 . value_type . should_equal Value_Type.Integer col . round -1 . value_type . 
should_equal Value_Type.Integer - Test.specify "rounding should not attach a warning by default" <| + group_builder.specify "rounding should not attach a warning by default" <| Problems.assume_no_problems <| Column.from_vector "foo" [12, 24, 25, 29] . round 1 - Test.specify "should report out-of-range values as warnings" <| + group_builder.specify "should report out-of-range values as warnings" <| col = Column.from_vector "foo" [12, 23, 99999999999999999] expected = Column.from_vector "round([foo])" [10, 20, Nothing] actual = col.round -1 actual . should_equal expected Warning.get_all actual . map .value . should_equal [Illegal_Argument.Error "Error: `round` can only accept values between -99999999999999 and 99999999999999 (inclusive), but was 99999999999999999 (at rows [2])."] - Test.specify "should throw an error on decimal places out of range" <| + group_builder.specify "should throw an error on decimal places out of range" <| col = Column.from_vector "foo" [12, 23, 99999999999999999] col.round decimal_places=-1200 . should_fail_with Illegal_Argument - Test.specify "should handle type errors" <| + group_builder.specify "should handle type errors" <| col = Column.from_vector "foo" [12, 23, 45] Test.expect_panic_with (col.round use_bankers="string") Type_Error Test.expect_panic_with (col.round decimal_places="string") Type_Error - Test.group "truncate" <| - Test.specify "should be able to truncate a column of floats" <| + suite_builder.group "truncate" group_builder-> + group_builder.specify "should be able to truncate a column of floats" <| Column.from_vector "foo" [1.25, 2.33, 3.57] . truncate . should_equal <| Column.from_vector "truncate([foo])" [1, 2, 3] Column.from_vector "foo" [1.25, 2.33, 3.57] . truncate . value_type . should_equal Value_Type.Integer - Test.specify "should also work on ints" <| + group_builder.specify "should also work on ints" <| Column.from_vector "foo" [1, 2, 3] . truncate . 
should_equal <| Column.from_vector "truncate([foo])" [1, 2, 3] Column.from_vector "foo" [1, 2, 3] . truncate . value_type . should_equal Value_Type.Integer - Test.specify "Should error on input of the wrong type" <| + group_builder.specify "Should error on input of the wrong type" <| Column.from_vector "foo" ["asdf", "zxcv", "qwer"] . truncate . should_fail_with Invalid_Value_Type - Test.group "ceil" <| - Test.specify "should be able to take the ceil of a column of floats" <| + suite_builder.group "ceil" group_builder-> + group_builder.specify "should be able to take the ceil of a column of floats" <| Column.from_vector "foo" [1.25, 2.33, 3.57] . ceil . should_equal <| Column.from_vector "ceil([foo])" [2, 3, 4] Column.from_vector "foo" [1.25, 2.33, 3.57] . ceil . value_type . should_equal Value_Type.Integer - Test.specify "should also work on ints" <| + group_builder.specify "should also work on ints" <| Column.from_vector "foo" [1, 2, 3] . ceil . should_equal <| Column.from_vector "ceil([foo])" [1, 2, 3] Column.from_vector "foo" [1, 2, 3] . ceil . value_type . should_equal Value_Type.Integer - Test.specify "Should error on input of the wrong type" <| + group_builder.specify "Should error on input of the wrong type" <| Column.from_vector "foo" ["asdf", "zxcv", "qwer"] . ceil . should_fail_with Invalid_Value_Type - Test.group "floor" <| - Test.specify "should be able to take the floor of a column of floats" <| + suite_builder.group "floor" group_builder-> + group_builder.specify "should be able to take the floor of a column of floats" <| Column.from_vector "foo" [1.25, 2.33, 3.57] . floor . should_equal <| Column.from_vector "floor([foo])" [1, 2, 3] Column.from_vector "foo" [1.25, 2.33, 3.57] . floor . value_type . should_equal Value_Type.Integer - Test.specify "should also work on ints" <| + group_builder.specify "should also work on ints" <| Column.from_vector "foo" [1, 2, 3] . floor . 
should_equal <| Column.from_vector "floor([foo])" [1, 2, 3] Column.from_vector "foo" [1, 2, 3] . floor . value_type . should_equal Value_Type.Integer - Test.specify "Should error on input of the wrong type" <| + group_builder.specify "Should error on input of the wrong type" <| Column.from_vector "foo" ["asdf", "zxcv", "qwer"] . floor . should_fail_with Invalid_Value_Type - Test.group "round/truncate/ceil/floor" <| + suite_builder.group "round/truncate/ceil/floor" group_builder-> do_op n op = col = Column.from_vector "x" [n] result = op col result.to_vector.at 0 do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers) - Test.specify "round returns the correct type" <| + group_builder.specify "round returns the correct type" <| do_round 231.2 1 . should_be_a Float do_round 231.2 0 . should_be_a Integer do_round 231.2 . should_be_a Integer do_round 231.2 -1 . should_be_a Integer - Test.specify "round returns the correct type" <| + group_builder.specify "round returns the correct type" <| do_round 231 1 . should_be_a Integer do_round 231 0 . should_be_a Integer do_round 231 . should_be_a Integer do_round 231 -1 . should_be_a Integer - Test.specify "nan/inf" <| + group_builder.specify "nan/inf" <| ops = [.truncate, .ceil, .floor, .round] ops.map op-> col = Column.from_vector "x" [2.1, 0.0, Number.nan, Number.positive_infinity, Number.negative_infinity, Nothing, 12.1] @@ -359,8 +363,8 @@ spec = warnings . should_contain <| Arithmetic_Error.Error 'Value is Infinity (at rows [3]).' warnings . should_contain <| Arithmetic_Error.Error 'Value is NaN (at rows [2]).' - Test.group "Date_Time truncate" <| - Test.specify "should be able to truncate a column of Date_Times" <| + suite_builder.group "Date_Time truncate" group_builder-> + group_builder.specify "should be able to truncate a column of Date_Times" <| c = Column.from_vector "foo" [Date_Time.new 2020 10 24 1 2 3, Date_Time.new 2020 10 24 1 2 3] truncated = c.truncate truncated . 
should_equal <| Column.from_vector "truncate([foo])" [Date.new 2020 10 24, Date.new 2020 10 24] diff --git a/test/Table_Tests/src/In_Memory/Fan_Out_Spec.enso b/test/Table_Tests/src/In_Memory/Fan_Out_Spec.enso index aa4527d41eb3..c5f6cac1faba 100644 --- a/test/Table_Tests/src/In_Memory/Fan_Out_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Fan_Out_Spec.enso @@ -5,12 +5,11 @@ from project.Util import all import Standard.Table.Internal.Fan_Out from Standard.Table import Table -import Standard.Test.Extensions -from Standard.Test import Test, Test_Suite, Problems +from Standard.Test_New import all -spec = - Test.group "Fan_Out" <| - Test.specify "can do fan_out_to_columns " <| +add_specs suite_builder = + suite_builder.group "Fan_Out" group_builder-> + group_builder.specify "can do fan_out_to_columns " <| cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing], [1, "c", "d", "ef"], [2, "gh", "ij", "u"]] @@ -18,7 +17,7 @@ spec = t2 = Fan_Out.fan_out_to_columns t "bar" (_.split "b") t2.should_equal expected - Test.specify "can do fan_out_to_rows" <| + group_builder.specify "can do fan_out_to_rows" <| cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]] t = Table.new cols expected_rows = [[0, "a"], [0, "c"], [1, "c"], [1, "d"], [1, "ef"], [2, "gh"], [2, "ij"], [2, "u"]] @@ -26,4 +25,8 @@ spec = t2 = Fan_Out.fan_out_to_rows t "bar" (_.split "b") t2.should_equal expected -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/In_Memory/Integer_Overflow_Spec.enso b/test/Table_Tests/src/In_Memory/Integer_Overflow_Spec.enso index 66bc81de4802..7c99afd0a6b8 100644 --- a/test/Table_Tests/src/In_Memory/Integer_Overflow_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Integer_Overflow_Spec.enso @@ -4,8 +4,7 @@ import Standard.Table.Data.Type.Value_Type.Bits from Standard.Table import all from 
Standard.Table.Errors import Arithmetic_Overflow, Conversion_Failure, Invalid_Value_Type, No_Common_Type, Loss_Of_Integer_Precision -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all from project.Util import all @@ -16,11 +15,15 @@ polyglot java import java.lang.Long as Java_Long polyglot java import org.enso.table_test_helpers.PolyglotHelpers -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter -spec = - Test.group "[In-Memory] Column operation Integer Overflow handling" <| - Test.specify "64-bit integer column overflow" <| + +add_specs suite_builder = + suite_builder.group "[In-Memory] Column operation Integer Overflow handling" group_builder-> + group_builder.specify "64-bit integer column overflow" <| min_value = Java_Long.MIN_VALUE max_value = Java_Long.MAX_VALUE value_type = Value_Type.Integer Bits.Bits_64 @@ -90,7 +93,7 @@ spec = c10.value_type . should_equal value_type Problems.expect_only_warning Arithmetic_Overflow c10 - test_no_overflow value_type max_value min_value = Test.specify "operations on "+value_type.to_display_text+" will not overflow, because the result type is always 64-bit integer column" <| + test_no_overflow value_type max_value min_value = group_builder.specify "operations on "+value_type.to_display_text+" will not overflow, because the result type is always 64-bit integer column" <| t = Table.new [["X", [0, 1, max_value, 0]], ["Y", [0, -1, min_value, 0]], ["U", [1, 1, 1, 1]]] x = t.at "X" . cast value_type y = t.at "Y" . 
cast value_type @@ -140,7 +143,7 @@ spec = test_no_overflow (Value_Type.Integer Bits.Bits_16) Java_Short.MAX_VALUE Java_Short.MIN_VALUE test_no_overflow (Value_Type.Integer Bits.Bits_32) Java_Integer.MAX_VALUE Java_Integer.MIN_VALUE - Test.specify "if we cast to Decimal first, then the operations will not overflow" <| + group_builder.specify "if we cast to Decimal first, then the operations will not overflow" <| t0 = Table.new [["X", [0, 1, Java_Long.MAX_VALUE, 0]], ["U", [1, 1, 1, 1]]] t1 = t0.cast "X" (Value_Type.Decimal scale=0) x = t1.at "X" @@ -162,7 +165,7 @@ spec = Problems.assume_no_problems c4 c4.to_vector . should_equal [0, 1, Java_Long.MAX_VALUE*Java_Long.MAX_VALUE, 0] - Test.specify "mixed operations" <| + group_builder.specify "mixed operations" <| t = Table.new [["X", [Java_Short.MAX_VALUE]], ["Y", [1]]] x = t.at "X" . cast (Value_Type.Integer Bits.Bits_16) y = t.at "Y" . cast Value_Type.Byte @@ -195,8 +198,8 @@ spec = (x%2).value_type . should_equal (Value_Type.Integer Bits.Bits_64) - Test.group "[In-Memory] Handling of Big Integer values" <| - Test.specify "will create a BigInteger column if some values do not fit in long" <| + suite_builder.group "[In-Memory] Handling of Big Integer values" group_builder-> + group_builder.specify "will create a BigInteger column if some values do not fit in long" <| c0 = Column.from_vector "X" [Java_Long.MAX_VALUE, 0, 1] Problems.assume_no_problems c0 c0.value_type . should_equal (Value_Type.Integer Bits.Bits_64) @@ -220,12 +223,12 @@ spec = Problems.assume_no_problems t1 t1.at "X" . value_type . should_be_a (Value_Type.Decimal ...) - Test.specify "should fail if a big integer is provided for an Integer 64-bit column" <| + group_builder.specify "should fail if a big integer is provided for an Integer 64-bit column" <| c1 = Column.from_vector "X" [Java_Long.MAX_VALUE, 2^70, 100] value_type=Value_Type.Integer c1.should_fail_with Invalid_Value_Type c1.catch.to_display_text . 
should_contain "Decimal" - Test.specify "allows to construct a column from big integers coming from Java" <| + group_builder.specify "allows to construct a column from big integers coming from Java" <| big_integer_but_small = PolyglotHelpers.createSmallBigIntegerComingFromJava t1 = Table.new [["X", [big_integer_but_small]]] t1.at "X" . value_type . should_equal (Value_Type.Integer Bits.Bits_64) @@ -239,12 +242,12 @@ spec = v2.at 0 . should_be_a Integer v2.at 0 . to_text . should_equal big_big_integer.to_text - Test.specify "will create a Mixed column if other types are present" <| + group_builder.specify "will create a Mixed column if other types are present" <| c1 = Column.from_vector "X" [Java_Long.MAX_VALUE, 2^70, "abc"] Problems.assume_no_problems c1 c1.value_type . should_equal Value_Type.Mixed - Test.specify "should allow to create a Float column from a big integer, but warn about Loss_Of_Integer_Precision if relevant" <| + group_builder.specify "should allow to create a Float column from a big integer, but warn about Loss_Of_Integer_Precision if relevant" <| # 2^70 is not exactly representable as a Float. (2^70 + 0.0).truncate . should_not_equal (2^70) @@ -268,7 +271,7 @@ spec = c4.to_vector . should_equal [2^70, 1] Problems.expect_only_warning Loss_Of_Integer_Precision c4 - Test.specify "should use Decimal type if a mapping operation yields a numeric column with big integers" <| + group_builder.specify "should use Decimal type if a mapping operation yields a numeric column with big integers" <| c = Column.from_vector "X" [1, 2, 3] f1 x = if x == 2 then 2^70 else x @@ -287,7 +290,7 @@ spec = Problems.assume_no_problems c2 c2.value_type . should_equal Value_Type.Mixed - Test.specify "allows arithmetic on Decimal columns" <| + group_builder.specify "allows arithmetic on Decimal columns" <| t = Table.new [["X", [10^30, 2^70, Nothing, 3]], ["Y", [10^20, 2, 3, 4]]] x = t.at "X" y = t.at "Y" @@ -324,7 +327,7 @@ spec = x.is_infinite . to_vector . 
should_equal [False, False, Nothing, False] x.is_in [3, 2^70] . to_vector . should_equal [False, True, False, True] - Test.specify "allows arithmetic on Decimal columns and other numeric columns" <| + group_builder.specify "allows arithmetic on Decimal columns and other numeric columns" <| t = Table.new [["X", [10^30, 2^70, Nothing, 3]], ["Y", [1, 2, 3, 4]], ["Z", [1.5, 2.5, 3.5, 4.5]]] x = t.at "X" y = t.at "Y" @@ -391,7 +394,7 @@ spec = r4.value_type . should_be_a (Value_Type.Decimal ...) r4.to_vector . should_equal [10^30, 2^70, 23, 3] - Test.specify "returns a Decimal column if the scalar argument is a big integer" <| + group_builder.specify "returns a Decimal column if the scalar argument is a big integer" <| c = Column.from_vector "X" [1, 2, Nothing, 3] c.value_type.should_equal Value_Type.Integer x = 2^70 @@ -430,7 +433,7 @@ spec = r7.value_type . should_be_a (Value_Type.Decimal ...) r7.to_vector . should_equal [1, 2, x, 3] - Test.specify "should work fine with typing edge cases" <| + group_builder.specify "should work fine with typing edge cases" <| c1 = Column.from_vector "X" [2^70, 100, Nothing, 200] c1.value_type . should_be_a (Value_Type.Decimal ...) 
diff --git a/test/Table_Tests/src/In_Memory/Lossy_Conversions_Spec.enso b/test/Table_Tests/src/In_Memory/Lossy_Conversions_Spec.enso index 8dac3b9c1d1b..dd9da2ff5002 100644 --- a/test/Table_Tests/src/In_Memory/Lossy_Conversions_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Lossy_Conversions_Spec.enso @@ -3,21 +3,24 @@ from Standard.Base import all from Standard.Table import all from Standard.Table.Errors import Loss_Of_Integer_Precision -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all from project.Util import all -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter -spec = + +add_specs suite_builder = table_builder = Table.new - Test.group "[In-Memory] Loss_Of_Integer_Precision (large integers not being exactly representable in Float)" <| + suite_builder.group "[In-Memory] Loss_Of_Integer_Precision (large integers not being exactly representable in Float)" group_builder-> # The power of 2 is still represented exactly, but x+1 is not anymore. x = 2^60 y = x+1 - Test.specify "should be reported in `cast`" <| + group_builder.specify "should be reported in `cast`" <| t1 = table_builder [["X", [y]]] c1 = t1.at "X" . cast Value_Type.Float c1.value_type . should_equal Value_Type.Float @@ -49,7 +52,7 @@ spec = w2.to_display_text . should_contain "too large to represent exactly" w2.affected_rows_count . should_equal 4 - Test.specify "should be reported in Column.from_vector and Table.new" <| + group_builder.specify "should be reported in Column.from_vector and Table.new" <| c1 = Column.from_vector "X" [1, 2, x, x+1, x+2, 100] # No problems if integers only. Problems.assume_no_problems c1 @@ -78,7 +81,7 @@ spec = Problems.assume_no_problems c6 c6.to_vector.map .to_text . 
should_equal ([x+1, 1.0, x+2, "a", x+3].map .to_text) - Test.specify "should not be reported when numeric operation mixes Float and Integer" <| + group_builder.specify "should not be reported when numeric operation mixes Float and Integer" <| t = table_builder [["A", [y]], ["B", [1.0]]] a = t.at "A" b = t.at "B" @@ -97,7 +100,7 @@ spec = Problems.assume_no_problems (a - b) Problems.assume_no_problems (b / a) - Test.specify "should be reported if mixing column types in `iif` causes an Integer column to be casted" <| + group_builder.specify "should be reported if mixing column types in `iif` causes an Integer column to be casted" <| t = table_builder [["A", [x+1, x+2]], ["B", [1.5, 2.5]], ["C", [True, False]]] r = (t.at "C").iif (t.at "A") (t.at "B") r.value_type . should_equal Value_Type.Float @@ -107,7 +110,7 @@ spec = w.affected_rows_count . should_equal 1 w.example_value.to_text . should_equal (x+1).to_text - Test.specify "should be reported when reading a CSV file" <| + group_builder.specify "should be reported when reading a CSV file" <| t = (enso_project.data / "lossy_int.csv") . read t.column_names . should_equal ["X", "Y", "Z"] t.at "X" . value_type . 
should_equal Value_Type.Char diff --git a/test/Table_Tests/src/In_Memory/Main.enso b/test/Table_Tests/src/In_Memory/Main.enso index 9221dba475ff..820f21adc953 100644 --- a/test/Table_Tests/src/In_Memory/Main.enso +++ b/test/Table_Tests/src/In_Memory/Main.enso @@ -1,12 +1,11 @@ from Standard.Base import all -from Standard.Test import Test_Suite +from Standard.Test_New import all import project.In_Memory.Aggregate_Column_Spec import project.In_Memory.Builders_Spec import project.In_Memory.Column_Spec import project.In_Memory.Column_Format_Spec -import project.In_Memory.Common_Spec import project.In_Memory.Integer_Overflow_Spec import project.In_Memory.Lossy_Conversions_Spec import project.In_Memory.Parse_To_Table_Spec @@ -16,19 +15,18 @@ import project.In_Memory.Table_Date_Spec import project.In_Memory.Table_Date_Time_Spec import project.In_Memory.Table_Time_Of_Day_Spec -spec = - Table_Spec.spec - Column_Spec.spec - Column_Format_Spec.spec - Common_Spec.spec - Integer_Overflow_Spec.spec - Lossy_Conversions_Spec.spec - Table_Date_Spec.spec - Table_Date_Time_Spec.spec - Table_Time_Of_Day_Spec.spec - Aggregate_Column_Spec.spec - Builders_Spec.spec - Split_Tokenize_Spec.spec - Parse_To_Table_Spec.spec - -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + Table_Spec.add_specs suite_builder + Column_Spec.add_specs suite_builder + Column_Format_Spec.add_specs suite_builder + Integer_Overflow_Spec.add_specs suite_builder + Lossy_Conversions_Spec.add_specs suite_builder + Table_Date_Spec.add_specs suite_builder + Table_Date_Time_Spec.add_specs suite_builder + Table_Time_Of_Day_Spec.add_specs suite_builder + Aggregate_Column_Spec.add_specs suite_builder + Builders_Spec.add_specs suite_builder + Split_Tokenize_Spec.add_specs suite_builder + Parse_To_Table_Spec.add_specs suite_builder + suite.run_with_filter diff --git a/test/Table_Tests/src/In_Memory/Parse_To_Table_Spec.enso b/test/Table_Tests/src/In_Memory/Parse_To_Table_Spec.enso index 
4e1efc7861e9..59345a01d769 100644 --- a/test/Table_Tests/src/In_Memory/Parse_To_Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Parse_To_Table_Spec.enso @@ -9,73 +9,77 @@ from Standard.Table import Table from Standard.Table.Data.Type.Value_Type import Bits, Value_Type from Standard.Table.Errors import Invalid_Value_Type, Column_Count_Exceeded, Duplicate_Output_Column_Names, Missing_Input_Columns from Standard.Table.Extensions.Table_Conversions import all -from Standard.Test import Test, Test_Suite, Problems +from Standard.Test_New import all from project.Util import all -spec = - Test.group "Text.parse_to_table" <| - Test.specify "text_to_table" <| +add_specs suite_builder = + suite_builder.group "Text.parse_to_table" group_builder-> + group_builder.specify "text_to_table" <| expected = Table.from_rows ["Column"] [["a"], ["ab12"], ["bt100"], ["c12"], ["d20"], ["q"]] actual = "a 7 ab12 bt100 c12d20q 12".parse_to_table "[a-z]+\d*" actual.should_equal expected - Test.specify "text_to_table with a regex" <| + group_builder.specify "text_to_table with a regex" <| expected = Table.from_rows ["Column"] [["a"], ["ab12"], ["bt100"], ["c12"], ["d20"], ["q"]] actual = "a 7 ab12 bt100 c12d20q 12".parse_to_table "[a-z]+\d*".to_regex actual.should_equal expected - Test.group "Text.parse_to_table with groups" <| - Test.specify "with groups" <| + suite_builder.group "Text.parse_to_table with groups" group_builder-> + group_builder.specify "with groups" <| expected = Table.from_rows ["Column 1", "Column 2"] [["ab", 12], ["bt", 100], ["c", 12], ["d", 20]] actual = "a 7 ab-12 bt-100 c-12d-20q q8 12".parse_to_table "([a-z]+)-(\d*)" actual.should_equal expected - Test.specify "with named groups" <| + group_builder.specify "with named groups" <| expected = Table.from_rows ["letters", "Column 2"] [["ab", 12], ["bt", 100], ["c", 12], ["d", 20]] actual = "a 7 ab-12 bt-100 c-12d-20q q8 12".parse_to_table "(?<letters>[a-z]+)-(\d*)" actual.should_equal expected - Test.group "Text.parse_to_table 
with case-insensitivity" <| - Test.specify "case insensitivity" <| + suite_builder.group "Text.parse_to_table with case-insensitivity" group_builder-> + group_builder.specify "case insensitivity" <| expected = Table.from_rows ["Column 1", "Column 2"] [["a", "B"], ["A", "b"], ["a", "b"], ["A", "B"]] actual = "xy aB Ab ab AB".parse_to_table "(a)(b)" case_sensitivity=Case_Sensitivity.Insensitive actual.should_equal expected - Test.group "Text.parse_to_table parsing" <| - Test.specify "parsing on" <| + suite_builder.group "Text.parse_to_table parsing" group_builder-> + group_builder.specify "parsing on" <| expected = Table.from_rows ["Column 1", "Column 2"] [["ab", 12], ["bt", 100], ["c", 12], ["d", 20]] actual = "a 7 ab-12 bt-100 c-12d-20q q8 12".parse_to_table "([a-z]+)-(\d*)" actual.should_equal expected actual.columns.map .value_type . should_equal [Value_Type.Char Nothing True, Value_Type.Integer Bits.Bits_64] - Test.specify "parsing on, with a mixed column" <| + group_builder.specify "parsing on, with a mixed column" <| expected = Table.from_rows ["Column 1", "Column 2"] [["ab", "12"], ["bt", "100"], ["c", "012"], ["d", "20"]] actual = "a 7 ab-12 bt-100 c-012d-20q q8 12".parse_to_table "([a-z]+)-(\d*)" actual.should_equal expected actual.columns.map .value_type . should_equal [Value_Type.Char Nothing True, Value_Type.Char Nothing True] - Test.specify "parsing off" <| + group_builder.specify "parsing off" <| expected = Table.from_rows ["Column 1", "Column 2"] [["ab", "12"], ["bt", "100"], ["c", "12"], ["d", "20"]] actual = "a 7 ab-12 bt-100 c-12d-20q q8 12".parse_to_table "([a-z]+)-(\d*)" parse_values=False actual.should_equal expected actual.columns.map .value_type . 
should_equal [Value_Type.Char Nothing True, Value_Type.Char Nothing True] - Test.group "Text.parse_to_table errors" <| - Test.specify "Regex_Syntax_Error" <| + suite_builder.group "Text.parse_to_table errors" group_builder-> + group_builder.specify "Regex_Syntax_Error" <| "abc".parse_to_table "(a)(?<<" . should_fail_with Regex_Syntax_Error - Test.specify "enpty pattern" <| + group_builder.specify "empty pattern" <| "abc".parse_to_table "" . should_fail_with Illegal_Argument - Test.specify "bad arg" <| + group_builder.specify "bad arg" <| Test.expect_panic_with (actual = "a 7 ab12 bt100 c12d20q 12".parse_to_table 12) Type_Error -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso b/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso index ce6d75a7cd05..ba6ac63bcf4c 100644 --- a/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso @@ -4,12 +4,12 @@ import Standard.Test.Extensions from Standard.Table import Table from Standard.Table.Errors import Invalid_Value_Type, Column_Count_Exceeded, Duplicate_Output_Column_Names, No_Such_Column -from Standard.Test import Test, Test_Suite, Problems +from Standard.Test_New import all from project.Util import all -spec = - Test.group "Table.split" <| - Test.specify "can do split_to_columns" <| +add_specs suite_builder = + suite_builder.group "Table.split" group_builder-> + group_builder.specify "can do split_to_columns" <| cols = [["foo", [0, 1, 2]], ["bar", ["a|c", "c|d|ef", "gh|ij|u"]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing], [1, "c", "d", "ef"], [2, "gh", "ij", "u"]] @@ -17,7 +17,7 @@ spec = t2 = t.split_to_columns "bar" "|" t2.should_equal expected - Test.specify "can do split_to_columns 
where split character, first, last and only character" <| cols = [["foo", [0, 1, 2]], ["bar", ["|cb", "ab|", "|"]]] t = Table.new cols expected_rows = [[0, "", "cb"], [1, "ab", ""], [2, "", ""]] @@ -25,7 +25,7 @@ spec = t2 = t.split_to_columns "bar" "|" t2.should_equal expected - Test.specify "can do split_to_columns where split character, first, last and only character and mismatch in number of split characters" <| + group_builder.specify "can do split_to_columns where split character, first, last and only character and mismatch in number of split characters" <| cols = [["foo", [0, 1, 2]], ["bar", ["|c|", "ab|", "|"]]] t = Table.new cols expected_rows = [[0, "", "c", ""], [1, "ab", "", Nothing], [2, "", "", Nothing]] @@ -33,7 +33,7 @@ spec = t2 = t.split_to_columns "bar" "|" t2.should_equal expected - Test.specify "can do split_to_rows" <| + group_builder.specify "can do split_to_rows" <| cols = [["foo", [0, 1, 2]], ["bar", ["a|c", "c|d|ef", "gh|ij|u"]]] t = Table.new cols expected_rows = [[0, "a"], [0, "c"], [1, "c"], [1, "d"], [1, "ef"], [2, "gh"], [2, "ij"], [2, "u"]] @@ -41,7 +41,7 @@ spec = t2 = t.split_to_rows "bar" "|" t2.should_equal expected - Test.specify "can do split_to_rows where split character, first, last and only character" <| + group_builder.specify "can do split_to_rows where split character, first, last and only character" <| cols = [["foo", [0, 1, 2]], ["bar", ["|cb", "ab|", "|"]]] t = Table.new cols expected_rows = [[0, ""], [0, "cb"], [1, "ab"], [1, ""], [2, ""], [2, ""]] @@ -49,7 +49,7 @@ spec = t2 = t.split_to_rows "bar" "|" t2.should_equal expected - Test.specify "can do split_to_columns with some Nothings and Empty Strings" <| + group_builder.specify "can do split_to_columns with some Nothings and Empty Strings" <| cols = [["foo", [0, 1, 2, 3, 4]], ["bar", ["a|c", "c|d|ef", Nothing, "gh|ij|u", ""]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing], [1, "c", "d", "ef"], [2, Nothing, Nothing, Nothing], [3, "gh", "ij", "u"], [4, 
"", Nothing, Nothing]] @@ -57,7 +57,7 @@ spec = t2 = t.split_to_columns "bar" "|" t2.should_equal expected - Test.specify "can do split_to_rows with some Nothings and Empty Strings" <| + group_builder.specify "can do split_to_rows with some Nothings and Empty Strings" <| cols = [["foo", [0, 1, 2, 3, 4]], ["bar", ["a|c", "c|d|ef", Nothing, "gh|ij|u", ""]]] t = Table.new cols expected_rows = [[0, "a"], [0, "c"], [1, "c"], [1, "d"], [1, "ef"], [2, Nothing], [3, "gh"], [3, "ij"], [3, "u"], [4, ""]] @@ -65,7 +65,7 @@ spec = t2 = t.split_to_rows "bar" "|" t2.should_equal expected - Test.specify "can do split_to_columns with one output column, no column suffix added" <| + group_builder.specify "can do split_to_columns with one output column, no column suffix added" <| cols = [["foo", [0, 1, 2, 3, 4]], ["bar", ["abc", "cbdbef", "ghbijbu", Nothing, ""]]] t = Table.new cols expected_rows = [[0, "abc"], [1, "cbdbef"], [2, "ghbijbu"], [3, Nothing], [4, ""]] @@ -73,8 +73,8 @@ spec = t2 = t.split_to_columns "bar" "|" t2.should_equal expected - Test.group "Table.tokenize" <| - Test.specify "can do tokenize_to_columns" <| + suite_builder.group "Table.tokenize" group_builder-> + group_builder.specify "can do tokenize_to_columns" <| cols = [["foo", [0, 1, 2]], ["bar", ["a12b34r5", "23", "2r4r55"]]] t = Table.new cols expected_rows = [[0, "12", "34", "5"], [1, "23", Nothing, Nothing], [2, "2", "4", "55"]] @@ -82,7 +82,7 @@ spec = t2 = t.tokenize_to_columns "bar" "\d+" t2.should_equal expected - Test.specify "can do tokenize_to_rows" <| + group_builder.specify "can do tokenize_to_rows" <| cols = [["foo", [0, 1, 2]], ["bar", ["a12b34r5", "23", "2r4r55"]]] t = Table.new cols expected_rows = [[0, "12"], [0, "34"], [0, "5"], [1, "23"], [2, "2"], [2, "4"], [2, "55"]] @@ -90,7 +90,7 @@ spec = t2 = t.tokenize_to_rows "bar" "\d+" t2.should_equal expected - Test.specify "can do tokenize_to_columns with some nothings" <| + group_builder.specify "can do tokenize_to_columns with some nothings" <| 
cols = [["foo", [0, 1, 2, 3]], ["bar", ["a12b34r5", Nothing, "23", "2r4r55"]]] t = Table.new cols expected_rows = [[0, "12", "34", "5"], [1, Nothing, Nothing, Nothing], [2, "23", Nothing, Nothing], [3, "2", "4", "55"]] @@ -98,7 +98,7 @@ spec = t2 = t.tokenize_to_columns "bar" "\d+" t2.should_equal expected - Test.specify "can do tokenize_to_rows with some Nothings" <| + group_builder.specify "can do tokenize_to_rows with some Nothings" <| cols = [["foo", [0, 1, 2, 3]], ["bar", ["a12b34r5", Nothing, "23", "2r4r55"]]] t = Table.new cols expected_rows = [[0, "12"], [0, "34"], [0, "5"], [2, "23"], [3, "2"], [3, "4"], [3, "55"]] @@ -106,7 +106,7 @@ spec = t2 = t.tokenize_to_rows "bar" "\d+" t2.should_equal expected - Test.specify "can do tokenize_to_columns with one output column, no column suffix needed" <| + group_builder.specify "can do tokenize_to_columns with one output column, no column suffix needed" <| cols = [["foo", [0, 1, 2]], ["bar", ["a12b", "23", "2r"]]] t = Table.new cols expected_rows = [[0, "12"], [1, "23"], [2, "2"]] @@ -114,7 +114,7 @@ spec = t2 = t.tokenize_to_columns "bar" "\d+" t2.should_equal expected - Test.specify "can do tokenize_to_rows with some rows that have no matches" <| + group_builder.specify "can do tokenize_to_rows with some rows that have no matches" <| cols = [["foo", [0, 1, 2, 3]], ["bar", ["a12b34r5", "23", "q", "2r4r55"]]] t = Table.new cols expected_rows = [[0, "12"], [0, "34"], [0, "5"], [1, "23"], [3, "2"], [3, "4"], [3, "55"]] @@ -122,7 +122,7 @@ spec = t2 = t.tokenize_to_rows "bar" "\d+" t2.should_equal expected - Test.specify "can do tokenize_to_columns with groups" <| + group_builder.specify "can do tokenize_to_columns with groups" <| cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]] t = Table.new cols expected_rows = [[0, "a1", "b12", "d50"], [1, "b10", "c20", Nothing]] @@ -130,7 +130,7 @@ spec = t2 = t.tokenize_to_columns "bar" "([a-z]).(\d+)" t2.should_equal expected - Test.specify "can do 
tokenize_to_rows with groups" <| + group_builder.specify "can do tokenize_to_rows with groups" <| cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]] t = Table.new cols expected_rows = [[0, "a1"], [0, "b12"], [0, "d50"], [1, "b10"], [1, "c20"]] @@ -138,7 +138,7 @@ spec = t2 = t.tokenize_to_rows "bar" "([a-z]).(\d+)" t2.should_equal expected - Test.specify "can do tokenize_to_columns case-insensitively" <| + group_builder.specify "can do tokenize_to_columns case-insensitively" <| cols = [["foo", [0, 1, 2]], ["bar", ["aBqcE", "qcBr", "cCb"]]] t = Table.new cols expected_rows = [[0, "B", "c", Nothing], [1, "c", "B", Nothing], [2, "c", "C", "b"]] @@ -146,7 +146,7 @@ spec = t2 = t.tokenize_to_columns "bar" "[bc]" case_sensitivity=Case_Sensitivity.Insensitive t2.should_equal expected - Test.specify "can do tokenize_to_rows case-insensitively" <| + group_builder.specify "can do tokenize_to_rows case-insensitively" <| cols = [["foo", [0, 1, 2]], ["bar", ["aBqcE", "qcBr", "cCb"]]] t = Table.new cols expected_rows = [[0, "B"], [0, "c"], [1, "c"], [1, "B"], [2, "c"], [2, "C"], [2, "b"]] @@ -154,20 +154,20 @@ spec = t2 = t.tokenize_to_rows "bar" "[bc]" case_sensitivity=Case_Sensitivity.Insensitive t2.should_equal expected - Test.specify "can do tokenize_to_rows with at_least_one_row=True" <| + group_builder.specify "can do tokenize_to_rows with at_least_one_row=True" <| input = Table.from_rows ["foo", "bar"] [[0, "a12b34r5"], [1, "qqq"], [2, "2r4r55"]] expected = Table.from_rows ["foo", "bar"] [[0, "12"], [0, "34"], [0, "5"], [1, Nothing], [2, "2"], [2, "4"], [2, "55"]] actual = input.tokenize_to_rows "bar" "\d+" at_least_one_row=True actual.should_equal expected - Test.specify "can do tokenize_to_rows with at_least_one_row=True, with groups" <| + group_builder.specify "can do tokenize_to_rows with at_least_one_row=True, with groups" <| input = Table.from_rows ["foo", "bar"] [[0, "a12b34r5"], [1, "qqq"], [2, "2r44r55"], [3, Nothing]] expected = 
Table.from_rows ["foo", "bar"] [[0, "12"], [0, "34"], [1, Nothing], [2, "44"], [2, "55"], [3, Nothing]] actual = input.tokenize_to_rows "bar" "(\d)(\d)" at_least_one_row=True actual.should_equal expected - Test.group "Table.split/tokenize column count" <| - Test.specify "should generate extra empty columns if column_count is set" <| + suite_builder.group "Table.split/tokenize column count" group_builder-> + group_builder.specify "should generate extra empty columns if column_count is set" <| cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing, Nothing], [1, "c", "d", "ef", Nothing], [2, "gh", "ij", "u", Nothing]] @@ -176,7 +176,7 @@ spec = t2.should_equal expected t2.at "bar 3" . value_type . is_text . should_be_true - Test.specify "split should limit columns and return problems when exceeding the column limit" <| + group_builder.specify "split should limit columns and return problems when exceeding the column limit" <| cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]] t = Table.new cols expected_rows = [[0, "a", "c"], [1, "c", "d"], [2, "gh", "ij"]] @@ -186,7 +186,7 @@ spec = problems = [Column_Count_Exceeded.Error 2 3] Problems.test_problem_handling action problems tester - Test.specify "tokenize should limit columns and return problems when exceeding the column limit" <| + group_builder.specify "tokenize should limit columns and return problems when exceeding the column limit" <| cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]] t = Table.new cols expected_rows = [[0, "a1", "b12", "d50"], [1, "b10", "c20", Nothing]] @@ -196,7 +196,7 @@ spec = problems = [Column_Count_Exceeded.Error 2 3] Problems.test_problem_handling action problems tester - Test.specify "should generate extra empty columns if column_count is set (with rows in a different order)" <| + group_builder.specify "should generate extra empty columns if column_count is set (with rows in a 
different order)" <| cols = [["foo", [0, 1, 2]], ["bar", ["ghbijbu", "cbdbef", "abc"]]] t = Table.new cols expected_rows = [[0, "gh", "ij", "u", Nothing], [1, "c", "d", "ef", Nothing], [2, "a", "c", Nothing, Nothing]] @@ -205,8 +205,8 @@ spec = t2.should_equal expected t2.at "bar 3" . value_type . is_text . should_be_true - Test.group "Table.split/tokenize errors" <| - Test.specify "won't work on a non-text column" <| + suite_builder.group "Table.split/tokenize errors" group_builder-> + group_builder.specify "won't work on a non-text column" <| cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]] t = Table.new cols t.split_to_columns "foo" "x" . should_fail_with Invalid_Value_Type @@ -214,7 +214,7 @@ spec = t.tokenize_to_columns "foo" "x" . should_fail_with Invalid_Value_Type t.tokenize_to_rows "foo" "x" . should_fail_with Invalid_Value_Type - Test.specify "won't work on a mixed column" <| + group_builder.specify "won't work on a mixed column" <| cols = [["foo", [0, 1]], ["bar", [500, "ab-10:bc-20c"]]] t = Table.new cols t.split_to_columns "bar" "x" . should_fail_with Invalid_Value_Type @@ -222,18 +222,18 @@ spec = t.tokenize_to_columns "bar" "x" . should_fail_with Invalid_Value_Type t.tokenize_to_rows "bar" "x" . should_fail_with Invalid_Value_Type - Test.specify "*_to_columns handles missing input column" <| + group_builder.specify "*_to_columns handles missing input column" <| cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]] t = Table.new cols t.tokenize_to_columns "invalid_name" "([a-z]).(\d+)" . should_fail_with No_Such_Column - Test.specify "*_to_rows handles missing input column" <| + group_builder.specify "*_to_rows handles missing input column" <| cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]] t = Table.new cols t.tokenize_to_rows "invalid_name" "([a-z]).(\d+)" . 
should_fail_with No_Such_Column - Test.group "Table.split/tokenize name conflicts" <| - Test.specify "split will make column names unique" <| + suite_builder.group "Table.split/tokenize name conflicts" group_builder-> + group_builder.specify "split will make column names unique" <| cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]], ["bar 1", ["a", "b", "c"]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing, "a"], [1, "c", "d", "ef", "b"], [2, "gh", "ij", "u", "c"]] @@ -243,7 +243,7 @@ spec = problems = [Duplicate_Output_Column_Names.Error ["bar 1"]] Problems.test_problem_handling action problems tester - Test.specify "tokenize will make column names unique" <| + group_builder.specify "tokenize will make column names unique" <| cols = [["foo", [0, 1, 2]], ["bar", ["a12b34r5", "23", "2r4r55"]], ["bar 1", ["a", "b", "c"]]] t = Table.new cols expected_rows = [[0, "12", "34", "5", "a"], [1, "23", Nothing, Nothing, "b"], [2, "2", "4", "55", "c"]] @@ -253,8 +253,8 @@ spec = problems = [Duplicate_Output_Column_Names.Error ["bar 1"]] Problems.test_problem_handling action problems tester - Test.group "Table.split/tokenize column order" <| - Test.specify "preserves column order" <| + suite_builder.group "Table.split/tokenize column order" group_builder-> + group_builder.specify "preserves column order" <| cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]], ["baz", [1, 2, 3]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing, 1], [1, "c", "d", "ef", 2], [2, "gh", "ij", "u", 3]] @@ -262,74 +262,74 @@ spec = t2 = t.split_to_columns "bar" "b" t2.should_equal expected - Test.group "Table.parse_to_columns" <| - Test.specify "can parse to columns" <| + suite_builder.group "Table.parse_to_columns" group_builder-> + group_builder.specify "can parse to columns" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]] expected = Table.from_rows ["foo", "bar 1", "bar 2", "baz"] [["x", 1, 2, "y"], 
["x", 3, 4, "y"], ["x", 5, 6, "y"], ["xx", 4, 8, "yy"], ["xx", 5, 9, "yy"]] actual = t.parse_to_columns "bar" "(\d)(\d)" actual.should_equal expected - Test.specify "no regex groups" <| + group_builder.specify "no regex groups" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]] expected = Table.from_rows ["foo", "bar", "baz"] [["x", 12, "y"], ["x", 34, "y"], ["x", 56, "y"], ["xx", 48, "yy"], ["xx", 59, "yy"]] actual = t.parse_to_columns "bar" "\d\d" actual.should_equal expected - Test.specify "named groups" <| + group_builder.specify "named groups" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]] expected = Table.from_rows ["foo", "xomt", "biff", "baz"] [["x", 1, 2, "y"], ["x", 3, 4, "y"], ["x", 5, 6, "y"], ["xx", 4, 8, "yy"], ["xx", 5, 9, "yy"]] actual = t.parse_to_columns "bar" "(?\d)(?\d)" actual.should_equal expected - Test.specify "non-participating groups" <| + group_builder.specify "non-participating groups" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "q1", "y"], ["xx", "qp", "yy"]] expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "baz"] [["x", "1", 1, Nothing, "y"], ["xx", "p", Nothing, "p", "yy"]] actual = t.parse_to_columns "bar" "q((\d)|([a-z]))" actual.should_equal expected - Test.specify "case-insensitive" <| + group_builder.specify "case-insensitive" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "qq", "y"], ["xx", "qQ", "yy"]] expected = Table.from_rows ["foo", "bar 1", "baz"] [["x", "q", "y"], ["xx", "Q", "yy"]] actual = t.parse_to_columns "bar" "q(q)" case_sensitivity=Case_Sensitivity.Insensitive actual.should_equal expected - Test.specify "no post-parsing" <| + group_builder.specify "no post-parsing" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]] expected = Table.from_rows ["foo", "bar 1", "bar 2", "baz"] [["x", "1", "2", "y"], ["x", "3", "4", "y"], ["x", "5", "6", "y"], ["xx", 
"4", "8", "yy"], ["xx", "5", "9", "yy"]] actual = t.parse_to_columns "bar" "(\d)(\d)" parse_values=False actual.should_equal expected - Test.specify "column name clash" <| + group_builder.specify "column name clash" <| t = Table.from_rows ["foo", "bar", "bar 1"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]] expected = Table.from_rows ["foo", "bar 1 1", "bar 2", "bar 1"] [["x", 1, 2, "y"], ["x", 3, 4, "y"], ["x", 5, 6, "y"], ["xx", 4, 8, "yy"], ["xx", 5, 9, "yy"]] actual = t.parse_to_columns "bar" "(\d)(\d)" actual.should_equal expected - Test.specify "column and group name clash" <| + group_builder.specify "column and group name clash" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "123", "y"]] expected = Table.from_rows ["foo", "bar", "baz 1", "quux", "baz"] [["x", 1, 2, 3, "y"]] actual = t.parse_to_columns "bar" "(?\d)(?\d)(?\d)" actual.should_equal expected - Test.specify "empty table" <| + group_builder.specify "empty table" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]] . take 0 expected = Table.from_rows ["foo", "bar", "baz"] [] actual = t.parse_to_columns "bar" "\d+" actual.should_equal expected - Test.specify "empty table, with regex groups" <| + group_builder.specify "empty table, with regex groups" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]] . take 0 expected = Table.from_rows ["foo", "bar 1", "bar 2", "baz"] [["x", "a", "a", "y"]] . take 0 actual = t.parse_to_columns "bar" "(\d)(\d)" actual.should_equal expected - Test.specify "empty table, with named and unnamed regex groups" <| + group_builder.specify "empty table, with named and unnamed regex groups" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]] . take 0 expected = Table.from_rows ["foo", "quux", "bar 1", "foo 1", "bar 2", "baz"] [["x", "a", "a", "a", "a", "y"]] . 
take 0 actual = t.parse_to_columns "bar" "(?)(\d)(?\d)(\d)" actual.should_equal expected - Test.specify "input with no matches" <| + group_builder.specify "input with no matches" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"], ["xx", "q12", "yy"], ["xxx", "34r", "yyyy"]] expected = Table.from_rows ["foo", "bar", "baz"] [["x", Nothing, "y"], ["xx", 12, "yy"], ["xxx", 34, "yyyy"]] actual = t.parse_to_columns "bar" "\d+" @@ -340,16 +340,20 @@ spec = actual2 = t2.parse_to_columns "amount" "\$(?\d+(?:.\d+)?)(?B|M)" actual2.should_equal expected2 - Test.specify "input with no matches, with regex groups" <| + group_builder.specify "input with no matches, with regex groups" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]] expected = Table.from_rows ["foo", "bar 1", "bar 2", "baz"] [["x", Nothing, Nothing, "y"]] actual = t.parse_to_columns "bar" "(\d)(\d)" actual.should_equal expected - Test.specify "input with no matches, with named and unnamed regex groups" <| + group_builder.specify "input with no matches, with named and unnamed regex groups" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]] expected = Table.from_rows ["foo", "quux", "bar 1", "foo 1", "bar 2", "baz"] [["x", Nothing, Nothing, Nothing, Nothing, "y"]] actual = t.parse_to_columns "bar" "(?)(\d)(?\d)(\d)" actual.should_equal expected -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso index b5fd17118665..37309c81d273 100644 --- a/test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso @@ -8,8 +8,7 @@ from Standard.Table import Table, Column from Standard.Table.Errors import No_Such_Column from Standard.Table.Extensions.Table_Conversions import all -from Standard.Test import Test, Test_Suite, 
Problems -import Standard.Test.Extensions +from Standard.Test_New import all from project.Util import all @@ -18,162 +17,177 @@ type My_Mod_Type to_text self = "x%10="+(self.x%10).to_text -spec = - single_values = [Nothing, 12, 13.4, True, "hello", Date.new 2023 10 6, Time_Of_Day.new 3 4 5 200, Date_Time.new 2023 11 7 2 3 4] - uniform_json = Json.parse <| ''' - [ - { "first": "Mary", "last": "Smith", "age": 23 }, - { "first": "Joe", "last": "Burton", "age": 34 } - ] - non_uniform_json = Json.parse <| ''' - [ - { "first": "Mary", "last": "Smith", "age": 23 }, - { "height": 1.9, "weight": 70 } - ] - - library_test_file = enso_project.data / "xml" / "library.xml" - library_document = XML_Document.from_file library_test_file - library_root = library_document . root_element +type Data + Value ~data + + single_values self = self.data.at 0 + uniform_json self = self.data.at 1 + non_uniform_json self = self.data.at 2 + library_document self = self.data.at 3 + library_root self = self.data.at 4 + + setup = Data.Value <| + single_values = [Nothing, 12, 13.4, True, "hello", Date.new 2023 10 6, Time_Of_Day.new 3 4 5 200, Date_Time.new 2023 11 7 2 3 4] + uniform_json = Json.parse <| ''' + [ + { "first": "Mary", "last": "Smith", "age": 23 }, + { "first": "Joe", "last": "Burton", "age": 34 } + ] + non_uniform_json = Json.parse <| ''' + [ + { "first": "Mary", "last": "Smith", "age": 23 }, + { "height": 1.9, "weight": 70 } + ] + + library_test_file = enso_project.data / "xml" / "library.xml" + library_document = XML_Document.from_file library_test_file + library_root = library_document . 
root_element + [single_values, uniform_json, non_uniform_json, library_document, library_root] + + +add_specs suite_builder = trim_if_text x = case x of _ : Text -> x.trim _ -> x + + data = Data.setup - Test.group "from_objects with single values" <| - Test.specify "Can create a table from a single value" <| - single_values.map v-> + suite_builder.group "from_objects with single values" group_builder-> + group_builder.specify "Can create a table from a single value" <| + data.single_values.map v-> expected = Table.from_rows ["Value"] [[v]] Table.from_objects v . should_equal expected - Test.specify "Can create a table from a vector of single values" <| - expected = Table.new [["Value", single_values]] - Table.from_objects single_values . should_equal expected + group_builder.specify "Can create a table from a vector of single values" <| + expected = Table.new [["Value", data.single_values]] + Table.from_objects data.single_values . should_equal expected - Test.specify "A single value with a field list results in columns of Nothing" <| + group_builder.specify "A single value with a field list results in columns of Nothing" <| expected = Table.new [["aaa", [Nothing]], ["bbb", [Nothing]]] Table.from_objects 1 ['aaa', 'bbb'] . should_equal expected - Test.specify "A single value with the field list [Value] results in a column with the value" <| + group_builder.specify "A single value with the field list [Value] results in a column with the value" <| expected = Table.new [["Value", [1]], ["bbb", [Nothing]]] Table.from_objects 1 ["Value", "bbb"] . should_equal expected - Test.specify "A vector of single values with a field list results in a column of Nothing" <| + group_builder.specify "A vector of single values with a field list results in a column of Nothing" <| expected = Table.new [["aaa", [Nothing, Nothing]], ["bbb", [Nothing, Nothing]]] Table.from_objects [1, 2] ['aaa', 'bbb'] . 
should_equal expected - Test.group "from_objects with JSON (single values)" <| - Test.specify "Generates a single-row table from a JSON object" <| + suite_builder.group "from_objects with JSON (single values)" group_builder-> + group_builder.specify "Generates a single-row table from a JSON object" <| expected = Table.from_rows ["first", "last", "age"] [["Mary", "Smith", 23]] - Table.from_objects (uniform_json.at 0) . should_equal expected + Table.from_objects (data.uniform_json.at 0) . should_equal expected - Test.specify "works fine even if requested fields are duplicated" <| + group_builder.specify "works fine even if requested fields are duplicated" <| expected = Table.from_rows ["first", "last"] [["Mary", "Smith"]] - Table.from_objects (uniform_json.at 0) ["first", "last", "first", "first"] . should_equal expected + Table.from_objects (data.uniform_json.at 0) ["first", "last", "first", "first"] . should_equal expected - Test.group "from_objects with uniform JSON vector" <| - Test.specify "Generates a table from a vector of JSON objects" <| + suite_builder.group "from_objects with uniform JSON vector" group_builder-> + group_builder.specify "Generates a table from a vector of JSON objects" <| expected = Table.from_rows ["first", "last", "age"] [["Mary", "Smith", 23], ["Joe", "Burton", 34]] - Table.from_objects uniform_json . should_equal expected + Table.from_objects data.uniform_json . should_equal expected - Test.specify "Generates a table from a vector of JSON objects, with exact fields" <| + group_builder.specify "Generates a table from a vector of JSON objects, with exact fields" <| expected = Table.from_rows ["first", "last", "age"] [["Mary", "Smith", 23], ["Joe", "Burton", 34]] - Table.from_objects uniform_json ["first", "last", "age"] . should_equal expected + Table.from_objects data.uniform_json ["first", "last", "age"] . 
should_equal expected - Test.specify "Generates a table from a vector of JSON objects, with a subset of fields" <| + group_builder.specify "Generates a table from a vector of JSON objects, with a subset of fields" <| expected = Table.from_rows ["last", "age"] [["Smith", 23], ["Burton", 34]] - Table.from_objects uniform_json ["last", "age"] . should_equal expected + Table.from_objects data.uniform_json ["last", "age"] . should_equal expected - Test.specify "Generates a table from a vector of JSON objects, with extra fields" <| + group_builder.specify "Generates a table from a vector of JSON objects, with extra fields" <| expected = Table.from_rows ["first", "middle", "last", "age"] [["Mary", Nothing, "Smith", 23], ["Joe", Nothing, "Burton", 34]] - Table.from_objects uniform_json ["first", "middle", "last", "age"] . should_equal expected + Table.from_objects data.uniform_json ["first", "middle", "last", "age"] . should_equal expected - Test.specify "Generates a table from a vector of JSON objects, with ignored and extra fields" <| + group_builder.specify "Generates a table from a vector of JSON objects, with ignored and extra fields" <| expected = Table.from_rows ["first", "middle", "age"] [["Mary", Nothing, 23], ["Joe", Nothing, 34]] - Table.from_objects uniform_json ["first", "middle", "age"] . should_equal expected + Table.from_objects data.uniform_json ["first", "middle", "age"] . should_equal expected - Test.group "from_objects with non-uniform JSON vector" <| - Test.specify "Generates a table from a non-uniform vector of JSON objects" <| + suite_builder.group "from_objects with non-uniform JSON vector" group_builder-> + group_builder.specify "Generates a table from a non-uniform vector of JSON objects" <| expected = Table.from_rows ["first", "last", "age", "height", "weight"] [["Mary", "Smith", 23, Nothing, Nothing], [Nothing, Nothing, Nothing, 1.9, 70]] - Table.from_objects non_uniform_json . should_equal expected + Table.from_objects data.non_uniform_json . 
should_equal expected - Test.specify "Generates a table from a non-uniform vector of JSON objects, with exact fields" <| + group_builder.specify "Generates a table from a non-uniform vector of JSON objects, with exact fields" <| expected = Table.from_rows ["first", "last", "age", "height", "weight"] [["Mary", "Smith", 23, Nothing, Nothing], [Nothing, Nothing, Nothing, 1.9, 70]] - Table.from_objects non_uniform_json ["first", "last", "age", "height", "weight"] . should_equal expected + Table.from_objects data.non_uniform_json ["first", "last", "age", "height", "weight"] . should_equal expected - Test.specify "Generates a table from a non-uniform vector of JSON objects, with ignored fields" <| + group_builder.specify "Generates a table from a non-uniform vector of JSON objects, with ignored fields" <| expected = Table.from_rows ["last", "weight"] [["Smith", Nothing], [Nothing, 70]] - Table.from_objects non_uniform_json ["last", "weight"] . should_equal expected + Table.from_objects data.non_uniform_json ["last", "weight"] . should_equal expected - Test.specify "Generates a table from a non-uniform vector of JSON objects, with extra fields" <| + group_builder.specify "Generates a table from a non-uniform vector of JSON objects, with extra fields" <| expected = Table.from_rows ["first", "middle", "last", "age", "height", "weight"] [["Mary", Nothing, "Smith", 23, Nothing, Nothing], [Nothing, Nothing, Nothing, Nothing, 1.9, 70]] - Table.from_objects non_uniform_json ["first", "middle", "last", "age", "height", "weight"] . should_equal expected + Table.from_objects data.non_uniform_json ["first", "middle", "last", "age", "height", "weight"] . 
should_equal expected - Test.specify "Generates a table from a non-uniform vector of JSON objects, with ignored and extra fields" <| + group_builder.specify "Generates a table from a non-uniform vector of JSON objects, with ignored and extra fields" <| expected = Table.from_rows ["first", "middle", "height", "weight"] [["Mary", Nothing, Nothing, Nothing], [Nothing, Nothing, 1.9, 70]] - Table.from_objects non_uniform_json ["first", "middle", "height", "weight"] . should_equal expected + Table.from_objects data.non_uniform_json ["first", "middle", "height", "weight"] . should_equal expected - Test.specify "Generates a table from a non-uniform vector of JSON objects, with ignored and extra fields, taking only from one kind of value" <| + group_builder.specify "Generates a table from a non-uniform vector of JSON objects, with ignored and extra fields, taking only from one kind of value" <| expected = Table.from_rows ["first"] [["Mary"], [Nothing]] - Table.from_objects non_uniform_json ["first"] . should_equal expected + Table.from_objects data.non_uniform_json ["first"] . should_equal expected - Test.group "from_objects with mixed vector of single and JSON objects" <| - Test.specify "Generates a table from a mixed vector of single values and JSON objects" <| + suite_builder.group "from_objects with mixed vector of single and JSON objects" group_builder-> + group_builder.specify "Generates a table from a mixed vector of single values and JSON objects" <| expected = Table.from_rows ["first", "last", "age", "Value"] [["Mary", "Smith", 23, Nothing], ["Joe", "Burton", 34, Nothing], [Nothing, Nothing, Nothing, 12]] - Table.from_objects uniform_json+[12] . should_equal expected + Table.from_objects data.uniform_json+[12] . 
should_equal expected - Test.specify "Generates a table from a mixed vector of single values and JSON objects, with exact fields" <| + group_builder.specify "Generates a table from a mixed vector of single values and JSON objects, with exact fields" <| expected = Table.from_rows ["first", "last", "age", "Value"] [["Mary", "Smith", 23, Nothing], ["Joe", "Burton", 34, Nothing], [Nothing, Nothing, Nothing, 12]] - Table.from_objects uniform_json+[12] ["first", "last", "age", "Value"] . should_equal expected + Table.from_objects data.uniform_json+[12] ["first", "last", "age", "Value"] . should_equal expected - Test.specify "Generates a table from a mixed vector of single values and JSON objects, with ignored fields" <| + group_builder.specify "Generates a table from a mixed vector of single values and JSON objects, with ignored fields" <| expected = Table.from_rows ["last", "age", "Value"] [["Smith", 23, Nothing], ["Burton", 34, Nothing], [Nothing, Nothing, 12]] - Table.from_objects uniform_json+[12] ["last", "age", "Value"] . should_equal expected + Table.from_objects data.uniform_json+[12] ["last", "age", "Value"] . should_equal expected - Test.specify "Generates a table from a mixed vector of single values and JSON objects, with extra fields" <| + group_builder.specify "Generates a table from a mixed vector of single values and JSON objects, with extra fields" <| expected = Table.from_rows ["first", "last", "age", "blah", "Value"] [["Mary", "Smith", 23, Nothing, Nothing], ["Joe", "Burton", 34, Nothing, Nothing], [Nothing, Nothing, Nothing, Nothing, 12]] - Table.from_objects uniform_json+[12] ["first", "last", "age", "blah", "Value"] . should_equal expected + Table.from_objects data.uniform_json+[12] ["first", "last", "age", "blah", "Value"] . 
should_equal expected - Test.specify "Generates a table from a mixed vector of single values and JSON objects, with ignored and extra fields" <| + group_builder.specify "Generates a table from a mixed vector of single values and JSON objects, with ignored and extra fields" <| expected = Table.from_rows ["first", "last", "blah", "Value"] [["Mary", "Smith", Nothing, Nothing], ["Joe", "Burton", Nothing, Nothing], [Nothing, Nothing, Nothing, 12]] - Table.from_objects uniform_json+[12] ["first", "last", "blah", "Value"] . should_equal expected + Table.from_objects data.uniform_json+[12] ["first", "last", "blah", "Value"] . should_equal expected - Test.group "from_objects with Array" <| - Test.specify "Generates a table from a mixed vector of single values and JSON objects, with ignored and extra fields" <| + suite_builder.group "from_objects with Array" group_builder-> + group_builder.specify "Generates a table from a mixed vector of single values and JSON objects, with ignored and extra fields" <| expected = Table.from_rows ["first", "last", "blah", "Value"] [["Mary", "Smith", Nothing, Nothing], ["Joe", "Burton", Nothing, Nothing], [Nothing, Nothing, Nothing, 12]] - Table.from_objects (uniform_json+[12]).to_array ["first", "last", "blah", "Value"] . should_equal expected + Table.from_objects (data.uniform_json+[12]).to_array ["first", "last", "blah", "Value"] . should_equal expected - Test.group "expand_column" <| - Test.specify "Expands a column of single values" <| + suite_builder.group "expand_column" group_builder-> + group_builder.specify "Expands a column of single values" <| table = Table.new [["aaa", [1, 2]], ["bbb", [3, 4]], ["ccc", [5, 6]]] expected = Table.new [["aaa", [1, 2]], ["bbb Value", [3, 4]], ["ccc", [5, 6]]] table.expand_column "bbb" . 
should_equal expected - Test.specify "Expands a uniform column of JSON objects" <| - table = Table.new [["aaa", [1, 2]], ["bbb", uniform_json], ["ccc", [5, 6]]] + group_builder.specify "Expands a uniform column of JSON objects" <| + table = Table.new [["aaa", [1, 2]], ["bbb", data.uniform_json], ["ccc", [5, 6]]] expected = Table.new [["aaa", [1, 2]], ["bbb first", ["Mary", "Joe"]], ["bbb last", ["Smith", "Burton"]], ["bbb age", [23, 34]], ["ccc", [5, 6]]] table.expand_column "bbb" . should_equal expected - Test.specify "Expands a uniform column of JSON objects, with extra and ignored fields" <| - table = Table.new [["aaa", [1, 2]], ["bbb", uniform_json], ["ccc", [5, 6]]] + group_builder.specify "Expands a uniform column of JSON objects, with extra and ignored fields" <| + table = Table.new [["aaa", [1, 2]], ["bbb", data.uniform_json], ["ccc", [5, 6]]] expected = Table.new [["aaa", [1, 2]], ["bbb age", [23, 34]], ["bbb foo", [Nothing, Nothing]], ["ccc", [5, 6]]] table.expand_column "bbb" ["age", "foo"] . should_equal expected - Test.specify "Expands a non-uniform column of JSON objects" <| - table = Table.new [["aaa", [1, 2]], ["bbb", non_uniform_json], ["ccc", [5, 6]]] + group_builder.specify "Expands a non-uniform column of JSON objects" <| + table = Table.new [["aaa", [1, 2]], ["bbb", data.non_uniform_json], ["ccc", [5, 6]]] expected = Table.new [["aaa", [1, 2]], ["bbb first", ["Mary", Nothing]], ["bbb last", ["Smith", Nothing]], ["bbb age", [23, Nothing]], ["bbb height", [Nothing, 1.9]], ["bbb weight", [Nothing, 70]], ["ccc", [5, 6]]] table.expand_column "bbb" . 
should_equal expected - Test.specify "Expands a non-uniform column of JSON objects with extra and ignored fields" <| - table = Table.new [["aaa", [1, 2]], ["bbb", non_uniform_json], ["ccc", [5, 6]]] + group_builder.specify "Expands a non-uniform column of JSON objects with extra and ignored fields" <| + table = Table.new [["aaa", [1, 2]], ["bbb", data.non_uniform_json], ["ccc", [5, 6]]] expected = Table.new [["aaa", [1, 2]], ["bbb last", ["Smith", Nothing]], ["bbb height", [Nothing, 1.9]], ["bbb foo", [Nothing, Nothing]], ["ccc", [5, 6]]] table.expand_column "bbb" ["last", "height", "foo"] . should_equal expected - Test.specify "accept vectors/arrays within a column" <| + group_builder.specify "accept vectors/arrays within a column" <| table = Table.new [["aaa", [1, 2]], ["bbb", [[1, 2, 3], [4, 5, 6].to_array]]] expected = Table.new [["aaa", [1, 2]], ["bbb Value", [[1, 2, 3], [4, 5, 6].to_array]]] table.expand_column "bbb" . should_equal expected - Test.specify "will work even if keys are not Text" <| + group_builder.specify "will work even if keys are not Text" <| table = Table.new [["a", [1, 2]], ["b", [Map.from_vector [[1, "x"], [2, "y"]], Map.from_vector [[2, "z"], [3, "w"]]]]] expected = Table.new [["a", [1, 2]], ["b 1", ["x", Nothing]], ["b 2", ["y", "z"]], ["b 3", [Nothing, "w"]]] table.expand_column "b" . should_equal expected @@ -182,7 +196,7 @@ spec = expected2 = Table.new [["a", [1, 2]], ["b x%10=2", ["x", "z"]], ["b x%10=3", ["y", Nothing]]] table2.expand_column "b" . should_equal expected2 - Test.specify "will fail if text representation of keys is not unique" <| + group_builder.specify "will fail if text representation of keys is not unique" <| k1 = My_Mod_Type.Value 12 k2 = My_Mod_Type.Value 32 m = Map.from_vector [[k1, "a"], [k2, "b"]] @@ -196,46 +210,46 @@ spec = r.should_fail_with Illegal_Argument r.catch.to_display_text . 
should_contain "keys are duplicated when converted to text" - Test.specify "will error when all objects have no fields" <| + group_builder.specify "will error when all objects have no fields" <| table = Table.new [["aaa", [1, 2]], ["bbb", [Map.from_vector [], Map.from_vector []]], ["ccc", [5, 6]]] r = table.expand_column "bbb" r.should_fail_with Illegal_Argument r.catch.message.should_contain "all input objects had no fields" - Test.specify "will error when fields=[]" <| - table = Table.new [["aaa", [1, 2]], ["bbb", uniform_json], ["ccc", [5, 6]]] + group_builder.specify "will error when fields=[]" <| + table = Table.new [["aaa", [1, 2]], ["bbb", data.uniform_json], ["ccc", [5, 6]]] r = table.expand_column "bbb" fields=[] r.should_fail_with Illegal_Argument r.catch.message . should_equal "The fields parameter cannot be empty." - Test.specify "Can expand with no prefix" <| - table = Table.new [["aaa", [1, 2]], ["bbb", non_uniform_json], ["ccc", [5, 6]]] + group_builder.specify "Can expand with no prefix" <| + table = Table.new [["aaa", [1, 2]], ["bbb", data.non_uniform_json], ["ccc", [5, 6]]] expected = Table.new [["aaa", [1, 2]], ["last", ["Smith", Nothing]], ["height", [Nothing, 1.9]], ["foo", [Nothing, Nothing]], ["ccc", [5, 6]]] table.expand_column "bbb" ["last", "height", "foo"] Prefix_Name.None . should_equal expected - Test.specify "Can expand with an explicit column name prefix" <| - table = Table.new [["aaa", [1, 2]], ["bbb", non_uniform_json], ["ccc", [5, 6]]] + group_builder.specify "Can expand with an explicit column name prefix" <| + table = Table.new [["aaa", [1, 2]], ["bbb", data.non_uniform_json], ["ccc", [5, 6]]] expected = Table.new [["aaa", [1, 2]], ["bbb last", ["Smith", Nothing]], ["bbb height", [Nothing, 1.9]], ["bbb foo", [Nothing, Nothing]], ["ccc", [5, 6]]] table.expand_column "bbb" ["last", "height", "foo"] Prefix_Name.Column_Name . 
should_equal expected - Test.specify "Can expand with a custom prefix" <| - table = Table.new [["aaa", [1, 2]], ["bbb", non_uniform_json], ["ccc", [5, 6]]] + group_builder.specify "Can expand with a custom prefix" <| + table = Table.new [["aaa", [1, 2]], ["bbb", data.non_uniform_json], ["ccc", [5, 6]]] expected = Table.new [["aaa", [1, 2]], ["expanded last", ["Smith", Nothing]], ["expanded height", [Nothing, 1.9]], ["expanded foo", [Nothing, Nothing]], ["ccc", [5, 6]]] table.expand_column "bbb" ["last", "height", "foo"] (Prefix_Name.Custom "expanded ") . should_equal expected - Test.specify "Can expand with a custom prefix, via from conversion" <| - table = Table.new [["aaa", [1, 2]], ["bbb", non_uniform_json], ["ccc", [5, 6]]] + group_builder.specify "Can expand with a custom prefix, via from conversion" <| + table = Table.new [["aaa", [1, 2]], ["bbb", data.non_uniform_json], ["ccc", [5, 6]]] expected = Table.new [["aaa", [1, 2]], ["expanded last", ["Smith", Nothing]], ["expanded height", [Nothing, 1.9]], ["expanded foo", [Nothing, Nothing]], ["ccc", [5, 6]]] table.expand_column "bbb" ["last", "height", "foo"] "expanded " . should_equal expected - Test.group "expand_to_rows" <| - Test.specify "Can expand single values" <| + suite_builder.group "expand_to_rows" group_builder-> + group_builder.specify "Can expand single values" <| values_to_expand = [3, 4] table = Table.new [["aaa", [1, 2]], ["bbb", values_to_expand], ["ccc", [5, 6]]] expected = Table.new [["aaa", [1, 2]], ["bbb", [3, 4]], ["ccc", [5, 6]]] table.expand_to_rows "bbb" . should_equal expected - Test.specify "Can expand Vectors" <| + group_builder.specify "Can expand Vectors" <| values_to_expand = [[10, 11], [20, 21, 22], [30]] table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]] expected = Table.new [["aaa", [1, 1, 2, 2, 2, 3]], ["bbb", [10, 11, 20, 21, 22, 30]], ["ccc", [5, 5, 6, 6, 6, 7]]] @@ -243,31 +257,31 @@ spec = r . should_equal expected r.at "bbb" . 
value_type . should_equal Value_Type.Integer - Test.specify "Can expand Arrays" <| + group_builder.specify "Can expand Arrays" <| values_to_expand = [[10, 11].to_array, [20, 21, 22].to_array, [30].to_array] table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]] expected = Table.new [["aaa", [1, 1, 2, 2, 2, 3]], ["bbb", [10, 11, 20, 21, 22, 30]], ["ccc", [5, 5, 6, 6, 6, 7]]] table.expand_to_rows "bbb" . should_equal expected - Test.specify "Can expand Lists" <| + group_builder.specify "Can expand Lists" <| values_to_expand = [[10, 11].to_list, [20, 21, 22].to_list, [30].to_list] table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]] expected = Table.new [["aaa", [1, 1, 2, 2, 2, 3]], ["bbb", [10, 11, 20, 21, 22, 30]], ["ccc", [5, 5, 6, 6, 6, 7]]] table.expand_to_rows "bbb" . should_equal expected - Test.specify "Can expand Pairs" <| + group_builder.specify "Can expand Pairs" <| values_to_expand = [Pair.new 10 20, Pair.new "a" [30], Pair.new 40 50] table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]] expected = Table.new [["aaa", [1, 1, 2, 2, 3, 3]], ["bbb", [10, 20, "a", [30], 40, 50]], ["ccc", [5, 5, 6, 6, 7, 7]]] table.expand_to_rows "bbb" . should_equal expected - Test.specify "Can expand Ranges" <| + group_builder.specify "Can expand Ranges" <| values_to_expand = [Range.new 10 12, Range.new 20 27 step=3, Range.new 30 31] table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]] expected = Table.new [["aaa", [1, 1, 2, 2, 2, 3]], ["bbb", [10, 11, 20, 23, 26, 30]], ["ccc", [5, 5, 6, 6, 6, 7]]] table.expand_to_rows "bbb" . should_equal expected - Test.specify "Can expand Date_Ranges" <| + group_builder.specify "Can expand Date_Ranges" <| range0 = (Date.new 2020 02 28).up_to (Date.new 2020 03 01) range1 = (Date.new 2020 10 28).up_to (Date.new 2020 11 16) . with_step Date_Period.Week range2 = (Date.new 2023 07 03).up_to (Date.new 2023 10 03) . 
with_step Date_Period.Month @@ -277,32 +291,32 @@ spec = expected = Table.new [["aaa", [1, 1, 2, 2, 2, 3, 3, 3]], ["bbb", values_expanded], ["ccc", [5, 5, 6, 6, 6, 7, 7, 7]]] table.expand_to_rows "bbb" . should_equal expected - Test.specify "Can expand mixed columns" <| + group_builder.specify "Can expand mixed columns" <| values_to_expand = [[10, 11], 22.up_to 26, (Date.new 2020 02 28).up_to (Date.new 2020 03 01)] values_expanded = [10, 11, 22, 23, 24, 25, Date.new 2020 02 28, Date.new 2020 02 29] table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]] expected = Table.new [["aaa", [1, 1, 2, 2, 2, 2, 3, 3]], ["bbb", values_expanded], ["ccc", [5, 5, 6, 6, 6, 6, 7, 7]]] table.expand_to_rows "bbb" . should_equal expected - Test.specify "Respects `at_least_one_row=True`" <| + group_builder.specify "Respects `at_least_one_row=True`" <| values_to_expand = [[10, 11], [], [30]] table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]] expected = Table.new [["aaa", [1, 1, 2, 3]], ["bbb", [10, 11, Nothing, 30]], ["ccc", [5, 5, 6, 7]]] table.expand_to_rows "bbb" at_least_one_row=True . should_equal expected - Test.specify "Respects `at_least_one_row=False`" <| + group_builder.specify "Respects `at_least_one_row=False`" <| values_to_expand = [[10, 11], [], [30]] table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]] expected = Table.new [["aaa", [1, 1, 3]], ["bbb", [10, 11, 30]], ["ccc", [5, 5, 7]]] table.expand_to_rows "bbb" . should_equal expected - Test.specify "Missing column" <| + group_builder.specify "Missing column" <| table = Table.new [["aaa", [1, 2, 3]], ["notbbb", [8, 8, 8]], ["ccc", [5, 6, 7]]] table.expand_to_rows "bbb" . 
should_fail_with No_Such_Column - Test.group "XML" <| - Test.specify "Can convert XML_Document and XML_Element to a table" <| - [library_document, library_root].map x-> + suite_builder.group "XML" group_builder-> + group_builder.specify "Can convert XML_Document and XML_Element to a table" <| + [data.library_document, data.library_root].map x-> t = x.to Table t.at "Name" . to_vector . should_equal ["Library"] t.at "@catalog" . to_vector . should_equal ["Fiction"] @@ -314,8 +328,8 @@ spec = t.at "Children" . at 0 . at 4 . name . should_equal "Magazine" t.column_names . should_not_contain "Value" - Test.specify "Can expand the Children column to rows" <| - t = library_root.to Table . expand_to_rows "Children" + group_builder.specify "Can expand the Children column to rows" <| + t = data.library_root.to Table . expand_to_rows "Children" t.at "Name" . to_vector . should_equal (Vector.fill 5 "Library") t.at "@catalog" . to_vector . should_equal (Vector.fill 5 "Fiction") t.at "@letter" . to_vector . should_equal (Vector.fill 5 "A") @@ -326,8 +340,8 @@ spec = t.at "Children" . at 4 . name . should_equal "Magazine" t.column_names . should_not_contain "Value" - Test.specify "Can expand the child nodes" <| - t = library_root.to Table . expand_to_rows "Children" . expand_column "Children" + group_builder.specify "Can expand the child nodes" <| + t = data.library_root.to Table . expand_to_rows "Children" . expand_column "Children" t.at "Name" . to_vector . should_equal (Vector.fill 5 "Library") t.at "@catalog" . to_vector . should_equal (Vector.fill 5 "Fiction") t.at "@letter" . to_vector . should_equal (Vector.fill 5 "A") @@ -338,7 +352,7 @@ spec = t.at "Children @series" . to_vector . map trim_if_text . should_equal [Nothing, Nothing, Nothing, Nothing, 'AutoCar'] t.column_names . 
should_not_contain "Children Children" - Test.specify "Can expand nested child nodes" <| + group_builder.specify "Can expand nested child nodes" <| xml_string = ''' @@ -354,15 +368,15 @@ spec = t = xml.to Table . expand_to_rows "Children" . expand_column "Children" . expand_to_rows "Children Children" . expand_column "Children Children" t.at "Children Children @id" . to_vector . should_equal ["1", "2", "3", "4"] - Test.specify "Converting a node without child nodes produces a Value column" <| - book = library_root . at 1 + group_builder.specify "Converting a node without child nodes produces a Value column" <| + book = data.library_root . at 1 t = book.to Table t.at "Name" . to_vector . should_equal ["Book"] t.at "@author" . to_vector . should_equal ["An Author"] t.column_names . should_not_contain "Children" t.at "Value" . to_vector . should_equal ["My Book"] - Test.specify "Converting a node without any child nodes does not produce Value or Children columns" <| + group_builder.specify "Converting a node without any child nodes does not produce Value or Children columns" <| xml = XML_Document.from_text '' t = xml.to Table t.at "Name" . to_vector . should_equal ["foo"] @@ -370,4 +384,8 @@ spec = t.column_names . should_not_contain "Children" t.column_names . 
should_not_contain "Value" -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/In_Memory/Table_Date_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Date_Spec.enso index 5689d240857b..5c8c07db9577 100644 --- a/test/Table_Tests/src/In_Memory/Table_Date_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Date_Spec.enso @@ -4,51 +4,72 @@ from Standard.Table import Table, Column, Delimited, Data_Formatter import Standard.Table.Data.Type.Value_Type.Value_Type from Standard.Table.Extensions.Table_Conversions import all -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all + from project.Util import all -spec = - c_number = ["Number", [71, 72, 73, 74, 75, 76, 77]] - c_party = ["Party", ["Conservative", "Conservative", "Labour", "Labour", "Conservative", "Conservative", "Conservative"]] - c_name = ["Title", ["Margaret Thatcher", "John Major", "Tony Blair", "Gordon Brown", "David Cameron", "Theresa May", "Boris Johnson"]] - c_from = ["From", [Date.new 1979 05 04, Date.new 1990 11 28, Date.new 1997 05 02, Date.new 2007 06 27, Date.new 2010 05 11, Date.new 2016 07 13, Date.new 2019 07 24]] - c_to = ["To", [Date.new 1990 11 28, Date.new 1997 05 02, Date.new 2007 06 27, Date.new 2010 05 11, Date.new 2016 07 13, Date.new 2019 07 24, Date.new 2022 07 07]] - expected = Table.new [c_number, c_party, c_name, c_from, c_to] - Test.group "File.read (Delimited) should work with Dates" <| +type Data + Value ~data + + c_number self = self.data.at 0 + c_party self = self.data.at 1 + c_name self = self.data.at 2 + c_from self = self.data.at 3 + c_to self = self.data.at 4 + expected self = self.data.at 5 + table self = self.data.at 6 + + setup = Data.Value <| + c_number = ["Number", [71, 72, 73, 74, 75, 76, 77]] + c_party = ["Party", ["Conservative", "Conservative", "Labour", "Labour", "Conservative", "Conservative", 
"Conservative"]] + c_name = ["Title", ["Margaret Thatcher", "John Major", "Tony Blair", "Gordon Brown", "David Cameron", "Theresa May", "Boris Johnson"]] + c_from = ["From", [Date.new 1979 05 04, Date.new 1990 11 28, Date.new 1997 05 02, Date.new 2007 06 27, Date.new 2010 05 11, Date.new 2016 07 13, Date.new 2019 07 24]] + c_to = ["To", [Date.new 1990 11 28, Date.new 1997 05 02, Date.new 2007 06 27, Date.new 2010 05 11, Date.new 2016 07 13, Date.new 2019 07 24, Date.new 2022 07 07]] + expected = Table.new [c_number, c_party, c_name, c_from, c_to] table = (enso_project.data / "prime_ministers.csv").read - Test.specify "should be able to read in a table with dates" <| - table.column_count.should_equal 5 - table.row_count.should_equal 7 - table.info.at "Column" . to_vector . should_equal ['Number','Party', 'Title', 'From', 'To'] - table.info.at "Value Type" . to_vector . should_equal [Value_Type.Integer, Value_Type.Char, Value_Type.Char, Value_Type.Date, Value_Type.Date] - - Test.specify "should be able to treat a single value as a Date" <| - from_column = table.at 'From' + [c_number, c_party, c_name, c_from, c_to, expected, table] + + +add_specs suite_builder = + data = Data.setup + + suite_builder.group "File.read (Delimited) should work with Dates" group_builder-> + group_builder.specify "should be able to read in a table with dates" <| + data.table.column_count.should_equal 5 + data.table.row_count.should_equal 7 + data.table.info.at "Column" . to_vector . should_equal ['Number','Party', 'Title', 'From', 'To'] + data.table.info.at "Value Type" . to_vector . should_equal [Value_Type.Integer, Value_Type.Char, Value_Type.Char, Value_Type.Date, Value_Type.Date] + + group_builder.specify "should be able to treat a single value as a Date" <| + from_column = data.table.at 'From' from_column.at 6 . year . should_equal 2019 from_column.at 6 . should_equal (Date.new 2019 7 24) - Test.specify "should be able to compare columns and table" <| - table.at 'Number' . 
should_equal (Column.from_vector c_number.first c_number.second) - table.at 'Party' . should_equal (Column.from_vector c_party.first c_party.second) - table.at 'Title' . should_equal (Column.from_vector c_name.first c_name.second) - table.at 'From' . should_equal (Column.from_vector c_from.first c_from.second) - table.at 'To' . should_equal (Column.from_vector c_to.first c_to.second) - table.should_equal expected + group_builder.specify "should be able to compare columns and data.table" <| + data.table.at 'Number' . should_equal (Column.from_vector data.c_number.first data.c_number.second) + data.table.at 'Party' . should_equal (Column.from_vector data.c_party.first data.c_party.second) + data.table.at 'Title' . should_equal (Column.from_vector data.c_name.first data.c_name.second) + data.table.at 'From' . should_equal (Column.from_vector data.c_from.first data.c_from.second) + data.table.at 'To' . should_equal (Column.from_vector data.c_to.first data.c_to.second) + data.table.should_equal data.expected - Test.group "Should be able to serialise a table with Dates to Text" <| - Test.specify "should serialise back to input" <| + suite_builder.group "Should be able to serialise a data.table with Dates to Text" group_builder-> + group_builder.specify "should serialise back to input" <| expected_text = normalize_lines <| (enso_project.data / "prime_ministers.csv").read_text - delimited = Text.from expected format=(Delimited "," line_endings=Line_Ending_Style.Unix) + delimited = Text.from data.expected format=(Delimited "," line_endings=Line_Ending_Style.Unix) delimited.should_equal expected_text - Test.specify "should serialise dates with format" <| - test_table = Table.new [c_from] + group_builder.specify "should serialise dates with format" <| + test_table = Table.new [data.c_from] expected_text = 'From\n04.05.1979\n28.11.1990\n02.05.1997\n27.06.2007\n11.05.2010\n13.07.2016\n24.07.2019\n' data_formatter = Data_Formatter.Value.with_datetime_formats 
date_formats=["dd.MM.yyyy"] delimited = Text.from test_table format=(Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix) delimited.should_equal expected_text -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/In_Memory/Table_Date_Time_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Date_Time_Spec.enso index 36ec7a883039..773a11691ddf 100644 --- a/test/Table_Tests/src/In_Memory/Table_Date_Time_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Date_Time_Spec.enso @@ -4,49 +4,65 @@ from Standard.Table import Table, Delimited, Column, Data_Formatter import Standard.Table.Data.Type.Value_Type.Value_Type from Standard.Table.Extensions.Table_Conversions import all -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all from project.Util import all -spec = - c_number = ["Serial number", ["2LMXK1", "2LMXK1", "JEMLP3", "JEMLP3", "BR83GP", "BR83GP"]] - c_type = ["Movement type", [101, 301, 101, 203, 101, 301]] - c_date = ["Posting date", [Date_Time.new 2015 1 5 9 0, Date_Time.new 2015 1 5 14 0, Date_Time.new 2015 1 6 9 0, Date_Time.new 2015 1 7 17 30, Date_Time.new 2011 1 5 9 0, Date_Time.new 2011 1 9 15 30]] - expected = Table.new [c_number, c_type, c_date] +type Data + Value ~data - Test.group "File.read (Delimited) should work with Date_Time" <| + c_number self = self.data.at 0 + c_type self = self.data.at 1 + c_date self = self.data.at 2 + expected self = self.data.at 3 + table self = self.data.at 4 + + setup = Data.Value <| + c_number = ["Serial number", ["2LMXK1", "2LMXK1", "JEMLP3", "JEMLP3", "BR83GP", "BR83GP"]] + c_type = ["Movement type", [101, 301, 101, 203, 101, 301]] + c_date = ["Posting date", [Date_Time.new 2015 1 5 9 0, Date_Time.new 2015 1 5 14 0, Date_Time.new 2015 1 6 9 0, Date_Time.new 2015 1 7 17 30, Date_Time.new 2011 1 5 9 0, Date_Time.new 2011 1 9 15 
30]] + expected = Table.new [c_number, c_type, c_date] table = (enso_project.data / "datetime_sample.csv").read - Test.specify "should be able to read in a table with dates" <| - table.column_count.should_equal 3 - table.info.at "Column" . to_vector . should_equal ['Serial number','Movement type', 'Posting date'] - table.info.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Date_Time] - table.row_count.should_equal 6 - - Test.specify "should be able to treat a single value as a Date_Time" <| - from_column = table.at 'Posting date' + [c_number, c_type, c_date, expected, table] + + +add_specs suite_builder = + data = Data.setup + suite_builder.group "File.read (Delimited) should work with Date_Time" group_builder-> + group_builder.specify "should be able to read in a table with dates" <| + data.table.column_count.should_equal 3 + data.table.info.at "Column" . to_vector . should_equal ['Serial number','Movement type', 'Posting date'] + data.table.info.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Date_Time] + data.table.row_count.should_equal 6 + + group_builder.specify "should be able to treat a single value as a Date_Time" <| + from_column = data.table.at 'Posting date' from_column.at 5 . year . should_equal 2011 from_column.at 5 . should_equal (Date_Time.new 2011 1 9 15 30) - Test.specify "should be able to compare columns and table" <| - table.at 'Serial number' . should_equal (Column.from_vector c_number.first c_number.second) - table.at 'Movement type' . should_equal (Column.from_vector c_type.first c_type.second) - table.at 'Posting date' . should_equal (Column.from_vector c_date.first c_date.second) - table.should_equal expected + group_builder.specify "should be able to compare columns and table" <| + data.table.at 'Serial number' . should_equal (Column.from_vector data.c_number.first data.c_number.second) + data.table.at 'Movement type' . 
should_equal (Column.from_vector data.c_type.first data.c_type.second) + data.table.at 'Posting date' . should_equal (Column.from_vector data.c_date.first data.c_date.second) + data.table.should_equal data.expected - Test.group "Should be able to serialise a table with DateTimes to Text" <| - Test.specify "should serialise back to input" <| + suite_builder.group "Should be able to serialise a table with DateTimes to Text" group_builder-> + group_builder.specify "should serialise back to input" <| expected_text = normalize_lines <| (enso_project.data / "datetime_sample_normalized_hours.csv").read_text data_formatter = Data_Formatter.Value . with_datetime_formats datetime_formats=["yyyy-MM-dd HH:mm:ss"] - delimited = Text.from expected format=(Delimited "," line_endings=Line_Ending_Style.Unix value_formatter=data_formatter) + delimited = Text.from data.expected format=(Delimited "," line_endings=Line_Ending_Style.Unix value_formatter=data_formatter) delimited.should_equal expected_text - Test.specify "should serialise dates with format" <| - test_table = Table.new [c_date] + group_builder.specify "should serialise dates with format" <| + test_table = Table.new [data.c_date] expected_text = 'Posting date\n05.01.2015 09-00\n05.01.2015 14-00\n06.01.2015 09-00\n07.01.2015 17-30\n05.01.2011 09-00\n09.01.2011 15-30\n' data_formatter = Data_Formatter.Value . 
with_datetime_formats datetime_formats=["dd.MM.yyyy HH-mm"] delimited = Text.from test_table format=(Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix) delimited.should_equal expected_text -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/In_Memory/Table_Format_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Format_Spec.enso index d13bf14ab02f..8ddd34099fb3 100644 --- a/test/Table_Tests/src/In_Memory/Table_Format_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Format_Spec.enso @@ -6,211 +6,217 @@ import Standard.Base.Errors.Time_Error.Time_Error from Standard.Table import Column, Table from Standard.Table.Errors import No_Input_Columns_Selected, Missing_Input_Columns -import Standard.Test.Extensions -from Standard.Test import Problems, Test, Test_Suite +from Standard.Test_New import all from project.Util import all -spec = - column0 = Column.from_vector "dates" [Date.new 2020 12 21, Date.new 2023 4 25] - column1 = Column.from_vector "datetimes" [Date_Time.new 2020 01 10 3 4 5 zone=Time_Zone.utc, Date_Time.new 2020 08 08 5 6 7 zone=Time_Zone.utc] - column2 = Column.from_vector "times" [Time_Of_Day.new 1 2 3, Time_Of_Day.new 10 30 35] - column3 = Column.from_vector "bools" [True, False] - column4 = Column.from_vector "ints" [100000000, 2222] - column5 = Column.from_vector "floats" [200000000.9, 4444.3] - column6 = Column.from_vector "strings" ["a", "b"] - column7 = Column.from_vector "bool_formats" ["t|f", "T|F"] - column8 = Column.from_vector "numeric_formats" ["#,##0.00", "#,##0"] - column9 = Column.from_vector "date_formats" ["yyyyMMdd", "ddMMyyyy"] - column10 = Column.from_vector "time_formats" ["hh:mm:ss", "ss:mm:hh"] - table = Table.new [column0, column1, column2, column3, column4, column5, column6, column7, column8, column9, column10] +type Data + Value ~table + + setup = + column0 = Column.from_vector "dates" 
[Date.new 2020 12 21, Date.new 2023 4 25] + column1 = Column.from_vector "datetimes" [Date_Time.new 2020 01 10 3 4 5 zone=Time_Zone.utc, Date_Time.new 2020 08 08 5 6 7 zone=Time_Zone.utc] + column2 = Column.from_vector "times" [Time_Of_Day.new 1 2 3, Time_Of_Day.new 10 30 35] + column3 = Column.from_vector "bools" [True, False] + column4 = Column.from_vector "ints" [100000000, 2222] + column5 = Column.from_vector "floats" [200000000.9, 4444.3] + column6 = Column.from_vector "strings" ["a", "b"] + column7 = Column.from_vector "bool_formats" ["t|f", "T|F"] + column8 = Column.from_vector "numeric_formats" ["#,##0.00", "#,##0"] + column9 = Column.from_vector "date_formats" ["yyyyMMdd", "ddMMyyyy"] + column10 = Column.from_vector "time_formats" ["hh:mm:ss", "ss:mm:hh"] + table = Table.new [column0, column1, column2, column3, column4, column5, column6, column7, column8, column9, column10] + Data.Value table +add_specs suite_builder = check_unchanged t0 t1 columns = columns.map c-> Test.with_clue c+": " <| (t0.at c) . should_equal (t1.at c) - Test.group "Dates" <| - Test.specify "Date and Date_Time, with format string" <| + data = Data.setup + + suite_builder.group "Dates" group_builder-> + group_builder.specify "Date and Date_Time, with format string" <| expected_dates = Column.from_vector "dates" ["20201221", "20230425"] expected_datetimes = Column.from_vector "datetimes" ["20200110", "20200808"] - actual = table.format ["dates", "datetimes"] "yyyyMMdd" - actual.column_names . should_equal table.column_names + actual = data.table.format ["dates", "datetimes"] "yyyyMMdd" + actual.column_names . should_equal data.table.column_names actual.at "dates" . should_equal expected_dates actual.at "datetimes" . 
should_equal expected_datetimes - check_unchanged table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Date_Time and Time_Of_Day, with format string" <| + group_builder.specify "Date_Time and Time_Of_Day, with format string" <| expected_datetimes = Column.from_vector "datetimes" ["03:04:05", "05:06:07"] expected_times = Column.from_vector "times" ["01:02:03", "10:30:35"] - actual = table.format ["datetimes", "times"] "hh:mm:ss" - actual.column_names . should_equal table.column_names + actual = data.table.format ["datetimes", "times"] "hh:mm:ss" + actual.column_names . should_equal data.table.column_names actual.at "datetimes" . should_equal expected_datetimes actual.at "times" . should_equal expected_times - check_unchanged table actual ["dates", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["dates", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Date and Date_Time, with Date_Time_Formatter" <| + group_builder.specify "Date and Date_Time, with Date_Time_Formatter" <| expected_dates = Column.from_vector "dates" ["20201221", "20230425"] expected_datetimes = Column.from_vector "datetimes" ["20200110", "20200808"] - actual = table.format ["dates", "datetimes"] (Date_Time_Formatter.from "yyyyMMdd") - actual.column_names . should_equal table.column_names + actual = data.table.format ["dates", "datetimes"] (Date_Time_Formatter.from "yyyyMMdd") + actual.column_names . should_equal data.table.column_names actual.at "dates" . should_equal expected_dates actual.at "datetimes" . 
should_equal expected_datetimes - check_unchanged table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Date_Time and Time_Of_Day, Date_Time_Formatter" <| + group_builder.specify "Date_Time and Time_Of_Day, Date_Time_Formatter" <| expected_datetimes = Column.from_vector "datetimes" ["03:04:05", "05:06:07"] expected_times = Column.from_vector "times" ["01:02:03", "10:30:35"] - actual = table.format ["datetimes", "times"] (Date_Time_Formatter.from "hh:mm:ss") - actual.column_names . should_equal table.column_names + actual = data.table.format ["datetimes", "times"] (Date_Time_Formatter.from "hh:mm:ss") + actual.column_names . should_equal data.table.column_names actual.at "datetimes" . should_equal expected_datetimes actual.at "times" . should_equal expected_times - check_unchanged table actual ["dates", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["dates", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Date and Date_Time, with format column" <| + group_builder.specify "Date and Date_Time, with format column" <| expected_dates = Column.from_vector "dates" ["20201221", "25042023"] expected_datetimes = Column.from_vector "datetimes" ["20200110", "08082020"] - actual = table.format ["dates", "datetimes"] (table.at "date_formats") - actual.column_names . should_equal table.column_names + actual = data.table.format ["dates", "datetimes"] (data.table.at "date_formats") + actual.column_names . should_equal data.table.column_names actual.at "dates" . should_equal expected_dates actual.at "datetimes" . 
should_equal expected_datetimes - check_unchanged table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Date_Time and Time_Of_Day, with format column" <| + group_builder.specify "Date_Time and Time_Of_Day, with format column" <| expected_datetimes = Column.from_vector "datetimes" ["03:04:05", "07:06:05"] expected_times = Column.from_vector "times" ["01:02:03", "35:30:10"] - actual = table.format ["datetimes", "times"] (table.at "time_formats") - actual.column_names . should_equal table.column_names + actual = data.table.format ["datetimes", "times"] (data.table.at "time_formats") + actual.column_names . should_equal data.table.column_names actual.at "datetimes" . should_equal expected_datetimes actual.at "times" . should_equal expected_times - check_unchanged table actual ["dates", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["dates", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Date and Date_Time, with empty format string" <| + group_builder.specify "Date and Date_Time, with empty format string" <| expected_dates = Column.from_vector "dates" ["2020-12-21", "2023-04-25"] expected_datetimes = Column.from_vector "datetimes" ['2020-01-10 03:04:05Z[UTC]', '2020-08-08 05:06:07Z[UTC]'] expected_times = Column.from_vector "times" ["01:02:03", "10:30:35"] - actual = table.format ["dates", "datetimes", "times"] "" - actual.column_names . should_equal table.column_names + actual = data.table.format ["dates", "datetimes", "times"] "" + actual.column_names . should_equal data.table.column_names actual.at "dates" . should_equal expected_dates actual.at "datetimes" . 
should_equal expected_datetimes actual.at "times" . should_equal expected_times - check_unchanged table actual ["bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Date and Date_Time, with Nothing format" <| + group_builder.specify "Date and Date_Time, with Nothing format" <| expected_dates = Column.from_vector "dates" ["2020-12-21", "2023-04-25"] expected_datetimes = Column.from_vector "datetimes" ['2020-01-10 03:04:05Z[UTC]', '2020-08-08 05:06:07Z[UTC]'] expected_times = Column.from_vector "times" ["01:02:03", "10:30:35"] - actual = table.format ["dates", "datetimes", "times"] Nothing - actual.column_names . should_equal table.column_names + actual = data.table.format ["dates", "datetimes", "times"] Nothing + actual.column_names . should_equal data.table.column_names actual.at "dates" . should_equal expected_dates actual.at "datetimes" . should_equal expected_datetimes actual.at "times" . should_equal expected_times - check_unchanged table actual ["bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.group "Boolean" <| - Test.specify "Booleans, with format string" <| + suite_builder.group "Boolean" group_builder-> + group_builder.specify "Booleans, with format string" <| expected_bools = Column.from_vector "bools" ["tt", "ff"] - actual = table.format ["bools"] "tt|ff" - actual.column_names . should_equal table.column_names + actual = data.table.format ["bools"] "tt|ff" + actual.column_names . should_equal data.table.column_names actual.at "bools" . 
should_equal expected_bools - check_unchanged table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Booleans, with column" <| + group_builder.specify "Booleans, with column" <| expected_bools = Column.from_vector "bools" ["t", "F"] - actual = table.format ["bools"] (table.at "bool_formats") - actual.column_names . should_equal table.column_names + actual = data.table.format ["bools"] (data.table.at "bool_formats") + actual.column_names . should_equal data.table.column_names actual.at "bools" . should_equal expected_bools - check_unchanged table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Booleans, with format empty format string" <| + group_builder.specify "Booleans, with format empty format string" <| expected_bools = Column.from_vector "bools" ["True", "False"] - actual = table.format ["bools"] "" - actual.column_names . should_equal table.column_names + actual = data.table.format ["bools"] "" + actual.column_names . should_equal data.table.column_names actual.at "bools" . 
should_equal expected_bools - check_unchanged table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Booleans, with format Nothing format" <| + group_builder.specify "Booleans, with format Nothing format" <| expected_bools = Column.from_vector "bools" ["True", "False"] - actual = table.format ["bools"] Nothing - actual.column_names . should_equal table.column_names + actual = data.table.format ["bools"] Nothing + actual.column_names . should_equal data.table.column_names actual.at "bools" . should_equal expected_bools - check_unchanged table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.group "Numeric" <| - Test.specify "Ints and floats, with format string" <| + suite_builder.group "Numeric" group_builder-> + group_builder.specify "Ints and floats, with format string" <| expected_ints = Column.from_vector "ints" ["100,000,000.00", "2,222.00"] expected_floats = Column.from_vector "floats" ["200,000,000.90", "4,444.30"] - actual = table.format ["ints", "floats"] "#,##0.00" - actual.column_names . should_equal table.column_names + actual = data.table.format ["ints", "floats"] "#,##0.00" + actual.column_names . should_equal data.table.column_names actual.at "ints" . should_equal expected_ints actual.at "floats" . 
should_equal expected_floats - check_unchanged table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Ints and floats, with column" <| + group_builder.specify "Ints and floats, with column" <| expected_ints = Column.from_vector "ints" ["100,000,000.00", "2,222"] expected_floats = Column.from_vector "floats" ["200,000,000.90", "4,444"] - actual = table.format ["ints", "floats"] (table.at "numeric_formats") - actual.column_names . should_equal table.column_names + actual = data.table.format ["ints", "floats"] (data.table.at "numeric_formats") + actual.column_names . should_equal data.table.column_names actual.at "ints" . should_equal expected_ints actual.at "floats" . should_equal expected_floats - check_unchanged table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Ints and floats, with empty format string" <| + group_builder.specify "Ints and floats, with empty format string" <| expected_ints = Column.from_vector "ints" ["100000000", "2222"] expected_floats = Column.from_vector "floats" ['2.000000009E8', '4444.3'] - actual = table.format ["ints", "floats"] "" - actual.column_names . should_equal table.column_names + actual = data.table.format ["ints", "floats"] "" + actual.column_names . should_equal data.table.column_names actual.at "ints" . should_equal expected_ints actual.at "floats" . 
should_equal expected_floats - check_unchanged table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Ints and floats, with Nothing format" <| + group_builder.specify "Ints and floats, with Nothing format" <| expected_ints = Column.from_vector "ints" ["100000000", "2222"] expected_floats = Column.from_vector "floats" ['2.000000009E8', '4444.3'] - actual = table.format ["ints", "floats"] Nothing - actual.column_names . should_equal table.column_names + actual = data.table.format ["ints", "floats"] Nothing + actual.column_names . should_equal data.table.column_names actual.at "ints" . should_equal expected_ints actual.at "floats" . should_equal expected_floats - check_unchanged table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.group "All types" <| - Test.specify "Format all columns" <| + suite_builder.group "All types" group_builder-> + group_builder.specify "Format all columns" <| expected_dates = Column.from_vector "dates" ["2020-12-21", "2023-04-25"] expected_datetimes = Column.from_vector "datetimes" ['2020-01-10 03:04:05Z[UTC]', '2020-08-08 05:06:07Z[UTC]'] expected_times = Column.from_vector "times" ["01:02:03", "10:30:35"] expected_ints = Column.from_vector "ints" ["100000000", "2222"] expected_floats = Column.from_vector "floats" ['2.000000009E8', '4444.3'] expected_bools = Column.from_vector "bools" ["True", "False"] - actual = table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "" - actual.column_names . 
should_equal table.column_names + actual = data.table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "" + actual.column_names . should_equal data.table.column_names actual.at "dates" . should_equal expected_dates actual.at "datetimes" . should_equal expected_datetimes actual.at "times" . should_equal expected_times actual.at "bools" . should_equal expected_bools actual.at "ints" . should_equal expected_ints actual.at "floats" . should_equal expected_floats - check_unchanged table actual ["strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.group "Column selectors" <| - Test.specify "Date and Date_Time, with regex column selector" <| + suite_builder.group "Column selectors" group_builder-> + group_builder.specify "Date and Date_Time, with regex column selector" <| expected_dates = Column.from_vector "dates" ["20201221", "20230425"] expected_datetimes = Column.from_vector "datetimes" ["20200110", "20200808"] - actual = table.format "date(|time)s".to_regex "yyyyMMdd" - actual.column_names . should_equal table.column_names + actual = data.table.format "date(|time)s".to_regex "yyyyMMdd" + actual.column_names . should_equal data.table.column_names actual.at "dates" . should_equal expected_dates actual.at "datetimes" . 
should_equal expected_datetimes - check_unchanged table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.specify "Date and Date_Time, with index column selector" <| + group_builder.specify "Date and Date_Time, with index column selector" <| expected_dates = Column.from_vector "dates" ["20201221", "20230425"] expected_datetimes = Column.from_vector "datetimes" ["20200110", "20200808"] - actual = table.format [0, 1] "yyyyMMdd" - actual.column_names . should_equal table.column_names + actual = data.table.format [0, 1] "yyyyMMdd" + actual.column_names . should_equal data.table.column_names actual.at "dates" . should_equal expected_dates actual.at "datetimes" . should_equal expected_datetimes - check_unchanged table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] + check_unchanged data.table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"] - Test.group "Locales" <| - Test.specify "Date with locale" <| + suite_builder.group "Locales" group_builder-> + group_builder.specify "Date with locale" <| input = Table.new [Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25]] expected_default = Table.new [Column.from_vector "values" ["21. June 2020", "25. April 2023"]] expected_gb = Table.new [Column.from_vector "values" ["21. June 2020", "25. April 2023"]] @@ -220,7 +226,7 @@ spec = input.format ["values"] (Date_Time_Formatter.from "d. MMMM yyyy" Locale.uk) . should_equal expected_gb input.format ["values"] (Date_Time_Formatter.from "d. MMMM yyyy" Locale.france) . 
should_equal expected_fr - Test.specify "Column with locale" <| + group_builder.specify "Column with locale" <| input = Column.from_vector "values" [100000000, 2222, 3] formats = Column.from_vector "formats" ["#,##0.00", "0.00", "0"] output = Column.from_vector "values" ["100 000 000,00", "2222,00", "3"] @@ -228,23 +234,27 @@ spec = expected = Table.new [output, formats] table.format ["values"] (table.at "formats") locale=(Locale.new "fr") . should_equal expected - Test.group "Errors" <| - Test.specify "Bad formats" <| - table.format ["dates", "datetimes"] "jjjjjj" . should_fail_with Date_Time_Format_Parse_Error - table.format ["ints", "floats"] "#.##0,00" . should_fail_with Illegal_Argument - table.format ["bools"] "||||" . should_fail_with Illegal_Argument + suite_builder.group "Errors" group_builder-> + group_builder.specify "Bad formats" <| + data.table.format ["dates", "datetimes"] "jjjjjj" . should_fail_with Date_Time_Format_Parse_Error + data.table.format ["ints", "floats"] "#.##0,00" . should_fail_with Illegal_Argument + data.table.format ["bools"] "||||" . should_fail_with Illegal_Argument - Test.specify "Format all columns with a type-specific format" <| - table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "yyyyMMdd" . should_fail_with Time_Error - table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "#,##0.00" . should_fail_with Illegal_Argument - table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "t|f" . should_fail_with Time_Error + group_builder.specify "Format all columns with a type-specific format" <| + data.table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "yyyyMMdd" . should_fail_with Time_Error + data.table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "#,##0.00" . should_fail_with Illegal_Argument + data.table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "t|f" . 
should_fail_with Time_Error - Test.specify "Missing column" <| - table.format ["abc", "dates"] . should_fail_with Missing_Input_Columns + group_builder.specify "Missing column" <| + data.table.format ["abc", "dates"] . should_fail_with Missing_Input_Columns Problems.expect_only_warning Missing_Input_Columns <| - table.format ["abc", "dates"] error_on_missing_columns=False . should_be_a Table + data.table.format ["abc", "dates"] error_on_missing_columns=False . should_be_a Table + + group_builder.specify "No columns selected" <| + data.table.format [] . should_fail_with No_Input_Columns_Selected - Test.specify "No columns selected" <| - table.format [] . should_fail_with No_Input_Columns_Selected +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter group_filter="Errors" -main = Test_Suite.run_main spec diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso index 1de5b986bcc0..118093e382fc 100644 --- a/test/Table_Tests/src/In_Memory/Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso @@ -15,8 +15,7 @@ import Standard.Database.Extensions.Upload_Database_Table import Standard.Database.Extensions.Upload_In_Memory_Table from Standard.Database import Database, SQLite, In_Memory -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all from project.Util import all @@ -41,25 +40,32 @@ type My_Comparator Comparable.from (_:My) = My_Comparator -spec = - make_varied_type_table = - strs = ["strs", ["a", "b", "c", Nothing]] - ints = ["ints", [Nothing, 1, 2, 4]] - doubles = ["doubles", [0.0, 1.5, Nothing, 2.0]] - doubles_and_ints = ["doubles_and_ints", [0, 1.5, Nothing, 2]] - custom_objects = ["custom_objects", [My.Data 1 2, My.Data 3 4, Nothing, Nothing]] - dates = ["dates", [Nothing, Date.new 2000, Date.new 2022 8 20, Date.new 1999 1 1]] - times = ["times", [Time_Of_Day.new 18 00, Time_Of_Day.new 1 2 34, 
Nothing, Time_Of_Day.new]] - datetimes = ["datetimes", [Date_Time.new 2000, Date_Time.new 1999 1 2 3 4 5, Nothing, Date_Time.new 2022 8 27 11 22 25]] - mixed = ["mixed", [1, "a", Nothing, Date.new 2022 8 27]] - mixed_dates = ["mixed_dates", [Date.new 1999 1 2, Date_Time.new 1999 1 2 3 40, Date.new 1999 1 2, Date_Time.new 1999 1 2 3 40]] - just_nulls = ["just_nulls", [Nothing, Nothing, Nothing, Nothing]] - - Table.new [strs, ints, doubles, doubles_and_ints, custom_objects, dates, times, datetimes, mixed, mixed_dates, just_nulls] - varied_type_table = make_varied_type_table - - Test.group "Construction" <| - Test.specify "should allow creating a table from rows" <| + +type Data + Value ~varied_type_table + + setup = + make_varied_type_table = + strs = ["strs", ["a", "b", "c", Nothing]] + ints = ["ints", [Nothing, 1, 2, 4]] + doubles = ["doubles", [0.0, 1.5, Nothing, 2.0]] + doubles_and_ints = ["doubles_and_ints", [0, 1.5, Nothing, 2]] + custom_objects = ["custom_objects", [My.Data 1 2, My.Data 3 4, Nothing, Nothing]] + dates = ["dates", [Nothing, Date.new 2000, Date.new 2022 8 20, Date.new 1999 1 1]] + times = ["times", [Time_Of_Day.new 18 00, Time_Of_Day.new 1 2 34, Nothing, Time_Of_Day.new]] + datetimes = ["datetimes", [Date_Time.new 2000, Date_Time.new 1999 1 2 3 4 5, Nothing, Date_Time.new 2022 8 27 11 22 25]] + mixed = ["mixed", [1, "a", Nothing, Date.new 2022 8 27]] + mixed_dates = ["mixed_dates", [Date.new 1999 1 2, Date_Time.new 1999 1 2 3 40, Date.new 1999 1 2, Date_Time.new 1999 1 2 3 40]] + just_nulls = ["just_nulls", [Nothing, Nothing, Nothing, Nothing]] + + Table.new [strs, ints, doubles, doubles_and_ints, custom_objects, dates, times, datetimes, mixed, mixed_dates, just_nulls] + Data.Value make_varied_type_table + +add_specs suite_builder = + suite_builder.group "Construction" group_builder-> + data = Data.setup + + group_builder.specify "should allow creating a table from rows" <| header = ['foo', 'bar'] rows = [[1, False], [2, True], [3, False]] r = 
Table.from_rows header rows @@ -72,7 +78,7 @@ spec = r2.at "foo" . to_vector . should_equal [] r2.at "bar" . to_vector . should_equal [] - Test.specify "should allow creating a table from columns" <| + group_builder.specify "should allow creating a table from columns" <| r = Table.new [["foo", [1, 2, 3]], ["bar", [False, True, False]]] r.row_count.should_equal 3 r.at "foo" . to_vector . should_equal [1, 2, 3] @@ -83,7 +89,7 @@ spec = r2.at "foo" . to_vector . should_equal [] r2.at "bar" . to_vector . should_equal [] - Test.specify "should handle error scenarios gracefully" <| + group_builder.specify "should handle error scenarios gracefully" <| Table.new [["X", [1,2,3]], ["Y", [4]]] . should_fail_with Illegal_Argument Table.new [["X", [1]], ["X", [2]]] . should_fail_with Illegal_Argument Table.new [["X", [1]], ["Y", [2], "Z"]] . should_fail_with Illegal_Argument @@ -98,25 +104,25 @@ spec = Table.new [["X", [1,2,3]], ["", [4,5,6]]] . should_fail_with Invalid_Column_Names - Test.specify "should be internally guarded against creating a table without columns" <| + group_builder.specify "should be internally guarded against creating a table without columns" <| Test.expect_panic_with (Java_Table.new []) IllegalArgumentException - Test.specify "should correctly infer storage types" <| - varied_type_table.at "strs" . value_type . should_equal Value_Type.Char - varied_type_table.at "ints" . value_type . should_equal Value_Type.Integer - varied_type_table.at "doubles" . value_type . should_equal Value_Type.Float - varied_type_table.at "doubles_and_ints" . value_type . should_equal Value_Type.Float - varied_type_table.at "custom_objects" . value_type . should_equal Value_Type.Mixed - varied_type_table.at "dates" . value_type . should_equal Value_Type.Date - varied_type_table.at "times" . value_type . should_equal Value_Type.Time - varied_type_table.at "datetimes" . value_type . should_equal Value_Type.Date_Time - varied_type_table.at "mixed" . value_type . 
should_equal Value_Type.Mixed - varied_type_table.at "mixed_dates" . value_type . should_equal Value_Type.Mixed - varied_type_table.at "just_nulls" . value_type . should_equal Value_Type.Mixed + group_builder.specify "should correctly infer storage types" <| + data.varied_type_table.at "strs" . value_type . should_equal Value_Type.Char + data.varied_type_table.at "ints" . value_type . should_equal Value_Type.Integer + data.varied_type_table.at "doubles" . value_type . should_equal Value_Type.Float + data.varied_type_table.at "doubles_and_ints" . value_type . should_equal Value_Type.Float + data.varied_type_table.at "custom_objects" . value_type . should_equal Value_Type.Mixed + data.varied_type_table.at "dates" . value_type . should_equal Value_Type.Date + data.varied_type_table.at "times" . value_type . should_equal Value_Type.Time + data.varied_type_table.at "datetimes" . value_type . should_equal Value_Type.Date_Time + data.varied_type_table.at "mixed" . value_type . should_equal Value_Type.Mixed + data.varied_type_table.at "mixed_dates" . value_type . should_equal Value_Type.Mixed + data.varied_type_table.at "just_nulls" . value_type . should_equal Value_Type.Mixed pending_python_missing = if Polyglot.is_language_installed "python" . not then "Can't run Python tests, Python is not installed." - Test.specify "should work with polyglot values coming from Python" pending=pending_python_missing <| + group_builder.specify "should work with polyglot values coming from Python" pending=pending_python_missing <| enso_dates = ["enso_dates", [Date.new 2022 8 27, Date.new 1999 1 1]] py_dates = ["py_dates", [py_make_date 2022 8 27, py_make_date 1999 1 1]] py_objects = ["py_objects", [py_make_object "a" "b", py_make_object "foo" "bar"]] @@ -126,7 +132,7 @@ spec = (table.at "enso_dates" == table.at "py_dates").to_vector . 
should_equal [True, True] - Test.specify "should work with polyglot values coming from JS" <| + group_builder.specify "should work with polyglot values coming from JS" <| enso_dates = ["enso_dates", [Date.new 2022 8 27, Date.new 1999 1 1]] js_dates = ["js_dates", [js_make_date 2022 8 27, js_make_date 1999 1 1]] js_objects = ["js_objects", [js_make_object "a" "b", js_make_object "foo" "bar"]] @@ -141,14 +147,14 @@ spec = (js_converted_dates == table.at "enso_dates").to_vector . should_equal [True, True] (enso_date_times == table.at "js_dates").to_vector . should_equal [True, True] - Test.specify "should work with a Text value split into lines" <| + group_builder.specify "should work with a Text value split into lines" <| ## This tests verifies an issue with passing through a `List` to the table. words = 'The\nquick\nbrown\nfox\njumps\nover\nthe\nlazy\ndog'.lines table = Table.new [["words", words]] table.at "words" . value_type . should_equal Value_Type.Char table.at "words" . to_vector . should_equal words - Test.specify "should handle Unicode normalization when accessing table columns" <| + group_builder.specify "should handle Unicode normalization when accessing table columns" <| col1 = ['s\u0301ciana', [1, 2, 3]] col2 = ['café', [4, 5, 6]] t = Table.new [col1, col2] @@ -162,8 +168,8 @@ spec = r2 = Table.new [['ściana', [1, 2, 3]], ['s\u0301ciana', [4, 5, 6]]] r2.should_fail_with Illegal_Argument - Test.group "Vector conversion" <| - Test.specify "should allow converting columns to valid vectors" <| + suite_builder.group "Vector conversion" group_builder-> + group_builder.specify "should allow converting columns to valid vectors" <| col_1 = Column.from_vector 'x' [1, 2, 3] col_1.to_vector.reduce (+) . should_equal 6 @@ -173,8 +179,8 @@ spec = col_3 = Column.from_vector 'z' [False, True, False] col_3.to_vector.map .not . 
should_equal [True, False, True] - Test.group "Mapping Operations" <| - Test.specify "should allow mapping a function over a column" <| + suite_builder.group "Mapping Operations" group_builder-> + group_builder.specify "should allow mapping a function over a column" <| c_str = Column.from_vector 'x' ['a', 'b', Nothing, 'b'] c_str.map (+ "x") . to_vector . should_equal ['ax', 'bx', Nothing, 'bx'] c_int = Column.from_vector 'x' [1, 2, 1, 5, 1] @@ -186,13 +192,13 @@ spec = c_any = Column.from_vector 'x' [My.Data 1 6, My.Data 6 3, My.Data 2 5, My.Data 3 4, My.Data 200 300] c_any.map (_.frobnicate) . to_vector . should_equal [My.Data 6 1, My.Data 3 6, My.Data 5 2, My.Data 4 3, My.Data 300 200] - Test.specify "should correctly handle storage of results" <| + group_builder.specify "should correctly handle storage of results" <| c_int = Column.from_vector 'year' [2022, 2000, 1999] r = c_int . map Date_Time.new r.to_vector . should_equal [Date_Time.new 2022, Date_Time.new 2000, Date_Time.new 1999] r.value_type . should_equal Value_Type.Date_Time - Test.specify "should allow zipping columns with a custom function" <| + group_builder.specify "should allow zipping columns with a custom function" <| b = Column.from_vector 'w' [6.3, 3.1, 5.2, 4.6, 8.0] a = Column.from_vector 'z' ['foo', 'bar', 'baz', 'spam', 'eggs'] c = a.zip b x-> y-> x + y.to_text @@ -211,7 +217,7 @@ spec = r2.to_vector . should_equal [Time_Of_Day.new 12 0, Time_Of_Day.new 13 30, Time_Of_Day.new 0 45] r2.value_type . 
should_equal Value_Type.Time - Test.specify "should handle vectorized equality and fall back on non-vectorized if needed" <| + group_builder.specify "should handle vectorized equality and fall back on non-vectorized if needed" <| c_str = Column.from_vector 'x' ['a', 'b', Nothing, 'b'] (c_str == 'b').to_vector.should_equal [False, True, Nothing, True] c_int = Column.from_vector 'x' [1, 2, 1, 5, 1] @@ -223,7 +229,7 @@ spec = c_any = Column.from_vector 'x' [My.Data 1 6, My.Data 6 3, My.Data 2 5, My.Data 3 4, My.Data 200 300] (c_any == My.Data 7 0).to_vector.should_equal [True, False, True, True, False] - Test.specify "should switch between maps and zips based on argument type" <| + group_builder.specify "should switch between maps and zips based on argument type" <| a = Column.from_vector 'x' [0, 1, 7, 3, 6] b = Column.from_vector 'w' [6.3, 3.1, 5.2, Nothing, 8] (a + 3.2).to_vector.should_equal [3.2, 4.2, 10.2, 6.2, 9.2] @@ -235,7 +241,7 @@ spec = both = gt_const && gt_b both.to_vector.should_equal [False, False, True, False, False] - Test.specify "should handle Text operations" <| + group_builder.specify "should handle Text operations" <| a = Column.from_vector 'a' ["abab", "abc", Nothing, "bca", "acca"] nils = [Nothing, Nothing, Nothing, Nothing, Nothing] @@ -253,7 +259,7 @@ spec = a.contains c . to_vector . should_equal [True, True, Nothing, Nothing, True] a.contains Nothing . to_vector . should_equal nils - Test.specify "should take Unicode normalization into account in Text operations" <| + group_builder.specify "should take Unicode normalization into account in Text operations" <| x = Column.from_vector 'x' ['s', 'ś', 's\u0301'] y = Column.from_vector 'y' ['s\u0301', 's\u0301', 'ś'] @@ -275,35 +281,35 @@ spec = z.contains 'ś' . to_vector . should_equal [True, True, False, True, True] z.contains 's\u0301' . to_vector . 
should_equal [True, True, False, True, True] - Test.group "Masking Tables" <| - Test.specify "should allow selecting table rows based on a boolean column" <| + suite_builder.group "Masking Tables" group_builder-> + group_builder.specify "should allow selecting table rows based on a boolean column" <| df = (enso_project.data / "simple_empty.csv").read r = df.filter (Column.from_vector 'x' [True, False, False, True]) r.at "a" . to_vector . should_equal ["1", "10"] r.at "b" . to_vector . should_equal [2, 11] r.at "c" . to_vector . should_equal [Nothing, 12] - Test.specify "should treat NA values in the mask as false and extend the mask with NAs" <| + group_builder.specify "should treat NA values in the mask as false and extend the mask with NAs" <| df = (enso_project.data / "simple_empty.csv").read r = df.filter (Column.from_vector 'x' [Nothing, True, False]) r.at "a" . to_vector . should_equal ["4"] r.at "b" . to_vector . should_equal [Nothing] r.at "c" . to_vector . should_equal [6] - Test.specify "should work correctly if a mask is bigger than the table itself" <| + group_builder.specify "should work correctly if a mask is bigger than the table itself" <| df = (enso_project.data / "simple_empty.csv").read r = df.filter (Column.from_vector 'x' [True, False, False, False, True]) r.at "a" . to_vector . should_equal ["1"] r.at "b" . to_vector . should_equal [2] r.at "c" . to_vector . should_equal [Nothing] - Test.group "Counting Values" <| - Test.specify "should count missing and non-missing values" <| + suite_builder.group "Counting Values" group_builder-> + group_builder.specify "should count missing and non-missing values" <| col = Column.from_vector 'x' [1, Nothing, 2] col.length . should_equal 3 col.count . should_equal 2 col.count_nothing . 
should_equal 1 - Test.group "Dropping Missing Values" <| - Test.specify "should correctly handle NaNs with mixed type columns" <| + suite_builder.group "Dropping Missing Values" group_builder-> + group_builder.specify "should correctly handle NaNs with mixed type columns" <| t = Table.new [["X", [1, 2, 3, 4, 5]], ["Y", ["A", "", Nothing, Number.nan, 0]]] t1 = t.filter_blank_rows when=Blank_Selector.Any_Cell treat_nans_as_blank=False t1.at "X" . to_vector . should_equal [1, 4, 5] @@ -322,8 +328,8 @@ spec = t4 = t3.remove_blank_columns treat_nans_as_blank=True t4.columns . map .name . should_equal ["X"] - Test.group "Info" <| - Test.specify "should return Table information" <| + suite_builder.group "Info" group_builder-> + group_builder.specify "should return Table information" <| a = ["strs", ["a", "b", Nothing, "a"]] b = ["ints", [1, 2, Nothing, Nothing]] c = ["objs", [1, "a", "c", Any]] @@ -333,15 +339,15 @@ spec = i.at "Items Count" . to_vector . should_equal [3, 2, 4] i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Mixed] - Test.group "Sorting Tables" <| - Test.specify 'should respect defined comparison operations for custom types' <| + suite_builder.group "Sorting Tables" group_builder-> + group_builder.specify 'should respect defined comparison operations for custom types' <| c_1 = ['id', [1, 2, 3, 4, 5, 6]] c_2 = ['val', [My.Data 1 2, My.Data 3 4, My.Data 2 1, My.Data 5 2, My.Data 7 0, My.Data 4 -1]] df = Table.new [c_1, c_2] r = df.order_by (['val']) r.at 'id' . to_vector . should_equal [1,3,6,2,4,5] - Test.specify 'should correctly reorder all kinds of columns and leave the original columns untouched' <| + group_builder.specify 'should correctly reorder all kinds of columns and leave the original columns untouched' <| ord = [0, 3, 2, 4, 1] ints = [1, 2, 3, 4, 5] reals = [1.3, 4.6, 3.2, 5.2, 1.6] @@ -400,8 +406,8 @@ spec = r6 = df.order_by (['mixed_dates']) r6 . 
should_fail_with Incomparable_Values - Test.group "Sorting Columns" <| - Test.specify 'should sort columns with specified ordering and missing placement' <| + suite_builder.group "Sorting Columns" group_builder-> + group_builder.specify 'should sort columns with specified ordering and missing placement' <| c = Column.from_vector 'foo' [1, 7, Nothing, 4, 8, Nothing] r_1 = c.sort @@ -413,13 +419,13 @@ spec = r_3 = c.sort Sort_Direction.Descending missing_last=False r_3.to_vector.should_equal [Nothing,Nothing,8,7,4,1] - Test.specify 'should respect defined comparison operations for custom types' <| + group_builder.specify 'should respect defined comparison operations for custom types' <| c = Column.from_vector 'foo' [My.Data 1 2, My.Data 3 4, My.Data 2 1, My.Data 5 2, My.Data 7 0, My.Data 4 -1] r = c.sort r.to_vector.should_equal [My.Data 1 2, My.Data 2 1, My.Data 4 -1, My.Data 3 4, My.Data 5 2, My.Data 7 0] - Test.specify 'should allow passing a custom comparator' <| + group_builder.specify 'should allow passing a custom comparator' <| c = Column.from_vector 'foo' [My.Data 1 2, My.Data 2 5, My.Data 3 4, My.Data 6 3, Nothing, My.Data 1 0] cmp a b = Ordering.compare (a.x-a.y).abs (b.x-b.y).abs r = c.sort by=cmp @@ -432,12 +438,12 @@ spec = r3 = d.sort by=cmp2 missing_last=False r3.to_vector.should_equal [Nothing,5,4,3,2,1] - Test.specify 'should handle Unicode characters correctly' <| + group_builder.specify 'should handle Unicode characters correctly' <| c = Column.from_vector 'c' ['z', 'a', 'd', 'f', 's', 'e\u0301', 'ś', 'ą', 's\u0301', 'w', 'b'] c.sort.to_vector . 
should_equal ['a', 'ą', 'b', 'd', 'e\u0301', 'f', 's', 's\u0301', 'ś', 'w', 'z'] - Test.group "Slicing Tables" <| - Test.specify 'should allow taking first n rows' <| + suite_builder.group "Slicing Tables" group_builder-> + group_builder.specify 'should allow taking first n rows' <| i_1 = ['ix', [1, 2, 3]] c_1 = ['col', [5, 6, 7]] c_2 = ['col2', ["a", Nothing, "c"]] @@ -460,7 +466,7 @@ spec = t_1.at 'col' . take (First 2) . to_vector . should_equal (t_1.at 'col' . to_vector . take (First 2)) t_1.at 'col' . take 2 . to_vector . should_equal (t_1.at 'col' . to_vector . take 2) - Test.specify "should allow taking the last n rows" <| + group_builder.specify "should allow taking the last n rows" <| i_1 = ['ix', [1, 2, 3]] c_1 = ['col1', [5, 6, 7]] c_2 = ['col2', ["a", Nothing, "c"]] @@ -476,7 +482,7 @@ spec = t_1.at 'col1' . take (Last 2) . to_vector . should_equal (t_1.at 'col1' . to_vector . take (Last 2)) - Test.specify "should allow taking/dropping a prefix of rows that satisfy a predicate" <| + group_builder.specify "should allow taking/dropping a prefix of rows that satisfy a predicate" <| t1 = Table.new [["X", [1, 2, 3, 4, 5, 5]], ["Y", [9, 8, 7, 2, 10, 5]]] t2 = t1.take (Index_Sub_Range.While row-> row.to_vector.compute Statistic.Sum == 10) @@ -499,7 +505,7 @@ spec = t6.at "X" . to_vector . should_equal [] t6.at "Y" . to_vector . should_equal [] - Test.specify "should allow reversing the table" <| + group_builder.specify "should allow reversing the table" <| i_1 = ['ix', [1, 2, 3]] c_1 = ['col1', [5, 6, 7]] c_2 = ['col2', ["a", Nothing, "c"]] @@ -518,8 +524,8 @@ spec = t_2.at 'col2' . to_vector . should_equal (expected.at 'col2' . to_vector) t_2.at 'col3' . to_vector . should_equal (expected.at 'col3' . 
to_vector) - Test.group "fill" - Test.specify "should allow to fill_nothing from a value" <| + suite_builder.group "fill" group_builder-> + group_builder.specify "should allow to fill_nothing from a value" <| col0 = Column.from_vector "col0" [0, Nothing, 4, 5, Nothing, Nothing] col1 = Column.from_vector "col1" [Nothing, 200, Nothing, 400, 500, Nothing] default = 1000 @@ -528,7 +534,7 @@ spec = actual.at "col0" . to_vector . should_equal [0, 1000, 4, 5, 1000, 1000] actual.at "col1" . to_vector . should_equal [1000, 200, 1000, 400, 500, 1000] - Test.specify "should allow to fill_nothing from other columns" <| + group_builder.specify "should allow to fill_nothing from other columns" <| col0 = Column.from_vector "col0" [0, Nothing, 4, 5, Nothing, Nothing] col1 = Column.from_vector "col1" [Nothing, 200, Nothing, 400, 500, Nothing] defaults = Column.from_vector "def0" [1, 2, 10, 20, Nothing, 30] @@ -537,7 +543,7 @@ spec = actual.at "col0" . to_vector . should_equal [0, 2, 4, 5, Nothing, 30] actual.at "col1" . to_vector . should_equal [1, 200, 10, 400, 500, 30] - Test.specify "should allow to fill_empty from a value" <| + group_builder.specify "should allow to fill_empty from a value" <| col0 = Column.from_vector "col0" ["0", Nothing, "4", "5", Nothing, Nothing] col1 = Column.from_vector "col1" [Nothing, "200", Nothing, "400", "500", Nothing] default = "1000" @@ -546,7 +552,7 @@ spec = actual.at "col0" . to_vector . should_equal ["0", "1000", "4", "5", "1000", "1000"] actual.at "col1" . to_vector . should_equal ["1000", "200", "1000", "400", "500", "1000"] - Test.specify "should allow to fill_empty from other columns" <| + group_builder.specify "should allow to fill_empty from other columns" <| col0 = Column.from_vector "col0" ["0", Nothing, "4", "5", Nothing, Nothing] col1 = Column.from_vector "col1" [Nothing, "200", Nothing, "400", "500", Nothing] defaults = Column.from_vector "def0" ["1", "2", "10", "20", Nothing, "30"] @@ -555,7 +561,7 @@ spec = actual.at "col0" . 
to_vector . should_equal ["0", "2", "4", "5", Nothing, "30"] actual.at "col1" . to_vector . should_equal ["1", "200", "10", "400", "500", "30"] - Test.specify "fill_nothing should leave other columns alone" <| + group_builder.specify "fill_nothing should leave other columns alone" <| col0 = Column.from_vector "col0" [0, Nothing, 4, 5, Nothing, Nothing] col_between = Column.from_vector "col_between" [3, 4, 5, 6, 7, 8] col1 = Column.from_vector "col1" [Nothing, 200, Nothing, 400, 500, Nothing] @@ -567,7 +573,7 @@ spec = actual.at "col1" . to_vector . should_equal [1000, 200, 1000, 400, 500, 1000] actual.column_names . should_equal ["col0", "col_between", "col1"] - Test.specify "fill_nothing should work with integer column selectors" <| + group_builder.specify "fill_nothing should work with integer column selectors" <| col0 = Column.from_vector "col0" [0, Nothing, 4, 5, Nothing, Nothing] col_between = Column.from_vector "col_between" [3, 4, 5, 6, 7, 8] col1 = Column.from_vector "col1" [Nothing, 200, Nothing, 400, 500, Nothing] @@ -579,11 +585,11 @@ spec = actual.at "col1" . to_vector . should_equal [1000, 200, 1000, 400, 500, 1000] actual.column_names . should_equal ["col0", "col_between", "col1"] - Test.group "Use First Row As Names" <| + suite_builder.group "Use First Row As Names" group_builder-> expect_column_names names table = table.columns . map .name . 
should_equal names frames_to_skip=2 - Test.specify "should work happily with mixed types" <| + group_builder.specify "should work happily with mixed types" <| c_0 = ['A', ["H", "B", "C"]] c_1 = ['B', [Date.new 1980, Date.new 1979, Date.new 2000]] c_2 = ['x', [1, 2, 3]] @@ -592,7 +598,7 @@ spec = table = Table.new [c_0, c_1, c_2, c_3, c_4] expect_column_names ["H", "1980-01-01", "1", "5.3", "True"] table.use_first_row_as_names - Test.specify "should correctly handle problems: invalid names ''" <| + group_builder.specify "should correctly handle problems: invalid names ''" <| c_0 = ['A', ["", "B", "C"]] c_1 = ['B', [Date.new 1980, Date.new 1979, Date.new 2000]] c_2 = ['x', [1, 2, 3]] @@ -604,7 +610,7 @@ spec = problems = [Invalid_Column_Names.Error [""]] Problems.test_problem_handling action problems tester - Test.specify "should correctly handle problems: invalid names Nothing" <| + group_builder.specify "should correctly handle problems: invalid names Nothing" <| c_0 = ['A', ["A", "B", "C"]] c_1 = ['B', [Date.new 1980, Date.new 1979, Date.new 2000]] c_2 = ['x', [Nothing, 2, 3]] @@ -616,7 +622,7 @@ spec = problems = [Invalid_Column_Names.Error [Nothing]] Problems.test_problem_handling action problems tester - Test.specify "should correctly handle problems: multiple invalid names" <| + group_builder.specify "should correctly handle problems: multiple invalid names" <| c_0 = ['A', ["", "B", "C"]] c_1 = ['B', [Date.new 1980, Date.new 1979, Date.new 2000]] c_2 = ['x', [Nothing, 2, 3]] @@ -628,7 +634,7 @@ spec = problems = [Invalid_Column_Names.Error ["", Nothing]] Problems.test_problem_handling action problems tester - Test.specify "should correctly handle problems: duplicate names" <| + group_builder.specify "should correctly handle problems: duplicate names" <| c_0 = ['A', ["A", "B", "C"]] c_1 = ['B', ["A", "B", "C"]] c_2 = ['x', ["A", "B", "C"]] @@ -639,8 +645,8 @@ spec = problems = [Duplicate_Output_Column_Names.Error ["A", "A", "A"]] Problems.test_problem_handling 
action problems tester - Test.group "[In-Memory] Table.aggregate" <| - Test.specify "should return columns with correct types" <| + suite_builder.group "[In-Memory] Table.aggregate" group_builder-> + group_builder.specify "should return columns with correct types" <| dates = ["dates", [Date.new 1999, Date.new 2000, Date.new 2000, Date.new 2000]] texts = ["texts", ["a", "bb", "a", "bb"]] mixed = ["mixed", [1, "a", "a", 1]] @@ -665,7 +671,7 @@ spec = t4.info.at "Column" . to_vector . should_equal ["mixed", "Sum ints", "Sum floats"] t4.info.at "Value Type" . to_vector . should_equal [Value_Type.Mixed, Value_Type.Float, Value_Type.Float] - Test.specify "should take Unicode normalization into account when grouping by Text" <| + group_builder.specify "should take Unicode normalization into account when grouping by Text" <| texts = ["texts", ['ściana', 'ściana', 'łąka', 's\u0301ciana', 'ła\u0328ka', 'sciana']] ints = ["ints", [1, 2, 4, 8, 16, 32]] table = Table.new [texts, ints] @@ -676,7 +682,7 @@ spec = r2 = table.aggregate [Count_Distinct "texts"] r2.at "Count Distinct texts" . to_vector . should_equal [3] - Test.specify "should be able to aggregate over enso Types" <| + group_builder.specify "should be able to aggregate over enso Types" <| weekday_table = Table.new [["days", [Day_Of_Week.Monday, Day_Of_Week.Monday, Day_Of_Week.Monday, Day_Of_Week.Tuesday, Day_Of_Week.Sunday]], ["group", [1,1,2,1,2]]] r1 = weekday_table.aggregate [Group_By "days"] . order_by "days" @@ -691,8 +697,8 @@ spec = more guarantees: preserving order of rows and always selecting the first row of ones sharing the same distinctness key. For database tests (to be added later) we can not rely on ordering. 
- Test.group "[In-Memory] Table.distinct" <| - Test.specify "should allow to select distinct rows based on a subset of columns, returning the first row from each group" <| + suite_builder.group "[In-Memory] Table.distinct" group_builder-> + group_builder.specify "should allow to select distinct rows based on a subset of columns, returning the first row from each group" <| a = ["A", ["a", "a", "a", "a", "a", "a"]] b = ["B", [1, 1, 2, 2, 1, 2]] c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]] @@ -703,7 +709,7 @@ spec = r2.at "B" . to_vector . should_equal [1, 2] r2.at "C" . to_vector . should_equal [0.1, 0.3] - Test.specify "should handle nulls correctly and preserve original ordering" <| + group_builder.specify "should handle nulls correctly and preserve original ordering" <| a = ["A", ["a", Nothing, "b", "a", "b", Nothing, "a", "b"]] b = ["B", [1, 2, 3, 4, 5, 6, 7, 8]] t = Table.new [a, b] @@ -711,11 +717,11 @@ spec = r.at "A" . to_vector . should_equal ["a", Nothing, "b"] r.at "B" . to_vector . should_equal [1, 2, 3] - Test.specify "should handle Unicode normalization of keys correctly" <| + group_builder.specify "should handle Unicode normalization of keys correctly" <| t1 = Table.new [["X", ['ś', 's\u0301', 's', 'ś']]] t1.distinct . at "X" . to_vector . should_equal ['ś', 's'] - Test.specify "should allow to control case-sensitivity of keys, correctly handling Unicode folding" <| + group_builder.specify "should allow to control case-sensitivity of keys, correctly handling Unicode folding" <| x = ["X", ['A', 'a', 'enso', 'śledź', 'Enso', 'A', 's\u0301ledz\u0301']] y = ["Y", [1, 2, 3, 4, 5, 6, 7]] t1 = Table.new [x, y] @@ -730,7 +736,7 @@ spec = t2 = Table.new [["X", ["łąka", "STRASSE", "Straße", "ffi", "ŁĄka", "ffi"]]] t2.distinct case_sensitivity=Case_Sensitivity.Insensitive . at "X" . to_vector . 
should_equal ["łąka", "STRASSE", "ffi"] - Test.specify "should report a warning if the key contains floating point values" <| + group_builder.specify "should report a warning if the key contains floating point values" <| t1 = Table.new [["X", [3.0, 1.0, 2.0, 2.0, 1.0]]] action1 = t1.distinct on_problems=_ tester1 table = @@ -745,15 +751,17 @@ spec = problems2 = [Floating_Point_Equality.Error "X"] Problems.test_problem_handling action2 problems2 tester2 - Test.specify "should be able to create distinct on Enso objects" <| + group_builder.specify "should be able to create distinct on Enso objects" <| t = Table.new [["X", [My.Data 1 2, My.Data 3 4, My.Data 1 2]]] t.distinct ["X"] . at "X" . to_vector . should_equal [My.Data 1 2, My.Data 3 4] t2 = Table.new [["X", [Day_Of_Week.Monday, Day_Of_Week.Tuesday, Day_Of_Week.Monday, Day_Of_Week.Monday, Day_Of_Week.Tuesday, Day_Of_Week.Wednesday]]] t2.distinct ["X"] . at "X" . to_vector . should_equal [Day_Of_Week.Monday, Day_Of_Week.Tuesday, Day_Of_Week.Wednesday] - Test.group "[In-Memory] Table.filter" <| - Test.specify "by a custom predicate" <| + suite_builder.group "[In-Memory] Table.filter" group_builder-> + data = Data.setup + + group_builder.specify "by a custom predicate" <| t = Table.new [["ix", [1, 2, 3, 4, 5]], ["X", [5, 0, 4, 5, 1]]] t1 = t.filter "X" (x -> x % 2 == 0) t1.at "ix" . to_vector . should_equal [2, 3] @@ -767,7 +775,7 @@ spec = t3.at "ix" . to_vector . should_equal [1, 4, 5] t3.at "X" . to_vector . should_equal [5, 5, 1] - Test.specify "by custom object comparisons" <| + group_builder.specify "by custom object comparisons" <| t = Table.new [["ix", [1, 2, 3, 4, 5]], ["X", [My.Data 1 2, My.Data 300 400, My.Data 100 200, My.Data 5 6, My.Data 7 8]]] t1 = t.filter "X" (Filter_Condition.Between (My.Data 10 20) (My.Data 300 400)) t1.at "ix" . to_vector . should_equal [2, 3] @@ -784,7 +792,7 @@ spec = t.filter "X" (Filter_Condition.Less than=c) . at "X" . to_vector . 
should_equal [My.Data 100 200, My.Data 5 6] t.filter "X" (Filter_Condition.Greater than=c) . at "X" . to_vector . should_equal [] - Test.specify "by a boolean mask of varying length" <| + group_builder.specify "by a boolean mask of varying length" <| t = Table.new [["A", [1, 2, 3]], ["B", [4, 5, 6]]] t1 = t.filter (Column.from_vector "f" [False, True]) @@ -795,7 +803,7 @@ spec = t2.at "A" . to_vector . should_equal [2, 3] t2.at "B" . to_vector . should_equal [5, 6] - Test.specify "by an Is_In check, on various types of columns" <| + group_builder.specify "by an Is_In check, on various types of columns" <| ins = Table.new <| str = ["str", ["c", "b", Nothing, Nothing]] int = ["int", [1, 2, 3, 3]] @@ -808,29 +816,29 @@ spec = nulls = ["nulls", [Nothing, Nothing, Nothing, 0]] custom = ["custom", [2, My.Data 2 1, Nothing, Nothing]] [str, int, int2, dbl, dates, dts, tod, mix, nulls, custom] - varied_type_table.filter "strs" (Filter_Condition.Is_In (ins.at "str")) . at "strs" . to_vector . should_equal ["b", "c", Nothing] - varied_type_table.filter "strs" (Filter_Condition.Is_In (ins.at "str" . to_vector)) . at "strs" . to_vector . should_equal ["b", "c", Nothing] - varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int")) . at "ints" . to_vector . should_equal [1, 2] - varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int" . to_vector)) . at "ints" . to_vector . should_equal [1, 2] - varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int2")) . at "ints" . to_vector . should_equal [Nothing, 1] - varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int2" . to_vector)) . at "ints" . to_vector . should_equal [Nothing, 1] - varied_type_table.filter "doubles" (Filter_Condition.Is_In (ins.at "dbl")) . at "doubles" . to_vector . should_equal [0.0, Nothing] - varied_type_table.filter "doubles" (Filter_Condition.Is_In (ins.at "dbl" . to_vector)) . at "doubles" . to_vector . 
should_equal [0.0, Nothing] - varied_type_table.filter "dates" (Filter_Condition.Is_In (ins.at "dates")) . at "dates" . to_vector . should_equal [Date.new 2000, Date.new 1999 1 1] - varied_type_table.filter "dates" (Filter_Condition.Is_In (ins.at "dates" . to_vector)) . at "dates" . to_vector . should_equal [Date.new 2000, Date.new 1999 1 1] - varied_type_table.filter "datetimes" (Filter_Condition.Is_In (ins.at "dts")) . at "datetimes" . to_vector . should_equal [Nothing, Date_Time.new 2022 8 27 11 22 25] - varied_type_table.filter "datetimes" (Filter_Condition.Is_In (ins.at "dts" . to_vector)) . at "datetimes" . to_vector . should_equal [Nothing, Date_Time.new 2022 8 27 11 22 25] - varied_type_table.filter "times" (Filter_Condition.Is_In (ins.at "tod")) . at "times" . to_vector . should_equal [Time_Of_Day.new 18 00] - varied_type_table.filter "times" (Filter_Condition.Is_In (ins.at "tod" . to_vector)) . at "times" . to_vector . should_equal [Time_Of_Day.new 18 00] - varied_type_table.filter "mixed" (Filter_Condition.Is_In [42, "a", 1, Nothing, Date.new 2022 8 27, Date_Time.new 2022 8 27]) . at "mixed" . to_vector . should_equal [1, "a", Nothing, Date.new 2022 8 27] - varied_type_table.filter "mixed" (Filter_Condition.Is_In (ins.at "mix")) . at "mixed" . to_vector . should_equal [1] - varied_type_table.filter "mixed" (Filter_Condition.Is_In (ins.at "mix" . to_vector)) . at "mixed" . to_vector . should_equal [1] - varied_type_table.filter "just_nulls" (Filter_Condition.Is_In []) . at "just_nulls" . to_vector . should_equal [] - varied_type_table.filter "just_nulls" (Filter_Condition.Is_In (ins.at "nulls")) . at "just_nulls" . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing] - varied_type_table.filter "just_nulls" (Filter_Condition.Is_In (ins.at "nulls" . to_vector)) . at "just_nulls" . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing] - varied_type_table.filter "just_nulls" (Filter_Condition.Is_In [0]) . at "just_nulls" . to_vector . 
should_equal [] - varied_type_table.filter "custom_objects" (Filter_Condition.Is_In (ins.at "custom")) . at "custom_objects" . to_vector . should_equal [My.Data 1 2, Nothing, Nothing] - varied_type_table.filter "custom_objects" (Filter_Condition.Is_In (ins.at "custom" . to_vector)) . at "custom_objects" . to_vector . should_equal [My.Data 1 2, Nothing, Nothing] + data.varied_type_table.filter "strs" (Filter_Condition.Is_In (ins.at "str")) . at "strs" . to_vector . should_equal ["b", "c", Nothing] + data.varied_type_table.filter "strs" (Filter_Condition.Is_In (ins.at "str" . to_vector)) . at "strs" . to_vector . should_equal ["b", "c", Nothing] + data.varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int")) . at "ints" . to_vector . should_equal [1, 2] + data.varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int" . to_vector)) . at "ints" . to_vector . should_equal [1, 2] + data.varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int2")) . at "ints" . to_vector . should_equal [Nothing, 1] + data.varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int2" . to_vector)) . at "ints" . to_vector . should_equal [Nothing, 1] + data.varied_type_table.filter "doubles" (Filter_Condition.Is_In (ins.at "dbl")) . at "doubles" . to_vector . should_equal [0.0, Nothing] + data.varied_type_table.filter "doubles" (Filter_Condition.Is_In (ins.at "dbl" . to_vector)) . at "doubles" . to_vector . should_equal [0.0, Nothing] + data.varied_type_table.filter "dates" (Filter_Condition.Is_In (ins.at "dates")) . at "dates" . to_vector . should_equal [Date.new 2000, Date.new 1999 1 1] + data.varied_type_table.filter "dates" (Filter_Condition.Is_In (ins.at "dates" . to_vector)) . at "dates" . to_vector . should_equal [Date.new 2000, Date.new 1999 1 1] + data.varied_type_table.filter "datetimes" (Filter_Condition.Is_In (ins.at "dts")) . at "datetimes" . to_vector . 
should_equal [Nothing, Date_Time.new 2022 8 27 11 22 25] + data.varied_type_table.filter "datetimes" (Filter_Condition.Is_In (ins.at "dts" . to_vector)) . at "datetimes" . to_vector . should_equal [Nothing, Date_Time.new 2022 8 27 11 22 25] + data.varied_type_table.filter "times" (Filter_Condition.Is_In (ins.at "tod")) . at "times" . to_vector . should_equal [Time_Of_Day.new 18 00] + data.varied_type_table.filter "times" (Filter_Condition.Is_In (ins.at "tod" . to_vector)) . at "times" . to_vector . should_equal [Time_Of_Day.new 18 00] + data.varied_type_table.filter "mixed" (Filter_Condition.Is_In [42, "a", 1, Nothing, Date.new 2022 8 27, Date_Time.new 2022 8 27]) . at "mixed" . to_vector . should_equal [1, "a", Nothing, Date.new 2022 8 27] + data.varied_type_table.filter "mixed" (Filter_Condition.Is_In (ins.at "mix")) . at "mixed" . to_vector . should_equal [1] + data.varied_type_table.filter "mixed" (Filter_Condition.Is_In (ins.at "mix" . to_vector)) . at "mixed" . to_vector . should_equal [1] + data.varied_type_table.filter "just_nulls" (Filter_Condition.Is_In []) . at "just_nulls" . to_vector . should_equal [] + data.varied_type_table.filter "just_nulls" (Filter_Condition.Is_In (ins.at "nulls")) . at "just_nulls" . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing] + data.varied_type_table.filter "just_nulls" (Filter_Condition.Is_In (ins.at "nulls" . to_vector)) . at "just_nulls" . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing] + data.varied_type_table.filter "just_nulls" (Filter_Condition.Is_In [0]) . at "just_nulls" . to_vector . should_equal [] + data.varied_type_table.filter "custom_objects" (Filter_Condition.Is_In (ins.at "custom")) . at "custom_objects" . to_vector . should_equal [My.Data 1 2, Nothing, Nothing] + data.varied_type_table.filter "custom_objects" (Filter_Condition.Is_In (ins.at "custom" . to_vector)) . at "custom_objects" . to_vector . 
should_equal [My.Data 1 2, Nothing, Nothing] t2 = Table.new [["ints", [1, 2, 3]], ["doubles", [1.2, 0.0, 1.0]]] t2.filter "ints" (Filter_Condition.Is_In [2.0, 1.5, 3, 4]) . at "ints" . to_vector . should_equal [2, 3] @@ -872,8 +880,8 @@ spec = t2.filter "Y" (Filter_Condition.Is_In in_vector) . at "Y" . to_vector . should_equal expected_neg_vector t2.filter "Y" (Filter_Condition.Is_In in_column) . at "Y" . to_vector . should_equal expected_neg_vector - Test.group "[In-Memory-specific] Table.join" <| - Test.specify "should correctly report unsupported cross-backend joins" <| + suite_builder.group "[In-Memory-specific] Table.join" group_builder-> + group_builder.specify "should correctly report unsupported cross-backend joins" <| t = Table.new [["X", [1, 2, 3]]] Panic.recover Type_Error (t.join 42) . should_fail_with Type_Error @@ -884,8 +892,8 @@ spec = r.should_fail_with Illegal_Argument r.catch.message . contains "cross-backend" . should_be_true - Test.group "[In-Memory-specific] Table.set" <| - Test.specify "should allow using vector and range for a new column" <| + suite_builder.group "[In-Memory-specific] Table.set" group_builder-> + group_builder.specify "should allow using vector and range for a new column" <| t = Table.new [["X", [1, 2, 3]]] t_vec = t.set [10, 20, 30] @@ -900,7 +908,7 @@ spec = t_date_range.column_names.should_equal ["X", "Date Range"] t_date_range.at "Date Range" . to_vector . should_equal [Date.new 2020 1 1, Date.new 2020 1 2, Date.new 2020 1 3] - Test.specify "should fail if there is a length mismatch on a new column" <| + group_builder.specify "should fail if there is a length mismatch on a new column" <| t = Table.new [["X", [1, 2, 3]]] c = Column.from_vector "Column" [10, 20] @@ -909,7 +917,11 @@ spec = t.set (100.up_to 102) . should_fail_with Row_Count_Mismatch t.set ((Date.new 2020 1 1).up_to (Date.new 2020 1 3)) . 
should_fail_with Row_Count_Mismatch -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + ## JS indexes months form 0, so we need to subtract 1. foreign js js_make_date year month day = """ diff --git a/test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso index d21d6b95e0bc..b96dd46a7c74 100644 --- a/test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso @@ -4,49 +4,66 @@ from Standard.Table import Table, Delimited, Column, Data_Formatter import Standard.Table.Data.Type.Value_Type.Value_Type from Standard.Table.Extensions.Table_Conversions import all -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all from project.Util import all -spec = - c_number = ["Serial number", ["2LMXK1", "2LMXK1", "JEMLP3", "JEMLP3", "BR83GP", "BR83GP"]] - c_type = ["Movement type", [101, 301, 101, 203, 101, 301]] - c_time = ["Posting time", [Time_Of_Day.new 9 0, Time_Of_Day.new 14 0 12, Time_Of_Day.new 9 0, Time_Of_Day.new 17 30, Time_Of_Day.new 9 0 4, Time_Of_Day.new 15 30]] - expected = Table.new [c_number, c_type, c_time] +type Data + Value ~data - Test.group "File.read (Delimited) should work with Time_Of_Days" <| + c_number self = self.data.at 0 + c_type self = self.data.at 1 + c_time self = self.data.at 2 + expected self = self.data.at 3 + table self = self.data.at 4 + + setup = Data.Value <| + c_number = ["Serial number", ["2LMXK1", "2LMXK1", "JEMLP3", "JEMLP3", "BR83GP", "BR83GP"]] + c_type = ["Movement type", [101, 301, 101, 203, 101, 301]] + c_time = ["Posting time", [Time_Of_Day.new 9 0, Time_Of_Day.new 14 0 12, Time_Of_Day.new 9 0, Time_Of_Day.new 17 30, Time_Of_Day.new 9 0 4, Time_Of_Day.new 15 30]] + expected = Table.new [c_number, c_type, c_time] table = (enso_project.data / "time_of_day_sample.csv").read - 
Test.specify "should be able to read in a table with dates" <| - table.column_count.should_equal 3 - table.info.at "Column" . to_vector . should_equal ['Serial number','Movement type', 'Posting time'] - table.info.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Time] - table.row_count.should_equal 6 - - Test.specify "should be able to treat a single value as a Time_Of_Days" <| - from_column = table.at 'Posting time' + [c_number, c_type, c_time, expected, table] + + +add_specs suite_builder = + data = Data.setup + + suite_builder.group "File.read (Delimited) should work with Time_Of_Days" group_builder-> + group_builder.specify "should be able to read in a data.table with dates" <| + data.table.column_count.should_equal 3 + data.table.info.at "Column" . to_vector . should_equal ['Serial number','Movement type', 'Posting time'] + data.table.info.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Time] + data.table.row_count.should_equal 6 + + group_builder.specify "should be able to treat a single value as a Time_Of_Days" <| + from_column = data.table.at 'Posting time' from_column.at 5 . hour . should_equal 15 from_column.at 5 . minute . should_equal 30 from_column.at 5 . should_equal (Time_Of_Day.new 15 30) - Test.specify "should be able to compare columns and table" <| - table.at 'Serial number' . should_equal (Column.from_vector c_number.first c_number.second) - table.at 'Movement type' . should_equal (Column.from_vector c_type.first c_type.second) - table.at 'Posting time' . should_equal (Column.from_vector c_time.first c_time.second) - table.should_equal expected + group_builder.specify "should be able to compare columns and data.table" <| + data.table.at 'Serial number' . should_equal (Column.from_vector data.c_number.first data.c_number.second) + data.table.at 'Movement type' . 
should_equal (Column.from_vector data.c_type.first data.c_type.second) + data.table.at 'Posting time' . should_equal (Column.from_vector data.c_time.first data.c_time.second) + data.table.should_equal data.expected - Test.group "Should be able to serialise a table with Time_Of_Days to Text" <| - Test.specify "should serialise back to input" <| + suite_builder.group "Should be able to serialise a data.table with Time_Of_Days to Text" group_builder-> + group_builder.specify "should serialise back to input" <| expected_text = normalize_lines <| (enso_project.data / "time_of_day_sample_normalized_hours.csv").read_text - delimited = Text.from expected format=(Delimited "," line_endings=Line_Ending_Style.Unix) + delimited = Text.from data.expected format=(Delimited "," line_endings=Line_Ending_Style.Unix) delimited.should_equal expected_text - Test.specify "should serialise dates with format" <| - test_table = Table.new [c_time] + group_builder.specify "should serialise dates with format" <| + test_table = Table.new [data.c_time] expected_text = 'Posting time\n09-00-00\n14-00-12\n09-00-00\n17-30-00\n09-00-04\n15-30-00\n' data_formatter = Data_Formatter.Value . 
with_datetime_formats time_formats=["HH-mm-ss"] delimited = Text.from test_table format=(Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix) delimited.should_equal expected_text -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/Util.enso b/test/Table_Tests/src/Util.enso index a8e1195067f8..367ca2e8451f 100644 --- a/test/Table_Tests/src/Util.enso +++ b/test/Table_Tests/src/Util.enso @@ -5,13 +5,11 @@ import Standard.Database.Data.Column.Column as Database_Column import Standard.Table.Data.Table.Table as In_Memory_Table import Standard.Table.Data.Column.Column as In_Memory_Column -from Standard.Test import Test -import Standard.Test.Extensions -import Standard.Test.Test_Result.Test_Result +from Standard.Test_New import all polyglot java import org.enso.base_test_helpers.FileSystemHelper -In_Memory_Table.should_equal : Any -> Integer -> Test_Result +In_Memory_Table.should_equal : Any -> Integer -> Any In_Memory_Table.should_equal self expected frames_to_skip=0 = loc = Meta.get_source_location 1+frames_to_skip case expected of @@ -26,7 +24,7 @@ In_Memory_Table.should_equal self expected frames_to_skip=0 = Test.fail msg _ -> Test.fail "Got a Table, but expected a "+expected.to_display_text+' (at '+loc+').' -In_Memory_Column.should_equal : Any -> Integer -> Test_Result +In_Memory_Column.should_equal : Any -> Integer -> Any In_Memory_Column.should_equal self expected frames_to_skip=0 = loc = Meta.get_source_location 1+frames_to_skip case expected of @@ -38,13 +36,13 @@ In_Memory_Column.should_equal self expected frames_to_skip=0 = self.to_vector.should_equal expected.to_vector 2+frames_to_skip _ -> Test.fail "Got a Column, but expected a "+expected.to_display_text+' (at '+loc+').' 
-Database_Table.should_equal : Database_Table -> Integer -> Test_Result +Database_Table.should_equal : Database_Table -> Integer -> Any Database_Table.should_equal self expected frames_to_skip=0 = t0 = self.read t1 = expected.read t0 . should_equal t1 frames_to_skip -Database_Column.should_equal : Database_Column -> Integer -> Test_Result +Database_Column.should_equal : Database_Column -> Integer -> Any Database_Column.should_equal self expected frames_to_skip=0 = t0 = self.read t1 = expected.read From 12b6ae441ab00a6ef4d58232cea68367b5f82666 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 19 Jan 2024 12:34:01 +0100 Subject: [PATCH 56/93] Remove unused Table_Tests/src/In_Memory/Common_Spec --- .../src/In_Memory/Common_Spec.enso | 23 ------------------- 1 file changed, 23 deletions(-) delete mode 100644 test/Table_Tests/src/In_Memory/Common_Spec.enso diff --git a/test/Table_Tests/src/In_Memory/Common_Spec.enso b/test/Table_Tests/src/In_Memory/Common_Spec.enso deleted file mode 100644 index 9e6c4195c582..000000000000 --- a/test/Table_Tests/src/In_Memory/Common_Spec.enso +++ /dev/null @@ -1,23 +0,0 @@ -from Standard.Base import all - -from Standard.Table import Table - -from Standard.Test import Test_Suite - -import project.Common_Table_Operations - -run_common_spec spec = - selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by=True natural_ordering=True case_insensitive_ordering=True order_by_unicode_normalization_by_default=True supports_unicode_normalization=True supports_time_duration=True supports_nanoseconds_in_time=True supports_mixed_columns=True fixed_length_text_columns=True supports_8bit_integer=True - aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config - - table = (enso_project.data / "data.csv") . 
read - empty_table = table.take 0 - materialize = x->x - - setup = Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " table empty_table Table.new materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection connection=Nothing - spec setup - -spec = - run_common_spec Common_Table_Operations.Main.spec - -main = Test_Suite.run_main spec From 348af3f7d1e8cab9238fa2945eed0fbc1c0130a4 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 19 Jan 2024 12:34:42 +0100 Subject: [PATCH 57/93] WIP: Fix "Keys are not Text" test --- test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso index 37309c81d273..6c24b5f60d84 100644 --- a/test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso @@ -193,7 +193,7 @@ add_specs suite_builder = table.expand_column "b" . should_equal expected table2 = Table.new [["a", [1, 2]], ["b", [Map.from_vector [[My_Mod_Type.Value 12, "x"], [My_Mod_Type.Value 23, "y"]], Map.from_vector [[My_Mod_Type.Value 32, "z"]]]]] - expected2 = Table.new [["a", [1, 2]], ["b x%10=2", ["x", "z"]], ["b x%10=3", ["y", Nothing]]] + expected2 = Table.new [["a", [1, 2]], ["b x%10=3", ["y", Nothing]], ["b x%10=2", ["x", "z"]]] table2.expand_column "b" . 
should_equal expected2 group_builder.specify "will fail if text representation of keys is not unique" <| From 5e3114ec506b35f7dbd503b38b06839d694865c3 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 19 Jan 2024 18:47:50 +0100 Subject: [PATCH 58/93] Refactor Table_Tests/src/IO to Test_New --- test/Table_Tests/src/IO/Csv_Spec.enso | 59 ++- .../src/IO/Delimited_Read_Spec.enso | 75 +-- .../src/IO/Delimited_Write_Spec.enso | 79 +-- test/Table_Tests/src/IO/Excel_Spec.enso | 455 +++++++++--------- test/Table_Tests/src/IO/Fetch_Spec.enso | 23 +- test/Table_Tests/src/IO/Formats_Spec.enso | 34 +- test/Table_Tests/src/IO/Json_Spec.enso | 45 +- test/Table_Tests/src/IO/Main.enso | 22 +- 8 files changed, 407 insertions(+), 385 deletions(-) diff --git a/test/Table_Tests/src/IO/Csv_Spec.enso b/test/Table_Tests/src/IO/Csv_Spec.enso index 99f2609f9a30..0a707f064b2f 100644 --- a/test/Table_Tests/src/IO/Csv_Spec.enso +++ b/test/Table_Tests/src/IO/Csv_Spec.enso @@ -2,30 +2,39 @@ from Standard.Base import all from Standard.Table import all -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all from project.Util import all -spec = - c_1 = ["a", ["1", "4", "7", "10"]] - c_2 = ["b", [2, Nothing, 8, 11]] - c_3 = ["c", [Nothing, 6, 9, 12]] - expected_table = Table.new [c_1, c_2, c_3] +type Data + Value ~expected_table - Test.group "Table.from Text" <| - Test.specify "should create a table from a textual CSV" <| + setup = + c_1 = ["a", ["1", "4", "7", "10"]] + c_2 = ["b", [2, Nothing, 8, 11]] + c_3 = ["c", [Nothing, 6, 9, 12]] + expected_table = Table.new [c_1, c_2, c_3] + Data.Value expected_table + + +add_specs suite_builder = + suite_builder.group "Table.from Text" group_builder-> + data = Data.setup + + group_builder.specify "should create a table from a textual CSV" <| file_contents = (enso_project.data / "simple_empty.csv") . 
read_text table = Table.from file_contents (format = Delimited ",") - table.should_equal expected_table + table.should_equal data.expected_table - Test.group "File.read (Delimited)" <| - Test.specify "should create a table from a CSV in a file" <| + suite_builder.group "File.read (Delimited)" group_builder-> + data = Data.setup + + group_builder.specify "should create a table from a CSV in a file" <| file = (enso_project.data / "simple_empty.csv") table = file.read - table.should_equal expected_table + table.should_equal data.expected_table - Test.specify "should correctly infer types of varied-type columns" <| + group_builder.specify "should correctly infer types of varied-type columns" <| varied_column = (enso_project.data / "varied_column.csv") . read c_1 = ["Column 1", ["2005-02-25", "2005-02-28", "4", "2005-03-02", Nothing, "2005-03-04", "2005-03-07", "2005-03-08"]] # We can re-enable this once date support is improved. @@ -38,15 +47,15 @@ spec = expected = Table.new [c_1, c_3, c_4, c_5, c_6] varied_column.select_columns [0, 2, 3, 4, 5] . should_equal expected - Test.specify "should handle duplicated columns" <| + group_builder.specify "should handle duplicated columns" <| csv = """ name,x,y,x,y foo,10,20,30,20 t = Table.from csv (format = Delimited ",") t.columns.map .name . should_equal ['name', 'x', 'y', 'x 1', 'y 1'] - Test.group 'Writing' <| - Test.specify 'should properly serialize simple tables' <| + suite_builder.group 'Writing' group_builder-> + group_builder.specify 'should properly serialize simple tables' <| varied_column = (enso_project.data / "varied_column.csv") . 
read res = Text.from varied_column format=(Delimited ",") exp = normalize_lines <| ''' @@ -61,7 +70,7 @@ spec = 2005-03-08,2005-03-08,8,8,8.0,osiem res.should_equal exp - Test.specify 'should properly handle quoting of records and allow specifying separators' <| + group_builder.specify 'should properly handle quoting of records and allow specifying separators' <| c1 = ['name', ['Robert");DROP TABLE Students;--', 'This;Name;;Is""Strange', 'Marcin,,']] c2 = ['grade', [10, 20, 'hello;world']] t = Table.new [c1, c2] @@ -75,7 +84,7 @@ spec = res = Text.from t format=(Delimited ";") res.should_equal expected - Test.specify 'should allow forced quoting of records' <| + group_builder.specify 'should allow forced quoting of records' <| c1 = ['name', ['Robert");DROP TABLE Students;--', 'This;Name;;Is""Strange', 'Marcin,,']] c2 = ['grade', [10, 20, 'hello;world']] t = Table.new [c1, c2] @@ -90,7 +99,7 @@ spec = res.should_equal expected - Test.specify 'should write CSV to a file' <| + group_builder.specify 'should write CSV to a file' <| varied_column = (enso_project.data / "varied_column.csv") . read out = enso_project.data / "transient" / "out.csv" out.delete_if_exists @@ -108,8 +117,8 @@ spec = out.read_text.should_equal exp out.delete_if_exists - Test.group "Integration" <| - Test.specify "should be able to round-trip a table with all kinds of weird characters to CSV and back" <| + suite_builder.group "Integration" group_builder-> + group_builder.specify "should be able to round-trip a table with all kinds of weird characters to CSV and back" <| names = ['Śłąęźż");DROP TABLE Students;--', 'This;Name;;Is""Strange', 'Marcin,,', '\'', 'a\n\nb', 'a\tc', Nothing, Nothing, Nothing, '42', '💁👌🎍😍', '', 'null?\0?', 'FFFD', '\uFFFD', '\r\n', 'a\r\nb\n\rc\rd\ne', 'what about these # ?? 
// /* hmm */ is it included?', 'and the rare \v vertical tab?'] d = Date_Time.new 2015 10 29 23 55 49 t = Table.new [['name', names], ['header\nspanning\nmultiple\nlines', names.map_with_index ix-> _-> ix*10], ['dates', names.map_with_index ix-> _-> d + Duration.new hours=3*ix]] @@ -136,4 +145,8 @@ spec = out2.delete_if_exists -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/IO/Delimited_Read_Spec.enso b/test/Table_Tests/src/IO/Delimited_Read_Spec.enso index 590b5bc478cf..d9f22777eae8 100644 --- a/test/Table_Tests/src/IO/Delimited_Read_Spec.enso +++ b/test/Table_Tests/src/IO/Delimited_Read_Spec.enso @@ -7,14 +7,13 @@ from Standard.Table import Table, Column, Data_Formatter, Quote_Style, Delimited from Standard.Table.Extensions.Table_Conversions import all from Standard.Table.Errors import all -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all import project.Util -spec = - Test.group "Delimited File Parsing" <| - Test.specify "should load a simple table with headers" <| +add_specs suite_builder = + suite_builder.group "Delimited File Parsing" group_builder-> + group_builder.specify "should load a simple table with headers" <| c_1 = ["a", ['1', '4', '7', '10']] c_2 = ["b", ['2', Nothing, '8', '11']] c_3 = ["c", [Nothing, '6', '9', '12']] @@ -22,7 +21,7 @@ spec = simple_empty = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=True value_formatter=Nothing) simple_empty.should_equal expected_table - Test.specify "should load a simple table without headers" <| + group_builder.specify "should load a simple table without headers" <| c_1 = ["Column 1", ['a', '1', '4', '7', '10']] c_2 = ["Column 2", ['b', '2', Nothing, '8', '11']] c_3 = ["Column 3", ['c', Nothing, '6', '9', '12']] @@ -30,7 +29,7 @@ spec = simple_empty = Data.read (enso_project.data / 
"simple_empty.csv") (Delimited "," headers=False value_formatter=Nothing) simple_empty.should_equal expected_table - Test.specify "should work in presence of missing headers" <| + group_builder.specify "should work in presence of missing headers" <| action on_problems = Data.read (enso_project.data / "missing_header.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems tester table = table.columns.map .name . should_equal ["a", "Column 1", "c", "Column 2", "d"] @@ -42,7 +41,7 @@ spec = problems = [Invalid_Column_Names.Error [Nothing, Nothing]] Problems.test_problem_handling action problems tester - Test.specify "should infer headers based on the first two rows" <| + group_builder.specify "should infer headers based on the first two rows" <| t1 = Data.read (enso_project.data / "data_small.csv") (Delimited ",") t1.columns.map .name . should_equal ["Code", "Index", "Flag", "Value", "ValueWithNothing", "TextWithNothing", "Hexadecimal", "Leading0s", "QuotedNumbers", "Mixed Types"] @@ -74,7 +73,7 @@ spec = t6.at "1" . to_vector . should_equal ["y"] t6.at "x" . to_vector . should_equal [2] - Test.specify "should not use the first row as headers if it is the only row, unless specifically asked to" <| + group_builder.specify "should not use the first row as headers if it is the only row, unless specifically asked to" <| t1 = Data.read (enso_project.data / "one_row.csv") (Delimited ",") t1.columns.map .name . should_equal ["Column 1", "Column 2", "Column 3"] t1.at "Column 1" . to_vector . should_equal ["x"] @@ -86,11 +85,11 @@ spec = t2.row_count . should_equal 0 t2.at "x" . to_vector . 
should_equal [] - Test.specify "should raise an informative error when loading an empty file" <| + group_builder.specify "should raise an informative error when loading an empty file" <| t = Data.read (enso_project.data / "empty.txt") (Delimited "," headers=True value_formatter=Nothing) t.should_fail_with Empty_File_Error - Test.specify "should correctly handle file opening issues" <| + group_builder.specify "should correctly handle file opening issues" <| nonexistent_file = enso_project.data / "a_filename_that_does_not_exist.foobar" r1 = Data.read nonexistent_file (Delimited "," headers=True value_formatter=Nothing) r1.should_fail_with File_Error @@ -101,7 +100,7 @@ spec = r2.should_fail_with File_Error r2.catch.should_be_a File_Error.IO_Error - Test.specify "should work with all kinds of line endings" <| + group_builder.specify "should work with all kinds of line endings" <| path name = enso_project.data / 'transient' / name create_file name ending_style = lines = ['a,b,c', 'd,e,f', '1,2,3'] @@ -128,7 +127,7 @@ spec = ['crlf.csv', 'lf.csv', 'cr.csv', 'mixed.csv'].each (path >> .delete) - Test.specify "should allow to override line endings style" <| + group_builder.specify "should allow to override line endings style" <| file = enso_project.data / "transient" / "lf.csv" lines = ['a,b,c', 'd,e,f', '1,2,3'] text = lines.join '\n' @@ -155,21 +154,21 @@ spec = table . should_equal reference_table file_2.delete - Test.specify "should work with Windows-1252 encoding" <| + group_builder.specify "should work with Windows-1252 encoding" <| table = Data.read (enso_project.data / "windows.csv") (Delimited "," headers=True encoding=Encoding.windows_1252) Problem_Behavior.Report_Error table.columns.map .name . should_equal ['a', 'b', 'c'] table.at 'a' . to_vector . should_equal ['$¢'] table.at 'b' . to_vector . should_equal ['¤'] table.at 'c' . to_vector . 
should_equal ['¥'] - Test.specify "should work with UTF-16 encoding" <| + group_builder.specify "should work with UTF-16 encoding" <| table = Data.read (enso_project.data / "utf16.csv") (Delimited "," headers=True encoding=Encoding.utf_16_be) Problem_Behavior.Report_Error table.columns.map .name . should_equal ['ą', '🚀b', 'ć😎'] table.at 'ą' . to_vector . should_equal ['ą'] table.at '🚀b' . to_vector . should_equal ['✨🚀🚧😍😃😍😎😙😉☺'] table.at 'ć😎' . to_vector . should_equal ['แมวมีสี่ขา'] - Test.specify "should report errors when encountering malformed characters" <| + group_builder.specify "should report errors when encountering malformed characters" <| utf8_file = (enso_project.data / "transient" / "utf8_invalid.csv") utf8_bytes = [97, 44, 98, 44, 99, 10, -60, -123, 44, -17, -65, -65, 44, -61, 40, -61, 40, 10] utf8_bytes.write_bytes utf8_file @@ -196,7 +195,7 @@ spec = problems_2 = [Encoding_Error.Error "Encoding issues at byte 22."] Problems.test_problem_handling action_2 problems_2 tester_2 - Test.specify "should handle duplicated columns" <| + group_builder.specify "should handle duplicated columns" <| action on_problems = Data.read (enso_project.data / "duplicated_columns.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems tester table = table.columns.map .name . should_equal ['a', 'b', 'c', 'a 1'] @@ -205,7 +204,7 @@ spec = problems = [Duplicate_Output_Column_Names.Error ['a']] Problems.test_problem_handling action problems tester - Test.specify "should handle quotes" <| + group_builder.specify "should handle quotes" <| t1 = Data.read (enso_project.data / "double_quoted.csv") (Delimited "," headers=True value_formatter=Nothing) t1.at 'a' . to_vector . should_equal ['a, x', '"a'] t1.at 'c' . to_vector . should_equal ['3', '"'] @@ -218,13 +217,13 @@ spec = t3.at 'b' . to_vector . should_equal ['z"'] t3.at 'c' . to_vector . 
should_equal ['a'] - Test.specify "should support rows spanning multiple lines if quoted" <| + group_builder.specify "should support rows spanning multiple lines if quoted" <| t1 = Data.read (enso_project.data / "multiline_quoted.csv") (Delimited "," headers=True value_formatter=Nothing) t1.at 'a' . to_vector . should_equal ['1', '4'] t1.at 'b' . to_vector . should_equal ['start\n\ncontinue', '5'] t1.at 'c' . to_vector . should_equal ['3', '6'] - Test.specify "should fail in presence of a mismatched quote" <| + group_builder.specify "should fail in presence of a mismatched quote" <| [Problem_Behavior.Report_Error, Problem_Behavior.Report_Warning, Problem_Behavior.Ignore].each pb-> format = (Delimited "," headers=True value_formatter=Nothing) r1 = Data.read (enso_project.data / "mismatched_quote.csv") format on_problems=pb @@ -250,7 +249,7 @@ spec = r5.should_fail_with File_Error r5.catch.should_be_a File_Error.Corrupted_Format - Test.specify "should fail in presence of a mismatched quote (2)" pending="ToDo: To be fixed in https://github.com/enso-org/enso/issues/5839" <| + group_builder.specify "should fail in presence of a mismatched quote (2)" pending="ToDo: To be fixed in https://github.com/enso-org/enso/issues/5839" <| [Problem_Behavior.Report_Error, Problem_Behavior.Report_Warning, Problem_Behavior.Ignore].each pb-> format = (Delimited "," headers=True value_formatter=Nothing) format3 = format . 
with_quotes quote_escape="\" @@ -263,7 +262,7 @@ spec = r6.catch.should_be_a File_Error.Corrupted_Format f6.delete - Test.specify "should handle quotes if they are opened in the middle of an unquoted cell in a sane way" pending="ToDo: To be fixed in https://github.com/enso-org/enso/issues/5839" <| + group_builder.specify "should handle quotes if they are opened in the middle of an unquoted cell in a sane way" pending="ToDo: To be fixed in https://github.com/enso-org/enso/issues/5839" <| t1 = Data.read (enso_project.data / "mismatched_quote_at_end.csv") (Delimited "," headers=True value_formatter=Nothing) t1.column_names . should_equal ["a", "b", "c"] t1.at 'a' . to_vector . should_equal ['1', 'abc', '7'] @@ -283,7 +282,7 @@ spec = t3.column_names.should_equal ["A", "B", "C"] t3.print - Test.specify "should handle too long and too short rows" <| + group_builder.specify "should handle too long and too short rows" <| action keep_invalid_rows on_problems = Data.read (enso_project.data / "varying_rows.csv") (Delimited "," headers=True keep_invalid_rows=keep_invalid_rows value_formatter=Nothing) on_problems=on_problems @@ -321,7 +320,7 @@ spec = r3.at 'b' . to_vector . should_equal ['2', '0', '5'] r3.at 'c' . to_vector . 
should_equal ['3', Nothing, '6'] - Test.specify "should aggregate invalid rows over some limit" <| + group_builder.specify "should aggregate invalid rows over some limit" <| action on_problems = Data.read (enso_project.data / "many_invalid_rows.csv") (Delimited "," headers=True keep_invalid_rows=False value_formatter=Nothing) on_problems @@ -333,7 +332,7 @@ spec = problems = [Invalid_Row.Error 3 Nothing ['1'] 3, Invalid_Row.Error 4 Nothing ['2'] 3, Invalid_Row.Error 5 Nothing ['3'] 3, Invalid_Row.Error 6 Nothing ['4'] 3, Invalid_Row.Error 8 Nothing ['6'] 3, Invalid_Row.Error 9 Nothing ['7'] 3, Invalid_Row.Error 10 Nothing ['8'] 3, Invalid_Row.Error 11 Nothing ['9'] 3, Invalid_Row.Error 12 Nothing ['10'] 3, Invalid_Row.Error 13 Nothing ['11'] 3, Additional_Invalid_Rows.Error 3] Problems.test_problem_handling action problems tester - Test.specify "should allow to skip rows" <| + group_builder.specify "should allow to skip rows" <| t1 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 value_formatter=Nothing) t1.at "Column 1" . to_vector . should_equal ['7', '10'] @@ -341,7 +340,7 @@ spec = t2.columns.map .name . should_equal ['7', '8', '9'] t2.at "7" . to_vector . should_equal ['10'] - Test.specify "should allow to set a limit of rows to read" <| + group_builder.specify "should allow to set a limit of rows to read" <| t1 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False row_limit=2 value_formatter=Nothing) t1.at "Column 1" . to_vector . should_equal ['a', '1'] @@ -363,7 +362,7 @@ spec = t6 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 row_limit=1000 value_formatter=Nothing) t6.at "Column 1" . to_vector . should_equal ['7', '10'] - Test.specify "should check arguments" <| + group_builder.specify "should check arguments" <| path = (enso_project.data / "simple_empty.csv") pb = Problem_Behavior.Report_Error path.read (Delimited "," headers=False . 
with_quotes quote='abc') pb . should_fail_with Illegal_Argument @@ -371,7 +370,7 @@ spec = path.read (Delimited "," headers=False . with_quotes quote_escape='//') pb . should_fail_with Illegal_Argument path.read (Delimited 'a\u{301}' headers=False) pb . should_fail_with Illegal_Argument - Test.specify "should correctly guess column types" <| + group_builder.specify "should correctly guess column types" <| t = (enso_project.data / "data_small.csv") . read (Delimited "," headers=True) t.at "Code" . to_vector . should_equal ["gxl", "wca", "nfw", "der"] t.at "Index" . to_vector . should_equal [7, 0, 1, 7] @@ -387,7 +386,7 @@ spec = t2 = (enso_project.data / "data_small.csv") . read (Delimited "," headers=True value_formatter=(Data_Formatter.Value allow_leading_zeros=True)) t2.at "Leading0s" . to_vector . should_equal [1, 2, 123, Nothing] - Test.specify "should be able to detect types automatically" <| + group_builder.specify "should be able to detect types automatically" <| t1 = (enso_project.data / "data_small.csv") . read t1.at "Code" . to_vector . should_equal ["gxl", "wca", "nfw", "der"] t1.at "Index" . to_vector . should_equal [7, 0, 1, 7] @@ -398,7 +397,7 @@ spec = t2.at "c" . to_vector . should_equal [3, 6] t2.columns.map .name . should_equal ["a", "b", "c"] - Test.specify "should be able to read in a file without splitting it to columns" <| + group_builder.specify "should be able to read in a file without splitting it to columns" <| t1 = (enso_project.data / "data_small.csv") . read (Delimited "" headers=False) expected = ['Code,Index,Flag,Value,ValueWithNothing,TextWithNothing,"Hexadecimal",Leading0s,QuotedNumbers,"Mixed Types"'] + ['gxl,7,True,38.76109,63.13, pq6igd2wyd ,4DD4675B,001,"1","33"'] @@ -407,7 +406,7 @@ spec = + ['der,7,True,0.86658,,,F32E1EFE,,"34",True'] t1.at 0 . to_vector . 
should_equal expected - Test.specify "should be able to parse raw text" <| + group_builder.specify "should be able to parse raw text" <| text1 = """ a,b,c 1,2,3 @@ -424,19 +423,19 @@ spec = t2.at "a" . to_vector . should_equal [1, 3] t2.at "b" . to_vector . should_equal [2, 4] - Test.specify "should be able to read column names starting with #" <| + group_builder.specify "should be able to read column names starting with #" <| reference_table = Table.new [["#", ["a", ";1", "5"]], ["x", [42, 2, 6]], ["y", ["c # comment??", "3", "7;comment?"]]] table = Data.read (enso_project.data / "comments.csv") table.should_equal reference_table - Test.specify "should be able to handle comments if enabled" <| + group_builder.specify "should be able to handle comments if enabled" <| table_hash = Table.new [["a", [";1", "5"]], ["42", [2, 6]], ["c # comment??", ["3", "7;comment?"]]] table_semicolon = Table.new [["#", ["a", "5"]], ["x", [42, 6]], ["y", ["c # comment??", "7;comment?"]]] Data.read (enso_project.data / "comments.csv") (Delimited ',' . with_comments . with_headers) . should_equal table_hash Data.read (enso_project.data / "comments.csv") (Delimited ',' . with_comments ';' . with_headers) . should_equal table_semicolon - Test.specify "should manage to parse a file containing null characters" pending="Parsing NULL character in CSV currently does not handle some edge cases. It may need to be revised. See issue https://github.com/enso-org/enso/issues/5655" <| + group_builder.specify "should manage to parse a file containing null characters" pending="Parsing NULL character in CSV currently does not handle some edge cases. It may need to be revised. 
See issue https://github.com/enso-org/enso/issues/5655" <| f = enso_project.data / "transient" / "slash_zero.csv" f.delete_if_exists txt = 'a,b\n\0,\0\nx\0y,zw\na#b,c\0d' @@ -449,7 +448,7 @@ spec = f.delete_if_exists - Test.specify "should allow to build the Delimited configuration using builders" <| + group_builder.specify "should allow to build the Delimited configuration using builders" <| Delimited "," . clone . should_equal (Delimited ",") Delimited "," encoding=Encoding.ascii skip_rows=123 row_limit=100 headers=False value_formatter=Nothing . clone . should_equal (Delimited "," headers=False value_formatter=Nothing skip_rows=123 row_limit=100 encoding=Encoding.ascii) Delimited "," . clone quote_style=Quote_Style.No_Quotes headers=False value_formatter=Nothing . should_equal (Delimited "," headers=False value_formatter=Nothing quote_style=Quote_Style.No_Quotes) @@ -474,4 +473,8 @@ spec = Delimited ',' comment_character='#' . without_comments . should_equal (Delimited ',' comment_character=Nothing) Delimited ',' . with_line_endings Line_Ending_Style.Unix . 
should_equal (Delimited ',' line_endings=Line_Ending_Style.Unix) -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/IO/Delimited_Write_Spec.enso b/test/Table_Tests/src/IO/Delimited_Write_Spec.enso index b7ff1fb10afc..275440123cf8 100644 --- a/test/Table_Tests/src/IO/Delimited_Write_Spec.enso +++ b/test/Table_Tests/src/IO/Delimited_Write_Spec.enso @@ -6,8 +6,7 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument from Standard.Table import Table, Column, Data_Formatter, Quote_Style, Match_Columns, Delimited from Standard.Table.Errors import all -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all from project.Util import all @@ -22,10 +21,10 @@ join_lines lines trailing_newline=True = eol = default_line_endings_for_new_files.to_text if trailing_newline then lines.join eol suffix=eol else lines.join eol -spec = +add_specs suite_builder = line_ending_pairs = [[Line_Ending_Style.Unix, '\n'], [Line_Ending_Style.Windows, '\r\n'], [Line_Ending_Style.Mac_Legacy, '\r']] - Test.group "Delimited File Writing" <| - Test.specify "should correctly write a simple table and return the written file object on success" <| + suite_builder.group "Delimited File Writing" group_builder-> + group_builder.specify "should correctly write a simple table and return the written file object on success" <| table = Table.new [["A", [1,2,3]], ["B", [1.0,1.5,2.2]], ["C", ["x","y","z"]], ["D", ["a", 2, My_Type.Value 10]]] file = (enso_project.data / "transient" / "written.csv") file.delete_if_exists @@ -39,7 +38,7 @@ spec = text.should_equal expected_text file.delete - Test.specify "should allow to specify line ending style" <| + group_builder.specify "should allow to specify line ending style" <| table = Table.new [["a", ["b", "c"]], ["d", ["e", "f"]]] lines = ["a,d", "b,e", "c,f"] line_ending_pairs.each 
setting-> @@ -51,7 +50,7 @@ spec = text.should_equal (lines.join separator suffix=separator) file.delete - Test.specify 'should quote values that contain the delimiter, newline or quotes, in the [,""] variant' <| + group_builder.specify 'should quote values that contain the delimiter, newline or quotes, in the [,""] variant' <| data_formatter = Data_Formatter.Value decimal_point="," table = Table.new [['The Column "Name"', ["foo","'bar'",'"baz"', 'one, two, three', 'a\nb']], ["Hello, Column?", [1.0, 1000000.5, 2.2, -1.5, 0.0]]] file = (enso_project.data / "transient" / "quotes1.csv") @@ -69,7 +68,7 @@ spec = text.should_equal expected_text file.delete - Test.specify 'should quote values that contain the delimiter, newline or quotes, in the [;\\\"] variant' <| + group_builder.specify 'should quote values that contain the delimiter, newline or quotes, in the [;\\\"] variant' <| data_formatter = Data_Formatter.Value thousand_separator="'" table = Table.new [['"A"', ["foo",'!"baz" ', 'one, two, three', "a;b; c ", "a\b", 'n\nm']], ["B", [1000000.5, 1000.0, 0.0, -1.2, Nothing, 33]]] file = (enso_project.data / "transient" / "quotes2.csv") @@ -88,7 +87,7 @@ spec = text.should_equal expected_text file.delete - Test.specify "should quote values that contain the delimiter, newline or quotes, in the [\t''] variant" <| + group_builder.specify "should quote values that contain the delimiter, newline or quotes, in the [\t''] variant" <| data_formatter = Data_Formatter.Value thousand_separator="'" table = Table.new [['"A"', [Nothing,"The 'thing'.", 'one, "two", three', 'a\tb', 'x\ny', 'w\vz']], ["B\C", [1000000.5, 1000.0, Nothing, -1.2, 2.0, 42.0]]] file = (enso_project.data / "transient" / "quotes3.csv") @@ -107,7 +106,7 @@ spec = text.should_equal expected_text file.delete - Test.specify "should correctly distinguish empty text from a missing value" <| + group_builder.specify "should correctly distinguish empty text from a missing value" <| table = Table.new [["A", 
[1,Nothing,3]], ["B", [Nothing,"","abc"]]] file = (enso_project.data / "transient" / "empty_vs_null.csv") file.delete_if_exists @@ -121,7 +120,7 @@ spec = text.should_equal expected_text file.delete - Test.specify "should quote values containing the comment symbol if comments are enabled" <| + group_builder.specify "should quote values containing the comment symbol if comments are enabled" <| table = Table.new [["#", ['b', 'x', '#']], ["B", [Nothing,"#","abc"]]] file = (enso_project.data / "transient" / "comments.csv") file.delete_if_exists @@ -142,7 +141,7 @@ spec = text_2.should_equal expected_text_2 file.delete - Test.specify 'should not quote values if quoting is disabled' <| + group_builder.specify 'should not quote values if quoting is disabled' <| format = Delimited "," value_formatter=(Data_Formatter.Value decimal_point=",") . without_quotes table = Table.new [['The Column "Name"', ["foo","'bar'",'"baz"', 'one, two, three']], ["Hello, Column?", [1.0, 1000000.5, 2.2, -1.5]]] file = (enso_project.data / "transient" / "quote_disabled.csv") @@ -168,7 +167,7 @@ spec = text.should_equal expected_text file.delete - Test.specify 'should allow to always quote text and custom values, but for non-text primitives only if absolutely necessary' <| + group_builder.specify 'should allow to always quote text and custom values, but for non-text primitives only if absolutely necessary' <| format = Delimited "," value_formatter=(Data_Formatter.Value thousand_separator='"' . with_datetime_formats date_formats=["dddd, d MMM y"]) . 
with_quotes always_quote=True quote_escape='\\' table = Table.new [['The Column "Name"', ["foo","'bar'",'"baz"', 'one, two, three']], ["B", [1.0, 1000000.5, 2.2, -1.5]], ["C", ["foo", My_Type.Value 44, (Date.new 2022 06 21), 42]], ["D", [1,2,3,4000]], ["E", [Nothing, (Time_Of_Day.new 13 55), Nothing, Nothing]]] file = (enso_project.data / "transient" / "quote_always.csv") @@ -184,7 +183,7 @@ spec = text.should_equal expected_text file.delete - Test.specify "should correctly handle alternative encodings" <| + group_builder.specify "should correctly handle alternative encodings" <| table = Table.new [["ąęćś", [0]], ["ß", ["żółw 🐢"]]] file = (enso_project.data / "transient" / "utf16.csv") file.delete_if_exists @@ -196,7 +195,7 @@ spec = text.should_equal expected_text file.delete - Test.specify "should correctly handle encoding errors" <| + group_builder.specify "should correctly handle encoding errors" <| table = Table.new [["A", [0, 1]], ["B", ["słówka", "🐢"]]] file = (enso_project.data / "transient" / "ascii.csv") file.delete_if_exists @@ -214,7 +213,7 @@ spec = Problems.get_attached_warnings result . 
should_equal [Encoding_Error.Error msg] file.delete - Test.specify "should allow only text columns if no formatter is specified" <| + group_builder.specify "should allow only text columns if no formatter is specified" <| format = Delimited "," value_formatter=Nothing table_1 = Table.new [["A", ["x", "y"]], ["B", ["z", "w"]]] file_1 = (enso_project.data / "transient" / "textonly.csv") @@ -239,7 +238,7 @@ spec = file_1.delete file_2.delete - Test.specify "should create a new file in append mode if it didn't exist" <| + group_builder.specify "should create a new file in append mode if it didn't exist" <| table = Table.new [["A", [1,2,3]], ["B", [1.0,1.5,2.2]], ["C", ["x","y","z"]]] file = (enso_project.data / "transient" / "append_nonexistent.csv") file.delete_if_exists @@ -248,7 +247,7 @@ spec = got_table.should_equal table file.delete - Test.specify "should correctly append to an empty file" <| + group_builder.specify "should correctly append to an empty file" <| table = Table.new [["A", [1,2,3]], ["B", [1.0,1.5,2.2]], ["C", ["x","y","z"]]] file = (enso_project.data / "transient" / "append_empty.csv") file.delete_if_exists @@ -258,7 +257,7 @@ spec = got_table.should_equal table file.delete - Test.specify "should correctly append to a file with a missing newline at EOF" <| + group_builder.specify "should correctly append to a file with a missing newline at EOF" <| table = Table.new [["A", [1,2,3]], ["B", [1.0,1.5,2.2]], ["C", ["x","y","z"]]] file = (enso_project.data / "transient" / "append_missing_newline.csv") file.delete_if_exists @@ -269,7 +268,7 @@ spec = text.should_equal (expected_lines.join '\r' suffix='\r') file.delete - Test.specify "should append to a file, matching columns by name (headers=Infer)" <| + group_builder.specify "should append to a file, matching columns by name (headers=Infer)" <| existing_table = Table.new [["A", [1,2]], ["B", [1.0,1.5]], ["C", ["x","y"]]] appending_table = Table.new [["B", [33,44]], ["A", [Nothing, 0]], ["C", ["a","BB"]]] 
file = (enso_project.data / "transient" / "append_by_name.csv") @@ -281,7 +280,7 @@ spec = got_table.should_equal expected_table file.delete - Test.specify "should append to a file, matching columns by name (headers=True)" <| + group_builder.specify "should append to a file, matching columns by name (headers=True)" <| existing_table = Table.new [["0", [1,2]], ["B1", [1.0,1.5]], ["C", ["x","y"]]] appending_table = Table.new [["B1", [33,44]], ["0", [Nothing, 0]], ["C", ["a","BB"]]] file = (enso_project.data / "transient" / "append_by_name_2.csv") @@ -294,7 +293,7 @@ spec = got_table.should_equal expected_table file.delete - Test.specify "should fail when appending and matching columns by name but column names are not available in the file (headers=Infer)" <| + group_builder.specify "should fail when appending and matching columns by name but column names are not available in the file (headers=Infer)" <| existing_table = Table.new [["A", [1,2]], ["B", [1.0,1.5]], ["C", ["x","y"]]] appending_table = Table.new [["B", [33,44]], ["A", [Nothing, 0]], ["C", ["a","BB"]]] file = (enso_project.data / "transient" / "append_no_header.csv") @@ -304,7 +303,7 @@ spec = appending_table.write file on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument file.delete - Test.specify "should fail when appending and matching columns by name but headers are disabled (headers=False)" <| + group_builder.specify "should fail when appending and matching columns by name but headers are disabled (headers=False)" <| existing_table = Table.new [["A", [1,2]], ["B", [1.0,1.5]], ["C", ["x","y"]]] appending_table = Table.new [["B", [33,44]], ["A", [Nothing, 0]], ["C", ["a","BB"]]] file = (enso_project.data / "transient" / "append_no_header.csv") @@ -314,7 +313,7 @@ spec = appending_table.write file no_header_format on_existing_file=Existing_File_Behavior.Append . 
should_fail_with Illegal_Argument file.delete - Test.specify "should fail on column mismatch when appending to a file by name" <| + group_builder.specify "should fail on column mismatch when appending to a file by name" <| existing_table = Table.new [["A", [1,2]], ["B", [1.0,1.5]]] appending_table = Table.new [["B", [33,44]], ["X", [Nothing, 0]]] file = (enso_project.data / "transient" / "append_no_header.csv") @@ -327,7 +326,7 @@ spec = result.catch.to_display_text . should_equal "Columns mismatch. Missing from new data: [A] Extras in new data: [X]" file.delete - Test.specify "should append to a file, matching columns by position" <| + group_builder.specify "should append to a file, matching columns by position" <| existing_table = Table.new [["A", [1,2]], ["B", [1.0,1.5]], ["C", ["x","y"]]] appending_table = Table.new [["AA", [33,44]], ["...", [Nothing, 0]], ["hmmm", ["a","BB"]]] @@ -353,7 +352,7 @@ spec = test_append initial_file_format=no_headers append_format=base_format expected_table_without_headers test_append initial_file_format=no_headers append_format=no_headers expected_table_without_headers - Test.specify "should fail on column count mismatch when appending to a file by position" <| + group_builder.specify "should fail on column count mismatch when appending to a file by position" <| existing_table = Table.new [["A", [1,2]], ["B", [1.0,1.5]], ["C", ["x","y"]]] appending_table_1 = Table.new [["B", [33,44]], ["X", [Nothing, 0]]] appending_table_2 = Table.new [["B", [33,44]], ["X", [Nothing, 0]], ["Y", ["a","BB"]], ["Z", [Nothing, 0]]] @@ -375,7 +374,7 @@ spec = file.delete - Test.specify "should use the same line ending style as existing data when appending" <| + group_builder.specify "should use the same line ending style as existing data when appending" <| initial_table = Table.new [["a", [1, 2]], ["d", ["e", "f"]]] table_to_append = Table.new [["a", ["x", "y"]], ["d", ["z", "w"]]] expected_lines = ["a,d", "1,e", "2,f", "x,z", "y,w"] @@ -389,7 +388,7 
@@ spec = text.should_equal (expected_lines.join separator suffix=separator) file.delete - Test.specify "should use Unix line ending style when appending to an empty or nonexistent file" <| + group_builder.specify "should use Unix line ending style when appending to an empty or nonexistent file" <| empty_file = (enso_project.data / "transient" / "empty.csv") "".write empty_file nonexistent_file = (enso_project.data / "transient" / "nonexistent.csv") @@ -404,7 +403,7 @@ spec = Data.read_text empty_file . should_equal expected_text Data.read_text nonexistent_file . should_equal expected_text - Test.specify "should use the existing line ending style when appending to a file consisting of only comments" <| + group_builder.specify "should use the existing line ending style when appending to a file consisting of only comments" <| initial_lines = ["# comment 1", "# comment 2"] table_to_append = Table.new [["a", ["x", "y"]], ["b", ["z", "w"]]] expected_lines = initial_lines + ["a,b", "x,z", "y,w"] @@ -420,7 +419,7 @@ spec = text.should_equal expected_text file.delete - Test.specify "should use the existing line ending style when appending to a file consisting of only comments missing last EOL" <| + group_builder.specify "should use the existing line ending style when appending to a file consisting of only comments missing last EOL" <| initial_lines = ["# comment 1", "# comment 2 without EOL"] table_to_append = Table.new [["a", ["x", "y"]], ["b", ["z", "w"]]] expected_lines = initial_lines + ["a,b", "x,z", "y,w"] @@ -436,7 +435,7 @@ spec = text.should_equal expected_text file.delete - Test.specify "should correctly handle append edge cases" <| + group_builder.specify "should correctly handle append edge cases" <| table = Table.new [["a", [1, 2]]] file = (enso_project.data / "transient" / "append_edge_cases.csv") file.delete_if_exists @@ -507,7 +506,7 @@ spec = text.should_equal expected_text file.delete - Test.specify "should use the existing line ending style when 
appending to a file consisting of only one comment with EOL" <| + group_builder.specify "should use the existing line ending style when appending to a file consisting of only one comment with EOL" <| initial_line = "# comment 1 with EOL" table_to_append = Table.new [["a", ["x", "y"]], ["b", ["z", "w"]]] expected_lines = [initial_line] + ["a,b", "x,z", "y,w"] @@ -523,7 +522,7 @@ spec = text.should_equal expected_text file.delete - Test.specify "should use the Unix line ending style when appending to a file consisting of only one comment and missing the EOL" <| + group_builder.specify "should use the Unix line ending style when appending to a file consisting of only one comment and missing the EOL" <| initial_lines = ["# comment 1 without EOL"] table_to_append = Table.new [["a", ["x", "y"]], ["b", ["z", "w"]]] expected_lines = initial_lines + ["a,b", "x,z", "y,w"] @@ -537,7 +536,7 @@ spec = text.should_equal expected_text file.delete - Test.specify "should fail if explicitly provided line endings do not match line endings in the file when appending" <| + group_builder.specify "should fail if explicitly provided line endings do not match line endings in the file when appending" <| initial_table = Table.new [["a", [1, 2]]] table_to_append = Table.new [["a", ["x", "y"]]] file = (enso_project.data / "transient" / "endings_mismatch.csv") @@ -548,7 +547,7 @@ spec = result.catch.message . should_equal "The explicitly provided line endings ('\n') do not match the line endings in the file ('\r')." 
file.delete - Test.specify "should fail if the target file is read-only" <| + group_builder.specify "should fail if the target file is read-only" <| f = enso_project.data / "transient" / "permission.csv" f.delete_if_exists @@ -566,7 +565,7 @@ spec = set_writable f True f.delete - Test.specify "should fail if the parent directory does not exist" <| + group_builder.specify "should fail if the parent directory does not exist" <| parent = enso_project.data / "transient" / "nonexistent" parent.exists.should_be_false @@ -576,7 +575,7 @@ spec = r1.should_fail_with File_Error r1.catch.should_be_a File_Error.Not_Found - Test.specify "should warn about not-encodable characters according to the problem behaviour" <| + group_builder.specify "should warn about not-encodable characters according to the problem behaviour" <| f = enso_project.data / "transient" / "encoding-errors.csv" format = Delimited "," encoding=Encoding.ascii headers=True @@ -602,4 +601,8 @@ spec = f.read Plain_Text . should_equal "Initial Content" f.delete -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/IO/Excel_Spec.enso b/test/Table_Tests/src/IO/Excel_Spec.enso index 903ebe0bd756..af04eaae1ed6 100644 --- a/test/Table_Tests/src/IO/Excel_Spec.enso +++ b/test/Table_Tests/src/IO/Excel_Spec.enso @@ -11,8 +11,8 @@ from Standard.Table import Table, Match_Columns, Excel, Excel_Range, Data_Format from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names, Invalid_Location, Range_Exceeded, Existing_Data, Column_Count_Mismatch, Column_Name_Mismatch, Empty_Sheet_Error -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + import Standard.Examples @@ -20,20 +20,20 @@ import project.Util polyglot java import org.enso.table_test_helpers.RandomHelpers -spec_fmt header file read_method sheet_count=5 = - Test.group 
header <| - Test.specify "should read a workbook in" <| +spec_fmt suite_builder header file read_method sheet_count=5 = + suite_builder.group header group_builder-> + group_builder.specify "should read a workbook in" <| wb = read_method file wb.sheet_count . should_equal sheet_count - Test.specify "should read the specified sheet by index and use correct headers" <| + group_builder.specify "should read the specified sheet by index and use correct headers" <| t = read_method file (Excel (Worksheet 1)) t.columns.map .name . should_equal ['Name', 'Quantity', 'Price'] t.at 'Name' . to_vector . should_equal ['blouse', 't-shirt', 'trousers', 'shoes', 'skirt', 'dress'] t.at 'Quantity' . to_vector . should_equal [10, 20, Nothing, 30, Nothing, 5] t.at 'Price' . to_vector . should_equal [22.3, 32, 43.2, 54, 31, Nothing] - Test.specify "should read the specified sheet by index and properly format a table" <| + group_builder.specify "should read the specified sheet by index and properly format a table" <| t = read_method file (Excel (Worksheet 2) headers=False) t.columns.map .name . should_equal ['A', 'B', 'C', 'D', 'E'] t.at 'A' . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing] @@ -42,21 +42,21 @@ spec_fmt header file read_method sheet_count=5 = t.at 'D' . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing] t.at 'E' . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, Nothing, 'foo', Nothing] - Test.specify "should read the specified sheet by name and properly handle dates" <| + group_builder.specify "should read the specified sheet by name and properly handle dates" <| t = read_method file (Excel (Worksheet 'Dates')) t.columns.map .name . should_equal ['Student Name', 'Enrolment Date'] t.at 'Enrolment Date' . map .day . to_vector . 
should_equal [2, 26, 4, 24, 31, 7] - Test.specify "should give an informative error when reading an empty table" <| + group_builder.specify "should give an informative error when reading an empty table" <| t = read_method file (Excel (Worksheet "Empty")) t.should_fail_with Empty_Sheet_Error - Test.specify "should gracefully handle duplicate column names and formulas" <| + group_builder.specify "should gracefully handle duplicate column names and formulas" <| t = read_method file (Excel (Worksheet "Duplicate Columns")) t.columns.map .name . should_equal ['Item', 'Price', 'Quantity', 'Price 1'] t.at 'Price 1' . to_vector . should_equal [20, 40, 0, 60, 0, 10] - Test.specify "should allow reading with cell range specified" <| + group_builder.specify "should allow reading with cell range specified" <| t_1 = read_method file (Excel (Cell_Range "Simple!B:C")) t_1.columns.map .name . should_equal ['Quantity', 'Price'] t_1.at 'Quantity' . to_vector . should_equal [10, 20, Nothing, 30, Nothing, 5] @@ -73,280 +73,271 @@ spec_fmt header file read_method sheet_count=5 = t_3.at 'B' . to_vector . should_equal [Nothing, 30] t_3.at 'C' . to_vector . should_equal [43.2, 54] -spec_write suffix test_sheet_name = - Test.group ("Write " + suffix + " Files") <| +type Spec_Write_Data + Value ~data counter suffix + + table self = self.data.at 0 + clothes self = self.data.at 1 + sub_clothes self = self.data.at 2 + + setup suffix = table = enso_project.data/'varied_column.csv' . read clothes = enso_project.data/'clothes.csv' . read sub_clothes = clothes.select_columns [0, 1] - counter = Ref.new 0 - create_out = - i = counter.get + 1 - counter.put i - f = enso_project.data / "transient" / ("out" + i.to_text + "." + suffix) - Panic.rethrow f.delete_if_exists - f - - Test.specify 'should write a table to non-existent file as a new sheet with headers; and return the file object on success' <| - out = create_out - table.write out on_problems=Report_Error . should_succeed . 
should_equal out + Spec_Write_Data.Value [table, clothes, sub_clothes] counter suffix + + teardown self = + enso_project.data/"transient" . list "out*" . each .delete + + create_out self = + i = self.counter.get + 1 + self.counter.put i + f = enso_project.data / "transient" / ("out" + i.to_text + "." + self.suffix) + Panic.rethrow f.delete_if_exists + f + +spec_write suite_builder suffix test_sheet_name = + suite_builder.group ("Write " + suffix + " Files") group_builder-> + data = Spec_Write_Data.setup suffix + + group_builder.teardown <| + data.teardown + + group_builder.specify 'should write a table to non-existent file as a new sheet with headers; and return the file object on success' <| + out = data.create_out + data.table.write out on_problems=Report_Error . should_succeed . should_equal out written = out.read written.sheet_count . should_equal 1 written.sheet_names . should_equal ['EnsoSheet'] - written.read 'EnsoSheet' . should_equal table + written.read 'EnsoSheet' . should_equal data.table written.close - out.delete_if_exists . should_succeed - Test.specify 'should write a table to non-existent file in append mode as a new sheet with headers' <| - out = create_out - table.write out on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + group_builder.specify 'should write a table to non-existent file in append mode as a new sheet with headers' <| + out = data.create_out + data.table.write out on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed written = out.read written.sheet_count . should_equal 1 written.sheet_names . should_equal ['EnsoSheet'] - written.read 'EnsoSheet' . should_equal table + written.read 'EnsoSheet' . should_equal data.table written.close - out.delete_if_exists . should_succeed - Test.specify 'should write a table to existing file overriding EnsoSheet' <| - out = create_out - table.write out on_problems=Report_Error . 
should_succeed - table.write out on_problems=Report_Error . should_succeed + group_builder.specify 'should write a table to existing file overriding EnsoSheet' <| + out = data.create_out + data.table.write out on_problems=Report_Error . should_succeed + data.table.write out on_problems=Report_Error . should_succeed written_workbook = out.read written_workbook.sheet_count . should_equal 1 written_workbook.sheet_names . should_equal ['EnsoSheet'] - written_workbook.read 'EnsoSheet' . should_equal table + written_workbook.read 'EnsoSheet' . should_equal data.table written_workbook.close - out.delete_if_exists . should_succeed - Test.specify 'should write a table to existing file in overwrite mode as a new sheet with headers' <| - out = create_out + group_builder.specify 'should write a table to existing file in overwrite mode as a new sheet with headers' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out - table.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed + data.table.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed written = out.read (Excel (Worksheet "Another")) - written.should_equal table - out.delete_if_exists . should_succeed + written.should_equal data.table - Test.specify 'should write a table to existing file in overwrite mode as a new sheet without headers' <| - out = create_out + group_builder.specify 'should write a table to existing file in overwrite mode as a new sheet without headers' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out - table.write out (Excel (Worksheet "NoHeaders")) on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed + data.table.write out (Excel (Worksheet "NoHeaders")) on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . 
should_succeed written = out.read (Excel (Worksheet "NoHeaders")) - written.should_equal (table.rename_columns ['A', 'B', 'C', 'D', 'E', 'F']) - out.delete_if_exists . should_succeed + written.should_equal (data.table.rename_columns ['A', 'B', 'C', 'D', 'E', 'F']) - Test.specify 'should create new sheets at the start if index is 0' <| - out = create_out - table.write out (Excel (Worksheet 0)) on_problems=Report_Error . should_succeed - clothes.write out (Excel (Worksheet 0)) on_problems=Report_Error . should_succeed + group_builder.specify 'should create new sheets at the start if index is 0' <| + out = data.create_out + data.table.write out (Excel (Worksheet 0)) on_problems=Report_Error . should_succeed + data.clothes.write out (Excel (Worksheet 0)) on_problems=Report_Error . should_succeed read_1 = out.read (Excel (Worksheet "Sheet1")) - read_1 . should_equal table + read_1 . should_equal data.table read_2 = out.read (Excel (Worksheet "Sheet2")) - read_2 . should_equal clothes + read_2 . should_equal data.clothes read_3 = out.read (Excel (Sheet_Names)) read_3 . should_equal ["Sheet2", "Sheet1"] - out.delete_if_exists . should_succeed - Test.specify 'should write a table to specific single cell location of an existing sheet' <| - out = create_out + group_builder.specify 'should write a table to specific single cell location of an existing sheet' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out - table.write out (Excel (Cell_Range "Another!G1")) on_problems=Report_Error . should_succeed + data.table.write out (Excel (Cell_Range "Another!G1")) on_problems=Report_Error . should_succeed written = out.read (Excel (Cell_Range "Another!G1")) - written.should_equal table - out.delete_if_exists . 
should_succeed + written.should_equal data.table - Test.specify 'should clear out an existing fixed range and replace' <| - out = create_out + group_builder.specify 'should clear out an existing fixed range and replace' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out - sub_clothes.write out (Excel (Cell_Range "Another!A1:D20")) on_problems=Report_Error . should_succeed + data.sub_clothes.write out (Excel (Cell_Range "Another!A1:D20")) on_problems=Report_Error . should_succeed written = out.read (Excel (Cell_Range "Another!A1")) - written.should_equal sub_clothes - out.delete_if_exists . should_succeed + written.should_equal data.sub_clothes - Test.specify 'should clear out an existing range and replace' <| - out = create_out + group_builder.specify 'should clear out an existing range and replace' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out - sub_clothes.write out (Excel (Cell_Range "Another!A1")) on_problems=Report_Error . should_succeed + data.sub_clothes.write out (Excel (Cell_Range "Another!A1")) on_problems=Report_Error . should_succeed written = out.read (Excel (Cell_Range "Another!A1")) - written.should_equal sub_clothes - out.delete_if_exists . should_succeed + written.should_equal data.sub_clothes - Test.specify 'should result in Invalid_Location error if trying to write in a bad location' <| - out = create_out + group_builder.specify 'should result in Invalid_Location error if trying to write in a bad location' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out - sub_clothes.write out (Excel (Cell_Range "DoesNotExist!A1")) . should_fail_with Invalid_Location - sub_clothes.write out (Excel (Cell_Range "DoesNotExist!A1:B2")) . should_fail_with Invalid_Location - sub_clothes.write out (Excel (Cell_Range "SillyRangeName")) . should_fail_with Invalid_Location - out.delete_if_exists . should_succeed + data.sub_clothes.write out (Excel (Cell_Range "DoesNotExist!A1")) . 
should_fail_with Invalid_Location + data.sub_clothes.write out (Excel (Cell_Range "DoesNotExist!A1:B2")) . should_fail_with Invalid_Location + data.sub_clothes.write out (Excel (Cell_Range "SillyRangeName")) . should_fail_with Invalid_Location - Test.specify 'should result in Range_Exceeded error if trying to write in too small a range' <| - out = create_out + group_builder.specify 'should result in Range_Exceeded error if trying to write in too small a range' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out - sub_clothes.write out (Excel (Cell_Range "Another!A1:B2")) . should_fail_with Range_Exceeded - out.delete_if_exists . should_succeed + data.sub_clothes.write out (Excel (Cell_Range "Another!A1:B2")) . should_fail_with Range_Exceeded - Test.specify 'should result in Existing_Data error if in Error mode and trying to replace' <| - out = create_out + group_builder.specify 'should result in Existing_Data error if in Error mode and trying to replace' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out lmd = out.last_modified_time - r1 = sub_clothes.write out (Excel (Worksheet 1)) on_existing_file=Existing_File_Behavior.Error + r1 = data.sub_clothes.write out (Excel (Worksheet 1)) on_existing_file=Existing_File_Behavior.Error r1.should_fail_with File_Error r1.catch.should_be_a File_Error.Already_Exists - sub_clothes.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error - sub_clothes.write out (Excel (Cell_Range "Another!A1")) on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error - sub_clothes.write out (Excel (Cell_Range "Sheet1!A9")) on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error + data.sub_clothes.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Error . 
should_fail_with File_Error + data.sub_clothes.write out (Excel (Cell_Range "Another!A1")) on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error + data.sub_clothes.write out (Excel (Cell_Range "Sheet1!A9")) on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error Test.with_clue "the original file should remain unmodified: " <| out.last_modified_time.should_equal lmd - out.delete_if_exists . should_succeed - Test.specify 'should not allow adding a new sheet if in Error mode, even if sheet is not clashing' <| - out = create_out + group_builder.specify 'should not allow adding a new sheet if in Error mode, even if sheet is not clashing' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out lmd = out.last_modified_time - result = sub_clothes.write out (Excel (Worksheet "Testing")) on_existing_file=Existing_File_Behavior.Error + result = data.sub_clothes.write out (Excel (Worksheet "Testing")) on_existing_file=Existing_File_Behavior.Error result.should_fail_with File_Error result.catch.should_be_a File_Error.Already_Exists Test.with_clue "the original file should remain unmodified: " <| out.last_modified_time.should_equal lmd - out.delete_if_exists . should_succeed - Test.specify 'should write a table to non-existent file as a new sheet without headers' <| - out = create_out - table.write out (Excel (Worksheet "Sheet1") headers=False) on_problems=Report_Error . should_succeed + group_builder.specify 'should write a table to non-existent file as a new sheet without headers' <| + out = data.create_out + data.table.write out (Excel (Worksheet "Sheet1") headers=False) on_problems=Report_Error . should_succeed written = out.read written.sheet_count . should_equal 1 written.sheet_names . should_equal ['Sheet1'] - written.read 'Sheet1' . should_equal (table.rename_columns ['A', 'B', 'C', 'D', 'E', 'F']) + written.read 'Sheet1' . 
should_equal (data.table.rename_columns ['A', 'B', 'C', 'D', 'E', 'F']) # We need to close the workbook to be able to delete it. written.close - out.delete_if_exists . should_succeed - Test.specify 'should be able to append to a sheet by name' <| - out = create_out + group_builder.specify 'should be able to append to a sheet by name' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2] written.should_equal expected - out.delete_if_exists . should_succeed - Test.specify 'should be able to append to a sheet by position' <| - out = create_out + group_builder.specify 'should be able to append to a sheet by position' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2] written.should_equal expected - out.delete_if_exists . should_succeed - Test.specify 'should be able to append to a sheet by name out of order' <| - out = create_out + group_builder.specify 'should be able to append to a sheet by name out of order' <| + out = data.create_out (enso_project.data / test_sheet_name) . 
copy_to out extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2] written.should_equal expected - out.delete_if_exists . should_succeed - Test.specify 'should be able to append to a single cell by name' <| - out = create_out + group_builder.specify 'should be able to append to a single cell by name' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] extra_another.write out (Excel (Cell_Range "Another!A1")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2] written.should_equal expected - out.delete_if_exists . should_succeed - Test.specify 'should be able to append to a single cell by position' <| - out = create_out + group_builder.specify 'should be able to append to a single cell by position' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] extra_another.write out (Excel (Cell_Range "Another!A1")) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . 
should_succeed written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2] written.should_equal expected - out.delete_if_exists . should_succeed - Test.specify 'should be able to append to a single cell by name out of order' <| - out = create_out + group_builder.specify 'should be able to append to a single cell by name out of order' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] extra_another.write out (Excel (Cell_Range "Another!A1")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2] written.should_equal expected - out.delete_if_exists . should_succeed - Test.specify 'should be able to append to a range by name' <| - out = create_out + group_builder.specify 'should be able to append to a range by name' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['AA', ['d', 'e']], ['BB', [4, 5]], ['CC', [True, False]], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a', 'b', 'c', 'd', 'e']], ['BB', [1, 2, 3, 4, 5]], ['CC', [True, False, False, True, False]]] extra_another.write out (Excel (Cell_Range "Another!A1:D6")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2] written.should_equal expected - out.delete_if_exists . should_succeed - Test.specify 'should be able to append to a range by position' <| - out = create_out + group_builder.specify 'should be able to append to a range by position' <| + out = data.create_out (enso_project.data / test_sheet_name) . 
copy_to out extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] extra_another.write out (Excel (Cell_Range "Another!A1:D6")) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2] written.should_equal expected - out.delete_if_exists . should_succeed - Test.specify 'should be able to append to a range by name not in top left' <| - out = create_out + group_builder.specify 'should be able to append to a range by name not in top left' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['CC',[True, False, False, True, False]]] extra_another.write out (Excel (Cell_Range "Random!K9")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed written = out.read (Excel (Cell_Range "Random!K9")) . select_columns [0, 1, 2] written.should_equal expected - out.delete_if_exists . should_succeed - Test.specify 'should be able to append to a range by name after deduplication of names' <| - out = create_out + group_builder.specify 'should be able to append to a range by name after deduplication of names' <| + out = data.create_out (enso_project.data / test_sheet_name) . 
copy_to out extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['AA 1',[True, False]], ['BB 1', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['AA 1',[True, False, False, True, False]]] extra_another.write out (Excel (Cell_Range "Random!S3")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed written = out.read (Excel (Cell_Range "Random!S3")) . select_columns [0, 1, 2] written.should_equal expected - out.delete_if_exists . should_succeed - Test.specify 'should be able to append to a range by position not in top left' <| - out = create_out + group_builder.specify 'should be able to append to a range by position not in top left' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['CC',[True, False, False, True, False]]] extra_another.write out (Excel (Cell_Range "Random!K9")) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed written = out.read (Excel (Cell_Range "Random!K9")) . select_columns [0, 1, 2] written.should_equal expected - out.delete_if_exists . should_succeed - Test.specify 'should be able to append to a range by name out of order' <| - out = create_out + group_builder.specify 'should be able to append to a range by name out of order' <| + out = data.create_out (enso_project.data / test_sheet_name) . 
copy_to out extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] extra_another.write out (Excel (Cell_Range "Another!A1:D6")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2] written.should_equal expected - out.delete_if_exists . should_succeed - Test.specify 'should be able to write to a new dry run file' <| - out = create_out + group_builder.specify 'should be able to write to a new dry run file' <| + out = data.create_out temp = Context.Output.with_disabled <| - result = table.write out on_problems=Report_Error . should_succeed + result = data.table.write out on_problems=Report_Error . should_succeed Problems.expect_only_warning Dry_Run_Operation result result.exists.should_be_true @@ -355,16 +346,16 @@ spec_write suffix test_sheet_name = written = result.read written.sheet_count . should_equal 1 written.sheet_names . should_equal ['EnsoSheet'] - written.read 'EnsoSheet' . should_equal table + written.read 'EnsoSheet' . should_equal data.table written.close result temp.delete_if_exists - Test.specify "should be able to write to a dry-run file, even if the dry-run workbook is open" <| - out = create_out + group_builder.specify "should be able to write to a dry-run file, even if the dry-run workbook is open" <| + out = data.create_out out.exists.should_be_false temp = Context.Output.with_disabled <| - result = table.write out on_problems=Report_Error . should_succeed + result = data.table.write out on_problems=Report_Error . should_succeed Problems.expect_only_warning Dry_Run_Operation result result.exists.should_be_true result @@ -375,7 +366,7 @@ spec_write suffix test_sheet_name = opened_temp.sheet_names . 
should_equal ['EnsoSheet'] temp2 = Context.Output.with_disabled <| - result = table.write out (Excel (Worksheet "Another")) on_problems=Report_Error . should_succeed + result = data.table.write out (Excel (Worksheet "Another")) on_problems=Report_Error . should_succeed Problems.expect_only_warning Dry_Run_Operation result result.exists.should_be_true result @@ -392,10 +383,10 @@ spec_write suffix test_sheet_name = opened_temp.close temp.delete_if_exists - Test.specify "should be able to write to a dry-run file multiple times if the dry-run file object is threaded through" <| - out = create_out + group_builder.specify "should be able to write to a dry-run file multiple times if the dry-run file object is threaded through" <| + out = data.create_out temp1 = Context.Output.with_disabled <| - result = table.write out on_problems=Report_Error . should_succeed + result = data.table.write out on_problems=Report_Error . should_succeed Problems.expect_only_warning Dry_Run_Operation result result.exists.should_be_true result @@ -405,7 +396,7 @@ spec_write suffix test_sheet_name = opened_temp.sheet_names . should_equal ['EnsoSheet'] temp2 = Context.Output.with_disabled <| - result = table.write temp1 (Excel (Worksheet "Another")) on_problems=Report_Error . should_succeed + result = data.table.write temp1 (Excel (Worksheet "Another")) on_problems=Report_Error . should_succeed Problems.expect_only_warning Dry_Run_Operation result result.exists.should_be_true result @@ -419,8 +410,8 @@ spec_write suffix test_sheet_name = opened_temp.close temp1.delete_if_exists - Test.specify "should be able to create a backup, even if it is currently open" <| - out = create_out + group_builder.specify "should be able to create a backup, even if it is currently open" <| + out = data.create_out bak = out.parent / (out.name+".bak") t1 = Table.new [["X", [1]]] @@ -448,92 +439,83 @@ spec_write suffix test_sheet_name = opened_out.close opened_backup.close - out.delete_if_exists . 
should_succeed bak.delete_if_exists . should_succeed - Test.specify 'should be able to write to an existing empty file' <| - out = create_out + group_builder.specify 'should be able to write to an existing empty file' <| + out = data.create_out [].write_bytes out out_bak = out.parent / (out.name+".bak") - table.write out on_problems=Report_Error . should_succeed . should_equal out + data.table.write out on_problems=Report_Error . should_succeed . should_equal out written = out.read written.sheet_count . should_equal 1 written.sheet_names . should_equal ['EnsoSheet'] - written.read 'EnsoSheet' . should_equal table + written.read 'EnsoSheet' . should_equal data.table Test.with_clue "should have created a backup file: " <| out_bak.exists.should_be_true out_bak.size.should_equal 0 written.close - out.delete_if_exists . should_succeed out_bak.delete_if_exists . should_succeed - Test.specify 'should fail to append to a sheet by name if missing columns' <| - out = create_out + group_builder.specify 'should fail to append to a sheet by name if missing columns' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']]] extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append . should_fail_with Column_Name_Mismatch - out.delete_if_exists . should_succeed - Test.specify 'should fail to append to a sheet by name if extra columns' <| - out = create_out + group_builder.specify 'should fail to append to a sheet by name if extra columns' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out lmd = out.last_modified_time extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']], ['EE', ['2022-01-20', '2022-01-21']]] extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append . 
should_fail_with Column_Name_Mismatch out.last_modified_time.should_equal lmd - out.delete_if_exists . should_succeed - Test.specify 'should fail to append to a sheet by name if no headers' <| - out = create_out + group_builder.specify 'should fail to append to a sheet by name if no headers' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out lmd = out.last_modified_time extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']], ['EE', ['2022-01-20', '2022-01-21']]] extra_another.write out (Excel (Worksheet "NoHeaders")) on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument extra_another.write out (Excel (Worksheet "Another") headers=False) on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument out.last_modified_time.should_equal lmd - out.delete_if_exists . should_succeed - Test.specify 'should fail to append to a sheet by position if too few columns' <| - out = create_out + group_builder.specify 'should fail to append to a sheet by position if too few columns' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out lmd = out.last_modified_time extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']]] extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position . should_fail_with Column_Count_Mismatch out.last_modified_time.should_equal lmd - out.delete_if_exists . should_succeed - Test.specify 'should fail to append to a sheet by position if too many columns' <| - out = create_out + group_builder.specify 'should fail to append to a sheet by position if too many columns' <| + out = data.create_out (enso_project.data / test_sheet_name) . 
copy_to out lmd = out.last_modified_time extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']], ['EE', ['2022-01-20', '2022-01-21']]] extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position . should_fail_with Column_Count_Mismatch out.last_modified_time.should_equal lmd - out.delete_if_exists . should_succeed - Test.specify 'should fail to append to a range by name if not large enough' <| - out = create_out + group_builder.specify 'should fail to append to a range by name if not large enough' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out lmd = out.last_modified_time extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]] extra_another.write out (Excel (Cell_Range "Another!A1:D5")) on_existing_file=Existing_File_Behavior.Append . should_fail_with Range_Exceeded out.last_modified_time.should_equal lmd - out.delete_if_exists . should_succeed - Test.specify 'should fail to append to a range by name if it hits another table' <| - out = create_out + group_builder.specify 'should fail to append to a range by name if it hits another table' <| + out = data.create_out (enso_project.data / test_sheet_name) . copy_to out lmd = out.last_modified_time extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]] extra_another.write out (Excel (Cell_Range "Random!B3")) on_existing_file=Existing_File_Behavior.Append . should_fail_with Existing_Data out.last_modified_time.should_equal lmd - out.delete_if_exists . 
should_succeed - Test.specify "should fail if the target file is read-only" <| + group_builder.specify "should fail if the target file is read-only" <| f = enso_project.data / "transient" / "permission."+suffix if f.exists then Util.set_writable f True f.delete_if_exists @@ -558,9 +540,9 @@ spec_write suffix test_sheet_name = Util.set_writable f True f.delete - Test.specify "should allow to write to a workbook that is open, and reflect that changes when the sheet is read again" <| - out = create_out - table.write out on_problems=Report_Error . should_succeed + group_builder.specify "should allow to write to a workbook that is open, and reflect that changes when the sheet is read again" <| + out = data.create_out + data.table.write out on_problems=Report_Error . should_succeed workbook = out.read (Excel headers=True) workbook.sheet_names.should_equal ["EnsoSheet"] @@ -570,7 +552,7 @@ spec_write suffix test_sheet_name = w2 = out.read (Excel headers=True) t1 = workbook.read "EnsoSheet" - t1.should_equal table + t1.should_equal data.table [Existing_File_Behavior.Backup, Existing_File_Behavior.Overwrite].each behavior-> Test.with_clue behavior.to_text+": " <| t2 = Table.new [["X", [behavior.to_text, "B", "C", behavior.to_text+"..."]]] @@ -587,9 +569,8 @@ spec_write suffix test_sheet_name = workbook.close w2.close - out.delete_if_exists . should_succeed - Test.specify "should fail if the parent directory does not exist" <| + group_builder.specify "should fail if the parent directory does not exist" <| parent = enso_project.data / "transient" / "nonexistent" parent.exists.should_be_false @@ -600,7 +581,7 @@ spec_write suffix test_sheet_name = r1.should_fail_with File_Error r1.catch.should_be_a File_Error.Not_Found - Test.specify "should allow to write and read-back Unicode characters" <| + group_builder.specify "should allow to write and read-back Unicode characters" <| encodings = enso_project.data / "transient" / "encodings."+suffix encodings.delete_if_exists . 
should_succeed @@ -610,7 +591,7 @@ spec_write suffix test_sheet_name = t2.at "A" . to_vector . should_equal ["A", "B", "😊", "D"] encodings.delete - Test.specify "should be able to overwrite a pre-existing empty file" <| + group_builder.specify "should be able to overwrite a pre-existing empty file" <| empty = enso_project.data / "transient" / "empty."+suffix [Existing_File_Behavior.Backup, Existing_File_Behavior.Overwrite, Existing_File_Behavior.Append].each behavior-> Test.with_clue behavior.to_text+": " <| empty.delete_if_exists . should_succeed @@ -625,17 +606,19 @@ spec_write suffix test_sheet_name = t2 = empty.read (Excel (Worksheet "EnsoSheet")) t2.should_equal t1 -spec = - Test.group 'Excel Range' <| - check_range excel_range sheet_name tlbr_vector single_cell=False = - excel_range.sheet_name . should_equal sheet_name - excel_range.top_row . should_equal (tlbr_vector.at 0) - excel_range.left_column . should_equal (tlbr_vector.at 1) - excel_range.bottom_row . should_equal (tlbr_vector.at 2) - excel_range.right_column . should_equal (tlbr_vector.at 3) - excel_range.is_single_cell . should_equal single_cell - - Test.specify 'should be able to parse A1 format' <| + +check_range excel_range sheet_name tlbr_vector single_cell=False = + excel_range.sheet_name . should_equal sheet_name + excel_range.top_row . should_equal (tlbr_vector.at 0) + excel_range.left_column . should_equal (tlbr_vector.at 1) + excel_range.bottom_row . should_equal (tlbr_vector.at 2) + excel_range.right_column . should_equal (tlbr_vector.at 3) + excel_range.is_single_cell . 
should_equal single_cell + + +add_specs suite_builder = + suite_builder.group 'Excel Range' group_builder-> + group_builder.specify 'should be able to parse A1 format' <| check_range (Excel_Range.from_address "Test!EE4") 'Test' [4, 135, 4, 135] True check_range (Excel_Range.from_address "Test!EE4:EE4") 'Test' [4, 135, 4, 135] check_range (Excel_Range.from_address "Test!A1:D5") 'Test' [1, 1, 5, 4] @@ -650,15 +633,15 @@ spec = check_range (Excel_Range.from_address "Test!$CB") 'Test' [Nothing, 80, Nothing, 80] check_range (Excel_Range.from_address "Test!$DD:$XAZ") 'Test' [Nothing, 108, Nothing, 16276] - Test.specify 'should be able to parse RC format' <| + group_builder.specify 'should be able to parse RC format' <| check_range (Excel_Range.from_address "Test!R1C1") 'Test' [1, 1, 1, 1] True check_range (Excel_Range.from_address "Test!R1C1:R5C3") 'Test' [1, 1, 5, 3] - Test.specify 'should fail gracefully for invalid patterns' <| + group_builder.specify 'should fail gracefully for invalid patterns' <| Excel_Range.from_address "Test!$$QA1" . should_fail_with Illegal_Argument Excel_Range.from_address "Test!BADADDRESS" . should_fail_with Illegal_Argument - Test.specify 'should allow Range creation for a cell' <| + group_builder.specify 'should allow Range creation for a cell' <| check_range (Excel_Range.for_cell "Hello World" 123 14) 'Hello World' [14, 123, 14, 123] True check_range (Excel_Range.for_cell "Hello World" "DS" 14) 'Hello World' [14, 123, 14, 123] True Excel_Range.for_cell "Test" 123 14 . address . should_equal "Test!DS14" @@ -669,7 +652,7 @@ spec = Excel_Range.for_cell "Test" 1 10000000 . should_fail_with Illegal_Argument Excel_Range.for_cell "Test" 1 0 . 
should_fail_with Illegal_Argument - Test.specify 'should allow Range creation for a range' <| + group_builder.specify 'should allow Range creation for a range' <| check_range (Excel_Range.for_range "Hello World" 55 120 123 14) 'Hello World' [14, 55, 120, 123] check_range (Excel_Range.for_range "Hello World" "BC" 120 "DS" 14) 'Hello World' [14, 55, 120, 123] Excel_Range.for_range "Test" 55 120 123 14 . address . should_equal "Test!BC14:DS120" @@ -684,7 +667,7 @@ spec = Excel_Range.for_range "Test" 5 1 123 0 . should_fail_with Illegal_Argument Excel_Range.for_range "Test" 5 1 123 10000000 . should_fail_with Illegal_Argument - Test.specify 'should allow Range creation for a column' <| + group_builder.specify 'should allow Range creation for a column' <| check_range (Excel_Range.for_columns "Hello World" 123) 'Hello World' [Nothing, 123, Nothing, 123] check_range (Excel_Range.for_columns "Hello World" "DS") 'Hello World' [Nothing, 123, Nothing, 123] Excel_Range.for_columns "Test" 123 . address . should_equal "Test!DS" @@ -693,7 +676,7 @@ spec = Excel_Range.for_columns "Test" "ZZZ" . should_fail_with Illegal_Argument Excel_Range.for_columns "Test" 0 . should_fail_with Illegal_Argument - Test.specify 'should allow Range creation for columns' <| + group_builder.specify 'should allow Range creation for columns' <| check_range (Excel_Range.for_columns "Hello World" "BC" 123) 'Hello World' [Nothing, 55, Nothing, 123] check_range (Excel_Range.for_columns "Hello World" 55 "DS") 'Hello World' [Nothing, 55, Nothing, 123] Excel_Range.for_columns "Test" 55 123 . address . should_equal "Test!BC:DS" @@ -702,14 +685,14 @@ spec = Excel_Range.for_columns "Test" 55 "ZZZ" . should_fail_with Illegal_Argument Excel_Range.for_columns "Test" 55 0 . 
should_fail_with Illegal_Argument - Test.specify 'should allow Range creation for a row' <| + group_builder.specify 'should allow Range creation for a row' <| check_range (Excel_Range.for_rows "Hello World" 123) 'Hello World' [123, Nothing, 123, Nothing] Excel_Range.for_rows "Test" 123 . address . should_equal "Test!123" Excel_Range.for_rows "Hello World" 123 . address . should_equal "'Hello World'!123" Excel_Range.for_rows "Test" 20000000 . should_fail_with Illegal_Argument Excel_Range.for_rows "Test" 0 . should_fail_with Illegal_Argument - Test.specify 'should allow Range creation for rows' <| + group_builder.specify 'should allow Range creation for rows' <| check_range (Excel_Range.for_rows "Hello World" 55 123) 'Hello World' [55, Nothing, 123, Nothing] Excel_Range.for_rows "Test" 55 123 . address . should_equal "Test!55:123" Excel_Range.for_rows "Hello World" 55 123 . address . should_equal "'Hello World'!55:123" @@ -749,8 +732,8 @@ spec = workbook.sheet_count . should_equal sheets workbook.named_ranges_count . 
should_equal ranges - Test.group "Read XLSX / XLS Files" <| - Test.specify "should let you read the workbook with Auto_Detect" <| + suite_builder.group "Read XLSX / XLS Files" group_builder-> + group_builder.specify "should let you read the workbook with Auto_Detect" <| check_workbook <| xlsx_sheet.read check_workbook <| Data.read xlsx_sheet check_workbook <| Data.read xlsx_path @@ -759,7 +742,7 @@ spec = check_workbook <| Data.read xls_sheet check_workbook <| Data.read xls_path - Test.specify "should let you read the workbook with Excel" <| + group_builder.specify "should let you read the workbook with Excel" <| check_workbook <| xlsx_sheet.read Excel check_workbook <| Data.read xlsx_sheet Excel check_workbook <| Data.read xlsx_path Excel @@ -768,7 +751,7 @@ spec = check_workbook <| Data.read xls_sheet Excel check_workbook <| Data.read xls_path Excel - Test.specify "workbook should look like a database connection" <| + group_builder.specify "workbook should look like a database connection" <| workbook = xlsx_sheet.read workbook.database . should_equal xlsx_sheet.normalize.path @@ -786,17 +769,17 @@ spec = workbook.tables "%not%" . row_count . should_equal 1 workbook.tables "%not%" . at 'Name' . to_vector . should_equal ["Another"] - Test.specify "should let you read the sheet names" <| + group_builder.specify "should let you read the sheet names" <| xlsx_sheet.read (Excel Sheet_Names) . should_equal sheet_names xls_sheet.read (Excel Sheet_Names) . should_equal sheet_names xlsx_sheet.read . sheet_names . should_equal sheet_names - Test.specify "should let you read the range names" <| + group_builder.specify "should let you read the range names" <| xlsx_sheet.read (Excel Range_Names) . should_equal range_names xls_sheet.read (Excel Range_Names) . should_equal range_names xlsx_sheet.read . named_ranges . 
should_equal range_names - Test.specify "should let you read by sheet index" <| + group_builder.specify "should let you read by sheet index" <| table = xlsx_sheet.read (Excel (Worksheet 1)) check_table table @@ -804,7 +787,7 @@ spec = table_2.row_count . should_equal col_a.length check_table table_2 - Test.specify "should let you read by sheet name" <| + group_builder.specify "should let you read by sheet name" <| table = xlsx_sheet.read (Excel (Worksheet "Sheet1")) check_table table @@ -815,7 +798,7 @@ spec = table_3 = xlsx_sheet.read . read "Sheet1" check_table table_3 - Test.specify "should let you read XLS by sheet index" <| + group_builder.specify "should let you read XLS by sheet index" <| table = xls_sheet.read (Excel (Worksheet 1)) check_table table @@ -823,14 +806,14 @@ spec = table_2.row_count . should_equal col_a.length check_table table_2 - Test.specify "should let you read XLS by sheet name" <| + group_builder.specify "should let you read XLS by sheet name" <| table = xls_sheet.read (Excel (Worksheet "Sheet1")) check_table table table_2 = xls_sheet.read . read "Sheet1" check_table table_2 - Test.specify "should let you read by range" <| + group_builder.specify "should let you read by range" <| table = xlsx_sheet.read (Excel (Cell_Range "Sheet1!A:C")) check_table table 3 @@ -844,7 +827,7 @@ spec = check_table <| xlsx_sheet.read . read "Sheet1!10:13" check_table count=3 <| xlsx_sheet.read . read "Sheet1!A10:C13" - Test.specify "should let you read by range name" <| + group_builder.specify "should let you read by range name" <| table = xlsx_sheet.read (Excel (Cell_Range "myData")) table.row_count . should_equal col_a.length check_table table 3 @@ -853,7 +836,7 @@ spec = table_2.row_count . 
should_equal col_a.length check_table table_2 3 - Test.specify "should let you restrict number of rows read and skip rows" <| + group_builder.specify "should let you restrict number of rows read and skip rows" <| table = xlsx_sheet.read (Excel (Worksheet "Sheet1")) check_table table @@ -867,14 +850,14 @@ spec = table_4 = xlsx_sheet.read (Excel (Worksheet "Sheet1" row_limit=6)) table_4.row_count . should_equal 6 - Test.group "Problems" <| - Test.specify "should handle non-existing file gracefully" <| + suite_builder.group "Problems" group_builder-> + group_builder.specify "should handle non-existing file gracefully" <| bad_file = enso_project.data / "DoesNotExists.xlsx" result = bad_file.read (Excel (Cell_Range "Sheet1!A:C")) result.should_fail_with File_Error result.catch.should_be_a File_Error.Not_Found - Test.specify "should handle wrong xls_format gracefully" <| + group_builder.specify "should handle wrong xls_format gracefully" <| xlsx_sheet_copy = enso_project.data / "transient" / "TestSheetCopy.xlsx" xlsx_sheet.copy_to xlsx_sheet_copy @@ -897,7 +880,7 @@ spec = r2.should_fail_with File_Error r2.catch.should_be_a File_Error.Corrupted_Format - Test.specify "should handle malformed XLS files gracefully" <| + group_builder.specify "should handle malformed XLS files gracefully" <| bad_file = enso_project.data / "transient" / "malformed.xls" "not really an XLS file contents...".write bad_file on_existing_file=Existing_File_Behavior.Overwrite @@ -921,7 +904,7 @@ spec = bad_file.delete - Test.specify "will fail if an operation is performed on a closed workbook" <| + group_builder.specify "will fail if an operation is performed on a closed workbook" <| workbook = xlsx_sheet.read workbook.sheet_count . should_equal 4 @@ -932,7 +915,7 @@ spec = workbook.read "Sheet1" . should_fail_with Illegal_State ci_pending = if Environment.get "CI" != Nothing then "This test takes a lot of time so it is disabled on CI." 
- Test.specify "should be able to write and read a big XLSX file (>110MB)" pending=ci_pending <| + group_builder.specify "should be able to write and read a big XLSX file (>110MB)" pending=ci_pending <| n = 10^6 IO.println "Generating big XLSX file "+Time_Of_Day.now.to_text rng = RandomHelpers.new 123 @@ -962,7 +945,7 @@ spec = workbook.close big_file.delete_if_exists . should_succeed - Test.specify "should be able to write and read a big XLS file (>110MB)" pending=ci_pending <| + group_builder.specify "should be able to write and read a big XLS file (>110MB)" pending=ci_pending <| IO.println "Generating big XLS file "+Time_Of_Day.now.to_text rng = RandomHelpers.new 123 # Here we instead create a 2D table, because XLS has a limit of 65536 rows and 16k columns. @@ -992,11 +975,11 @@ spec = workbook.close big_file.delete_if_exists . should_succeed - spec_fmt 'XLSX reading' Examples.xlsx .read + spec_fmt suite_builder 'XLSX reading' Examples.xlsx .read - spec_fmt 'XLS reading' Examples.xls .read + spec_fmt suite_builder 'XLS reading' Examples.xls .read - Test.group "Reading single cells correctly" <| + suite_builder.group "Reading single cells correctly" group_builder-> file = enso_project.data / "RangeTests.xlsx" check_table table col_names data = @@ -1005,7 +988,7 @@ spec = data.each_with_index idx->values-> table.at (col_names.at idx) . to_vector . 
should_equal values - Test.specify "Simple table" <| + group_builder.specify "Simple table" <| check_table (file.read (Excel (Cell_Range "Sheet1!A1"))) ["AA", "BB"] [[1,2,3,4,5,6], ["A","B","C","D","E","F"]] check_table (file.read (Excel (Cell_Range "Sheet1!A2"))) ["A", "B"] [[1,2,3,4,5,6], ["A","B","C","D","E","F"]] check_table (file.read (Excel (Cell_Range "Sheet1!A1:A1"))) ["A"] [["AA"]] @@ -1013,37 +996,39 @@ spec = check_table (file.read (Excel (Cell_Range "Sheet1!B1") headers=True)) ["BB"] [["A","B","C","D","E","F"]] check_table (file.read (Excel (Cell_Range "Sheet1!B2"))) ["B"] [["A","B","C","D","E","F"]] - Test.specify "Patchy table" <| + group_builder.specify "Patchy table" <| check_table (file.read (Excel (Cell_Range "Sheet1!D1"))) ["A", "B", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]] check_table (file.read (Excel (Cell_Range "Sheet1!D2"))) ["D", "E", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]] check_table (file.read (Excel (Cell_Range "Sheet1!E"))) ["B"] [[4,4,Nothing,Nothing,Nothing,Nothing]] check_table (file.read (Excel (Cell_Range "Sheet1!E1"))) ["B", "F"] [[4,4,Nothing], [6,Nothing,6]] check_table (file.read (Excel (Cell_Range "Sheet1!E2"))) ["E", "F"] [[4,4,Nothing], [6,Nothing,6]] - Test.specify "Single cell" <| + group_builder.specify "Single cell" <| check_table (file.read (Excel (Cell_Range "Sheet1!H1"))) ["H"] [["Single Cell"]] check_table (file.read (Excel (Cell_Range "Sheet1!H2"))) ["H"] [[]] - Test.specify "Single line" <| + group_builder.specify "Single line" <| check_table (file.read (Excel (Cell_Range "Sheet1!J1"))) ["J", "K", "L"] [["Just"],["Some"],["Headers"]] - Test.specify "Growing table" <| + group_builder.specify "Growing table" <| check_table (file.read (Excel (Cell_Range "Sheet1!N1"))) ["A", "Full", "Table", "Q"] [["Hello","World",Nothing,"Extend"],[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]] check_table (file.read (Excel (Cell_Range "Sheet1!O1"))) ["Full", "Table", "Q"] 
[[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]] check_table (file.read (Excel (Cell_Range "Sheet1!O2"))) ["O", "P", "Q"] [[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]] - Test.specify "Should handle blank headers without warnings" <| + group_builder.specify "Should handle blank headers without warnings" <| check_table (file.read (Excel (Cell_Range "Sheet1!D1"))) ["A", "B", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]] - Test.specify "Should handle duplicate headers with warnings" <| + group_builder.specify "Should handle duplicate headers with warnings" <| action = file.read (Excel (Cell_Range "Sheet1!S1")) on_problems=_ tester = check_table _ ["DD", "DD 1"] [[1,3], [2,4]] problems = [Duplicate_Output_Column_Names.Error ["DD"]] Problems.test_problem_handling action problems tester - # Cleanup any leftovers from previous runs - enso_project.data/"transient" . list "out*" . each .delete - spec_write "xlsx" 'TestSheet.xlsx' - spec_write "xls" 'TestSheetOld.xls' + spec_write suite_builder "xlsx" 'TestSheet.xlsx' + spec_write suite_builder "xls" 'TestSheetOld.xls' + +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter spec_filter="should write a table to non-existent file as a new sheet with headers; and return the file object on success" -main = Test_Suite.run_main spec diff --git a/test/Table_Tests/src/IO/Fetch_Spec.enso b/test/Table_Tests/src/IO/Fetch_Spec.enso index d5aacb829c40..2af7a949213d 100644 --- a/test/Table_Tests/src/IO/Fetch_Spec.enso +++ b/test/Table_Tests/src/IO/Fetch_Spec.enso @@ -6,16 +6,19 @@ import Standard.Base.Runtime.Context from Standard.Table import all import Standard.Table.Errors.Invalid_JSON_Format -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all import project.Util polyglot java import java.util.Base64 -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs 
suite_builder + suite.run_with_filter -spec = + +add_specs suite_builder = ## To run this test locally: $ sbt 'http-test-helper/run localhost 8080' $ export ENSO_HTTP_TEST_HTTPBIN_URL=http://localhost:8080/ @@ -25,13 +28,13 @@ spec = pending_has_url = if base_url != Nothing then Nothing else "The HTTP tests only run when the `ENSO_HTTP_TEST_HTTPBIN_URL` environment variable is set to URL of the httpbin server" - Test.group "fetching files using HTTP" pending=pending_has_url <| - Test.specify "fetching json" <| + suite_builder.group "fetching files using HTTP" pending=pending_has_url group_builder-> + group_builder.specify "fetching json" <| r = Data.fetch base_url_with_slash+"testfiles/table.json" expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]] r.to Table . should_equal expected_table - Test.specify "fetching csv" <| + group_builder.specify "fetching csv" <| url = base_url_with_slash+"testfiles/table.csv" r = Data.fetch url expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]] @@ -43,7 +46,7 @@ spec = r2.should_be_a Table r2.should_equal expected_table - Test.specify "fetching xls" <| + group_builder.specify "fetching xls" <| url = base_url_with_slash+"testfiles/table.xls" r = Data.fetch url expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]] @@ -56,7 +59,7 @@ spec = r2.should_be_a Table r2.should_equal expected_table - Test.specify "fetching xlsx" <| + group_builder.specify "fetching xlsx" <| url = base_url_with_slash+"testfiles/table.xlsx" r = Data.fetch url expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]] @@ -73,7 +76,7 @@ spec = r3.should_be_a Table r3.should_equal expected_table - Test.specify "format detection based on Content-Type and Content-Disposition" <| + group_builder.specify "format detection based on Content-Type and Content-Disposition" <| content = 'A,B\n1,x\n3,y' uri = URI.from (base_url_with_slash+"test_headers") . 
add_query_argument "base64_response_data" (Base64.getEncoder.encodeToString (content.bytes Encoding.utf_8)) expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]] diff --git a/test/Table_Tests/src/IO/Formats_Spec.enso b/test/Table_Tests/src/IO/Formats_Spec.enso index 5ede91b108d1..ea167111a688 100644 --- a/test/Table_Tests/src/IO/Formats_Spec.enso +++ b/test/Table_Tests/src/IO/Formats_Spec.enso @@ -5,18 +5,18 @@ import Standard.Base.Runtime.Context from Standard.Table import all import Standard.Table.Errors.Invalid_JSON_Format -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all + import project.Util -spec = +add_specs suite_builder = transient = enso_project.data / "transient" - Test.group 'Various File Format support on Table' <| + suite_builder.group 'Various File Format support on Table' group_builder-> t1 = Table.new [["X", [1, 2, 3]]] simple_empty = enso_project.data/'simple_empty.csv' . read - Test.specify "should be able to be written as CSV, Excel" <| + group_builder.specify "should be able to be written as CSV, Excel" <| f1 = transient / "test2.csv" f2 = transient / "test3.xlsx" [f1, f2].each f-> @@ -25,14 +25,14 @@ spec = f.exists.should_be_true f.delete - Test.specify "should be able to be written as JSON using Table.write" <| + group_builder.specify "should be able to be written as JSON using Table.write" <| f1 = transient / "test1.json" f1.delete_if_exists t1.write f1 . should_succeed f1.exists.should_be_true f1.delete - Test.specify 'should write JSON tables' <| + group_builder.specify 'should write JSON tables' <| simple_empty = enso_project.data/'simple_empty.csv' . read out = transient / 'out.json' out.delete_if_exists @@ -40,7 +40,7 @@ spec = Table.from_objects (Json.parse out.read_text) ['a', 'b', 'c'] . 
should_equal simple_empty out.delete_if_exists - Test.specify 'should append to JSON tables' <| + group_builder.specify 'should append to JSON tables' <| out = transient / 'out.json' out.delete_if_exists simple_empty.write out . should_equal out @@ -48,7 +48,7 @@ spec = Table.from_objects (Json.parse out.read_text) ['a', 'b', 'c'] . row_count . should_equal 2*simple_empty.row_count out.delete_if_exists - Test.specify 'should fail to append to JSON non-arrays' <| + group_builder.specify 'should fail to append to JSON non-arrays' <| out = transient / 'out.json' out.delete_if_exists '1'.write out @@ -63,7 +63,7 @@ spec = simple_empty.write out on_existing_file=Existing_File_Behavior.Append . should_fail_with Invalid_JSON_Format out.delete_if_exists - Test.specify "should fail gracefully when provided with an unsupported format" <| + group_builder.specify "should fail gracefully when provided with an unsupported format" <| f1 = (transient / "test4.unknown-format") f1.delete_if_exists r1 = t1.write f1 @@ -80,11 +80,11 @@ spec = r2.catch.should_be_a File_Error.Unsupported_Output_Type r2.catch.format . should_equal my_format - write_tests extension = Test.group 'Writing to '+extension+' files' <| + write_tests extension = suite_builder.group 'Writing to '+extension+' files' group_builder-> count result = if result . is_a Table then result.row_count else result.length - Test.specify "should write to a temporary "+extension+" file part of the data if context is disabled" <| + group_builder.specify "should write to a temporary "+extension+" file part of the data if context is disabled" <| f = transient / ("big." + extension) f.delete_if_exists f_bak = transient / ("big." + extension + ".bak") @@ -112,7 +112,7 @@ spec = f_bak.delete_if_exists r.delete_if_exists - Test.specify "should create a backup file if overwriting" <| + group_builder.specify "should create a backup file if overwriting" <| f = transient / ("test." 
+ extension) f.delete_if_exists f_bak = transient / ("test." + extension + ".bak") @@ -138,7 +138,7 @@ spec = f_bak.delete_if_exists - Test.specify "should support appending" <| + group_builder.specify "should support appending" <| f = transient / ("test." + extension) f.delete_if_exists f_bak = transient / ("test." + extension + ".bak") @@ -161,4 +161,8 @@ spec = write_tests "csv" write_tests "json" -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/IO/Json_Spec.enso b/test/Table_Tests/src/IO/Json_Spec.enso index 5aea09b08fb3..f54b2f8e378c 100644 --- a/test/Table_Tests/src/IO/Json_Spec.enso +++ b/test/Table_Tests/src/IO/Json_Spec.enso @@ -2,26 +2,37 @@ from Standard.Base import all from Standard.Table import Table from Standard.Table.Extensions.Table_Conversions import all -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all import project.Util -spec = Test.group 'JSON conversion' <| - clothes = enso_project.data/'clothes.csv' . read +type Data + Value ~clothes - Test.specify 'should convert tables to a format compatible with Table.from_objects' <| - clothes_json = clothes.to_json - Table.from_objects (Json.parse clothes_json) ['Id', 'Name', 'Quantity', 'Rating', 'Price'] . should_equal clothes + setup = + clothes = enso_project.data/'clothes.csv' . read + Data.Value clothes - Test.specify "should allow converting a JSON array into a table" <| - r_1 = JS_Object.from_pairs [['foo', 20], ['bar', 'baz'], ['baz', False]] - r_2 = JS_Object.from_pairs [['bar', 'xyz'], ['baz', True]] - r_3 = JS_Object.from_pairs [['baz', False], ['foo', 13]] - t = Table.from_objects [r_1, r_2, r_3] ['foo', 'bar', 'baz'] - t.columns.map .name . should_equal ['foo', 'bar', 'baz'] - t.at 'foo' . to_vector . should_equal [20, Nothing, 13] - t.at 'bar' . to_vector . should_equal ['baz', 'xyz', Nothing] - t.at 'baz' . 
to_vector . should_equal [False, True, False] +add_specs suite_builder = + suite_builder.group 'JSON conversion' group_builder-> + data = Data.setup + + group_builder.specify 'should convert tables to a format compatible with Table.from_objects' <| + clothes_json = data.clothes.to_json + Table.from_objects (Json.parse clothes_json) ['Id', 'Name', 'Quantity', 'Rating', 'Price'] . should_equal data.clothes + + group_builder.specify "should allow converting a JSON array into a table" <| + r_1 = JS_Object.from_pairs [['foo', 20], ['bar', 'baz'], ['baz', False]] + r_2 = JS_Object.from_pairs [['bar', 'xyz'], ['baz', True]] + r_3 = JS_Object.from_pairs [['baz', False], ['foo', 13]] + t = Table.from_objects [r_1, r_2, r_3] ['foo', 'bar', 'baz'] + t.columns.map .name . should_equal ['foo', 'bar', 'baz'] + t.at 'foo' . to_vector . should_equal [20, Nothing, 13] + t.at 'bar' . to_vector . should_equal ['baz', 'xyz', Nothing] + t.at 'baz' . to_vector . should_equal [False, True, False] + +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter -main = Test_Suite.run_main spec diff --git a/test/Table_Tests/src/IO/Main.enso b/test/Table_Tests/src/IO/Main.enso index bb69e1cd0d68..ad285ae46f29 100644 --- a/test/Table_Tests/src/IO/Main.enso +++ b/test/Table_Tests/src/IO/Main.enso @@ -1,6 +1,6 @@ from Standard.Base import all -from Standard.Test import Test_Suite +from Standard.Test_New import all import project.IO.Csv_Spec import project.IO.Delimited_Read_Spec @@ -10,13 +10,13 @@ import project.IO.Fetch_Spec import project.IO.Formats_Spec import project.IO.Json_Spec -spec = - Csv_Spec.spec - Delimited_Read_Spec.spec - Delimited_Write_Spec.spec - Excel_Spec.spec - Formats_Spec.spec - Fetch_Spec.spec - Json_Spec.spec - -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + Csv_Spec.add_specs suite_builder + Delimited_Read_Spec.add_specs suite_builder + Delimited_Write_Spec.add_specs suite_builder + Excel_Spec.add_specs 
suite_builder + Formats_Spec.add_specs suite_builder + Fetch_Spec.add_specs suite_builder + Json_Spec.add_specs suite_builder + suite.run_with_filter From 2f28af487a5da0bc7c831ecbafaee94e4ce3f535 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 19 Jan 2024 18:51:57 +0100 Subject: [PATCH 59/93] Refactor Table_Tests/src/Formatting to Test_New --- .../src/Formatting/Data_Formatter_Spec.enso | 69 ++++++++------- test/Table_Tests/src/Formatting/Main.enso | 12 +-- .../src/Formatting/Parse_Values_Spec.enso | 85 ++++++++++--------- 3 files changed, 86 insertions(+), 80 deletions(-) diff --git a/test/Table_Tests/src/Formatting/Data_Formatter_Spec.enso b/test/Table_Tests/src/Formatting/Data_Formatter_Spec.enso index 5c97c7193fea..bca778caa0e5 100644 --- a/test/Table_Tests/src/Formatting/Data_Formatter_Spec.enso +++ b/test/Table_Tests/src/Formatting/Data_Formatter_Spec.enso @@ -6,8 +6,7 @@ from Standard.Base.Data.Time.Errors import Date_Time_Format_Parse_Error, Suspici from Standard.Table import Table, Column, Data_Formatter, Quote_Style, Value_Type from Standard.Table.Errors import all -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all type Custom_Type Value field @@ -30,9 +29,9 @@ type Custom_Type_With_Panic to_text : Text to_text self = Panic.throw (Illegal_State.Error "foo_panic") -spec = - Test.group "DataFormatter.parse" <| - Test.specify "should parse numbers" <| +add_specs suite_builder = + suite_builder.group "DataFormatter.parse" group_builder-> + group_builder.specify "should parse numbers" <| formatter = Data_Formatter.Value formatter.parse "123" . should_equal 123 formatter.parse "1000000" . should_equal 1000000 @@ -47,7 +46,7 @@ spec = formatter.parse "-Infinity" . should_equal (Number.negative_infinity) formatter.parse "NaN" . is_nan . 
should_be_true - Test.specify "should prefer the US decimal point in auto mode" <| + group_builder.specify "should prefer the US decimal point in auto mode" <| formatter = Data_Formatter.Value formatter.parse "1.5" . should_equal 1.5 formatter.parse "1.25" . should_equal 1.25 @@ -61,7 +60,7 @@ spec = formatter.parse "1'000" . should_equal 1000 formatter.parse "1.000.000" . should_equal 1000000 - Test.specify "should allow customizing the decimal point and thousand separator" <| + group_builder.specify "should allow customizing the decimal point and thousand separator" <| formatter = Data_Formatter.Value thousand_separator="_" decimal_point="," formatter.parse "123" . should_equal 123 formatter.parse "1_000_000" . should_equal 1000000 @@ -73,7 +72,7 @@ spec = formatter.parse "-1,0" . should_equal -1.0 formatter.parse "1,0001" . should_equal 1.0001 - Test.specify "should never infer thousand separator to be equal to decimal point" <| + group_builder.specify "should never infer thousand separator to be equal to decimal point" <| f1 = Data_Formatter.Value decimal_point="." f1.parse "1.0" . should_equal 1.0 f1.parse "1.000" . should_equal 1.0 @@ -103,7 +102,7 @@ spec = r6 = Data_Formatter.Value decimal_point="." thousand_separator="." r6.parse "1.000" . should_fail_with Illegal_Argument - Test.specify "should support exponential notation, but only if explicitly enabled" <| + group_builder.specify "should support exponential notation, but only if explicitly enabled" <| plain_formatter = Data_Formatter.Value exponential_formatter = Data_Formatter.Value allow_exponential_notation=True plain_formatter.parse "1E3" . should_equal "1E3" @@ -121,7 +120,7 @@ spec = exponential_formatter.parse "1.2E-3" . should_equal 0.0012 exponential_formatter.parse "1.2E-3" Value_Type.Float . should_equal 0.0012 - Test.specify "handle leading zeros, only if enabled" <| + group_builder.specify "handle leading zeros, only if enabled" <| Data_Formatter.Value.parse "0100" . 
should_equal "0100" Data_Formatter.Value.parse "000" . should_equal "000" Data_Formatter.Value.parse "000.0" . should_equal "000.0" @@ -130,18 +129,18 @@ spec = formatter.parse "000" . should_equal 0 formatter.parse "000.0" . should_equal 0.0 - Test.specify "should parse booleans" <| + group_builder.specify "should parse booleans" <| formatter = Data_Formatter.Value formatter.parse "True" . should_equal True formatter.parse "False" . should_equal False - Test.specify "should allow custom boolean formats" <| + group_builder.specify "should allow custom boolean formats" <| formatter = Data_Formatter.Value true_values=["YES", "1", "true"] false_values=["NO", "0", "false"] formatter.parse "YES" . should_equal True formatter.parse "NO" . should_equal False (Data_Formatter.Value true_values=[] false_values=[]).parse "True" type=Value_Type.Boolean . should_equal Nothing - Test.specify "should parse dates" <| + group_builder.specify "should parse dates" <| formatter = Data_Formatter.Value formatter.parse "2022-01-01" . should_equal (Date.new 2022) formatter.parse "2020-05-07" type=Value_Type.Date . should_equal (Date.new 2020 5 7) @@ -168,7 +167,7 @@ spec = formatter.parse "1999-01-01 00:00" type=Value_Type.Date . should_equal Nothing formatter.parse "30:00:65" . should_equal "30:00:65" - Test.specify "should report the warnings when parsing dates with suspicious format" <| + group_builder.specify "should report the warnings when parsing dates with suspicious format" <| c1 = Column.from_vector "strs" ["31.12", "01.01"] c2 = c1.parse Value_Type.Date "dd.MM" current_year = Date.today.year @@ -188,7 +187,7 @@ spec = c6.to_vector . should_equal ["25.12", "31.07"] Problems.assume_no_problems c6 - Test.specify "should fallback to Text" <| + group_builder.specify "should fallback to Text" <| formatter = Data_Formatter.Value formatter.parse "Text" . should_equal "Text" complex_text = """ @@ -196,7 +195,7 @@ spec = And newlines toO! formatter.parse complex_text . 
should_equal complex_text - Test.specify "should report Invalid_Format errors" <| + group_builder.specify "should report Invalid_Format errors" <| formatter = Data_Formatter.Value expect_warning r = r.should_equal Nothing @@ -213,15 +212,15 @@ spec = expect_warning <| formatter.parse "Text" type=Value_Type.Date_Time expect_warning <| formatter.parse "Text" type=Value_Type.Time - Test.specify "should not allow unexpected types" <| + group_builder.specify "should not allow unexpected types" <| formatter = Data_Formatter.Value formatter.parse "Text" type=List . should_fail_with Illegal_Argument - Test.group "DataFormatter.format" <| - Test.specify "should handle Nothing" <| + suite_builder.group "DataFormatter.format" group_builder-> + group_builder.specify "should handle Nothing" <| Data_Formatter.Value.format Nothing . should_equal Nothing - Test.specify "should format numbers" <| + group_builder.specify "should format numbers" <| formatter = Data_Formatter.Value formatter.format 123 . should_equal "123" formatter.format 1000000 . should_equal "1000000" @@ -234,7 +233,7 @@ spec = formatter.format (Number.negative_infinity) . should_equal "-Infinity" formatter.format (Number.nan) . should_equal "NaN" - Test.specify "should allow customizing the decimal point and thousand separator" <| + group_builder.specify "should allow customizing the decimal point and thousand separator" <| formatter = Data_Formatter.Value thousand_separator="_" decimal_point="," formatter.format 123 . should_equal "123" formatter.format 1000000 . should_equal "1_000_000" @@ -246,18 +245,18 @@ spec = formatter.format -1.0 . should_equal "-1,0" formatter.format 1.0001 . should_equal "1,0001" - Test.specify "should format booleans" <| + group_builder.specify "should format booleans" <| formatter = Data_Formatter.Value formatter.format True . should_equal "True" formatter.format False . 
should_equal "False" - Test.specify "should allow custom boolean formats" <| + group_builder.specify "should allow custom boolean formats" <| formatter = Data_Formatter.Value true_values=["YES", "1", "true"] false_values=["NO", "0", "false"] formatter.format True . should_equal "YES" formatter.format False . should_equal "NO" (Data_Formatter.Value true_values=[] false_values=[]).format True . should_fail_with Illegal_Argument - Test.specify "should format dates" <| + group_builder.specify "should format dates" <| formatter = Data_Formatter.Value formatter.format (Date.new 2022) . should_equal "2022-01-01" formatter.format (Date_Time.new 1999) . should_contain "1999-01-01 00:00:00" @@ -265,14 +264,14 @@ spec = formatter.format (Date_Time.new 1999 zone=(Time_Zone.parse "America/Los_Angeles")) . should_equal "1999-01-01 00:00:00-08:00[America/Los_Angeles]" formatter.format (Time_Of_Day.new) . should_equal "00:00:00" - Test.specify "should allow custom date formats" <| + group_builder.specify "should allow custom date formats" <| formatter = Data_Formatter.Value.with_datetime_formats date_formats=["ddd, d MMM y", Date_Time_Formatter.from_java "d MMM y[ G]"] datetime_formats=["dd/MM/yyyy HH:mm [ZZZZ]"] time_formats=["h:mma"] formatter.format (Date.new 2022 06 21) . should_equal "Tue, 21 Jun 2022" formatter.format (Date_Time.new 1999 02 03 04 56 11 zone=Time_Zone.utc) . should_equal "03/02/1999 04:56 GMT" formatter.format (Date_Time.new 1999 02 03 04 56 11 zone=(Time_Zone.parse "America/Los_Angeles")) . should_equal "03/02/1999 04:56 GMT-08:00" formatter.format (Time_Of_Day.new 13 55) . should_equal "1:55PM" - Test.specify "should act as identity on Text" <| + group_builder.specify "should act as identity on Text" <| formatter = Data_Formatter.Value formatter.format "Text" . should_equal "Text" complex_text = """ @@ -280,13 +279,13 @@ spec = And newlines toO! formatter.format complex_text . 
should_equal complex_text - Test.specify "should work with custom types, falling back to the `.to_text` method" <| + group_builder.specify "should work with custom types, falling back to the `.to_text` method" <| formatter = Data_Formatter.Value thousand_separator="_" formatter.format (Custom_Type.Value 42) . should_equal "(Custom_Type.Value 42)" # We fallback to `to_text`, so obviously the nested numbers will not know about our formatting settings. formatter.format (Custom_Type_With_To_Text.Value 1000) . should_equal "[CUSTOM = 1000]" - Test.specify "should correctly pass through errors from custom type's `.to_text` method" <| + group_builder.specify "should correctly pass through errors from custom type's `.to_text` method" <| formatter = Data_Formatter.Value r1 = formatter.format (Custom_Type_With_Error.Value 100) r1.should_be_a Text @@ -300,10 +299,10 @@ spec = r2.should_contain "Illegal_State" r2.should_contain "foo_panic" - Test.group "DataFormatter builders" <| + suite_builder.group "DataFormatter builders" group_builder-> # We create a formatter with all non-default values to ensure that the builders keep the existing values of other properties instead of switching to the constructor's defaults. formatter_1 = Data_Formatter.Value trim_values=False allow_leading_zeros=True decimal_point=',' thousand_separator='_' allow_exponential_notation=True datetime_formats=[Date_Time_Formatter.from "yyyy/MM/dd HH:mm:ss"] date_formats=[Date_Time_Formatter.from "dd/MM/yyyy"] time_formats=[Date_Time_Formatter.from "HH/mm/ss"] true_values=["YES"] false_values=["NO"] - Test.specify "should allow changing number formatting settings" <| + group_builder.specify "should allow changing number formatting settings" <| formatter_2 = formatter_1.with_number_formatting decimal_point="*" formatter_2.decimal_point . should_equal "*" formatter_2.thousand_separator . should_equal formatter_1.thousand_separator @@ -322,7 +321,7 @@ spec = formatter_3.allow_leading_zeros . 
should_equal False formatter_3.allow_exponential_notation . should_equal False - Test.specify "should allow changing datetime formats" <| + group_builder.specify "should allow changing datetime formats" <| formatter_1.with_datetime_formats . should_equal formatter_1 formatter_2 = formatter_1.with_datetime_formats date_formats="dd.MM.yyyy" @@ -340,7 +339,7 @@ spec = formatter_3 = formatter_1.with_datetime_formats date_formats=[] datetime_formats=["foobar"] time_formats="baz" formatter_3.should_fail_with Date_Time_Format_Parse_Error - Test.specify "should allow changing booleans' representations" <| + group_builder.specify "should allow changing booleans' representations" <| formatter_2 = formatter_1.with_boolean_values "1" "0" formatter_2.date_formats . should_equal formatter_1.date_formats formatter_2.datetime_formats . should_equal formatter_1.datetime_formats @@ -357,4 +356,8 @@ spec = formatter_3.true_values . should_equal [] formatter_3.false_values . should_equal [] -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + diff --git a/test/Table_Tests/src/Formatting/Main.enso b/test/Table_Tests/src/Formatting/Main.enso index 193863cb18c2..b6541499bd3d 100644 --- a/test/Table_Tests/src/Formatting/Main.enso +++ b/test/Table_Tests/src/Formatting/Main.enso @@ -1,12 +1,12 @@ from Standard.Base import all -from Standard.Test import Test_Suite +from Standard.Test_New import all import project.Formatting.Data_Formatter_Spec import project.Formatting.Parse_Values_Spec -spec = - Data_Formatter_Spec.spec - Parse_Values_Spec.spec - -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + Data_Formatter_Spec.add_specs suite_builder + Parse_Values_Spec.add_specs suite_builder + suite.run_with_filter diff --git a/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso b/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso index ff79a728ca13..4f5e823a3962 100644 --- 
a/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso +++ b/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso @@ -6,19 +6,18 @@ from Standard.Table import Table, Data_Formatter, Column from Standard.Table.Data.Type.Value_Type import Value_Type, Auto from Standard.Table.Errors import all -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all import project.Util -spec = - Test.group "Table.parse" <| - Test.specify "should correctly parse integers" <| +add_specs suite_builder = + suite_builder.group "Table.parse" group_builder-> + group_builder.specify "should correctly parse integers" <| t1 = Table.new [["ints", ["0", "+0", "-0", "+1", "-1", "1", "000", "0010", "12345", Nothing]]] t2 = t1.parse type=Value_Type.Integer t2.at "ints" . to_vector . should_equal [0, 0, 0, 1, -1, 1, 0, 10, 12345, Nothing] - Test.specify "should correctly parse decimals" <| + group_builder.specify "should correctly parse decimals" <| t1 = Table.new [["ints", ["0", "+0", "-0", "+1", "-1", "1", "12345", Nothing]]] t2 = t1.parse type=Value_Type.Float t2.at "ints" . to_vector . should_equal [0, 0, 0, 1, -1, 1, 12345, Nothing] @@ -33,7 +32,7 @@ spec = t6 = t5.parse type=Value_Type.Float t6.at "floats" . to_vector . should_equal [0.0, 0.0, 1.0, 0.1, 0.123, -0.1, 0.1, 0.0, 0.1234, Nothing, 11111111.111] - Test.specify "should parse leading zeros in numbers" <| + group_builder.specify "should parse leading zeros in numbers" <| t1 = Table.new [["ints", ["0", "+00", "-00", "+01", "-01", "01", "000", "0010", "12345", Nothing]]] t2 = Table.new [["floats", ["0.0000", ".0", "00.", "01.0", "-0010.0000", "1.0000"]]] @@ -64,7 +63,7 @@ spec = t8.at "floats" . to_vector . 
should_equal [0.0, 0.0, 0.0, 1.0, -10.0, 1.0] Problems.assume_no_problems t8 - Test.specify "should correctly parse booleans" <| + group_builder.specify "should correctly parse booleans" <| t1 = Table.new [["bools", ["true", "false", "True", "TRUE", "FALSE", Nothing, "False"]]] t2 = t1.parse type=Value_Type.Boolean t2.at "bools" . to_vector . should_equal [True, False, True, True, False, Nothing, False] @@ -73,7 +72,7 @@ spec = t4 = t3.parse type=Value_Type.Boolean format="yes|no" t4.at "bools" . to_vector . should_equal [Nothing, Nothing, Nothing, True, Nothing, False, Nothing] - Test.specify "should correctly parse date and time" <| + group_builder.specify "should correctly parse date and time" <| t1 = Table.new [["dates", ["2022-05-07", "2000-01-01", "2010-12-31"]]] t2 = t1.parse type=Value_Type.Date t2.at "dates" . to_vector . should_equal [Date.new 2022 5 7, Date.new 2000 1 1, Date.new 2010 12 31] @@ -91,7 +90,7 @@ spec = t8.at "dates" . value_type . should_equal Value_Type.Date t8.at "dates" . to_vector . should_equal [Date.new 2022 5 7, Date.new 2001 1 1, Date.new 2010 12 31] - Test.specify "should parse date and time in various formats" <| + group_builder.specify "should parse date and time in various formats" <| opts = Data_Formatter.Value.with_datetime_formats date_formats=["d.M.y", (Date_Time_Formatter.from_java "d MMM y[ G]"), "ddd, d MMM y"] datetime_formats=["yyyy-MM-dd HH:mm:ss", "dd/MM/yyyy HH:mm"] time_formats=["H:mm:ss.f", "h:mma"] t1 = Table.new [["dates", ["1.2.476", "10 Jan 1900 AD", "Tue, 3 Jun 2008"]]] @@ -106,7 +105,7 @@ spec = t6 = t5.parse format=opts type=Value_Type.Time t6.at "times" . to_vector . 
should_equal [Time_Of_Day.new 1 2 3 nanosecond=987654321, Time_Of_Day.new 13 30 0 0] - Test.specify "should warn when cells do not fit the expected format" <| + group_builder.specify "should warn when cells do not fit the expected format" <| ints = ["ints", ["0", "1", "1.0", "foobar", "", "--1", "+-1", "10", "-+1"]] # Currently scientific notation is not supported and we document that in this test, in the future the situation may change and the test may need to be flipped. floats = ["floats", ["0", "2.0", "1e6", "foobar", "", "--1", "+-1", "100.", "-+1"]] @@ -156,13 +155,13 @@ spec = p6 = [Invalid_Format.Error "times" Value_Type.Time ["2001-01-01", "2001-01-01 12:34:56", "Tuesday", "foobar", "", "10:99:99", "1/2/2003", "2001-30-10"]] Problems.test_problem_handling a6 p6 t6 - Test.specify "should leave not selected columns unaffected" <| + group_builder.specify "should leave not selected columns unaffected" <| t1 = Table.new [["A", ["1", "2"]], ["B", ["3", "4"]]] t2 = t1.parse columns="B" t2.at "A" . to_vector . should_equal ["1", "2"] t2.at "B" . to_vector . should_equal [3, 4] - Test.specify "should guess the datatype for columns" <| + group_builder.specify "should guess the datatype for columns" <| c1 = ["ints", ["1", " +2", "-123", Nothing]] c2 = ["ints0", ["01", "02 ", Nothing, "-1"]] c3 = ["floats", [" 1.0 ", "2.2", Nothing, "-1.0"]] @@ -212,7 +211,7 @@ spec = # `bools` are not requested to be parsed, so they are kept as-is, with leading whitespace etc. t6.at "bools" . to_vector . should_equal ["true", " False", Nothing, "True"] - Test.specify "should allow to specify a thousands separator and a custom decimal point" <| + group_builder.specify "should allow to specify a thousands separator and a custom decimal point" <| opts = Data_Formatter.Value decimal_point=',' thousand_separator='_' t1 = Table.new [["floats", ["0,0", "+0,0", "-0,0", "+1,5", "-1,2", "1,0", "0,0000", "10_000,", ",0"]]] t2 = t1.parse format=opts @@ -226,7 +225,7 @@ spec = t5.at "xs" . 
to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, Nothing, 1000] Problems.get_attached_warnings t5 . should_equal [Invalid_Format.Error "xs" Value_Type.Integer ["1,2", "1.3", "_0", "0_", "1_0_0"]] - Test.specify "should allow to specify custom values for booleans" <| + group_builder.specify "should allow to specify custom values for booleans" <| opts_1 = Data_Formatter.Value true_values=["1", "YES"] false_values=["0"] t1 = Table.new [["bools", ["1", "0", "YES", "1", "0"]]] t2 = t1.parse format=opts_1 @@ -246,7 +245,7 @@ spec = times = ["times", ["11:00:00 ", " 00:00:00", "00 : 00 : 00", Nothing]] Table.new [ints, floats, bools, dates, datetimes, times] - Test.specify "should trim input values by default" <| + group_builder.specify "should trim input values by default" <| t1 = whitespace_table.parse columns="ints" type=Value_Type.Integer t1.at "ints" . to_vector . should_equal [0, 1, Nothing, 2] Problems.expect_only_warning (Invalid_Format.Error "ints" Value_Type.Integer ["0 1"]) t1 @@ -271,7 +270,7 @@ spec = t6.at "times" . to_vector . should_equal [Time_Of_Day.new 11 0 0, Time_Of_Day.new, Nothing, Nothing] Problems.expect_only_warning (Invalid_Format.Error "times" Value_Type.Time ["00 : 00 : 00"]) t6 - Test.specify "should fail to parse if whitespace is present and trimming is turned off" <| + group_builder.specify "should fail to parse if whitespace is present and trimming is turned off" <| opts = Data_Formatter.Value trim_values=False t1 = whitespace_table.parse format=opts columns="ints" type=Value_Type.Integer t1.at "ints" . to_vector . should_equal [0, Nothing, Nothing, Nothing] @@ -297,7 +296,7 @@ spec = t6.at "times" . to_vector . 
should_equal [Nothing, Nothing, Nothing, Nothing] Problems.expect_only_warning (Invalid_Format.Error "times" Value_Type.Time ["11:00:00 ", " 00:00:00", "00 : 00 : 00"]) t6 - Test.specify "should fallback to text if whitespace is present and trimming is turned off" <| + group_builder.specify "should fallback to text if whitespace is present and trimming is turned off" <| c1 = ["1", " +2", "-123", Nothing] c2 = [" 1.0 ", "2.2", Nothing, "-1.0"] c3 = ["true", " False", Nothing, "True"] @@ -309,7 +308,7 @@ spec = t2.at "floats" . to_vector . should_equal c2 t2.at "bools" . to_vector . should_equal c3 - Test.specify "should allow selecting columns by regex" <| + group_builder.specify "should allow selecting columns by regex" <| t1 = Table.new [["An", ["1", "2", "3"]], ["Am", ["4", "5", "6"]], ["C", ["7", "8", "9"]], ["D", ["10", "11", "12"]]] r1 = t1.parse columns="A.*".to_regex r1.at "An" . to_vector . should_equal [1, 2, 3] @@ -317,7 +316,7 @@ spec = r1.at "C" . to_vector . should_equal ["7", "8", "9"] r1.at "D" . to_vector . 
should_equal ["10", "11", "12"] - Test.specify "should correctly handle problems: missing input columns" <| + group_builder.specify "should correctly handle problems: missing input columns" <| t1 = Table.new [["A", ["1", "2", "3"]]] r1 = t1.parse columns=["A", "B", "C", "E"] on_problems=Problem_Behavior.Ignore r1.should_fail_with Missing_Input_Columns @@ -334,7 +333,7 @@ spec = problems = [Missing_Input_Columns.Error ["B", "C", "E"]] Problems.test_problem_handling action problems tester - Test.specify "should correctly handle problems: out of bounds indices" <| + group_builder.specify "should correctly handle problems: out of bounds indices" <| t1 = Table.new [["A", ["1", "2", "3"]]] r1 = t1.parse columns=[0, -1, 42, -5] r1.should_fail_with Missing_Input_Columns @@ -346,7 +345,7 @@ spec = problems = [Missing_Input_Columns.Error [42, -5]] Problems.test_problem_handling action problems tester - Test.specify "should allow mixed column selectors" <| + group_builder.specify "should allow mixed column selectors" <| t1 = Table.new [["Am", ["1", "2", "3"]], ["B", ["4", "5", "6"]], ["C", ["7", "8", "9"]], ["D", ["10", "11", "12"]]] r1 = t1.parse columns=["A.*".to_regex, -2, "D"] r1.at "Am" . to_vector . should_equal [1, 2, 3] @@ -354,7 +353,7 @@ spec = r1.at "C" . to_vector . should_equal [7, 8, 9] r1.at "D" . to_vector . should_equal [10, 11, 12] - Test.specify "should handle edge-cases: overlapping selectors" <| + group_builder.specify "should handle edge-cases: overlapping selectors" <| t1 = Table.new [["Am", ["1", "2", "3"]], ["B", ["4", "5", "6"]], ["C", ["7", "8", "9"]], ["D", ["10", "11", "12"]]] r1 = t1.parse columns=["A.*".to_regex, 0, "D", -1, -1, 0, 3] r1.at "Am" . to_vector . should_equal [1, 2, 3] @@ -362,18 +361,18 @@ spec = r1.at "C" . to_vector . should_equal ["7", "8", "9"] r1.at "D" . to_vector . 
should_equal [10, 11, 12] - Test.specify "should error if invalid target type is provided" <| + group_builder.specify "should error if invalid target type is provided" <| t1 = Table.new [["A", ["1", "2", "3"]]] t1.parse type=Nothing . should_fail_with Illegal_Argument - Test.specify "should error if the input column is not text" <| + group_builder.specify "should error if the input column is not text" <| t1 = Table.new [["A", [1, 2, 3]], ["B", ["4", "5", "6"]], ["C", [7, 8, 9]], ["D", ["10", "11", "12"]]] r1 = t1.parse columns=["A", "B", "C"] r1.should_fail_with Invalid_Value_Type r1.catch.related_column . should_equal "A" r1.catch.expected.should_equal "Char" - Test.specify "should error if no input columns selected, unless error_on_missing_columns=False" <| + group_builder.specify "should error if no input columns selected, unless error_on_missing_columns=False" <| t1 = Table.new [["A", ["1", "2", "3"]]] r1 = t1.parse columns=[] r1.should_fail_with No_Input_Columns_Selected @@ -391,8 +390,8 @@ spec = Problems.expect_warning No_Input_Columns_Selected r4 Problems.expect_warning (Missing_Input_Columns.Error ["nonexistent column :D", -42]) r4 - Test.group "Column.parse" <| - Test.specify "should correctly parse integers" <| + suite_builder.group "Column.parse" group_builder-> + group_builder.specify "should correctly parse integers" <| c1 = Column.from_vector "ints" ["0", "+0", "-0", "+1", "-1", "1", "000", "0010", "12345", Nothing] c2 = c1.parse type=Value_Type.Integer c2.name.should_equal c1.name @@ -404,7 +403,7 @@ spec = c3.to_vector . should_equal [0, 0, 0, 1, -1, 1, 0, 10, 12345, Nothing] Problems.assume_no_problems c3 - Test.specify "should correctly parse integers in US formats" <| + group_builder.specify "should correctly parse integers in US formats" <| cUS = Column.from_vector "ints" ["1", "000123", "-1234", "1234567", "123e456"] pUS = cUS.parse type=Value_Type.Integer pUS.to_vector . 
should_equal [1, 123, -1234, 1234567, Nothing] @@ -432,7 +431,7 @@ spec = pUS5.to_vector . should_equal [1, 123, -1234, 1234567, Nothing] Problems.expect_warning (Invalid_Format.Error "ints" Value_Type.Integer ["(123,456)"]) pUS5 - Test.specify "should correctly parse integers in European formats" <| + group_builder.specify "should correctly parse integers in European formats" <| cDE = Column.from_vector "ints" ["1", "000123", "-1.234", "1.234.567", "12.34.56"] pDE = cDE.parse type=Value_Type.Integer pDE.to_vector . should_equal [1, 123, -1234, 1234567, Nothing] @@ -448,7 +447,7 @@ spec = pSW.to_vector . should_equal [1, 123, -1234, 1234567, Nothing] Problems.expect_warning (Invalid_Format.Error "ints" Value_Type.Integer ["(123'456)"]) pSW - Test.specify "should correctly parse decimals" <| + group_builder.specify "should correctly parse decimals" <| c1 = Column.from_vector "ints" ["0", "+0", "-0", "+1", "-1", "1", "000", "0010", "12345", Nothing] c2 = c1.parse Value_Type.Float c2.name.should_equal c1.name @@ -469,7 +468,7 @@ spec = c6.to_vector . should_equal [0.0, 0.0, 1.0, 0.1, 0.123, -0.1, 0.1, 0.0, 0.1234, Nothing, 11111111.111] Problems.assume_no_problems c6 - Test.specify "should correctly parse decimals in US formats" <| + group_builder.specify "should correctly parse decimals in US formats" <| cUS = Column.from_vector "floats" ["1.23", "000123", "-12.34", "123.4567", "123e456"] pUS = cUS.parse type=Value_Type.Float pUS.to_vector . should_equal [1.23, 123, -12.34, 123.4567, Nothing] @@ -497,7 +496,7 @@ spec = pUS5.to_vector . 
should_equal [1.23, 123, -1234.567, 1234567.789, Nothing] Problems.expect_warning (Invalid_Format.Error "floats" Value_Type.Float ["(123,456)"]) pUS5 - Test.specify "should correctly parse decimals in European formats" <| + group_builder.specify "should correctly parse decimals in European formats" <| cDE = Column.from_vector "floats" ["1,23", "000123", "-1.234,567", "1.234.567,789", "12.34,56"] pDE = cDE.parse type=Value_Type.Float pDE.to_vector . should_equal [1.23, 123, -1234.567, 1234567.789, Nothing] @@ -513,7 +512,7 @@ spec = pSW.to_vector . should_equal [1.23, 123, -1234.567, 1234567, Nothing] Problems.expect_warning (Invalid_Format.Error "floats" Value_Type.Float ["(123'456)"]) pSW - Test.specify "should correctly parse booleans" <| + group_builder.specify "should correctly parse booleans" <| c1 = Column.from_vector "bools" ["true", "false", "True", "TRUE", "FALSE", Nothing, "False"] c2 = c1.parse type=Value_Type.Boolean c2.name.should_equal c1.name @@ -533,7 +532,7 @@ spec = w.value_type . should_equal Value_Type.Boolean w.cells . should_equal ["yes"] - Test.specify "should correctly parse date and time" <| + group_builder.specify "should correctly parse date and time" <| c1 = Column.from_vector "date" ["2022-05-07", "2000-01-01", "2010-12-31"] c2 = c1.parse type=Value_Type.Date c2.to_vector . should_equal [Date.new 2022 5 7, Date.new 2000 1 1, Date.new 2010 12 31] @@ -556,7 +555,7 @@ spec = w.value_type . should_equal Value_Type.Date_Time w.cells . should_equal ["42", "2010-12-31"] - Test.specify "should correctly parse date and time with format" <| + group_builder.specify "should correctly parse date and time with format" <| c1 = Column.from_vector "date" ["5/7/2022", "1/1/2000", "12/31/2010"] c2 = c1.parse type=Value_Type.Date "M/d/yyyy" c2.to_vector . should_equal [Date.new 2022 5 7, Date.new 2000 1 1, Date.new 2010 12 31] @@ -565,13 +564,13 @@ spec = c4 = c3.parse type=Value_Type.Date_Time "M/d/yyyy HH:mm:ss" c4.to_vector . 
should_equal [Date_Time.new 2022 5 7 23 59 59, Date_Time.new 2000 1 1, Date_Time.new 2010 12 31 12 34 56] - Test.specify "should handle invalid format strings gracefully" <| + group_builder.specify "should handle invalid format strings gracefully" <| c1 = Column.from_vector "date" ["5/7/2022", "1/1/2000", "12/31/2010"] c1.parse type=Value_Type.Date "M/d/fqsrf" . should_fail_with Date_Time_Format_Parse_Error c1.parse type=Value_Type.Time "ęęę" . should_fail_with Date_Time_Format_Parse_Error c1.parse type=Value_Type.Date_Time "M/d/fqsrf HH:mm:ss.fff" . should_fail_with Date_Time_Format_Parse_Error - Test.specify "should correctly work in Auto mode" <| + group_builder.specify "should correctly work in Auto mode" <| c1 = Column.from_vector "A" ["1", "2", "3"] c2 = Column.from_vector "B" ["1.0", "2.5", "3"] c3 = Column.from_vector "C" ["2022-05-07", "2000-01-01", "2010-12-31"] @@ -627,11 +626,11 @@ spec = r8.to_vector . should_equal [Nothing, Nothing, Nothing] Problems.assume_no_problems r8 - Test.specify "should error if invalid target type is provided" <| + group_builder.specify "should error if invalid target type is provided" <| c1 = Column.from_vector "A" ["1", "2", "3"] c1.parse type=Nothing . should_fail_with Illegal_Argument - Test.specify "should error if the input column is not text" <| + group_builder.specify "should error if the input column is not text" <| c1 = Column.from_vector "A" [1, 2, 3] r1 = c1.parse r1.should_fail_with Invalid_Value_Type @@ -639,4 +638,8 @@ spec = r1.catch.expected . 
should_equal "Char" -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + From 110abff6d26bd416fc78dd882ffc9522aa21d314 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 19 Jan 2024 19:04:41 +0100 Subject: [PATCH 60/93] Refactor Codegen_Spec to the new API --- .../src/Database/Codegen_Spec.enso | 145 ++++++++++++------ 1 file changed, 96 insertions(+), 49 deletions(-) diff --git a/test/Table_Tests/src/Database/Codegen_Spec.enso b/test/Table_Tests/src/Database/Codegen_Spec.enso index 4c9123d8fe87..f514f22b2c75 100644 --- a/test/Table_Tests/src/Database/Codegen_Spec.enso +++ b/test/Table_Tests/src/Database/Codegen_Spec.enso @@ -10,22 +10,40 @@ import Standard.Database.Data.Dialect import Standard.Database.Data.SQL_Type.SQL_Type from Standard.Database.Errors import Unsupported_Database_Operation -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions +from Standard.Test_New import all + polyglot java import java.sql.Types as Java_SQL_Types -spec = - test_connection = + +type Data + Value ~data + + connection self = self.data.at 0 + t1 self = self.data.at 1 + + setup = c = Database.connect (SQLite In_Memory) c.create_table "T1" [Column_Description.Value "A" Value_Type.Integer, Column_Description.Value "B" Value_Type.Char, Column_Description.Value "C" Value_Type.Boolean] c.create_table "T2" [Column_Description.Value "D" Value_Type.Integer, Column_Description.Value "E" Value_Type.Integer, Column_Description.Value "F" Value_Type.Boolean] c.create_table "T3" [Column_Description.Value "A" Value_Type.Integer, Column_Description.Value "E" Value_Type.Boolean, Column_Description.Value "F" Value_Type.Integer] - c - t1 = test_connection.query (SQL_Query.Table_Name "T1") - Test.group "[Codegen] JSON serialization" <| - Test.specify "should serialize Tables and Columns to their SQL representation" pending="ToDo: decide on how we handle ==, see 
https://github.com/enso-org/enso/issues/5241" <| - q1 = t1.filter (t1.at "A" == 42) . to_json + t1 = c.query (SQL_Query.Table_Name "T1") + Data.Value [c, t1] + + teardown self = + self.connection.close + + +add_specs suite_builder = + + suite_builder.group "[Codegen] JSON serialization" group_builder-> + data = Data.setup + + group_builder.teardown <| + data.teardown + + group_builder.specify "should serialize Tables and Columns to their SQL representation" pending="ToDo: decide on how we handle ==, see https://github.com/enso-org/enso/issues/5241" <| + q1 = data.t1.filter (data.t1.at "A" == 42) . to_json part1 = JS_Object.from_pairs [["sql_code", 'SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE ("T1"."A" = ']] part2_sub = JS_Object.from_pairs [["value", 42]] @@ -34,86 +52,115 @@ spec = expected = JS_Object.from_pairs [["query", [part1, part2, part3]]] . to_text q1.should_equal expected - q2 = t1.at "A" . to_json + q2 = data.t1.at "A" . to_json expected_2 = JS_Object.from_pairs [["query", [JS_Object.from_pairs [["sql_code", 'SELECT "T1"."A" AS "A" FROM "T1" AS "T1"']]]]] . to_text q2.should_equal expected_2 - Test.group "[Codegen] Basic Select" <| - Test.specify "should select columns from a table" <| - t1.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1"', []] - t2 = t1.select_columns ["C", "B", "undefined"] reorder=True error_on_missing_columns=False + suite_builder.group "[Codegen] Basic Select" group_builder-> + data = Data.setup + + group_builder.teardown <| + data.teardown + + group_builder.specify "should select columns from a table" <| + data.t1.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1"', []] + t2 = data.t1.select_columns ["C", "B", "undefined"] reorder=True error_on_missing_columns=False t2.to_sql.prepare . should_equal ['SELECT "T1"."C" AS "C", "T1"."B" AS "B" FROM "T1" AS "T1"', []] - foo = t1.at "A" . 
rename "FOO" + foo = data.t1.at "A" . rename "FOO" foo.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "FOO" FROM "T1" AS "T1"', []] t3 = t2.set foo new_name="bar" t3.to_sql.prepare . should_equal ['SELECT "T1"."C" AS "C", "T1"."B" AS "B", "T1"."A" AS "bar" FROM "T1" AS "T1"', []] - Test.specify "should fail if at is called for a non-existent column" <| - t1.at "undefined" . should_fail_with No_Such_Column + group_builder.specify "should fail if at is called for a non-existent column" <| + data.t1.at "undefined" . should_fail_with No_Such_Column - Test.specify "should allow to limit the amount of returned results" <| - t2 = t1.limit 5 + group_builder.specify "should allow to limit the amount of returned results" <| + t2 = data.t1.limit 5 t2.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" LIMIT 5', []] - Test.group "[Codegen] Masking Tables and Columns" <| - Test.specify "should generate a single BETWEEN expression" <| - t2 = t1.filter "A" (Filter_Condition.Between 10 20) + suite_builder.group "[Codegen] Masking Tables and Columns" group_builder-> + data = Data.setup + + group_builder.teardown <| + data.teardown + + group_builder.specify "should generate a single BETWEEN expression" <| + t2 = data.t1.filter "A" (Filter_Condition.Between 10 20) t2.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE ("T1"."A" BETWEEN ? AND ?)', [10, 20]] - Test.specify "should generate an IN expression" <| - t2 = t1.filter "A" (Filter_Condition.Is_In [1, 2, 'foo']) + group_builder.specify "should generate an IN expression" <| + t2 = data.t1.filter "A" (Filter_Condition.Is_In [1, 2, 'foo']) t2.to_sql.prepare . 
should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE COALESCE("T1"."A" IN (?, ?, ?), FALSE)', [1, 2, "foo"]] - t3 = t1.filter "A" (Filter_Condition.Is_In [1]) + t3 = data.t1.filter "A" (Filter_Condition.Is_In [1]) t3.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE COALESCE("T1"."A" IN (?), FALSE)', [1]] - t4 = t1.filter "A" (Filter_Condition.Is_In []) + t4 = data.t1.filter "A" (Filter_Condition.Is_In []) t4.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE (FALSE)', []] - t5 = t1.filter "A" (Filter_Condition.Is_In [Nothing]) + t5 = data.t1.filter "A" (Filter_Condition.Is_In [Nothing]) t5.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE ((FALSE) OR ("T1"."A" IS NULL))', []] - Test.group "[Codegen] Handling Missing Values" <| - Test.specify "fill_nothing should allow to replace missing values in a column with a constant" <| - c = t1.at "B" . fill_nothing "not-applicable" + suite_builder.group "[Codegen] Handling Missing Values" group_builder-> + data = Data.setup + + group_builder.teardown <| + data.teardown + + group_builder.specify "fill_nothing should allow to replace missing values in a column with a constant" <| + c = data.t1.at "B" . fill_nothing "not-applicable" c.to_sql.prepare . should_equal ['SELECT CAST(COALESCE("T1"."B", ?) AS TEXT) AS "B" FROM "T1" AS "T1"', ["not-applicable"]] - Test.specify "filter_blank_rows should drop rows that contain at least one missing column in a Table" <| - t2 = t1.filter_blank_rows when=Blank_Selector.Any_Cell + group_builder.specify "filter_blank_rows should drop rows that contain at least one missing column in a Table" <| + t2 = data.t1.filter_blank_rows when=Blank_Selector.Any_Cell t2.to_sql.prepare . 
should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE (NOT ((("T1"."A" IS NULL) OR (("T1"."B" IS NULL) OR ("T1"."B" = \'\'))) OR ("T1"."C" IS NULL)))', []] - t3 = t1.filter_blank_rows when=Blank_Selector.All_Cells + t3 = data.t1.filter_blank_rows when=Blank_Selector.All_Cells t3.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE (NOT ((("T1"."A" IS NULL) AND (("T1"."B" IS NULL) OR ("T1"."B" = \'\'))) AND ("T1"."C" IS NULL)))', []] - Test.group "[Codegen] Sorting" <| - Test.specify "should allow sorting by a single column name" <| - r1 = t1.order_by ([Sort_Column.Name "A"]) . at "B" + suite_builder.group "[Codegen] Sorting" group_builder-> + data = Data.setup + + group_builder.teardown <| + data.teardown + + group_builder.specify "should allow sorting by a single column name" <| + r1 = data.t1.order_by ([Sort_Column.Name "A"]) . at "B" r1.to_sql.prepare . should_equal ['SELECT "T1"."B" AS "B" FROM "T1" AS "T1" ORDER BY "T1"."A" ASC', []] - r2 = t1.order_by ([Sort_Column.Name "B" Sort_Direction.Descending]) . at "A" + r2 = data.t1.order_by ([Sort_Column.Name "B" Sort_Direction.Descending]) . at "A" r2.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A" FROM "T1" AS "T1" ORDER BY "T1"."B" DESC', []] - Test.specify 'should allow sorting by multiple column names' <| - r1 = t1.order_by ([Sort_Column.Name 'A', Sort_Column.Name 'B']) + group_builder.specify 'should allow sorting by multiple column names' <| + r1 = data.t1.order_by ([Sort_Column.Name 'A', Sort_Column.Name 'B']) r1.to_sql.prepare . 
should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" ORDER BY "T1"."A" ASC, "T1"."B" ASC', []] - Test.specify 'should allow sorting with specific by-column rules' <| - r1 = t1.order_by ([Sort_Column.Name "A", Sort_Column.Name "B" Sort_Direction.Descending]) + group_builder.specify 'should allow sorting with specific by-column rules' <| + r1 = data.t1.order_by ([Sort_Column.Name "A", Sort_Column.Name "B" Sort_Direction.Descending]) r1.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" ORDER BY "T1"."A" ASC, "T1"."B" DESC', []] - Test.specify 'should return warnings and errors when passed a non-existent column' <| - t2 = t1.order_by ([Sort_Column.Name 'foobar']) + group_builder.specify 'should return warnings and errors when passed a non-existent column' <| + t2 = data.t1.order_by ([Sort_Column.Name 'foobar']) t2.should_fail_with Missing_Input_Columns - Test.group "[Codegen] Aggregation" <| - Test.specify "should allow to count rows" <| - code = t1.aggregate [Group_By "A" "A grp", Count "counter"] . to_sql . prepare + suite_builder.group "[Codegen] Aggregation" group_builder-> + data = Data.setup + + group_builder.teardown <| + data.teardown + + group_builder.specify "should allow to count rows" <| + code = data.t1.aggregate [Group_By "A" "A grp", Count "counter"] . to_sql . prepare code . should_equal ['SELECT "T1"."A grp" AS "A grp", "T1"."counter" AS "counter" FROM (SELECT "T1"."A" AS "A grp", COUNT(*) AS "counter" FROM "T1" AS "T1" GROUP BY "T1"."A") AS "T1"', []] - Test.specify "should allow to group by multiple fields" <| - code = t1.aggregate [Sum "A" "sum_a", Group_By "C", Group_By "B" "B grp"] . to_sql . prepare + group_builder.specify "should allow to group by multiple fields" <| + code = data.t1.aggregate [Sum "A" "sum_a", Group_By "C", Group_By "B" "B grp"] . to_sql . prepare code . 
should_equal ['SELECT "T1"."sum_a" AS "sum_a", "T1"."C" AS "C", "T1"."B grp" AS "B grp" FROM (SELECT SUM("T1"."A") AS "sum_a", "T1"."C" AS "C", "T1"."B" AS "B grp" FROM "T1" AS "T1" GROUP BY "T1"."C", "T1"."B") AS "T1"', []] -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter + From ca65fe46d9ba7a988ad57a5a38477b9e78205ba6 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Mon, 22 Jan 2024 16:21:51 +0100 Subject: [PATCH 61/93] Add some teardowns. Postgres tests were failing on "Too many clients" error. --- .../Missing_Values_Spec.enso | 10 +++ .../Take_Drop_Spec.enso | 21 ++++- .../src/Database/Common/Common_Spec.enso | 25 ++---- .../Table_Tests/src/Database/SQLite_Spec.enso | 85 +++++++++++++------ 4 files changed, 100 insertions(+), 41 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso index f7bb0a56e61b..6cfb6f5d6ab4 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso @@ -41,6 +41,13 @@ type Data table_builder [c, g, h] Data.Value [connection, t0, t1, t3, t4] + teardown self = + self.connection.drop_table self.t0.name + self.connection.drop_table self.t1.name + self.connection.drop_table self.t3.name + self.connection.drop_table self.t4.name + self.connection.close + add_specs suite_builder setup = prefix = setup.prefix @@ -50,6 +57,9 @@ add_specs suite_builder setup = suite_builder.group prefix+"Dropping Missing Values" group_builder-> data = Data.setup create_connection_fn table_builder + group_builder.teardown <| + data.teardown + group_builder.specify "filter_blank_rows should drop rows that contain at least one missing cell" <| d = data.t0.filter_blank_rows when=Blank_Selector.Any_Cell d.row_count . 
should_equal 1 diff --git a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso index 4fc2650488d8..8dc3450268be 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso @@ -30,6 +30,11 @@ type Table_Take_Drop_Data empty = table.remove_all_rows [connection, table, empty] + teardown self = + self.connection.drop_table self.table.name + self.connection.drop_table self.empty.name + self.connection.close + type Column_Take_Drop_Data Value ~data @@ -56,6 +61,14 @@ type Column_Take_Drop_Data [connection, table, alpha, beta, empty_alpha, empty_beta] + teardown self = + self.connection.drop_table self.table.name + self.connection.drop_table self.alpha.name + self.connection.drop_table self.beta.name + self.connection.drop_table self.empty_alpha.name + self.connection.drop_table self.empty_beta.name + self.connection.close + add_specs suite_builder setup = prefix = setup.prefix @@ -65,6 +78,9 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.take/drop" group_builder-> data = Table_Take_Drop_Data.setup create_connection_fn table_builder + group_builder.teardown <| + data.teardown + group_builder.specify "should allow selecting first or last N rows" <| data.table.take.at "alpha" . to_vector . should_equal [1] data.table.take.at "beta" . to_vector . should_equal ["A"] @@ -255,7 +271,10 @@ add_specs suite_builder setup = suite_builder.group prefix+"Column.take/drop" group_builder-> data = Column_Take_Drop_Data.setup create_connection_fn table_builder - + + group_builder.teardown <| + data.teardown + group_builder.specify "should allow selecting first or last N rows" <| data.alpha.take.to_vector . should_equal [1] data.beta.take.to_vector . 
should_equal ["A"] diff --git a/test/Table_Tests/src/Database/Common/Common_Spec.enso b/test/Table_Tests/src/Database/Common/Common_Spec.enso index a8248b61c428..b6ea1393c61d 100644 --- a/test/Table_Tests/src/Database/Common/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Common_Spec.enso @@ -21,12 +21,10 @@ import project.Database.Helpers.Name_Generator upload connection prefix data temporary=True = name = Name_Generator.random_name prefix table = data.select_into_database_table connection name temporary=temporary primary_key=Nothing - IO.println <| " upload: Created table with name " + name table drop_table connection name = - IO.println <| " drop_table: Dropping table with name " + name Panic.catch Any (connection.drop_table name) caught_panic-> IO.println <| "Failed to drop table: " + name + " because of: " + caught_panic.payload.to_display_text @@ -42,7 +40,6 @@ type Basic_Data big_size self = self.data.at 5 setup create_connection_fn = Basic_Data.Value <| - IO.println <| " Common_Spec_New.Basic_Data.setup" big_size = 1000 connection = create_connection_fn Nothing t1 = upload connection "T1" (Table.new [["a", [1, 4]], ["b", [2, 5]], ["c", [3, 6]]]) @@ -60,12 +57,11 @@ type Basic_Data [connection, t1, t2, t4, big_table, big_size] teardown self = - data = self - IO.println <| " Common_Spec_New.Basic_Data.teardown" - drop_table data.connection data.t1.name - drop_table data.connection data.t2.name - drop_table data.connection data.t4.name - drop_table data.connection data.big_table.name + drop_table self.connection self.t1.name + drop_table self.connection self.t2.name + drop_table self.connection self.t4.name + drop_table self.connection self.big_table.name + self.connection.close type Sorting_Data @@ -80,7 +76,6 @@ type Sorting_Data t8 self = self.data.at 6 setup create_connection_fn = Sorting_Data.Value <| - IO.println <| " Common_Spec_New.Sorting_Data.setup" connection = create_connection_fn Nothing ints = [1, 2, 3, 4, 5] reals = [1.3, 4.6, 3.2, 
5.2, 1.6] @@ -93,10 +88,9 @@ type Sorting_Data [connection, df, ints, reals, bools, texts, t8] teardown self = - IO.println <| " Common_Spec_New.Sorting_Data.teardown" drop_table self.connection self.df.name drop_table self.connection self.t8.name - + self.connection.close type Aggregation_Data @@ -132,9 +126,8 @@ type Aggregation_Data [connection, t9] teardown self = - IO.println <| " Common_Spec_New.Aggregation_Data.teardown" drop_table self.connection self.t9.name - + self.connection.close type Missing_Values_Data @@ -151,11 +144,11 @@ type Missing_Values_Data [connection, t4] teardown self = - IO.println <| " Common_Spec_New.Missing_Values_Data.teardown" drop_table self.connection self.t4.name + self.connection.close -## +## Adds common database tests specs to the suite builder. Arguments: - create_connection_fn: A function that creates an appropriate Connection to the database backend. diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 16252a605867..4a8ea31f2001 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -30,6 +30,9 @@ type Test_Data connection = create_connection_func Nothing Test_Data.Value connection + teardown self = + self.connection.close + type Metadata_Data Value ~data @@ -50,6 +53,11 @@ type Metadata_Data t.update_rows (Table.from_rows ["strs", "ints", "bools", "reals"] [row1, row2, row3]) update_action=Update_Action.Insert Metadata_Data.Value [connection, tinfo, t] + teardown self = + self.connection.drop_table self.t.name + self.connection.close + + type Tables_And_Table_Types_Data Value ~data @@ -71,6 +79,8 @@ type Tables_And_Table_Types_Data [connection, tinfo, vinfo, temporary_table] + teardown self = + self.connection.close sqlite_specific_spec suite_builder prefix create_connection_func setup = @@ -79,6 +89,9 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup = suite_builder.group 
prefix+"Schemas and Databases" group_builder-> data = Test_Data.setup create_connection_func + group_builder.teardown <| + data.teardown + group_builder.specify "should be able to get current database and list databases" <| data.connection.database . should_equal Nothing data.connection.databases . should_equal [Nothing] @@ -96,6 +109,9 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup = suite_builder.group prefix+"Tables and Table Types" group_builder-> data = Tables_And_Table_Types_Data.setup create_connection_func + group_builder.teardown <| + data.teardown + group_builder.specify "should be able to list table types" <| table_types = data.connection.table_types table_types.length . should_not_equal 0 @@ -134,6 +150,9 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup = suite_builder.group prefix+"Error Handling" group_builder-> data = Test_Data.setup create_connection_func + group_builder.teardown <| + data.teardown + group_builder.specify "should wrap errors" <| data.connection.read (SQL_Query.Raw_SQL "foobar") . should_fail_with SQL_Error data.connection.execute_update "foobar" . should_fail_with SQL_Error @@ -173,6 +192,9 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup = suite_builder.group prefix+"Metadata" group_builder-> data = Metadata_Data.setup create_connection_func + group_builder.teardown <| + data.teardown + group_builder.specify "should return Table information" <| i = data.t.info i.at "Column" . to_vector . 
should_equal ["strs", "ints", "bools", "reals"] @@ -192,6 +214,9 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup = suite_builder.group prefix+"Dialect-specific codegen" group_builder-> data = Metadata_Data.setup create_connection_func + group_builder.teardown <| + data.teardown + group_builder.specify "should generate queries for the Distinct operation" <| t = data.connection.query (SQL_Query.Table_Name data.tinfo) code_template = 'SELECT "{Tinfo}"."strs" AS "strs", "{Tinfo}"."ints" AS "ints", "{Tinfo}"."bools" AS "bools", "{Tinfo}"."reals" AS "reals" FROM (SELECT "{Tinfo}_inner"."strs" AS "strs", "{Tinfo}_inner"."ints" AS "ints", "{Tinfo}_inner"."bools" AS "bools", "{Tinfo}_inner"."reals" AS "reals" FROM (SELECT "{Tinfo}"."strs" AS "strs", "{Tinfo}"."ints" AS "ints", "{Tinfo}"."bools" AS "bools", "{Tinfo}"."reals" AS "reals" FROM "{Tinfo}" AS "{Tinfo}") AS "{Tinfo}_inner" GROUP BY "{Tinfo}_inner"."strs") AS "{Tinfo}"' @@ -199,8 +224,13 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup = t.distinct ["strs"] . to_sql . prepare . should_equal [expected_code, []] suite_builder.group prefix+"math functions" group_builder-> + data = Test_Data.setup create_connection_func + + group_builder.teardown <| + data.teardown + group_builder.specify "round, trunc, ceil, floor" <| - col = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] . at "x" + col = (table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] connection=data.connection) . at "x" col . cast Value_Type.Float . round . value_type . should_equal Value_Type.Float col . cast Value_Type.Integer . round . value_type . should_equal Value_Type.Float @@ -226,50 +256,57 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup = col . cast Value_Type.Integer . truncate . value_type . should_equal Value_Type.Integer col . cast Value_Type.Decimal . truncate . value_type . 
should_equal Value_Type.Float - do_op n op = - table = table_builder [["x", [n]]] + do_op data n op = + table = table_builder [["x", [n]]] connection=data.connection result = table.at "x" |> op result.to_vector.at 0 - do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers) + + do_round data n dp=0 use_bankers=False = do_op data n (_.round dp use_bankers) group_builder.specify "Can round correctly near the precision limit" <| # This value varies depending on the version of SQLite. - do_round 1.2222222222222225 15 . should_equal 1.222222222222223 0.000000000000002 - do_round -1.2222222222222225 15 . should_equal -1.222222222222223 0.000000000000002 - do_round 1.2222222222222235 15 . should_equal 1.222222222222223 - do_round -1.2222222222222235 15 . should_equal -1.222222222222223 + do_round data 1.2222222222222225 15 . should_equal 1.222222222222223 0.000000000000002 + do_round data -1.2222222222222225 15 . should_equal -1.222222222222223 0.000000000000002 + do_round data 1.2222222222222235 15 . should_equal 1.222222222222223 + do_round data -1.2222222222222235 15 . should_equal -1.222222222222223 group_builder.specify "Can round correctly near the precision limit, using banker's rounding" <| - do_round 1.2222222222222225 15 use_bankers=True . should_equal 1.222222222222222 - do_round -1.2222222222222225 15 use_bankers=True . should_equal -1.222222222222222 - do_round 1.2222222222222235 15 use_bankers=True . should_equal 1.222222222222224 - do_round -1.2222222222222235 15 use_bankers=True . should_equal -1.222222222222224 + do_round data 1.2222222222222225 15 use_bankers=True . should_equal 1.222222222222222 + do_round data -1.2222222222222225 15 use_bankers=True . should_equal -1.222222222222222 + do_round data 1.2222222222222235 15 use_bankers=True . should_equal 1.222222222222224 + do_round data -1.2222222222222235 15 use_bankers=True . 
should_equal -1.222222222222224 group_builder.specify "Can handle NaN/Infinity" <| nan_result = if setup.test_selection.is_nan_and_nothing_distinct then Number.nan else Nothing ops = [.round, .truncate, .ceil, .floor] ops.each op-> - do_op Number.nan op . should_equal nan_result - do_op Number.positive_infinity op . should_equal Number.positive_infinity - do_op Number.negative_infinity op . should_equal Number.negative_infinity + do_op data Number.nan op . should_equal nan_result + do_op data Number.positive_infinity op . should_equal Number.positive_infinity + do_op data Number.negative_infinity op . should_equal Number.negative_infinity group_builder.specify "round returns the correct type" <| - do_round 231.2 1 . should_be_a Float - do_round 231.2 0 . should_be_a Float - do_round 231.2 . should_be_a Float - do_round 231.2 -1 . should_be_a Float + do_round data 231.2 1 . should_be_a Float + do_round data 231.2 0 . should_be_a Float + do_round data 231.2 . should_be_a Float + do_round data 231.2 -1 . should_be_a Float group_builder.specify "round returns the correct type" <| - do_round 231 1 . should_be_a Float - do_round 231 0 . should_be_a Float - do_round 231 . should_be_a Float - do_round 231 -1 . should_be_a Float + do_round data 231 1 . should_be_a Float + do_round data 231 0 . should_be_a Float + do_round data 231 . should_be_a Float + do_round data 231 -1 . should_be_a Float suite_builder.group prefix+"Column.const" group_builder-> + data = Test_Data.setup create_connection_func + + group_builder.teardown <| + data.teardown + group_builder.specify "Does not support making a constant column from a Date" <| - t = table_builder [["x", ["1", "2", "3"]]] + t = table_builder [["x", ["1", "2", "3"]]] connection=data.connection t.at "x" . const (Date.new 12 4 12) . should_fail_with Unsupported_Database_Operation + sqlite_spec suite_builder prefix create_connection_func = name_counter = Ref.new 0 # The default `connection` parameter always create a new connection. 
From 666b1f7a2118f8924f75627facb2c4c5fe1a676d Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Mon, 22 Jan 2024 16:22:55 +0100 Subject: [PATCH 62/93] Refactor Postgres tests to Test_New --- .../Common/Names_Length_Limits_Spec.enso | 135 ++--- .../src/Database/Postgres_Spec.enso | 513 +++++++++++------- .../Types/Postgres_Type_Mapping_Spec.enso | 91 ++-- 3 files changed, 436 insertions(+), 303 deletions(-) diff --git a/test/Table_Tests/src/Database/Common/Names_Length_Limits_Spec.enso b/test/Table_Tests/src/Database/Common/Names_Length_Limits_Spec.enso index 545764358ab7..e752e0d70b52 100644 --- a/test/Table_Tests/src/Database/Common/Names_Length_Limits_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Names_Length_Limits_Spec.enso @@ -6,16 +6,24 @@ from Standard.Table.Errors import No_Such_Column, Name_Too_Long, Truncated_Colum from Standard.Database import all from Standard.Database.Errors import Table_Not_Found, Unsupported_Database_Operation -import Standard.Test.Extensions -from Standard.Test import Test, Test_Suite, Problems -from Standard.Test.Execution_Context_Helpers import run_with_and_without_output +from Standard.Test_New import all +from Standard.Test_New.Execution_Context_Helpers import run_with_and_without_output import project.Util import project.Database.Helpers.Name_Generator -spec prefix connection = - Test.group prefix+"Support for Long Column/Table names" <| - entity_naming_properties = connection.base_connection.entity_naming_properties +type Data + Value ~connection + + setup create_connection_func = + Data.Value (create_connection_func Nothing) + + +add_specs suite_builder prefix create_connection_func = + data = Data.setup create_connection_func + + suite_builder.group prefix+"Support for Long Column/Table names" group_builder-> + entity_naming_properties = data.connection.base_connection.entity_naming_properties max_table_name_length = entity_naming_properties.for_table_names.size_limit max_column_name_length = 
entity_naming_properties.for_column_names.size_limit has_maximum_table_name_length = max_table_name_length.is_nothing.not @@ -23,34 +31,34 @@ spec prefix connection = big_name_length = 10000 if has_maximum_table_name_length.not then - Test.specify "should allow to create tables with very long names" <| + group_builder.specify "should allow to create tables with very long names" <| name = "a" * big_name_length src = Table.new [["X", [1, 2, 3]]] - dest = src.select_into_database_table connection name temporary=True + dest = src.select_into_database_table data.connection name temporary=True Problems.assume_no_problems dest - dest_fetched = connection.query name + dest_fetched = data.connection.query name dest_fetched.at "X" . to_vector . should_equal [1, 2, 3] if has_maximum_table_name_length then - Test.specify "should not allow to create a table with a name that is too long" <| + group_builder.specify "should not allow to create a table with a name that is too long" <| name = "a" * (max_table_name_length + 1) src = Table.new [["X", [1, 2, 3]]] run_with_and_without_output <| - r = src.select_into_database_table connection name temporary=True + r = src.select_into_database_table data.connection name temporary=True r.should_fail_with Name_Too_Long r.catch.entity_kind . should_equal "table" r.catch.name . should_equal name r.catch.to_display_text . should_contain "The table name" r.catch.to_display_text . should_contain "is too long" - r2 = connection.create_table name [Column_Description.Value "X" Value_Type.Integer] temporary=True + r2 = data.connection.create_table name [Column_Description.Value "X" Value_Type.Integer] temporary=True r2.should_fail_with Name_Too_Long - connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found + data.connection.query (SQL_Query.Table_Name name) . 
should_fail_with Table_Not_Found - Test.specify "should ensure length is measured in small units, even if grapheme length is lower" <| + group_builder.specify "should ensure length is measured in small units, even if grapheme length is lower" <| big_grapheme = '\u{1F926}\u{1F3FC}\u200D\u2642\uFE0F' big_grapheme.length . should_equal 1 big_grapheme.char_vector . length . should_equal 7 @@ -60,7 +68,7 @@ spec prefix connection = name_ok = "a" * max_table_name_length src = Table.new [["X", [1, 2, 3]]] - t1 = src.select_into_database_table connection name_ok temporary=True + t1 = src.select_into_database_table data.connection name_ok temporary=True Problems.assume_no_problems t1 ## This is a name that has Enso-length equal to maximum. @@ -69,7 +77,7 @@ spec prefix connection = grapheme clusters. name_large = big_grapheme * max_table_name_length - t2 = src.select_into_database_table connection name_large temporary=True + t2 = src.select_into_database_table data.connection name_large temporary=True t2.should_fail_with Name_Too_Long t2.catch.entity_kind . should_equal "table" t2.catch.name . should_equal name_large @@ -86,25 +94,25 @@ spec prefix connection = For example it seems to work fine on MySQL. 
name_medium = "ą" * max_table_name_length name_medium_encoded = entity_naming_properties.for_table_names.encoded_size name_medium - t3 = src.select_into_database_table connection name_medium temporary=True + t3 = src.select_into_database_table data.connection name_medium temporary=True case name_medium_encoded > max_table_name_length of True -> t3.should_fail_with Name_Too_Long False -> Problems.assume_no_problems t3 - Test.specify "should not mangle a too long name when accessing tables, returning a different table" <| + group_builder.specify "should not mangle a too long name when accessing tables, returning a different table" <| long_name = "z" * max_table_name_length src = Table.new [["X", [1, 2, 3]]] - t1 = src.select_into_database_table connection long_name temporary=True + t1 = src.select_into_database_table data.connection long_name temporary=True Problems.assume_no_problems t1 - connection.query long_name . at "X" . to_vector . should_equal [1, 2, 3] + data.connection.query long_name . at "X" . to_vector . should_equal [1, 2, 3] longer_name_with_same_prefix = long_name + ("z" * 10) - connection.query longer_name_with_same_prefix . should_fail_with Table_Not_Found - connection.query (SQL_Query.Table_Name longer_name_with_same_prefix) . should_fail_with Table_Not_Found + data.connection.query longer_name_with_same_prefix . should_fail_with Table_Not_Found + data.connection.query (SQL_Query.Table_Name longer_name_with_same_prefix) . should_fail_with Table_Not_Found - Test.specify "should be fine joining tables with long names" <| + group_builder.specify "should be fine joining tables with long names" <| ## If we know the maximum length, we choose a length that will be just short enough to fit in the limit, but long enough that after concatenating two of such names, the result will exceed the limit. 
@@ -116,9 +124,9 @@ spec prefix connection = name_1 = ("x" * (name_length - 1)) + "1" name_2 = ("x" * (name_length - 1)) + "2" name_3 = ("x" * (name_length - 1)) + "3" - t1 = (Table.new [["X", [1, 2]]]).select_into_database_table connection name_1 temporary=True - t2 = (Table.new [["X", [1, 2]]]).select_into_database_table connection name_2 temporary=True - t3 = (Table.new [["X", [1, 2]]]).select_into_database_table connection name_3 temporary=True + t1 = (Table.new [["X", [1, 2]]]).select_into_database_table data.connection name_1 temporary=True + t2 = (Table.new [["X", [1, 2]]]).select_into_database_table data.connection name_2 temporary=True + t3 = (Table.new [["X", [1, 2]]]).select_into_database_table data.connection name_3 temporary=True Test.with_clue "join: " <| t12 = t1.join t2 @@ -160,7 +168,7 @@ spec prefix connection = t11.at "X" . to_vector . should_contain_the_same_elements_as [1, 2] Problems.assume_no_problems t11 - Test.specify "should be fine operating on columns with long names" <| + group_builder.specify "should be fine operating on columns with long names" <| ## If we know the maximum length, we choose a length that will be just short enough to fit in the limit, but long enough that after concatenating two of such names, the result will exceed the limit. @@ -172,7 +180,7 @@ spec prefix connection = name_1 = ("x" * (name_length - 1)) + "1" name_2 = ("x" * (name_length - 1)) + "2" src = Table.new [[name_1, [1, 2, 3]], [name_2, [4, 5, 6]]] - t1 = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + t1 = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True # We create 2 Maximum columns that if wrongly truncated will have the same name, introducing possible ambiguity to further queries. 
t2 = t1.aggregate [Aggregate_Column.Group_By name_1, Aggregate_Column.Maximum name_2, Aggregate_Column.Maximum name_1] @@ -189,9 +197,9 @@ spec prefix connection = last_column.to_vector . should_contain_the_same_elements_as [4, 10, 18] if has_maximum_column_name_length.not then - Test.specify "should allow to create very long column names" <| + group_builder.specify "should allow to create very long column names" <| src = Table.new [["X", [1, 2, 3]]] - db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True c = db_table.at "X" long_name = "a" * big_name_length c2 = c.rename long_name @@ -214,9 +222,9 @@ spec prefix connection = v3 . should_equal [1, 4, 9] Problems.assume_no_problems v3 - Test.specify "should allow to use long names in complex queries" <| + group_builder.specify "should allow to use long names in complex queries" <| src = Table.new [["X", [1, 2, 3]]] - db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True c = db_table.at "X" long_name = "a" * big_name_length c2 = c.rename long_name @@ -229,23 +237,23 @@ spec prefix connection = t3.at long_name . to_vector . should_equal [1, 4, 9] t3.at ("Right_" + long_name) . to_vector . 
should_equal [1, 4, 9] - Test.specify "should allow to upload tables with very long column names" <| + group_builder.specify "should allow to upload tables with very long column names" <| name_a = "x" * big_name_length + "a" name_b = "x" * big_name_length + "b" src = Table.new [[name_a, [1, 2, 3]], [name_b, [4, 5, 6]]] - db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True Problems.assume_no_problems db_table db_table.at name_a . to_vector . should_equal [1, 2, 3] db_table.at name_b . to_vector . should_equal [4, 5, 6] - db_table_2 = connection.create_table (Name_Generator.random_name "long-column-names") [Column_Description.Value name_a Value_Type.Integer] temporary=True + db_table_2 = data.connection.create_table (Name_Generator.random_name "long-column-names") [Column_Description.Value name_a Value_Type.Integer] temporary=True Problems.assume_no_problems db_table_2 db_table_2.row_count . should_equal 0 if has_maximum_column_name_length then - Test.specify "should raise an error when renaming a column to a name that is too long" <| + group_builder.specify "should raise an error when renaming a column to a name that is too long" <| src = Table.new [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] - db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True c = db_table.at "X" long_name = "a" * (max_column_name_length + 1) c2 = c.rename long_name @@ -262,7 +270,7 @@ spec prefix connection = Problems.assume_no_problems <| db_table.set "[X] + [Y] * 10" "Z" db_table.set "[X] + [Y] * 10" long_name . 
should_fail_with Name_Too_Long - Test.specify "should prevent upload if column names are too long" <| + group_builder.specify "should prevent upload if column names are too long" <| name_a = "a" * (max_column_name_length + 1) name_b = "b" * (max_column_name_length + 1) @@ -270,17 +278,17 @@ spec prefix connection = src = Table.new [[name_a, [1, 2, 3]], [name_b, [4, 5, 6]]] run_with_and_without_output <| - r1 = src.select_into_database_table connection table_name temporary=True + r1 = src.select_into_database_table data.connection table_name temporary=True r1.should_fail_with Name_Too_Long r1.catch.entity_kind . should_equal "column" [name_a, name_b].should_contain r1.catch.name - r2 = connection.create_table table_name [Column_Description.Value name_a Value_Type.Integer] temporary=True + r2 = data.connection.create_table table_name [Column_Description.Value name_a Value_Type.Integer] temporary=True r2.should_fail_with Name_Too_Long - connection.query (SQL_Query.Table_Name table_name) . should_fail_with Table_Not_Found + data.connection.query (SQL_Query.Table_Name table_name) . should_fail_with Table_Not_Found - Test.specify "should ensure length is measured in code units, even if grapheme length is lower" <| + group_builder.specify "should ensure length is measured in code units, even if grapheme length is lower" <| big_grapheme = '\u{1F926}\u{1F3FC}\u200D\u2642\uFE0F' big_grapheme.length . should_equal 1 big_grapheme.char_vector . length . should_equal 7 @@ -289,7 +297,7 @@ spec prefix connection = ASCII names at max length should always work. name_ok = "a" * max_column_name_length src1 = Table.new [["X", [1, 2, 3]]] - db_table1 = src1.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + db_table1 = src1.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True Problems.assume_no_problems <| db_table1.at "X" . 
rename name_ok @@ -306,7 +314,7 @@ spec prefix connection = r1.catch.to_display_text.should_contain "too long" src2 = Table.new [[name_ok, [1, 2, 3]], [name_large, [4, 5, 6]]] - r2 = src2.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + r2 = src2.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True r2.should_fail_with Name_Too_Long r2.catch.name . should_equal name_large @@ -315,7 +323,7 @@ spec prefix connection = name_medium_encoded = entity_naming_properties.for_table_names.encoded_size name_medium r3 = db_table1.at "X" . rename name_medium src3 = Table.new [[name_ok, [1, 2, 3]], [name_medium, [4, 5, 6]]] - r4 = src3.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + r4 = src3.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True case name_medium_encoded > max_column_name_length of True -> r3.should_fail_with Name_Too_Long @@ -324,11 +332,11 @@ spec prefix connection = Problems.assume_no_problems r3 Problems.assume_no_problems r4 - Test.specify "should truncate the column name if the resulting operation-generated name is too long, without warnings" <| + group_builder.specify "should truncate the column name if the resulting operation-generated name is too long, without warnings" <| name_a = "a" * max_column_name_length name_b = "b" * max_column_name_length src = Table.new [[name_a, [1]], [name_b, [2]]] - db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True a = db_table.at name_a b = db_table.at name_b @@ -354,20 +362,20 @@ spec prefix connection = c3.name.should_contain "..., " c3.name.should_contain ")" - Test.specify "raise an error if name provided by the user in 
aggregate is too long" <| + group_builder.specify "raise an error if name provided by the user in aggregate is too long" <| src = Table.new [["X", [1, 2, 3]]] - db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True long_name = "a" * (max_column_name_length + 1) r = db_table.aggregate [Aggregate_Column.Maximum "X" new_name=long_name] r.should_fail_with Name_Too_Long r.catch.entity_kind . should_equal "column" r.catch.name . should_equal long_name - Test.specify "should truncate and deduplicate autogenerated names in aggregate, and issue a warning" <| + group_builder.specify "should truncate and deduplicate autogenerated names in aggregate, and issue a warning" <| name_a = "x" * (max_column_name_length - 1) + "A" name_b = "x" * (max_column_name_length - 1) + "B" src = Table.new [[name_a, [1, 2, 3]], [name_b, [4, 5, 6]]] - db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True t2 = db_table.aggregate [Aggregate_Column.Maximum name_a, Aggregate_Column.Maximum name_b] w1 = Problems.expect_warning Truncated_Column_Names t2 @@ -381,7 +389,7 @@ spec prefix connection = letter = Text.from_codepoints [65 + i] "x" * (max_column_name_length - 1) + letter src2 = Table.new (names.map_with_index i-> name-> [name, [100 + i, 200 + i]]) - db_table2 = src2.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + db_table2 = src2.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True Problems.assume_no_problems db_table2 t3 = db_table2.aggregate (names.map name-> Aggregate_Column.Maximum name) w2 = Problems.expect_warning 
Truncated_Column_Names t3 @@ -390,11 +398,11 @@ spec prefix connection = (0.up_to 15).each i-> t3.at (w2.find_truncated_name ("Maximum " + names.at i)) . to_vector . should_equal [200 + i] - Test.specify "should truncate and deduplicate the names in join/cross_join, and issue a warning" <| + group_builder.specify "should truncate and deduplicate the names in join/cross_join, and issue a warning" <| name_a = "x" * (max_column_name_length - 1) + "A" name_b = "x" * (max_column_name_length - 1) + "B" src = Table.new [[name_a, [1, 2, 3]], [name_b, [4, 5, 6]]] - db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True + db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True Test.with_clue "join: " <| t2 = db_table.join db_table join_kind=Join_Kind.Left_Outer on=name_a right_prefix="RIGHT_" @@ -422,13 +430,13 @@ spec prefix connection = m2.at (w.truncated_names.at 0) . to_vector . should_contain_the_same_elements_as [1, 2, 3] m2.at (w.truncated_names.at 1) . to_vector . 
should_contain_the_same_elements_as [4, 5, 6] - Test.specify "should truncate new column names in other operations" <| + group_builder.specify "should truncate new column names in other operations" <| name_a = "x" * (max_column_name_length - 1) + "A" name_b = "x" * (max_column_name_length - 1) + "B" src_a = Table.new [[name_a, ["1", "2", "3"]]] src_b = Table.new [[name_b, [4, 5, 6]]] - db_a = src_a.select_into_database_table connection (Name_Generator.random_name "long-column-names-a") temporary=True - db_b = src_b.select_into_database_table connection (Name_Generator.random_name "long-column-names-b") temporary=True + db_a = src_a.select_into_database_table data.connection (Name_Generator.random_name "long-column-names-a") temporary=True + db_b = src_b.select_into_database_table data.connection (Name_Generator.random_name "long-column-names-b") temporary=True Test.with_clue "zip test will have to be amended once it is implemented: " <| db_a.zip db_b . should_fail_with Unsupported_Database_Operation @@ -446,14 +454,14 @@ spec prefix connection = name_1 = "x" * (max_column_name_length + 1) + "A" name_2 = "x" * (max_column_name_length + 1) + "D" src = Table.new [["X", [name_1, "B", "C"]], ["Y", [name_2, "E", "F"]]] - db_table = src.select_into_database_table connection (Name_Generator.random_name "first-row-as-names") temporary=True primary_key=[] + db_table = src.select_into_database_table data.connection (Name_Generator.random_name "first-row-as-names") temporary=True primary_key=[] db_table.use_first_row_as_names . 
should_fail_with Unsupported_Database_Operation - Test.specify "should fail other operations that create new columns if the column names are too long" <| + group_builder.specify "should fail other operations that create new columns if the column names are too long" <| name_a = "x" * (max_column_name_length + 1) + "A" name_b = "x" * (max_column_name_length + 1) + "B" src = Table.new [["X", [name_a, name_b, name_a]], ["Y", [4, 5, 6]]] - db_table = src.select_into_database_table connection (Name_Generator.random_name "cross-tab") temporary=True primary_key=[] + db_table = src.select_into_database_table data.connection (Name_Generator.random_name "cross-tab") temporary=True primary_key=[] Test.with_clue "cross_tab test will have to be amended once it is implemented: " <| db_table.cross_tab group_by=[] name_column="X" . should_fail_with Unsupported_Database_Operation @@ -462,5 +470,8 @@ spec prefix connection = db_table.transpose attribute_column_name=name_a . should_fail_with Unsupported_Database_Operation db_table.transpose value_column_name=name_a . 
should_fail_with Unsupported_Database_Operation -main = Test_Suite.run_main <| - spec "[SQLite] " (Database.connect (SQLite In_Memory)) +main = + suite = Test.build suite_builder-> + add_specs suite_builder "[SQLite]" (_-> Database.connect (SQLite In_Memory)) + suite.run_with_filter + diff --git a/test/Table_Tests/src/Database/Postgres_Spec.enso b/test/Table_Tests/src/Database/Postgres_Spec.enso index 35f7acae68fd..25a3a33bdf0c 100644 --- a/test/Table_Tests/src/Database/Postgres_Spec.enso +++ b/test/Table_Tests/src/Database/Postgres_Spec.enso @@ -16,9 +16,8 @@ import Standard.Database.Internal.Replace_Params.Replace_Params from Standard.Database import all from Standard.Database.Errors import all -from Standard.Test import Test, Test_Suite, Problems -import Standard.Test.Extensions -import Standard.Test.Test_Environment +from Standard.Test_New import all +import Standard.Test_New.Test_Environment import project.Database.Common.Common_Spec import project.Database.Transaction_Spec @@ -32,34 +31,27 @@ from project.Database.Types.Postgres_Type_Mapping_Spec import default_text import enso_dev.Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup from enso_dev.Tests.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries -postgres_specific_spec connection db_name setup = - Test.group "[PostgreSQL] Schemas and Databases" <| - Test.specify "should be able to get current database and list databases" <| - connection.database . should_equal db_name - connection.databases.length . should_not_equal 0 - connection.databases.contains db_name . should_be_true - Meta.is_same_object connection (connection.set_database db_name) . should_be_true - - Test.specify "should be able to get current schema and list schemas" <| - connection.schema . should_equal "public" - connection.schemas.length . should_not_equal 0 - connection.schemas.contains "public" . should_be_true - connection.schemas.contains "information_schema" . 
should_be_true - Meta.is_same_object connection (connection.set_schema "public") . should_be_true - - Test.specify "should allow changing schema" <| - new_connection = connection.set_schema "information_schema" - new_schema = new_connection.read (SQL_Query.Raw_SQL "SELECT current_schema()") . at 0 . to_vector . first - new_schema . should_equal "information_schema" - databases = connection.databases.filter d->((d!=db_name) && (d!='rdsadmin')) - pending_database = if databases.length != 0 then Nothing else "Cannot test changing database unless two databases defined." - Test.specify "should allow changing database" pending=pending_database <| - new_connection = connection.set_database databases.first - new_database = new_connection.read (SQL_Query.Raw_SQL "SELECT current_database()") . at 0 . to_vector . first - new_database . should_equal databases.first +type Basic_Test_Data + Value ~connection + + setup create_connection_fn = + Basic_Test_Data.Value (create_connection_fn Nothing) + + teardown self = + self.connection.close + - Test.group "[PostgreSQL] Tables and Table Types" <| +type Postgres_Tables_Data + Value ~data + + connection self = self.data.at 0 + tinfo self = self.data.at 1 + vinfo self = self.data.at 2 + temporary_table self = self.data.at 3 + + setup create_connection_fn = Postgres_Tables_Data.Value <| + connection = create_connection_fn Nothing tinfo = Name_Generator.random_name "TestTable" connection.execute_update 'CREATE TABLE "'+tinfo+'" ("A" VARCHAR)' @@ -68,48 +60,141 @@ postgres_specific_spec connection db_name setup = temporary_table = Name_Generator.random_name "TemporaryTable" (Table.new [["X", [1, 2, 3]]]).select_into_database_table connection temporary_table temporary=True + [connection, tinfo, vinfo, temporary_table] + + teardown self = + self.connection.execute_update 'DROP VIEW "'+self.vinfo+'";' + self.connection.execute_update 'DROP TABLE "'+self.tinfo+'";' + self.connection.close + + +type Postgres_Info_Data + Value ~data + + 
connection self = self.data.at 0 + tinfo self = self.data.at 1 + t self = self.data.at 2 + + setup create_connection_fn = Postgres_Info_Data.Value <| + connection = create_connection_fn Nothing + tinfo = Name_Generator.random_name "Tinfo" + connection.execute_update 'CREATE TEMPORARY TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" BIGINT, "bools" BOOLEAN, "doubles" DOUBLE PRECISION)' + t = connection.query (SQL_Query.Table_Name tinfo) + row1 = ["a", Nothing, False, 1.2] + row2 = ["abc", Nothing, Nothing, 1.3] + row3 = ["def", 42, True, 1.4] + Panic.rethrow <| + t.update_rows (Table.from_rows ["strs", "ints", "bools", "doubles"] [row1, row2, row3]) update_action=Update_Action.Insert + [connection, tinfo, t] + + teardown self = + self.connection.execute_update 'DROP TABLE "'+self.tinfo+'"' + self.connection.close + + +type Postgres_Aggregate_Data + Value ~data + + connection self = self.data.at 0 + name self = self.data.at 1 + t self = self.data.at 2 + + setup create_connection_fn = Postgres_Aggregate_Data.Value <| + connection = create_connection_fn Nothing + name = Name_Generator.random_name "Ttypes" + connection.execute_update 'CREATE TEMPORARY TABLE "'+name+'" ("txt" VARCHAR, "i1" SMALLINT, "i2" INT, "i3" BIGINT, "i4" NUMERIC, "r1" REAL, "r2" DOUBLE PRECISION, "bools" BOOLEAN)' + t = connection.query (SQL_Query.Table_Name name) + [connection, name, t] + + teardown self = + self.connection.execute_update 'DROP TABLE "'+self.name+'"' + self.connection.close + + +postgres_specific_spec suite_builder create_connection_fn db_name setup = + table_builder = setup.table_builder + materialize = setup.materialize + + suite_builder.group "[PostgreSQL] Schemas and Databases" group_builder-> + data = Basic_Test_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + group_builder.specify "should be able to get current database and list databases" <| + data.connection.database . should_equal db_name + data.connection.databases.length . 
should_not_equal 0 + data.connection.databases.contains db_name . should_be_true + Meta.is_same_object data.connection (data.connection.set_database db_name) . should_be_true - Test.specify "should be able to list table types" <| - table_types = connection.table_types + group_builder.specify "should be able to get current schema and list schemas" <| + data.connection.schema . should_equal "public" + data.connection.schemas.length . should_not_equal 0 + data.connection.schemas.contains "public" . should_be_true + data.connection.schemas.contains "information_schema" . should_be_true + Meta.is_same_object data.connection (data.connection.set_schema "public") . should_be_true + + group_builder.specify "should allow changing schema" <| + new_connection = data.connection.set_schema "information_schema" + new_schema = new_connection.read (SQL_Query.Raw_SQL "SELECT current_schema()") . at 0 . to_vector . first + new_schema . should_equal "information_schema" + + group_builder.specify "should allow changing database" <| + databases = data.connection.databases.filter d->((d!=db_name) && (d!='rdsadmin')) + pending_database = if databases.length != 0 then Nothing else "Cannot test changing database unless two databases defined." + case pending_database of + Nothing -> + new_connection = data.connection.set_database databases.first + new_database = new_connection.read (SQL_Query.Raw_SQL "SELECT current_database()") . at 0 . to_vector . first + new_database . should_equal databases.first + # Nop - skip the test + _ -> Nothing + + suite_builder.group "[PostgreSQL] Tables and Table Types" group_builder-> + data = Postgres_Tables_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + group_builder.specify "should be able to list table types" <| + table_types = data.connection.table_types table_types.length . should_not_equal 0 table_types.contains "TABLE" . should_be_true table_types.contains "VIEW" . 
should_be_true - Test.specify "should be able to list tables" <| - tables = connection.tables + group_builder.specify "should be able to list tables" <| + tables = data.connection.tables tables.row_count . should_not_equal 0 tables.columns.map .name . should_equal ["Database", "Schema", "Name", "Type", "Description"] table_names = tables.at "Name" . to_vector - table_names.should_contain tinfo - table_names.should_contain vinfo - table_names.should_contain temporary_table + table_names.should_contain data.tinfo + table_names.should_contain data.vinfo + table_names.should_contain data.temporary_table - Test.specify "should be able to filter tables by name" <| - tables = connection.tables tinfo + group_builder.specify "should be able to filter tables by name" <| + tables = data.connection.tables data.tinfo tables.row_count . should_equal 1 ## The database check is disabled as the Postgres JDBC driver does not return the database name. ## tables.at "Database" . to_vector . at 0 . should_equal db_name tables.at "Schema" . to_vector . at 0 . should_equal "public" - tables.at "Name" . to_vector . at 0 . should_equal tinfo + tables.at "Name" . to_vector . at 0 . should_equal data.tinfo tables.at "Type" . to_vector . at 0 . should_equal "TABLE" - connection.tables "TestT_ble%" . row_count . should_equal 1 - connection.tables "Temporary%ble%" . row_count . should_equal 1 - connection.tables "Temporary%ble%" . at "Type" . to_vector . should_equal ["TEMPORARY TABLE"] - connection.tables "N_nexistent%" . row_count . should_equal 0 + data.connection.tables "TestT_ble%" . row_count . should_equal 1 + data.connection.tables "Temporary%ble%" . row_count . should_equal 1 + data.connection.tables "Temporary%ble%" . at "Type" . to_vector . should_equal ["TEMPORARY TABLE"] + data.connection.tables "N_nexistent%" . row_count . 
should_equal 0 - Test.specify "should be able to filter tables by type" <| - tables = connection.tables types=["VIEW"] + group_builder.specify "should be able to filter tables by type" <| + tables = data.connection.tables types=["VIEW"] tables.row_count . should_not_equal 0 - tables.at "Name" . to_vector . contains tinfo . should_be_false - tables.at "Name" . to_vector . contains vinfo . should_be_true + tables.at "Name" . to_vector . contains data.tinfo . should_be_false + tables.at "Name" . to_vector . contains data.vinfo . should_be_true - connection.execute_update 'DROP VIEW "'+vinfo+'";' - connection.execute_update 'DROP TABLE "'+tinfo+'";' - Test.group "[PostgreSQL] Database Encoding" <| - Test.specify "connector should support all known Postgres encodings" <| + suite_builder.group "[PostgreSQL] Database Encoding" group_builder-> + group_builder.specify "connector should support all known Postgres encodings" <| known_encodings = (enso_project.data / "postgres_known_encodings.txt") . read . lines known_encodings.length . 
should_equal 41 @@ -133,40 +218,35 @@ postgres_specific_spec connection db_name setup = False -> Problems.assume_no_problems encoding + suite_builder.group "[PostgreSQL] Info" group_builder-> + data = Postgres_Info_Data.setup create_connection_fn - tinfo = Name_Generator.random_name "Tinfo" - connection.execute_update 'CREATE TEMPORARY TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" BIGINT, "bools" BOOLEAN, "doubles" DOUBLE PRECISION)' - Test.group "[PostgreSQL] Info" <| - t = connection.query (SQL_Query.Table_Name tinfo) - row1 = ["a", Nothing, False, 1.2] - row2 = ["abc", Nothing, Nothing, 1.3] - row3 = ["def", 42, True, 1.4] - Panic.rethrow <| - t.update_rows (Table.from_rows ["strs", "ints", "bools", "doubles"] [row1, row2, row3]) update_action=Update_Action.Insert + group_builder.teardown <| + data.teardown - Test.specify "should return Table information" <| - i = t.info + group_builder.specify "should return Table information" <| + i = data.t.info i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "doubles"] i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3] i.at "Value Type" . to_vector . should_equal [default_text, Value_Type.Integer, Value_Type.Boolean, Value_Type.Float] - Test.specify "should return Table information, also for aggregated results" <| - i = t.aggregate [Concatenate "strs", Sum "ints", Count_Distinct "bools"] . info + group_builder.specify "should return Table information, also for aggregated results" <| + i = data.t.aggregate [Concatenate "strs", Sum "ints", Count_Distinct "bools"] . info i.at "Column" . to_vector . should_equal ["Concatenate strs", "Sum ints", "Count Distinct bools"] i.at "Items Count" . to_vector . should_equal [1, 1, 1] i.at "Value Type" . to_vector . should_equal [default_text, Value_Type.Decimal, Value_Type.Integer] - Test.specify "should infer standard types correctly" <| - t.at "strs" . value_type . is_text . should_be_true - t.at "ints" . value_type . is_integer . should_be_true - t.at "bools" . 
value_type . is_boolean . should_be_true - t.at "doubles" . value_type . is_floating_point . should_be_true + group_builder.specify "should infer standard types correctly" <| + data.t.at "strs" . value_type . is_text . should_be_true + data.t.at "ints" . value_type . is_integer . should_be_true + data.t.at "bools" . value_type . is_boolean . should_be_true + data.t.at "doubles" . value_type . is_floating_point . should_be_true - Test.specify "should preserve Postgres types when table is materialized, where possible" <| + group_builder.specify "should preserve Postgres types when table is materialized, where possible" <| name = Name_Generator.random_name "types-test" Problems.assume_no_problems <| - connection.execute_update 'CREATE TEMPORARY TABLE "'+name+'" ("int4" int4, "int2" int2, "txt-limited" varchar(10), "txt-fixed" char(3))' - t1 = connection.query (SQL_Query.Table_Name name) + data.connection.execute_update 'CREATE TEMPORARY TABLE "'+name+'" ("int4" int4, "int2" int2, "txt-limited" varchar(10), "txt-fixed" char(3))' + t1 = data.connection.query (SQL_Query.Table_Name name) t1.at "int4" . value_type . should_equal (Value_Type.Integer Bits.Bits_32) t1.at "int2" . value_type . should_equal (Value_Type.Integer Bits.Bits_16) t1.at "txt-limited" . value_type . should_equal (Value_Type.Char size=10 variable_length=True) @@ -178,32 +258,38 @@ postgres_specific_spec connection db_name setup = in_memory.at "txt-limited" . value_type . should_equal (Value_Type.Char size=10 variable_length=True) in_memory.at "txt-fixed" . value_type . 
should_equal (Value_Type.Char size=3 variable_length=False) - Test.group "[PostgreSQL] Dialect-specific codegen" <| - Test.specify "should generate queries for the Distinct operation" <| - t = connection.query (SQL_Query.Table_Name tinfo) + suite_builder.group "[PostgreSQL] Dialect-specific codegen" group_builder-> + data = Postgres_Info_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + group_builder.specify "should generate queries for the Distinct operation" <| + t = data.connection.query (SQL_Query.Table_Name data.tinfo) code_template = 'SELECT "{Tinfo}"."strs" AS "strs", "{Tinfo}"."ints" AS "ints", "{Tinfo}"."bools" AS "bools", "{Tinfo}"."doubles" AS "doubles" FROM (SELECT DISTINCT ON ("{Tinfo}_inner"."strs") "{Tinfo}_inner"."strs" AS "strs", "{Tinfo}_inner"."ints" AS "ints", "{Tinfo}_inner"."bools" AS "bools", "{Tinfo}_inner"."doubles" AS "doubles" FROM (SELECT "{Tinfo}"."strs" AS "strs", "{Tinfo}"."ints" AS "ints", "{Tinfo}"."bools" AS "bools", "{Tinfo}"."doubles" AS "doubles" FROM "{Tinfo}" AS "{Tinfo}") AS "{Tinfo}_inner") AS "{Tinfo}"' - expected_code = code_template.replace "{Tinfo}" tinfo + expected_code = code_template.replace "{Tinfo}" data.tinfo t.distinct ["strs"] . to_sql . prepare . 
should_equal [expected_code, []] - connection.execute_update 'DROP TABLE "'+tinfo+'"' - Test.group "[PostgreSQL] Table.aggregate should correctly infer result types" <| - name = Name_Generator.random_name "Ttypes" - connection.execute_update 'CREATE TEMPORARY TABLE "'+name+'" ("txt" VARCHAR, "i1" SMALLINT, "i2" INT, "i3" BIGINT, "i4" NUMERIC, "r1" REAL, "r2" DOUBLE PRECISION, "bools" BOOLEAN)' - t = connection.query (SQL_Query.Table_Name name) - Test.specify "Concatenate, Shortest and Longest" <| - r = t.aggregate [Concatenate "txt", Shortest "txt", Longest "txt"] + suite_builder.group "[PostgreSQL] Table.aggregate should correctly infer result types" group_builder-> + data = Postgres_Aggregate_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + group_builder.specify "Concatenate, Shortest and Longest" <| + r = data.t.aggregate [Concatenate "txt", Shortest "txt", Longest "txt"] r.columns.at 0 . value_type . should_equal default_text r.columns.at 1 . value_type . should_equal default_text r.columns.at 2 . value_type . should_equal default_text - Test.specify "Counts" <| - r = t.aggregate [Count, Count_Empty "txt", Count_Not_Empty "txt", Count_Distinct "i1", Count_Not_Nothing "i2", Count_Nothing "i3"] + group_builder.specify "Counts" <| + r = data.t.aggregate [Count, Count_Empty "txt", Count_Not_Empty "txt", Count_Distinct "i1", Count_Not_Nothing "i2", Count_Nothing "i3"] r.column_count . should_equal 6 r.columns.each column-> column.value_type . should_equal Value_Type.Integer - Test.specify "Sum" <| - r = t.aggregate [Sum "i1", Sum "i2", Sum "i3", Sum "i4", Sum "r1", Sum "r2"] + group_builder.specify "Sum" <| + r = data.t.aggregate [Sum "i1", Sum "i2", Sum "i3", Sum "i4", Sum "r1", Sum "r2"] r.columns.at 0 . value_type . should_equal Value_Type.Integer r.columns.at 1 . value_type . should_equal Value_Type.Integer r.columns.at 2 . value_type . 
should_equal Value_Type.Decimal @@ -211,8 +297,8 @@ postgres_specific_spec connection db_name setup = r.columns.at 4 . value_type . should_equal (Value_Type.Float Bits.Bits_32) r.columns.at 5 . value_type . should_equal (Value_Type.Float Bits.Bits_64) - Test.specify "Average" <| - r = t.aggregate [Average "i1", Average "i2", Average "i3", Average "i4", Average "r1", Average "r2"] + group_builder.specify "Average" <| + r = data.t.aggregate [Average "i1", Average "i2", Average "i3", Average "i4", Average "r1", Average "r2"] r.columns.at 0 . value_type . should_equal Value_Type.Decimal r.columns.at 1 . value_type . should_equal Value_Type.Decimal r.columns.at 2 . value_type . should_equal Value_Type.Decimal @@ -220,51 +306,58 @@ postgres_specific_spec connection db_name setup = r.columns.at 4 . value_type . should_equal Value_Type.Float r.columns.at 5 . value_type . should_equal Value_Type.Float - connection.execute_update 'DROP TABLE "'+name+'"' - Test.group "[PostgreSQL] Warning/Error handling" <| - Test.specify "query warnings should be propagated" <| + suite_builder.group "[PostgreSQL] Warning/Error handling" group_builder-> + data = Basic_Test_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + group_builder.specify "query warnings should be propagated" <| long_name = (Name_Generator.random_name "T") + ("a" * 100) - r = connection.execute_update 'CREATE TEMPORARY TABLE "'+long_name+'" ("A" VARCHAR)' + r = data.connection.execute_update 'CREATE TEMPORARY TABLE "'+long_name+'" ("A" VARCHAR)' w1 = Problems.expect_only_warning SQL_Warning r # The display text may itself be truncated, so we just check the first words. w1.to_display_text . should_contain "identifier" # And check the full message for words that could be truncated in short message. w1.message . 
should_contain "truncated to" - table = connection.query (SQL_Query.Raw_SQL 'SELECT 1 AS "'+long_name+'"') + table = data.connection.query (SQL_Query.Raw_SQL 'SELECT 1 AS "'+long_name+'"') w2 = Problems.expect_only_warning SQL_Warning table w2.message . should_contain "truncated" effective_name = table.column_names . at 0 effective_name . should_not_equal long_name long_name.should_contain effective_name - Test.specify "is capable of handling weird tables" <| - connection.execute_update 'CREATE TEMPORARY TABLE "empty-column-name" ("" VARCHAR)' . should_fail_with SQL_Error + group_builder.specify "is capable of handling weird tables" <| + data.connection.execute_update 'CREATE TEMPORARY TABLE "empty-column-name" ("" VARCHAR)' . should_fail_with SQL_Error Problems.assume_no_problems <| - connection.execute_update 'CREATE TEMPORARY TABLE "clashing-unicode-names" ("ś" VARCHAR, "s\u0301" INTEGER)' + data.connection.execute_update 'CREATE TEMPORARY TABLE "clashing-unicode-names" ("ś" VARCHAR, "s\u0301" INTEGER)' Problems.assume_no_problems <| - connection.execute_update 'INSERT INTO "clashing-unicode-names" VALUES (\'A\', 2)' - t2 = connection.query (SQL_Query.Table_Name "clashing-unicode-names") + data.connection.execute_update 'INSERT INTO "clashing-unicode-names" VALUES (\'A\', 2)' + t2 = data.connection.query (SQL_Query.Table_Name "clashing-unicode-names") Problems.expect_only_warning Duplicate_Output_Column_Names t2 t2.column_names . should_equal ["ś", "ś 1"] m2 = t2.read m2.at "ś" . to_vector . should_equal ["A"] m2.at "ś 1" . to_vector . should_equal [2] - r3 = connection.query 'SELECT 1 AS "A", 2 AS "A"' + r3 = data.connection.query 'SELECT 1 AS "A", 2 AS "A"' r3.should_fail_with Illegal_Argument r3.catch.cause . 
should_be_a Duplicate_Output_Column_Names - r4 = connection.query 'SELECT 1 AS ""' + r4 = data.connection.query 'SELECT 1 AS ""' r4.should_fail_with SQL_Error - table_builder = setup.table_builder - materialize = setup.materialize - Test.group "[PostgreSQL] Edge Cases" <| - Test.specify "materialize should respect the overridden type" <| - t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]] + suite_builder.group "[PostgreSQL] Edge Cases" group_builder-> + data = Basic_Test_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + group_builder.specify "materialize should respect the overridden type" <| + t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]] connection=data.connection t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . cast "B" (Value_Type.Char size=3 variable_length=False) x = t1.at "x" @@ -283,12 +376,12 @@ postgres_specific_spec connection db_name setup = Test.with_clue "d.value_type="+d.value_type.to_display_text+": " <| d.value_type.variable_length.should_be_true - Test.specify "should be able to round-trip a BigInteger column" <| + group_builder.specify "should be able to round-trip a BigInteger column" <| x = 2^70 m1 = Table.new [["X", [10, x]]] m1.at "X" . value_type . should_be_a (Value_Type.Decimal ...) - t1 = m1.select_into_database_table connection (Name_Generator.random_name "BigInteger") primary_key=[] temporary=True + t1 = m1.select_into_database_table data.connection (Name_Generator.random_name "BigInteger") primary_key=[] temporary=True t1.at "X" . value_type . should_be_a (Value_Type.Decimal ...) t1.at "X" . value_type . scale . 
should_equal 0 # If we want to enforce the scale, Postgres requires us to enforce a precision too, so we use the biggest one we can: @@ -325,7 +418,7 @@ postgres_specific_spec connection db_name setup = super_large = 11^2000 m3 = Table.new [["X", [super_large]]] m3.at "X" . value_type . should_be_a (Value_Type.Decimal ...) - t3 = m3.select_into_database_table connection (Name_Generator.random_name "BigInteger2") primary_key=[] temporary=True + t3 = m3.select_into_database_table data.connection (Name_Generator.random_name "BigInteger2") primary_key=[] temporary=True t3 . at "X" . value_type . should_be_a (Value_Type.Decimal ...) # If we exceed the 1000 digits precision, we cannot enforce neither scale nor precision anymore. t3 . at "X" . value_type . precision . should_equal Nothing @@ -344,9 +437,9 @@ postgres_specific_spec connection db_name setup = w4.requested_type . should_equal (Value_Type.Decimal precision=Nothing scale=Nothing) w4.actual_type . should_equal Value_Type.Float - Test.specify "should round-trip timestamptz column, preserving instant but converting to UTC" <| + group_builder.specify "should round-trip timestamptz column, preserving instant but converting to UTC" <| table_name = Name_Generator.random_name "TimestampTZ" - table = connection.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=True)] primary_key=[] + table = data.connection.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=True)] primary_key=[] dt1 = Date_Time.new 2022 05 04 15 30 dt2 = Date_Time.new 2022 05 04 15 30 zone=(Time_Zone.utc) @@ -377,9 +470,9 @@ postgres_specific_spec connection db_name setup = t2.row_count . should_equal local_equals.length t2.at "A" . to_vector . 
should_equal_tz_agnostic local_equals - Test.specify "will round-trip timestamp column without timezone by converting it to UTC" <| + group_builder.specify "will round-trip timestamp column without timezone by converting it to UTC" <| table_name = Name_Generator.random_name "Timestamp" - table = connection.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=False)] primary_key=[] + table = data.connection.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=False)] primary_key=[] Problems.assume_no_problems table dt1 = Date_Time.new 2022 05 04 15 30 @@ -439,9 +532,14 @@ postgres_specific_spec connection db_name setup = t2.row_count . should_equal 0 t2.at "A" . to_vector . should_equal [] - Test.group "[PostgreSQL] math functions" <| - Test.specify "round, trunc, ceil, floor" <| - col = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] . at "x" + suite_builder.group "[PostgreSQL] math functions" group_builder-> + data = Basic_Test_Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + group_builder.specify "round, trunc, ceil, floor" <| + col = (table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] connection=data.connection) . at "x" col . cast Value_Type.Integer . ceil . value_type . should_equal Value_Type.Float col . cast Value_Type.Float . round . value_type . should_equal Value_Type.Float @@ -468,70 +566,77 @@ postgres_specific_spec connection db_name setup = col . cast Value_Type.Integer . truncate . value_type . should_equal Value_Type.Float col . cast Value_Type.Decimal . truncate . value_type . 
should_equal Value_Type.Decimal - do_op n op = - table = table_builder [["x", [n]]] + do_op data n op = + table = table_builder [["x", [n]]] connection=data.connection result = table.at "x" |> op result.to_vector.at 0 - do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers) - Test.specify "Can round correctly near the precision limit" <| - do_round 1.2222222222222225 15 . should_equal 1.222222222222223 - do_round -1.2222222222222225 15 . should_equal -1.222222222222223 - do_round 1.2222222222222235 15 . should_equal 1.222222222222224 - do_round -1.2222222222222235 15 . should_equal -1.222222222222224 + do_round data n dp=0 use_bankers=False = do_op data n (_.round dp use_bankers) - Test.specify "Can round correctly near the precision limit, using banker's rounding" <| - do_round 1.2222222222222225 15 use_bankers=True . should_equal 1.222222222222222 - do_round -1.2222222222222225 15 use_bankers=True . should_equal -1.222222222222222 - do_round 1.2222222222222235 15 use_bankers=True . should_equal 1.222222222222224 - do_round -1.2222222222222235 15 use_bankers=True . should_equal -1.222222222222224 + group_builder.specify "Can round correctly near the precision limit" <| + do_round data 1.2222222222222225 15 . should_equal 1.222222222222223 + do_round data -1.2222222222222225 15 . should_equal -1.222222222222223 + do_round data 1.2222222222222235 15 . should_equal 1.222222222222224 + do_round data -1.2222222222222235 15 . should_equal -1.222222222222224 - Test.specify "Can handle NaN/Infinity" <| + group_builder.specify "Can round correctly near the precision limit, using banker's rounding" <| + do_round data 1.2222222222222225 15 use_bankers=True . should_equal 1.222222222222222 + do_round data -1.2222222222222225 15 use_bankers=True . should_equal -1.222222222222222 + do_round data 1.2222222222222235 15 use_bankers=True . should_equal 1.222222222222224 + do_round data -1.2222222222222235 15 use_bankers=True . 
should_equal -1.222222222222224 + + group_builder.specify "Can handle NaN/Infinity" <| nan_result = if setup.test_selection.is_nan_and_nothing_distinct then Number.nan else Nothing ops = [.round, .truncate, .ceil, .floor] ops.each op-> - do_op Number.nan op . should_equal nan_result - do_op Number.positive_infinity op . should_equal Number.positive_infinity - do_op Number.negative_infinity op . should_equal Number.negative_infinity - - Test.specify "round returns the correct type" <| - do_round 231.2 1 . should_be_a Float - do_round 231.2 0 . should_be_a Float - do_round 231.2 . should_be_a Float - do_round 231.2 -1 . should_be_a Float - - Test.specify "round returns the correct type" <| - do_round 231 1 . should_be_a Float - do_round 231 0 . should_be_a Float - do_round 231 . should_be_a Float - do_round 231 -1 . should_be_a Float - -run_tests connection db_name = + do_op data Number.nan op . should_equal nan_result + do_op data Number.positive_infinity op . should_equal Number.positive_infinity + do_op data Number.negative_infinity op . should_equal Number.negative_infinity + + group_builder.specify "round returns the correct type" <| + do_round data 231.2 1 . should_be_a Float + do_round data 231.2 0 . should_be_a Float + do_round data 231.2 . should_be_a Float + do_round data 231.2 -1 . should_be_a Float + + group_builder.specify "round returns the correct type" <| + do_round data 231 1 . should_be_a Float + do_round data 231 0 . should_be_a Float + do_round data 231 . should_be_a Float + do_round data 231 -1 . should_be_a Float + +add_postgres_specs suite_builder create_connection_fn db_name = prefix = "[PostgreSQL] " name_counter = Ref.new 0 - tables = Vector.new_builder - table_builder columns = + + table_builder columns connection=(create_connection_fn Nothing) = ix = name_counter.get name_counter . 
put ix+1 name = Name_Generator.random_name "table_"+ix.to_text in_mem_table = Table.new columns in_mem_table.select_into_database_table connection name primary_key=Nothing temporary=True + materialize = .read - Common_Spec.spec prefix connection + Common_Spec.add_specs suite_builder prefix create_connection_fn common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True supports_decimal_type=True supported_replace_params=supported_replace_params aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False agg_in_memory_table = (enso_project.data / "data.csv") . read - agg_table = agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True - tables.append agg_table.name - empty_agg_table = (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True - tables.append empty_agg_table.name - setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection connection=connection - postgres_specific_spec connection db_name setup - Common_Table_Operations.Main.spec setup + agg_table_fn = _-> + connection = create_connection_fn Nothing + agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True + + empty_agg_table_fn = _-> + connection = create_connection_fn Nothing + (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True 
+ + setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn + + postgres_specific_spec suite_builder create_connection_fn db_name setup + Common_Table_Operations.Main.add_specs suite_builder setup ## PRIVATE supported_replace_params : Set Replace_Params @@ -543,7 +648,7 @@ supported_replace_params = e4 = [Replace_Params.Value Column Case_Sensitivity.Default False, Replace_Params.Value Column Case_Sensitivity.Sensitive False] Set.from_vector <| e0 + e1 + e2 + e3 + e4 -table_spec = +add_table_specs suite_builder = db_name = Environment.get "ENSO_DATABASE_TEST_DB_NAME" db_host_port = (Environment.get "ENSO_DATABASE_TEST_HOST").if_nothing "localhost" . split ':' db_host = db_host_port.at 0 @@ -553,14 +658,15 @@ table_spec = ca_cert_file = Environment.get "ENSO_DATABASE_TEST_CA_CERT_FILE" ssl_pending = if ca_cert_file.is_nothing then "PostgreSQL SSL test not configured." else Nothing - Test.group "[PostgreSQL] SSL connectivity tests" pending=ssl_pending <| - Test.specify "should connect without ssl parameter" <| + + suite_builder.group "[PostgreSQL] SSL connectivity tests" pending=ssl_pending group_builder-> + group_builder.specify "should connect without ssl parameter" <| Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password)) . should_succeed - Test.specify "should connect, requiring SSL" <| + group_builder.specify "should connect, requiring SSL" <| Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=SSL_Mode.Require) . 
should_succeed - Test.specify "should connect be able to verify the certificate" <| + group_builder.specify "should connect be able to verify the certificate" <| Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Verify_CA ca_cert_file)) . should_succeed ## Default certificate should not accept the self signed certificate. @@ -568,13 +674,13 @@ table_spec = ca_fail.is_error . should_equal True ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True - Test.specify "should connect be able to verify the host name against the certificate" <| + group_builder.specify "should connect be able to verify the host name against the certificate" <| Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file)) . should_succeed alternate_host = Environment.get "ENSO_DATABASE_TEST_ALTERNATE_HOST" . if_nothing <| if db_host == "127.0.0.1" then "localhost" else Nothing pending_alternate = if alternate_host.is_nothing then "Alternative host name not configured." else Nothing - Test.specify "should fail to connect with alternate host name not valid in certificate" pending=pending_alternate <| + group_builder.specify "should fail to connect with alternate host name not valid in certificate" pending=pending_alternate <| ca_fail = Database.connect (Postgres alternate_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file)) ca_fail.is_error . should_equal True ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True @@ -582,31 +688,31 @@ table_spec = case create_connection_builder of Nothing -> message = "PostgreSQL test database is not configured. See README.md for instructions." 
- Test.group "[PostgreSQL] Database tests" pending=message Nothing + Test.group "[PostgreSQL] Database tests" pending=message (_-> Nothing) connection_builder -> - connection = connection_builder Nothing - run_tests connection db_name - Postgres_Type_Mapping_Spec.spec connection - - Transaction_Spec.spec connection "[PostgreSQL] " - Upload_Spec.spec connection_builder "[PostgreSQL] " - - cloud_setup = Cloud_Tests_Setup.prepare - cloud_setup.with_prepared_environment <| Test.group "[PostgreSQL] Secrets in connection settings" <| - Test.specify "should allow to set up a connection with the password passed as a secret" <| - with_secret "my_postgres_username" db_user username_secret-> with_secret "my_postgres_password" db_password password_secret-> - my_secret_name = "Enso Test: My Secret App NAME " + (Random.uuid.take 5) - with_secret "my_postgres_app_name" my_secret_name app_name_secret-> with_retries <| - details = Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password username_secret password_secret) - # We set the ApplicationName option, so that we can see that secrets can be used in custom properties. - options = Connection_Options.Value [["ApplicationName", app_name_secret]] - connection = Database.connect details options - connection.should_succeed - Panic.with_finalizer connection.close <| - connection.tables . should_be_a Table - table = connection.read "SELECT application_name FROM pg_stat_activity" - application_names = table.at 0 . 
to_vector - application_names.should_contain my_secret_name + add_postgres_specs suite_builder connection_builder db_name + Postgres_Type_Mapping_Spec.add_specs suite_builder connection_builder + + Transaction_Spec.add_specs suite_builder connection_builder "[PostgreSQL] " + Upload_Spec.add_specs suite_builder connection_builder "[PostgreSQL] " + + suite_builder.group "[PostgreSQL] Secrets in connection settings" group_builder-> + group_builder.specify "should allow to set up a connection with the password passed as a secret" <| + cloud_setup = Cloud_Tests_Setup.prepare + cloud_setup.with_prepared_environment <| + with_secret "my_postgres_username" db_user username_secret-> with_secret "my_postgres_password" db_password password_secret-> + my_secret_name = "Enso Test: My Secret App NAME " + (Random.uuid.take 5) + with_secret "my_postgres_app_name" my_secret_name app_name_secret-> with_retries <| + details = Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password username_secret password_secret) + # We set the ApplicationName option, so that we can see that secrets can be used in custom properties. + options = Connection_Options.Value [["ApplicationName", app_name_secret]] + connection = Database.connect details options + connection.should_succeed + Panic.with_finalizer connection.close <| + connection.tables . should_be_a Table + table = connection.read "SELECT application_name FROM pg_stat_activity" + application_names = table.at 0 . 
to_vector + application_names.should_contain my_secret_name with_secret name value callback = secret = Enso_Secret.create name+Random.uuid value @@ -625,14 +731,15 @@ create_connection_builder = True -> Nothing False -> _ -> Panic.rethrow <| + # TODO: Ensure that this returns a function and does not initialize the connection Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password)) pgpass_file = enso_project.data / "pgpass.conf" -pgpass_spec = Test.group "[PostgreSQL] .pgpass" <| +add_pgpass_specs suite_builder = suite_builder.group "[PostgreSQL] .pgpass" group_builder-> make_pair username password = [Pair.new "user" username, Pair.new "password" password] - Test.specify "should correctly parse the file, including escapes, blank lines and comments" <| + group_builder.specify "should correctly parse the file, including escapes, blank lines and comments" <| result = Pgpass.parse_file pgpass_file result.length . should_equal 12 e1 = Pgpass.Pgpass_Entry.Value "localhost" "5432" "postgres" "postgres" "postgres" @@ -651,7 +758,7 @@ pgpass_spec = Test.group "[PostgreSQL] .pgpass" <| result.should_equal entries if Platform.is_unix then - Test.specify "should only accept the .pgpass file if it has correct permissions" <| + group_builder.specify "should only accept the .pgpass file if it has correct permissions" <| Process.run "chmod" ["0777", pgpass_file.absolute.path] . exit_code . should_equal Exit_Code.Success Test_Environment.unsafe_with_environment_override "PGPASSFILE" (pgpass_file.absolute.path) <| Pgpass.verify pgpass_file . should_equal False @@ -662,7 +769,7 @@ pgpass_spec = Test.group "[PostgreSQL] .pgpass" <| Pgpass.verify pgpass_file . should_equal True Pgpass.read "passwords should preserve leading space" "1" "some database name that is really : weird" . 
should_equal (make_pair "*" " pass") - Test.specify "should correctly match wildcards and use the first matching entry" <| + group_builder.specify "should correctly match wildcards and use the first matching entry" <| Test_Environment.unsafe_with_environment_override "PGPASSFILE" (pgpass_file.absolute.path) <| Pgpass.read "localhost" 5432 "postgres" . should_equal (make_pair "postgres" "postgres") Pgpass.read "192.168.4.0" "1234" "foo" . should_equal (make_pair "bar" "baz") @@ -673,8 +780,8 @@ pgpass_spec = Test.group "[PostgreSQL] .pgpass" <| Pgpass.read "\:" "1234" "blah" . should_equal (make_pair "*" "\:") Pgpass.read ":" ":" ":" . should_equal (make_pair ":" ":") -connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <| - Test.specify "should use environment variables as host, port and database defaults and fall back to hardcoded defaults" <| +add_connection_setup_specs suite_builder = suite_builder.group "[PostgreSQL] Connection setup" group_builder-> + group_builder.specify "should use environment variables as host, port and database defaults and fall back to hardcoded defaults" <| c1 = Postgres "example.com" 12345 "my_db" c2 = Postgres c3 = Test_Environment.unsafe_with_environment_override "PGHOST" "192.168.0.1" <| @@ -707,12 +814,12 @@ connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <| c4.jdbc_url . should_equal "jdbc:postgresql://localhost:5432/postgres" add_ssl props = props+[Pair.new 'sslmode' 'prefer'] - Test.specify "should use the given credentials" <| + group_builder.specify "should use the given credentials" <| c = Postgres credentials=(Credentials.Username_And_Password "myuser" "mypass") c.jdbc_url . should_equal "jdbc:postgresql://localhost:5432/postgres" c.jdbc_properties . 
should_equal <| add_ssl [Pair.new "user" "myuser", Pair.new "password" "mypass"] - Test.specify "should fallback to environment variables and fill-out missing information based on the PGPASS file (if available)" <| + group_builder.specify "should fallback to environment variables and fill-out missing information based on the PGPASS file (if available)" <| c1 = Postgres c1.jdbc_url . should_equal "jdbc:postgresql://localhost:5432/postgres" @@ -751,9 +858,13 @@ connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <| [c2, c3, c4].each c-> c.jdbc_properties . should_equal <| add_ssl [Pair.new "user" "other user", Pair.new "password" "other password"] -spec = - table_spec - pgpass_spec - connection_setup_spec +add_specs suite_builder = + add_table_specs suite_builder + add_pgpass_specs suite_builder + add_connection_setup_specs suite_builder + +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter -main = Test_Suite.run_main spec diff --git a/test/Table_Tests/src/Database/Types/Postgres_Type_Mapping_Spec.enso b/test/Table_Tests/src/Database/Types/Postgres_Type_Mapping_Spec.enso index 6e5b669f5b7d..7697ac24e704 100644 --- a/test/Table_Tests/src/Database/Types/Postgres_Type_Mapping_Spec.enso +++ b/test/Table_Tests/src/Database/Types/Postgres_Type_Mapping_Spec.enso @@ -7,24 +7,33 @@ from Standard.Table.Errors import Inexact_Type_Coercion from Standard.Database import all -from Standard.Test import Problems, Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all + import project.Database.Helpers.Name_Generator from project.Database.Postgres_Spec import create_connection_builder -spec connection = - make_table prefix columns = +type Data + Value ~connection + + setup create_connection_fn = + Data.Value (create_connection_fn Nothing) + + make_table self prefix columns = name = Name_Generator.random_name prefix column_exprs = columns.map col_def-> col_def.first + " " + col_def.second stmt = 
'CREATE TEMPORARY TABLE "'+name+'" ('+(column_exprs.join ', ')+');' - Problems.assume_no_problems <| connection.execute_update stmt - connection.query (SQL_Query.Table_Name name) + Problems.assume_no_problems <| self.connection.execute_update stmt + self.connection.query (SQL_Query.Table_Name name) + + +add_specs suite_builder create_connection_fn = + data = Data.setup create_connection_fn - Test.group "[PostgreSQL] Type Mapping" <| - Test.specify "numeric" <| - t = make_table "ints" [["a", "smallint"], ["b", "int2"], ["c", "int"], ["d", "integer"], ["e", "int4"], ["f", "bigint"], ["g", "int8"]] + suite_builder.group "[PostgreSQL] Type Mapping" group_builder-> + group_builder.specify "numeric" <| + t = data.make_table "ints" [["a", "smallint"], ["b", "int2"], ["c", "int"], ["d", "integer"], ["e", "int4"], ["f", "bigint"], ["g", "int8"]] t.at "a" . value_type . should_equal (Value_Type.Integer Bits.Bits_16) t.at "b" . value_type . should_equal (Value_Type.Integer Bits.Bits_16) t.at "c" . value_type . should_equal (Value_Type.Integer Bits.Bits_32) @@ -33,13 +42,13 @@ spec connection = t.at "f" . value_type . should_equal (Value_Type.Integer Bits.Bits_64) t.at "g" . value_type . should_equal (Value_Type.Integer Bits.Bits_64) - t2 = make_table "floats" [["a", "real"], ["b", "float4"], ["c", "double precision"], ["d", "float8"]] + t2 = data.make_table "floats" [["a", "real"], ["b", "float4"], ["c", "double precision"], ["d", "float8"]] t2.at "a" . value_type . should_equal (Value_Type.Float Bits.Bits_32) t2.at "b" . value_type . should_equal (Value_Type.Float Bits.Bits_32) t2.at "c" . value_type . should_equal (Value_Type.Float Bits.Bits_64) t2.at "d" . value_type . 
should_equal (Value_Type.Float Bits.Bits_64) - t3 = make_table "decimals" [["a", "decimal"], ["b", "numeric"], ["c", "decimal(10, 2)"], ["d", "numeric(20, 4)"], ["e", "decimal(10)"], ["f", "numeric(20)"]] + t3 = data.make_table "decimals" [["a", "decimal"], ["b", "numeric"], ["c", "decimal(10, 2)"], ["d", "numeric(20, 4)"], ["e", "decimal(10)"], ["f", "numeric(20)"]] t3.at "a" . value_type . should_equal Value_Type.Decimal t3.at "b" . value_type . should_equal Value_Type.Decimal t3.at "c" . value_type . should_equal (Value_Type.Decimal precision=10 scale=2) @@ -47,41 +56,41 @@ spec connection = t3.at "e" . value_type . should_equal (Value_Type.Decimal precision=10 scale=0) t3.at "f" . value_type . should_equal (Value_Type.Decimal precision=20 scale=0) - Test.specify "text" <| - t = make_table "texts" [["a", "char(10)"], ["b", "varchar"], ["c", "varchar(20)"], ["d", "text"]] + group_builder.specify "text" <| + t = data.make_table "texts" [["a", "char(10)"], ["b", "varchar"], ["c", "varchar(20)"], ["d", "text"]] t.at "a" . value_type . should_equal (Value_Type.Char size=10 variable_length=False) t.at "b" . value_type . should_equal default_text t.at "c" . value_type . should_equal (Value_Type.Char size=20 variable_length=True) t.at "d" . value_type . should_equal default_text - Test.specify "binary" <| + group_builder.specify "binary" <| # Postgres does not support a BLOB type, it has `bytea` instead. - t = make_table "binaries" [["a", "bytea"]] + t = data.make_table "binaries" [["a", "bytea"]] t.at "a" . value_type . should_equal (Value_Type.Binary size=max_int4 variable_length=True) - Test.specify "datetime" <| - t = make_table "dates" [["a", "date"]] + group_builder.specify "datetime" <| + t = data.make_table "dates" [["a", "date"]] t.at "a" . value_type . 
should_equal Value_Type.Date - t2 = make_table "times" [["a", "time"], ["b", "timetz"], ["c", "time without time zone"], ["d", "time with time zone"]] + t2 = data.make_table "times" [["a", "time"], ["b", "timetz"], ["c", "time without time zone"], ["d", "time with time zone"]] t2.at "a" . value_type . should_equal Value_Type.Time t2.at "b" . value_type . should_equal Value_Type.Time t2.at "c" . value_type . should_equal Value_Type.Time t2.at "d" . value_type . should_equal Value_Type.Time - t3 = make_table "timestamps" [["a", "timestamp"], ["b", "timestamptz"], ["c", "timestamp without time zone"], ["d", "timestamp with time zone"]] + t3 = data.make_table "timestamps" [["a", "timestamp"], ["b", "timestamptz"], ["c", "timestamp without time zone"], ["d", "timestamp with time zone"]] t3.at "a" . value_type . should_equal (Value_Type.Date_Time with_timezone=False) t3.at "b" . value_type . should_equal (Value_Type.Date_Time with_timezone=True) t3.at "c" . value_type . should_equal (Value_Type.Date_Time with_timezone=False) t3.at "d" . value_type . should_equal (Value_Type.Date_Time with_timezone=True) - Test.specify "boolean" <| - t = make_table "bools" [["a", "boolean"], ["b", "bool"]] + group_builder.specify "boolean" <| + t = data.make_table "bools" [["a", "boolean"], ["b", "bool"]] t.at "a" . value_type . should_equal Value_Type.Boolean t.at "b" . value_type . should_equal Value_Type.Boolean - Test.specify "should correctly handle types through operations" <| - t = make_table "foo" [["a", "int2"], ["b", "text"], ["c", "boolean"], ["d", "double precision"], ["e", "int4"]] + group_builder.specify "should correctly handle types through operations" <| + t = data.make_table "foo" [["a", "int2"], ["b", "text"], ["c", "boolean"], ["d", "double precision"], ["e", "int4"]] t.evaluate_expression 'starts_with([b], "1")' . value_type . should_equal Value_Type.Boolean t.evaluate_expression '[a] * [d]' . value_type . 
should_equal (Value_Type.Float Bits.Bits_64) @@ -106,33 +115,33 @@ spec connection = t2.at "First c" . value_type . should_equal Value_Type.Boolean # Postgres does not try to be clever and two fixed-length columns concatenated get promoted to a varying column. - t3 = make_table "foo2" [["a", "char(5)"], ["b", "char(8)"]] + t3 = data.make_table "foo2" [["a", "char(5)"], ["b", "char(8)"]] t3.evaluate_expression '[a] + [b]' . value_type . should_equal default_text - Test.specify "other types" <| - t = make_table "other" [["a", "box"], ["b", "polygon"]] + group_builder.specify "other types" <| + t = data.make_table "other" [["a", "box"], ["b", "polygon"]] t.at "a" . value_type . should_be_a (Value_Type.Unsupported_Data_Type ...) t.at "a" . value_type . type_name . should_equal "box" t.at "b" . value_type . should_be_a (Value_Type.Unsupported_Data_Type ...) t.at "b" . value_type . type_name . should_equal "polygon" # Bit strings are not supported by Enso tables at the moment. - t2 = make_table "bit strings" [["a", "bit(2)"], ["b", "bit varying"], ["c", "bit varying(10)"]] + t2 = data.make_table "bit strings" [["a", "bit(2)"], ["b", "bit varying"], ["c", "bit varying(10)"]] t2.at "a" . value_type . should_be_a (Value_Type.Unsupported_Data_Type ...) t2.at "a" . value_type . type_name . should_equal "bit" t2.at "b" . value_type . should_be_a (Value_Type.Unsupported_Data_Type ...) t2.at "c" . value_type . should_be_a (Value_Type.Unsupported_Data_Type ...) - Test.specify "should approximate types to the closest supported one" <| - t = make_table "T" [["b", "INT"]] + group_builder.specify "should approximate types to the closest supported one" <| + t = data.make_table "T" [["b", "INT"]] t2 = t.cast "b" Value_Type.Byte t2.at "b" . value_type . 
should_equal (Value_Type.Integer Bits.Bits_16) Problems.expect_warning Inexact_Type_Coercion t2 - Test.group "[PostgreSQL] Type Edge Cases" <| - Test.specify "will fail to read a BigDecimal column and suggest to cast it to Float" <| + suite_builder.group "[PostgreSQL] Type Edge Cases" group_builder-> + group_builder.specify "will fail to read a BigDecimal column and suggest to cast it to Float" <| table_name = Name_Generator.random_name "BigDecimal" - table = connection.create_table table_name [Column_Description.Value "B" (Value_Type.Decimal precision=100 scale=5)] primary_key=[] + table = data.connection.create_table table_name [Column_Description.Value "B" (Value_Type.Decimal precision=100 scale=5)] primary_key=[] Problems.assume_no_problems table Problems.expect_only_warning Inexact_Type_Coercion <| @@ -147,20 +156,20 @@ spec connection = w2.requested_type . should_equal (Value_Type.Decimal precision=100 scale=5) w2.actual_type . should_equal Value_Type.Float - Test.specify "should warn when fetching a Binary column and coercing it to Mixed because in-memory does not support Binary" <| + group_builder.specify "should warn when fetching a Binary column and coercing it to Mixed because in-memory does not support Binary" <| table_name = Name_Generator.random_name "Bin" - table = connection.create_table table_name [Column_Description.Value "B" (Value_Type.Binary size=10)] primary_key=[] + table = data.connection.create_table table_name [Column_Description.Value "B" (Value_Type.Binary size=10)] primary_key=[] w0 = Problems.expect_only_warning Inexact_Type_Coercion table w0.requested_type . should_equal (Value_Type.Binary size=10) w0.actual_type . 
should_equal (Value_Type.Binary variable_length=True size=2147483647) table_clean = table.remove_warnings Problems.assume_no_problems <| - table_clean.update_rows (connection.query 'SELECT decode(\'ffff\', \'hex\') AS "B"') update_action=Update_Action.Insert + table_clean.update_rows (data.connection.query 'SELECT decode(\'ffff\', \'hex\') AS "B"') update_action=Update_Action.Insert Problems.assume_no_problems <| - table_clean.update_rows (connection.query 'SELECT decode(\'caffee\', \'hex\') AS "B"') update_action=Update_Action.Insert + table_clean.update_rows (data.connection.query 'SELECT decode(\'caffee\', \'hex\') AS "B"') update_action=Update_Action.Insert Problems.assume_no_problems <| - table_clean.update_rows (connection.query 'SELECT decode(\'beef\', \'hex\') AS "B"') update_action=Update_Action.Insert + table_clean.update_rows (data.connection.query 'SELECT decode(\'beef\', \'hex\') AS "B"') update_action=Update_Action.Insert materialized_table = table_clean.read materialized_table.at "B" . value_type . should_equal Value_Type.Mixed @@ -174,9 +183,11 @@ spec connection = beef = [-66, -17] materialized_table.at "B" . to_vector . 
should_equal [ffff, caffee, beef] -main = Test_Suite.run_main <| - connection = create_connection_builder Nothing - spec connection +main = + create_connection_fn = _-> create_connection_builder Nothing + suite = Test.build suite_builder-> + add_specs suite_builder create_connection_fn + suite.run_with_filter max_int4 = 2147483647 default_text = Value_Type.Char size=Nothing variable_length=True From 9955e637ce63fdda5f199e71ad3eb8aec4670e3f Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Mon, 22 Jan 2024 18:47:31 +0100 Subject: [PATCH 63/93] Add teardowns to Column_Operations_Spec --- .../Column_Operations_Spec.enso | 613 +++++++++++++----- .../src/Common_Table_Operations/Util.enso | 14 +- test/Tests/src/Data/Round_Spec.enso | 556 ++++++++-------- 3 files changed, 744 insertions(+), 439 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso index 5de441273ec5..b11557b90fc6 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso @@ -12,6 +12,7 @@ from Standard.Table.Data.Type.Value_Type import Bits from Standard.Table.Errors import all from Standard.Database.Errors import all +import Standard.Database.Data.Table.Table as Database_Table from Standard.Test_New import all @@ -22,19 +23,164 @@ from project.Common_Table_Operations.Util import run_default_backend main = run_default_backend add_specs +type Data + Value ~connection + + setup create_connection_fn = Data.Value <| + connection = create_connection_fn Nothing + connection + + teardown self = + self.connection.close + +type Arithmetic_Data + Value ~data + + connection self = self.data.at 0 + x self = self.data.at 1 + y self = self.data.at 2 + + setup create_connection_fn table_builder = Arithmetic_Data.Value <| + connection = create_connection_fn Nothing + t2 = table_builder [["x", [1, 4, 
5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]]] connection=connection + x = t2.at "x" + y = t2.at "y" + [connection, x, y] + + teardown self = + self.connection.close + +type Min_Max_Data + Value ~data + + connection self = self.data.at 0 + a self = self.data.at 1 + b self = self.data.at 2 + c self = self.data.at 3 + t self = self.data.at 4 + + setup create_connection_fn table_builder = Min_Max_Data.Value <| + connection = create_connection_fn Nothing + t = table_builder [["a", [1, 2, 3]], ["b", [4.5, 5.5, 6.5]], ["c", ['a', 'b', 'c']], ["d", [True, False, True]]] connection=connection + a = t.at "a" + b = t.at "b" + c = t.at "c" + [connection, a, b, c, t] + + teardown self = + drop_table self.connection self.t + self.connection.close + +type Literal_Data + Value ~data + + connection self = self.data.at 0 + col0 self = self.data.at 1 + col1 self = self.data.at 2 + + setup create_connection_fn table_builder = Literal_Data.Value <| + connection = create_connection_fn Nothing + col0 = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO']]] . at "x" + col1 = table_builder [["x", ['a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']]] . 
at "x" + [connection, col0, col1] + + teardown self = + self.connection.close + +type Replace_Data + Value ~data + + connection self = self.data.at 0 + col self = self.data.at 1 + patterns self = self.data.at 2 + replacements self = self.data.at 3 + + setup create_connection_fn table_builder = Replace_Data.Value <| + connection = create_connection_fn Nothing + table = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO', 'a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']], ["patterns", ['hello', 'hello', 'hello', 'a[bcd]', 'a[bcd]']], ["replacements", ['bye', 'bye', 'bye', 'hey', 'hey']]] + col = table.at "x" + patterns = table.at "patterns" + replacements = table.at "replacements" + [connection, col, patterns, replacements] + + teardown self = + self.connection.close + + +type Text_Replace_Data + Value ~data + + connection self = self.data.at 0 + a self = self.data.at 1 + b self = self.data.at 2 + c self = self.data.at 3 + d self = self.data.at 4 + + setup create_connection_fn table_builder = Text_Replace_Data.Value <| + connection = create_connection_fn Nothing + t4 = table_builder [["A", ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]], ["B", ["A","O","a","E","o","O"]], ["C", [1,2,3,4,5,6]], ["D", ['',Nothing,'',Nothing,'','']]] connection=connection + a = t4.at "A" + b = t4.at "B" + c = t4.at "C" + d = t4.at "D" + [connection, a, b, c, d] + + teardown self = + self.connection.close + + +type Trim_Data + Value ~data + + connection self = self.data.at 0 + a self = self.data.at 1 + b self = self.data.at 2 + c self = self.data.at 3 + + setup create_connection_fn table_builder = Trim_Data.Value <| + connection = create_connection_fn Nothing + table = table_builder [["A", [" A ", ' \t\n\rA\r\n\t ', "xxxAxx"]], ["B", [" ",' \t',"x"]], ["C", [1,2,3]]] + a = table.at "A" + b = table.at "B" + c = table.at "C" + + [connection, a, b, c] + + teardown self = + self.connection.close + + +type Names_Data + Value ~data + + connection self = self.data.at 0 + t 
self = self.data.at 1 + + setup create_connection_fn table_builder = Names_Data.Value <| + connection = create_connection_fn Nothing + t = table_builder [["a", [1, 2, 3]], ["b", ['x', 'y', 'z']], ["c", [1.0, 2.0, 3.0]], ["d", [True, False, True]]] + [connection, t] + + teardown self = + self.connection.close + + add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend." - do_op n op = - table = table_builder [["x", [n]]] - result = table.at "x" |> op - result.to_vector.at 0 + suite_builder.group prefix+"Rounding numeric tests" group_builder-> + data = Data.setup create_connection_fn - do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers) + do_op n op = + table = setup.table_builder [["x", [n]]] connection=data.connection + result = table.at "x" |> op + result.to_vector.at 0 - Round_Spec.add_specs suite_builder prefix do_round + do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers) + + Round_Spec.add_specs group_builder do_round ## Runs the provided callback with a few combinations of columns, where some of them are made Mixed (but still contain only the original values). @@ -52,18 +198,26 @@ add_specs suite_builder setup = Test.with_clue clue <| callback table case setup.test_selection.supports_mixed_columns of - False -> callback_with_clue (table_builder table_structure) + False -> callback_with_clue (setup.table_builder table_structure) True -> all_combinations (Vector.fill table_structure.length [Nothing, Mixed_Type_Object.Value]) . 
each combination-> amended_table_structure = table_structure.zip combination column_definition-> prefix-> name = column_definition.first values = column_definition.second [name, [prefix]+values] - mixed_table = table_builder amended_table_structure + mixed_table = setup.table_builder amended_table_structure aligned_table = mixed_table.drop 1 callback_with_clue aligned_table suite_builder.group prefix+"Boolean Column Operations" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "iif" <| t = table_builder [["X", [True, False, Nothing, True]]] c1 = t.at "X" . iif 22 33 @@ -216,6 +370,14 @@ add_specs suite_builder setup = ((t.at "Y") . iif 10 20) . should_fail_with Invalid_Value_Type suite_builder.group prefix+"Column Operations - Equality & Null Handling" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should provide basic == and != comparisons" pending="TODO figure out proper null handling" <| with_mixed_columns_if_supported [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]]] t2-> x = t2.at "x" @@ -294,6 +456,14 @@ add_specs suite_builder setup = Problems.expect_warning Floating_Point_Equality r2 suite_builder.group prefix+"Column.fill_nothing/empty" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow to fill_nothing from a value" <| col = table_builder [["col", [0, Nothing, 4, 5, Nothing, Nothing]]] . at "col" default = 300 @@ -321,6 +491,14 @@ add_specs suite_builder setup = r.name . 
should_equal "col" suite_builder.group prefix+"Table.fill_nothing/empty" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow to fill_nothing from a value" <| t = table_builder [["col0", [0, Nothing, 4, 5, Nothing, Nothing]], ["col1", [Nothing, 200, Nothing, 400, 500, Nothing]]] default = 1000 @@ -419,6 +597,14 @@ add_specs suite_builder setup = actual.column_names . should_equal ["col0", "col_between", "col1", "def"] suite_builder.group prefix+"Table.text_replace" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow to replace values in a table" <| with_mixed_columns_if_supported [["col0", ["abc", "def", "ghi"]], ["col1", ["nabc", "ndef", "asdf"]]] t-> actual = t.text_replace ["col0", "col1"] "ab" "xy" @@ -456,6 +642,14 @@ add_specs suite_builder setup = t1.at "txt" . to_vector . should_equal ["aXc", "Yef", "ghZ"] suite_builder.group prefix+"Column Comparisons" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow to compare numbers" <| with_mixed_columns_if_supported [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]]] t2-> x = t2.at "x" @@ -564,18 +758,23 @@ add_specs suite_builder setup = ((t.at "A").between (t.at "B") (t.at "C")) . to_vector . 
should_equal r suite_builder.group prefix+"Arithmetic Column Operations" group_builder-> - t2 = table_builder [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]]] - x = t2.at "x" - y = t2.at "y" + data = Arithmetic_Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow basic operations" <| - (x + y).to_vector . should_equal [3, 7.25, 10, Nothing] - (x - y).to_vector . should_equal [-1.0, 0.75, 0.0, Nothing] - (x * y).to_vector . should_equal [2.0, 13.0, 25.0, Nothing] + (data.x + data.y).to_vector . should_equal [3, 7.25, 10, Nothing] + (data.x - data.y).to_vector . should_equal [-1.0, 0.75, 0.0, Nothing] + (data.x * data.y).to_vector . should_equal [2.0, 13.0, 25.0, Nothing] group_builder.specify "should allow combining a column with a scalar" <| - (x + 100).to_vector . should_equal [101, 104, 105, Nothing] - (x * 10).to_vector . should_equal [10, 40, 50, Nothing] - (x - 10).to_vector . should_equal [-9, -6, -5, Nothing] + (data.x + 100).to_vector . should_equal [101, 104, 105, Nothing] + (data.x * 10).to_vector . should_equal [10, 40, 50, Nothing] + (data.x - 10).to_vector . should_equal [-9, -6, -5, Nothing] group_builder.specify "should work with mixed types" <| with_mixed_columns_if_supported [["X", [100, 25]], ["Y", [2, 5]]] t-> @@ -589,22 +788,22 @@ add_specs suite_builder setup = (x ^ y).to_vector . should_equal [10000, 9765625] group_builder.specify "should correctly infer the types" <| - (x + x).value_type . is_integer . should_be_true - (x + y).value_type . is_floating_point . should_be_true - (x + 2).value_type . is_integer . should_be_true - (x + 1.5).value_type . is_floating_point . should_be_true + (data.x + data.x).value_type . is_integer . should_be_true + (data.x + data.y).value_type . is_floating_point . should_be_true + (data.x + 2).value_type . is_integer . 
should_be_true + (data.x + 1.5).value_type . is_floating_point . should_be_true - (x - x).value_type . is_integer . should_be_true - (x - y).value_type . is_floating_point . should_be_true - (x - 2).value_type . is_integer . should_be_true - (x - 1.5).value_type . is_floating_point . should_be_true + (data.x - data.x).value_type . is_integer . should_be_true + (data.x - data.y).value_type . is_floating_point . should_be_true + (data.x - 2).value_type . is_integer . should_be_true + (data.x - 1.5).value_type . is_floating_point . should_be_true - (x * x).value_type . is_integer . should_be_true - (x * y).value_type . is_floating_point . should_be_true - (x * 2).value_type . is_integer . should_be_true - (x * 1.5).value_type . is_floating_point . should_be_true + (data.x * data.x).value_type . is_integer . should_be_true + (data.x * data.y).value_type . is_floating_point . should_be_true + (data.x * 2).value_type . is_integer . should_be_true + (data.x * 1.5).value_type . is_floating_point . should_be_true - (x ^ x).value_type . is_numeric . should_be_true + (data.x ^ data.x).value_type . is_numeric . should_be_true group_builder.specify "should check types" <| t = table_builder [["X", [1, 2]], ["Y", ["a", "b"]], ["Z", [True, False]]] @@ -761,12 +960,20 @@ add_specs suite_builder setup = group_builder.specify "should return null if one of arguments is missing" <| nulls = [Nothing, Nothing, Nothing, Nothing] - (x + Nothing).to_vector . should_equal nulls - (x - Nothing).to_vector . should_equal nulls - (x * Nothing).to_vector . should_equal nulls - (x / Nothing).to_vector . should_equal nulls + (data.x + Nothing).to_vector . should_equal nulls + (data.x - Nothing).to_vector . should_equal nulls + (data.x * Nothing).to_vector . should_equal nulls + (data.x / Nothing).to_vector . 
should_equal nulls suite_builder.group prefix+"Rounding-like operations" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should name a rounding column correctly" <| table = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] table.at "x" . round . name . should_equal "round([x])" @@ -876,6 +1083,14 @@ add_specs suite_builder setup = x.truncate . to_vector . should_equal [1, -2, 3] suite_builder.group prefix+"Date truncation" pending=pending_datetime group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should be able to truncate a column of Date_Times" <| dates = [Date_Time.new 2020 10 24 1 2 3, Date_Time.new 2020 10 24 1 2 3] table = table_builder [["foo", dates]] @@ -886,6 +1101,14 @@ add_specs suite_builder setup = truncated.name . should_equal "truncate([foo])" suite_builder.group prefix+"Text Column Operations" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should handle operations like starts_with, ends_with, contains" <| with_mixed_columns_if_supported [["s1", ["foobar", "bar", "baz", "BAB", Nothing]], ["s2", ["foo", "ar", "a", "b", Nothing]]] t3-> s1 = t3.at "s1" @@ -1064,60 +1287,61 @@ add_specs suite_builder setup = c3.value_type.is_text . 
should_be_true suite_builder.group prefix+"Min/Max Operations" group_builder-> - t = table_builder [["a", [1, 2, 3]], ["b", [4.5, 5.5, 6.5]], ["c", ['a', 'b', 'c']], ["d", [True, False, True]]] - a = t.at "a" - b = t.at "b" - c = t.at "c" + data = Min_Max_Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + group_builder.specify "should allow one or more args and return the correct type" <| - c1 = a.min 2 + c1 = data.a.min 2 c1.to_vector . should_equal [1, 2, 2] c1.value_type.is_integer . should_be_true - c2 = a.max 2 + c2 = data.a.max 2 c2.to_vector . should_equal [2, 2, 3] c2.value_type.is_integer . should_be_true - c3 = a.min [2.5, 2] + c3 = data.a.min [2.5, 2] c3.to_vector . should_equal [1, 2, 2] Test.with_clue "c3.value_type="+c3.value_type.to_display_text <| c3.value_type.is_floating_point . should_be_true - c4 = a.max [2.5, 2] + c4 = data.a.max [2.5, 2] c4.to_vector . should_equal [2.5, 2.5, 3] c4.value_type.is_floating_point . should_be_true - c5 = a.min b + c5 = data.a.min data.b c5.to_vector . should_equal [1, 2, 3] Test.with_clue "c5.value_type="+c5.value_type.to_display_text+": " <| c5.value_type.is_floating_point . should_be_true - c6 = a.max b + c6 = data.a.max data.b c6.to_vector . should_equal [4.5, 5.5, 6.5] c6.value_type.is_floating_point . should_be_true - c7 = a.min [a, b, 1] + c7 = data.a.min [data.a, data.b, 1] c7.to_vector . should_equal [1, 1, 1] c7.value_type.is_floating_point . should_be_true - c8 = a.max [a, b, 1] + c8 = data.a.max [data.a, data.b, 1] c8.to_vector . should_equal [4.5, 5.5, 6.5] c8.value_type.is_floating_point . should_be_true - c9 = (t.at "d").min False + c9 = (data.t.at "d").min False c9.to_vector . should_equal [False, False, False] c9.value_type.is_boolean . should_be_true - c10 = (t.at "d").max False + c10 = (data.t.at "d").max False c10.to_vector . should_equal [True, False, True] c10.value_type.is_boolean . 
should_be_true group_builder.specify "should check types" <| [(.min), (.max)].each op-> - op a c . should_fail_with Invalid_Value_Type - op a [1, 2, c] . should_fail_with Invalid_Value_Type - op a [1, Nothing, c, Nothing] . should_fail_with Invalid_Value_Type - op c 1 . should_fail_with Invalid_Value_Type - op a True . should_fail_with Invalid_Value_Type + op data.a data.c . should_fail_with Invalid_Value_Type + op data.a [1, 2, data.c] . should_fail_with Invalid_Value_Type + op data.a [1, Nothing, data.c, Nothing] . should_fail_with Invalid_Value_Type + op data.c 1 . should_fail_with Invalid_Value_Type + op data.a True . should_fail_with Invalid_Value_Type do_replace column term new_text case_sensitivity=Case_Sensitivity.Default only_first=False expected = case setup.is_database of @@ -1136,41 +1360,49 @@ add_specs suite_builder setup = result . to_vector . should_equal expected suite_builder.group prefix+"replace: literal text pattern and replacement" group_builder-> - col0 = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO']]] . at "x" - col1 = table_builder [["x", ['a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']]] . 
at "x" + data = Literal_Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown group_builder.specify "case_sensitivity=sensitive/default use_regex=false only_first=false" - do_replace col0 'hello' 'bye' expected=['bye Hello', 'bye bye', 'HELLO HELLO'] - do_replace col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Default expected=['bye Hello', 'bye bye', 'HELLO HELLO'] - do_replace col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Sensitive expected=['bye Hello', 'bye bye', 'HELLO HELLO'] - do_replace col1 'a[bcd]' 'hey' expected=['hey A[bCd] hey', 'abac ad Ab aCAd'] + do_replace data.col0 'hello' 'bye' expected=['bye Hello', 'bye bye', 'HELLO HELLO'] + do_replace data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Default expected=['bye Hello', 'bye bye', 'HELLO HELLO'] + do_replace data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Sensitive expected=['bye Hello', 'bye bye', 'HELLO HELLO'] + do_replace data.col1 'a[bcd]' 'hey' expected=['hey A[bCd] hey', 'abac ad Ab aCAd'] group_builder.specify "case_sensitivity=sensitive/default use_regex=false only_first=true" - do_replace col0 'hello' 'bye' only_first=True expected=['bye Hello', 'bye hello', 'HELLO HELLO'] - do_replace col1 'a[bcd]' 'hey' only_first=True expected=['hey A[bCd] a[bcd]', 'abac ad Ab aCAd'] + do_replace data.col0 'hello' 'bye' only_first=True expected=['bye Hello', 'bye hello', 'HELLO HELLO'] + do_replace data.col1 'a[bcd]' 'hey' only_first=True expected=['hey A[bCd] a[bcd]', 'abac ad Ab aCAd'] group_builder.specify "case_sensitivity=insensitive use_regex=false only_first=false" - do_replace col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Insensitive expected=['bye bye', 'bye bye', 'bye bye'] - do_replace col1 'a[bcd]' 'hey' case_sensitivity=Case_Sensitivity.Insensitive expected=['hey hey hey', 'abac ad Ab aCAd'] + do_replace data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Insensitive expected=['bye bye', 'bye bye', 'bye 
bye'] + do_replace data.col1 'a[bcd]' 'hey' case_sensitivity=Case_Sensitivity.Insensitive expected=['hey hey hey', 'abac ad Ab aCAd'] group_builder.specify "case_sensitivity=insensitive use_regex=false only_first=true" - do_replace col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['bye Hello', 'bye hello', 'bye HELLO'] - do_replace col1 'a[bcd]' 'hey' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['hey A[bCd] a[bcd]', 'abac ad Ab aCAd'] + do_replace data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['bye Hello', 'bye hello', 'bye HELLO'] + do_replace data.col1 'a[bcd]' 'hey' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['hey A[bCd] a[bcd]', 'abac ad Ab aCAd'] suite_builder.group prefix+"replace: literal regex pattern and replacement" group_builder-> - col1 = table_builder [["x", ['a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']]] . at "x" + data = Literal_Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection group_builder.specify "case_sensitivity=sensitive/default use_regex=True only_first=false" - do_replace col1 'a[bcd]'.to_regex 'hey' expected=['a[bcd] A[bCd] a[bcd]', 'heyhey hey Ab aCAd'] + do_replace data.col1 'a[bcd]'.to_regex 'hey' expected=['a[bcd] A[bCd] a[bcd]', 'heyhey hey Ab aCAd'] group_builder.specify "case_sensitivity=sensitive/default use_regex=True only_first=true" - do_replace col1 'a[bcd]'.to_regex 'hey' only_first=True expected=['a[bcd] A[bCd] a[bcd]', 'heyac ad Ab aCAd'] + do_replace data.col1 'a[bcd]'.to_regex 'hey' only_first=True expected=['a[bcd] A[bCd] a[bcd]', 'heyac ad Ab aCAd'] group_builder.specify "case_sensitivity=insensitive use_regex=True only_first=false" - do_replace col1 'a[bcd]'.to_regex 'hey' case_sensitivity=Case_Sensitivity.Insensitive expected=['a[bcd] A[bCd] a[bcd]', 
'heyhey hey hey heyhey'] + do_replace data.col1 'a[bcd]'.to_regex 'hey' case_sensitivity=Case_Sensitivity.Insensitive expected=['a[bcd] A[bCd] a[bcd]', 'heyhey hey hey heyhey'] group_builder.specify "case_sensitivity=insensitive use_regex=True only_first=true" - do_replace col1 'a[bcd]'.to_regex 'hey' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['a[bcd] A[bCd] a[bcd]', 'heyac ad Ab aCAd'] + do_replace data.col1 'a[bcd]'.to_regex 'hey' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['a[bcd] A[bCd] a[bcd]', 'heyac ad Ab aCAd'] group_builder.specify "can properly escape complex regexes" <| regex = "^([^\(]+)|(?\w\d[a-z])+$" @@ -1178,26 +1410,34 @@ add_specs suite_builder setup = do_replace col regex "asdf" ["asdf"] suite_builder.group prefix+"replace: pattern and replacement columns" group_builder-> - table = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO', 'a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']], ["patterns", ['hello', 'hello', 'hello', 'a[bcd]', 'a[bcd]']], ["replacements", ['bye', 'bye', 'bye', 'hey', 'hey']]] - col = table.at "x" - patterns = table.at "patterns" - replacements = table.at "replacements" + data = Replace_Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown group_builder.specify "case_sensitivity=sensitive/default use_regex=false only_first=false" - do_replace col patterns replacements expected=['bye Hello', 'bye bye', 'HELLO HELLO', 'hey A[bCd] hey', 'abac ad Ab aCAd'] - do_replace col patterns replacements case_sensitivity=Case_Sensitivity.Default expected=['bye Hello', 'bye bye', 'HELLO HELLO', 'hey A[bCd] hey', 'abac ad Ab aCAd'] - do_replace col patterns replacements case_sensitivity=Case_Sensitivity.Sensitive expected=['bye Hello', 'bye bye', 'HELLO HELLO', 'hey A[bCd] hey', 'abac ad Ab aCAd'] + do_replace data.col data.patterns data.replacements expected=['bye Hello', 'bye bye', 'HELLO HELLO', 'hey A[bCd] hey', 'abac ad Ab 
aCAd'] + do_replace data.col data.patterns data.replacements case_sensitivity=Case_Sensitivity.Default expected=['bye Hello', 'bye bye', 'HELLO HELLO', 'hey A[bCd] hey', 'abac ad Ab aCAd'] + do_replace data.col data.patterns data.replacements case_sensitivity=Case_Sensitivity.Sensitive expected=['bye Hello', 'bye bye', 'HELLO HELLO', 'hey A[bCd] hey', 'abac ad Ab aCAd'] group_builder.specify "case_sensitivity=sensitive/default use_regex=false only_first=true" - do_replace col patterns replacements only_first=True expected=['bye Hello', 'bye hello', 'HELLO HELLO', 'hey A[bCd] a[bcd]', 'abac ad Ab aCAd'] + do_replace data.col data.patterns data.replacements only_first=True expected=['bye Hello', 'bye hello', 'HELLO HELLO', 'hey A[bCd] a[bcd]', 'abac ad Ab aCAd'] group_builder.specify "case_sensitivity=insensitive use_regex=false only_first=false" - do_replace col patterns replacements case_sensitivity=Case_Sensitivity.Insensitive expected=['bye bye', 'bye bye', 'bye bye', 'hey hey hey', 'abac ad Ab aCAd'] + do_replace data.col data.patterns data.replacements case_sensitivity=Case_Sensitivity.Insensitive expected=['bye bye', 'bye bye', 'bye bye', 'hey hey hey', 'abac ad Ab aCAd'] group_builder.specify "case_sensitivity=insensitive use_regex=false only_first=true" - do_replace col patterns replacements case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['bye Hello', 'bye hello', 'bye HELLO', 'hey A[bCd] a[bcd]', 'abac ad Ab aCAd'] + do_replace data.col data.patterns data.replacements case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['bye Hello', 'bye hello', 'bye HELLO', 'hey A[bCd] a[bcd]', 'abac ad Ab aCAd'] suite_builder.group prefix+"replace: empty table and nothings" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should work on empty tables" <| col = table_builder 
[["x", ['hello Hello']]] . filter "x" (Filter_Condition.Is_Nothing) . at "x" do_replace col 'hello' 'bye' expected=[] @@ -1207,8 +1447,16 @@ add_specs suite_builder setup = do_replace col 'hello' 'bye' expected=[Nothing] if setup.is_database then suite_builder.group prefix+"replace: DB specific edge-cases" group_builder-> - col = table_builder [["A", ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]]] . at 'A' + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should not allow Case_Sensitivity.Insensitive with a non-default locale" <| + col = table_builder [["A", ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]]] . at 'A' locale = Locale.new "en" "GB" "UTF-8" col.replace 'asdf' 'zxcv' case_sensitivity=(Case_Sensitivity.Insensitive locale) . should_fail_with Illegal_Argument @@ -1227,26 +1475,29 @@ add_specs suite_builder setup = col.replace patterns replacements . name . should_equal 'replace([x], [patterns], [replacements])' suite_builder.group prefix+"Column Operations - Text Replace (in-memory only)" group_builder-> + if setup.is_database.not then - t4 = table_builder [["A", ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]], ["B", ["A","O","a","E","o","O"]], ["C", [1,2,3,4,5,6]], ["D", ['',Nothing,'',Nothing,'','']]] - a = t4.at "A" - b = t4.at "B" - c = t4.at "C" - d = t4.at "D" + data = Text_Replace_Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection group_builder.specify "should allow simple replacing" <| - a.replace "a" "#" . to_vector . should_equal ["Alph#", "Br#vo", "Ch#rlie", "Delt#", "Echo", "Foxtrot"] - a.replace "o" "#" . to_vector . should_equal ["Alpha", "Brav#", "Charlie", "Delta", "Ech#", "F#xtr#t"] - a.replace b "#" . to_vector . 
should_equal ["#lpha", "Bravo", "Ch#rlie", "Delta", "Ech#", "Foxtrot"] - a.replace "o" "#" only_first=True . to_vector . should_equal ["Alpha", "Brav#", "Charlie", "Delta", "Ech#", "F#xtrot"] - a.replace "a" "#" Case_Sensitivity.Insensitive . to_vector . should_equal ["#lph#", "Br#vo", "Ch#rlie", "Delt#", "Echo", "Foxtrot"] - a.replace b "#" Case_Sensitivity.Insensitive . to_vector . should_equal ["#lph#", "Brav#", "Ch#rlie", "D#lta", "Ech#", "F#xtr#t"] - a.replace b "#" Case_Sensitivity.Insensitive only_first=True . to_vector . should_equal ["#lpha", "Brav#", "Ch#rlie", "D#lta", "Ech#", "F#xtrot"] + data.a.replace "a" "#" . to_vector . should_equal ["Alph#", "Br#vo", "Ch#rlie", "Delt#", "Echo", "Foxtrot"] + data.a.replace "o" "#" . to_vector . should_equal ["Alpha", "Brav#", "Charlie", "Delta", "Ech#", "F#xtr#t"] + data.a.replace data.b "#" . to_vector . should_equal ["#lpha", "Bravo", "Ch#rlie", "Delta", "Ech#", "Foxtrot"] + data.a.replace "o" "#" only_first=True . to_vector . should_equal ["Alpha", "Brav#", "Charlie", "Delta", "Ech#", "F#xtrot"] + data.a.replace "a" "#" Case_Sensitivity.Insensitive . to_vector . should_equal ["#lph#", "Br#vo", "Ch#rlie", "Delt#", "Echo", "Foxtrot"] + data.a.replace data.b "#" Case_Sensitivity.Insensitive . to_vector . should_equal ["#lph#", "Brav#", "Ch#rlie", "D#lta", "Ech#", "F#xtr#t"] + data.a.replace data.b "#" Case_Sensitivity.Insensitive only_first=True . to_vector . should_equal ["#lpha", "Brav#", "Ch#rlie", "D#lta", "Ech#", "F#xtrot"] group_builder.specify "should allow regex based replacing" <| - a.replace "[aeiou]".to_regex "#" . to_vector . should_equal ["Alph#", "Br#v#", "Ch#rl##", "D#lt#", "Ech#", "F#xtr#t"] - a.replace "[aeiou]".to_regex "#" . to_vector . should_equal ["Alph#", "Br#v#", "Ch#rl##", "D#lt#", "Ech#", "F#xtr#t"] - a.replace "([aeiou])(.*?)[aeiou]".to_regex "$1$2$1" . to_vector . should_equal ["Alpha", "Brava", "Charlae", "Delte", "Echo", "Foxtrot"] + data.a.replace "[aeiou]".to_regex "#" . to_vector . 
should_equal ["Alph#", "Br#v#", "Ch#rl##", "D#lt#", "Ech#", "F#xtr#t"] + data.a.replace "[aeiou]".to_regex "#" . to_vector . should_equal ["Alph#", "Br#v#", "Ch#rl##", "D#lt#", "Ech#", "F#xtr#t"] + data.a.replace "([aeiou])(.*?)[aeiou]".to_regex "$1$2$1" . to_vector . should_equal ["Alpha", "Brava", "Charlae", "Delte", "Echo", "Foxtrot"] group_builder.specify "should handle unicode" <| table = table_builder [["x", ["śćxx", "ąąasdfąą", "affib"]], ["patterns", ["ć", "ąą", "ffi"]], ["replacements", ["abc", "def", "ghi"]]] @@ -1265,15 +1516,15 @@ add_specs suite_builder setup = col.replace patterns replacements . to_vector . should_equal ["hxyzo", "qwerat", "yasdf"] group_builder.specify "should only allow replace on Text columns" <| - c.replace "a" "#" . should_fail_with Invalid_Value_Type - a.replace 1 "#" . should_fail_with Invalid_Value_Type - a.replace c "#" . should_fail_with Invalid_Value_Type - a.replace "a" 1 . should_fail_with Invalid_Value_Type - a.replace "a" c . should_fail_with Invalid_Value_Type + data.c.replace "a" "#" . should_fail_with Invalid_Value_Type + data.a.replace 1 "#" . should_fail_with Invalid_Value_Type + data.a.replace data.c "#" . should_fail_with Invalid_Value_Type + data.a.replace "a" 1 . should_fail_with Invalid_Value_Type + data.a.replace "a" data.c . should_fail_with Invalid_Value_Type group_builder.specify "should not replace if Empty term" <| - a.replace '' "#" . to_vector . should_equal ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"] - a.replace d "#" . to_vector . should_equal ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"] + data.a.replace '' "#" . to_vector . should_equal ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"] + data.a.replace data.d "#" . to_vector . should_equal ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"] group_builder.specify "should infer correct return type" <| c = table_builder [["texts", ["foo", "bar"]]] . 
at "texts" @@ -1297,10 +1548,10 @@ add_specs suite_builder setup = vt3.variable_length.should_be_true suite_builder.group prefix+"Column Operations - Text Trim" group_builder-> - t5 = table_builder [["A", [" A ", ' \t\n\rA\r\n\t ', "xxxAxx"]], ["B", [" ",' \t',"x"]], ["C", [1,2,3]]] - a = t5.at "A" - b = t5.at "B" - c = t5.at "C" + data = Trim_Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown group_builder.specify "should trim whitespace by default" <| with_mixed_columns_if_supported [["A", [" A ", ' \t\n\rA\r\n\t ', "xxxAxx"]]] t-> @@ -1310,20 +1561,28 @@ add_specs suite_builder setup = a.trim Location.End . to_vector . should_equal [" A", ' \t\n\rA', "xxxAxx"] group_builder.specify "should trim custom characters" <| - a.trim what='x' . to_vector . should_equal [" A ", ' \t\n\rA\r\n\t ', "A"] - a.trim what='x' Location.Start . to_vector . should_equal [" A ", ' \t\n\rA\r\n\t ', "Axx"] - a.trim what='x' Location.End . to_vector . should_equal [" A ", ' \t\n\rA\r\n\t ', "xxxA"] - a.trim what=' ' . to_vector . should_equal ["A", '\t\n\rA\r\n\t', "xxxAxx"] - a.trim what=' \t' . to_vector . should_equal ["A", '\n\rA\r\n', "xxxAxx"] - a.trim what=' \r' . to_vector . should_equal ["A", '\t\n\rA\r\n\t', "xxxAxx"] - a.trim what=b . to_vector . should_equal ["A", '\n\rA\r\n', "A"] + data.a.trim what='x' . to_vector . should_equal [" A ", ' \t\n\rA\r\n\t ', "A"] + data.a.trim what='x' Location.Start . to_vector . should_equal [" A ", ' \t\n\rA\r\n\t ', "Axx"] + data.a.trim what='x' Location.End . to_vector . should_equal [" A ", ' \t\n\rA\r\n\t ', "xxxA"] + data.a.trim what=' ' . to_vector . should_equal ["A", '\t\n\rA\r\n\t', "xxxAxx"] + data.a.trim what=' \t' . to_vector . should_equal ["A", '\n\rA\r\n', "xxxAxx"] + data.a.trim what=' \r' . to_vector . should_equal ["A", '\t\n\rA\r\n\t', "xxxAxx"] + data.a.trim what=data.b . to_vector . 
should_equal ["A", '\n\rA\r\n', "A"] group_builder.specify "should only allow trim on Text columns" <| - c.trim what="a" . should_fail_with Invalid_Value_Type - a.trim what=1 . should_fail_with Invalid_Value_Type - a.trim what=c . should_fail_with Invalid_Value_Type + data.c.trim what="a" . should_fail_with Invalid_Value_Type + data.a.trim what=1 . should_fail_with Invalid_Value_Type + data.a.trim what=data.c . should_fail_with Invalid_Value_Type suite_builder.group prefix+"Other Column Operations" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "is_in" <| t = table_builder [["X", [1, 2, 3, 4]], ["Y", [4, 3, 100, 200]]] x = t.at "X" @@ -1338,60 +1597,67 @@ add_specs suite_builder setup = c2.value_type.should_equal Value_Type.Boolean suite_builder.group prefix+"Colum Operations - Names" group_builder-> - t = table_builder [["a", [1, 2, 3]], ["b", ['x', 'y', 'z']], ["c", [1.0, 2.0, 3.0]], ["d", [True, False, True]]] + data = Names_Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "arithmetic" <| - ((t.at "a") + 42) . name . should_equal "[a] + 42" - ((t.at "a") - (t.at "c")) . name . should_equal "[a] - [c]" - ((t.at "a") * (t.at "c")) . name . should_equal "[a] * [c]" - ((t.at "a") / (t.at "c")) . name . should_equal "[a] / [c]" - ((t.at "a") % (t.at "c")) . name . should_equal "[a] % [c]" - ((t.at "a") ^ (t.at "c")) . name . should_equal "[a] ^ [c]" - ((t.at "b") + 'xyz') . name . should_equal "[b] + 'xyz'" - ((t.at "b") + '\0') . name . should_equal "[b] + '\0'" + ((data.t.at "a") + 42) . name . should_equal "[a] + 42" + ((data.t.at "a") - (data.t.at "c")) . name . should_equal "[a] - [c]" + ((data.t.at "a") * (data.t.at "c")) . name . 
should_equal "[a] * [c]" + ((data.t.at "a") / (data.t.at "c")) . name . should_equal "[a] / [c]" + ((data.t.at "a") % (data.t.at "c")) . name . should_equal "[a] % [c]" + ((data.t.at "a") ^ (data.t.at "c")) . name . should_equal "[a] ^ [c]" + ((data.t.at "b") + 'xyz') . name . should_equal "[b] + 'xyz'" + ((data.t.at "b") + '\0') . name . should_equal "[b] + '\0'" group_builder.specify "comparison" <| - ((t.at "b") == '\0\0') . name . should_equal "[b] == '\0\0'" - ((t.at "b") != '\0\0') . name . should_equal "[b] != '\0\0'" - ((t.at "a") < 0) . name . should_equal "[a] < 0" - ((t.at "a") <= 0) . name . should_equal "[a] <= 0" - ((t.at "a") > 0) . name . should_equal "[a] > 0" - ((t.at "b") >= 'X') . name . should_equal "[b] >= 'X'" - ((t.at "a").between (t.at "c") 42) . name . should_equal "[a] between [c] and 42" + ((data.t.at "b") == '\0\0') . name . should_equal "[b] == '\0\0'" + ((data.t.at "b") != '\0\0') . name . should_equal "[b] != '\0\0'" + ((data.t.at "a") < 0) . name . should_equal "[a] < 0" + ((data.t.at "a") <= 0) . name . should_equal "[a] <= 0" + ((data.t.at "a") > 0) . name . should_equal "[a] > 0" + ((data.t.at "b") >= 'X') . name . should_equal "[b] >= 'X'" + ((data.t.at "a").between (data.t.at "c") 42) . name . should_equal "[a] between [c] and 42" group_builder.specify "logical" <| - ((t.at "d") || False) . name . should_equal "[d] || False" - ((t.at "d") && True) . name . should_equal "[d] && True" - ((t.at "d").not) . name . should_equal "not [d]" - ((t.at "d").iif 10 20) . name . should_equal "if [d] then 10 else 20" + ((data.t.at "d") || False) . name . should_equal "[d] || False" + ((data.t.at "d") && True) . name . should_equal "[d] && True" + ((data.t.at "d").not) . name . should_equal "not [d]" + ((data.t.at "d").iif 10 20) . name . should_equal "if [d] then 10 else 20" group_builder.specify "text" <| - t.at "b" . equals_ignore_case "abc" . name . should_equal "equals_ignore_case([b], 'abc')" - t.at "b" . starts_with "abc" . name . 
should_equal "starts_with([b], 'abc')" - t.at "b" . contains "abc" . name . should_equal "contains([b], 'abc')" - t.at "b" . like "%abc%" . name . should_equal "[b] like '%abc%'" - t.at "b" . ends_with "abc" . name . should_equal "ends_with([b], 'abc')" - t.at "b" . is_empty . name . should_equal "[b] is empty" - t.at "b" . fill_empty "" . name . should_equal "b" + data.t.at "b" . equals_ignore_case "abc" . name . should_equal "equals_ignore_case([b], 'abc')" + data.t.at "b" . starts_with "abc" . name . should_equal "starts_with([b], 'abc')" + data.t.at "b" . contains "abc" . name . should_equal "contains([b], 'abc')" + data.t.at "b" . like "%abc%" . name . should_equal "[b] like '%abc%'" + data.t.at "b" . ends_with "abc" . name . should_equal "ends_with([b], 'abc')" + data.t.at "b" . is_empty . name . should_equal "[b] is empty" + data.t.at "b" . fill_empty "" . name . should_equal "b" group_builder.specify "nulls" <| - t.at "a" . coalesce [Nothing, 42] . name . should_equal "coalesce([a], Nothing, 42)" - t.at "a" . is_nothing . name . should_equal "[a] is Nothing" - t.at "a" . is_present . name . should_equal "is_present([a])" - t.at "a" . is_blank . name . should_equal "is_blank([a])" - t.at "a" . fill_nothing 100 . name . should_equal "a" + data.t.at "a" . coalesce [Nothing, 42] . name . should_equal "coalesce([a], Nothing, 42)" + data.t.at "a" . is_nothing . name . should_equal "[a] is Nothing" + data.t.at "a" . is_present . name . should_equal "is_present([a])" + data.t.at "a" . is_blank . name . should_equal "is_blank([a])" + data.t.at "a" . fill_nothing 100 . name . should_equal "a" group_builder.specify "misc" - t.at "a" . min [1, 2] . name . should_equal "min([a], 1, 2)" - t.at "a" . max 33 . name . should_equal "max([a], 33)" - t.at "a" . is_in [1, 2, 3] . name . should_equal "[a] in [1, 2, 3]" + data.t.at "a" . min [1, 2] . name . should_equal "min([a], 1, 2)" + data.t.at "a" . max 33 . name . should_equal "max([a], 33)" + data.t.at "a" . 
is_in [1, 2, 3] . name . should_equal "[a] in [1, 2, 3]" group_builder.specify "composed operations" <| # These look a bit weird, but they are consistent with the column name escaping scheme. - ((t.at "a" + 42) * (t.at "c")) . name . should_equal "[[a] + 42] * [c]" - ((t.at "a" + 42) * (t.at "c") - 33) . name . should_equal "[[[a] + 42] * [c]] - 33" + ((data.t.at "a" + 42) * (data.t.at "c")) . name . should_equal "[[a] + 42] * [c]" + ((data.t.at "a" + 42) * (data.t.at "c") - 33) . name . should_equal "[[[a] + 42] * [c]] - 33" group_builder.specify "sort" <| - t.at "a" . sort . name . should_equal "a" + data.t.at "a" . sort . name . should_equal "a" if setup.is_database.not then group_builder.specify "parse" <| @@ -1399,10 +1665,18 @@ add_specs suite_builder setup = t2.at "X" . parse . name . should_equal "X" group_builder.specify "map and zip" <| - t.at "a" . map (x -> x + 1) . name . should_equal "a" - t.at "a" . zip (t.at "b") [_, _] . name . should_equal "[a] x [b]" + data.t.at "a" . map (x -> x + 1) . name . should_equal "a" + data.t.at "a" . zip (data.t.at "b") [_, _] . name . should_equal "[a] x [b]" suite_builder.group prefix+"Column.rename" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should not allow illegal names" <| t = table_builder [["a", [1, 2, 3]]] c = t.at "a" @@ -1413,6 +1687,14 @@ add_specs suite_builder setup = c.rename '\0' . should_fail_with Invalid_Column_Names suite_builder.group prefix+"Column.const" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "Should allow the creation of constant columns" <| t = table_builder [["x", ["1", "2", "3"]]] t.at "x" . const True . to_vector . 
should_equal [True, True, True] @@ -1429,6 +1711,14 @@ add_specs suite_builder setup = t.at "x" . const (t.at "x") . should_fail_with Illegal_Argument suite_builder.group prefix+"Table.make_constant_column" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "Should allow the creation of constant columns" <| t = table_builder [["x", ["1", "2", "3"]]] t.make_constant_column True . to_vector . should_equal [True, True, True] @@ -1475,3 +1765,10 @@ all_combinations variables = go (current_choices + [x]) go [] result.to_vector + + +drop_table connection table = + case table of + _ : Database_Table -> + connection.drop_table table.name + _ -> Nothing diff --git a/test/Table_Tests/src/Common_Table_Operations/Util.enso b/test/Table_Tests/src/Common_Table_Operations/Util.enso index d7d51485f420..ffcf1f069f4d 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Util.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Util.enso @@ -3,7 +3,6 @@ from Standard.Table import Table from Standard.Test_New import all -import project.In_Memory.Common_Spec as In_Memory_Table_Spec import project.Common_Table_Operations.Main.Test_Setup import project.Common_Table_Operations.Main.Test_Selection import project.Common_Table_Operations.Aggregate_Spec @@ -11,6 +10,16 @@ import project.Common_Table_Operations.Aggregate_Spec expect_column_names names table = table.columns . map .name . should_equal names frames_to_skip=2 +type Dummy_Connection + Value + + close self = Nothing + + drop_table self table = + _ = table + Nothing + + ## These tests are parametrized by various backends and so they should be run in context of a specific backend. However, for the purpose of testing we provide a shortcut that allows to run these tests with the in-memory backend. 
@@ -34,7 +43,8 @@ run_default_backend add_specs = _ = connection Table.new cols - create_connection_func _ = Nothing + create_connection_func _ = + Dummy_Connection.Value setup = Test_Setup.Config "[In-Memory] " table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func suite = Test.build suite_builder-> diff --git a/test/Tests/src/Data/Round_Spec.enso b/test/Tests/src/Data/Round_Spec.enso index 2650746a3895..70ee6ded2f08 100644 --- a/test/Tests/src/Data/Round_Spec.enso +++ b/test/Tests/src/Data/Round_Spec.enso @@ -2,287 +2,285 @@ from Standard.Base import all import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument -import Standard.Test.Extensions -from Standard.Test import Test, Test_Suite +from Standard.Test_New import all polyglot java import java.math.BigInteger ## PRIVATE -add_specs suite_builder prefix round_fun = - suite_builder.group prefix+"Rounding numeric tests" group_builder-> - group_builder.specify "Can round positive decimals correctly" <| - round_fun 3.0 . should_equal 3 - round_fun 3.00001 . should_equal 3 - round_fun 3.3 . should_equal 3 - round_fun 3.49999 . should_equal 3 - round_fun 3.5 . should_equal 4 - round_fun 3.50001 . should_equal 4 - round_fun 3.99999 . should_equal 4 - - group_builder.specify "Can round negative decimals correctly" <| - round_fun -3.0 . should_equal -3 - round_fun -3.00001 . should_equal -3 - round_fun -3.3 . should_equal -3 - round_fun -3.49999 . should_equal -3 - round_fun -3.5 . should_equal -4 - round_fun -3.50001 . should_equal -4 - round_fun -3.99999 . should_equal -4 - - group_builder.specify "Explicit and implicit 0 decimal places work the same" <| - round_fun 3.00001 0 . should_equal 3 - round_fun 3.3 0 . should_equal 3 - round_fun 3.00001 . should_equal 3 - round_fun 3.3 . 
should_equal 3 - - group_builder.specify "Can round zero and small decimals correctly" <| - round_fun 0.0 . should_equal 0 - round_fun 0.00001 . should_equal 0 - round_fun -0.00001 . should_equal 0 - - group_builder.specify "Can round positive decimals to a specified number of decimal places" <| - round_fun 3.0001 2 . should_equal 3.0 - round_fun 3.1414 2 . should_equal 3.14 - round_fun 3.1415 2 . should_equal 3.14 - round_fun 3.1416 2 . should_equal 3.14 - round_fun 3.9999 2 . should_equal 4.0 - - round_fun 3.0001 3 . should_equal 3.0 - round_fun 3.1414 3 . should_equal 3.141 - round_fun 3.1415 3 . should_equal 3.142 - round_fun 3.1416 3 . should_equal 3.142 - round_fun 3.9999 3 . should_equal 4.0 - - group_builder.specify "Can round negative decimals to a specified number of decimal places" <| - round_fun -3.0001 2 . should_equal -3.0 - round_fun -3.1414 2 . should_equal -3.14 - round_fun -3.1415 2 . should_equal -3.14 - round_fun -3.1416 2 . should_equal -3.14 - round_fun -3.9999 2 . should_equal -4.0 - - round_fun -3.0001 3 . should_equal -3.0 - round_fun -3.1414 3 . should_equal -3.141 - round_fun -3.1415 3 . should_equal -3.142 - round_fun -3.1416 3 . should_equal -3.142 - round_fun -3.9999 3 . should_equal -4.0 - - group_builder.specify "Can round positive decimals to a specified negative number of decimal places" <| - round_fun 1234.0 -1 . should_equal 1230 - round_fun 1234.0 -2 . should_equal 1200 - round_fun 1234.0 -3 . should_equal 1000 - round_fun 1234.0 -4 . should_equal 0 - - round_fun 1499.0 -1 . should_equal 1500 - round_fun 1499.0 -2 . should_equal 1500 - round_fun 1499.0 -3 . should_equal 1000 - - round_fun 1495.0 -1 . should_equal 1500 - round_fun 1494.0 -1 . should_equal 1490 - round_fun 1495.0 -2 . should_equal 1500 - round_fun 1494.0 -2 . should_equal 1500 - - group_builder.specify "Can round negative decimals to a specified negative number of decimal places" <| - round_fun -1234.0 -1 . should_equal -1230 - round_fun -1234.0 -2 . 
should_equal -1200 - round_fun -1234.0 -3 . should_equal -1000 - round_fun -1234.0 -4 . should_equal 0 - - round_fun -1499.0 -1 . should_equal -1500 - round_fun -1499.0 -2 . should_equal -1500 - round_fun -1499.0 -3 . should_equal -1000 - - round_fun -1495.0 -1 . should_equal -1500 - round_fun -1494.0 -1 . should_equal -1490 - round_fun -1495.0 -2 . should_equal -1500 - round_fun -1494.0 -2 . should_equal -1500 - - group_builder.specify "Banker's rounding handles half-way values correctly" <| - round_fun -3.5 use_bankers=True . should_equal -4 - round_fun -2.5 use_bankers=True . should_equal -2 - round_fun -1.5 use_bankers=True . should_equal -2 - round_fun -0.5 use_bankers=True . should_equal 0 - round_fun 0.5 use_bankers=True . should_equal 0 - round_fun 1.5 use_bankers=True . should_equal 2 - round_fun 2.5 use_bankers=True . should_equal 2 - round_fun 3.5 use_bankers=True . should_equal 4 - - round_fun 0.235 2 use_bankers=True . should_equal 0.24 - round_fun 0.225 2 use_bankers=True . should_equal 0.22 - round_fun -0.235 2 use_bankers=True . should_equal -0.24 - round_fun -0.225 2 use_bankers=True . should_equal -0.22 - - round_fun 12350.0 -2 use_bankers=True . should_equal 12400 - round_fun 12250.0 -2 use_bankers=True . should_equal 12200 - round_fun -12350.0 -2 use_bankers=True . should_equal -12400 - round_fun -12250.0 -2 use_bankers=True . should_equal -12200 - - group_builder.specify "Banker's rounding handles non-half-way values just like normal rounding" <| - round_fun 3.0 use_bankers=True . should_equal 3 - round_fun 3.00001 use_bankers=True . should_equal 3 - round_fun 3.3 use_bankers=True . should_equal 3 - round_fun 3.49999 use_bankers=True . should_equal 3 - round_fun 3.50001 use_bankers=True . should_equal 4 - round_fun 3.99999 use_bankers=True . should_equal 4 - - round_fun -3.0 . should_equal -3 - round_fun -3.00001 . should_equal -3 - round_fun -3.3 . should_equal -3 - round_fun -3.49999 . should_equal -3 - round_fun -3.50001 . 
should_equal -4 - round_fun -3.99999 . should_equal -4 - - group_builder.specify "Can round correctly near the precision limit" <| - round_fun 1.22222222225 10 . should_equal 1.2222222223 - round_fun 1.222222222225 11 . should_equal 1.22222222223 - round_fun 1.2222222222225 12 . should_equal 1.222222222223 - round_fun 1.22222222222225 13 . should_equal 1.2222222222223 - round_fun 1.222222222222225 14 . should_equal 1.22222222222223 - - round_fun -1.22222222225 10 . should_equal -1.2222222223 - round_fun -1.222222222225 11 . should_equal -1.22222222223 - round_fun -1.2222222222225 12 . should_equal -1.222222222223 - round_fun -1.22222222222225 13 . should_equal -1.2222222222223 - round_fun -1.222222222222225 14 . should_equal -1.22222222222223 - - round_fun 1.22222222235 10 . should_equal 1.2222222224 - round_fun 1.222222222235 11 . should_equal 1.22222222224 - round_fun 1.2222222222235 12 . should_equal 1.222222222224 - round_fun 1.22222222222235 13 . should_equal 1.2222222222224 - round_fun 1.222222222222235 14 . should_equal 1.22222222222224 - - round_fun -1.22222222235 10 . should_equal -1.2222222224 - round_fun -1.222222222235 11 . should_equal -1.22222222224 - round_fun -1.2222222222235 12 . should_equal -1.222222222224 - round_fun -1.22222222222235 13 . should_equal -1.2222222222224 - round_fun -1.222222222222235 14 . should_equal -1.22222222222224 - - group_builder.specify "Can round correctly near the precision limit, using banker's rounding" <| - round_fun 1.22222222225 10 use_bankers=True . should_equal 1.2222222222 - round_fun 1.222222222225 11 use_bankers=True . should_equal 1.22222222222 - round_fun 1.2222222222225 12 use_bankers=True . should_equal 1.222222222222 - round_fun 1.22222222222225 13 use_bankers=True . should_equal 1.2222222222222 - round_fun 1.222222222222225 14 use_bankers=True . should_equal 1.22222222222222 - - round_fun -1.22222222225 10 use_bankers=True . should_equal -1.2222222222 - round_fun -1.222222222225 11 use_bankers=True . 
should_equal -1.22222222222 - round_fun -1.2222222222225 12 use_bankers=True . should_equal -1.222222222222 - round_fun -1.22222222222225 13 use_bankers=True . should_equal -1.2222222222222 - round_fun -1.222222222222225 14 use_bankers=True . should_equal -1.22222222222222 - - round_fun 1.22222222235 10 use_bankers=True . should_equal 1.2222222224 - round_fun 1.222222222235 11 use_bankers=True . should_equal 1.22222222224 - round_fun 1.2222222222235 12 use_bankers=True . should_equal 1.222222222224 - round_fun 1.22222222222235 13 use_bankers=True . should_equal 1.2222222222224 - round_fun 1.222222222222235 14 use_bankers=True . should_equal 1.22222222222224 - - round_fun -1.22222222235 10 use_bankers=True . should_equal -1.2222222224 - round_fun -1.222222222235 11 use_bankers=True . should_equal -1.22222222224 - round_fun -1.2222222222235 12 use_bankers=True . should_equal -1.222222222224 - round_fun -1.22222222222235 13 use_bankers=True . should_equal -1.2222222222224 - round_fun -1.222222222222235 14 use_bankers=True . should_equal -1.22222222222224 - - group_builder.specify "Decimal places out of range" <| - round_fun 3.1 16 . should_fail_with Illegal_Argument - round_fun 3.1 -16 . should_fail_with Illegal_Argument - - group_builder.specify "Floating point imperfect representation counter-examples" <| - round_fun 1.225 2 use_bankers=True . should_equal 1.22 # Actual result 1.23 - round_fun 37.785 2 . should_equal 37.79 - - group_builder.specify "Can round small integers to a specified number of decimal places correctly (value is unchanged)" - round_fun 0 . should_equal 0 - round_fun 3 . should_equal 3 - round_fun -3 . should_equal -3 - round_fun 3 0 . should_equal 3 - round_fun -3 0 . should_equal -3 - round_fun 3 1 . should_equal 3 - round_fun -3 1 . should_equal -3 - - group_builder.specify "Can round integers to a specified number of negative places correctly" - round_fun 0 -1 . should_equal 0 - round_fun 4 -1 . should_equal 0 - round_fun 5 -1 . 
should_equal 10 - round_fun 6 -1 . should_equal 10 - round_fun 9 -1 . should_equal 10 - round_fun 10 -1 . should_equal 10 - round_fun 11 -1 . should_equal 10 - round_fun 24 -1 . should_equal 20 - round_fun 25 -1 . should_equal 30 - round_fun 29 -1 . should_equal 30 - round_fun 30 -1 . should_equal 30 - round_fun 31 -1 . should_equal 30 - - round_fun 2000 -3 . should_equal 2000 - round_fun 2001 -3 . should_equal 2000 - round_fun 2412 -3 . should_equal 2000 - round_fun 2499 -3 . should_equal 2000 - round_fun 2500 -3 . should_equal 3000 - round_fun 2501 -3 . should_equal 3000 - round_fun 2511 -3 . should_equal 3000 - round_fun 2907 -3 . should_equal 3000 - round_fun 2999 -3 . should_equal 3000 - round_fun 3000 -3 . should_equal 3000 - round_fun 3001 -3 . should_equal 3000 - round_fun 3098 -3 . should_equal 3000 - round_fun 3101 -3 . should_equal 3000 - - group_builder.specify "Can round negative integers to a specified number of negative places correctly" - round_fun -4 -1 . should_equal 0 - round_fun -5 -1 . should_equal -10 - round_fun -6 -1 . should_equal -10 - round_fun -9 -1 . should_equal -10 - round_fun -10 -1 . should_equal -10 - round_fun -11 -1 . should_equal -10 - round_fun -24 -1 . should_equal -20 - round_fun -25 -1 . should_equal -30 - round_fun -29 -1 . should_equal -30 - round_fun -30 -1 . should_equal -30 - round_fun -31 -1 . should_equal -30 - - round_fun -2000 -3 . should_equal -2000 - round_fun -2001 -3 . should_equal -2000 - round_fun -2412 -3 . should_equal -2000 - round_fun -2499 -3 . should_equal -2000 - round_fun -2500 -3 . should_equal -3000 - round_fun -2501 -3 . should_equal -3000 - round_fun -2511 -3 . should_equal -3000 - round_fun -2907 -3 . should_equal -3000 - round_fun -2999 -3 . should_equal -3000 - round_fun -3000 -3 . should_equal -3000 - round_fun -3001 -3 . should_equal -3000 - round_fun -3098 -3 . should_equal -3000 - round_fun -3101 -3 . 
should_equal -3000 - - group_builder.specify "Can round negative integers to a specified number of negative places with banker's rounding correctly" <| - round_fun 12300 -2 use_bankers=True . should_equal 12300 - round_fun 12301 -2 use_bankers=True . should_equal 12300 - round_fun 12330 -2 use_bankers=True . should_equal 12300 - round_fun 12349 -2 use_bankers=True . should_equal 12300 - round_fun 12350 -2 use_bankers=True . should_equal 12400 - round_fun 12351 -2 use_bankers=True . should_equal 12400 - round_fun 12370 -2 use_bankers=True . should_equal 12400 - round_fun 12430 -2 use_bankers=True . should_equal 12400 - round_fun 12470 -2 use_bankers=True . should_equal 12500 - - round_fun 12249 -2 use_bankers=True . should_equal 12200 - round_fun 12250 -2 use_bankers=True . should_equal 12200 - round_fun 12251 -2 use_bankers=True . should_equal 12300 - - round_fun -12300 -2 use_bankers=True . should_equal -12300 - round_fun -12301 -2 use_bankers=True . should_equal -12300 - round_fun -12330 -2 use_bankers=True . should_equal -12300 - round_fun -12349 -2 use_bankers=True . should_equal -12300 - round_fun -12350 -2 use_bankers=True . should_equal -12400 - round_fun -12351 -2 use_bankers=True . should_equal -12400 - round_fun -12370 -2 use_bankers=True . should_equal -12400 - round_fun -12430 -2 use_bankers=True . should_equal -12400 - round_fun -12470 -2 use_bankers=True . should_equal -12500 - - round_fun -12249 -2 use_bankers=True . should_equal -12200 - round_fun -12250 -2 use_bankers=True . should_equal -12200 - round_fun -12251 -2 use_bankers=True . should_equal -12300 - - group_builder.specify "Handles incorrect argument types" <| - Test.expect_panic_with (round_fun 123 "two") Type_Error - Test.expect_panic_with (round_fun 123 use_bankers="no") Type_Error - Test.expect_panic_with (round_fun 123 use_bankers=0) Type_Error +add_specs group_builder round_fun = + group_builder.specify "Can round positive decimals correctly" <| + round_fun 3.0 . 
should_equal 3 + round_fun 3.00001 . should_equal 3 + round_fun 3.3 . should_equal 3 + round_fun 3.49999 . should_equal 3 + round_fun 3.5 . should_equal 4 + round_fun 3.50001 . should_equal 4 + round_fun 3.99999 . should_equal 4 + + group_builder.specify "Can round negative decimals correctly" <| + round_fun -3.0 . should_equal -3 + round_fun -3.00001 . should_equal -3 + round_fun -3.3 . should_equal -3 + round_fun -3.49999 . should_equal -3 + round_fun -3.5 . should_equal -4 + round_fun -3.50001 . should_equal -4 + round_fun -3.99999 . should_equal -4 + + group_builder.specify "Explicit and implicit 0 decimal places work the same" <| + round_fun 3.00001 0 . should_equal 3 + round_fun 3.3 0 . should_equal 3 + round_fun 3.00001 . should_equal 3 + round_fun 3.3 . should_equal 3 + + group_builder.specify "Can round zero and small decimals correctly" <| + round_fun 0.0 . should_equal 0 + round_fun 0.00001 . should_equal 0 + round_fun -0.00001 . should_equal 0 + + group_builder.specify "Can round positive decimals to a specified number of decimal places" <| + round_fun 3.0001 2 . should_equal 3.0 + round_fun 3.1414 2 . should_equal 3.14 + round_fun 3.1415 2 . should_equal 3.14 + round_fun 3.1416 2 . should_equal 3.14 + round_fun 3.9999 2 . should_equal 4.0 + + round_fun 3.0001 3 . should_equal 3.0 + round_fun 3.1414 3 . should_equal 3.141 + round_fun 3.1415 3 . should_equal 3.142 + round_fun 3.1416 3 . should_equal 3.142 + round_fun 3.9999 3 . should_equal 4.0 + + group_builder.specify "Can round negative decimals to a specified number of decimal places" <| + round_fun -3.0001 2 . should_equal -3.0 + round_fun -3.1414 2 . should_equal -3.14 + round_fun -3.1415 2 . should_equal -3.14 + round_fun -3.1416 2 . should_equal -3.14 + round_fun -3.9999 2 . should_equal -4.0 + + round_fun -3.0001 3 . should_equal -3.0 + round_fun -3.1414 3 . should_equal -3.141 + round_fun -3.1415 3 . should_equal -3.142 + round_fun -3.1416 3 . should_equal -3.142 + round_fun -3.9999 3 . 
should_equal -4.0 + + group_builder.specify "Can round positive decimals to a specified negative number of decimal places" <| + round_fun 1234.0 -1 . should_equal 1230 + round_fun 1234.0 -2 . should_equal 1200 + round_fun 1234.0 -3 . should_equal 1000 + round_fun 1234.0 -4 . should_equal 0 + + round_fun 1499.0 -1 . should_equal 1500 + round_fun 1499.0 -2 . should_equal 1500 + round_fun 1499.0 -3 . should_equal 1000 + + round_fun 1495.0 -1 . should_equal 1500 + round_fun 1494.0 -1 . should_equal 1490 + round_fun 1495.0 -2 . should_equal 1500 + round_fun 1494.0 -2 . should_equal 1500 + + group_builder.specify "Can round negative decimals to a specified negative number of decimal places" <| + round_fun -1234.0 -1 . should_equal -1230 + round_fun -1234.0 -2 . should_equal -1200 + round_fun -1234.0 -3 . should_equal -1000 + round_fun -1234.0 -4 . should_equal 0 + + round_fun -1499.0 -1 . should_equal -1500 + round_fun -1499.0 -2 . should_equal -1500 + round_fun -1499.0 -3 . should_equal -1000 + + round_fun -1495.0 -1 . should_equal -1500 + round_fun -1494.0 -1 . should_equal -1490 + round_fun -1495.0 -2 . should_equal -1500 + round_fun -1494.0 -2 . should_equal -1500 + + group_builder.specify "Banker's rounding handles half-way values correctly" <| + round_fun -3.5 use_bankers=True . should_equal -4 + round_fun -2.5 use_bankers=True . should_equal -2 + round_fun -1.5 use_bankers=True . should_equal -2 + round_fun -0.5 use_bankers=True . should_equal 0 + round_fun 0.5 use_bankers=True . should_equal 0 + round_fun 1.5 use_bankers=True . should_equal 2 + round_fun 2.5 use_bankers=True . should_equal 2 + round_fun 3.5 use_bankers=True . should_equal 4 + + round_fun 0.235 2 use_bankers=True . should_equal 0.24 + round_fun 0.225 2 use_bankers=True . should_equal 0.22 + round_fun -0.235 2 use_bankers=True . should_equal -0.24 + round_fun -0.225 2 use_bankers=True . should_equal -0.22 + + round_fun 12350.0 -2 use_bankers=True . 
should_equal 12400 + round_fun 12250.0 -2 use_bankers=True . should_equal 12200 + round_fun -12350.0 -2 use_bankers=True . should_equal -12400 + round_fun -12250.0 -2 use_bankers=True . should_equal -12200 + + group_builder.specify "Banker's rounding handles non-half-way values just like normal rounding" <| + round_fun 3.0 use_bankers=True . should_equal 3 + round_fun 3.00001 use_bankers=True . should_equal 3 + round_fun 3.3 use_bankers=True . should_equal 3 + round_fun 3.49999 use_bankers=True . should_equal 3 + round_fun 3.50001 use_bankers=True . should_equal 4 + round_fun 3.99999 use_bankers=True . should_equal 4 + + round_fun -3.0 . should_equal -3 + round_fun -3.00001 . should_equal -3 + round_fun -3.3 . should_equal -3 + round_fun -3.49999 . should_equal -3 + round_fun -3.50001 . should_equal -4 + round_fun -3.99999 . should_equal -4 + + group_builder.specify "Can round correctly near the precision limit" <| + round_fun 1.22222222225 10 . should_equal 1.2222222223 + round_fun 1.222222222225 11 . should_equal 1.22222222223 + round_fun 1.2222222222225 12 . should_equal 1.222222222223 + round_fun 1.22222222222225 13 . should_equal 1.2222222222223 + round_fun 1.222222222222225 14 . should_equal 1.22222222222223 + + round_fun -1.22222222225 10 . should_equal -1.2222222223 + round_fun -1.222222222225 11 . should_equal -1.22222222223 + round_fun -1.2222222222225 12 . should_equal -1.222222222223 + round_fun -1.22222222222225 13 . should_equal -1.2222222222223 + round_fun -1.222222222222225 14 . should_equal -1.22222222222223 + + round_fun 1.22222222235 10 . should_equal 1.2222222224 + round_fun 1.222222222235 11 . should_equal 1.22222222224 + round_fun 1.2222222222235 12 . should_equal 1.222222222224 + round_fun 1.22222222222235 13 . should_equal 1.2222222222224 + round_fun 1.222222222222235 14 . should_equal 1.22222222222224 + + round_fun -1.22222222235 10 . should_equal -1.2222222224 + round_fun -1.222222222235 11 . 
should_equal -1.22222222224 + round_fun -1.2222222222235 12 . should_equal -1.222222222224 + round_fun -1.22222222222235 13 . should_equal -1.2222222222224 + round_fun -1.222222222222235 14 . should_equal -1.22222222222224 + + group_builder.specify "Can round correctly near the precision limit, using banker's rounding" <| + round_fun 1.22222222225 10 use_bankers=True . should_equal 1.2222222222 + round_fun 1.222222222225 11 use_bankers=True . should_equal 1.22222222222 + round_fun 1.2222222222225 12 use_bankers=True . should_equal 1.222222222222 + round_fun 1.22222222222225 13 use_bankers=True . should_equal 1.2222222222222 + round_fun 1.222222222222225 14 use_bankers=True . should_equal 1.22222222222222 + + round_fun -1.22222222225 10 use_bankers=True . should_equal -1.2222222222 + round_fun -1.222222222225 11 use_bankers=True . should_equal -1.22222222222 + round_fun -1.2222222222225 12 use_bankers=True . should_equal -1.222222222222 + round_fun -1.22222222222225 13 use_bankers=True . should_equal -1.2222222222222 + round_fun -1.222222222222225 14 use_bankers=True . should_equal -1.22222222222222 + + round_fun 1.22222222235 10 use_bankers=True . should_equal 1.2222222224 + round_fun 1.222222222235 11 use_bankers=True . should_equal 1.22222222224 + round_fun 1.2222222222235 12 use_bankers=True . should_equal 1.222222222224 + round_fun 1.22222222222235 13 use_bankers=True . should_equal 1.2222222222224 + round_fun 1.222222222222235 14 use_bankers=True . should_equal 1.22222222222224 + + round_fun -1.22222222235 10 use_bankers=True . should_equal -1.2222222224 + round_fun -1.222222222235 11 use_bankers=True . should_equal -1.22222222224 + round_fun -1.2222222222235 12 use_bankers=True . should_equal -1.222222222224 + round_fun -1.22222222222235 13 use_bankers=True . should_equal -1.2222222222224 + round_fun -1.222222222222235 14 use_bankers=True . should_equal -1.22222222222224 + + group_builder.specify "Decimal places out of range" <| + round_fun 3.1 16 . 
should_fail_with Illegal_Argument + round_fun 3.1 -16 . should_fail_with Illegal_Argument + + group_builder.specify "Floating point imperfect representation counter-examples" <| + round_fun 1.225 2 use_bankers=True . should_equal 1.22 # Actual result 1.23 + round_fun 37.785 2 . should_equal 37.79 + + group_builder.specify "Can round small integers to a specified number of decimal places correctly (value is unchanged)" + round_fun 0 . should_equal 0 + round_fun 3 . should_equal 3 + round_fun -3 . should_equal -3 + round_fun 3 0 . should_equal 3 + round_fun -3 0 . should_equal -3 + round_fun 3 1 . should_equal 3 + round_fun -3 1 . should_equal -3 + + group_builder.specify "Can round integers to a specified number of negative places correctly" + round_fun 0 -1 . should_equal 0 + round_fun 4 -1 . should_equal 0 + round_fun 5 -1 . should_equal 10 + round_fun 6 -1 . should_equal 10 + round_fun 9 -1 . should_equal 10 + round_fun 10 -1 . should_equal 10 + round_fun 11 -1 . should_equal 10 + round_fun 24 -1 . should_equal 20 + round_fun 25 -1 . should_equal 30 + round_fun 29 -1 . should_equal 30 + round_fun 30 -1 . should_equal 30 + round_fun 31 -1 . should_equal 30 + + round_fun 2000 -3 . should_equal 2000 + round_fun 2001 -3 . should_equal 2000 + round_fun 2412 -3 . should_equal 2000 + round_fun 2499 -3 . should_equal 2000 + round_fun 2500 -3 . should_equal 3000 + round_fun 2501 -3 . should_equal 3000 + round_fun 2511 -3 . should_equal 3000 + round_fun 2907 -3 . should_equal 3000 + round_fun 2999 -3 . should_equal 3000 + round_fun 3000 -3 . should_equal 3000 + round_fun 3001 -3 . should_equal 3000 + round_fun 3098 -3 . should_equal 3000 + round_fun 3101 -3 . should_equal 3000 + + group_builder.specify "Can round negative integers to a specified number of negative places correctly" + round_fun -4 -1 . should_equal 0 + round_fun -5 -1 . should_equal -10 + round_fun -6 -1 . should_equal -10 + round_fun -9 -1 . should_equal -10 + round_fun -10 -1 . 
should_equal -10 + round_fun -11 -1 . should_equal -10 + round_fun -24 -1 . should_equal -20 + round_fun -25 -1 . should_equal -30 + round_fun -29 -1 . should_equal -30 + round_fun -30 -1 . should_equal -30 + round_fun -31 -1 . should_equal -30 + + round_fun -2000 -3 . should_equal -2000 + round_fun -2001 -3 . should_equal -2000 + round_fun -2412 -3 . should_equal -2000 + round_fun -2499 -3 . should_equal -2000 + round_fun -2500 -3 . should_equal -3000 + round_fun -2501 -3 . should_equal -3000 + round_fun -2511 -3 . should_equal -3000 + round_fun -2907 -3 . should_equal -3000 + round_fun -2999 -3 . should_equal -3000 + round_fun -3000 -3 . should_equal -3000 + round_fun -3001 -3 . should_equal -3000 + round_fun -3098 -3 . should_equal -3000 + round_fun -3101 -3 . should_equal -3000 + + group_builder.specify "Can round negative integers to a specified number of negative places with banker's rounding correctly" <| + round_fun 12300 -2 use_bankers=True . should_equal 12300 + round_fun 12301 -2 use_bankers=True . should_equal 12300 + round_fun 12330 -2 use_bankers=True . should_equal 12300 + round_fun 12349 -2 use_bankers=True . should_equal 12300 + round_fun 12350 -2 use_bankers=True . should_equal 12400 + round_fun 12351 -2 use_bankers=True . should_equal 12400 + round_fun 12370 -2 use_bankers=True . should_equal 12400 + round_fun 12430 -2 use_bankers=True . should_equal 12400 + round_fun 12470 -2 use_bankers=True . should_equal 12500 + + round_fun 12249 -2 use_bankers=True . should_equal 12200 + round_fun 12250 -2 use_bankers=True . should_equal 12200 + round_fun 12251 -2 use_bankers=True . should_equal 12300 + + round_fun -12300 -2 use_bankers=True . should_equal -12300 + round_fun -12301 -2 use_bankers=True . should_equal -12300 + round_fun -12330 -2 use_bankers=True . should_equal -12300 + round_fun -12349 -2 use_bankers=True . should_equal -12300 + round_fun -12350 -2 use_bankers=True . should_equal -12400 + round_fun -12351 -2 use_bankers=True . 
should_equal -12400 + round_fun -12370 -2 use_bankers=True . should_equal -12400 + round_fun -12430 -2 use_bankers=True . should_equal -12400 + round_fun -12470 -2 use_bankers=True . should_equal -12500 + + round_fun -12249 -2 use_bankers=True . should_equal -12200 + round_fun -12250 -2 use_bankers=True . should_equal -12200 + round_fun -12251 -2 use_bankers=True . should_equal -12300 + + group_builder.specify "Handles incorrect argument types" <| + Test.expect_panic_with (round_fun 123 "two") Type_Error + Test.expect_panic_with (round_fun 123 use_bankers="no") Type_Error + Test.expect_panic_with (round_fun 123 use_bankers=0) Type_Error From d85b1380e01822d0bd7f3401d84e0645594bff54 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Mon, 22 Jan 2024 18:47:45 +0100 Subject: [PATCH 64/93] Enable some temporary disabled tests --- test/Table_Tests/src/Database/Common/Common_Spec.enso | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/test/Table_Tests/src/Database/Common/Common_Spec.enso b/test/Table_Tests/src/Database/Common/Common_Spec.enso index b6ea1393c61d..8aa0d6677371 100644 --- a/test/Table_Tests/src/Database/Common/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Common_Spec.enso @@ -155,8 +155,7 @@ type Missing_Values_Data add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : (Nothing -> Any)) = Default_Ordering_Spec.add_specs suite_builder prefix create_connection_fn - # TODO: - # Names_Length_Limits_Spec.add_specs suite_builder prefix create_connection_fn + Names_Length_Limits_Spec.add_specs suite_builder prefix create_connection_fn suite_builder.group (prefix + "Basic Table Access") group_builder-> data = Basic_Data.setup create_connection_fn From f4906ba0654405f5e3baec7764c9b28784f0d398 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Mon, 22 Jan 2024 19:04:05 +0100 Subject: [PATCH 65/93] Fix teardowns in Aggregate_Spec --- .../Aggregate_Spec.enso | 172 +++++++++++++++--- 1 file changed, 149 
insertions(+), 23 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso index bbfabcedae0f..c29abbe87450 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso @@ -17,6 +17,9 @@ polyglot java import java.lang.Double type Test_Selection Config problem_handling=True advanced_stats=True text_concat=True text_shortest_longest=True first_last=True first_last_row_order=True std_dev=True multi_distinct=True aggregation_problems=True nan=True date_support=True +main = + run_default_backend add_specs + type Data Value ~data @@ -30,6 +33,9 @@ type Data empty_table = empty_table_fn Nothing [connection, table, empty_table] + teardown self = + self.connection.close + ## Runs the common aggregate tests. add_specs suite_builder setup = @@ -37,7 +43,6 @@ add_specs suite_builder setup = create_connection_fn = setup.create_connection_func table_fn = setup.table_fn empty_table_fn = setup.empty_table_fn - table_builder = setup.table_builder materialize = setup.materialize is_database = setup.is_database test_selection = setup.aggregate_test_selection @@ -879,6 +884,14 @@ add_specs suite_builder setup = materialized.columns.at 2 . at idx . length . 
should_equal 381 suite_builder.group prefix+"Table.aggregate Shortest" (pending = resolve_pending test_selection.text_shortest_longest) group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should correctly handle empty strings versus missing (null) strings" <| table = table_builder [["A", ["abcd", "f", ""]], ["B", [Nothing, "f", "abc"]]] result = table.aggregate [Shortest "A", Shortest "B"] @@ -894,8 +907,14 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.aggregate Concatenate" (pending = resolve_pending test_selection.text_concat) group_builder-> data = Data.setup create_connection_fn table_fn empty_table_fn + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should insert the separator, add prefix and suffix" <| - table = table_builder [["A", ["foo", "bar", "foo", "foo"]], ["B", ["a", "b", "c", "d"]]] connection=data.connection + table = table_builder [["A", ["foo", "bar", "foo", "foo"]], ["B", ["a", "b", "c", "d"]]] result = table.aggregate [Group_By "A", (Concatenate "B" prefix="[[" suffix="]]" separator="; ")] result.row_count . should_equal 2 materialized = materialize result . order_by ([Sort_Column.Name "A"]) @@ -907,7 +926,7 @@ add_specs suite_builder setup = materialized.columns.at 1 . to_vector . should_equal ["[[b]]", "[[a; c; d]]"] group_builder.specify "should correctly escape separator and quote characters but only if necessary" <| - table = table_builder [["A", ["1,0", "b", "'c", "''", ","]]] connection=data.connection + table = table_builder [["A", ["1,0", "b", "'c", "''", ","]]] result = table.aggregate [(Concatenate "A" prefix="[[" suffix="]]" separator="," quote_char="'")] result.row_count . 
should_equal 1 materialized = materialize result @@ -917,7 +936,7 @@ add_specs suite_builder setup = materialized.columns.at 0 . to_vector . should_equal ["[['1,0',b,'''c','''''',',']]"] group_builder.specify "should correctly handle missing values and empty values with quote character" <| - table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] connection=data.connection + table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] result = table.aggregate [(Concatenate "A" prefix="[[" suffix="]]" separator="," quote_char="'")] result.row_count . should_equal 1 materialized = materialize result @@ -927,7 +946,7 @@ add_specs suite_builder setup = materialized.columns.at 0 . to_vector . should_equal ["[['1,0',A,'','',B,,,C]]"] group_builder.specify "will not be able to distinguish missing values from empty values without quote character" <| - table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] connection=data.connection + table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] result = table.aggregate [(Concatenate "A" prefix="[[" suffix="]]" separator=",")] result.row_count . should_equal 1 materialized = materialize result @@ -942,7 +961,7 @@ add_specs suite_builder setup = materialized.columns.at 0 . to_vector . should_equal ["[[1,0,A,,,B,,,C]]"] group_builder.specify "should work with empty separator" <| - table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] connection=data.connection + table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]] result = table.aggregate [(Concatenate "A")] result.row_count . should_equal 1 materialized = materialize result @@ -952,7 +971,7 @@ add_specs suite_builder setup = materialized.columns.at 0 . to_vector . 
should_equal ["1,0ABC"] group_builder.specify "should work with empty separator but non-empty quote" <| - table = table_builder [["A", ["1'0", "A", "", "", "B", Nothing, Nothing, "C"]]] connection=data.connection + table = table_builder [["A", ["1'0", "A", "", "", "B", Nothing, Nothing, "C"]]] result = table.aggregate [(Concatenate "A" quote_char="'")] result.row_count . should_equal 1 materialized = materialize result @@ -964,6 +983,12 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.aggregate Count_Distinct" group_builder-> data = Data.setup create_connection_fn table_fn empty_table_fn + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should correctly count missing values" <| get_value t = columns = materialize t . columns @@ -973,7 +998,7 @@ add_specs suite_builder setup = ## TODO currently our builder does not allow all-null tables, so we create one with a 0 and remove it by filter. See #6159. - t0 = table_builder [["A", [0]]] connection=data.connection + t0 = table_builder [["A", [0]]] t1 = t0.filter "A" (Filter_Condition.Is_Nothing) t1.row_count . should_equal 0 t1.at "A" . to_vector . should_equal [] @@ -983,7 +1008,7 @@ add_specs suite_builder setup = ## TODO currently our builder does not allow all-null tables, so we create one with a 0 and remove it by filter. See #6159. - t0_2 = table_builder [["A", [0, Nothing, Nothing]]] connection=data.connection + t0_2 = table_builder [["A", [0, Nothing, Nothing]]] t2 = t0_2.filter "A" (Filter_Condition.Is_Nothing) t2.row_count . should_equal 2 t2.at "A" . to_vector . should_equal [Nothing, Nothing] @@ -991,15 +1016,15 @@ add_specs suite_builder setup = get_value (t2.aggregate [Count_Distinct "A" (ignore_nothing=True)]) . should_equal 0 get_value (t2.aggregate [Count_Distinct "A" (ignore_nothing=False)]) . 
should_equal 1 - t3 = table_builder [["A", [1, 2]]] connection=data.connection + t3 = table_builder [["A", [1, 2]]] get_value (t3.aggregate [Count_Distinct "A" (ignore_nothing=True)]) . should_equal 2 get_value (t3.aggregate [Count_Distinct "A" (ignore_nothing=False)]) . should_equal 2 - t4 = table_builder [["A", [1, 2, Nothing, Nothing]]] connection=data.connection + t4 = table_builder [["A", [1, 2, Nothing, Nothing]]] get_value (t4.aggregate [Count_Distinct "A" (ignore_nothing=True)]) . should_equal 2 get_value (t4.aggregate [Count_Distinct "A" (ignore_nothing=False)]) . should_equal 3 - t5 = table_builder [["G", ["foo", "foo", "bar", "foo"]], ["A", [Nothing, 0, Nothing, Nothing]]] connection=data.connection + t5 = table_builder [["G", ["foo", "foo", "bar", "foo"]], ["A", [Nothing, 0, Nothing, Nothing]]] r1 = t5.aggregate [Group_By "G", Count_Distinct "A" (ignore_nothing=True)] r1.row_count . should_equal 2 @@ -1016,7 +1041,7 @@ add_specs suite_builder setup = m2.columns.second.to_vector . should_equal [1, 2] group_builder.specify "should correctly count all-null keys in multi-column mode" (pending = resolve_pending test_selection.multi_distinct) <| - table = table_builder [["A", ["foo", "foo", Nothing, Nothing, Nothing]], ["B", ["baz", Nothing, Nothing, Nothing, "baz"]], ["C", [1, 2, 3, Nothing, 5]]] connection=data.connection + table = table_builder [["A", ["foo", "foo", Nothing, Nothing, Nothing]], ["B", ["baz", Nothing, Nothing, Nothing, "baz"]], ["C", [1, 2, 3, Nothing, 5]]] r2 = table.aggregate [Count_Distinct ["A", "B"] (ignore_nothing=False)] r2.row_count.should_equal 1 @@ -1033,6 +1058,14 @@ add_specs suite_builder setup = m1.columns.first.to_vector . 
should_equal [3] suite_builder.group prefix+"Table.aggregate Standard_Deviation" pending=(resolve_pending test_selection.std_dev) group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should correctly handle single elements" <| r1 = table_builder [["X", [1]]] . aggregate [Standard_Deviation "X" (population=False), Standard_Deviation "X" (population=True)] r1.row_count.should_equal 1 @@ -1042,6 +1075,14 @@ add_specs suite_builder setup = m1.columns.second.at 0 . should_equal 0 suite_builder.group prefix+"Table.aggregate should correctly select result types" group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "widening to decimals on Average" <| table = table_builder [["G", ["a", "a", "b", "b"]], ["X", [0, 1, 1, Nothing]]] r1 = table.aggregate [Average "X"] @@ -1093,18 +1134,25 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.aggregate should correctly handle infinities" group_builder-> data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + pos_inf = 1/0 neg_inf = -1/0 group_builder.specify "on Average" <| - t1 = table_builder [["X", [Nothing, pos_inf, pos_inf, 0]]] connection=data.connection + t1 = table_builder [["X", [Nothing, pos_inf, pos_inf, 0]]] r1 = t1.aggregate [Average "X"] r1.row_count.should_equal 1 m1 = materialize r1 m1.column_count . should_equal 1 m1.columns.first.at 0 . 
should_equal pos_inf - t2 = table_builder [["X", [Nothing, pos_inf, neg_inf, 0]]] connection=data.connection + t2 = table_builder [["X", [Nothing, pos_inf, neg_inf, 0]]] r2 = t2.aggregate [Average "X"] r2.row_count.should_equal 1 m2 = materialize r2 @@ -1112,28 +1160,28 @@ add_specs suite_builder setup = expect_null_or_nan <| m2.columns.first.at 0 group_builder.specify "on Median" (pending = resolve_pending test_selection.advanced_stats) <| - t1 = table_builder [["X", [Nothing, neg_inf, pos_inf, 0, pos_inf, pos_inf]]] connection=data.connection + t1 = table_builder [["X", [Nothing, neg_inf, pos_inf, 0, pos_inf, pos_inf]]] r1 = t1.aggregate [Median "X"] r1.row_count.should_equal 1 m1 = materialize r1 m1.column_count . should_equal 1 m1.columns.first.at 0 . should_equal pos_inf - t2 = table_builder [["X", [pos_inf, pos_inf, neg_inf, neg_inf]]] connection=data.connection + t2 = table_builder [["X", [pos_inf, pos_inf, neg_inf, neg_inf]]] r2 = t2.aggregate [Median "X"] r2.row_count.should_equal 1 m2 = materialize r2 m2.column_count . should_equal 1 expect_null_or_nan <| m2.columns.first.at 0 - t3 = table_builder [["X", [pos_inf, pos_inf, Nothing, 0, 10, 20, neg_inf, neg_inf]]] connection=data.connection + t3 = table_builder [["X", [pos_inf, pos_inf, Nothing, 0, 10, 20, neg_inf, neg_inf]]] r3 = t3.aggregate [Median "X"] r3.row_count.should_equal 1 m3 = materialize r3 m3.column_count . should_equal 1 m3.columns.first.at 0 . should_equal 10 - t4 = table_builder [["X", [Nothing, pos_inf, pos_inf, 10, 12]]] connection=data.connection + t4 = table_builder [["X", [Nothing, pos_inf, pos_inf, 10, 12]]] r4 = t4.aggregate [Median "X"] r4.row_count.should_equal 1 m4 = materialize r4 @@ -1141,21 +1189,21 @@ add_specs suite_builder setup = m4.columns.first.at 0 . 
should_equal pos_inf group_builder.specify "on Percentile" (pending = resolve_pending test_selection.advanced_stats) <| - t1 = table_builder [["X", [Nothing, neg_inf, 2, 3, 4, pos_inf]]] connection=data.connection + t1 = table_builder [["X", [Nothing, neg_inf, 2, 3, 4, pos_inf]]] r1 = t1.aggregate [Percentile 0.3 "X"] r1.row_count.should_equal 1 m1 = materialize r1 m1.column_count . should_equal 1 m1.columns.first.at 0 . should_equal 2.2 - t2 = table_builder [["X", [Nothing, neg_inf, neg_inf, 3, 4, pos_inf]]] connection=data.connection + t2 = table_builder [["X", [Nothing, neg_inf, neg_inf, 3, 4, pos_inf]]] r2 = t2.aggregate [Percentile 0.25 "X"] r2.row_count.should_equal 1 m2 = materialize r2 m2.column_count . should_equal 1 m2.columns.first.at 0 . should_equal neg_inf - t3 = table_builder [["X", [Nothing, neg_inf, neg_inf, pos_inf, pos_inf, pos_inf]]] connection=data.connection + t3 = table_builder [["X", [Nothing, neg_inf, neg_inf, pos_inf, pos_inf, pos_inf]]] r3 = t3.aggregate [Percentile 0.3 "X"] r3.row_count.should_equal 1 m3 = materialize r3 @@ -1163,7 +1211,7 @@ add_specs suite_builder setup = expect_null_or_nan <| m3.columns.first.at 0 group_builder.specify "on Standard_Deviation" (pending = resolve_pending test_selection.std_dev) <| - t1 = table_builder [["X", [neg_inf, 1]]] connection=data.connection + t1 = table_builder [["X", [neg_inf, 1]]] r1 = t1.aggregate [Standard_Deviation "X" (population=True), Standard_Deviation "X" (population=False)] r1.row_count.should_equal 1 m1 = materialize r1 @@ -1172,6 +1220,14 @@ add_specs suite_builder setup = expect_null_or_nan <| m1.columns.second.at 0 suite_builder.group prefix+"Table.aggregate should correctly handle NaN" pending=(resolve_pending test_selection.nan) group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + nan = 0.log 0 group_builder.specify "on 
Average" <| t1 = table_builder [["X", [Nothing, nan, 0, 1, 2]]] @@ -1215,6 +1271,14 @@ add_specs suite_builder setup = Double.isNaN (m1.columns.second.at 0) . should_be_true suite_builder.group prefix+"Table.aggregate Mode" (pending = resolve_pending test_selection.advanced_stats) group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should ignore missing values" <| t1 = table_builder [["X", [Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 2, 2, 1]]] r1 = t1.aggregate [Mode "X"] @@ -1224,6 +1288,14 @@ add_specs suite_builder setup = m1.columns.first.at 0 . should_equal 2 suite_builder.group prefix+"Table.aggregate First and Last" group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should not return the same value for groups with different values but equal ordering keys" (pending = resolve_pending test_selection.first_last) <| t1 = table_builder [["G", ["a", "a"]], ["X", [1, 2]]] order = [Sort_Column.Name "G"] @@ -1236,6 +1308,14 @@ add_specs suite_builder setup = (first != last).should_be_true suite_builder.group prefix+"Table.aggregate" group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should work even if no aggregations apart from groupings are specified" <| table = table_builder [["A", [1, 1, 2, 1]], ["B", [3, 2, 2, 3]], ["C", [11, 12, 13, 14]]] grouped = table.aggregate [Group_By "B", Group_By "A"] @@ -1312,6 +1392,12 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.aggregate+Expressions" group_builder-> data 
= Data.setup create_connection_fn table_fn empty_table_fn + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + ## TODO we probably should check all kinds of aggregate columns to verify that all of them correctly support expressions. group_builder.specify "should allow expressions in aggregates" <| @@ -1345,6 +1431,14 @@ add_specs suite_builder setup = err4.expression_error.should_equal (No_Such_Column.Error "MISSING") suite_builder.group prefix+"Table.aggregate should raise warnings when there are issues" group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + table = col1 = ["Index", [1, 2, 3]] col2 = ["Value", [1, 2, 3]] @@ -1433,6 +1527,14 @@ add_specs suite_builder setup = Problems.test_problem_handling action problems tester suite_builder.group prefix+"Table.aggregate should report warnings and errors based on types" group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should warn if grouping on a floating point" <| t = table_builder [["X", [1.1, 2.2, 3.3, 2.2]]] action = t.aggregate [Group_By "X"] on_problems=_ @@ -1486,6 +1588,14 @@ add_specs suite_builder setup = t2.at "Concatenate Text" . value_type . is_text . 
should_be_true suite_builder.group prefix+"Table.aggregate should raise warnings when there are issues computing aggregation" pending=(resolve_pending test_selection.aggregation_problems) group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + table = col1 = ["Index", [1, 2, 3]] col2 = ["Value", [1, 2, 3.1]] @@ -1517,6 +1627,14 @@ add_specs suite_builder setup = Problems.test_problem_handling action problems tester suite_builder.group prefix+"Table.aggregate should merge warnings when issues computing aggregation" pending=(resolve_pending test_selection.aggregation_problems) group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should merge Invalid Aggregation warnings" <| table = table_builder [["X", (0.up_to 16).map (_-> ",")]] new_table = table.aggregate [Concatenate "X" separator=","] @@ -1540,6 +1658,14 @@ add_specs suite_builder setup = if is_database then suite_builder.group prefix+"Table.aggregate should report unsupported operations but not block other aggregations in warning mode" group_builder-> + data = Data.setup create_connection_fn table_fn empty_table_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + expect_sum_and_unsupported_errors error_count result = result.column_count . should_equal 1 result.row_count . should_equal 1 From bd676f928b408f8420835bea47bab42faf3a51fe Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 12:01:27 +0100 Subject: [PATCH 66/93] Add more teardowns. 
We need to close the connections --- .../Add_Row_Number_Spec.enso | 42 ++++++++- .../Aggregate_Spec.enso | 3 + .../Column_Name_Edge_Cases_Spec.enso | 28 +++--- .../Conversion_Spec.enso | 86 ++++++++++++++++++- .../Common_Table_Operations/Core_Spec.enso | 73 ++++++++++++---- .../Cross_Tab_Spec.enso | 71 +++++++++------ .../Date_Time_Spec.enso | 74 ++++++++++------ .../Derived_Columns_Spec.enso | 64 ++++++++------ .../Distinct_Spec.enso | 20 ++++- .../Common_Table_Operations/Filter_Spec.enso | 80 ++++++++++------- .../Integration_Tests.enso | 36 +++++--- .../src/Common_Table_Operations/Map_Spec.enso | 29 ++++++- .../Missing_Values_Spec.enso | 24 ++++-- .../Order_By_Spec.enso | 23 +++-- .../Select_Columns_Spec.enso | 60 ++++++++++--- .../Take_Drop_Spec.enso | 20 ++--- .../Temp_Column_Spec.enso | 22 ++++- .../Transpose_Spec.enso | 20 ++++- .../src/Database/Codegen_Spec.enso | 4 +- .../Common/Default_Ordering_Spec.enso | 19 +++- .../Common/Names_Length_Limits_Spec.enso | 10 ++- .../src/Database/Transaction_Spec.enso | 6 ++ .../Types/Postgres_Type_Mapping_Spec.enso | 15 +++- .../Types/SQLite_Type_Mapping_Spec.enso | 6 ++ .../Table_Tests/src/Database/Upload_Spec.enso | 30 +++++++ 25 files changed, 660 insertions(+), 205 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso index 1879254490e8..c845ff0074ba 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso @@ -16,13 +16,32 @@ from project.Common_Table_Operations.Util import run_default_backend polyglot java import java.lang.Long as Java_Long +main = run_default_backend add_specs + +type Data + Value ~connection + + setup create_connection_fn = Data.Value <| + create_connection_fn Nothing + + teardown self = + self.connection.close + add_specs suite_builder setup = prefix = setup.prefix - table_builder = 
setup.table_builder materialize = setup.materialize + create_connection_fn = setup.create_connection_func suite_builder.group prefix+"Table.add_row_number (common)" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should rename existing column upon a name clash" <| t1 = table_builder [["X", ['a', 'b']], ["Y", ['c', 'd']], ["Z", [40, 20]]] t2 = t1.add_row_number name="Y" order_by=["X"] |> materialize |> _.order_by "X" @@ -107,6 +126,14 @@ add_specs suite_builder setup = t2.at "Row" . to_vector . should_equal [1, 2, 3] if setup.is_database.not then suite_builder.group prefix+"Table.add_row_number (in-memory specific)" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should add a row numbering column" <| t = table_builder [["X", ['a', 'b', 'a', 'a', 'c']]] t1 = t.add_row_number @@ -195,10 +222,17 @@ add_specs suite_builder setup = t1.add_row_number from=(max_long + 10) group_by=["Y"] order_by=["Z"] . 
should_fail_with Illegal_Argument if setup.is_database then suite_builder.group prefix+"Table.add_row_number (Database specific)" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "will use the primary key by default" <| - connection = setup.create_connection_func Nothing - src = table_builder [["X", [500, 400, 30, 1, 2]], ["Y", [10, 20, 30, 40, 50]]] connection=connection - db_table = src.select_into_database_table connection (Name_Generator.random_name "add-row-number-test-1") temporary=True primary_key=["X"] + src = table_builder [["X", [500, 400, 30, 1, 2]], ["Y", [10, 20, 30, 40, 50]]] + db_table = src.select_into_database_table data.connection (Name_Generator.random_name "add-row-number-test-1") temporary=True primary_key=["X"] t2 = db_table.add_row_number |> materialize |> _.order_by ["Y"] t2.at "Y" . to_vector . should_equal [10, 20, 30, 40, 50] diff --git a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso index c29abbe87450..f84fb7f230f3 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso @@ -61,6 +61,9 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.aggregate should summarize whole table" group_builder-> data = Data.setup create_connection_fn table_fn empty_table_fn + group_builder.teardown <| + data.teardown + group_builder.specify "should be able to count" <| grouped = data.table.aggregate [Count] materialized = materialize grouped diff --git a/test/Table_Tests/src/Common_Table_Operations/Column_Name_Edge_Cases_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Column_Name_Edge_Cases_Spec.enso index 93e01998d0ce..fb50ffd59285 100644 --- 
a/test/Table_Tests/src/Common_Table_Operations/Column_Name_Edge_Cases_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Column_Name_Edge_Cases_Spec.enso @@ -18,17 +18,25 @@ type Data setup create_connection_fn = Data.Value (create_connection_fn Nothing) + teardown self = + self.connection.close + add_specs suite_builder setup = - table_builder = setup.table_builder materialize = setup.materialize create_connection_fn = setup.create_connection_func is_case_sensitive = setup.test_selection.supports_case_sensitive_columns suite_builder.group setup.prefix+"Column Naming edge cases" group_builder-> data = Data.setup create_connection_fn + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "case insensitive name collisions - set" <| - t1 = table_builder [["X", [1]]] connection=data.connection + t1 = table_builder [["X", [1]]] Problems.assume_no_problems (t1.at "X" . rename "x") t2 = t1.set "[X] + 100" "x" case is_case_sensitive of @@ -60,7 +68,7 @@ add_specs suite_builder setup = t6.at "Right x" . to_vector . should_equal [101, 101, 101, 101] group_builder.specify "case insensitive name collisions - rename" <| - t1 = table_builder [["X", [1]], ["Y", [2]]] connection=data.connection + t1 = table_builder [["X", [1]], ["Y", [2]]] t2 = t1.rename_columns [["X", "A"], ["Y", "a"]] case is_case_sensitive of True -> @@ -80,7 +88,7 @@ add_specs suite_builder setup = Problems.expect_only_warning Duplicate_Output_Column_Names t3 group_builder.specify "case insensitive name collisions - aggregate" <| - t1 = table_builder [["X", [2, 1, 3, 2]]] connection=data.connection + t1 = table_builder [["X", [2, 1, 3, 2]]] t2 = t1.aggregate [Aggregate_Column.Maximum "X" "A", Aggregate_Column.Minimum "X" "a"] case is_case_sensitive of @@ -100,8 +108,8 @@ add_specs suite_builder setup = t3.at 1 . to_vector . 
should_equal [1] group_builder.specify "case insensitive name collisions - joins" <| - t1 = table_builder [["X", [1, 2]], ["a", [3, 4]]] connection=data.connection - t2 = table_builder [["X", [2, 1]], ["A", [5, 6]]] connection=data.connection + t1 = table_builder [["X", [1, 2]], ["a", [3, 4]]] + t2 = table_builder [["X", [2, 1]], ["A", [5, 6]]] t3 = t1.join t2 on="X" join_kind=Join_Kind.Inner case is_case_sensitive of @@ -127,7 +135,7 @@ add_specs suite_builder setup = t5.column_names . should_equal ["X", "a"] group_builder.specify "case insensitive name collisions - cross_tab" <| - t0 = table_builder [["X", ["a", "A", "b"]], ["Y", [4, 5, 6]]] connection=data.connection + t0 = table_builder [["X", ["a", "A", "b"]], ["Y", [4, 5, 6]]] t1 = t0.cross_tab group_by=[] name_column="X" values=[Aggregate_Column.First "Y"] . sort_columns case setup.is_database of # TODO remove this check once implemented @@ -145,7 +153,7 @@ add_specs suite_builder setup = t1.should_fail_with Clashing_Column_Name group_builder.specify "case insensitive name collisions - transpose" <| - t0 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] connection=data.connection + t0 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t1 = t0.transpose attribute_column_name="a" value_column_name="A" case setup.is_database of # TODO remove this check once implemented @@ -163,8 +171,8 @@ add_specs suite_builder setup = group_builder.specify "unicode-normalized-equality vs selecting columns" <| ## In Enso column 'ś' and 's\u0301' are the same entity. But in Databases, quite not necessarily. - t1 = table_builder [['ś', [1, 2]], ['X', ['a', 'b']]] connection=data.connection - t2 = table_builder [['s\u0301', [2, 1]], ['Y', ['x', 'y']]] connection=data.connection + t1 = table_builder [['ś', [1, 2]], ['X', ['a', 'b']]] + t2 = table_builder [['s\u0301', [2, 1]], ['Y', ['x', 'y']]] # The two representations of the same string just address the same column: t1.at 'ś' . to_vector . 
should_equal [1, 2] diff --git a/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso index eb9c937ce41b..dfa1addb184d 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso @@ -13,6 +13,7 @@ from project.Common_Table_Operations.Util import run_default_backend polyglot java import java.lang.Long as Java_Long +main = run_default_backend add_specs type My_Type Value x @@ -20,13 +21,32 @@ type My_Type to_text : Text to_text self = "{{{MY Type [x="+self.x.to_text+"] }}}" + +type Data + Value ~connection + + setup create_connection_fn = Data.Value <| + create_connection_fn Nothing + + teardown self = + self.connection.close + + add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder materialize = setup.materialize supports_dates = setup.test_selection.date_time + create_connection_fn = setup.create_connection_func supports_conversion_failure_reporting = setup.is_database.not suite_builder.group prefix+"Table/Column.cast - to text" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow to cast columns of various basic types to text" <| t = table_builder [["X", [1, 2, 3000]], ["Y", [True, False, True]], ["Z", [1.5, 0.125, -2.5]], ["W", ["a", "DEF", "a slightly longer text"]]] t2 = t.cast t.column_names Value_Type.Char @@ -123,6 +143,14 @@ add_specs suite_builder setup = r2.should_fail_with Illegal_Argument suite_builder.group prefix+"Table/Column.cast - numeric" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow to cast a boolean 
column to integer" <| t = table_builder [["X", [True, False, True]]] c = t.at "X" . cast Value_Type.Integer @@ -232,6 +260,14 @@ add_specs suite_builder setup = if supports_dates then suite_builder.group prefix+"Table/Column.cast - date/time" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow to get the Date part from a Date_Time" <| t = table_builder [["X", [Date_Time.new 2015 1 2 3 4 5, Date_Time.new 2023 12 31 23 56 59]]] c = t.at "X" . cast Value_Type.Date @@ -257,6 +293,14 @@ add_specs suite_builder setup = diff . should_equal expected_diff suite_builder.group prefix+"Table/Column.cast - checking invariants" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should report an error for unsupported conversions" <| t = table_builder [["X", [1, 2, 3]]] r1 = t.at "X" . cast Value_Type.Boolean @@ -469,6 +513,14 @@ add_specs suite_builder setup = r3.should_fail_with Illegal_Argument suite_builder.group prefix+"Simple variant of Table/Column.parse in all backends" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should be able to parse simple integers" <| t = table_builder [["X", ["42", "0", "-1"]]] @@ -540,6 +592,14 @@ add_specs suite_builder setup = r3.catch.criteria . 
should_equal ["Y"] if setup.is_database then suite_builder.group prefix+"Table/Column auto value type" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should report unsupported" <| t = table_builder [["X", [1, 2, 3]]] t.auto_value_types . should_fail_with Unsupported_Database_Operation @@ -547,17 +607,41 @@ add_specs suite_builder setup = # The in-memory functionality of `expand_column` is tested in test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso if setup.is_database then suite_builder.group prefix+"Table.expand_column" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should report unsupported" <| table = table_builder [["aaa", [1, 2]], ["bbb", [3, 4]], ["ccc", [5, 6]]] table.expand_column "bbb" . should_fail_with Unsupported_Database_Operation # The in-memory functionality of `expand_to_rows` is tested in test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso if setup.is_database then suite_builder.group prefix+"Table.expand_to_rows" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should report unsupported" <| table = table_builder [["aaa", [1, 2]], ["bbb", [3, 4]], ["ccc", [5, 6]]] table.expand_to_rows "bbb" . 
should_fail_with Unsupported_Database_Operation if setup.is_database.not then suite_builder.group prefix+"Table/Column auto value type" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow to narrow down types of a Mixed column" <| [True, False].each shrink_types-> mixer = My_Type.Value 1 diff --git a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso index 3ef01fa124b2..f935a453790a 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso @@ -17,9 +17,12 @@ from project.Common_Table_Operations.Util import run_default_backend main = run_default_backend add_specs type Data - Value ~table + Value ~data + + connection self = self.data.at 0 + table self = self.data.at 1 - setup create_connection_fn table_builder = + setup create_connection_fn table_builder = Data.Value <| connection = create_connection_fn Nothing table = col1 = ["foo", [1,2,3]] @@ -30,15 +33,26 @@ type Data col6 = ["ab.+123", [16,17,18]] col7 = ["abcd123", [19,20,21]] table_builder [col1, col2, col3, col4, col5, col6, col7] connection=connection - Data.Value table + [connection, table] + + teardown self = + self.connection.close + type Rows_Data - Value ~table + Value ~data - setup create_connection_fn table_builder = + connection self = self.data.at 0 + table self = self.data.at 1 + + setup create_connection_fn table_builder = Rows_Data.Value <| connection = create_connection_fn Nothing table = table_builder [["X", [1, 2, 3, 4]], ["Y", [5, 6, 7, 8]], ["Z", ["A", "B", "C", "D"]]] connection=connection - Rows_Data.Value table + [connection, table] + + teardown self = + self.connection.close + type Read_Data Value ~data @@ -53,15 +67,20 @@ type Read_Data t_small = table_builder [["X", 
(0.up_to 10)]] connection=connection [connection, t_big, t_small] + teardown self = + self.connection.close + add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder create_connection_fn = setup.create_connection_func suite_builder.group prefix+"Table.at" group_builder-> - data = Data.setup create_connection_fn table_builder - + data = Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + group_builder.specify "should allow selecting columns by name" <| column_1 = data.table.at "bar" column_1.name . should_equal "bar" @@ -101,7 +120,10 @@ add_specs suite_builder setup = data.table.at (Pair.new 1 2) . catch . to_display_text . should_equal "Illegal Argument: expected 'selector' to be either a Text or an Integer, but got Pair." suite_builder.group prefix+"Table.get" group_builder-> - data = Data.setup create_connection_fn table_builder + data = Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown group_builder.specify "should allow selecting columns by name" <| column_1 = data.table.get "bar" @@ -132,7 +154,13 @@ add_specs suite_builder setup = data.table.get (Pair.new 1 2) . catch . to_display_text . should_equal "Illegal Argument: expected 'selector' to be either a Text or an Integer, but got Pair." suite_builder.group prefix+"Table.set" group_builder-> - data = Data.setup create_connection_fn table_builder + data = Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection group_builder.specify "should allow adding a column" <| bar2 = data.table.get "bar" . 
rename "bar2" @@ -212,7 +240,13 @@ add_specs suite_builder setup = False -> r1.should_fail_with Illegal_Argument suite_builder.group prefix+"Table.column_names" group_builder-> - data = Data.setup create_connection_fn table_builder + data = Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection group_builder.specify "should return the names of all columns" <| data.table.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] @@ -227,13 +261,19 @@ add_specs suite_builder setup = table.at name . to_vector . should_equal [100+ix, 2, 3] suite_builder.group prefix+"Table.column_count" group_builder-> - data = Data.setup create_connection_fn table_builder + data = Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown group_builder.specify "should allow getting the column count" <| data.table.column_count . 
should_equal 7 suite_builder.group prefix+"Table.rows" group_builder-> - data = Rows_Data.setup create_connection_fn table_builder + data = Rows_Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown group_builder.specify "should allow to get a Vector of Table rows" <| rows = data.table.rows @@ -303,7 +343,10 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.read" group_builder-> - data = Read_Data.setup create_connection_fn table_builder + data = Read_Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown has_default_row_limit = setup.is_database diff --git a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso index 84304226cd90..46fc62b929b4 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso @@ -12,17 +12,40 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend +main = run_default_backend add_specs + +type Data + Value ~data + + connection self = self.data.at 0 + table self = self.data.at 1 + table2 self = self.data.at 2 + + setup create_connection_fn table_builder = Data.Value <| + connection = create_connection_fn Nothing + table = table_builder [["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] connection=connection + table2 = table_builder [["Group", ["A","B","A","B","A","B","A","B","A"]], ["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] connection=connection + [connection, table, table2] + + teardown self = + self.connection.close + add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func db_todo = if setup.is_database.not then Nothing else 
"Table.cross_tab is not implemented yet in Database." suite_builder.group prefix+"Table.cross_tab" pending=db_todo group_builder-> - table = table_builder [["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] - table2 = table_builder [["Group", ["A","B","A","B","A","B","A","B","A"]], ["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] + data = Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection group_builder.specify "should cross_tab counts by default using first column as names" <| - t1 = table.cross_tab [] "Key" + t1 = data.table.cross_tab [] "Key" t1.column_names . should_equal ["x", "y", "z"] t1.row_count . should_equal 1 t1.at "x" . to_vector . should_equal [4] @@ -30,7 +53,7 @@ add_specs suite_builder setup = t1.at "z" . to_vector . should_equal [2] group_builder.specify "should allow a different aggregate" <| - t1 = table.cross_tab [] "Key" values=[Sum "Value"] + t1 = data.table.cross_tab [] "Key" values=[Sum "Value"] t1.column_names . should_equal ["x", "y", "z"] t1.row_count . should_equal 1 t1.at "x" . to_vector . should_equal [10] @@ -38,7 +61,7 @@ add_specs suite_builder setup = t1.at "z" . to_vector . should_equal [17] group_builder.specify "should allow a custom expression for the aggregate" <| - t1 = table.cross_tab [] "Key" values=[Sum "[Value]*[Value]"] + t1 = data.table.cross_tab [] "Key" values=[Sum "[Value]*[Value]"] t1.column_names . should_equal ["x", "y", "z"] t1.row_count . should_equal 1 t1.at "x" . to_vector . should_equal [30] @@ -62,7 +85,7 @@ add_specs suite_builder setup = t2.at "z" . to_vector . should_equal [2] group_builder.specify "should allow a grouping" <| - t1 = table2.cross_tab ["Group"] "Key" + t1 = data.table2.cross_tab ["Group"] "Key" t1.column_names . should_equal ["Group", "x", "y", "z"] t1.row_count . 
should_equal 2 t1.at "Group" . to_vector . should_equal ["A", "B"] @@ -71,7 +94,7 @@ add_specs suite_builder setup = t1.at "z" . to_vector . should_equal [1, 1] group_builder.specify "should allow a grouping by Aggregate_Column" <| - t1 = table2.cross_tab [Group_By "Group"] "Key" + t1 = data.table2.cross_tab [Group_By "Group"] "Key" t1.column_names . should_equal ["Group", "x", "y", "z"] t1.row_count . should_equal 2 t1.at "Group" . to_vector . should_equal ["A", "B"] @@ -79,7 +102,7 @@ add_specs suite_builder setup = t1.at "y" . to_vector . should_equal [2, 1] t1.at "z" . to_vector . should_equal [1, 1] - table2.cross_tab [Sum "Group"] "Key" . should_fail_with Illegal_Argument + data.table2.cross_tab [Sum "Group"] "Key" . should_fail_with Illegal_Argument group_builder.specify "should allow a grouping by Aggregate_Colum, with some empty bins" <| table3 = table_builder [["Group", ["B","A","B","A","A"]], ["Key", ["x", "y", "y", "y", "z"]], ["Value", [4, 5, 6, 7, 8]]] @@ -92,7 +115,7 @@ add_specs suite_builder setup = t1.at "z" . to_vector . should_equal [1, 0] group_builder.specify "should allow a grouping by text" <| - t1 = table2.cross_tab "Group" "Key" + t1 = data.table2.cross_tab "Group" "Key" t1.column_names . should_equal ["Group", "x", "y", "z"] t1.row_count . should_equal 2 t1.at "Group" . to_vector . should_equal ["A", "B"] @@ -100,11 +123,11 @@ add_specs suite_builder setup = t1.at "y" . to_vector . should_equal [2, 1] t1.at "z" . to_vector . should_equal [1, 1] - t2 = table2.cross_tab ["Group", "Group"] "Key" + t2 = data.table2.cross_tab ["Group", "Group"] "Key" t2.column_names . should_equal ["Group", "x", "y", "z"] group_builder.specify "should allow multiple values aggregates" <| - t1 = table.cross_tab [] "Key" values=[Count, Sum "Value"] + t1 = data.table.cross_tab [] "Key" values=[Count, Sum "Value"] t1.column_names . should_equal ["x Count", "x Sum", "y Count", "y Sum", "z Count", "z Sum"] t1.row_count . should_equal 1 t1.at "x Count" . to_vector . 
should_equal [4] @@ -115,55 +138,55 @@ add_specs suite_builder setup = t1.at "z Sum" . to_vector . should_equal [17] group_builder.specify "should fail if name_column is not found" <| - err1 = table.cross_tab [] "Name" + err1 = data.table.cross_tab [] "Name" err1.should_fail_with Missing_Input_Columns err1.catch.criteria . should_equal ["Name"] - err2 = table.cross_tab [] 42 + err2 = data.table.cross_tab [] 42 err2.should_fail_with Missing_Input_Columns err2.catch.criteria . should_equal [42] group_builder.specify "should fail if group-by contains missing columns" <| - err1 = table2.cross_tab ["Group", "Nonexistent Group", "OTHER"] "Key" + err1 = data.table2.cross_tab ["Group", "Nonexistent Group", "OTHER"] "Key" err1.should_fail_with Missing_Input_Columns err1.catch.criteria . should_equal ["Nonexistent Group", "OTHER"] - err2 = table2.cross_tab [0, 42] "Key" + err2 = data.table2.cross_tab [0, 42] "Key" err2.should_fail_with Missing_Input_Columns err2.catch.criteria . should_equal [42] group_builder.specify "should fail if aggregate values contain missing columns" <| - err1 = table.cross_tab [] "Key" values=[Count, Sum "Nonexistent Value", Sum "Value", Sum "OTHER"] + err1 = data.table.cross_tab [] "Key" values=[Count, Sum "Nonexistent Value", Sum "Value", Sum "OTHER"] err1.should_fail_with Invalid_Aggregate_Column err1.catch.name . should_equal "Nonexistent Value" - err2 = table.cross_tab [] "Key" values=[Count, Sum "Nonexistent Value", Sum "Value", Sum 42] + err2 = data.table.cross_tab [] "Key" values=[Count, Sum "Nonexistent Value", Sum "Value", Sum 42] err2.should_fail_with Missing_Input_Columns err2.catch.criteria . should_equal [42] group_builder.specify "should fail if aggregate values contain invalid expressions" <| - err1 = table.cross_tab [] "Key" values=[Sum "[MISSING]*10"] + err1 = data.table.cross_tab [] "Key" values=[Sum "[MISSING]*10"] err1.should_fail_with Invalid_Aggregate_Column err1.catch.name . 
should_equal "[MISSING]*10" err1.catch.expression_error . should_equal (No_Such_Column.Error "MISSING") - err2 = table.cross_tab [] "Key" values=[Sum "[[["] + err2 = data.table.cross_tab [] "Key" values=[Sum "[[["] err2.should_fail_with Invalid_Aggregate_Column err2.catch.name . should_equal "[[[" err2.catch.expression_error . should_be_a Expression_Error.Syntax_Error group_builder.specify "should not allow Group_By for values" <| - err1 = table.cross_tab [] "Key" values=[Count, Group_By "Value"] on_problems=Problem_Behavior.Ignore + err1 = data.table.cross_tab [] "Key" values=[Count, Group_By "Value"] on_problems=Problem_Behavior.Ignore err1.should_fail_with Illegal_Argument group_builder.specify "should gracefully handle duplicate aggregate names" <| - action = table.cross_tab [] "Key" values=[Count new_name="Agg1", Sum "Value" new_name="Agg1"] on_problems=_ + action = data.table.cross_tab [] "Key" values=[Count new_name="Agg1", Sum "Value" new_name="Agg1"] on_problems=_ tester table = table.column_names . should_equal ["x Agg1", "x Agg1 1", "y Agg1", "y Agg1 1", "z Agg1", "z Agg1 1"] problems = [Duplicate_Output_Column_Names.Error ["x Agg1", "y Agg1", "z Agg1"]] Problems.test_problem_handling action problems tester - table3 = table2.rename_columns (Map.from_vector [["Group", "x"]]) + table3 = data.table2.rename_columns (Map.from_vector [["Group", "x"]]) action3 = table3.cross_tab ["x"] "Key" on_problems=_ tester3 table = table.column_names . should_equal ["x", "x 1", "y", "z"] @@ -240,7 +263,7 @@ add_specs suite_builder setup = r1.should_fail_with Invalid_Column_Names r1.catch.to_display_text . should_contain "cannot contain the NUL character" - r2 = table2.cross_tab [] "Key" values=[Average "Value" new_name='x\0'] + r2 = data.table2.cross_tab [] "Key" values=[Average "Value" new_name='x\0'] r2.print r2.should_fail_with Invalid_Column_Names r2.catch.to_display_text . 
should_contain "cannot contain the NUL character" diff --git a/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso index 3f1b3945c22f..452bed13ffb1 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso @@ -27,15 +27,25 @@ type Data datetimes = table_builder [["A", [Date_Time.new 2020 12 31 23 59 59 millisecond=567 nanosecond=123, Date_Time.new 2024 2 29 2 30 44 nanosecond=1002000, Date_Time.new 1990 1 1 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]] connection=connection [connection, dates, times, datetimes] + teardown self = + self.connection.close + add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder create_connection_fn = setup.create_connection_func pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend." suite_builder.group prefix+"Date-Time support" pending=pending_datetime group_builder-> + data = Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow to create Table with Date columns and round-trip them back to Enso" <| d = Date.new 2020 10 24 table = table_builder [["A", [d]], ["X", [123]]] @@ -66,7 +76,13 @@ add_specs suite_builder setup = table.at "X" . to_vector . 
should_equal xs suite_builder.group prefix+"Date-Time operations" pending=pending_datetime group_builder-> - data = Data.setup create_connection_fn table_builder + data = Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection group_builder.specify "should allow to get the year/month/day of a Date" <| t = data.dates @@ -98,7 +114,7 @@ add_specs suite_builder setup = ((a.day) == (t.at "X")).to_vector . should_equal [False, True, True, Nothing] group_builder.specify "should allow to evaluate expressions with year/month/day" <| - t = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing]], ["X", [0, 2, 1, 100]], ["B", [Date_Time.new 2020 10 31 23 59 59, Date_Time.new 2024 4 29 2 30 44, Date_Time.new 1990 10 1 0 0 0, Nothing]]] connection=data.connection + t = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing]], ["X", [0, 2, 1, 100]], ["B", [Date_Time.new 2020 10 31 23 59 59, Date_Time.new 2024 4 29 2 30 44, Date_Time.new 1990 10 1 0 0 0, Nothing]]] c = t.evaluate_expression "year([A]) + [X] + day([A]) * month([B])" Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <| c.value_type.is_integer.should_be_true @@ -174,7 +190,7 @@ add_specs suite_builder setup = group_builder.specify "should allow to compare dates" <| - t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date.new 2021 12 5]]] connection=data.connection + t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date.new 2021 12 5]]] [(<), (<=), (>), (>=), (==), (!=)].each op-> op (t.at "X") (t.at "Y") . value_type . should_equal Value_Type.Boolean @@ -182,7 +198,7 @@ add_specs suite_builder setup = op (t.at "X") (Date.new 2021 12 4) . to_vector . 
should_succeed group_builder.specify "should allow to compare date-times" <| - t = table_builder [["X", [Date_Time.new 2021 12 3 12 30 0]], ["Y", [Date_Time.new 2021 12 5 12 30 0]]] connection=data.connection + t = table_builder [["X", [Date_Time.new 2021 12 3 12 30 0]], ["Y", [Date_Time.new 2021 12 5 12 30 0]]] [(<), (<=), (>), (>=), (==), (!=)].each op-> op (t.at "X") (t.at "Y") . value_type . should_equal Value_Type.Boolean @@ -190,7 +206,7 @@ add_specs suite_builder setup = op (t.at "X") (Date_Time.new 2021 12 4 12 30 0) . to_vector . should_succeed group_builder.specify "should allow to compare time-of-day" <| - t = table_builder [["X", [Time_Of_Day.new 12 30 0]], ["Y", [Time_Of_Day.new 12 30 1]]] connection=data.connection + t = table_builder [["X", [Time_Of_Day.new 12 30 0]], ["Y", [Time_Of_Day.new 12 30 1]]] [(<), (<=), (>), (>=), (==), (!=)].each op-> op (t.at "X") (t.at "Y") . value_type . should_equal Value_Type.Boolean @@ -198,7 +214,7 @@ add_specs suite_builder setup = op (t.at "X") (Time_Of_Day.new 12 30 0) . to_vector . should_succeed group_builder.specify "should not allow to mix types in ordering comparisons" <| - t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]] connection=data.connection + t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]] [(<), (<=), (>), (>=)].each op-> op (t.at "X") (t.at "Y") . should_fail_with Invalid_Value_Type @@ -206,14 +222,14 @@ add_specs suite_builder setup = if setup.test_selection.supports_time_duration then group_builder.specify "should allow to subtract two Dates" <| - t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] connection=data.connection + t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] ((t.at "Y") - (t.at "X")) . to_vector . should_equal [Period.new months=1 days=2] ((t.at "Y") - (Date.new 2020 12 5)) . 
to_vector . should_equal [Period.new years=1] group_builder.specify "should allow to subtract two Date_Times" <| dx = Date_Time.new 2021 11 30 10 15 0 - t = table_builder [["X", [dx]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]] connection=data.connection + t = table_builder [["X", [dx]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]] hours = 2 + 24 * 5 diff = Duration.new hours=hours minutes=15 seconds=20 @@ -221,16 +237,16 @@ add_specs suite_builder setup = ((t.at "Y") - dx) . to_vector . should_equal [diff] group_builder.specify "should allow to subtract two Time_Of_Days" <| - t = table_builder [["X", [Time_Of_Day.new 10 15 0, Time_Of_Day.new 1 0 0]], ["Y", [Time_Of_Day.new 12 30 20, Time_Of_Day.new 0 0 0]]] connection=data.connection + t = table_builder [["X", [Time_Of_Day.new 10 15 0, Time_Of_Day.new 1 0 0]], ["Y", [Time_Of_Day.new 12 30 20, Time_Of_Day.new 0 0 0]]] ((t.at "Y") - (t.at "X")) . to_vector . should_equal [Duration.new hours=2 minutes=15 seconds=20, Duration.new hours=(-1) minutes=0 seconds=0] ((t.at "Y") - (Time_Of_Day.new 0 0 0)) . to_vector . 
should_equal [Duration.new hours=12 minutes=30 seconds=20, Duration.zero] if setup.test_selection.supports_time_duration.not then group_builder.specify "should report unsupported operation for subtracting date/time" <| - t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] connection=data.connection - t2 = table_builder [["X", [Date_Time.new 2021 11 3 10 15 0]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]] connection=data.connection - t3 = table_builder [["X", [Time_Of_Day.new 10 15 0, Time_Of_Day.new 1 0 0]], ["Y", [Time_Of_Day.new 12 30 20, Time_Of_Day.new 0 0 0]]] connection=data.connection + t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] + t2 = table_builder [["X", [Date_Time.new 2021 11 3 10 15 0]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]] + t3 = table_builder [["X", [Time_Of_Day.new 10 15 0, Time_Of_Day.new 1 0 0]], ["Y", [Time_Of_Day.new 12 30 20, Time_Of_Day.new 0 0 0]]] ((t1.at "Y") - (t1.at "X")) . should_fail_with Unsupported_Database_Operation ((t1.at "Y") - (Date.new 2020 12 5)) . should_fail_with Unsupported_Database_Operation @@ -240,7 +256,7 @@ add_specs suite_builder setup = ((t3.at "Y") - (Time_Of_Day.new 0 0 0)) . should_fail_with Unsupported_Database_Operation group_builder.specify "should report an Invalid_Value_Type error when subtracting mixed date/time types" <| - t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]] connection=data.connection + t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]] ((t.at "Y") - (t.at "X")) . should_fail_with Invalid_Value_Type ((t.at "Y") - (Time_Of_Day.new 12 30 0)) . should_fail_with Invalid_Value_Type @@ -250,7 +266,7 @@ add_specs suite_builder setup = ((t.at "Z") - (Date.new 2021 11 3)) . 
should_fail_with Invalid_Value_Type group_builder.specify "should allow computing a SQL-like difference" <| - t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] connection=data.connection + t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]] (t1.at "X").date_diff (t1.at "Y") Date_Period.Day . to_vector . should_equal [32] (t1.at "Y").date_diff (t1.at "X") Date_Period.Day . to_vector . should_equal [-32] @@ -280,7 +296,7 @@ add_specs suite_builder setup = (t1.at "X").date_diff (t1.at "Y") Time_Period.Hour . should_fail_with Illegal_Argument zone = Time_Zone.parse "Europe/Warsaw" - t2 = table_builder [["X", [Date_Time.new 2021 11 3 10 15 0 zone=zone]], ["Y", [Date_Time.new 2021 12 5 12 30 20 zone=zone]]] connection=data.connection + t2 = table_builder [["X", [Date_Time.new 2021 11 3 10 15 0 zone=zone]], ["Y", [Date_Time.new 2021 12 5 12 30 20 zone=zone]]] (t2.at "X").date_diff (t2.at "Y") Date_Period.Day . to_vector . should_equal [32] (t2.at "Y").date_diff (t2.at "X") Date_Period.Day . to_vector . should_equal [-32] @@ -317,7 +333,7 @@ add_specs suite_builder setup = (t2.at "X").date_diff (t2.at "Y") Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation (t2.at "X").date_diff (Date_Time.new 2021 11 3 10 15 30 123 456 789 zone=zone) Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation - t3 = table_builder [["X", [Time_Of_Day.new 10 15 0]], ["Y", [Time_Of_Day.new 12 30 20]]] connection=data.connection + t3 = table_builder [["X", [Time_Of_Day.new 10 15 0]], ["Y", [Time_Of_Day.new 12 30 20]]] # There is no default period: (t3.at "X").date_diff (t3.at "Y") . should_be_a Function @@ -350,7 +366,7 @@ add_specs suite_builder setup = (t3.at "X").date_diff (Time_Of_Day.new 10 15 12 34 56 78) Time_Period.Nanosecond . 
should_fail_with Unsupported_Database_Operation group_builder.specify "date_diff should return integers" <| - t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]] connection=data.connection + t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]] time_periods = [Time_Period.Hour, Time_Period.Minute, Time_Period.Second] date_periods = [Date_Period.Day, Date_Period.Week, Date_Period.Month, Date_Period.Quarter, Date_Period.Year] @@ -365,7 +381,7 @@ add_specs suite_builder setup = (t.at "Z").date_diff (Date_Time.new 2021 12 05 01 02) p . value_type . is_integer . should_be_true group_builder.specify "should not allow mixing types in date_diff" <| - t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]] connection=data.connection + t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]] (t.at "X").date_diff (t.at "Y") Date_Period.Day . should_fail_with Invalid_Value_Type (t.at "Z").date_diff (t.at "X") Date_Period.Day . should_fail_with Invalid_Value_Type (t.at "Y").date_diff (t.at "Z") Time_Period.Hour . should_fail_with Invalid_Value_Type @@ -377,7 +393,7 @@ add_specs suite_builder setup = (t.at "Z").date_diff (Time_Of_Day.new 12 30 20) Time_Period.Hour . should_fail_with Invalid_Value_Type group_builder.specify "should allow an SQL-like shift" <| - t1 = table_builder [["X", [Date.new 2021 01 31, Date.new 2021 01 01, Date.new 2021 12 31]], ["Y", [5, -1, 0]]] connection=data.connection + t1 = table_builder [["X", [Date.new 2021 01 31, Date.new 2021 01 01, Date.new 2021 12 31]], ["Y", [5, -1, 0]]] (t1.at "X").date_add (t1.at "Y") Date_Period.Day . to_vector . should_equal [Date.new 2021 02 05, Date.new 2020 12 31, Date.new 2021 12 31] (t1.at "X").date_add -1 Date_Period.Day . 
to_vector . should_equal [Date.new 2021 01 30, Date.new 2020 12 31, Date.new 2021 12 30] (t1.at "X").date_add (t1.at "Y") Date_Period.Month . to_vector . should_equal [Date.new 2021 06 30, Date.new 2020 12 01, Date.new 2021 12 31] @@ -394,7 +410,7 @@ add_specs suite_builder setup = # Will accept Time_Period.Day as alias of Date_Period.Day (t1.at "X").date_add 1 Time_Period.Day . to_vector . should_equal [Date.new 2021 02 01, Date.new 2021 01 02, Date.new 2022 01 01] - t2 = table_builder [["X", [Date_Time.new 2021 01 31 12 30 0, Date_Time.new 2021 01 01 12 30 0, Date_Time.new 2021 12 31 12 30 0]], ["Y", [5, -1, 0]]] connection=data.connection + t2 = table_builder [["X", [Date_Time.new 2021 01 31 12 30 0, Date_Time.new 2021 01 01 12 30 0, Date_Time.new 2021 12 31 12 30 0]], ["Y", [5, -1, 0]]] (t2.at "X").date_add (t2.at "Y") Date_Period.Day . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 02 05 12 30 0, Date_Time.new 2020 12 31 12 30 0, Date_Time.new 2021 12 31 12 30 0] (t2.at "X").date_add -1 Time_Period.Day . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 30 12 30 0, Date_Time.new 2020 12 31 12 30 0, Date_Time.new 2021 12 30 12 30 0] (t2.at "X").date_add (t2.at "Y") Date_Period.Month . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 06 30 12 30 0, Date_Time.new 2020 12 01 12 30 0, Date_Time.new 2021 12 31 12 30 0] @@ -416,7 +432,7 @@ add_specs suite_builder setup = False -> (t2.at "X").date_add 1 Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation - t3 = table_builder [["X", [Time_Of_Day.new 12 30 0, Time_Of_Day.new 23 45 0, Time_Of_Day.new 1 30 0]], ["Y", [5, -1, 0]]] connection=data.connection + t3 = table_builder [["X", [Time_Of_Day.new 12 30 0, Time_Of_Day.new 23 45 0, Time_Of_Day.new 1 30 0]], ["Y", [5, -1, 0]]] (t3.at "X").date_add (t3.at "Y") Time_Period.Hour . to_vector . should_equal [Time_Of_Day.new 17 30 0, Time_Of_Day.new 22 45 0, Time_Of_Day.new 1 30 0] (t3.at "X").date_add 1 Time_Period.Hour . 
to_vector . should_equal [Time_Of_Day.new 13 30 0, Time_Of_Day.new 0 45 0, Time_Of_Day.new 2 30 0] @@ -443,13 +459,13 @@ add_specs suite_builder setup = (t3.at "X").date_add (t3.at "Y") . to_vector . should_equal [Time_Of_Day.new 17 30 0, Time_Of_Day.new 22 45 0, Time_Of_Day.new 1 30 0] group_builder.specify "should check shift_amount type in date_add" <| - t = table_builder [["X", [Date.new 2021 01 31]]] connection=data.connection + t = table_builder [["X", [Date.new 2021 01 31]]] t.at "X" . date_add "text" Date_Period.Day . should_fail_with Invalid_Value_Type group_builder.specify "date_diff and date_add should work correctly with DST" pending="May be failing on some Database configurations. ToDo: investigate - https://github.com/enso-org/enso/issues/7326" <| zone = Time_Zone.parse "Europe/Warsaw" dt1 = Date_Time.new 2023 03 26 00 30 00 zone=zone - t = table_builder [["X", [dt1]]] connection=data.connection + t = table_builder [["X", [dt1]]] x = t.at "X" # +24h will shift 1 day and 1 hour, because they 26th of March has only 23 hours within it @@ -477,7 +493,7 @@ add_specs suite_builder setup = dt3 = Date_Time.new 2023 03 28 01 30 00 zone=zone dt4 = Date_Time.new 2023 03 29 00 30 00 zone=zone - t2 = table_builder [["X", [dt3]]] connection=data.connection + t2 = table_builder [["X", [dt3]]] # No DST switch here, so all backends agree that 0 days elapsed in the 23 hours. (t2.at "X").date_diff dt4 Date_Period.Day . to_vector . should_equal [0] (t2.at "X").date_diff dt4 Time_Period.Day . to_vector . 
should_equal [0] @@ -485,6 +501,14 @@ add_specs suite_builder setup = if setup.test_selection.date_time.not then suite_builder.group prefix+"partial Date-Time support" group_builder-> + data = Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "will fail when uploading a Table containing Dates" <| d = Date.new 2020 10 24 table = table_builder [["A", [d]], ["X", [123]]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso index b9cccc09b105..78501361fa8e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso @@ -19,17 +19,25 @@ type Data setup create_connection_fn = Data.Value (create_connection_fn Nothing) + teardown self = + self.connection.close + add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder create_connection_fn = setup.create_connection_func pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend." suite_builder.group prefix+"Table.set with Column_Operation" group_builder-> data = Data.setup create_connection_fn + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "arithmetics" <| - t = table_builder [["A", [1, 2]], ["B", [10, 40]]] connection=data.connection + t = table_builder [["A", [1, 2]], ["B", [10, 40]]] t.set (Column_Operation.Add (Column_Ref.Name "A") (Column_Ref.Name "B")) "C" . at "C" . to_vector . should_equal [11, 42] t.set (Column_Operation.Add 100 (Column_Ref.Name "B")) "C" . at "C" . to_vector . should_equal [110, 140] t.set (Column_Operation.Add (Column_Ref.Name "A") 100) "C" . at "C" . 
to_vector . should_equal [101, 102] @@ -50,7 +58,7 @@ add_specs suite_builder setup = t.set (Column_Operation.Divide 1 (Column_Ref.Name "A")) "C" . at "C" . to_vector . should_equal [1, 0.5] t.set (Column_Operation.Divide 1 2) "C" . at "C" . to_vector . should_equal [0.5, 0.5] - t2 = table_builder [["A", [23, 42]], ["B", [10, 3]]] connection=data.connection + t2 = table_builder [["A", [23, 42]], ["B", [10, 3]]] t2.set (Column_Operation.Mod (Column_Ref.Name "A") (Column_Ref.Name "B")) "C" . at "C" . to_vector . should_equal [3, 0] t2.set (Column_Operation.Mod (Column_Ref.Name "A") 10) "C" . at "C" . to_vector . should_equal [3, 2] t2.set (Column_Operation.Mod 7 5) "C" . at "C" . to_vector . should_equal [2, 2] @@ -64,7 +72,7 @@ add_specs suite_builder setup = t.set (Column_Operation.Add 42 "y") . should_fail_with Illegal_Argument group_builder.specify "rounding" <| - t = table_builder [["A", [1.13333, 122.74463, 32.52424, -12.7]]] connection=data.connection + t = table_builder [["A", [1.13333, 122.74463, 32.52424, -12.7]]] t.set (Column_Operation.Round (Column_Ref.Name "A")) "Z" . at "Z" . to_vector . should_equal [1, 123, 33, -13] t.set (Column_Operation.Round (Column_Ref.Name "A") precision=1) "Z" . at "Z" . to_vector . should_equal [1.1, 122.7, 32.5, -12.7] t.set (Column_Operation.Round (Column_Ref.Name "A") precision=-1) "Z" . at "Z" . to_vector . 
should_equal [0, 120, 30, -10] @@ -77,7 +85,7 @@ add_specs suite_builder setup = Test.expect_panic Type_Error <| t.set (Column_Operation.Truncate "1.23") group_builder.specify "date/time" pending=pending_datetime <| - t = table_builder [["A", [Date_Time.new 2023 1 12 12 45, Date_Time.new 2020 5 12 1 45]], ["B", [Date_Time.new 2023 1 15 18 45, Date_Time.new 2020 6 12 22 20]], ["x", [1, 3]]] connection=data.connection + t = table_builder [["A", [Date_Time.new 2023 1 12 12 45, Date_Time.new 2020 5 12 1 45]], ["B", [Date_Time.new 2023 1 15 18 45, Date_Time.new 2020 6 12 22 20]], ["x", [1, 3]]] # TODO ticket for truncate for DB if setup.is_database.not then @@ -92,7 +100,7 @@ add_specs suite_builder setup = t.set (Column_Operation.Date_Part (Column_Ref.Name "A") Date_Period.Year) "Z" . at "Z" . to_vector . should_equal [2023, 2020] t.set (Column_Operation.Date_Part (Column_Ref.Name "A") Time_Period.Minute) "Z" . at "Z" . to_vector . should_equal [45, 45] - t2 = table_builder [["C", [Date.new 2002 12 10, Date.new 2005 01 01]], ["D", [Time_Of_Day.new 12 45, Time_Of_Day.new 01 01]]] connection=data.connection + t2 = table_builder [["C", [Date.new 2002 12 10, Date.new 2005 01 01]], ["D", [Time_Of_Day.new 12 45, Time_Of_Day.new 01 01]]] t2.set (Column_Operation.Date_Add (Column_Ref.Name "C") 5 Date_Period.Month) "Z" . at "Z" . to_vector . should_equal [Date.new 2003 5 10, Date.new 2005 6 01] t2.set (Column_Operation.Date_Add (Column_Ref.Name "D") 15 Time_Period.Hour) "Z" . at "Z" . to_vector . should_equal [Time_Of_Day.new 03 45, Time_Of_Day.new 16 01] @@ -109,7 +117,7 @@ add_specs suite_builder setup = Test.expect_panic Type_Error <| t2.set (Column_Operation.Date_Diff 42 "x" Date_Period.Year) group_builder.specify "boolean" <| - t = table_builder [["A", [True, False]], ["T", [True, True]]] connection=data.connection + t = table_builder [["A", [True, False]], ["T", [True, True]]] t.set (Column_Operation.And (Column_Ref.Name "A") (Column_Ref.Name "T")) "Z" . at "Z" . 
to_vector . should_equal [True, False] t.set (Column_Operation.And (Column_Ref.Name "A") False) "Z" . at "Z" . to_vector . should_equal [False, False] @@ -126,14 +134,14 @@ add_specs suite_builder setup = Test.expect_panic_with (t.set (Column_Operation.Or (Column_Ref.Name "A") "x")) Type_Error group_builder.specify "if" <| - t = table_builder [["A", [1, 100]], ["B", [10, 40]], ["C", [23, 55]]] connection=data.connection + t = table_builder [["A", [1, 100]], ["B", [10, 40]], ["C", [23, 55]]] t.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than=(Column_Ref.Name "B"))) "Z" . at "Z" . to_vector . should_equal [False, True] t.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than=20) "T" "F") "Z" . at "Z" . to_vector . should_equal ["F", "T"] t.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Less than=20) (Column_Ref.Name "B") (Column_Ref.Name "C")) "Z" . at "Z" . to_vector . should_equal [10, 55] t.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than="X") "T" "F") . should_fail_with Invalid_Value_Type - t2 = table_builder [["A", ["a", "c"]], ["B", ["c", "b"]], ["C", [23, 55]]] connection=data.connection + t2 = table_builder [["A", ["a", "c"]], ["B", ["c", "b"]], ["C", [23, 55]]] t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than=(Column_Ref.Name "B"))) "Z" . at "Z" . to_vector . should_equal [False, True] t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than=(Column_Ref.Name "B")) (Column_Ref.Name "C") 0) "Z" . at "Z" . to_vector . should_equal [0, 55] t2.set (Column_Operation.If "A" (Filter_Condition.Greater than="B") (Column_Ref.Name "C") 0) "Z" . at "Z" . to_vector . should_equal [0, 0] @@ -146,7 +154,7 @@ add_specs suite_builder setup = # Passing a column does not work row-by-row, but looks at whole column contents. t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Is_In (t2.at "B")) "TT" "FF") "Z" . 
at "Z" . to_vector . should_equal ["FF", "TT"] - t3 = table_builder [["x", ["e", "e", "a"]]] connection=data.connection + t3 = table_builder [["x", ["e", "e", "a"]]] t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Is_In (t3.at "x")) "TT" "FF") "Z" . at "Z" . to_vector . should_equal ["TT", "FF"] # Thus, passing a Column_Ref into Is_In/Not_In is not allowed as it would be confusing. @@ -155,7 +163,7 @@ add_specs suite_builder setup = t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Not_In [Column_Ref.Name "B", "X"]) "TT" "FF") . should_fail_with Illegal_Argument group_builder.specify "text" <| - t = table_builder [["A", [" a ", "b"]], ["B", ["c", " d "]]] connection=data.connection + t = table_builder [["A", [" a ", "b"]], ["B", ["c", " d "]]] t.set (Column_Operation.Trim (Column_Ref.Name "A")) "Z" . at "Z" . to_vector . should_equal ["a", "b"] t.set (Column_Operation.Trim (Column_Ref.Name "A") Location.End) "Z" . at "Z" . to_vector . should_equal [" a", "b"] @@ -168,11 +176,11 @@ add_specs suite_builder setup = t.set (Column_Operation.Add (Column_Ref.Name "A") "!") "Z" . at "Z" . to_vector . should_equal [" a !", "b!"] t.set (Column_Operation.Add "O" "!") "Z" . at "Z" . to_vector . should_equal ["O!", "O!"] - t2 = table_builder [["A", [42]]] connection=data.connection + t2 = table_builder [["A", [42]]] t2.set (Column_Operation.Trim (Column_Ref.Name "A")) . should_fail_with Invalid_Value_Type group_builder.specify "min/max" <| - t = table_builder [["A", [1, 20]], ["B", [10, 2]]] connection=data.connection + t = table_builder [["A", [1, 20]], ["B", [10, 2]]] t.set (Column_Operation.Min (Column_Ref.Name "A") (Column_Ref.Name "B")) "Z" . at "Z" . to_vector . should_equal [1, 2] t.set (Column_Operation.Min (Column_Ref.Name "A") 5) "Z" . at "Z" . to_vector . should_equal [1, 5] @@ -183,7 +191,7 @@ add_specs suite_builder setup = t.set (Column_Operation.Max 2 5) "Z" . at "Z" . to_vector . 
should_equal [5, 5] t.set (Column_Operation.Min 2 5) "Z" . at "Z" . to_vector . should_equal [2, 2] - t2 = table_builder [["A", ["aardvark", "zebra"]], ["B", ["cat", "dog"]], ["x", [1, 20]]] connection=data.connection + t2 = table_builder [["A", ["aardvark", "zebra"]], ["B", ["cat", "dog"]], ["x", [1, 20]]] t2.set (Column_Operation.Min (Column_Ref.Name "A") (Column_Ref.Name "B")) "Z" . at "Z" . to_vector . should_equal ["aardvark", "dog"] t2.set (Column_Operation.Max (Column_Ref.Name "A") (Column_Ref.Name "B")) "Z" . at "Z" . to_vector . should_equal ["cat", "zebra"] t2.set (Column_Operation.Min (Column_Ref.Name "A") "animal") "Z" . at "Z" . to_vector . should_equal ["aardvark", "animal"] @@ -195,16 +203,16 @@ add_specs suite_builder setup = t2.set (Column_Operation.Min (Column_Ref.Name "x") (Column_Ref.Name "A")) . should_fail_with Invalid_Value_Type if pending_datetime.is_nothing then - t3 = table_builder [["A", [Date.new 2002 12 10, Date.new 2005 01 01]]] connection=data.connection + t3 = table_builder [["A", [Date.new 2002 12 10, Date.new 2005 01 01]]] t3.set (Column_Operation.Min (Column_Ref.Name "A") (Date.new 2003)) "Z" . at "Z" . to_vector . should_equal [Date.new 2002 12 10, Date.new 2003 01 01] t3.set (Column_Operation.Max (Column_Ref.Name "A") (Date.new 2003)) "Z" . at "Z" . to_vector . should_equal [Date.new 2003 01 01, Date.new 2005 01 01] group_builder.specify "allows also indexing columns numerically" <| - t = table_builder [["X", [1, 2]], ["Y", [3, 4]]] connection=data.connection + t = table_builder [["X", [1, 2]], ["Y", [3, 4]]] t.set (Column_Operation.Add (Column_Ref.Index 0) (Column_Ref.Index 1)) "Z" . at "Z" . to_vector . should_equal [4, 6] group_builder.specify "will forward column resolution errors" <| - t = table_builder [["X", [1, 2]], ["Y", [3, 4]]] connection=data.connection + t = table_builder [["X", [1, 2]], ["Y", [3, 4]]] t.set (Column_Operation.Add (Column_Ref.Name "X") (Column_Ref.Name "Z")) . 
should_fail_with No_Such_Column t.set (Column_Operation.Not (Column_Ref.Name "zzz")) . should_fail_with No_Such_Column t.set (Column_Operation.Not (Column_Ref.Index 42)) . should_fail_with Index_Out_Of_Bounds @@ -212,8 +220,14 @@ add_specs suite_builder setup = suite_builder.group prefix+"Unique derived column names" group_builder-> data = Data.setup create_connection_fn + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "Should not disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add_Or_Update" <| - t = table_builder [["X", [1, 2, 3]]] connection=data.connection + t = table_builder [["X", [1, 2, 3]]] column_op = Column_Operation.Power 2 (Column_Ref.Name "X") t2 = t.set column_op . set column_op t2.column_names . should_equal ["X", "[2] ^ [X]"] @@ -221,7 +235,7 @@ add_specs suite_builder setup = t2.at "[2] ^ [X]" . to_vector . should_equal [2, 4, 8] group_builder.specify "Should disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add" <| - t = table_builder [["X", [1, 2, 3]]] connection=data.connection + t = table_builder [["X", [1, 2, 3]]] column_op = Column_Operation.Power 2 (Column_Ref.Name "X") t2 = t.set column_op set_mode=Set_Mode.Add . set column_op set_mode=Set_Mode.Add t2.column_names . should_equal ["X", "[2] ^ [X]", "[2] ^ [X] 1"] @@ -230,7 +244,7 @@ add_specs suite_builder setup = t2.at "[2] ^ [X] 1" . to_vector . should_equal [2, 4, 8] group_builder.specify "Should disambiguate two derived columns that would otherwise have had the same name, within the same expression" <| - t = table_builder [["X", [1, 2, 3]]] connection=data.connection + t = table_builder [["X", [1, 2, 3]]] expression = "2 + (2 * 2) + (2 ^ [X])" t2 = t.set expression t2.column_names . should_equal ["X", expression] @@ -238,7 +252,7 @@ add_specs suite_builder setup = t2.at expression . to_vector . 
should_equal [8, 10, 14] group_builder.specify "Should use .pretty to distinguish string constants from regular column names" <| - t = table_builder [["X", ["a", "b", "c"]]] connection=data.connection + t = table_builder [["X", ["a", "b", "c"]]] expression = '"foo" + [X] + "bar"' t2 = t.set expression t2.column_names . should_equal ["X", expression] @@ -246,7 +260,7 @@ add_specs suite_builder setup = t2.at expression . to_vector . should_equal ["fooabar", "foobbar", "foocbar"] group_builder.specify "Should disambiguate between a column reference and a literal string" <| - t = table_builder [["X", ["a", "b", "c"]]] connection=data.connection + t = table_builder [["X", ["a", "b", "c"]]] t2 = t.set (Column_Operation.Add "prefix" (Column_Ref.Name "X")) t3 = t2.set (Column_Operation.Add "prefix" "X") @@ -255,13 +269,13 @@ add_specs suite_builder setup = t3.at "['prefix'] + 'X'" . to_vector . should_equal ["prefixX", "prefixX", "prefixX"] group_builder.specify "Should not disambiguate if set_mode is Update" <| - t = table_builder [["X", [1, 2, 3]]] connection=data.connection + t = table_builder [["X", [1, 2, 3]]] t2 = t.set (Column_Operation.Add (Column_Ref.Name "X") 1) set_mode=Set_Mode.Update t2.column_names . should_equal ["X"] t2.at "X" . to_vector . should_equal [2, 3, 4] group_builder.specify "Should not disambiguate if set_mode is Add_Or_Update" <| - t = table_builder [["X", [1, 2, 3]], ["[X] + 1", [10, 20, 30]]] connection=data.connection + t = table_builder [["X", [1, 2, 3]], ["[X] + 1", [10, 20, 30]]] # set_mode=Set_Mode.Add_Or_Update is the default t2 = t.set (Column_Operation.Add (Column_Ref.Name "X") 1) t2.column_names . should_equal ["X", "[X] + 1"] @@ -269,7 +283,7 @@ add_specs suite_builder setup = t2.at "[X] + 1" . to_vector . 
should_equal [2, 3, 4] group_builder.specify "Should not disambiguate if the new name is explicitly set" <| - t = table_builder [["X", [1, 2, 3]]] connection=data.connection + t = table_builder [["X", [1, 2, 3]]] t2 = t.set (Column_Operation.Add (Column_Ref.Name "X") 1) new_name="X" t2.column_names . should_equal ["X"] t2.at "X" . to_vector . should_equal [2, 3, 4] diff --git a/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso index 1f07e7f41521..60108ffb6dea 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso @@ -10,10 +10,28 @@ from project.Common_Table_Operations.Util import run_default_backend main = run_default_backend add_specs +type Data + Value ~connection + + setup create_connection_fn = Data.Value <| + create_connection_fn Nothing + + teardown self = + self.connection.close + + add_specs suite_builder setup = - table_builder = setup.table_builder materialize = setup.materialize + create_connection_fn = setup.create_connection_func suite_builder.group setup.prefix+"Table.distinct" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should group by all columns by default" <| a = ["A", ["a", "b", "a", "b", "a", "b"]] b = ["B", [2, 1, 2, 2, 2, 1]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso index 29a3f0020ae8..f75c45bc9ce1 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso @@ -25,6 +25,9 @@ type Data setup create_connection_fn = Data.Value (create_connection_fn Nothing) + teardown self = + self.connection.close + ## Currently these tests rely on 
filtering preserving the insertion ordering within tables. This is not necessarily guaranteed by RDBMS, so we may adapt @@ -32,15 +35,20 @@ type Data as that seems to be the case. add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder test_selection = setup.test_selection create_connection_fn = setup.create_connection_func suite_builder.group prefix+"Table.filter" group_builder-> data = Data.setup create_connection_fn + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "by integer comparisons" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]] t1 = t.filter "X" (Filter_Condition.Less than=10) t1.at "ix" . to_vector . should_equal [2, 4] t1.at "X" . to_vector . should_equal [3, 4] @@ -83,7 +91,7 @@ add_specs suite_builder setup = t.filter "X" (Filter_Condition.Is_Nan) . at "ix" . to_vector . should_equal [] group_builder.specify "by float operations" <| - t = table_builder [["ix", [1, 2, 3, 4, 5, 6]], ["X", [100.0, 2.5, Nothing, Number.nan, Number.positive_infinity, Number.negative_infinity]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5, 6]], ["X", [100.0, 2.5, Nothing, Number.nan, Number.positive_infinity, Number.negative_infinity]]] t.filter "X" (Filter_Condition.Less than=10.0) . at "X" . to_vector . should_equal [2.5, Number.negative_infinity] @@ -102,7 +110,7 @@ add_specs suite_builder setup = t.filter "X" Filter_Condition.Is_Nan . at "ix" . to_vector . 
should_equal [4] group_builder.specify "Not_Equal test cases" pending="Specification needs clarifying, see: https://github.com/enso-org/enso/issues/5241#issuecomment-1480167927" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]] t3 = t.filter "X" (Filter_Condition.Not_Equal to=100) t3 . at "X" . to_vector . should_equal [3, Nothing, 4, 12] t3 . at "ix" . to_vector . should_equal [2, 3, 4, 5] @@ -110,7 +118,7 @@ add_specs suite_builder setup = t.filter "X" (Filter_Condition.Not_Equal to=(Column_Ref.Name "Y")) . at "X" . to_vector . should_equal [3, Nothing, 4, 12] group_builder.specify "by text comparisons" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "baca", "b", Nothing, "c"]], ["Y", ["a", "b", "b", "c", "c"]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "baca", "b", Nothing, "c"]], ["Y", ["a", "b", "b", "c", "c"]]] t1 = t.filter "X" (Filter_Condition.Less than="c") t1.at "ix" . to_vector . should_equal [1, 2, 3] t1.at "X" . to_vector . should_equal ["abb", "baca", "b"] @@ -138,13 +146,13 @@ add_specs suite_builder setup = t.filter "X" (Filter_Condition.Equal to=(Column_Ref.Name "Y")) . at "X" . to_vector . should_equal ["b", "c"] t.filter "X" (Filter_Condition.Between (Column_Ref.Name "Y") "bzzzz") . at "X" . to_vector . should_equal ["abb", "baca", "b"] - t2 = table_builder [["X", ["A", "a", "b"]], ["Y", ["a", "B", "b"]]] connection=data.connection + t2 = table_builder [["X", ["A", "a", "b"]], ["Y", ["a", "B", "b"]]] t2.filter "X" (Filter_Condition.Equal to="a") . at "X" . to_vector . should_equal ["a"] t2.filter "X" (Filter_Condition.Equal_Ignore_Case to="a") . at "X" . to_vector . should_equal ["A", "a"] t2.filter "X" (Filter_Condition.Equal_Ignore_Case to=(Column_Ref.Name "Y")) . at "X" . to_vector . 
should_equal ["A", "b"] group_builder.specify "by text search (contains, starts_with, ends_with, not_contains)" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "bacb", "banana", Nothing, "nana"]], ["Y", ["a", "B", "d", "c", "a"]], ["Z", ["aaaaa", "bbbbb", "[ab]", "[ab]aaaa", "[ab]ccc"]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "bacb", "banana", Nothing, "nana"]], ["Y", ["a", "B", "d", "c", "a"]], ["Z", ["aaaaa", "bbbbb", "[ab]", "[ab]aaaa", "[ab]ccc"]]] t.filter "X" (Filter_Condition.Starts_With "ba") . at "X" . to_vector . should_equal ["bacb", "banana"] t.filter "X" (Filter_Condition.Starts_With "BA" Case_Sensitivity.Sensitive) . at "X" . to_vector . should_equal [] @@ -178,7 +186,7 @@ add_specs suite_builder setup = t.filter "X" (Filter_Condition.Not_Contains (Column_Ref.Name "Y") Case_Sensitivity.Insensitive) . at "X" . to_vector . should_equal ["banana"] group_builder.specify "by text search (like, not_like)" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "bacb", "banana", Nothing, "nana"]], ["Y", ["a", "B", "d", "c", "a"]], ["Z", ["aaaaa", "bbbbb", "[ab]", "[ab]aaaa", "[ab]ccc"]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "bacb", "banana", Nothing, "nana"]], ["Y", ["a", "B", "d", "c", "a"]], ["Z", ["aaaaa", "bbbbb", "[ab]", "[ab]aaaa", "[ab]ccc"]]] t.filter "X" (Filter_Condition.Like "%an%") . at "X" . to_vector . should_equal ["banana", "nana"] t.filter "X" (Filter_Condition.Like "_a%") . at "X" . to_vector . should_equal ["bacb", "banana", "nana"] @@ -190,7 +198,7 @@ add_specs suite_builder setup = t.filter "Z" (Filter_Condition.Not_Like "[ab]%") . at "Z" . to_vector . 
should_equal ["aaaaa", "bbbbb"] group_builder.specify "text operations should also match newlines" <| - t = table_builder [["X", ['a\n\n\n', 'a\n', 'a\n\n\nb', 'a\nb', 'caa\nbb']]] connection=data.connection + t = table_builder [["X", ['a\n\n\n', 'a\n', 'a\n\n\nb', 'a\nb', 'caa\nbb']]] t.filter "X" (Filter_Condition.Like 'a_') . at "X" . to_vector . should_equal ['a\n'] t.filter "X" (Filter_Condition.Like 'a%') . at "X" . to_vector . should_equal ['a\n\n\n', 'a\n', 'a\n\n\nb', 'a\nb'] t.filter "X" (Filter_Condition.Like 'a_b') . at "X" . to_vector . should_equal ['a\nb'] @@ -203,7 +211,7 @@ add_specs suite_builder setup = t.filter "X" (Filter_Condition.Not_Contains '\nb') . at "X" . to_vector . should_equal ['a\n\n\n', 'a\n'] if test_selection.supports_unicode_normalization then - t = table_builder [["X", ['śnieg', 's\u0301nieg', 'X', Nothing, 'połać', 'połac\u0301']]] connection=data.connection + t = table_builder [["X", ['śnieg', 's\u0301nieg', 'X', Nothing, 'połać', 'połac\u0301']]] group_builder.specify "text operations should support Unicode normalization" <| t.filter "X" (Filter_Condition.Starts_With 'ś') . at "X" . to_vector . should_equal ['śnieg', 's\u0301nieg'] t.filter "X" (Filter_Condition.Contains 'ś') . at "X" . to_vector . should_equal ['śnieg', 's\u0301nieg'] @@ -219,12 +227,12 @@ add_specs suite_builder setup = t.filter "X" (Filter_Condition.Like 'ś%') . at "X" . to_vector . should_equal ['śnieg', 's\u0301nieg'] group_builder.specify "by empty text" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "", " ", Nothing, "nana"]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "", " ", Nothing, "nana"]]] t.filter "X" Filter_Condition.Is_Empty . at "X" . to_vector . should_equal ["", Nothing] t.filter "X" Filter_Condition.Not_Empty . at "X" . to_vector . 
should_equal ["abb", " ", "nana"] group_builder.specify "should check types for text operations" <| - t = table_builder [["ix", [1, 2, 3, 4]], ["X", [Nothing, "A", "", " "]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4]], ["X", [Nothing, "A", "", " "]]] check_problem result = result.should_fail_with Invalid_Value_Type result.catch.expected . should_equal "Char" @@ -267,7 +275,7 @@ add_specs suite_builder setup = check_scalar_type_error_handling (t.filter "X" (Filter_Condition.Not_Contains 42)) group_builder.specify "by nulls" <| - t = table_builder [["ix", [1, 2, 3, 4]], ["X", [Nothing, 1, Nothing, 4]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4]], ["X", [Nothing, 1, Nothing, 4]]] t1 = t.filter "X" Filter_Condition.Is_Nothing t1.at "ix" . to_vector . should_equal [1, 3] t1.at "X" . to_vector . should_equal [Nothing, Nothing] @@ -277,8 +285,8 @@ add_specs suite_builder setup = t2.at "X" . to_vector . should_equal [1, 4] group_builder.specify "by an Is_In check" <| - t = table_builder [["ix", [1, 2, 3, Nothing, 5, 6]], ["X", ["a", "b", "ccc", "X", "f", "2"]]] connection=data.connection - t1 = table_builder [["txt", ["X", "a", "c", Nothing]], ["int", [Nothing, 2, 5, 4]], ["bool", [True, Nothing, Nothing, True]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, Nothing, 5, 6]], ["X", ["a", "b", "ccc", "X", "f", "2"]]] + t1 = table_builder [["txt", ["X", "a", "c", Nothing]], ["int", [Nothing, 2, 5, 4]], ["bool", [True, Nothing, Nothing, True]]] t.filter "X" (Filter_Condition.Is_In (t1.at "txt")) . at "X" . to_vector . should_equal ["a", "X"] t.filter "X" (Filter_Condition.Is_In (t1.at "txt" . to_vector)) . at "X" . to_vector . 
should_equal ["a", "X"] @@ -309,7 +317,7 @@ add_specs suite_builder setup = True -> v2.should_equal [2, 3] False -> v2.should_fail_with SQL_Error - t2 = table_builder [["A", [True, False, True]], ["B", [False, False, False]], ["C", [True, False, Nothing]]] connection=data.connection + t2 = table_builder [["A", [True, False, True]], ["B", [False, False, False]], ["C", [True, False, Nothing]]] t2.filter "A" (Filter_Condition.Is_In (t1.at "bool")) . at "A" . to_vector . should_equal [True, True] t2.filter "A" (Filter_Condition.Is_In (t1.at "bool" . to_vector)) . at "A" . to_vector . should_equal [True, True] t2.filter "B" (Filter_Condition.Is_In [True, Nothing]) . at "B" . to_vector . should_equal [] @@ -323,7 +331,7 @@ add_specs suite_builder setup = NOT on a row-by-row basis like all other operations. Column_Ref is used with row-by-row ops, so this would only cause confusion. Very rarely someone wants to filter a column by Is_In within the same table - and that's the only approach Column_Ref would support. - t = table_builder [["A", [1, 2, 3]], ["B", [2, 3, 4]]] connection=data.connection + t = table_builder [["A", [1, 2, 3]], ["B", [2, 3, 4]]] t.filter "A" (Filter_Condition.Is_In (Column_Ref.Name "B")) . should_fail_with Illegal_Argument @@ -331,12 +339,12 @@ add_specs suite_builder setup = t.filter "A" (Filter_Condition.Is_In (t.at "B")) . at "A" . to_vector . should_equal [2, 3] group_builder.specify "by a boolean mask" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] t.filter "b" . at "ix" . to_vector . should_equal [1, 4, 5] t.filter "b" Filter_Condition.Is_False . at "ix" . to_vector . 
should_equal [2] group_builder.specify "should correctly mask all kinds of columns" <| - t = table_builder [["ints", [1, 2, 3, Nothing, 4]], ["floats", [4.0, Nothing, 3.0, 2.0, 1.0]], ["bools", [False, False, True, Nothing, False]], ["strings", ["a", Nothing, "b", "c", "d"]], ["mask", [False, True, True, True, Nothing]]] connection=data.connection + t = table_builder [["ints", [1, 2, 3, Nothing, 4]], ["floats", [4.0, Nothing, 3.0, 2.0, 1.0]], ["bools", [False, False, True, Nothing, False]], ["strings", ["a", Nothing, "b", "c", "d"]], ["mask", [False, True, True, True, Nothing]]] t2 = t.filter "mask" t2.at "ints" . to_vector . should_equal [2, 3, Nothing] t2.at "floats" . to_vector . should_equal [Nothing, 3.0, 2.0] @@ -345,7 +353,7 @@ add_specs suite_builder setup = t2.at "mask" . to_vector . should_equal [True, True, True] group_builder.specify "should check types of boolean operations" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] check_problem result = result.should_fail_with Invalid_Value_Type result.catch.expected . should_equal Value_Type.Boolean @@ -354,7 +362,7 @@ add_specs suite_builder setup = check_problem <| t.filter "ix" Filter_Condition.Is_False group_builder.specify "should check types of numeric operations" <| - t = table_builder [["a", ["a", "b"]]] connection=data.connection + t = table_builder [["a", ["a", "b"]]] check_problem result = result.should_fail_with Invalid_Value_Type result.catch.expected . 
should_equal "a numeric" @@ -366,32 +374,32 @@ add_specs suite_builder setup = check_problem <| t.filter "a" Filter_Condition.Is_Nan group_builder.specify "by a custom expression built from table's columns" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10, 20, 13, 4, 5]], ["Y", [0, -100, 8, 2, 5]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10, 20, 13, 4, 5]], ["Y", [0, -100, 8, 2, 5]]] t.filter (t.at "X" + t.at "Y" > 9) . at "ix" . to_vector . should_equal [1, 3, 5] group_builder.specify "should handle selection errors: unknown column name" <| - t = table_builder [["X", [10, 20, 13, 4, 5]]] connection=data.connection + t = table_builder [["X", [10, 20, 13, 4, 5]]] t.filter "unknown column" . should_fail_with No_Such_Column t.filter "unknown column" . catch . should_equal (No_Such_Column.Error "unknown column") group_builder.specify "should handle selection errors: out of bounds index" <| - t = table_builder [["X", [10, 20, 13, 4, 5]]] connection=data.connection + t = table_builder [["X", [10, 20, 13, 4, 5]]] t.filter 4 . should_fail_with Index_Out_Of_Bounds t.filter 4 . catch . should_equal (Index_Out_Of_Bounds.Error 4 1) group_builder.specify "should handle illegal arguments" <| - t = table_builder [["X", [10, 20, 13, 4, 5]]] connection=data.connection + t = table_builder [["X", [10, 20, 13, 4, 5]]] Test.expect_panic_with (t.filter "X" "NOT A CONDITION") Type_Error group_builder.specify "should nicely handle Filter_Condition with unapplied arguments" <| - t = table_builder [["X", [10, 20, 13, 4, 5]]] connection=data.connection + t = table_builder [["X", [10, 20, 13, 4, 5]]] t.filter "X" (Filter_Condition.Equal) . should_fail_with Illegal_Argument t.filter "X" (Filter_Condition.Starts_With) . should_fail_with Illegal_Argument t.filter "X" (Filter_Condition.Between) . should_fail_with Illegal_Argument t.filter "X" (Filter_Condition.Between 1) . 
should_fail_with Illegal_Argument group_builder.specify "should report issues: floating point equality" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, 2.0]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, 2.0]]] r1 = t.filter "X" (Filter_Condition.Equal 2) on_problems=Problem_Behavior.Ignore r1.at "ix" . to_vector . should_equal [3, 5] @@ -409,7 +417,7 @@ add_specs suite_builder setup = Problems.expect_warning Floating_Point_Equality r4 group_builder.specify "already existing warnings should not be escalated to errors in error handling mode" <| - t1 = table_builder [["X", [1.5, 2.0, 0.0]], ["ix", [1, 2, 3]]] connection=data.connection + t1 = table_builder [["X", [1.5, 2.0, 0.0]], ["ix", [1, 2, 3]]] c1 = Warning.attach (Illegal_State.Error "FOO") (t1.evaluate_expression "3.0 + [X]") Problems.expect_warning Illegal_State c1 @@ -445,18 +453,24 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.filter_by_expression" group_builder-> data = Data.setup create_connection_fn + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "by a boolean column" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] t.filter_by_expression "[b]" . at "ix" . to_vector . should_equal [1, 4, 5] t.filter_by_expression "![b]" . at "ix" . to_vector . should_equal [2] group_builder.specify "by an integer comparison" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] t.filter_by_expression "[ix]==3" . at "ix" . to_vector . should_equal [3] t.filter_by_expression "[ix]>2" . at "ix" . to_vector . 
should_equal [3, 4, 5] group_builder.specify "fail gracefully" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]] t.filter_by_expression "[ix" . should_fail_with Expression_Error t.filter_by_expression "[ix" . catch . should_be_a Expression_Error.Syntax_Error t.filter_by_expression "Starts_With([b])" . should_fail_with Expression_Error @@ -479,7 +493,7 @@ add_specs suite_builder setup = t.filter_by_expression "is_empty('', 42)" . catch . should_be_a Expression_Error.Argument_Mismatch group_builder.specify "should report issues: floating point equality" <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, -2.0]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, -2.0]]] r1 = t.filter_by_expression "[X] * [X] == 4.0" on_problems=Problem_Behavior.Ignore Problems.assume_no_problems r1 r1.at "ix" . to_vector . should_equal [3, 5] @@ -498,7 +512,7 @@ add_specs suite_builder setup = db_pending = if setup.is_database then "Arithmetic error handling is currently not implemented for the Database backend." group_builder.specify "should report issues: arithmetic error" pending=db_pending <| - t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [2.0, 2.0, 0.0, 1.0, 2.0]]] connection=data.connection + t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [2.0, 2.0, 0.0, 1.0, 2.0]]] action = t.filter_by_expression "8.0 / [X] <= 4.0" on_problems=_ tester table = table . at "ix" . to_vector . 
should_equal [1, 2, 5] diff --git a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso index 058767096634..d9f3c639f31b 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso @@ -19,14 +19,22 @@ type Data setup create_connection_fn = Data.Value (create_connection_fn Nothing) + teardown self = + self.connection.close + add_specs suite_builder setup = - table_builder = setup.table_builder create_connection_fn = setup.create_connection_func materialize = setup.materialize suite_builder.group setup.prefix+" Interactions Between various operations" group_builder-> data = Data.setup create_connection_fn + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols + group_builder.specify "using weird column names with operations and filtering" <| columns = weird_names.map_with_index ix-> name-> [name, [100+ix, 200+ix, 300+ix]] @@ -43,8 +51,8 @@ add_specs suite_builder setup = result.at name . to_vector . should_equal [200+ix] group_builder.specify "aggregates and joins" <| - t1 = table_builder [["Count", [1, 2, 3]], ["Class", ["X", "Y", "Z"]]] connection=data.connection - t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "A", "C", "D", "D", "B", "B"]]] connection=data.connection + t1 = table_builder [["Count", [1, 2, 3]], ["Class", ["X", "Y", "Z"]]] + t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "A", "C", "D", "D", "B", "B"]]] t3 = t2.aggregate [Group_By "Letter", Count] t4 = t3.join t1 on="Count" join_kind=Join_Kind.Left_Outer |> materialize |> _.order_by "Letter" @@ -56,7 +64,7 @@ add_specs suite_builder setup = rows.at 3 . 
should_equal ["D", 2, 2, "Y"] group_builder.specify "aggregates and distinct" <| - t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C"]], ["Points", [2, 5, 2, 1, 10, 3]]] connection=data.connection + t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C"]], ["Points", [2, 5, 2, 1, 10, 3]]] t3 = t2.aggregate [Group_By "Letter", Sum "Points"] t4 = t3.distinct "Sum Points" |> materialize |> _.order_by "Sum Points" @@ -71,7 +79,7 @@ add_specs suite_builder setup = rows.at 1 . should_equal ["C", 13] group_builder.specify "aggregates and filtering" <| - t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C", "B"]], ["Points", [2, 5, 2, 1, 10, 3, 0]]] connection=data.connection + t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C", "B"]], ["Points", [2, 5, 2, 1, 10, 3, 0]]] t3 = t2.aggregate [Group_By "Letter", Sum "Points"] t4 = t3.filter "Sum Points" (Filter_Condition.Equal 5) |> materialize |> _.order_by "Letter" @@ -81,7 +89,7 @@ add_specs suite_builder setup = rows.at 1 . should_equal ["B", 5] group_builder.specify "aggregates and ordering" <| - t1 = table_builder [["Letter", ["C", "A", "B", "A", "A", "C", "C", "B"]], ["Points", [0, -100, 5, 2, 1, 10, 3, 0]]] connection=data.connection + t1 = table_builder [["Letter", ["C", "A", "B", "A", "A", "C", "C", "B"]], ["Points", [0, -100, 5, 2, 1, 10, 3, 0]]] t2 = t1.aggregate [Group_By "Letter", Sum "Points"] t3 = t2.order_by "Sum Points" |> materialize t3.columns.map .name . should_equal ["Letter", "Sum Points"] @@ -89,7 +97,7 @@ add_specs suite_builder setup = t3.at "Sum Points" . to_vector . 
should_equal [-97, 5, 13] group_builder.specify "distinct and ordering" <| - t1 = table_builder [["X", [1, 2, 2, 1]], ["Y", ["a", "b", "b", "a"]], ["Z", [1, 2, 3, 4]]] connection=data.connection + t1 = table_builder [["X", [1, 2, 2, 1]], ["Y", ["a", "b", "b", "a"]], ["Z", [1, 2, 3, 4]]] # These are 'adversarial' white-box examples constructed knowing that Postgres' DISTINCT ON does not play too well with ORDER BY and it needs to be handled carefully. t2 = t1.order_by "X" . distinct "X" |> materialize @@ -104,7 +112,7 @@ add_specs suite_builder setup = a = ["A", ["a", "a", "a", "a", "a", "a"]] b = ["B", [1, 1, 2, 2, 1, 2]] c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]] - t = (table_builder [a, b, c] connection=data.connection) . order_by ([(Sort_Column.Name "C" Sort_Direction.Descending)]) + t = (table_builder [a, b, c]) . order_by ([(Sort_Column.Name "C" Sort_Direction.Descending)]) t2 = t.distinct ["A", "B"] on_problems=Report_Error # Now, reverse the order! @@ -148,8 +156,8 @@ add_specs suite_builder setup = ## This mostly checks that various operations handle all kinds of Integer storage implementations (add_row_number may use a different storage than regular columns) if setup.is_database.not then group_builder.specify "add_row_number and other operations" <| - t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]], ["Z", [0.25, 0.5, 0.75]]] connection=data.connection - t2 = table_builder [["X", ["ddd", "eee", "fff"]]] connection=data.connection + t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]], ["Z", [0.25, 0.5, 0.75]]] + t2 = table_builder [["X", ["ddd", "eee", "fff"]]] t11 = t1.add_row_number (t11.at "Row" + 2) . to_vector . should_equal [3, 4, 5] @@ -168,7 +176,7 @@ add_specs suite_builder setup = r12.at "X" . to_vector . should_equal ["a", "b", "c", "ddd", "eee", "fff"] r12.at "Row" . to_vector . 
should_equal [1, 2, 3, 100, 101, 102] - t3 = table_builder [["X", ["a", "b", "c"]], ["Row", [1.5, 2.5, 3.5]]] connection=data.connection + t3 = table_builder [["X", ["a", "b", "c"]], ["Row", [1.5, 2.5, 3.5]]] t123 = ((t1.add_row_number).union [(t2.add_row_number), t3]) r123 = t123 |> materialize @@ -177,8 +185,8 @@ add_specs suite_builder setup = if setup.test_selection.fixed_length_text_columns then group_builder.specify "types of unioned fixed-length columns should be correctly inferred after passing through other operations that infer types from Database, like aggregate Shortest" <| - t1 = (table_builder [["X", ["a", "b", "c"]], ["Y", [1, 0, 2]]] connection=data.connection) . cast "X" (Value_Type.Char 1 False) - t2 = (table_builder [["X", ["ddd", "eee", "fff"]], ["Y", [0, 1, 0]]] connection=data.connection) . cast "X" (Value_Type.Char 3 False) + t1 = (table_builder [["X", ["a", "b", "c"]], ["Y", [1, 0, 2]]]) . cast "X" (Value_Type.Char 1 False) + t2 = (table_builder [["X", ["ddd", "eee", "fff"]], ["Y", [0, 1, 0]]]) . cast "X" (Value_Type.Char 3 False) t3 = t1.union t2 vt1 = t3.at "X" . value_type @@ -196,7 +204,7 @@ add_specs suite_builder setup = t5.at "Shortest X" . to_vector . should_equal ["b", "a", "c"] group_builder.specify "types should be correctly preserved after aggregation after iif" <| - t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]] connection=data.connection + t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]] t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . 
cast "B" (Value_Type.Char size=3 variable_length=False) x = t1.at "x" diff --git a/test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso index 01cd0426a554..2b651e472f28 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Map_Spec.enso @@ -14,11 +14,30 @@ from project.Common_Table_Operations.Util import run_default_backend main = run_default_backend add_specs +type Data + Value ~connection + + setup create_connection_fn = Data.Value <| + connection = create_connection_fn Nothing + connection + + teardown self = + self.connection.close + + add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func suite_builder.group prefix+"Column.map" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + if setup.is_database then group_builder.specify "should report unsupported error" <| t = table_builder [["X", [1, 2, 3]]] @@ -133,6 +152,14 @@ add_specs suite_builder setup = r8.catch.to_display_text . 
should_contain "Expected type Date, but got a value 42 of type Integer (16 bits)" suite_builder.group prefix+"Column.zip" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + if setup.is_database then group_builder.specify "should report unsupported error" <| t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso index 6cfb6f5d6ab4..92fe66b0c8d1 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso @@ -42,24 +42,22 @@ type Data Data.Value [connection, t0, t1, t3, t4] teardown self = - self.connection.drop_table self.t0.name - self.connection.drop_table self.t1.name - self.connection.drop_table self.t3.name - self.connection.drop_table self.t4.name self.connection.close add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder create_connection_fn = setup.create_connection_func test_selection = setup.test_selection suite_builder.group prefix+"Dropping Missing Values" group_builder-> - data = Data.setup create_connection_fn table_builder + data = Data.setup create_connection_fn setup.table_builder group_builder.teardown <| data.teardown + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "filter_blank_rows should drop rows that contain at least one missing cell" <| d = data.t0.filter_blank_rows when=Blank_Selector.Any_Cell d.row_count . should_equal 1 @@ -76,7 +74,7 @@ add_specs suite_builder setup = group_builder.specify "filter_blank_rows should deal with edge cases" <| ## TODO currently our builder does not allow all-null tables, so we create one with a 0 and remove it by filter. See #6159. 
- t0 = table_builder [["X", [0, Nothing, Nothing, Nothing]]] connection=data.connection + t0 = table_builder [["X", [0, Nothing, Nothing, Nothing]]] t1 = t0.filter "X" (Filter_Condition.Is_Nothing) t1.row_count . should_equal 3 t1.at "X" . to_vector . should_equal [Nothing, Nothing, Nothing] @@ -85,7 +83,7 @@ add_specs suite_builder setup = t2.row_count . should_equal 0 t2.at "X" . to_vector . should_equal [] - t3 = table_builder [["X", ["", "", Nothing]]] connection=data.connection + t3 = table_builder [["X", ["", "", Nothing]]] t4 = t3.filter_blank_rows when=Blank_Selector.All_Cells t4.row_count . should_equal 0 t4.at "X" . to_vector . should_equal [] @@ -93,7 +91,7 @@ add_specs suite_builder setup = group_builder.specify "filter_blank_rows should work with a table with many columns" <| cols = Vector.new 60 i-> ["col_"+i.to_text, [i, Nothing]] - t1 = table_builder cols connection=data.connection + t1 = table_builder cols t2 = t1.filter_blank_rows t2.columns.length . should_equal 60 t2.row_count . should_equal 1 @@ -187,6 +185,14 @@ add_specs suite_builder setup = r3.catch.to_display_text . should_equal "No columns in the result, because of another problem: No columns were blank." 
suite_builder.group prefix+"Filling Missing Values" group_builder-> + data = Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should coerce long and double types to double" <| table = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [0.5, Nothing, Nothing, 0.25]]] ints = table.at "X" diff --git a/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso index b0fa3d272986..989b27375548 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso @@ -13,9 +13,12 @@ from project.Common_Table_Operations.Util import run_default_backend main = run_default_backend add_specs type Data - Value ~table + Value ~data + + connection self = self.data.at 0 + table self = self.data.at 1 - setup create_connection_fn table_builder = + setup create_connection_fn table_builder = Data.Value <| connection = create_connection_fn Nothing mk_table = col1 = ["alpha", [3, 2, 1, 0]] @@ -29,8 +32,11 @@ type Data col9 = ["tau", [32.0, 0.5, -0.1, 1.6]] col10 = ["rho", ["BB", Nothing, Nothing, "B"]] table_builder [col1, col2, col3, col4, col5, col6, col7, col8, col9, col10] connection=connection - Data.Value mk_table - + [connection, mk_table] + + teardown self = + self.connection.close + type My_Type Foo x @@ -38,13 +44,18 @@ type My_Type add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder create_connection_fn = setup.create_connection_func test_selection = setup.test_selection order_by_pending = if test_selection.order_by.not then "ToDo: order_by is not yet supported by this backend." 
suite_builder.group prefix+"Table.order_by" pending=order_by_pending group_builder-> - data = Data.setup create_connection_fn table_builder + data = Data.setup create_connection_fn setup.table_builder + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection group_builder.specify "should work as shown in the doc examples" <| t1 = data.table.order_by ["alpha"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso index 2787da2eb232..7d7660ea2f64 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso @@ -11,9 +11,12 @@ from project.Common_Table_Operations.Util import expect_column_names, run_defaul main = run_default_backend add_specs type Select_Columns_Data - Value ~table + Value ~data + + connection self = self.data.at 0 + table self = self.data.at 1 - setup create_connection_fn table_builder = + setup create_connection_fn table_builder = Select_Columns_Data.Value <| connection = create_connection_fn Nothing table = col1 = ["foo", [1,2,3]] @@ -24,13 +27,18 @@ type Select_Columns_Data col6 = ["ab.+123", [16,17,18]] col7 = ["abcd123", [19,20,21]] table_builder [col1, col2, col3, col4, col5, col6, col7] connection=connection - Select_Columns_Data.Value table + [connection, table] + teardown self = + self.connection.close type Sort_Columns_Data - Value ~table - - setup create_connection_fn table_builder = + Value ~data + + connection self = self.data.at 0 + table self = self.data.at 1 + + setup create_connection_fn table_builder = Sort_Columns_Data.Value <| connection = create_connection_fn Nothing table = col1 = ["foo 21", [1,2,3]] @@ -41,13 +49,19 @@ type Sort_Columns_Data col6 = ["foo 001", [16,17,18]] col7 = ["bar", [19,20,21]] table_builder [col1, col2, col3, col4, col5, col6, col7] 
connection=connection - Sort_Columns_Data.Value table + [connection, table] + + teardown self = + self.connection.close type Rename_Columns_Data - Value ~table - - setup create_connection_fn table_builder = + Value ~data + + connection self = self.data.at 0 + table self = self.data.at 1 + + setup create_connection_fn table_builder = Rename_Columns_Data.Value <| connection = create_connection_fn Nothing table = col1 = ["alpha", [1,2,3]] @@ -55,7 +69,10 @@ type Rename_Columns_Data col3 = ["gamma", [16,17,18]] col4 = ["delta", [19,20,21]] table_builder [col1, col2, col3, col4] connection=connection - Rename_Columns_Data.Value table + [connection, table] + + teardown self = + self.connection.close add_specs suite_builder setup = @@ -66,7 +83,10 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.select_columns" group_builder-> data = Select_Columns_Data.setup create_connection_fn table_builder - + + group_builder.teardown <| + data.teardown + group_builder.specify "should work as shown in the doc examples" <| expect_column_names ["foo", "bar"] <| data.table.select_columns ["bar", "foo"] expect_column_names ["bar", "Baz", "foo 1", "foo 2"] <| data.table.select_columns ["foo.+".to_regex, "b.*".to_regex True] @@ -185,7 +205,10 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.remove_columns" group_builder-> data = Select_Columns_Data.setup create_connection_fn table_builder - + + group_builder.teardown <| + data.teardown + group_builder.specify "should work as shown in the doc examples" <| expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] <| data.table.remove_columns ["bar", "foo"] expect_column_names ["foo", "ab.+123", "abcd123"] <| data.table.remove_columns ["foo.+".to_regex, "b.*".to_regex] Case_Sensitivity.Insensitive @@ -270,7 +293,10 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.reorder_columns" group_builder-> data = Select_Columns_Data.setup create_connection_fn table_builder - + + 
group_builder.teardown <| + data.teardown + group_builder.specify "should work as shown in the doc examples" <| expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| data.table.reorder_columns "foo" Position.After_Other_Columns expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo", "bar"] <| data.table.reorder_columns ["foo", "bar"] Position.After_Other_Columns @@ -340,6 +366,9 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.sort_columns" group_builder-> data = Sort_Columns_Data.setup create_connection_fn table_builder + group_builder.teardown <| + data.teardown + group_builder.specify "should work as shown in the doc examples" <| sorted = data.table.sort_columns expect_column_names ["Foo 2", "bar", "foo 001", "foo 1", "foo 100", "foo 21", "foo 3"] sorted @@ -360,6 +389,9 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.rename_columns" group_builder-> data = Rename_Columns_Data.setup create_connection_fn table_builder + group_builder.teardown <| + data.teardown + group_builder.specify "should work as shown in the doc examples" <| expect_column_names ["FirstColumn", "beta", "gamma", "delta"] <| data.table.rename_columns ["FirstColumn"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso index 8dc3450268be..b5cf768471b0 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso @@ -31,8 +31,6 @@ type Table_Take_Drop_Data [connection, table, empty] teardown self = - self.connection.drop_table self.table.name - self.connection.drop_table self.empty.name self.connection.close @@ -62,25 +60,22 @@ type Column_Take_Drop_Data [connection, table, alpha, beta, empty_alpha, empty_beta] teardown self = - self.connection.drop_table self.table.name - self.connection.drop_table self.alpha.name - 
self.connection.drop_table self.beta.name - self.connection.drop_table self.empty_alpha.name - self.connection.drop_table self.empty_beta.name self.connection.close add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder create_connection_fn = setup.create_connection_func suite_builder.group prefix+"Table.take/drop" group_builder-> - data = Table_Take_Drop_Data.setup create_connection_fn table_builder + data = Table_Take_Drop_Data.setup create_connection_fn setup.table_builder group_builder.teardown <| data.teardown + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow selecting first or last N rows" <| data.table.take.at "alpha" . to_vector . should_equal [1] data.table.take.at "beta" . to_vector . should_equal ["A"] @@ -258,7 +253,7 @@ add_specs suite_builder setup = False -> unordered_table.take . at "alpha" . to_vector . should_equal [1] group_builder.specify "Should work correctly after aggregation" <| - t0 = table_builder [["X", ["a", "b", "a", "c"]], ["Y", [1, 2, 4, 8]]] connection=data.connection + t0 = table_builder [["X", ["a", "b", "a", "c"]], ["Y", [1, 2, 4, 8]]] t1 = t0.aggregate [Group_By "X", Sum "Y"] t2 = t1.order_by "X" . take 2 @@ -270,11 +265,14 @@ add_specs suite_builder setup = t3.at "Sum Y" . to_vector . should_equal [2.0, 8.0] suite_builder.group prefix+"Column.take/drop" group_builder-> - data = Column_Take_Drop_Data.setup create_connection_fn table_builder + data = Column_Take_Drop_Data.setup create_connection_fn setup.table_builder group_builder.teardown <| data.teardown + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow selecting first or last N rows" <| data.alpha.take.to_vector . should_equal [1] data.beta.take.to_vector . 
should_equal ["A"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Temp_Column_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Temp_Column_Spec.enso index 57823e5aa596..0cd9bd2b7f55 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Temp_Column_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Temp_Column_Spec.enso @@ -5,12 +5,32 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend +main = run_default_backend add_specs + +type Data + Value ~connection + + setup create_connection_fn = Data.Value <| + connection = create_connection_fn Nothing + connection + + teardown self = + self.connection.close + add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func suite_builder.group prefix+"Temp column" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "Can generate a temp column" <| t1 = table_builder [["X", ['a', 'b']], ["Y", ['c', 'd']], ["Z", [40, 20]]] t1.make_temp_column_name . 
should_equal "temp" diff --git a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso index 494d941b3e04..ccda92bbe0ea 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso @@ -8,11 +8,29 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import run_default_backend +type Data + Value ~connection + + setup create_connection_fn = Data.Value <| + create_connection_fn Nothing + + teardown self = + self.connection.close + + add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder + create_connection_fn = setup.create_connection_func db_todo = if setup.is_database.not then Nothing else "Table.transpose is not implemented yet in Database." suite_builder.group prefix+"Table.transpose" pending=db_todo group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should transpose all columns by default" <| t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]] t1 = t.transpose diff --git a/test/Table_Tests/src/Database/Codegen_Spec.enso b/test/Table_Tests/src/Database/Codegen_Spec.enso index f514f22b2c75..aa5972b90abe 100644 --- a/test/Table_Tests/src/Database/Codegen_Spec.enso +++ b/test/Table_Tests/src/Database/Codegen_Spec.enso @@ -22,13 +22,13 @@ type Data connection self = self.data.at 0 t1 self = self.data.at 1 - setup = + setup = Data.Value <| c = Database.connect (SQLite In_Memory) c.create_table "T1" [Column_Description.Value "A" Value_Type.Integer, Column_Description.Value "B" Value_Type.Char, Column_Description.Value "C" Value_Type.Boolean] c.create_table "T2" [Column_Description.Value "D" 
Value_Type.Integer, Column_Description.Value "E" Value_Type.Integer, Column_Description.Value "F" Value_Type.Boolean] c.create_table "T3" [Column_Description.Value "A" Value_Type.Integer, Column_Description.Value "E" Value_Type.Boolean, Column_Description.Value "F" Value_Type.Integer] t1 = c.query (SQL_Query.Table_Name "T1") - Data.Value [c, t1] + [c, t1] teardown self = self.connection.close diff --git a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec.enso b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec.enso index 2b0bc5e2d95f..caf812c62781 100644 --- a/test/Table_Tests/src/Database/Common/Default_Ordering_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Default_Ordering_Spec.enso @@ -14,19 +14,27 @@ import Standard.Test_New.Suite.Suite_Builder import project.Util import project.Database.Helpers.Name_Generator +main = + suite = Test.build suite_builder-> + add_specs suite_builder "[SQLite]" (_-> Database.connect (SQLite In_Memory)) + suite.run_with_filter + + type Data Value ~data - db_table_without_key self = self.data.first - db_table_with_key self = self.data.second + connection self = self.data.at 0 + db_table_without_key self = self.data.at 1 + db_table_with_key self = self.data.at 2 setup create_connection_func = Data.Value <| - IO.println <| " Default_Ordering_Spec_New.setup" connection = create_connection_func Nothing src_table = Table.new [["X", [1, 2, 3]], ["Y", [30, 20, 10]]] db_table_without_key = src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=Nothing db_table_with_key = src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=["X"] - [db_table_without_key, db_table_with_key] + [connection, db_table_without_key, db_table_with_key] + + teardown self = self.connection.close ## Adds test specifications for default ordering to the given `suite_builder`. 
Adds it as @@ -41,6 +49,9 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn suite_builder.group group_name group_builder-> data = Data.setup create_connection_fn + group_builder.teardown <| + data.teardown + group_builder.specify "will return Nothing if no primary key is defined" <| data.db_table_without_key.default_ordering . should_equal Nothing diff --git a/test/Table_Tests/src/Database/Common/Names_Length_Limits_Spec.enso b/test/Table_Tests/src/Database/Common/Names_Length_Limits_Spec.enso index e752e0d70b52..78345282c035 100644 --- a/test/Table_Tests/src/Database/Common/Names_Length_Limits_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Names_Length_Limits_Spec.enso @@ -18,11 +18,17 @@ type Data setup create_connection_func = Data.Value (create_connection_func Nothing) + teardown self = self.connection.close + add_specs suite_builder prefix create_connection_func = - data = Data.setup create_connection_func - + suite_builder.group prefix+"Support for Long Column/Table names" group_builder-> + data = Data.setup create_connection_func + + group_builder.teardown <| + data.teardown + entity_naming_properties = data.connection.base_connection.entity_naming_properties max_table_name_length = entity_naming_properties.for_table_names.size_limit max_column_name_length = entity_naming_properties.for_column_names.size_limit diff --git a/test/Table_Tests/src/Database/Transaction_Spec.enso b/test/Table_Tests/src/Database/Transaction_Spec.enso index c9bbec2fd70f..fac818a9f848 100644 --- a/test/Table_Tests/src/Database/Transaction_Spec.enso +++ b/test/Table_Tests/src/Database/Transaction_Spec.enso @@ -25,10 +25,16 @@ type Data simple_table_structure = [Column_Description.Value "X" Value_Type.Integer] [connection, simple_table_structure] + teardown self = + self.connection.close + add_specs suite_builder create_connection_func prefix = suite_builder.group prefix+"Transaction Support" group_builder-> data = Data.setup 
create_connection_func + group_builder.teardown <| + data.teardown + insert_value table_name x = pairs = [["X", SQL_Expression.Constant x]] sql = data.connection.dialect.generate_sql <| Query.Insert table_name pairs diff --git a/test/Table_Tests/src/Database/Types/Postgres_Type_Mapping_Spec.enso b/test/Table_Tests/src/Database/Types/Postgres_Type_Mapping_Spec.enso index 7697ac24e704..5d68c7a130d9 100644 --- a/test/Table_Tests/src/Database/Types/Postgres_Type_Mapping_Spec.enso +++ b/test/Table_Tests/src/Database/Types/Postgres_Type_Mapping_Spec.enso @@ -27,11 +27,17 @@ type Data Problems.assume_no_problems <| self.connection.execute_update stmt self.connection.query (SQL_Query.Table_Name name) + teardown self = + self.connection.close -add_specs suite_builder create_connection_fn = - data = Data.setup create_connection_fn +add_specs suite_builder create_connection_fn = suite_builder.group "[PostgreSQL] Type Mapping" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + group_builder.specify "numeric" <| t = data.make_table "ints" [["a", "smallint"], ["b", "int2"], ["c", "int"], ["d", "integer"], ["e", "int4"], ["f", "bigint"], ["g", "int8"]] t.at "a" . value_type . 
should_equal (Value_Type.Integer Bits.Bits_16) @@ -139,6 +145,11 @@ add_specs suite_builder create_connection_fn = Problems.expect_warning Inexact_Type_Coercion t2 suite_builder.group "[PostgreSQL] Type Edge Cases" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + group_builder.specify "will fail to read a BigDecimal column and suggest to cast it to Float" <| table_name = Name_Generator.random_name "BigDecimal" table = data.connection.create_table table_name [Column_Description.Value "B" (Value_Type.Decimal precision=100 scale=5)] primary_key=[] diff --git a/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso b/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso index e07e48e0539a..35b7aac72a01 100644 --- a/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso +++ b/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso @@ -22,10 +22,16 @@ type Data connection = Database.connect (SQLite In_Memory) Data.Value connection + teardown self = self.connection.close + + add_specs suite_builder = suite_builder.group "[SQLite] Type Mapping" group_builder-> data = Data.setup + group_builder.teardown <| + data.teardown + make_table prefix columns = name = prefix column_exprs = columns.map col_def-> diff --git a/test/Table_Tests/src/Database/Upload_Spec.enso b/test/Table_Tests/src/Database/Upload_Spec.enso index af2d1775bea9..eb0ce0927661 100644 --- a/test/Table_Tests/src/Database/Upload_Spec.enso +++ b/test/Table_Tests/src/Database/Upload_Spec.enso @@ -44,6 +44,9 @@ type Data in_memory_table = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] [connection, in_memory_table] + teardown self = + self.connection.close + in_memory_table_builder args primary_key=[] connection = _ = [primary_key, connection] @@ -70,6 +73,9 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = suite_builder.group prefix+"Creating an empty table" group_builder-> data = 
Data.setup make_new_connection + group_builder.teardown <| + data.teardown + group_builder.specify "should allow to specify the column names and types" <| t = data.connection.create_table (Name_Generator.random_name "creating-table") structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char] temporary=True t.column_names . should_equal ["X", "Y"] @@ -295,6 +301,9 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = suite_builder.group prefix+"Uploading an in-memory Table" group_builder-> data = Data.setup make_new_connection + group_builder.teardown <| + data.teardown + group_builder.specify "should create a database table with the same contents as the source" <| db_table = data.in_memory_table.select_into_database_table data.connection (Name_Generator.random_name "creating-table") temporary=True db_table.column_names . should_equal ["X", "Y"] @@ -452,6 +461,9 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = suite_builder.group prefix+"Persisting a Database Table (query)" group_builder-> data = Data.setup make_new_connection + group_builder.teardown <| + data.teardown + group_builder.specify "should be able to create a persistent copy of a DB table" <| t = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [1.0, 2.0, 3.0]]] tmp_connection = make_new_connection Nothing @@ -597,6 +609,9 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = suite_builder.group prefix+"Appending an in-memory table to a Database table" group_builder-> data = Data.setup make_new_connection + group_builder.teardown <| + data.teardown + test_table_append group_builder data in_memory_table_builder (database_table_builder "target-table") group_builder.specify "will issue a friendly error if using in-memory table as target" <| @@ -609,21 +624,33 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = suite_builder.group 
prefix+"Appending a Database table to a Database table" group_builder-> data = Data.setup make_new_connection + group_builder.teardown <| + data.teardown + test_table_append group_builder data (database_table_builder "source-table") (database_table_builder "target-table") suite_builder.group prefix+"Deleting rows from a Database table (source=in-memory)" group_builder-> data = Data.setup make_new_connection + group_builder.teardown <| + data.teardown + test_table_delete group_builder data in_memory_table_builder (database_table_builder "target-table") suite_builder.group prefix+"Deleting rows from a Database table (source=Database)" group_builder-> data = Data.setup make_new_connection + group_builder.teardown <| + data.teardown + test_table_delete group_builder data (database_table_builder "source-table") (database_table_builder "target-table") suite_builder.group prefix+"Deleting rows from a Database table" group_builder-> data = Data.setup make_new_connection + group_builder.teardown <| + data.teardown + group_builder.specify "[ADVANCED] it should be possible to truncate the whole table" <| name = Name_Generator.random_name "table-to-truncate" t = (Table.new [["X", [1, 2, 3]]]).select_into_database_table data.connection name temporary=True primary_key=[] @@ -658,6 +685,9 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = suite_builder.group execution_context_group_name group_builder-> data = Data.setup make_new_connection + group_builder.teardown <| + data.teardown + group_builder.specify "should forbid executing updates" <| Context.Output.with_disabled <| r1 = data.connection.execute_update "CREATE TEMPORARY TABLE foo (x INTEGER)" From 32ac4e847e61423f9d38a985108af6a8602957f0 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 12:03:29 +0100 Subject: [PATCH 67/93] Add docs how to run Postgres in Docker for testing --- test/Table_Tests/src/Database/README.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git 
a/test/Table_Tests/src/Database/README.md b/test/Table_Tests/src/Database/README.md index 72e8d693e1b1..55c9fab8341d 100644 --- a/test/Table_Tests/src/Database/README.md +++ b/test/Table_Tests/src/Database/README.md @@ -29,6 +29,14 @@ If the database server is remote, `ENSO_DATABASE_TEST_HOST` may be set to tell the test suite where to connect. If that variable is not set, the test suite will attempt to connect to a local server. +### Setup via Docker +The following commands will download the latest Postgres Docker image and run +it with the default configuration with password "pwd": +```sh +docker pull postgres:latest +docker run -it -e POSTGRES_PASSWORD=pwd -p 5432:5432 postgres +``` + ### Testing SSL connectivity The SSL connection by providing a root certificate file. The path to this is From 9864a5a0f85698d60832dd5a3492f05db8a38a80 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 12:09:14 +0100 Subject: [PATCH 68/93] Fix typo in Integration_Tests --- .../src/Common_Table_Operations/Integration_Tests.enso | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso index d9f3c639f31b..a34d7a7735be 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso @@ -33,7 +33,7 @@ add_specs suite_builder setup = data.teardown table_builder cols = - setup.table_builder cols + setup.table_builder cols connection=data.connection group_builder.specify "using weird column names with operations and filtering" <| columns = weird_names.map_with_index ix-> name-> From 602fb47aa17f7d6ce50b76eb42beb70583c6f246 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 12:30:52 +0100 Subject: [PATCH 69/93] Refactor Redshift tests to Test_New --- test/Table_Tests/src/Database/Main.enso | 13 +-- 
.../src/Database/Redshift_Spec.enso | 83 ++++++++++++------- 2 files changed, 60 insertions(+), 36 deletions(-) diff --git a/test/Table_Tests/src/Database/Main.enso b/test/Table_Tests/src/Database/Main.enso index 3055c8ec58e8..0a55f1d889da 100644 --- a/test/Table_Tests/src/Database/Main.enso +++ b/test/Table_Tests/src/Database/Main.enso @@ -7,10 +7,11 @@ import project.Database.SQLite_Spec import project.Database.Postgres_Spec import project.Database.Redshift_Spec -spec = - Codegen_Spec.spec - SQLite_Spec.spec - Postgres_Spec.spec - Redshift_Spec.spec -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + Codegen_Spec.add_specs suite_builder + SQLite_Spec.add_specs suite_builder + Postgres_Spec.add_specs suite_builder + Redshift_Spec.add_specs suite_builder + suite.run_with_filter diff --git a/test/Table_Tests/src/Database/Redshift_Spec.enso b/test/Table_Tests/src/Database/Redshift_Spec.enso index 3d3436e752f3..d82c6c9d6201 100644 --- a/test/Table_Tests/src/Database/Redshift_Spec.enso +++ b/test/Table_Tests/src/Database/Redshift_Spec.enso @@ -8,15 +8,20 @@ from Standard.Database import all from Standard.AWS import Redshift_Details, AWS_Credential -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all import project.Database.Common.Common_Spec import project.Database.Helpers.Name_Generator import project.Common_Table_Operations -redshift_specific_spec connection = - Test.group "[Redshift] Info" <| +type Data + Value ~data + + connection self = self.data.at 0 + t self = self.data.at 1 + + setup create_connection_fn = Data.Value <| + connection = create_connection_fn Nothing tinfo = Name_Generator.random_name "Tinfo" connection.execute_update 'CREATE TEMPORARY TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' t = connection.query (SQL_Query.Table_Name tinfo) @@ -25,45 +30,59 @@ redshift_specific_spec connection = row3 = ["def", 42, True, 1.4] 
Panic.rethrow <| t.update_rows (Table.from_rows ["strs", "ints", "bools", "reals"] [row1, row2, row3]) update_action=Update_Action.Insert + [connection, t] + + teardown self = self.connection.close + + +add_redshift_specific_specs suite_builder create_connection_fn = + suite_builder.group "[Redshift] Info" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown - Test.specify "should return Table information" <| - i = t.info + group_builder.specify "should return Table information" <| + i = data.t.info i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "reals"] i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3] i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer Bits.Bits_32, Value_Type.Boolean, Value_Type.Float Bits.Bits_32] - Test.specify "should infer standard types correctly" <| - t.at "strs" . value_type . is_text . should_be_true - t.at "ints" . value_type . is_integer . should_be_true - t.at "bools" . value_type . is_boolean . should_be_true - t.at "reals" . value_type . is_floating_point . should_be_true - connection.execute_update 'DROP TABLE "'+tinfo+'"' - -run_tests connection = + + group_builder.specify "should infer standard types correctly" <| + data.t.at "strs" . value_type . is_text . should_be_true + data.t.at "ints" . value_type . is_integer . should_be_true + data.t.at "bools" . value_type . is_boolean . should_be_true + data.t.at "reals" . value_type . is_floating_point . should_be_true + +add_specs suite_builder create_connection_fn = prefix = "[Redshift] " name_counter = Ref.new 0 - tables = Vector.new_builder - table_builder columns = + table_builder columns connection=(create_connection_fn Nothing) = ix = name_counter.get name_counter . 
put ix+1 name = Name_Generator.random_name "table_"+ix.to_text in_mem_table = Table.new columns in_mem_table.select_into_database_table connection name primary_key=Nothing temporary=True + materialize = .read - Common_Spec.spec prefix connection - redshift_specific_spec connection + Common_Spec.add_specs suite_builder prefix create_connection_fn + add_redshift_specific_specs suite_builder create_connection_fn common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False supports_decimal_type=True aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False date_support=False agg_in_memory_table = (enso_project.data / "data.csv") . read - agg_table = agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True - tables.append agg_table.name - empty_agg_table = (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True - tables.append empty_agg_table.name + agg_table_fn = _-> + connection = create_connection_fn Nothing + agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True - setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection connection=connection - Common_Table_Operations.Main.spec setup + empty_agg_table_fn = _-> + connection = create_connection_fn Nothing + (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True + + setup = Common_Table_Operations.Main.Test_Setup.Config prefix 
agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn + Common_Table_Operations.Main.add_specs suite_builder setup connect_via_json_config = credentials = enso_project.data / 'redshift_credentials.json' @@ -107,16 +126,20 @@ uri_parse uri = db_name = if host_db_split.length == 1 then '' else host_db_split.at 1 [db_host, db_port, db_name] -spec = +add_all_specs suite_builder = db_host_port = Environment.get "ENSO_REDSHIFT_URI" connection_details = if db_host_port.is_nothing then connect_via_json_config else connect_via_aws_environment db_host_port case connection_details of _ : Text -> - Test.group "[Redshift] Database tests" pending=connection_details Nothing + suite_builder.group "[Redshift] Database tests" pending=connection_details (_-> Nothing) _ -> - connection = Database.connect connection_details - run_tests connection - -main = Test_Suite.run_main spec + create_connection_fn = _-> + Database.connect connection_details + add_specs suite_builder create_connection_fn + +main = + suite = Test.build suite_builder-> + add_all_specs suite_builder + suite.run_with_filter From 14fa1bceb4c1a48e5af43a074633ac596a914c6a Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 12:31:44 +0100 Subject: [PATCH 70/93] Fix typos --- test/Table_Tests/src/Database/Postgres_Spec.enso | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/Table_Tests/src/Database/Postgres_Spec.enso b/test/Table_Tests/src/Database/Postgres_Spec.enso index 25a3a33bdf0c..6b7fe7c9911a 100644 --- a/test/Table_Tests/src/Database/Postgres_Spec.enso +++ b/test/Table_Tests/src/Database/Postgres_Spec.enso @@ -688,7 +688,7 @@ add_table_specs suite_builder = case create_connection_builder of Nothing -> message = "PostgreSQL test database is not configured. See README.md for instructions." 
- Test.group "[PostgreSQL] Database tests" pending=message (_-> Nothing) + suite_builder.group "[PostgreSQL] Database tests" pending=message (_-> Nothing) connection_builder -> add_postgres_specs suite_builder connection_builder db_name Postgres_Type_Mapping_Spec.add_specs suite_builder connection_builder From 0b9d063c695ff8f3ccffa43e204a79a8ce8369b1 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 12:43:45 +0100 Subject: [PATCH 71/93] Remove empty file --- test/Base_Tests/src/Main_New.enso | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 test/Base_Tests/src/Main_New.enso diff --git a/test/Base_Tests/src/Main_New.enso b/test/Base_Tests/src/Main_New.enso deleted file mode 100644 index e69de29bb2d1..000000000000 From 37ed88e3793da1fef7a5c389c175d483467d9dd3 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 12:46:54 +0100 Subject: [PATCH 72/93] Remove Phases from Test_New --- .../lib/Standard/Test_New/0.0.0-dev/src/Phases.enso | 6 ------ distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso | 6 ++---- 2 files changed, 2 insertions(+), 10 deletions(-) delete mode 100644 distribution/lib/Standard/Test_New/0.0.0-dev/src/Phases.enso diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Phases.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Phases.enso deleted file mode 100644 index ae4c95f7ea74..000000000000 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Phases.enso +++ /dev/null @@ -1,6 +0,0 @@ -private - -type Building_Tests - -type Running_Tests - diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index 716a00e09082..a6aea3dad465 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -6,7 +6,6 @@ import Standard.Base.Runtime.State import project.Group.Group import project.Group.Group_Builder import project.Helpers 
-import project.Phases.Running_Tests import project.Suite_Config.Suite_Config import project.Test_Reporter @@ -68,9 +67,8 @@ type Suite junit_sb_builder = if config.should_output_junit then StringBuilder.new else Nothing Test_Reporter.wrap_junit_testsuites config junit_sb_builder <| filtered_groups.each group-> - State.run Running_Tests True <| - results = Helpers.run_group_with_filter group spec_filter_conv - Test_Reporter.print_report results config junit_sb_builder + results = Helpers.run_group_with_filter group spec_filter_conv + Test_Reporter.print_report results config junit_sb_builder group_names self = self.groups.map (_.name) From 5ea134a02dd389614fd380be6de5f010d59ce3b2 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 13:03:40 +0100 Subject: [PATCH 73/93] Add docs to Test_New --- .../Test_New/0.0.0-dev/src/Group.enso | 1 + .../Test_New/0.0.0-dev/src/Helpers.enso | 3 --- .../Test_New/0.0.0-dev/src/Suite.enso | 21 +++++++++++-------- .../Test_New/0.0.0-dev/src/Test_Result.enso | 1 + 4 files changed, 14 insertions(+), 12 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso index c556c73a39c2..894c3eafd167 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Group.enso @@ -15,6 +15,7 @@ type Group_Builder ## Specifies a single test. Arguments: + - name: The name of the test spec. Should be unique within the group. - code: A thunk that takes no arguments. - pending: Contains a reason for why the test should be ignored. If Nothing, the test is not ignored. 
diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso index 56d4ee4d6fc3..5b337e843cca 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso @@ -30,11 +30,8 @@ run_specs_from_group (specs : Vector Spec) (group : Group) = case specs.is_empty of True -> [] False -> - IO.println <| "Running tests in group '" + group.name + "'..." test_results = specs.map spec-> - IO.println <| " Running spec '" + spec.name + "'..." pair = run_spec spec - IO.println <| " Finished spec '" + spec.name + "'." spec_res = pair.second time_taken = pair.first Test_Result.Impl group.name spec.name spec_res time_taken diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index a6aea3dad465..851ee44515d4 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -13,11 +13,18 @@ polyglot java import java.lang.StringBuilder type Suite_Builder ## PRIVATE + + Arguments: + - builder: Vector builder that holds all the groups. Impl builder - ## Add a group to the builder. + ## Add a group to the suite builder. Arguments: + - name: Name of the group. When collecting multiple groups into a single suite, all the + group names must be unique. Otherwise, a panic is thrown. + - fn: A callback function that takes `Group_Builder` as an argument. You can call the + `specify` method on group builder. See its docs. - pending: Contains a reason for why the test group should be ignored. If Nothing, the test is not ignored. group : Text -> (Group_Builder -> Any) -> Nothing @@ -28,17 +35,13 @@ type Suite_Builder self.builder.append group +## A testing suite that holds all the groups with test specs. + + Users are expected to build the suite via `Suite_Builder`. 
That is, via `Test.build` method. type Suite + ## PRIVATE Impl (groups : Vector Group) - ## Merges multiple suites into one suite - merge : (Vector Suite) -> Suite - merge suites = - all_groups = suites.map (_.groups) . flatten - all_group_names = all_groups.map (_.name) - if all_group_names.distinct.length != all_group_names.length then Panic.throw "Group names not unique" else - Suite.Impl all_groups - run_all self = self.run_with_filter Nothing Nothing diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Result.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Result.enso index 078afb916155..dc2d69be9726 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Result.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Test_Result.enso @@ -3,6 +3,7 @@ from Standard.Base import all import project.Spec_Result.Spec_Result +## A wrapper for `Spec_Result` that contains also name of the group and name of the spec. type Test_Result Impl (group_name : Text) (spec_name : Text) (spec_result : Spec_Result) (time_taken : Duration) From 7bc400bc5e397c2407c3f4904332307c813ee056 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 13:09:47 +0100 Subject: [PATCH 74/93] Add docs to Test_New --- .../lib/Standard/Test_New/0.0.0-dev/src/Suite.enso | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index 851ee44515d4..e8b440445b3a 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -45,8 +45,14 @@ type Suite run_all self = self.run_with_filter Nothing Nothing - ## Run only tests that match the provided filters. The filters are converted to - a regular expression. If a filter is Nothing, it matches all the names. + ## Run only tests that match the provided filters. 
+ + A filter can be either Regex, Text, or Nothing. If Nothing, it matches all the names. + If Text, it matches only names that contains that text as a substring. + If a Regex, the `regex.match` method is invoked for every name. + + Note that you can specify the regex such that it matches, e.g., exactly two test spec names + via something like `Regex.compile "(name1|name2)"`. Arguments: - group_filter: Filter for group names. From 587b28720107e59a68eae9c7ce44ae533889f13c Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 13:10:15 +0100 Subject: [PATCH 75/93] Remove Suite.run_all method --- distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso | 3 --- 1 file changed, 3 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index e8b440445b3a..af77b5f1f5a6 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -42,9 +42,6 @@ type Suite ## PRIVATE Impl (groups : Vector Group) - run_all self = - self.run_with_filter Nothing Nothing - ## Run only tests that match the provided filters. A filter can be either Regex, Text, or Nothing. If Nothing, it matches all the names. 
From cffdad3bfbb0e4d46839b066ddf5fd3885611878 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 13:22:42 +0100 Subject: [PATCH 76/93] Filter can be either Regex, Text, or Nothing --- .../Standard/Test_New/0.0.0-dev/src/Helpers.enso | 7 +++++-- .../Standard/Test_New/0.0.0-dev/src/Suite.enso | 15 +++++---------- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso index 5b337e843cca..49d02fb89bff 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso @@ -14,9 +14,12 @@ import project.Test_Reporter import project.Test.Test run_group_with_filter : Group -> Regex -> Vector Test_Result -run_group_with_filter (group : Group) (spec_filter : Regex) = +run_group_with_filter (group : Group) (spec_filter : (Regex|Text|Nothing)) = filtered_specs = group.specs.filter spec-> - (spec_filter.match spec.name) != Nothing + case spec_filter of + regex : Regex -> (regex.match spec.name) != Nothing + text : Text -> spec.name.contains text + Nothing -> True run_specs_from_group filtered_specs group diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index af77b5f1f5a6..0d84b1ff963b 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -57,23 +57,18 @@ type Suite run_with_filter : (Regex | Text | Nothing) -> (Regex | Text | Nothing) -> Nothing run_with_filter self group_filter=Nothing spec_filter=Nothing = config = Suite_Config.from_environment - convert_filter filter = - case filter of - r : Regex -> r - t : Text -> Regex.compile t - Nothing -> Regex.compile ".*" - _ -> Error.throw (Illegal_Argument.Error "Wrong specification of a filter: "+filter.to_text) - group_filter_conv = 
convert_filter group_filter - spec_filter_conv = convert_filter spec_filter filtered_groups = self.groups.filter group-> - group_name_matches = (group_filter_conv.match group.name) != Nothing + group_name_matches = case group_filter of + regex : Regex -> (regex.match group.name) != Nothing + text : Text -> group.name.contains text + Nothing -> True case group_name_matches of False -> False True -> group.pending == Nothing junit_sb_builder = if config.should_output_junit then StringBuilder.new else Nothing Test_Reporter.wrap_junit_testsuites config junit_sb_builder <| filtered_groups.each group-> - results = Helpers.run_group_with_filter group spec_filter_conv + results = Helpers.run_group_with_filter group spec_filter Test_Reporter.print_report results config junit_sb_builder group_names self = From 0a7a527d8c67b6866ce50b891a9a77469e1c22cf Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 13:33:22 +0100 Subject: [PATCH 77/93] Print test stats at the end of the run --- .../Standard/Test_New/0.0.0-dev/src/Helpers.enso | 2 +- .../lib/Standard/Test_New/0.0.0-dev/src/Suite.enso | 13 +++++++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso index 49d02fb89bff..ad61cac17d00 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Helpers.enso @@ -13,7 +13,7 @@ import project.Test_Result.Test_Result import project.Test_Reporter import project.Test.Test -run_group_with_filter : Group -> Regex -> Vector Test_Result +run_group_with_filter : Group -> (Regex|Text|Nothing) -> Vector Test_Result run_group_with_filter (group : Group) (spec_filter : (Regex|Text|Nothing)) = filtered_specs = group.specs.filter spec-> case spec_filter of diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso 
b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index 0d84b1ff963b..ba08a41caeec 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -66,10 +66,19 @@ type Suite False -> False True -> group.pending == Nothing junit_sb_builder = if config.should_output_junit then StringBuilder.new else Nothing - Test_Reporter.wrap_junit_testsuites config junit_sb_builder <| - filtered_groups.each group-> + all_results = Test_Reporter.wrap_junit_testsuites config junit_sb_builder <| + filtered_groups.flat_map group-> results = Helpers.run_group_with_filter group spec_filter Test_Reporter.print_report results config junit_sb_builder + results + succ_tests = all_results.filter (_.is_success) . length + failed_tests = all_results.filter (_.is_fail) . length + skipped_tests = all_results.filter (_.is_pending) . length + IO.println <| succ_tests.to_text + " tests succeeded." + IO.println <| failed_tests.to_text + " tests failed." + IO.println <| skipped_tests.to_text + " tests skipped." 
+ exit_code = if failed_tests > 0 then 1 else 0 + System.exit exit_code group_names self = self.groups.map (_.name) From ef3a4083354d6ba9af68f4a6013ef7971ef0377a Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 13:37:57 +0100 Subject: [PATCH 78/93] Refactor Table_Tests/src/Main.enso to Test_New --- test/Table_Tests/src/Main.enso | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/test/Table_Tests/src/Main.enso b/test/Table_Tests/src/Main.enso index b67eef28c59e..c89b9d3d5d30 100644 --- a/test/Table_Tests/src/Main.enso +++ b/test/Table_Tests/src/Main.enso @@ -1,6 +1,6 @@ from Standard.Base import all hiding IO -from Standard.Test import Test_Suite +from Standard.Test_New import Test import project.Database import project.Formatting @@ -8,9 +8,11 @@ import project.Helpers import project.In_Memory import project.IO -main = Test_Suite.run_main <| - In_Memory.Main.spec - IO.Main.spec - Formatting.Main.spec - Database.Main.spec - Helpers.Main.spec +main = + suite = Test.build suite_builder-> + In_Memory.Main.add_specs suite_builder + IO.Main.add_specs suite_builder + Formatting.Main.add_specs suite_builder + Database.Main.add_specs suite_builder + Helpers.Main.add_specs suite_builder + suite.run_with_filter From 357b084438e7234598fa7dad0417b7755e07e857 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 13:48:31 +0100 Subject: [PATCH 79/93] Implement `add_specs` method in all Main.enso files. 
So that it can all be collected from the parent Main --- test/Table_Tests/src/Database/Main.enso | 12 +++++---- .../src/Database/Redshift_Spec.enso | 8 +++--- .../Table_Tests/src/Database/SQLite_Spec.enso | 8 +++--- test/Table_Tests/src/Formatting/Main.enso | 7 +++-- test/Table_Tests/src/Helpers/Main.enso | 9 ++++--- test/Table_Tests/src/IO/Main.enso | 17 +++++++----- test/Table_Tests/src/In_Memory/Main.enso | 27 ++++++++++--------- 7 files changed, 51 insertions(+), 37 deletions(-) diff --git a/test/Table_Tests/src/Database/Main.enso b/test/Table_Tests/src/Database/Main.enso index 0a55f1d889da..fc9fc69ac2c8 100644 --- a/test/Table_Tests/src/Database/Main.enso +++ b/test/Table_Tests/src/Database/Main.enso @@ -1,17 +1,19 @@ from Standard.Base import all -from Standard.Test import Test_Suite +from Standard.Test_New import Test import project.Database.Codegen_Spec import project.Database.SQLite_Spec import project.Database.Postgres_Spec import project.Database.Redshift_Spec +add_specs suite_builder = + Codegen_Spec.add_specs suite_builder + SQLite_Spec.add_specs suite_builder + Postgres_Spec.add_specs suite_builder + Redshift_Spec.add_specs suite_builder main = suite = Test.build suite_builder-> - Codegen_Spec.add_specs suite_builder - SQLite_Spec.add_specs suite_builder - Postgres_Spec.add_specs suite_builder - Redshift_Spec.add_specs suite_builder + add_specs suite_builder suite.run_with_filter diff --git a/test/Table_Tests/src/Database/Redshift_Spec.enso b/test/Table_Tests/src/Database/Redshift_Spec.enso index d82c6c9d6201..e9fe3c23d74b 100644 --- a/test/Table_Tests/src/Database/Redshift_Spec.enso +++ b/test/Table_Tests/src/Database/Redshift_Spec.enso @@ -54,7 +54,7 @@ add_redshift_specific_specs suite_builder create_connection_fn = data.t.at "bools" . value_type . is_boolean . should_be_true data.t.at "reals" . value_type . is_floating_point . 
should_be_true -add_specs suite_builder create_connection_fn = +add_database_specs suite_builder create_connection_fn = prefix = "[Redshift] " name_counter = Ref.new 0 table_builder columns connection=(create_connection_fn Nothing) = @@ -126,7 +126,7 @@ uri_parse uri = db_name = if host_db_split.length == 1 then '' else host_db_split.at 1 [db_host, db_port, db_name] -add_all_specs suite_builder = +add_specs suite_builder = db_host_port = Environment.get "ENSO_REDSHIFT_URI" connection_details = if db_host_port.is_nothing then connect_via_json_config else connect_via_aws_environment db_host_port @@ -137,9 +137,9 @@ add_all_specs suite_builder = _ -> create_connection_fn = _-> Database.connect connection_details - add_specs suite_builder create_connection_fn + add_database_specs suite_builder create_connection_fn main = suite = Test.build suite_builder-> - add_all_specs suite_builder + add_specs suite_builder suite.run_with_filter diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 4a8ea31f2001..b69c5b51a289 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -389,7 +389,7 @@ type File_Connection self.file.delete -suite = Test.build suite_builder-> +add_specs suite_builder = in_file_prefix = "[SQLite File] " sqlite_spec suite_builder in_file_prefix (_ -> create_file_connection backing_file) @@ -449,6 +449,6 @@ suite = Test.build suite_builder-> Warning.get_all t2 . length . 
should_equal 1 main = - group_filter = Regex.compile ".*" - spec_filter = Regex.compile ".*" - suite.run_with_filter group_filter spec_filter + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter diff --git a/test/Table_Tests/src/Formatting/Main.enso b/test/Table_Tests/src/Formatting/Main.enso index b6541499bd3d..296a6b159c8f 100644 --- a/test/Table_Tests/src/Formatting/Main.enso +++ b/test/Table_Tests/src/Formatting/Main.enso @@ -5,8 +5,11 @@ from Standard.Test_New import all import project.Formatting.Data_Formatter_Spec import project.Formatting.Parse_Values_Spec +add_specs suite_builder = + Data_Formatter_Spec.add_specs suite_builder + Parse_Values_Spec.add_specs suite_builder + main = suite = Test.build suite_builder-> - Data_Formatter_Spec.add_specs suite_builder - Parse_Values_Spec.add_specs suite_builder + add_specs suite_builder suite.run_with_filter diff --git a/test/Table_Tests/src/Helpers/Main.enso b/test/Table_Tests/src/Helpers/Main.enso index 19a9f9de1ca4..62f95ad0190f 100644 --- a/test/Table_Tests/src/Helpers/Main.enso +++ b/test/Table_Tests/src/Helpers/Main.enso @@ -6,9 +6,12 @@ import project.Helpers.Sorted_List_Index_Spec import project.Helpers.Unique_Naming_Strategy_Spec import project.Helpers.Value_Type_Spec +add_specs suite_builder = + Unique_Naming_Strategy_Spec.add_specs suite_builder + Sorted_List_Index_Spec.add_specs suite_builder + Value_Type_Spec.add_specs suite_builder + main = suite = Test.build suite_builder-> - Unique_Naming_Strategy_Spec.add_specs suite_builder - Sorted_List_Index_Spec.add_specs suite_builder - Value_Type_Spec.add_specs suite_builder + add_specs suite_builder suite.run_with_filter diff --git a/test/Table_Tests/src/IO/Main.enso b/test/Table_Tests/src/IO/Main.enso index ad285ae46f29..17814c5b672b 100644 --- a/test/Table_Tests/src/IO/Main.enso +++ b/test/Table_Tests/src/IO/Main.enso @@ -10,13 +10,16 @@ import project.IO.Fetch_Spec import project.IO.Formats_Spec import project.IO.Json_Spec 
+add_specs suite_builder = + Csv_Spec.add_specs suite_builder + Delimited_Read_Spec.add_specs suite_builder + Delimited_Write_Spec.add_specs suite_builder + Excel_Spec.add_specs suite_builder + Formats_Spec.add_specs suite_builder + Fetch_Spec.add_specs suite_builder + Json_Spec.add_specs suite_builder + main = suite = Test.build suite_builder-> - Csv_Spec.add_specs suite_builder - Delimited_Read_Spec.add_specs suite_builder - Delimited_Write_Spec.add_specs suite_builder - Excel_Spec.add_specs suite_builder - Formats_Spec.add_specs suite_builder - Fetch_Spec.add_specs suite_builder - Json_Spec.add_specs suite_builder + add_specs suite_builder suite.run_with_filter diff --git a/test/Table_Tests/src/In_Memory/Main.enso b/test/Table_Tests/src/In_Memory/Main.enso index 820f21adc953..ef24fc1046ad 100644 --- a/test/Table_Tests/src/In_Memory/Main.enso +++ b/test/Table_Tests/src/In_Memory/Main.enso @@ -15,18 +15,21 @@ import project.In_Memory.Table_Date_Spec import project.In_Memory.Table_Date_Time_Spec import project.In_Memory.Table_Time_Of_Day_Spec +add_specs suite_builder = + Table_Spec.add_specs suite_builder + Column_Spec.add_specs suite_builder + Column_Format_Spec.add_specs suite_builder + Integer_Overflow_Spec.add_specs suite_builder + Lossy_Conversions_Spec.add_specs suite_builder + Table_Date_Spec.add_specs suite_builder + Table_Date_Time_Spec.add_specs suite_builder + Table_Time_Of_Day_Spec.add_specs suite_builder + Aggregate_Column_Spec.add_specs suite_builder + Builders_Spec.add_specs suite_builder + Split_Tokenize_Spec.add_specs suite_builder + Parse_To_Table_Spec.add_specs suite_builder + main = suite = Test.build suite_builder-> - Table_Spec.add_specs suite_builder - Column_Spec.add_specs suite_builder - Column_Format_Spec.add_specs suite_builder - Integer_Overflow_Spec.add_specs suite_builder - Lossy_Conversions_Spec.add_specs suite_builder - Table_Date_Spec.add_specs suite_builder - Table_Date_Time_Spec.add_specs suite_builder - 
Table_Time_Of_Day_Spec.add_specs suite_builder - Aggregate_Column_Spec.add_specs suite_builder - Builders_Spec.add_specs suite_builder - Split_Tokenize_Spec.add_specs suite_builder - Parse_To_Table_Spec.add_specs suite_builder + add_specs suite_builder suite.run_with_filter From 6b50c3d5b03a91e364682333faa51a51f9f20c4e Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 13:55:10 +0100 Subject: [PATCH 80/93] Remove temporary printing --- test/Table_Tests/src/Database/Common/Common_Spec.enso | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/Table_Tests/src/Database/Common/Common_Spec.enso b/test/Table_Tests/src/Database/Common/Common_Spec.enso index 8aa0d6677371..bc901a7e7a49 100644 --- a/test/Table_Tests/src/Database/Common/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Common_Spec.enso @@ -100,7 +100,6 @@ type Aggregation_Data t9 self = self.data.second setup create_connection_fn = Aggregation_Data.Value <| - IO.println <| " Common_Spec_New.Aggregation_Data.setup" connection = create_connection_fn Nothing builders = [Vector.new_builder,Vector.new_builder,Vector.new_builder] insert v = @@ -137,7 +136,6 @@ type Missing_Values_Data t4 self = self.data.second setup create_connection_fn = Missing_Values_Data.Value <| - IO.println <| " Common_Spec_New.Missing_Values_Data.setup" connection = create_connection_fn Nothing t4 = upload connection "T4" <| Table.new [["a", [0, 1, Nothing, 42, Nothing]], ["b", [True, Nothing, True, False, Nothing]], ["c", ["", "foo", "bar", Nothing, Nothing]]] From 54c216ff74cbee2ccd4b8815cac58c4e1b0870ea Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Tue, 23 Jan 2024 17:47:27 +0100 Subject: [PATCH 81/93] Set ENSO_TEST_ANSI_COLORS to true when running enso tests --- build/build/src/enso.rs | 2 ++ build/build/src/paths.rs | 3 +++ 2 files changed, 5 insertions(+) diff --git a/build/build/src/enso.rs b/build/build/src/enso.rs index b6c9a0b150fb..cc5d5371737b 100644 --- a/build/build/src/enso.rs +++ 
b/build/build/src/enso.rs @@ -2,6 +2,7 @@ use crate::prelude::*; use crate::paths::Paths; use crate::paths::ENSO_ENABLE_ASSERTIONS; +use crate::paths::ENSO_TEST_ANSI_COLORS; use crate::paths::ENSO_META_TEST_ARGS; use crate::paths::ENSO_META_TEST_COMMAND; use crate::postgres; @@ -121,6 +122,7 @@ impl BuiltEnso { ENSO_META_TEST_ARGS.set(&format!("{} --run", ir_caches.flag()))?; ENSO_ENABLE_ASSERTIONS.set("true")?; + ENSO_TEST_ANSI_COLORS.set("true")?; // Prepare Engine Test Environment if let Ok(gdoc_key) = std::env::var("GDOC_KEY") { diff --git a/build/build/src/paths.rs b/build/build/src/paths.rs index 34c499add7f7..41cdc058d5ad 100644 --- a/build/build/src/paths.rs +++ b/build/build/src/paths.rs @@ -33,6 +33,9 @@ ide_ci::define_env_var! { /// If Enso-specific assertions should be enabled. ENSO_ENABLE_ASSERTIONS, String; + /// If true, enso tests output will be colored. + ENSO_TEST_ANSI_COLORS, String; + /// Can be set to `"espresso"` to enable Espresso interpreter support. ENSO_JAVA, String; } From 84c2f61287a21859f829e692cf0e5f485351e3dd Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Wed, 24 Jan 2024 10:59:17 +0100 Subject: [PATCH 82/93] Add In_Memory/Common_Spec I have accidentally removed this test file --- .../src/In_Memory/Common_Spec.enso | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 test/Table_Tests/src/In_Memory/Common_Spec.enso diff --git a/test/Table_Tests/src/In_Memory/Common_Spec.enso b/test/Table_Tests/src/In_Memory/Common_Spec.enso new file mode 100644 index 000000000000..5b94545e2e9a --- /dev/null +++ b/test/Table_Tests/src/In_Memory/Common_Spec.enso @@ -0,0 +1,39 @@ +from Standard.Base import all + +from Standard.Table import Table + +from Standard.Test_New import Test + +import project.Common_Table_Operations + +type Dummy_Connection + close = Nothing + +add_specs suite_builder = + selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by=True natural_ordering=True 
case_insensitive_ordering=True order_by_unicode_normalization_by_default=True supports_unicode_normalization=True supports_time_duration=True supports_nanoseconds_in_time=True supports_mixed_columns=True fixed_length_text_columns=True supports_8bit_integer=True + aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config + + agg_table_fn = _ -> + (enso_project.data / "data.csv") . read + + empty_table_fn = _ -> + table = (enso_project.data / "data.csv") . read + table.take 0 + + materialize = x->x + + table_builder columns connection=Nothing = + _ = connection + Table.new columns + + create_connection_func = _-> Dummy_Connection + + setup = Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " agg_table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func + + Common_Table_Operations.Main.add_specs suite_builder setup + + +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter From e69f50bce26afb0d6ab8023f2244dba4c23dc71a Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Wed, 24 Jan 2024 12:15:36 +0100 Subject: [PATCH 83/93] Fix teardowns in Common_Table_Operations/Join --- .../Join/Cross_Join_Spec.enso | 47 +++--- .../Join/Join_Spec.enso | 156 +++++++++--------- .../Join/Lookup_Spec.enso | 120 +++++++------- .../Join/Union_Spec.enso | 144 ++++++++-------- .../Join/Zip_Spec.enso | 19 ++- .../src/Database/Common/Common_Spec.enso | 1 - 6 files changed, 265 insertions(+), 222 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso index cc0ef5f64225..79183aa13f6a 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso @@ -20,17 +20,24 @@ type Data setup 
create_connection_fn = Data.Value (create_connection_fn Nothing) + teardown self = self.connection.close + add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder create_connection_fn = setup.create_connection_func materialize = setup.materialize suite_builder.group prefix+"Table.cross_join" group_builder-> data = Data.setup create_connection_fn + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow to create a cross product of two tables in the right order" <| - t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] connection=data.connection - t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]] connection=data.connection + t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] + t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]] t3 = t1.cross_join t2 expect_column_names ["X", "Y", "Z", "W"] t3 @@ -49,8 +56,8 @@ add_specs suite_builder setup = False -> r.should_equal expected_rows group_builder.specify "should work correctly with empty tables" <| - t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] connection=data.connection - t2 = table_builder [["Z", ['a']], ["W", ['c']]] connection=data.connection + t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] + t2 = table_builder [["Z", ['a']], ["W", ['c']]] # Workaround to easily create empty table until table builder allows that directly. empty = t2.filter "Z" Filter_Condition.Is_Nothing empty.row_count . should_equal 0 @@ -66,10 +73,10 @@ add_specs suite_builder setup = t4.at "X" . to_vector . should_equal [] group_builder.specify "should respect the right row limit" <| - t2 = table_builder [["X", [1, 2]]] connection=data.connection - t3 = table_builder [["X", [1, 2, 3]]] connection=data.connection - t100 = table_builder [["Y", 0.up_to 100 . to_vector]] connection=data.connection - t101 = table_builder [["Y", 0.up_to 101 . 
to_vector]] connection=data.connection + t2 = table_builder [["X", [1, 2]]] + t3 = table_builder [["X", [1, 2, 3]]] + t100 = table_builder [["Y", 0.up_to 100 . to_vector]] + t101 = table_builder [["Y", 0.up_to 101 . to_vector]] t2.cross_join t100 . row_count . should_equal 200 t101.cross_join t2 . row_count . should_equal 202 @@ -83,8 +90,8 @@ add_specs suite_builder setup = t2.cross_join t3 right_row_limit=2 on_problems=Problem_Behavior.Report_Error . should_fail_with Cross_Join_Row_Limit_Exceeded group_builder.specify "should ensure 1-1 mapping even with duplicate rows" <| - t1 = table_builder [["X", [2, 1, 2, 2]], ["Y", [5, 4, 5, 5]]] connection=data.connection - t2 = table_builder [["Z", ['a', 'b', 'a', 'b']]] connection=data.connection + t1 = table_builder [["X", [2, 1, 2, 2]], ["Y", [5, 4, 5, 5]]] + t2 = table_builder [["Z", ['a', 'b', 'a', 'b']]] t3 = t1.cross_join t2 expect_column_names ["X", "Y", "Z"] t3 @@ -101,7 +108,7 @@ add_specs suite_builder setup = False -> r.should_equal expected_rows group_builder.specify "should allow self-joins" <| - t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] connection=data.connection + t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] t2 = t1.cross_join t1 expect_column_names ["X", "Y", "Right X", "Right Y"] t2 @@ -118,8 +125,8 @@ add_specs suite_builder setup = False -> r.should_equal expected_rows group_builder.specify "should rename columns of the right table to avoid duplicates" <| - t1 = table_builder [["X", [1]], ["Y", [5]], ["Right Y", [10]]] connection=data.connection - t2 = table_builder [["X", ['a']], ["Y", ['d']]] connection=data.connection + t1 = table_builder [["X", [1]], ["Y", [5]], ["Right Y", [10]]] + t2 = table_builder [["X", ['a']], ["Y", ['d']]] t3 = t1.cross_join t2 expect_column_names ["X", "Y", "Right Y", "Right X", "Right Y 1"] t3 @@ -135,13 +142,13 @@ add_specs suite_builder setup = expect_column_names ["X", "Y", "Right Y", "X 1", "Y 1"] (t1.cross_join t2 right_prefix="") - t4 = table_builder 
[["X", [1]], ["Right X", [5]]] connection=data.connection + t4 = table_builder [["X", [1]], ["Right X", [5]]] expect_column_names ["X", "Y", "Right Y", "Right X 1", "Right X"] (t1.cross_join t4) expect_column_names ["X", "Right X", "Right X 1", "Y", "Right Y"] (t4.cross_join t1) group_builder.specify "should respect the column ordering" <| - t1 = table_builder [["X", [100, 2]], ["Y", [4, 5]]] connection=data.connection - t2 = table_builder [["Z", ['a', 'b', 'c']], ["W", ['x', 'd', 'd']]] connection=data.connection + t1 = table_builder [["X", [100, 2]], ["Y", [4, 5]]] + t2 = table_builder [["Z", ['a', 'b', 'c']], ["W", ['x', 'd', 'd']]] t3 = t1.order_by "X" t4 = t2.order_by ([Sort_Column.Name "Z" Sort_Direction.Descending]) @@ -164,12 +171,12 @@ add_specs suite_builder setup = False -> r.should_equal expected_rows group_builder.specify "Cross join is not possible via call to .join" <| - t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] connection=data.connection - t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]] connection=data.connection + t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] + t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]] Test.expect_panic_with (t1.join t2 join_kind=Join_Kind_Cross.Cross on=[]) Type_Error group_builder.specify "should gracefully handle tables from different backends" <| - t1 = table_builder [["A", ["a", "b"]]] connection=data.connection + t1 = table_builder [["A", ["a", "b"]]] alternative_connection = Database.connect (SQLite In_Memory) t0 = (Table.new [["X", [1, 2, 4]], ["Z", [10, 20, 30]]]).select_into_database_table alternative_connection "T0" temporary=True diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso index c2af9307e06e..c6639367ea13 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso @@ -42,6 +42,8 @@ type Data t4 = 
table_builder [["X", [1, 1, 3, 2, 2, 4]], ["Y", ["B", "B", "C", "C", "D", "A"]], ["Z", [1, 2, 3, 4, 5, 6]]] connection=connection [connection, t1, t2, t3, t4] + teardown self = self.connection.close + add_specs suite_builder setup = prefix = setup.prefix @@ -52,8 +54,14 @@ add_specs suite_builder setup = suite_builder.group prefix+"Table.join" group_builder-> data = Data.setup create_connection_fn table_builder + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should by default do a Left Outer join on equality of first column in the left table, correlated with column of the same name in the right one" <| - t3 = table_builder [["Z", [4, 5, 6, 7]], ["X", [2, 3, 2, 4]]] connection=data.connection + t3 = table_builder [["Z", [4, 5, 6, 7]], ["X", [2, 3, 2, 4]]] t4 = data.t1.join t3 |> materialize |> _.order_by ["X", "Z"] expect_column_names ["X", "Y", "Z", "Right X"] t4 t4.at "X" . to_vector . should_equal [1, 2, 2, 3] @@ -118,8 +126,8 @@ add_specs suite_builder setup = check_xy_joined r group_builder.specify "should correctly handle duplicated rows in Equals" <| - t1 = table_builder [["X", [1, 2, 2, 3]]] connection=data.connection - t2 = table_builder [["X", [1, 2, 2, 4]]] connection=data.connection + t1 = table_builder [["X", [1, 2, 2, 3]]] + t2 = table_builder [["X", [1, 2, 2, 4]]] r1 = t1.join t2 join_kind=Join_Kind.Full on="X" . order_by "X" within_table r1 <| # Both 2's from t1 match with _both_ ones from t2 _each_, so in total we get 4 `2` pairs: @@ -127,8 +135,8 @@ add_specs suite_builder setup = r1.at "Right X" . to_vector . 
should_equal [4, 1, 2, 2, 2, 2, Nothing] group_builder.specify "should allow to join on text equality ignoring case" <| - t1 = table_builder [["X", ["a", "B"]], ["Y", [1, 2]]] connection=data.connection - t2 = table_builder [["X", ["A", "a", "b"]], ["Z", [1, 2, 3]]] connection=data.connection + t1 = table_builder [["X", ["a", "B"]], ["Y", [1, 2]]] + t2 = table_builder [["X", ["A", "a", "b"]], ["Z", [1, 2, 3]]] r1 = t1.join t2 join_kind=Join_Kind.Inner expect_column_names ["X", "Y", "Z"] r1 @@ -145,8 +153,8 @@ add_specs suite_builder setup = if setup.test_selection.supports_unicode_normalization then group_builder.specify "should correctly handle Unicode equality" <| - t1 = table_builder [["X", ['s\u0301', 'S\u0301']], ["Y", [1, 2]]] connection=data.connection - t2 = table_builder [["X", ['s', 'S', 'ś']], ["Z", [1, 2, 3]]] connection=data.connection + t1 = table_builder [["X", ['s\u0301', 'S\u0301']], ["Y", [1, 2]]] + t2 = table_builder [["X", ['s', 'S', 'ś']], ["Z", [1, 2, 3]]] r1 = t1.join t2 join_kind=Join_Kind.Inner expect_column_names ["X", "Y", "Z"] r1 @@ -163,8 +171,8 @@ add_specs suite_builder setup = # This may need a test_selection toggle in the future, depending on how well databases like coercing decimals and integers. 
group_builder.specify "should correctly handle Enso Float-Integer equality" <| - t1 = table_builder [["X", [1, 2]], ["Y", [10, 20]]] connection=data.connection - t2 = table_builder [["X", [2.0, 2.1, 0.0]], ["Z", [1, 2, 3]]] connection=data.connection + t1 = table_builder [["X", [1, 2]], ["Y", [10, 20]]] + t2 = table_builder [["X", [2.0, 2.1, 0.0]], ["Z", [1, 2, 3]]] r1 = t1.join t2 join_kind=Join_Kind.Inner expect_column_names ["X", "Y", "Z"] r1 @@ -174,8 +182,8 @@ add_specs suite_builder setup = if setup.supports_custom_objects then group_builder.specify "should allow equality joins for custom objects" <| - t1 = table_builder [["X", [My_Type.Value 1 2, My_Type.Value 2 3]], ["Y", [1, 2]]] connection=data.connection - t2 = table_builder [["X", [My_Type.Value 5 0, My_Type.Value 2 1]], ["Z", [10, 20]]] connection=data.connection + t1 = table_builder [["X", [My_Type.Value 1 2, My_Type.Value 2 3]], ["Y", [1, 2]]] + t2 = table_builder [["X", [My_Type.Value 5 0, My_Type.Value 2 1]], ["Z", [10, 20]]] r1 = t1.join t2 join_kind=Join_Kind.Inner |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "Z"] r1 @@ -189,8 +197,8 @@ add_specs suite_builder setup = r1 . at "Z" . to_vector . should_equal [20, 10] group_builder.specify "should allow range-based joins (using Between) for numbers" <| - t1 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]] connection=data.connection - t2 = table_builder [["lower", [1, 10, 8, 12]], ["upper", [1, 12, 30, 0]], ["Z", [1, 2, 3, 4]]] connection=data.connection + t1 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]] + t2 = table_builder [["lower", [1, 10, 8, 12]], ["upper", [1, 12, 30, 0]], ["Z", [1, 2, 3, 4]]] r1 = t1.join join_kind=Join_Kind.Inner t2 on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["X", "Z"] r1.column_names . should_equal ["X", "Y", "lower", "upper", "Z"] @@ -201,8 +209,8 @@ add_specs suite_builder setup = r1 . at "Z" . to_vector . 
should_equal [1, 2, 3, 2, 3] group_builder.specify "should allow range-based joins (using Between) for text" <| - t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]]] connection=data.connection - t2 = table_builder [["lower", ["a", "b"]], ["upper", ["a", "ccc"]], ["Z", [10, 20]]] connection=data.connection + t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]]] + t2 = table_builder [["lower", ["a", "b"]], ["upper", ["a", "ccc"]], ["Z", [10, 20]]] r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["X", "Z"] r1.column_names . should_equal ["X", "Y", "lower", "upper", "Z"] @@ -218,8 +226,8 @@ add_specs suite_builder setup = # 3. empty bounds (lower > upper: 10 > 0) # 4. equal bounds (10 = 10) # 5. unmatched rows on both sides - Full join - t1 = table_builder [["X", [1, 10, 20, 1, 2, 1, 1]], ["id", [1, 2, 3, 4, 5, 7, 7]]] connection=data.connection - t2 = table_builder [["lower", [0, 10, 10]], ["upper", [3, 10, 0]], ["Z", ['a', 'b', 'c']]] connection=data.connection + t1 = table_builder [["X", [1, 10, 20, 1, 2, 1, 1]], ["id", [1, 2, 3, 4, 5, 7, 7]]] + t2 = table_builder [["lower", [0, 10, 10]], ["upper", [3, 10, 0]], ["Z", ['a', 'b', 'c']]] r1 = t1.join t2 join_kind=Join_Kind.Full on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["Z", "id"] within_table r1 <| r1.column_names . should_equal ["X", "id", "lower", "upper", "Z"] @@ -240,15 +248,15 @@ add_specs suite_builder setup = xs = [0, 0, 1, 1, 1, 1, 2, 2, 3, 3, 3, 4, 4, 4] ys = [1, 2, 3, 1, 9, 2, 3, 2, 4, 2, 1, 1, 1, 2] pts = xs.zip ys . 
take (Index_Sub_Range.Sample xs.length seed=42) - t1 = table_builder [["X", pts.map .first], ["Y", pts.map .second]] connection=data.connection + t1 = table_builder [["X", pts.map .first], ["Y", pts.map .second]] - t2 = table_builder [["lx", [1]], ["ux", [3]], ["ly", [1]], ["uy", [2]]] connection=data.connection + t2 = table_builder [["lx", [1]], ["ux", [3]], ["ly", [1]], ["uy", [2]]] r2 = t1.join t2 join_kind=Join_Kind.Inner on=[Join_Condition.Between "X" "lx" "ux", Join_Condition.Between "Y" "ly" "uy"] |> materialize |> _.order_by ["X", "Y"] within_table r2 <| r2.at "X" . to_vector . should_equal [1, 1, 2, 3, 3] r2.at "Y" . to_vector . should_equal [1, 2, 2, 1, 2] - t3 = table_builder [["lx", [1.9]], ["ux", [3]], ["ly", [1]], ["uy", [2]]] connection=data.connection + t3 = table_builder [["lx", [1.9]], ["ux", [3]], ["ly", [1]], ["uy", [2]]] r3 = t1.join t3 join_kind=Join_Kind.Inner on=[Join_Condition.Between "X" "lx" "ux", Join_Condition.Between "Y" "ly" "uy"] |> materialize |> _.order_by ["X", "Y"] within_table r3 <| r3.at "X" . to_vector . should_equal [2, 3, 3] @@ -256,8 +264,8 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle Between edge cases (3)" <| # 7. duplicated rows on both sides - t1 = table_builder [["X", [10, 20, 20]]] connection=data.connection - t2 = table_builder [["low", [15, 15]], ["high", [30, 30]]] connection=data.connection + t1 = table_builder [["X", [10, 20, 20]]] + t2 = table_builder [["low", [15, 15]], ["high", [30, 30]]] r1 = t1.join t2 join_kind=Join_Kind.Right_Outer on=(Join_Condition.Between "X" "low" "high") within_table r1 <| r1.at "X" . to_vector . 
should_equal [20, 20, 20, 20] @@ -272,8 +280,8 @@ add_specs suite_builder setup = if setup.test_selection.supports_unicode_normalization then group_builder.specify "should allow range-based joins (using Between) for text with Unicode normalization" <| - t1 = table_builder [["X", ['s\u0301', 's']], ["Y", [1, 2]]] connection=data.connection - t2 = table_builder [["lower", ['s', 'ś']], ["upper", ['sa', 'ś']], ["Z", [10, 20]]] connection=data.connection + t1 = table_builder [["X", ['s\u0301', 's']], ["Y", [1, 2]]] + t2 = table_builder [["lower", ['s', 'ś']], ["upper", ['sa', 'ś']], ["Z", [10, 20]]] r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "lower", "upper", "Z"] r1 @@ -285,8 +293,8 @@ add_specs suite_builder setup = if setup.supports_custom_objects then group_builder.specify "should allow range-based joins (using Between) for custom objects" <| - t1 = table_builder [["X", [My_Type.Value 20 30, My_Type.Value 1 2]], ["Y", [1, 2]]] connection=data.connection - t2 = table_builder [["lower", [My_Type.Value 3 0, My_Type.Value 10 10]], ["upper", [My_Type.Value 2 1, My_Type.Value 100 0]], ["Z", [10, 20]]] connection=data.connection + t1 = table_builder [["X", [My_Type.Value 20 30, My_Type.Value 1 2]], ["Y", [1, 2]]] + t2 = table_builder [["lower", [My_Type.Value 3 0, My_Type.Value 10 10]], ["upper", [My_Type.Value 2 1, My_Type.Value 100 0]], ["Z", [10, 20]]] r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["Z"] expect_column_names ["X", "Y", "lower", "upper", "Z"] r1 @@ -297,8 +305,8 @@ add_specs suite_builder setup = r1 . at "Z" . to_vector . 
should_equal [10, 20] group_builder.specify "should allow to mix join conditions of various kinds" <| - t1 = table_builder [["X", [1, 12, 12, 0]], ["Y", [1, 2, 3, 4]], ["Z", ["a", "A", "a", "ą"]], ["W", [1, 2, 3, 4]]] connection=data.connection - t2 = table_builder [["X", [12, 12, 1]], ["l", [0, 100, 100]], ["u", [10, 100, 200]], ["Z", ["A", "A", "A"]], ["W'", [10, 20, 30]]] connection=data.connection + t1 = table_builder [["X", [1, 12, 12, 0]], ["Y", [1, 2, 3, 4]], ["Z", ["a", "A", "a", "ą"]], ["W", [1, 2, 3, 4]]] + t2 = table_builder [["X", [12, 12, 1]], ["l", [0, 100, 100]], ["u", [10, 100, 200]], ["Z", ["A", "A", "A"]], ["W'", [10, 20, 30]]] conditions = [Join_Condition.Between "Y" "l" "u", Join_Condition.Equals_Ignore_Case "Z" "Z", Join_Condition.Equals "X" "X"] r1 = t1.join t2 join_kind=Join_Kind.Inner on=conditions |> materialize |> _.order_by ["Y"] @@ -334,16 +342,16 @@ add_specs suite_builder setup = r = data.t3.join data.t4 join_kind=Join_Kind.Inner on=["X", "X", "Y", "X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] check_xy_joined r - t5 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]] connection=data.connection - t6 = table_builder [["lower", [1, 10, 8, 12]], ["upper", [1, 12, 30, 0]], ["Z", [1, 2, 3, 4]]] connection=data.connection + t5 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]] + t6 = table_builder [["lower", [1, 10, 8, 12]], ["upper", [1, 12, 30, 0]], ["Z", [1, 2, 3, 4]]] r1 = t5.join t6 join_kind=Join_Kind.Inner on=[Join_Condition.Between "X" "lower" "upper", Join_Condition.Between "X" "lower" "upper", Join_Condition.Between "X" "lower" "upper"] |> materialize |> _.order_by ["X", "Z"] r1 . at "X" . to_vector . should_equal [1, 10, 10, 12, 12] r1 . at "Y" . to_vector . should_equal [1, 2, 2, 3, 3] r1 . at "Z" . to_vector . 
should_equal [1, 2, 3, 2, 3] - t7 = table_builder [["X", ["a", "B"]], ["Y", [1, 2]]] connection=data.connection - t8 = table_builder [["X", ["A", "a", "b"]], ["Z", [1, 2, 3]]] connection=data.connection + t7 = table_builder [["X", ["a", "B"]], ["Y", [1, 2]]] + t8 = table_builder [["X", ["A", "a", "b"]], ["Z", [1, 2, 3]]] r2 = t7.join t8 join_kind=Join_Kind.Inner on=[Join_Condition.Equals_Ignore_Case "X", Join_Condition.Equals_Ignore_Case "X", Join_Condition.Equals_Ignore_Case "X" "X"] |> materialize |> _.order_by ["Z"] r2 . at "X" . to_vector . should_equal ["a", "a", "B"] @@ -351,7 +359,7 @@ add_specs suite_builder setup = r2 . at "Z" . to_vector . should_equal [1, 2, 3] group_builder.specify "should correctly handle joining a table with itself" <| - t1 = table_builder [["X", [0, 1, 2, 3, 2]], ["Y", [1, 2, 3, 4, 100]], ["A", ["B", "C", "D", "E", "X"]]] connection=data.connection + t1 = table_builder [["X", [0, 1, 2, 3, 2]], ["Y", [1, 2, 3, 4, 100]], ["A", ["B", "C", "D", "E", "X"]]] t2 = t1.join t1 join_kind=Join_Kind.Inner on=(Join_Condition.Equals left="X" right="Y") |> materialize |> _.order_by ["X", "Y"] expect_column_names ["X", "Y", "A", "Right X", "Right Y", "Right A"] t2 @@ -373,7 +381,7 @@ add_specs suite_builder setup = t3.at "Right X" . to_vector . should_equal [2, 3, Nothing, 0, 1, 1, 2] t3.at "Right A" . to_vector . should_equal ["X", "E", Nothing, "B", "C", "C", "D"] - t4 = table_builder [["X", [Nothing, "a", "B"]], ["Y", ["ą", "b", Nothing]], ["Z", [1, 2, 3]]] connection=data.connection + t4 = table_builder [["X", [Nothing, "a", "B"]], ["Y", ["ą", "b", Nothing]], ["Z", [1, 2, 3]]] t5 = t4.join t4 join_kind=Join_Kind.Inner on=(Join_Condition.Equals_Ignore_Case left="Y" right="X") |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "Z", "Right X", "Right Y", "Right Z"] t5 # TODO enable once we handle nothing properly @@ -386,8 +394,8 @@ add_specs suite_builder setup = # t5.at "Right Z" . to_vector . 
should_equal [1, 3] group_builder.specify "should gracefully handle unmatched columns in Join_Conditions" <| - t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]] connection=data.connection - t2 = table_builder [["Z", [2, 1]], ["W", [5, 6]]] connection=data.connection + t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]] + t2 = table_builder [["Z", [2, 1]], ["W", [5, 6]]] # Report error if the default fails - the right table does not have a column with same name as first column of left one: r1 = t1.join t2 @@ -408,8 +416,8 @@ add_specs suite_builder setup = r3.catch.to_display_text.should_equal "The criteria 'baz', 42 (index), -3 (index) did not match any columns in the right table." group_builder.specify "should report Invalid_Value_Type if non-text columns are provided to Equals_Ignore_Case" <| - t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]] connection=data.connection - t2 = table_builder [["Z", ["1", "2", "c"]], ["W", [1, 2, 3]]] connection=data.connection + t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]] + t2 = table_builder [["Z", ["1", "2", "c"]], ["W", [1, 2, 3]]] test result = result.should_fail_with Invalid_Value_Type @@ -421,23 +429,23 @@ add_specs suite_builder setup = t1.join t2 on=(Join_Condition.Equals_Ignore_Case "Y" "Z") on_problems=Problem_Behavior.Ignore group_builder.specify "should report Invalid_Value_Type if incompatible types are correlated" <| - t1 = table_builder [["X", ["1", "2", "c"]]] connection=data.connection - t2 = table_builder [["X", [1, 2, 3]]] connection=data.connection + t1 = table_builder [["X", ["1", "2", "c"]]] + t2 = table_builder [["X", [1, 2, 3]]] r1 = t1.join t2 on_problems=Problem_Behavior.Ignore r1.should_fail_with Invalid_Value_Type group_builder.specify "should report Invalid_Value_Type if incompatible columns types are correlated in Between" <| - t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]] connection=data.connection - t2 = table_builder [["Z", ["1", "2", "c"]], ["W", [1, 2, 
3]]] connection=data.connection + t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]] + t2 = table_builder [["Z", ["1", "2", "c"]], ["W", [1, 2, 3]]] t1.join t2 on=(Join_Condition.Between "X" "W" "W") . should_fail_with Invalid_Value_Type t1.join t2 on=(Join_Condition.Between "Y" "W" "Z") . should_fail_with Invalid_Value_Type t1.join t2 on=(Join_Condition.Between "Y" "Z" "W") . should_fail_with Invalid_Value_Type group_builder.specify "should warn when joining on equality of Float columns" <| - t1 = table_builder [["X", [1.5, 2.0, 2.00000000001]], ["Y", [10, 20, 30]]] connection=data.connection - t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]] connection=data.connection + t1 = table_builder [["X", [1.5, 2.0, 2.00000000001]], ["Y", [10, 20, 30]]] + t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]] action1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Z") on_problems=_ tester1 table = @@ -466,8 +474,8 @@ add_specs suite_builder setup = Problems.assume_no_problems r2 if setup.supports_custom_objects then - t1 = table_builder [["X", [My_Type.Value 1 2, 2.0, 2]], ["Y", [10, 20, 30]]] connection=data.connection - t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]] connection=data.connection + t1 = table_builder [["X", [My_Type.Value 1 2, 2.0, 2]], ["Y", [10, 20, 30]]] + t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]] r3 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Z") on_problems=Problem_Behavior.Report_Warning r3.column_names.should_equal ["X", "Y", "Z", "W"] r4 = r3.order_by ["Y", "W"] @@ -479,8 +487,8 @@ add_specs suite_builder setup = Problems.get_attached_warnings r3 . 
should_contain_the_same_elements_as expected_problems group_builder.specify "should correctly handle nulls in equality conditions" pending=db_todo <| - t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]] connection=data.connection - t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]] connection=data.connection + t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]] + t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]] r1 = t1.join t2 join_kind=Join_Kind.Inner |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "Z"] r1 @@ -489,8 +497,8 @@ add_specs suite_builder setup = r1.at "Z" . to_vector . should_equal [20, 30, 10, 20, 30] group_builder.specify "should correctly handle nulls in case-insensitive equality conditions" pending=db_todo <| - t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]] connection=data.connection - t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]] connection=data.connection + t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]] + t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]] r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals_Ignore_Case "X") |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "Right X", "Z"] r1 @@ -500,8 +508,8 @@ add_specs suite_builder setup = r1.at "Z" . to_vector . 
should_equal [10, 20, 30, 10, 20, 30] group_builder.specify "should correctly handle nulls in Between conditions" <| - t1 = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [0, 1, 2, 3]]] connection=data.connection - t2 = table_builder [["l", [Nothing, 0, 1]], ["u", [100, 10, Nothing]], ["Z", [10, 20, 30]]] connection=data.connection + t1 = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [0, 1, 2, 3]]] + t2 = table_builder [["l", [Nothing, 0, 1]], ["u", [100, 10, Nothing]], ["Z", [10, 20, 30]]] r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Between "X" "l" "u") |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "l", "u", "Z"] r1 @@ -512,8 +520,8 @@ add_specs suite_builder setup = r1.at "Z" . to_vector . should_equal [20, 20] group_builder.specify "should rename columns of the right table to avoid duplicates" <| - t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]], ["Right Y", [5, 6]]] connection=data.connection - t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]]] connection=data.connection + t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]], ["Right Y", [5, 6]]] + t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]]] t3 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Y") |> materialize |> _.order_by ["Right X"] Problems.get_attached_warnings t3 . should_equal [Duplicate_Output_Column_Names.Error ["Right Y"]] @@ -528,8 +536,8 @@ add_specs suite_builder setup = err1.should_fail_with Duplicate_Output_Column_Names err1.catch.column_names . 
should_equal ["Right Y"] - t4 = table_builder [["Right X", [1, 1]], ["X", [1, 2]], ["Y", [3, 4]], ["Right Y 2", [2, 2]]] connection=data.connection - t5 = table_builder [["Right X", [2, 1]], ["X", [2, 2]], ["Y", [2, 2]], ["Right Y", [2, 2]], ["Right Y 1", [2, 2]], ["Right Y 4", [2, 2]]] connection=data.connection + t4 = table_builder [["Right X", [1, 1]], ["X", [1, 2]], ["Y", [3, 4]], ["Right Y 2", [2, 2]]] + t5 = table_builder [["Right X", [2, 1]], ["X", [2, 2]], ["Y", [2, 2]], ["Right Y", [2, 2]], ["Right Y 1", [2, 2]], ["Right Y 4", [2, 2]]] t6 = t4.join t5 on=(Join_Condition.Equals "X" "Y") t6.column_names.should_equal ["Right X", "X", "Y", "Right Y 2"]+["Right Right X", "Right X 1", "Right Y 3", "Right Y", "Right Y 1", "Right Y 4"] @@ -548,8 +556,8 @@ add_specs suite_builder setup = t8.column_names.should_equal ["X", "Y", "Right Y", "PY"] group_builder.specify "should warn about renamed columns" <| - t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]] connection=data.connection - t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]], ["Right Y", [2, 44]]] connection=data.connection + t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]] + t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]], ["Right Y", [2, 44]]] action1 = t1.join t2 on=(Join_Condition.Equals "X" "Y") on_problems=_ tester1 table = @@ -569,8 +577,8 @@ add_specs suite_builder setup = data.t1.join data.t2 on=[error, "X"] . 
should_fail_with Illegal_State group_builder.specify "should correctly handle all null rows" pending=db_todo <| - t1 = table_builder [["A", [Nothing, 2, Nothing, 1]], ["B", [Nothing, 3, 4, 7]]] connection=data.connection - t2 = table_builder [["C", [Nothing, 2, Nothing, 4]], ["D", [Nothing, 5, 6, Nothing]]] connection=data.connection + t1 = table_builder [["A", [Nothing, 2, Nothing, 1]], ["B", [Nothing, 3, 4, 7]]] + t2 = table_builder [["C", [Nothing, 2, Nothing, 4]], ["D", [Nothing, 5, 6, Nothing]]] t3 = t1.join t2 on=[Join_Condition.Equals "A" "C"] join_kind=Join_Kind.Inner expect_column_names ["A", "B", "C", "D"] t3 @@ -632,8 +640,8 @@ add_specs suite_builder setup = t6.at "C" . to_vector . should_equal [4] t6.at "D" . to_vector . should_equal [Nothing] - t7 = table_builder [["A", [Nothing, 2]], ["B", [Nothing, 3]]] connection=data.connection - t8 = table_builder [["C", [2, 3]], ["D", [4, 5]]] connection=data.connection + t7 = table_builder [["A", [Nothing, 2]], ["B", [Nothing, 3]]] + t8 = table_builder [["C", [2, 3]], ["D", [4, 5]]] t9 = t7.join t8 on=[Join_Condition.Equals "A" "C"] join_kind=Join_Kind.Inner r9 = materialize t9 . order_by ["A", "B", "D"] . rows . map .to_vector @@ -676,9 +684,9 @@ add_specs suite_builder setup = r12.at 0 . should_equal [3, 5] group_builder.specify "should work correctly when composing multiple join operations" <| - ta = table_builder [["id", [0, 1]], ["name", ["Foo", "X"]]] connection=data.connection - tb = table_builder [["id", [2, 0]], ["name", ["Bar", "Y"]]] connection=data.connection - tc = table_builder [["id_a", [0, 1]], ["id_b", [0, 2]]] connection=data.connection + ta = table_builder [["id", [0, 1]], ["name", ["Foo", "X"]]] + tb = table_builder [["id", [2, 0]], ["name", ["Bar", "Y"]]] + tc = table_builder [["id_a", [0, 1]], ["id_b", [0, 2]]] res = (tc.join ta on=(Join_Condition.Equals "id_a" "id")) . 
join tb on=(Join_Condition.Equals "id_b" "id") right_prefix="b_" sel = res.select_columns ["name", "b_name"] @@ -688,7 +696,7 @@ add_specs suite_builder setup = r.at 1 . should_equal ["X", "Bar"] group_builder.specify "should work correctly when the join is performed on a transformed table" <| - t1 = table_builder [["X", [1, 2, 3]]] connection=data.connection + t1 = table_builder [["X", [1, 2, 3]]] t1_2 = t1.set "10*[X]+1" new_name="A" t1_3 = t1.set "[X]+20" new_name="B" @@ -699,8 +707,8 @@ add_specs suite_builder setup = t2.at "B" . to_vector . should_equal [21] t2.at "Right X" . to_vector . should_equal [1] - t4 = table_builder [["X", [1, 2, 3]], ["Y", [10, 20, 30]]] connection=data.connection - t5 = table_builder [["X", [5, 7, 1]], ["Z", [100, 200, 300]]] connection=data.connection + t4 = table_builder [["X", [1, 2, 3]], ["Y", [10, 20, 30]]] + t5 = table_builder [["X", [5, 7, 1]], ["Z", [100, 200, 300]]] t4_2 = t4.set "2*[X]+1" new_name="C" t6 = t4_2.join t5 on=(Join_Condition.Equals "C" "X") join_kind=Join_Kind.Inner @@ -711,8 +719,8 @@ add_specs suite_builder setup = r2.at 1 . should_equal [3, 30, 7, 7, 200] group_builder.specify "should allow full joins with more complex join conditions" <| - t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [10, 20, 30]]] connection=data.connection - t2 = table_builder [["X", ["Ć", "A", "b"]], ["Z", [100, 200, 300]]] connection=data.connection + t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [10, 20, 30]]] + t2 = table_builder [["X", ["Ć", "A", "b"]], ["Z", [100, 200, 300]]] t3 = t1.join t2 on=(Join_Condition.Equals_Ignore_Case "X") join_kind=Join_Kind.Full expect_column_names ["X", "Y", "Right X", "Z"] t3 @@ -723,8 +731,8 @@ add_specs suite_builder setup = r.at 2 . should_equal ["b", 20, "b", 300] r.at 3 . 
should_equal ["c", 30, Nothing, Nothing] - t4 = table_builder [["X", [1, 2, 3]], ["Y", [10, 20, 30]]] connection=data.connection - t5 = table_builder [["X", [5, 7, 1]], ["Z", [100, 200, 300]]] connection=data.connection + t4 = table_builder [["X", [1, 2, 3]], ["Y", [10, 20, 30]]] + t5 = table_builder [["X", [5, 7, 1]], ["Z", [100, 200, 300]]] t4_2 = t4.set "2*[X]+1" new_name="C" t6 = t4_2.join t5 on=(Join_Condition.Equals "C" "X") join_kind=Join_Kind.Full @@ -736,7 +744,7 @@ add_specs suite_builder setup = r2.at 2 . should_equal [2, 20, 5, 5, 100] r2.at 3 . should_equal [3, 30, 7, 7, 200] - t4_3 = table_builder [["X", [Nothing, 2, 3]], ["Y", [10, 20, 30]]] connection=data.connection + t4_3 = table_builder [["X", [Nothing, 2, 3]], ["Y", [10, 20, 30]]] t4_4 = t4_3.set (t4_3.at "X" . fill_nothing 7) new_name="C" t7 = t4_4.join t5 on=(Join_Condition.Equals "C" "X") join_kind=Join_Kind.Full within_table t7 <| @@ -749,7 +757,7 @@ add_specs suite_builder setup = r3.at 3 . should_equal [2, 20, 2, Nothing, Nothing] r3.at 4 . should_equal [3, 30, 3, Nothing, Nothing] - t8 = table_builder [["X", [2, 99]], ["Y", [20, 99]], ["C", [5, 99]]] connection=data.connection + t8 = table_builder [["X", [2, 99]], ["Y", [20, 99]], ["C", [5, 99]]] t9 = t4_2.join t8 join_kind=Join_Kind.Full on=["X", "Y", "C"] within_table t9 <| t9.column_names . 
should_equal ["X", "Y", "C", "Right X", "Right Y", "Right C"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso index 5c62579533c0..0b7ba2436554 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso @@ -18,21 +18,27 @@ type Data Value ~connection setup create_connection_fn = - connection = create_connection_fn Nothing - Data.Value connection + Data.Value (create_connection_fn Nothing) + + teardown self = self.connection.close add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder create_connection_fn = setup.create_connection_func materialize = setup.materialize suite_builder.group prefix+"Table.merge" group_builder-> data = Data.setup create_connection_fn + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should allow to simply update columns based on a lookup table" <| - lookup = table_builder [["Y", ["A", "B", "A"]], ["X", [1, 2, 3]]] connection=data.connection - my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection + lookup = table_builder [["Y", ["A", "B", "A"]], ["X", [1, 2, 3]]] + my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] t2 = my_table.merge lookup key_columns="X" t2.column_names . should_equal ["X", "Y", "Z"] @@ -42,8 +48,8 @@ add_specs suite_builder setup = m2.at "Y" . to_vector . 
should_equal ["A", "B", "A", "B"] group_builder.specify "should allow to add new columns from a lookup table" <| - lookup = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]]] connection=data.connection - my_table = table_builder [["id", [1, 2, 3, 4]], ["code", ["a", "c", "c", "b"]], ["hmm", [10, 20, 30, 40]]] connection=data.connection + lookup = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]]] + my_table = table_builder [["id", [1, 2, 3, 4]], ["code", ["a", "c", "c", "b"]], ["hmm", [10, 20, 30, 40]]] t2 = my_table.merge lookup key_columns="code" add_new_columns=True t2.column_names . should_equal ["id", "code", "hmm", "status"] @@ -53,8 +59,8 @@ add_specs suite_builder setup = m2.at "hmm" . to_vector . should_equal [10, 20, 30, 40] m2.at "status" . to_vector . should_equal ["new", "changed", "changed", "old"] - lookup2 = table_builder [["is_X", [True, False]], ["X", ["Yes", "No"]]] connection=data.connection - my_table2 = table_builder [["A", [1, 2, 3, 4]], ["is_X", [True, True, False, True]]] connection=data.connection + lookup2 = table_builder [["is_X", [True, False]], ["X", ["Yes", "No"]]] + my_table2 = table_builder [["A", [1, 2, 3, 4]], ["is_X", [True, True, False, True]]] t3 = my_table2.merge lookup2 key_columns="is_X" add_new_columns=True t3.column_names . should_equal ["A", "is_X", "X"] m3 = t3 |> materialize |> _.order_by "A" @@ -63,8 +69,8 @@ add_specs suite_builder setup = m3.at "X" . to_vector . 
should_equal ["Yes", "Yes", "No", "Yes"] group_builder.specify "will warn if extra columns are unexpected (add_new_columns=False) (default)" <| - lookup = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]]] connection=data.connection - my_table = table_builder [["id", [1, 2, 3, 4]], ["code", ["a", "c", "c", "b"]], ["hmm", [10, 20, 30, 40]]] connection=data.connection + lookup = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]]] + my_table = table_builder [["id", [1, 2, 3, 4]], ["code", ["a", "c", "c", "b"]], ["hmm", [10, 20, 30, 40]]] t2 = my_table.merge lookup key_columns="code" t2.column_names . should_equal ["id", "code", "hmm"] @@ -82,7 +88,7 @@ add_specs suite_builder setup = err2.should_fail_with Unexpected_Extra_Columns err2.catch.columns . should_equal ["status"] - lookup2 = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]], ["hmm", [111, 222, 333]]] connection=data.connection + lookup2 = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]], ["hmm", [111, 222, 333]]] t3 = my_table.merge lookup2 key_columns=["code"] add_new_columns=False t3.column_names . should_equal ["id", "code", "hmm"] m3 = t3 |> materialize |> _.order_by "id" @@ -93,8 +99,8 @@ add_specs suite_builder setup = w3.columns . should_equal ["status"] group_builder.specify "will only update rows that are matched and skip others (default - allow_unmatched_rows=True)" <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection - my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] t2 = my_table.merge lookup key_columns=["X"] t2.column_names . 
should_equal ["X", "Y", "Z"] @@ -105,8 +111,8 @@ add_specs suite_builder setup = m2.at "Y" . to_vector . should_equal ["A", "B", "ZZZ", "B"] group_builder.specify "will fill new columns of unmatched rows with Nothing (allow_unmatched_rows=True)" <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]], ["W", [1.5, 2.0]]] connection=data.connection - my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]], ["W", [1.5, 2.0]]] + my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] t2 = my_table.merge lookup key_columns=["X"] allow_unmatched_rows=True add_new_columns=True t2.column_names . should_equal ["X", "Y", "Z", "W"] @@ -117,8 +123,8 @@ add_specs suite_builder setup = m2.at "W" . to_vector . should_equal [1.5, 2.0, Nothing, 2.0] group_builder.specify "will report unmatched rows (if allow_unmatched_rows=False)" <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection - my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] r2 = my_table.merge lookup key_columns=["X"] allow_unmatched_rows=False add_new_columns=True r2.should_fail_with Unmatched_Rows_In_Lookup @@ -126,8 +132,8 @@ add_specs suite_builder setup = r2.catch.to_display_text . should_contain "[3]" # But lookup table containing other keys that are not present in source is NOT a problem. 
- lookup2 = table_builder [["X", [1, 2, 3]], ["Y", ["A", "B", "C"]]] connection=data.connection - my_table2 = table_builder [["X", [1, 2, 1, 1]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection + lookup2 = table_builder [["X", [1, 2, 3]], ["Y", ["A", "B", "C"]]] + my_table2 = table_builder [["X", [1, 2, 1, 1]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] t3 = my_table2.merge lookup2 key_columns=["X"] add_new_columns=True m3 = t3 |> materialize |> _.order_by "Z" m3.at "X" . to_vector . should_equal [1, 2, 1, 1] @@ -135,8 +141,8 @@ add_specs suite_builder setup = m3.at "Z" . to_vector . should_equal [10, 20, 30, 40] group_builder.specify "will fail on missing key columns in either table" <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection - my_table = table_builder [["X", [1, 2, 3, 2]], ["Z", [10, 20, 30, 40]]] connection=data.connection + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + my_table = table_builder [["X", [1, 2, 3, 2]], ["Z", [10, 20, 30, 40]]] r2 = my_table.merge lookup key_columns=["Y"] r2.should_fail_with Missing_Input_Columns @@ -148,8 +154,8 @@ add_specs suite_builder setup = r3.catch.to_display_text . should_contain "in the lookup table" group_builder.specify "should allow matching by multiple key columns" <| - lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "B"]], ["Z", [100, 100, 200]]] connection=data.connection - my_table = table_builder [["X", [1, 1, 1, 2]], ["Y", ["A", "B", "A", "B"]], ["Z", [10, 20, 30, 40]], ["W", [1000, 2000, 3000, 4000]]] connection=data.connection + lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "B"]], ["Z", [100, 100, 200]]] + my_table = table_builder [["X", [1, 1, 1, 2]], ["Y", ["A", "B", "A", "B"]], ["Z", [10, 20, 30, 40]], ["W", [1000, 2000, 3000, 4000]]] t2 = my_table.merge lookup key_columns=["X", "Y"] t2.column_names . 
should_equal ["X", "Y", "Z", "W"] @@ -161,8 +167,8 @@ add_specs suite_builder setup = m2.at "Z" . to_vector . should_equal [100, 200, 100, 100] group_builder.specify "will fail on duplicate matches in the lookup table" <| - lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "C"]]] connection=data.connection - my_table = table_builder [["X", [4, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection + lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "C"]]] + my_table = table_builder [["X", [4, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] # If the duplicates do not show up in result - it is accepted. t2 = my_table.merge lookup key_columns=["X"] @@ -172,14 +178,14 @@ add_specs suite_builder setup = m2.at "X" . to_vector . should_equal [4, 2, 3, 2] m2.at "Y" . to_vector . should_equal ["Z", "B", "ZZZ", "B"] - my_table2 = table_builder [["X", [1, 2]], ["Y", ["Z", "ZZ"]], ["Z", [10, 20]]] connection=data.connection + my_table2 = table_builder [["X", [1, 2]], ["Y", ["Z", "ZZ"]], ["Z", [10, 20]]] r2 = my_table2.merge lookup key_columns=["X"] r2.should_fail_with Non_Unique_Key r2.catch.key_column_names . should_equal ["X"] r2.catch.clashing_example_key_values . should_equal [1] r2.catch.clashing_example_row_count . should_equal 2 - lookup2 = table_builder [["X", [1, 1]], ["Y", ["A", "A"]], ["Z", [100, 100]]] connection=data.connection + lookup2 = table_builder [["X", [1, 1]], ["Y", ["A", "A"]], ["Z", [100, 100]]] Problems.assume_no_problems <| my_table.merge lookup2 key_columns=["X", "Y"] r3 = my_table2.merge lookup2 key_columns=["X"] r3.should_fail_with Non_Unique_Key @@ -189,7 +195,7 @@ add_specs suite_builder setup = m3.at "X" . to_vector . should_equal [1, 2] m3.at "Y" . to_vector . 
should_equal ["Z", "ZZ"] - my_table3 = table_builder [["X", [1, 1, 2]], ["Y", ["A", "Z", "ZZ"]], ["Z", [10, 20, 30]]] connection=data.connection + my_table3 = table_builder [["X", [1, 1, 2]], ["Y", ["A", "Z", "ZZ"]], ["Z", [10, 20, 30]]] r4 = my_table3.merge lookup2 key_columns=["X", "Y"] r4.should_fail_with Non_Unique_Key r4.catch.key_column_names . should_equal ["X", "Y"] @@ -197,8 +203,8 @@ add_specs suite_builder setup = r4.catch.clashing_example_row_count . should_equal 2 group_builder.specify "will preserve count of rows, even if there are duplicates" <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection - my_table = table_builder [["X", [1, 2, 2, 2, 1]], ["Z", [10, 20, 20, 20, 50]]] connection=data.connection + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + my_table = table_builder [["X", [1, 2, 2, 2, 1]], ["Z", [10, 20, 20, 20, 50]]] t2 = my_table.merge lookup key_columns=["X"] add_new_columns=True t2.column_names . should_equal ["X", "Z", "Y"] @@ -208,8 +214,8 @@ add_specs suite_builder setup = m2.at "Z" . to_vector . 
should_equal [10, 20, 20, 20, 50] group_builder.specify "should correctly preserve types of original, merged and added columns" <| - table = table_builder [["key1", [0, 1]], ["key2", ["o", "?"]], ["X", [1, 10]], ["Y", ["A", "E"]], ["Z", [1.5, 2.0]], ["W", [True, False]], ["A", [2, 22]], ["B", ["1", "2"]], ["C", [2.0, 2.5]], ["D", [False, False]]] connection=data.connection - lookup = table_builder [["key1", [0, 2]], ["key2", ["o", "?"]], ["X2", [100, 1000]], ["Y2", ["foo", "bar"]], ["Z2", [0.5, 4.0]], ["W2", [False, True]], ["A", [3, 55]], ["B", ["F", "F"]], ["C", [3.0, 10.5]], ["D", [True, False]]] connection=data.connection + table = table_builder [["key1", [0, 1]], ["key2", ["o", "?"]], ["X", [1, 10]], ["Y", ["A", "E"]], ["Z", [1.5, 2.0]], ["W", [True, False]], ["A", [2, 22]], ["B", ["1", "2"]], ["C", [2.0, 2.5]], ["D", [False, False]]] + lookup = table_builder [["key1", [0, 2]], ["key2", ["o", "?"]], ["X2", [100, 1000]], ["Y2", ["foo", "bar"]], ["Z2", [0.5, 4.0]], ["W2", [False, True]], ["A", [3, 55]], ["B", ["F", "F"]], ["C", [3.0, 10.5]], ["D", [True, False]]] [True, False].each allow_unmatched_rows-> table_prepared = if allow_unmatched_rows then table else @@ -237,8 +243,8 @@ add_specs suite_builder setup = t2.at "W2" . value_type . should_equal Value_Type.Boolean if setup.test_selection.fixed_length_text_columns then group_builder.specify "should correctly preserve types of original, merged and added columns (various Char types test case)" <| - table2 = (table_builder [["key", ["0"]], ["X", ["a"]], ["A", ["bbbbb"]]] connection=data.connection) . cast "key" (Value_Type.Char size=50) . cast "X" (Value_Type.Char size=1) . cast "A" (Value_Type.Char size=5 variable_length=False) - lookup2 = (table_builder [["key", ["0"]], ["X2", ["ccc"]], ["A", ["dddd"]]] connection=data.connection) . cast "key" (Value_Type.Char size=100) . cast "X2" (Value_Type.Char size=3 variable_length=False) . 
cast "A" (Value_Type.Char size=4 variable_length=False) + table2 = (table_builder [["key", ["0"]], ["X", ["a"]], ["A", ["bbbbb"]]]) . cast "key" (Value_Type.Char size=50) . cast "X" (Value_Type.Char size=1) . cast "A" (Value_Type.Char size=5 variable_length=False) + lookup2 = (table_builder [["key", ["0"]], ["X2", ["ccc"]], ["A", ["dddd"]]]) . cast "key" (Value_Type.Char size=100) . cast "X2" (Value_Type.Char size=3 variable_length=False) . cast "A" (Value_Type.Char size=4 variable_length=False) table2.at "key" . value_type . should_equal (Value_Type.Char size=50 variable_length=True) table2.at "X" . value_type . should_equal (Value_Type.Char size=1 variable_length=True) @@ -266,8 +272,8 @@ add_specs suite_builder setup = t3.at "A" . value_type . should_equal (Value_Type.Char size=4 variable_length=False) group_builder.specify "will report Floating_Point_Equality if floating-point columns are used as key" <| - lookup = table_builder [["X", [1.0, 2.0, 3.0]], ["Y", ["A", "B", "C"]]] connection=data.connection - my_table = table_builder [["X", [2.0, 3.0, 2.0, 3.0]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection + lookup = table_builder [["X", [1.0, 2.0, 3.0]], ["Y", ["A", "B", "C"]]] + my_table = table_builder [["X", [2.0, 3.0, 2.0, 3.0]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] lookup.at "X" . value_type . is_floating_point . should_be_true t2 = my_table.merge lookup key_columns="X" @@ -281,8 +287,8 @@ add_specs suite_builder setup = w2.to_display_text . 
should_contain "X" group_builder.specify "will fail with No_Common_Type if types of updated columns are not compatible" <| - lookup = table_builder [["X", [1, 2, 3]], ["Y", [1, 11, 111]]] connection=data.connection - my_table = table_builder [["X", [2, 3, 2, 3]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection + lookup = table_builder [["X", [1, 2, 3]], ["Y", [1, 11, 111]]] + my_table = table_builder [["X", [2, 3, 2, 3]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] r2 = my_table.merge lookup key_columns="X" r2.should_fail_with No_Common_Type @@ -291,8 +297,8 @@ add_specs suite_builder setup = r2.catch.to_display_text . should_contain "when unifying column [Y]" group_builder.specify "will allow incompatible types if allow_unmatched_rows=False" <| - lookup = table_builder [["X", [1, 2, 3]], ["Y", [1, 11, 111]]] connection=data.connection - my_table = table_builder [["X", [2, 3, 2, 3]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] connection=data.connection + lookup = table_builder [["X", [1, 2, 3]], ["Y", [1, 11, 111]]] + my_table = table_builder [["X", [2, 3, 2, 3]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] my_table.at "Y" . value_type . is_text . should_be_true t2 = my_table.merge lookup key_columns="X" allow_unmatched_rows=False @@ -304,14 +310,14 @@ add_specs suite_builder setup = m2.at "Z" . to_vector . 
should_equal [10, 20, 30, 40] group_builder.specify "will fail if key columns of the lookup table contain Nothing" <| - lookup1 = table_builder [["X", [1, 2, Nothing]], ["Y", ["A", "B", "C"]]] connection=data.connection - my_table1 = table_builder [["X", [2, 3, 2, 3]], ["Z", [10, 20, 30, 40]]] connection=data.connection + lookup1 = table_builder [["X", [1, 2, Nothing]], ["Y", ["A", "B", "C"]]] + my_table1 = table_builder [["X", [2, 3, 2, 3]], ["Z", [10, 20, 30, 40]]] r1 = my_table1.merge lookup1 key_columns="X" add_new_columns=True r1.should_fail_with Null_Values_In_Key_Columns # But NULLs in source table key are OK - lookup2 = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection - my_table2 = table_builder [["X", [2, 3, Nothing, 3]], ["Z", [10, 20, 30, 40]]] connection=data.connection + lookup2 = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + my_table2 = table_builder [["X", [2, 3, Nothing, 3]], ["Z", [10, 20, 30, 40]]] t2 = my_table2.merge lookup2 key_columns="X" allow_unmatched_rows=True add_new_columns=True m2 = t2 |> materialize |> _.order_by "Z" m2.at "X" . to_vector . 
should_equal [2, 3, Nothing, 3] @@ -323,17 +329,17 @@ add_specs suite_builder setup = r3.should_fail_with Unmatched_Rows_In_Lookup group_builder.specify "will not allow providing no key_columns" <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection - my_table = table_builder [["X", [2, 1]], ["Z", [10, 20]]] connection=data.connection + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + my_table = table_builder [["X", [2, 1]], ["Z", [10, 20]]] r2 = my_table.merge lookup key_columns=[] add_new_columns=True r2.should_fail_with Illegal_Argument if setup.is_database.not then group_builder.specify "(in-memory only) will preserve the order of rows from the original table" <| - lookup = table_builder [["Y", [1, 0]], ["V", ["TRUE", "FALSE"]]] connection=data.connection + lookup = table_builder [["Y", [1, 0]], ["V", ["TRUE", "FALSE"]]] xs = 0.up_to 50 . to_vector ys = xs.map x-> x%2 - my_table = table_builder [["X", xs], ["Y", ys]] connection=data.connection + my_table = table_builder [["X", xs], ["Y", ys]] t2 = my_table.merge lookup key_columns="Y" add_new_columns=True t2.column_names . should_equal ["X", "Y", "V"] @@ -345,8 +351,8 @@ add_specs suite_builder setup = if setup.is_database then group_builder.specify "(database-only) will fail if pre-checked invariants get invalidated between the query is constructed and then materialized" <| Test.with_clue "(lookup is unique check) " <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection - table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] connection=data.connection + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] r1 = table.merge lookup key_columns="X" # Immediately, the query is all good. @@ -385,8 +391,8 @@ add_specs suite_builder setup = r3.at "Z" . to_vector . 
should_fail_with Invariant_Violation Test.with_clue "(no unmatched rows check - added a row in source) " <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection - table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] connection=data.connection + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] r1 = table.merge lookup key_columns="X" allow_unmatched_rows=False # Immediately, the query is all good. @@ -407,8 +413,8 @@ add_specs suite_builder setup = m2.should_fail_with Invariant_Violation Test.with_clue "(no unmatched rows check - removed a row in lookup) " <| - lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] connection=data.connection - table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] connection=data.connection + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] r1 = table.merge lookup key_columns="X" allow_unmatched_rows=False # Immediately, the query is all good. @@ -430,7 +436,7 @@ add_specs suite_builder setup = # This does not seem useful really, but there is no reason to disallow it, so we should ensure it does not crash. group_builder.specify "(edge-case) should allow lookup with itself" <| - table = table_builder [["X", [1, 2, 3]], ["Y", ["A", "B", "C"]]] connection=data.connection + table = table_builder [["X", [1, 2, 3]], ["Y", ["A", "B", "C"]]] t2 = table.merge table key_columns="X" t2.column_names . should_equal ["X", "Y"] @@ -439,7 +445,7 @@ add_specs suite_builder setup = m2.at "Y" . to_vector . 
should_equal ["A", "B", "C"] group_builder.specify "should gracefully handle tables from different backends" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] alternative_connection = Database.connect (SQLite In_Memory) t0 = (Table.new [["A", [3, 2, 1]], ["B", ["x", "y", "z"]]]).select_into_database_table alternative_connection "T0" temporary=True diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso index 6c74ef8f6014..4cd0ec647cce 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso @@ -22,21 +22,27 @@ type Data Value ~connection setup create_connection_fn = - connection = create_connection_fn Nothing - Data.Value connection + Data.Value (create_connection_fn Nothing) + + teardown self = self.connection.close add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder create_connection_fn = setup.create_connection_func suite_builder.group prefix+"Table.union" group_builder-> data = Data.setup create_connection_fn + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + group_builder.specify "should merge columns from multiple tables" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, True]]] connection=data.connection - t2 = table_builder [["A", [4, 5, 6]], ["B", ["d", "e", "f"]], ["C", [False, True, False]]] connection=data.connection - t3 = table_builder [["A", [7, 8, 9]], ["B", ["g", "h", "i"]], ["C", [True, False, False]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, True]]] + t2 = table_builder [["A", [4, 5, 6]], ["B", ["d", "e", "f"]], ["C", [False, True, False]]] + 
t3 = table_builder [["A", [7, 8, 9]], ["B", ["g", "h", "i"]], ["C", [True, False, False]]] t4 = t1.union t2 expect_column_names ["A", "B", "C"] t4 @@ -51,9 +57,9 @@ add_specs suite_builder setup = t5.at "C" . to_vector . should_equal [True, False, False, True, False, True, False, True, False] group_builder.specify "should fill unmatched columns (by name matching) with nulls and report a warning by default" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection - t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] connection=data.connection - t3 = table_builder [["D", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] + t3 = table_builder [["D", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] action = t1.union [t2, t3] on_problems=_ tester table = @@ -75,9 +81,9 @@ add_specs suite_builder setup = Problems.test_problem_handling action2 problems2 tester2 group_builder.specify "should drop unmatched columns if asked to" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection - t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] connection=data.connection - t3 = table_builder [["A", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] + t3 = table_builder [["A", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] t4 = t1.union [t2, t3] keep_unmatched_columns=False on_problems=Problem_Behavior.Report_Error Problems.assume_no_problems t4 @@ -85,9 +91,9 @@ add_specs suite_builder setup = t4.at "A" . to_vector . 
should_equal [1, 2, 3, 4, 5, 6, Nothing, Nothing, 0] group_builder.specify "should keep unmatched columns without errors if asked to" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection - t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] connection=data.connection - t3 = table_builder [["A", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] + t3 = table_builder [["A", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] t4 = t1.union [t2, t3] keep_unmatched_columns=True on_problems=Problem_Behavior.Report_Error Problems.assume_no_problems t4 @@ -97,17 +103,17 @@ add_specs suite_builder setup = t4.at "C" . to_vector . should_equal [Nothing, Nothing, Nothing, "d", "e", "f", "g", "h", "i"] group_builder.specify "should fail if asked to drop unmatched columns but the set of common columns is empty" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection - t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] connection=data.connection - t3 = table_builder [["D", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]] + t3 = table_builder [["D", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]] t4 = t1.union [t2, t3] keep_unmatched_columns=False on_problems=Problem_Behavior.Ignore t4.should_fail_with No_Output_Columns t4.catch.to_display_text . should_equal "No columns in the result, because of another problem: Unmatched columns are set to be dropped, but no common column names were found." 
group_builder.specify "should ignore column names when matching by position" <| - t1 = table_builder [["A", [1, 2, 3]], ["Y", ["a", "b", "c"]]] connection=data.connection - t2 = table_builder [["X", [4, 5, 6]], ["A", ["d", "e", "f"]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["Y", ["a", "b", "c"]]] + t2 = table_builder [["X", [4, 5, 6]], ["A", ["d", "e", "f"]]] t3 = t1.union t2 match_columns=Match_Columns.By_Position expect_column_names ["A", "Y"] t3 @@ -115,9 +121,9 @@ add_specs suite_builder setup = t3.at "Y" . to_vector . should_equal ["a", "b", "c", "d", "e", "f"] group_builder.specify "should fill extra columns (positional matching) with nulls and report a warning by default" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection - t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] connection=data.connection - t3 = table_builder [["A2", [10, 20, 30]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] + t3 = table_builder [["A2", [10, 20, 30]]] action = t1.union [t2, t3] match_columns=Match_Columns.By_Position on_problems=_ tester table = @@ -129,9 +135,9 @@ add_specs suite_builder setup = Problems.test_problem_handling action problems tester group_builder.specify "should keep the least number of columns with positional matching if asked to drop unmatched ones" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection - t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] connection=data.connection - t3 = table_builder [["A2", [10, 20, 30]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] + t3 = table_builder [["A2", [10, 20, 30]]] t4 = t1.union [t2, t3] 
keep_unmatched_columns=False match_columns=Match_Columns.By_Position on_problems=Problem_Behavior.Report_Error Problems.assume_no_problems t4 @@ -139,9 +145,9 @@ add_specs suite_builder setup = t4.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6, 10, 20, 30] group_builder.specify "should keep the greatest number of columns with positional matching if asked to keep unmatched ones, filling missing values with null and reporting no problems" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection - t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] connection=data.connection - t3 = table_builder [["A2", [10, 20, 30]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]] + t3 = table_builder [["A2", [10, 20, 30]]] t4 = t1.union [t2, t3] match_columns=Match_Columns.By_Position keep_unmatched_columns=True on_problems=Problem_Behavior.Ignore Problems.assume_no_problems t4 @@ -151,8 +157,8 @@ add_specs suite_builder setup = t4.at "C" . to_vector . 
should_equal [Nothing, Nothing, Nothing, 7, 8, 9, Nothing, Nothing, Nothing] group_builder.specify "should use column names from the first table that has enough columns in positional matching mode" <| - t1 = table_builder [["A", [1, 2, 3]]] connection=data.connection - t2 = table_builder [["X", [4, 5, 6]], ["A", ["a", "b", "c"]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]]] + t2 = table_builder [["X", [4, 5, 6]], ["A", ["a", "b", "c"]]] check table = expect_column_names ["X", "A"] table @@ -168,29 +174,29 @@ add_specs suite_builder setup = within_table t4 <| check t4 - t5 = table_builder [["Y", [7, 8, 9]], ["A", ["d", "e", "f"]], ["Z", [10, 11, 12]]] connection=data.connection - t6 = table_builder [["W", [0]]] connection=data.connection - t7 = table_builder [["X", [7, 8, 9]], ["Y", ["d", "e", "f"]], ["Z", [10, 11, 12]]] connection=data.connection + t5 = table_builder [["Y", [7, 8, 9]], ["A", ["d", "e", "f"]], ["Z", [10, 11, 12]]] + t6 = table_builder [["W", [0]]] + t7 = table_builder [["X", [7, 8, 9]], ["Y", ["d", "e", "f"]], ["Z", [10, 11, 12]]] t8 = t1.union [t2, t5, t6, t7] match_columns=Match_Columns.By_Position expect_column_names ["Y", "A", "Z"] t8 group_builder.specify "should allow to merge a table with itself" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] t2 = t1.union [t1, t1] expect_column_names ["A", "B"] t2 t2.at "A" . to_vector . should_equal [1, 2, 3, 1, 2, 3, 1, 2, 3] t2.at "B" . to_vector . 
should_equal ["a", "b", "c", "a", "b", "c", "a", "b", "c"] group_builder.specify "should not de-duplicate rows" <| - t1 = table_builder [["A", [1, 1, 3]], ["B", ["a", "a", "c"]]] connection=data.connection - t2 = table_builder [["A", [1, 2, 2]], ["B", ["a", "b", "b"]]] connection=data.connection + t1 = table_builder [["A", [1, 1, 3]], ["B", ["a", "a", "c"]]] + t2 = table_builder [["A", [1, 2, 2]], ["B", ["a", "b", "b"]]] t3 = t1.union t2 expect_column_names ["A", "B"] t3 t3.at "A" . to_vector . should_equal [1, 1, 3, 1, 2, 2] t3.at "B" . to_vector . should_equal ["a", "a", "c", "a", "b", "b"] group_builder.specify "should gracefully handle the case where no tables to union were provided" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] check_same table = expect_column_names ["A", "B"] table @@ -207,8 +213,8 @@ add_specs suite_builder setup = check_same <| t1.union [] match_columns=Match_Columns.By_Position keep_unmatched_columns=True group_builder.specify "should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <| - t1 = (table_builder [["A", ["a", "b", "c"]]] connection=data.connection) . cast "A" (Value_Type.Char size=1 variable_length=False) - t2 = (table_builder [["A", ["xyz", "abc", "def"]]] connection=data.connection) . cast "A" (Value_Type.Char size=3 variable_length=False) + t1 = (table_builder [["A", ["a", "b", "c"]]]) . cast "A" (Value_Type.Char size=1 variable_length=False) + t2 = (table_builder [["A", ["xyz", "abc", "def"]]]) . cast "A" (Value_Type.Char size=3 variable_length=False) t1.at "A" . value_type . should_equal (Value_Type.Char size=1 variable_length=False) t2.at "A" . value_type . should_equal (Value_Type.Char size=3 variable_length=False) @@ -221,8 +227,8 @@ add_specs suite_builder setup = t3.at "A" . 
value_type . variable_length . should_be_true group_builder.specify "should find a common type that will fit the merged columns" <| - t1 = table_builder [["A", [0, 1, 2]]] connection=data.connection - t2 = table_builder [["A", [1.0, 2.0, 2.5]]] connection=data.connection + t1 = table_builder [["A", [0, 1, 2]]] + t2 = table_builder [["A", [1.0, 2.0, 2.5]]] t1.at "A" . value_type . is_integer . should_be_true t2.at "A" . value_type . is_floating_point . should_be_true @@ -234,7 +240,7 @@ add_specs suite_builder setup = # Specific type tests that apply to in-memory. Database behaviour is up to implementation. if setup.is_database.not then - t4 = table_builder [["A", [2^100, 2^10, 2]]] connection=data.connection + t4 = table_builder [["A", [2^100, 2^10, 2]]] t4.at "A" . value_type . should_be_a (Value_Type.Decimal ...) t5 = t2.union t4 @@ -252,8 +258,8 @@ add_specs suite_builder setup = group_builder.specify "should resort to Mixed value type only if at least one column is already Mixed" <| ## TODO currently no way to retype a column to Mixed, so we are using a custom object - t1 = table_builder [["A", [1, 2, 3]], ["mixed", ["a", My_Type.Value 1 2, Nothing]]] connection=data.connection - t2 = table_builder [["A", [4, 5, 6]], ["mixed", [1, 2, 3]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["mixed", ["a", My_Type.Value 1 2, Nothing]]] + t2 = table_builder [["A", [4, 5, 6]], ["mixed", [1, 2, 3]]] t1.at "mixed" . value_type . should_equal Value_Type.Mixed t2.at "mixed" . value_type . should_equal Value_Type.Integer @@ -263,8 +269,8 @@ add_specs suite_builder setup = t3.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6] t3.at "mixed" . to_vector . 
should_equal ["a", My_Type.Value 1 2, Nothing, 1, 2, 3] - t4 = table_builder [["A", [1, 3]], ["mixed", [True, False]]] connection=data.connection - t5 = table_builder [["A", [4, 5]], ["mixed", ["X", "y"]]] connection=data.connection + t4 = table_builder [["A", [1, 3]], ["mixed", [True, False]]] + t5 = table_builder [["A", [4, 5]], ["mixed", ["X", "y"]]] t4.at "mixed" . value_type . should_equal Value_Type.Boolean t5.at "mixed" . value_type . should_equal Value_Type.Char @@ -276,8 +282,8 @@ add_specs suite_builder setup = t6.at "mixed" . value_type . should_equal Value_Type.Mixed group_builder.specify "if no common type can be found, should report error and drop the problematic column" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, Nothing]]] connection=data.connection - t2 = table_builder [["C", ["x", "Y", "Z"]], ["A", [4, 5, 6]], ["B", [1, 2, 3]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, Nothing]]] + t2 = table_builder [["C", ["x", "Y", "Z"]], ["A", [4, 5, 6]], ["B", [1, 2, 3]]] r1 = t1.union t2 on_problems=Problem_Behavior.Report_Error r1.should_fail_with No_Common_Type @@ -303,8 +309,8 @@ add_specs suite_builder setup = r4.should_fail_with No_Common_Type group_builder.specify "if type widening is not allowed, should use the type from first table that contained the given column" <| - t1 = table_builder [["A", [1, 2, 3]]] connection=data.connection - t2 = table_builder [["A", [4, 5, 6]], ["B", [1.2, 2.2, 3.1]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]]] + t2 = table_builder [["A", [4, 5, 6]], ["B", [1.2, 2.2, 3.1]]] t3 = t1.union t2 allow_type_widening=False keep_unmatched_columns=True within_table t3 <| @@ -317,8 +323,8 @@ add_specs suite_builder setup = t3.at "B" . value_type . is_floating_point . 
should_be_true group_builder.specify "if type widening is not allowed and types do not match, should report error and drop the problematic column" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", [1, 2, 3]], ["E", [1.1, 2.5, 3.2]]] connection=data.connection - t2 = table_builder [["A", [4, 5, 6]], ["B", [1.5, 2.5, 3.5]], ["E", [1, 2, 3]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", [1, 2, 3]], ["E", [1.1, 2.5, 3.2]]] + t2 = table_builder [["A", [4, 5, 6]], ["B", [1.5, 2.5, 3.5]], ["E", [1, 2, 3]]] t1.at "B" . value_type . is_integer . should_be_true t1.at "E" . value_type . is_floating_point . should_be_true @@ -343,11 +349,11 @@ add_specs suite_builder setup = # Database backends are not required to support Mixed types. if setup.is_database.not then group_builder.specify "even if type widening is not allowed, if the first column is mixed, it should accept any column to be concatenated to it" <| - t1 = table_builder [["X", ["a", 1, Nothing]]] connection=data.connection - t2 = table_builder [["X", [1]]] connection=data.connection - t3 = table_builder [["X", [1.2, 2.3, 3.4]]] connection=data.connection - t4 = table_builder [["X", ["a", "b"]]] connection=data.connection - t5 = table_builder [["X", [True, False]]] connection=data.connection + t1 = table_builder [["X", ["a", 1, Nothing]]] + t2 = table_builder [["X", [1]]] + t3 = table_builder [["X", [1.2, 2.3, 3.4]]] + t4 = table_builder [["X", ["a", "b"]]] + t5 = table_builder [["X", [True, False]]] t1.at "X" . value_type . should_equal Value_Type.Mixed t2.at "X" . value_type . should_equal Value_Type.Integer @@ -358,9 +364,9 @@ add_specs suite_builder setup = t6.at "X" . to_vector . 
should_equal ["a", 1, Nothing, 1, 1.2, 2.3, 3.4, "a", "b", True, False] group_builder.specify "when finding a common type for numeric columns to be Float, any precision loss should be reported" <| - t1 = table_builder [["X", [1, (2^62)-1, 3]]] connection=data.connection - t2 = table_builder [["X", [1.5, 2.5, 3.5]]] connection=data.connection - t3 = table_builder [["X", [(2^100)+1, 2^10, 2]]] connection=data.connection + t1 = table_builder [["X", [1, (2^62)-1, 3]]] + t2 = table_builder [["X", [1.5, 2.5, 3.5]]] + t3 = table_builder [["X", [(2^100)+1, 2^10, 2]]] t1.at "X" . value_type . should_equal Value_Type.Integer t2.at "X" . value_type . should_equal Value_Type.Float @@ -376,19 +382,19 @@ add_specs suite_builder setup = w.affected_rows_count . should_equal 2 group_builder.specify "if type mismatches cause all columns to be dropped, fail with No_Output_Columns" <| - t1 = table_builder [["A", [1, 2, 3]]] connection=data.connection - t2 = table_builder [["A", ['x']]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]]] + t2 = table_builder [["A", ['x']]] e3 = t1.union t2 allow_type_widening=True on_problems=Problem_Behavior.Ignore e3.should_fail_with No_Output_Columns - t4 = table_builder [["A", [1.5]]] connection=data.connection + t4 = table_builder [["A", [1.5]]] e5 = t1.union t4 allow_type_widening=False on_problems=Problem_Behavior.Ignore e5.should_fail_with No_Output_Columns group_builder.specify "should find a common type (2)" <| - t1 = (table_builder [["X", [0, 1, 2]], ["Y", ['aa', 'bb', 'cc']]] connection=data.connection) . cast "X" (Value_Type.Integer Bits.Bits_16) . cast "Y" (Value_Type.Char size=2 variable_length=False) - t2 = (table_builder [["X", [3, 4, 5]], ["Y", ['x', 'y', 'z']]] connection=data.connection) . cast "X" (Value_Type.Integer Bits.Bits_32) . cast "Y" (Value_Type.Char size=1 variable_length=False) + t1 = (table_builder [["X", [0, 1, 2]], ["Y", ['aa', 'bb', 'cc']]]) . cast "X" (Value_Type.Integer Bits.Bits_16) . 
cast "Y" (Value_Type.Char size=2 variable_length=False) + t2 = (table_builder [["X", [3, 4, 5]], ["Y", ['x', 'y', 'z']]]) . cast "X" (Value_Type.Integer Bits.Bits_32) . cast "Y" (Value_Type.Char size=1 variable_length=False) supports_complex_types = (t1.is_error || t2.is_error || Problems.get_attached_warnings t1 . not_empty).not case supports_complex_types of False -> Nothing @@ -402,8 +408,8 @@ add_specs suite_builder setup = t12.at "Y" . to_vector . should_equal ['aa', 'bb', 'cc', 'x', 'y', 'z'] group_builder.specify "should fail to find a common type if widening is not allowed (2)" <| - t1 = (table_builder [["X", [0, 1, 2]], ["Y", ['aa', 'bb', 'cc']]] connection=data.connection) . cast "X" (Value_Type.Integer Bits.Bits_16) . cast "Y" (Value_Type.Char size=2 variable_length=False) - t2 = (table_builder [["X", [3, 4, 5]], ["Y", ['x', 'y', 'z']]] connection=data.connection) . cast "X" (Value_Type.Integer Bits.Bits_32) . cast "Y" (Value_Type.Char size=1 variable_length=False) + t1 = (table_builder [["X", [0, 1, 2]], ["Y", ['aa', 'bb', 'cc']]]) . cast "X" (Value_Type.Integer Bits.Bits_16) . cast "Y" (Value_Type.Char size=2 variable_length=False) + t2 = (table_builder [["X", [3, 4, 5]], ["Y", ['x', 'y', 'z']]]) . cast "X" (Value_Type.Integer Bits.Bits_32) . cast "Y" (Value_Type.Char size=1 variable_length=False) supports_complex_types = (t1.is_error || t2.is_error || Problems.get_attached_warnings t1 . not_empty).not case supports_complex_types of False -> Nothing @@ -417,7 +423,7 @@ add_specs suite_builder setup = t1.union t2 allow_type_widening=False on_problems=Problem_Behavior.Report_Error . 
should_fail_with Column_Type_Mismatch group_builder.specify "should gracefully handle tables from different backends" <| - t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] connection=data.connection + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] alternative_connection = Database.connect (SQLite In_Memory) t0 = (Table.new [["A", [1, 2, 4]], ["B", ["10", "20", "30"]]]).select_into_database_table alternative_connection "T0" temporary=True diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso index c933c745a69b..70dfa2d2d3f7 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso @@ -13,12 +13,29 @@ from Standard.Test_New import all from project.Common_Table_Operations.Util import expect_column_names, run_default_backend +type Data + Value ~connection + + setup create_connection_fn = + Data.Value (create_connection_fn Nothing) + + teardown self = self.connection.close + + add_specs suite_builder setup = prefix = setup.prefix - table_builder = setup.table_builder materialize = setup.materialize + create_connection_fn = setup.create_connection_func db_todo = if setup.is_database.not then Nothing else "Table.zip is still WIP for the DB backend." 
suite_builder.group prefix+"Table.zip" pending=db_todo group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder cols = + setup.table_builder cols connection=data.connection + if setup.is_database.not then group_builder.specify "should allow to zip two tables, preserving memory layout order" <| t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] diff --git a/test/Table_Tests/src/Database/Common/Common_Spec.enso b/test/Table_Tests/src/Database/Common/Common_Spec.enso index bc901a7e7a49..c65b9f63d944 100644 --- a/test/Table_Tests/src/Database/Common/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Common_Spec.enso @@ -186,7 +186,6 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn group_builder.specify "should allow to access a Table by name" <| name = data.t1.name - IO.println <| " Querying table with name " + name tmp = data.connection.query (SQL_Query.Table_Name name) tmp.read . should_equal data.t1.read From 06be31ec0bd9a42984d610600f2604a6a6e77e9c Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Wed, 24 Jan 2024 17:42:24 +0100 Subject: [PATCH 84/93] Merge Test/Extensions into Test_New/Extensions --- .../Test_New/0.0.0-dev/src/Extensions.enso | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso index 4f400efaafa9..1c7dd5f4076a 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Extensions.enso @@ -11,6 +11,8 @@ import project.Test.Test - matcher: The expected type of dataflow error contained in `self`. - frames_to_skip (optional, advanced): used to alter the location which is displayed as the source of this error. 
+ - unwrap_errors: If true, remove any wrapping errors from the result before + checking against the expected warning. > Example Assert that a computation should return an error of a given type. @@ -20,8 +22,9 @@ import project.Test.Test example_should_fail_with = Examples.throw_error . should_fail_with Examples.My_Error -Any.should_fail_with : Any -> Integer -> Spec_Result -Any.should_fail_with self matcher frames_to_skip=0 = +Any.should_fail_with : Any -> Integer -> Boolean -> Spec_Result +Any.should_fail_with self matcher frames_to_skip=0 unwrap_errors=True = + _ = unwrap_errors loc = Meta.get_source_location 1+frames_to_skip matcher_text = matcher . to_text Test.fail ("Expected an error " + matcher_text + " but no error occurred, instead got: " + self.to_text + " (at " + loc + ").") @@ -32,6 +35,8 @@ Any.should_fail_with self matcher frames_to_skip=0 = - matcher: The expected type of dataflow error contained in `self`. - frames_to_skip (optional, advanced): used to alter the location which is displayed as the source of this error. + - unwrap_errors: If true, remove any wrapping errors from the result before + checking against the expected warning. > Example Assert that a computation should return an error of a given type. @@ -41,9 +46,10 @@ Any.should_fail_with self matcher frames_to_skip=0 = example_should_fail_with = Examples.throw_error . should_fail_with Examples.My_Error -Error.should_fail_with : Any -> Integer -> Spec_Result -Error.should_fail_with self matcher frames_to_skip=0 = - caught = self.catch +Error.should_fail_with : Any -> Integer -> Boolean -> Spec_Result +Error.should_fail_with self matcher frames_to_skip=0 unwrap_errors=True = + unwrap_maybe error = if unwrap_errors then Error.unwrap error else error + caught = unwrap_maybe self.catch if caught == matcher || caught.is_a matcher then Nothing else loc = Meta.get_source_location 2+frames_to_skip matcher_text = matcher . 
to_text From bb18f4218c107228a855be9206a5d71e193d2528 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Wed, 24 Jan 2024 17:42:49 +0100 Subject: [PATCH 85/93] Remove temporary printing --- test/Table_Tests/src/Database/SQLite_Spec.enso | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index b69c5b51a289..e6960d2a2195 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -375,7 +375,6 @@ type File_Connection Value ~file setup = File_Connection.Value <| - IO.println <| " SQLite_Spec_New.File_Connection.setup" tmp_file = backing_file con = create_file_connection backing_file con.close @@ -384,7 +383,6 @@ type File_Connection teardown self = - IO.println <| " SQLite_Spec_New.File_Connection.teardown" assert self.file.exists self.file.delete From 390e7e3bdf1fe3426659ebc389e999fd005a48cf Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 25 Jan 2024 11:54:15 +0100 Subject: [PATCH 86/93] Refactor Nothing_Spec to Test_New after merge --- .../Common_Table_Operations/Nothing_Spec.enso | 187 +++++++++++++----- 1 file changed, 142 insertions(+), 45 deletions(-) diff --git a/test/Table_Tests/src/Common_Table_Operations/Nothing_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Nothing_Spec.enso index 57ac35f56e19..0b484a889291 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Nothing_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Nothing_Spec.enso @@ -9,19 +9,23 @@ import Standard.Test.Extensions from project.Common_Table_Operations.Util import run_default_backend -main = run_default_backend spec +main = run_default_backend add_specs type My_Type Value x:Text +type Data + Value ~connection -spec setup = + setup create_connection_fn = + Data.Value (create_connection_fn Nothing) + + teardown self = self.connection.close + + +add_specs suite_builder setup = prefix = setup.prefix - table_builder 
= setup.table_builder - table_builder_typed columns value_type = - cast_columns = columns.map c-> - Column.from_vector (c.at 0) (c.at 1) . cast value_type - setup.table_builder cast_columns + create_connection_fn = setup.create_connection_func # We cannot create a column of Nothing/NULL in the database without casting it to a non-mixed type. type_for_nothing_column = if setup.is_database then Value_Type.Char else Value_Type.Mixed @@ -42,7 +46,17 @@ spec setup = + mixed_values values_with_nothing = values_without_nothing + [[Nothing, Nothing, type_for_nothing_column]] - Test.group prefix+"Comparisons" <| + suite_builder.group prefix+"Comparisons" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder_typed columns value_type = + cast_columns = columns.map c-> + Column.from_vector (c.at 0) (c.at 1) . cast value_type + setup.table_builder cast_columns connection=data.connection + comparisons = [["==", ==], ["!=", !=], ["<", <], ["<=", <=], [">", >], [">=", >=]] comparisons.map pair-> @@ -53,99 +67,159 @@ spec setup = value = triple.at 0 value_type = triple.at 2 - table = table_builder_typed [["x", [value]], ["n", [Nothing]]] value_type - - Test.specify "Correctly handle Nothing in: "+value_type.to_text+" "+comparison_name+" Nothing value" <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" "+comparison_name+" Nothing value" <| + table = table_builder_typed [["x", [value]], ["n", [Nothing]]] value_type co = comparison (table.at "x") Nothing co.to_vector . should_equal [Nothing] - Test.specify "Correctly handle Nothing in: "+value_type.to_text+" "+comparison_name+" Nothing column" <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" "+comparison_name+" Nothing column" <| + table = table_builder_typed [["x", [value]], ["n", [Nothing]]] value_type co = comparison (table.at "x") (table.at "n") co.to_vector . 
should_equal [Nothing] - Test.specify "Correctly handle Nothing in: Nothing column "+comparison_name+" "+value_type.to_text <| + group_builder.specify "Correctly handle Nothing in: Nothing column "+comparison_name+" "+value_type.to_text <| + table = table_builder_typed [["x", [value]], ["n", [Nothing]]] value_type co = comparison (table.at "n") (table.at "x") co.to_vector . should_equal [Nothing] - Test.group prefix+"between" <| + suite_builder.group prefix+"between" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder_typed columns value_type = + cast_columns = columns.map c-> + Column.from_vector (c.at 0) (c.at 1) . cast value_type + setup.table_builder cast_columns connection=data.connection + values_with_nothing.map triple-> value = triple.at 0 value_type = triple.at 2 - table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type - - Test.specify "Correctly handle Nothing in: Nothing column between "+value_type.to_text+" and "+value_type.to_text <| + group_builder.specify "Correctly handle Nothing in: Nothing column between "+value_type.to_text+" and "+value_type.to_text <| + table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type co = table.at "n" . between (table.at "x") (table.at "y") co.to_vector . should_equal [Nothing] - Test.specify "Correctly handle Nothing in: "+value_type.to_text+" between Nothing column and "+value_type.to_text <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between Nothing column and "+value_type.to_text <| + table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type co = table.at "x" . between (table.at "n") (table.at "y") co.to_vector . 
should_equal [Nothing] - Test.specify "Correctly handle Nothing in: "+value_type.to_text+" between "+value_type.to_text+" and Nothing column" <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between "+value_type.to_text+" and Nothing column" <| + table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type co = table.at "x" . between (table.at "y") (table.at "n") co.to_vector . should_equal [Nothing] - Test.specify "Correctly handle Nothing in: "+value_type.to_text+" between Nothing value and "+value_type.to_text <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between Nothing value and "+value_type.to_text <| + table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type co = table.at "x" . between Nothing (table.at "y") co.to_vector . should_equal [Nothing] - Test.specify "Correctly handle Nothing in: "+value_type.to_text+" between "+value_type.to_text+" and Nothing value" <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between "+value_type.to_text+" and Nothing value" <| + table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type co = table.at "x" . between (table.at "y") Nothing co.to_vector . should_equal [Nothing] - Test.group prefix+"is_nothing" <| + suite_builder.group prefix+"is_nothing" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder_typed columns value_type = + cast_columns = columns.map c-> + Column.from_vector (c.at 0) (c.at 1) . 
cast value_type + setup.table_builder cast_columns connection=data.connection + values_with_nothing.map triple-> value = triple.at 0 value_type = triple.at 2 - Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_nothing" <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_nothing" <| table = table_builder_typed [["x", [value]]] value_type co = table.at "x" . is_nothing co.to_vector . should_equal [value == Nothing] - Test.group prefix+"not" <| - Test.specify "Correctly handle Nothing in .not" <| + suite_builder.group prefix+"not" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder columns = + setup.table_builder columns connection=data.connection + + group_builder.specify "Correctly handle Nothing in .not" <| table = table_builder [["x", [True, False, Nothing]]] table.at "x" . not . to_vector . should_equal [False, True, Nothing] - Test.group prefix+"is_in" <| + suite_builder.group prefix+"is_in" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder_typed columns value_type = + cast_columns = columns.map c-> + Column.from_vector (c.at 0) (c.at 1) . 
cast value_type + setup.table_builder cast_columns connection=data.connection + values_with_nothing.map triple-> value = triple.at 0 other_value = triple.at 1 value_type = triple.at 2 - table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type true_if_not_nothing = if value == Nothing then Nothing else True - Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Column (returning True)" <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Column (returning True)" <| + table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type table.at "x" . is_in (table.at "z") . to_vector . should_equal [true_if_not_nothing, Nothing] - Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Column with Nothings (returning True)" <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Column with Nothings (returning True)" <| + table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type table.at "x" . is_in (table.at "x") . to_vector . should_equal [true_if_not_nothing, Nothing] - Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Column with Nothings (returning Nothing)" <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Column with Nothings (returning Nothing)" <| + table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type table.at "x" . is_in (table.at "y") . to_vector . 
should_equal [Nothing, Nothing] - Test.specify "Correctly handle Nothing in: Nothing.is_in Column with Nothings (returning Nothing)" <| + group_builder.specify "Correctly handle Nothing in: Nothing.is_in Column with Nothings (returning Nothing)" <| + table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type table.at "n" . is_in (table.at "x") . to_vector . should_equal [Nothing, Nothing] - Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector (returning True)" <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector (returning True)" <| + table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type table.at "x" . is_in (table.at "z" . to_vector) . to_vector . should_equal [true_if_not_nothing, Nothing] - Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector with Nothings (returning True)" <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector with Nothings (returning True)" <| + table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type table.at "x" . is_in (table.at "x" . to_vector) . to_vector . should_equal [true_if_not_nothing, Nothing] - Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector with Nothings (returning Nothing)" <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector with Nothings (returning Nothing)" <| + table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type table.at "x" . is_in (table.at "y" . to_vector) . to_vector . 
should_equal [Nothing, Nothing] - Test.specify "Correctly handle Nothing in: Nothing.is_in Vector with Nothings (returning Nothing)" <| + group_builder.specify "Correctly handle Nothing in: Nothing.is_in Vector with Nothings (returning Nothing)" <| + table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type table.at "n" . is_in (table.at "x" . to_vector) . to_vector . should_equal [Nothing, Nothing] - Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in empty Vector (returning False)" <| + group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in empty Vector (returning False)" <| + table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type table.at "x" . is_in [] . to_vector . should_equal [False, False] - Test.group prefix+"Boolean is_in" <| + suite_builder.group prefix+"Boolean is_in" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder_typed columns value_type = + cast_columns = columns.map c-> + Column.from_vector (c.at 0) (c.at 1) . cast value_type + setup.table_builder cast_columns connection=data.connection + make_containing_values had_null had_true had_false = null_maybe = if had_null then [Nothing] else [] true_maybe = if had_true then [True] else [] @@ -183,7 +257,7 @@ spec setup = output = cs.at 4 containing_values = make_containing_values (cs.at 1) (cs.at 2) (cs.at 3) - Test.specify "Boolean is_in: (Vector), "+negation_desc+" "+cs.to_text <| + group_builder.specify "Boolean is_in: (Vector), "+negation_desc+" "+cs.to_text <| input_column = transform_input [input] t = table_builder_typed [["input", input_column]] Value_Type.Boolean @@ -191,7 +265,7 @@ spec setup = c.to_vector . 
should_equal [output] - Test.specify "Boolean is_in: (Column), "+negation_desc+" "+cs.to_text <| + group_builder.specify "Boolean is_in: (Column), "+negation_desc+" "+cs.to_text <| input_column = transform_input (Vector.fill containing_values.length input) t = table_builder_typed [["input", input_column], ["containing", transform_argument containing_values]] Value_Type.Boolean expected_output = if input_column.is_empty then [] else [output] @@ -201,26 +275,49 @@ spec setup = c.to_vector . length . should_equal input_column.length c.to_vector.distinct . should_equal expected_output - Test.group prefix+"distinct" <| + suite_builder.group prefix+"distinct" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder columns = + setup.table_builder columns connection=data.connection + + table_builder_typed columns value_type = + cast_columns = columns.map c-> + Column.from_vector (c.at 0) (c.at 1) . cast value_type + setup.table_builder cast_columns connection=data.connection + values_without_nothing.map triple-> value = triple.at 0 other_value = triple.at 1 value_type = triple.at 2 - Test.specify "Correctly handle Nothing in .distinct for "+value_type.to_text <| + group_builder.specify "Correctly handle Nothing in .distinct for "+value_type.to_text <| table = table_builder [["x", [value, other_value, other_value, Nothing, value, Nothing]]] t1 = table . distinct ["x"] v = t1.at "x" . to_vector v . length . should_equal 3 v . should_contain_the_same_elements_as [value, other_value, Nothing] - Test.specify "Correctly handle Nothing in .distinct for Nothing" <| + group_builder.specify "Correctly handle Nothing in .distinct for Nothing" <| table = table_builder_typed [["x", [Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]]] Value_Type.Char t1 = table . distinct ["x"] v = t1.at "x" . to_vector v . 
should_equal [Nothing] - Test.group prefix+"order_by" <| + suite_builder.group prefix+"order_by" group_builder-> + data = Data.setup create_connection_fn + + group_builder.teardown <| + data.teardown + + table_builder_typed columns value_type = + cast_columns = columns.map c-> + Column.from_vector (c.at 0) (c.at 1) . cast value_type + setup.table_builder cast_columns connection=data.connection + values_with_nothing.map triple-> value = triple.at 0 other_value = triple.at 1 @@ -231,10 +328,10 @@ spec setup = if is_comparable then table = table_builder_typed [["x", [value, Nothing, other_value, other_value, Nothing, value, Nothing]]] value_type - Test.specify "Correctly handle Nothing in .order_by (asc) for "+value_type.to_text <| + group_builder.specify "Correctly handle Nothing in .order_by (asc) for "+value_type.to_text <| t1 = table . order_by [Sort_Column.Name "x" Sort_Direction.Ascending] t1.at "x" . to_vector . should_equal [Nothing, Nothing, Nothing, value, value, other_value, other_value] - Test.specify "Correctly handle Nothing in .order_by (desc) for "+value_type.to_text <| + group_builder.specify "Correctly handle Nothing in .order_by (desc) for "+value_type.to_text <| t1 = table . order_by [Sort_Column.Name "x" Sort_Direction.Descending] t1.at "x" . to_vector . 
should_equal [other_value, other_value, value, value, Nothing, Nothing, Nothing] From b1e45d38bf43e4d7d1940606fb24b66bac1936c2 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 25 Jan 2024 13:01:49 +0100 Subject: [PATCH 87/93] Fix cargo lint --- build/build/src/enso.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build/build/src/enso.rs b/build/build/src/enso.rs index cc5d5371737b..ae2d82126a34 100644 --- a/build/build/src/enso.rs +++ b/build/build/src/enso.rs @@ -2,9 +2,9 @@ use crate::prelude::*; use crate::paths::Paths; use crate::paths::ENSO_ENABLE_ASSERTIONS; -use crate::paths::ENSO_TEST_ANSI_COLORS; use crate::paths::ENSO_META_TEST_ARGS; use crate::paths::ENSO_META_TEST_COMMAND; +use crate::paths::ENSO_TEST_ANSI_COLORS; use crate::postgres; use crate::postgres::EndpointConfiguration; use crate::postgres::Postgresql; From d8af7ea90db71b5ad03e5c635687d463c777c2ce Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 25 Jan 2024 13:16:05 +0100 Subject: [PATCH 88/93] Refactor Base_Tests/Numbers_Spec to Test_New --- test/Base_Tests/src/Data/Numbers_Spec.enso | 139 +++++++++--------- test/Base_Tests/src/Main.enso | 161 +++++++++++---------- 2 files changed, 154 insertions(+), 146 deletions(-) diff --git a/test/Base_Tests/src/Data/Numbers_Spec.enso b/test/Base_Tests/src/Data/Numbers_Spec.enso index 3399425e63d1..4482383d73ec 100644 --- a/test/Base_Tests/src/Data/Numbers_Spec.enso +++ b/test/Base_Tests/src/Data/Numbers_Spec.enso @@ -6,8 +6,7 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument from Standard.Base.Data.Numbers import Number_Parse_Error -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions +from Standard.Test_New import all import project.Data.Round_Spec @@ -27,7 +26,7 @@ type Complex Complex.from (that:Number) = Complex.new that -spec = +add_specs suite_builder = eps = 0.000001 almost_max_long = 9223372036854775806 almost_max_long_times_three = 27670116110564327418 @@ -36,61 +35,60 @@ spec = 
hundred_factorial = 93326215443944152681699238856266700490715968264381621468592963895217599993229915608941463976156518286253697920827223758251185210916864000000000000000000000000 very_negative = -99223372036854775808 - Round_Spec.spec "Number " .round + suite_builder.group "Integers" group_builder-> + Round_Spec.add_specs group_builder (.round) - Test.group "Integers" <| - - Test.specify "should be of unbound size when multiplied" <| + group_builder.specify "should be of unbound size when multiplied" <| 1.up_to 101 . fold 1 (*) . should_equal hundred_factorial - Test.specify "should be of unbound size when added" <| + group_builder.specify "should be of unbound size when added" <| (almost_max_long + almost_max_long + almost_max_long).should_equal almost_max_long_times_three - Test.specify "should be of unbound size when subtracted" <| + group_builder.specify "should be of unbound size when subtracted" <| (0 - almost_max_long - almost_max_long - almost_max_long).should_equal almost_max_long_times_three.negate - Test.specify "should be of unbound size when dividing" <| + group_builder.specify "should be of unbound size when dividing" <| expected = 3372816184472482867110284450043137767873196479305249187406461598235841786750685581361224832688174410089430537516012695688121622150430744676 ((1.up_to 101 . fold 1 (*)).div 3*almost_max_long).should_equal expected - Test.specify "should be of unbound size when taking remainder" <| + group_builder.specify "should be of unbound size when taking remainder" <| expected = 3191479909175673432 ((1.up_to 101 . fold 1 (*)) % 3*almost_max_long).should_equal expected - Test.specify "should allow defining extension methods through the Integer type for any number size" <| + group_builder.specify "should allow defining extension methods through the Integer type for any number size" <| 876543.is_even.should_be_false (1.up_to 101 . 
fold 1 (*)).is_even.should_be_true - Test.specify "should handle the negation edge cases" <| + group_builder.specify "should handle the negation edge cases" <| x = 9223372036854775808 y = -x z = -9223372036854775808 y.should_equal z - Test.specify "should handle equality between small and big integers" <| + group_builder.specify "should handle equality between small and big integers" <| (1 == hundred_factorial).should_be_false (hundred_factorial == 1).should_be_false - Test.specify "should properly handle going to big numbers and back" <| + group_builder.specify "should properly handle going to big numbers and back" <| ((almost_max_long * 3) / 3) . should_equal almost_max_long - Test.specify "should use floating point arithmetic for division" <| + group_builder.specify "should use floating point arithmetic for division" <| (3 / 4) . should_equal 0.75 epsilon=eps (almost_max_long * 2 / almost_max_long_times_three) . should_equal 0.6666666 epsilon=eps - Test.specify "should support integer division" <| + group_builder.specify "should support integer division" <| (10.div 3) . should_equal 3 (10.div 0).should_fail_with Arithmetic_Error - Test.specify "should support integral binary literals" <| + group_builder.specify "should support integral binary literals" <| lit = 2_01101101 lit . should_equal 109 - Test.specify "should support integral hexadecimal literals" <| + group_builder.specify "should support integral hexadecimal literals" <| lit = 16_6D lit . should_equal 109 - Test.specify "should support bitwise and" <| + group_builder.specify "should support bitwise and" <| left = 2_01101101 right = 2_11000100 big_left = 16_17ffffffffffffffa @@ -100,7 +98,7 @@ spec = big_left.bit_and right . should_equal 2_11000000 big_left.bit_and big_right . 
should_equal 16_17ffffffffffffff8 - Test.specify "should support bitwise or" <| + group_builder.specify "should support bitwise or" <| left = 2_01101101 right = 2_11000100 big_left = 16_17ffffffffffffffa @@ -110,7 +108,7 @@ spec = big_left.bit_or right . should_equal 16_17ffffffffffffffe big_left.bit_or right . should_equal 16_17ffffffffffffffe - Test.specify "should support bitwise exclusive or" <| + group_builder.specify "should support bitwise exclusive or" <| left = 2_01101101 right = 2_11000100 big_left = 16_17ffffffffffffffa @@ -120,7 +118,7 @@ spec = big_left.bit_xor right . should_equal 16_17fffffffffffff3e big_left.bit_xor big_right . should_equal 2_00000110 - Test.specify "should support bitwise negation" <| + group_builder.specify "should support bitwise negation" <| bits = 2_01101101 big_bits = 16_17ffffffffffffffa bits.bit_not . should_equal -2_01101110 @@ -128,7 +126,7 @@ spec = big_bits.bit_not . should_equal -16_17ffffffffffffffb big_bits.bit_not.bit_not . should_equal big_bits - Test.specify "should support left bit shifts" <| + group_builder.specify "should support left bit shifts" <| positive_bits = 2_01101101 negative_bits = -2_01101101 positive_big_bits = almost_max_long_times_three @@ -162,7 +160,7 @@ spec = (negative_big_bits.bit_shift_l positive_big_bits).should_fail_with Arithmetic_Error negative_big_bits.bit_shift_l negative_big_bits . should_equal -1 - Test.specify "should support right bit shifts, preserving sign" <| + group_builder.specify "should support right bit shifts, preserving sign" <| positive_bits = 2_01101101 negative_bits = -2_01101101 positive_big_bits = almost_max_long_times_three @@ -196,7 +194,7 @@ spec = (negative_big_bits.bit_shift_r negative_big_bits).should_fail_with Arithmetic_Error negative_big_bits.bit_shift_r positive_big_bits . should_equal -1 - Test.specify "should be able to parse" <| + group_builder.specify "should be able to parse" <| Integer.parse "1245623" . should_equal 1245623 Integer.parse "012345" . 
should_equal 12345 Integer.parse "-1234567" . should_equal -1234567 @@ -206,17 +204,17 @@ spec = Integer.parse "123A" . should_fail_with Number_Parse_Error Integer.parse "aaaa" . should_fail_with Number_Parse_Error - Test.specify "should parse hundred factorial" <| + group_builder.specify "should parse hundred factorial" <| txt = hundred_factorial.to_text number = Integer.parse txt number . should_equal hundred_factorial - Test.specify "should fail on too huge decimal" <| + group_builder.specify "should fail on too huge decimal" <| txt = hundred_factorial.to_text + ".345" number = Integer.parse txt number . should_fail_with Number_Parse_Error - Test.specify "should be able to parse alternate bases" <| + group_builder.specify "should be able to parse alternate bases" <| Integer.parse "1245623" 8 . should_equal 347027 Integer.parse "-1245623" 8 . should_equal -347027 Integer.parse "0001245623" 8 . should_equal 347027 @@ -235,22 +233,22 @@ spec = Integer.parse "-101021010" 2 . should_fail_with Number_Parse_Error Integer.parse "123" 128 . should_fail_with Number_Parse_Error - Test.specify "should be able to invoke methods on Integer via static method call" <| + group_builder.specify "should be able to invoke methods on Integer via static method call" <| Integer.+ 1 2 . should_equal 3 Integer.+ 1 2.5 . should_equal 3.5 Test.expect_panic_with (Integer.+ 1.5 1) Type_Error Test.expect_panic_with (Integer.+ 1.5 2.5) Type_Error Test.expect_panic_with (Integer.+ 1 "hello") Type_Error - Test.group "Floats" <| + suite_builder.group "Floats" group_builder-> - Test.specify "should exist and expose basic arithmetic operations" <| + group_builder.specify "should exist and expose basic arithmetic operations" <| ((1.5 + 1.5)*1.3 / 2 - 3) . 
should_equal -1.05 epsilon=eps - Test.specify "should allow defining extension methods through the Float type" <| + group_builder.specify "should allow defining extension methods through the Float type" <| 32.5.get_fun_factor.should_equal "Wow, 32.5 is such a fun number!" - Test.specify "should be able to be parsed" <| + group_builder.specify "should be able to be parsed" <| Float.parse "32.5" . should_equal 32.5 Float.parse "0122.5" . should_equal 122.5 Float.parse "-98.5" . should_equal -98.5 @@ -258,7 +256,7 @@ spec = Float.parse "000000.0001" . should_equal 0.0001 Float.parse "aaaa" . should_fail_with Number_Parse_Error - Test.specify "parse with locale" <| + group_builder.specify "parse with locale" <| l = Locale.new "cs" Float.parse "32,5" l . should_equal 32.5 Float.parse "0122,5" l . should_equal 122.5 @@ -267,21 +265,21 @@ spec = Float.parse "000000,0001" l . should_equal 0.0001 Float.parse "aaaa" l . should_fail_with Number_Parse_Error - Test.specify "decimal should parse hundred factorial well" <| + group_builder.specify "decimal should parse hundred factorial well" <| txt = hundred_factorial.to_text + ".345" decimal = Float.parse txt is_huge = decimal > (hundred_factorial / 5) is_huge . should_equal True - Test.group "Numbers" <| + suite_builder.group "Numbers" group_builder-> - Test.specify "should define addition" <| + group_builder.specify "should define addition" <| 2+3 . should_equal 5 - Test.specify "should define multiplication" <| + group_builder.specify "should define multiplication" <| 2*3 . should_equal 6 - Test.specify "should define modulo for integers and decimals" <| + group_builder.specify "should define modulo for integers and decimals" <| 5%3 . should_equal 2 5%3 . should_be_a Integer @@ -318,7 +316,7 @@ spec = hundred_factorial%hundred_factorial . should_equal 0 10%hundred_factorial . 
should_equal 10 - Test.specify "should support less than operator" <| + group_builder.specify "should support less than operator" <| (1 < 2).should_be_true (1 < 1).should_be_false (1 < 0).should_be_false @@ -342,7 +340,7 @@ spec = (very_negative < hundred_factorial).should_be_true (very_negative < Nothing).should_fail_with Incomparable_Values - Test.specify "should support less than or equal to operator" <| + group_builder.specify "should support less than or equal to operator" <| (1 <= 2).should_be_true (1 <= 1).should_be_true (1 <= 0).should_be_false @@ -366,7 +364,7 @@ spec = (very_negative <= hundred_factorial).should_be_true (very_negative <= Nothing).should_fail_with Incomparable_Values - Test.specify "should support greater than operator" <| + group_builder.specify "should support greater than operator" <| (1 > 2).should_be_false (1 > 1).should_be_false (1 > 0).should_be_true @@ -390,7 +388,7 @@ spec = (very_negative > hundred_factorial).should_be_false (very_negative > Nothing).should_fail_with Incomparable_Values - Test.specify "should support greater than or equal to operator" <| + group_builder.specify "should support greater than or equal to operator" <| (1 >= 2).should_be_false (1 >= 1).should_be_true (1 >= 0).should_be_true @@ -414,7 +412,7 @@ spec = (very_negative >= hundred_factorial).should_be_false (very_negative >= Nothing).should_fail_with Incomparable_Values - Test.specify "should be ordered by the default comparator" <| + group_builder.specify "should be ordered by the default comparator" <| Ordering.compare 1 2 . should_equal Ordering.Less Ordering.compare 1 1 . should_equal Ordering.Equal Ordering.compare 1 0 . should_equal Ordering.Greater @@ -438,7 +436,7 @@ spec = Ordering.compare very_negative hundred_factorial . should_equal Ordering.Less Ordering.compare very_negative Nothing . 
should_fail_with Incomparable_Values - Test.specify "should expose exponentiation operations" <| + group_builder.specify "should expose exponentiation operations" <| (3.14 ^ 2.71).should_equal 22.216689546 epsilon=eps (3.14 ^ 14).should_equal 9057640.36635 epsilon=eps a = almost_max_long @@ -447,26 +445,26 @@ spec = (2 ^ 0.5).should_equal 1.41421356237 epsilon=eps (a^2)^0.5 . should_equal a epsilon=eps - Test.specify "should expose more involved mathematical functions" <| + group_builder.specify "should expose more involved mathematical functions" <| Math.pi.sin.should_equal 0 epsilon=eps (Math.pi / 4).sin.should_equal (2 ^ 0.5)/2 epsilon=eps (Math.pi / 6).cos.should_equal (3.sqrt / 2) epsilon=eps (17 ^ 0.13).log base=17 . should_equal 0.13 epsilon=eps 0.exp.should_equal 1 - Test.specify "should allow calculating the floor value" <| + group_builder.specify "should allow calculating the floor value" <| 1.2314.floor . should_equal 1 1.floor . should_equal 1 almost_max_long_times_three_decimal.floor.to_float . should_equal almost_max_long_times_three.to_float almost_max_long_times_three.floor . should_equal almost_max_long_times_three - Test.specify "should allow calculating the ceil value" <| + group_builder.specify "should allow calculating the ceil value" <| 1.2314.ceil . should_equal 2 1.ceil . should_equal 1 almost_max_long_times_three_decimal.ceil.to_float . should_equal almost_max_long_times_three_plus_1.to_float almost_max_long_times_three_plus_1.ceil . should_equal almost_max_long_times_three_plus_1 - Test.specify "should expose a NaN value" <| + group_builder.specify "should expose a NaN value" <| Number.nan.is_nan . should_be_true 0.is_nan . should_be_false Number.positive_infinity.is_nan . should_be_false @@ -481,7 +479,7 @@ spec = Number.nan==0 . should_be_false Number.nan!=Number.nan . should_be_true - Test.specify "should support inexact equality comparisons" <| + group_builder.specify "should support inexact equality comparisons" <| 1.0001 . 
equals 1.0002 epsilon=0.01 . should_be_true 1.0001 . equals 1.0002 epsilon=0.0000001 . should_be_false @@ -499,14 +497,14 @@ spec = Number.nan . equals Number.nan . should_fail_with Incomparable_Values Number.nan . equals 0 . should_fail_with Incomparable_Values - Test.group "rounding" <| - Test.specify "Returns the correct type" <| + suite_builder.group "rounding" group_builder-> + group_builder.specify "Returns the correct type" <| 231 . round 1 . should_be_a Integer 231 . round 0 . should_be_a Integer 231 . round . should_be_a Integer 231 . round -1 . should_be_a Integer - Test.specify "Input out of range" <| + group_builder.specify "Input out of range" <| 100000000000000 . round -2 . should_fail_with Illegal_Argument -100000000000000 . round -2 . should_fail_with Illegal_Argument 100000000000000 . round . should_fail_with Illegal_Argument @@ -518,17 +516,17 @@ spec = 99999999999999 . round -2 . should_equal 100000000000000 -99999999999999 . round -2 . should_equal -100000000000000 - Test.specify "Reject bigints before reaching the Java" <| + group_builder.specify "Reject bigints before reaching the Java" <| 922337203685477580700000 . round . should_fail_with Illegal_Argument -922337203685477580700000 . round . should_fail_with Illegal_Argument - Test.specify "Can handle small numbers computed from bigints" <| + group_builder.specify "Can handle small numbers computed from bigints" <| (922337203685477580712345 - 922337203685477580700000) . round . should_equal 12345 ((99999999999998 * 1000).div 1000) . round . should_equal 99999999999998 - Test.group "Float.truncate" + suite_builder.group "Float.truncate" group_builder-> - Test.specify "Correctly converts to Integer" <| + group_builder.specify "Correctly converts to Integer" <| 0.1.truncate . should_equal 0 0.9.truncate . should_equal 0 3.1.truncate . should_equal 3 @@ -538,24 +536,24 @@ spec = -3.1.truncate . should_equal -3 -3.9.truncate . 
should_equal -3 - Test.group "Integer.truncate" + suite_builder.group "Integer.truncate" group_builder-> - Test.specify "Returns its argument" <| + group_builder.specify "Returns its argument" <| 0.truncate . should_equal 0 3.truncate . should_equal 3 -3.truncate . should_equal -3 - Test.group "Number Conversions" <| - Test.specify "Returns its argument" <| + suite_builder.group "Number Conversions" group_builder-> + group_builder.specify "Returns its argument" <| v1 = (Complex.new 1 2) + (Complex.new 3) v2 = (Complex.new 1 2) + 3 v1 . should_equal v2 - Test.group "BigInts" <| + suite_builder.group "BigInts" group_builder-> expected_value = 2002115494039257055317447151023464523838443110452722331466645440244415760562579268801894716412 bigint_spec name create mul = - Test.specify name+" 6 * 7" <| + group_builder.specify name+" 6 * 7" <| six = create 6 seven = create 7 both = six*seven @@ -565,7 +563,7 @@ spec = seven.is_a Integer . should_be_true both.is_a Integer . should_be_true - Test.specify name+" power of 3 with running_fold" <| + group_builder.specify name+" power of 3 with running_fold" <| three = create 3 two_hundred = 1.up_to 200 . running_fold three a->_-> @@ -575,7 +573,7 @@ spec = sum.is_a Integer . should_be_true sum.div two_hundred.length . should_equal expected_value - Test.specify name+" power of 3 with map" <| + group_builder.specify name+" power of 3 with map" <| three = create 3 two_hundred = 1.up_to 200 . map i-> @@ -588,7 +586,7 @@ spec = v -> Test.fail "Expecting Integer, but got: "+(Meta.type_of v).to_text sum.div two_hundred.length . 
should_equal expected_value - Test.specify name+" BigInteger to BigDecimal test" <| + group_builder.specify name+" BigInteger to BigDecimal test" <| h = 2^70 bd1 = BigDecimal.new h 0 bd2 = BigDecimal.new h @@ -606,7 +604,7 @@ spec = bigint_spec "Java" to_java_bigint java_bigint_mul bigint_spec "JavaScript" to_js_bigint js_bigint_mul - Test.specify "Matrix of values" <| + group_builder.specify "Matrix of values" <| x = to_java_bigint 10 y = 10 z = 10.0 @@ -616,7 +614,10 @@ spec = [x, y, z, w].each b-> a+b . should_equal 20 -main = Test_Suite.run_main spec +main = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter foreign js to_js_bigint n = """ return BigInt(n) diff --git a/test/Base_Tests/src/Main.enso b/test/Base_Tests/src/Main.enso index b96dc38177d8..4c8ac0d14ef3 100644 --- a/test/Base_Tests/src/Main.enso +++ b/test/Base_Tests/src/Main.enso @@ -89,80 +89,87 @@ import project.System.Temporary_File_Spec import project.Random_Spec -main = Test_Suite.run_main <| - Any_Spec.spec - Array_Spec.spec - Array_Proxy_Spec.spec - Array_Polyglot_Spec.spec - Bool_Spec.spec - Base_64_Spec.spec - Function_Spec.spec - Case_Spec.spec - Conversion_Spec.spec - Deep_Export_Spec.spec - Error_Spec.spec - Environment_Spec.spec - File_Spec.spec - Temporary_File_Spec.spec - File_Read_Spec.spec - Reporting_Stream_Decoder_Spec.spec - Reporting_Stream_Encoder_Spec.spec - Http_Header_Spec.spec - Http_Request_Spec.spec - Http_Spec.spec - Http_Auto_Parse_Spec.spec - Enso_Cloud_Main.spec - Import_Loop_Spec.spec - Interval_Spec.spec - Java_Interop_Spec.spec - Js_Interop_Spec.spec - Json_Spec.spec - List_Spec.spec - Locale_Spec.spec - Map_Spec.spec - Set_Spec.spec - Maybe_Spec.spec - Meta_Spec.spec - Instrumentor_Spec.spec - Meta_Location_Spec.spec - Names_Spec.spec - Equals_Spec.spec - Numbers_Spec.spec - Ordering_Spec.spec - Comparator_Spec.spec - Natural_Order_Spec.spec - Vector_Lexicographic_Order_Spec.spec - Process_Spec.spec - Python_Interop_Spec.spec - 
R_Interop_Spec.spec - Pair_Spec.spec - Parse_Spec.spec - Problems_Spec.spec - Range_Spec.spec - Ref_Spec.spec - Regex_Spec.spec - Ascribed_Parameters_Spec.spec - Asserts_Spec.spec - Lazy_Spec.spec - Runtime_Spec.spec - Self_Type_Spec.spec - Span_Spec.spec - State_Spec.spec - Encoding_Spec.spec - Text_Sub_Range_Spec.spec - Managed_Resource_Spec.spec - Missing_Required_Arguments_Spec.spec - Lazy_Generator_Spec.spec - Stack_Traces_Spec.spec - Utils_Spec.spec - Text_Spec.spec - Time_Spec.spec - URI_Spec.spec - Vector_Spec.spec - Slicing_Helpers_Spec.spec - Statistics_Spec.spec - Regression_Spec.spec - Warnings_Spec.spec - System_Spec.spec - Random_Spec.spec - XML_Spec.spec +main = + Test_Suite.run_main <| + Any_Spec.spec + Array_Spec.spec + Array_Proxy_Spec.spec + Array_Polyglot_Spec.spec + Bool_Spec.spec + Base_64_Spec.spec + Function_Spec.spec + Case_Spec.spec + Conversion_Spec.spec + Deep_Export_Spec.spec + Error_Spec.spec + Environment_Spec.spec + File_Spec.spec + Temporary_File_Spec.spec + File_Read_Spec.spec + Reporting_Stream_Decoder_Spec.spec + Reporting_Stream_Encoder_Spec.spec + Http_Header_Spec.spec + Http_Request_Spec.spec + Http_Spec.spec + Http_Auto_Parse_Spec.spec + Enso_Cloud_Main.spec + Import_Loop_Spec.spec + Interval_Spec.spec + Java_Interop_Spec.spec + Js_Interop_Spec.spec + Json_Spec.spec + List_Spec.spec + Locale_Spec.spec + Map_Spec.spec + Set_Spec.spec + Maybe_Spec.spec + Meta_Spec.spec + Instrumentor_Spec.spec + Meta_Location_Spec.spec + Names_Spec.spec + Equals_Spec.spec + Ordering_Spec.spec + Comparator_Spec.spec + Natural_Order_Spec.spec + Vector_Lexicographic_Order_Spec.spec + Process_Spec.spec + Python_Interop_Spec.spec + R_Interop_Spec.spec + Pair_Spec.spec + Parse_Spec.spec + Problems_Spec.spec + Range_Spec.spec + Ref_Spec.spec + Regex_Spec.spec + Ascribed_Parameters_Spec.spec + Asserts_Spec.spec + Lazy_Spec.spec + Runtime_Spec.spec + Self_Type_Spec.spec + Span_Spec.spec + State_Spec.spec + Encoding_Spec.spec + Text_Sub_Range_Spec.spec + 
Managed_Resource_Spec.spec + Missing_Required_Arguments_Spec.spec + Lazy_Generator_Spec.spec + Stack_Traces_Spec.spec + Utils_Spec.spec + Text_Spec.spec + Time_Spec.spec + URI_Spec.spec + Vector_Spec.spec + Slicing_Helpers_Spec.spec + Statistics_Spec.spec + Regression_Spec.spec + Warnings_Spec.spec + System_Spec.spec + Random_Spec.spec + XML_Spec.spec + + # Round_Spec needs to be migrated to Test_New, as it is used also from Table_Tests, that + # are entirely migrated to Test_New. This is just a temporary workaround, until the migration + # is complete. + suite = Test_New.build suite_builder-> + Numbers_Spec.add_specs suite_builder + suite.run_with_filter From 774ccbf2c57a20e037703d02d0e5ec3f0d6cdaaf Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 25 Jan 2024 14:01:11 +0100 Subject: [PATCH 89/93] In_Memory/Main includes specs from In_Memory/Common_Spec --- test/Table_Tests/src/In_Memory/Main.enso | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/Table_Tests/src/In_Memory/Main.enso b/test/Table_Tests/src/In_Memory/Main.enso index ef24fc1046ad..5da25c55f889 100644 --- a/test/Table_Tests/src/In_Memory/Main.enso +++ b/test/Table_Tests/src/In_Memory/Main.enso @@ -6,6 +6,7 @@ import project.In_Memory.Aggregate_Column_Spec import project.In_Memory.Builders_Spec import project.In_Memory.Column_Spec import project.In_Memory.Column_Format_Spec +import project.In_Memory.Common_Spec import project.In_Memory.Integer_Overflow_Spec import project.In_Memory.Lossy_Conversions_Spec import project.In_Memory.Parse_To_Table_Spec @@ -19,6 +20,7 @@ add_specs suite_builder = Table_Spec.add_specs suite_builder Column_Spec.add_specs suite_builder Column_Format_Spec.add_specs suite_builder + Common_Spec.add_specs suite_builder Integer_Overflow_Spec.add_specs suite_builder Lossy_Conversions_Spec.add_specs suite_builder Table_Date_Spec.add_specs suite_builder From 9b2b4e7ae9bcef332cba024d9ae8b83e7832ba99 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 25 Jan 2024 16:58:47 
+0100 Subject: [PATCH 90/93] Run both old and new tests in Base_Tests/Main.enso --- test/Base_Tests/src/Main.enso | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/test/Base_Tests/src/Main.enso b/test/Base_Tests/src/Main.enso index 4c8ac0d14ef3..830c303cca82 100644 --- a/test/Base_Tests/src/Main.enso +++ b/test/Base_Tests/src/Main.enso @@ -2,6 +2,7 @@ from Standard.Base import all from Standard.Test import Test_Suite import Standard.Test.Extensions +import Standard.Test_New import project.Semantic.Any_Spec import project.Semantic.Case_Spec @@ -90,6 +91,14 @@ import project.System.Temporary_File_Spec import project.Random_Spec main = + # Round_Spec needs to be migrated to Test_New, as it is used also from Table_Tests, that + # are entirely migrated to Test_New. This is just a temporary workaround, until the migration + # is complete. + suite = Test_New.Test.build suite_builder-> + Numbers_Spec.add_specs suite_builder + succeeded = suite.run_with_filter should_exit=False + if succeeded.not then System.exit 1 + Test_Suite.run_main <| Any_Spec.spec Array_Spec.spec @@ -167,9 +176,3 @@ main = Random_Spec.spec XML_Spec.spec - # Round_Spec needs to be migrated to Test_New, as it is used also from Table_Tests, that - # are entirely migrated to Test_New. This is just a temporary workaround, until the migration - # is complete. 
- suite = Test_New.build suite_builder-> - Numbers_Spec.add_specs suite_builder - suite.run_with_filter From 305f02428a272720f17e167fddd6ee690f7a6510 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 25 Jan 2024 16:59:03 +0100 Subject: [PATCH 91/93] Add should_exit parameter to Suite.run_with_filter --- .../Standard/Test_New/0.0.0-dev/src/Suite.enso | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso index ba08a41caeec..b2cabf485ca3 100644 --- a/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso +++ b/distribution/lib/Standard/Test_New/0.0.0-dev/src/Suite.enso @@ -54,8 +54,9 @@ type Suite Arguments: - group_filter: Filter for group names. - spec_filter: Filter for spec names. + - should_exit: If true, executes `System.exit` at the end. run_with_filter : (Regex | Text | Nothing) -> (Regex | Text | Nothing) -> Nothing - run_with_filter self group_filter=Nothing spec_filter=Nothing = + run_with_filter self group_filter=Nothing spec_filter=Nothing should_exit=True = config = Suite_Config.from_environment filtered_groups = self.groups.filter group-> group_name_matches = case group_filter of @@ -74,11 +75,15 @@ type Suite succ_tests = all_results.filter (_.is_success) . length failed_tests = all_results.filter (_.is_fail) . length skipped_tests = all_results.filter (_.is_pending) . length - IO.println <| succ_tests.to_text + " tests succeeded." - IO.println <| failed_tests.to_text + " tests failed." - IO.println <| skipped_tests.to_text + " tests skipped." - exit_code = if failed_tests > 0 then 1 else 0 - System.exit exit_code + case should_exit of + True -> + IO.println <| succ_tests.to_text + " tests succeeded." + IO.println <| failed_tests.to_text + " tests failed." + IO.println <| skipped_tests.to_text + " tests skipped." 
+ exit_code = if failed_tests > 0 then 1 else 0 + System.exit exit_code + False -> + failed_tests == 0 group_names self = self.groups.map (_.name) From 6496a8613d9b0cb15ebf8ab67f071b77899767d9 Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Thu, 25 Jan 2024 16:59:12 +0100 Subject: [PATCH 92/93] Fix some typos --- test/Table_Tests/src/In_Memory/Table_Date_Spec.enso | 2 +- test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test/Table_Tests/src/In_Memory/Table_Date_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Date_Spec.enso index 5c8c07db9577..bab520709c74 100644 --- a/test/Table_Tests/src/In_Memory/Table_Date_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Date_Spec.enso @@ -54,7 +54,7 @@ add_specs suite_builder = data.table.at 'To' . should_equal (Column.from_vector data.c_to.first data.c_to.second) data.table.should_equal data.expected - suite_builder.group "Should be able to serialise a data.table with Dates to Text" group_builder-> + suite_builder.group "Should be able to serialise a table with Dates to Text" group_builder-> group_builder.specify "should serialise back to input" <| expected_text = normalize_lines <| (enso_project.data / "prime_ministers.csv").read_text diff --git a/test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso index b96dd46a7c74..c679a4ce0cbc 100644 --- a/test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso @@ -48,7 +48,7 @@ add_specs suite_builder = data.table.at 'Posting time' . 
should_equal (Column.from_vector data.c_time.first data.c_time.second) data.table.should_equal data.expected - suite_builder.group "Should be able to serialise a data.table with Time_Of_Days to Text" group_builder-> + suite_builder.group "Should be able to serialise a table with Time_Of_Days to Text" group_builder-> group_builder.specify "should serialise back to input" <| expected_text = normalize_lines <| (enso_project.data / "time_of_day_sample_normalized_hours.csv").read_text From 31a340a82aa8d9ef60c18deeaace15942c01b9ed Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 26 Jan 2024 11:41:53 +0100 Subject: [PATCH 93/93] fmt --- test/Table_Tests/src/Database/README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/test/Table_Tests/src/Database/README.md b/test/Table_Tests/src/Database/README.md index 55c9fab8341d..822e321797ca 100644 --- a/test/Table_Tests/src/Database/README.md +++ b/test/Table_Tests/src/Database/README.md @@ -30,8 +30,10 @@ the test suite where to connect. If that variable is not set, the test suite will attempt to connect to a local server. ### Setup via Docker -The following commands will download the latest Postgres Docker image and run -it with the default configuration with password "pwd": + +The following commands will download the latest Postgres Docker image and run it +with the default configuration with password "pwd": + ```sh docker pull postgres:latest docker run -it -e POSTGRES_PASSWORD=pwd -p 5432:5432 postgres