changeset 1232:a8fa8c1137cc refactor/grids

Merge refactor/LazyTensors/tuple_manipulation
author Jonatan Werpers <jonatan@werpers.com>
date Sun, 19 Feb 2023 22:07:57 +0100
parents 5f677cd6f0b6 (current diff) de6a9635f293 (diff)
children 3924c1f6ec6d
files Notes.md
diffstat 5 files changed, 88 insertions(+), 91 deletions(-)
--- a/Notes.md	Sat Feb 18 11:37:35 2023 +0100
+++ b/Notes.md	Sun Feb 19 22:07:57 2023 +0100
@@ -388,3 +388,9 @@
 
 ## Name of the `VolumeOperator` type for constant stencils
 It seems that the name is too general. The name of the method `volume_operator` makes sense. It should return different types of `LazyTensor` specialized for the grid. A suggestion for a better name is `ConstantStencilVolumeOperator`.
+
+
+## Implementation of LazyOuterProduct
+Could the implementation of LazyOuterProduct be simplified by making it a
+struct containing two or more LazyTensors? (using split_tuple in a similar way
+to TensorGrid)
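The note above lends itself to a small sketch. The following is a hypothetical illustration only, not the package's implementation: the type name `OuterProductSketch`, its field, and the helper `split_range_index` are invented, `apply`/`apply_transpose` are deliberately omitted, and only the interface functions visible elsewhere in this changeset (`range_size`, `domain_size`, `range_dim`) plus the new tuple helpers are assumed.

```julia
# Hypothetical sketch (invented names), assuming the LazyTensors interface
# seen in this changeset; apply/apply_transpose are intentionally omitted.
using Sbplib.LazyTensors
import Sbplib.LazyTensors: range_size, domain_size, range_dim

struct OuterProductSketch{TT<:Tuple}
    tensors::TT   # the LazyTensor factors of the outer product, e.g. (A, B)
end

# Sizes are the concatenation of the factors' sizes.
range_size(op::OuterProductSketch)  = LazyTensors.concatenate_tuples(map(range_size, op.tensors)...)
domain_size(op::OuterProductSketch) = LazyTensors.concatenate_tuples(map(domain_size, op.tensors)...)

# A multi-index over the full range splits into one index tuple per factor,
# in the same way the note suggests TensorGrid uses split_tuple.
split_range_index(op::OuterProductSketch, I...) =
    LazyTensors.split_tuple(I, map(range_dim, op.tensors))
```

Storing the factors as a tuple would let both the sizes and the index handling reuse `concatenate_tuples` and `split_tuple`, which is the simplification the note asks about.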
--- a/TODO.md	Sat Feb 18 11:37:35 2023 +0100
+++ b/TODO.md	Sun Feb 19 22:07:57 2023 +0100
@@ -5,7 +5,6 @@
  - [ ] Rename variables and functions so that they follow the style guide
  - [ ] Add new Laplace operator to DiffOps, probably named WaveEqOp(?!!?)
  - [ ] Create a struct that bundles the necessary Tensor operators for solving the wave equation.
- - [ ] Replace getindex hack for flattening tuples with flatten_tuple. (eg. `getindex.(range_size.(L.D2),1)`)
  - [ ] Use `@inferred` in a lot of tests.
  - [ ] Replace `@inferred` tests with a benchmark suite that automatically tests for regressions.
  - [ ] Make sure we are setting tolerances in tests in a consistent way
--- a/src/LazyTensors/lazy_tensor_operations.jl	Sat Feb 18 11:37:35 2023 +0100
+++ b/src/LazyTensors/lazy_tensor_operations.jl	Sun Feb 19 22:07:57 2023 +0100
@@ -176,7 +176,7 @@
 # TODO: Implement some pretty printing in terms of ⊗. E.g InflatedTensor(I(3),B,I(2)) -> I(3)⊗B⊗I(2)
 
 function range_size(itm::InflatedTensor)
-    return flatten_tuple(
+    return concatenate_tuples(
         range_size(itm.before),
         range_size(itm.tm),
         range_size(itm.after),
@@ -184,7 +184,7 @@
 end
 
 function domain_size(itm::InflatedTensor)
-    return flatten_tuple(
+    return concatenate_tuples(
         domain_size(itm.before),
         domain_size(itm.tm),
         domain_size(itm.after),
@@ -197,7 +197,7 @@
     dim_range = range_dim(itm.tm)
     dim_after = range_dim(itm.after)
 
-    view_index, inner_index = split_index(Val(dim_before), Val(dim_domain), Val(dim_range), Val(dim_after), I...)
+    view_index, inner_index = split_index(dim_before, dim_domain, dim_range, dim_after, I...)
 
     v_inner = view(v, view_index...)
     return apply(itm.tm, v_inner, inner_index...)
@@ -209,7 +209,7 @@
     dim_range = range_dim(itm.tm)
     dim_after = range_dim(itm.after)
 
-    view_index, inner_index = split_index(Val(dim_before), Val(dim_range), Val(dim_domain), Val(dim_after), I...)
+    view_index, inner_index = split_index(dim_before, dim_range, dim_domain, dim_after, I...)
 
     v_inner = view(v, view_index...)
     return apply_transpose(itm.tm, v_inner, inner_index...)
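With the `Val`s dropped, the calls above read more directly: in `apply`, `dim_view` is the domain dimension of `itm.tm` and `dim_index` its range dimension (and vice versa in `apply_transpose`). A small illustration with assumed sizes, a 1-D `before`, an inner tensor mapping a 3-D domain to a 2-D range, and a 1-D `after`:

```julia
# Assumed sizes, for illustration only: dim_before = 1, dim_domain = 3,
# dim_range = 2, dim_after = 1, applied at the range index (1, 2, 3, 4).
view_index, inner_index = LazyTensors.split_index(1, 3, 2, 1, 1, 2, 3, 4)
# view_index  == (1, :, :, :, 4)   the slice of `v` handed to `itm.tm` via `view`
# inner_index == (2, 3)            the index at which `itm.tm` is applied
```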
--- a/src/LazyTensors/tuple_manipulation.jl	Sat Feb 18 11:37:35 2023 +0100
+++ b/src/LazyTensors/tuple_manipulation.jl	Sun Feb 19 22:07:57 2023 +0100
@@ -1,11 +1,12 @@
 """
-    split_index(::Val{dim_before}, ::Val{dim_view}, ::Val{dim_index}, ::Val{dim_after}, I...)
+    split_index(dim_before, dim_view, dim_index, dim_after, I...)
 
 Splits the multi-index `I` into two parts: one part that is expected to be
 used as a view, and one that is expected to be used as an index.
 E.g.
-```
-split_index(Val(1),Val(3),Val(2),Val(1),(1,2,3,4)) -> (1,:,:,:,4), (2,3)
+```julia-repl
+julia> LazyTensors.split_index(1, 3, 2, 1, (1,2,3,4)...)
+((1, Colon(), Colon(), Colon(), 4), (2, 3))
 ```
 
 `dim_view` controls how many colons are in the view, and `dim_index` controls
@@ -18,62 +19,52 @@
  * `length(view_index) == dim_before + dim_view + dim_after`
  * `length(I_middle) == dim_index`
 """
-function split_index(::Val{dim_before}, ::Val{dim_view}, ::Val{dim_index}, ::Val{dim_after}, I...) where {dim_before,dim_view, dim_index,dim_after}
-    I_before, I_middle, I_after = split_tuple(I, Val(dim_before), Val(dim_index))
+function split_index(dim_before, dim_view, dim_index, dim_after, I...)
+    @inline
+    I_before, I_middle, I_after = split_tuple(I, (dim_before, dim_index, dim_after))
 
     view_index = (I_before..., ntuple((i)->:, dim_view)..., I_after...)
 
     return view_index, I_middle
 end
 
-# TODO: Can this be replaced by something more elegant while still being type stable? 2020-10-21
-# See:
-# https://github.com/JuliaLang/julia/issues/34884
-# https://github.com/JuliaLang/julia/issues/30386
-"""
-    slice_tuple(t, Val(l), Val(u))
-
-Get a slice of a tuple in a type stable way.
-Equivalent to `t[l:u]` but type stable.
-"""
-function slice_tuple(t,::Val{L},::Val{U}) where {L,U}
-    return ntuple(i->t[i+L-1], U-L+1)
-end
 
 """
-    split_tuple(t::Tuple{...}, ::Val{M}) where {N,M}
+    split_tuple(t, szs)
+
+Split the tuple `t` into a set of tuples of the sizes given in `szs`.
+`sum(szs)` should equal `length(t)`.
 
-Split the tuple `t` into two parts. the first part is `M` long.
 E.g.
-```julia
-split_tuple((1,2,3,4),Val(3)) -> (1,2,3), (4,)
+```julia-repl
+julia> LazyTensors.split_tuple((1,2,3,4,5,6), (3,1,2))
+((1, 2, 3), (4,), (5, 6))
 ```
 """
-function split_tuple(t::NTuple{N,Any},::Val{M}) where {N,M}
-    return slice_tuple(t,Val(1), Val(M)), slice_tuple(t,Val(M+1), Val(N))
+function split_tuple(t, szs)
+    @inline
+    if length(t) != sum(szs; init=0)
+        throw(ArgumentError("length(t) must equal sum(szs)"))
+    end
+
+    rs = sizes_to_ranges(szs)
+    return map(r->t[r], rs)
 end
 
-"""
-    split_tuple(t::Tuple{...},::Val{M},::Val{K}) where {N,M,K}
-
-Same as `split_tuple(t::NTuple{N},::Val{M})` but splits the tuple in three parts. With the first
-two parts having lenght `M` and `K`.
-"""
-function split_tuple(t::NTuple{N,Any},::Val{M},::Val{K}) where {N,M,K}
-    p1, tail = split_tuple(t, Val(M))
-    p2, p3 = split_tuple(tail, Val(K))
-    return p1,p2,p3
+function sizes_to_ranges(szs)
+    cum_szs = cumsum((0, szs...))
+    return ntuple(i->cum_szs[i]+1:cum_szs[i+1], length(szs))
 end
 
 
 """
-    flatten_tuple(t)
+    concatenate_tuples(t...)
 
-Takes a nested tuple and flattens the whole structure
+Concatenate tuples.
 """
-flatten_tuple(t::NTuple{N, Number} where N) = t
-flatten_tuple(t::Tuple) = ((flatten_tuple.(t)...)...,) # simplify?
-flatten_tuple(ts::Vararg) = flatten_tuple(ts)
+concatenate_tuples(t::Tuple,ts::Vararg{Tuple}) = (t..., concatenate_tuples(ts...)...)
+concatenate_tuples(t::Tuple) = t
+
 
 """
     left_pad_tuple(t, val, N)
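A REPL-style illustration of how the new helpers compose (assuming `Sbplib.LazyTensors` is loaded, as in the tests); splitting by `szs` and concatenating the parts recovers the original tuple:

```julia-repl
julia> szs = (3, 1, 2);

julia> LazyTensors.sizes_to_ranges(szs)
(1:3, 4:4, 5:6)

julia> parts = LazyTensors.split_tuple((1, 2, 3, 4, 5, 6), szs)
((1, 2, 3), (4,), (5, 6))

julia> LazyTensors.concatenate_tuples(parts...)
(1, 2, 3, 4, 5, 6)
```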
--- a/test/LazyTensors/tuple_manipulation_test.jl	Sat Feb 18 11:37:35 2023 +0100
+++ b/test/LazyTensors/tuple_manipulation_test.jl	Sun Feb 19 22:07:57 2023 +0100
@@ -2,63 +2,64 @@
 using Sbplib.LazyTensors
 
 @testset "split_index" begin
-    @test LazyTensors.split_index(Val(2),Val(1),Val(2),Val(2),1,2,3,4,5,6) == ((1,2,:,5,6),(3,4))
-    @test LazyTensors.split_index(Val(2),Val(3),Val(2),Val(2),1,2,3,4,5,6) == ((1,2,:,:,:,5,6),(3,4))
-    @test LazyTensors.split_index(Val(3),Val(1),Val(1),Val(2),1,2,3,4,5,6) == ((1,2,3,:,5,6),(4,))
-    @test LazyTensors.split_index(Val(3),Val(2),Val(1),Val(2),1,2,3,4,5,6) == ((1,2,3,:,:,5,6),(4,))
-    @test LazyTensors.split_index(Val(1),Val(1),Val(2),Val(3),1,2,3,4,5,6) == ((1,:,4,5,6),(2,3))
-    @test LazyTensors.split_index(Val(1),Val(2),Val(2),Val(3),1,2,3,4,5,6) == ((1,:,:,4,5,6),(2,3))
+    @test LazyTensors.split_index(2,1,2,2, 1,2,3,4,5,6) == ((1,2,:,5,6),(3,4))
+    @test LazyTensors.split_index(2,3,2,2, 1,2,3,4,5,6) == ((1,2,:,:,:,5,6),(3,4))
+    @test LazyTensors.split_index(3,1,1,2, 1,2,3,4,5,6) == ((1,2,3,:,5,6),(4,))
+    @test LazyTensors.split_index(3,2,1,2, 1,2,3,4,5,6) == ((1,2,3,:,:,5,6),(4,))
+    @test LazyTensors.split_index(1,1,2,3, 1,2,3,4,5,6) == ((1,:,4,5,6),(2,3))
+    @test LazyTensors.split_index(1,2,2,3, 1,2,3,4,5,6) == ((1,:,:,4,5,6),(2,3))
 
-    @test LazyTensors.split_index(Val(0),Val(1),Val(3),Val(3),1,2,3,4,5,6) == ((:,4,5,6),(1,2,3))
-    @test LazyTensors.split_index(Val(3),Val(1),Val(3),Val(0),1,2,3,4,5,6) == ((1,2,3,:),(4,5,6))
-
-    @inferred LazyTensors.split_index(Val(2),Val(3),Val(2),Val(2),1,2,3,2,2,4)
-end
+    @test LazyTensors.split_index(0,1,3,3, 1,2,3,4,5,6) == ((:,4,5,6),(1,2,3))
+    @test LazyTensors.split_index(3,1,3,0, 1,2,3,4,5,6) == ((1,2,3,:),(4,5,6))
 
-@testset "slice_tuple" begin
-    @test LazyTensors.slice_tuple((1,2,3),Val(1), Val(3)) == (1,2,3)
-    @test LazyTensors.slice_tuple((1,2,3,4,5,6),Val(2), Val(5)) == (2,3,4,5)
-    @test LazyTensors.slice_tuple((1,2,3,4,5,6),Val(1), Val(3)) == (1,2,3)
-    @test LazyTensors.slice_tuple((1,2,3,4,5,6),Val(4), Val(6)) == (4,5,6)
+    split_index_static(::Val{dim_before}, ::Val{dim_view}, ::Val{dim_index}, ::Val{dim_after}, I...) where {dim_before,dim_view,dim_index,dim_after} = LazyTensors.split_index(dim_before, dim_view, dim_index, dim_after, I...)
+    @inferred split_index_static(Val(2),Val(3),Val(2),Val(2),1,2,3,2,2,4)
 end
 
 @testset "split_tuple" begin
-    @testset "2 parts" begin
-        @test LazyTensors.split_tuple((),Val(0)) == ((),())
-        @test LazyTensors.split_tuple((1,),Val(0)) == ((),(1,))
-        @test LazyTensors.split_tuple((1,),Val(1)) == ((1,),())
-
-        @test LazyTensors.split_tuple((1,2,3,4),Val(0)) == ((),(1,2,3,4))
-        @test LazyTensors.split_tuple((1,2,3,4),Val(1)) == ((1,),(2,3,4))
-        @test LazyTensors.split_tuple((1,2,3,4),Val(2)) == ((1,2),(3,4))
-        @test LazyTensors.split_tuple((1,2,3,4),Val(3)) == ((1,2,3),(4,))
-        @test LazyTensors.split_tuple((1,2,3,4),Val(4)) == ((1,2,3,4),())
-
-        @test LazyTensors.split_tuple((1,2,true,4),Val(3)) == ((1,2,true),(4,))
+    @testset "general" begin
+        @test LazyTensors.split_tuple((),()) == ()
+        @test LazyTensors.split_tuple((),(0,)) == ((),)
+        @test LazyTensors.split_tuple((1,), (1,)) == tuple((1,))
+        @test LazyTensors.split_tuple((1,2), (1,1)) == tuple((1,),(2,))
+        @test LazyTensors.split_tuple((1,2), (0,1,1)) == tuple((),(1,),(2,))
+        @test LazyTensors.split_tuple((1,2), (1,0,1)) == tuple((1,),(),(2,))
+        @test LazyTensors.split_tuple((1,2), (1,1,0)) == tuple((1,),(2,),())
+        @test LazyTensors.split_tuple((1,2,3,4), (2,0,1,1)) == tuple((1,2),(),(3,),(4,))
 
-        @inferred LazyTensors.split_tuple((1,2,3,4),Val(3))
-        @inferred LazyTensors.split_tuple((1,2,true,4),Val(3))
-    end
+        err_msg = "length(t) must equal sum(szs)"
+        @test_throws ArgumentError(err_msg) LazyTensors.split_tuple((), (2,))
+        @test_throws ArgumentError(err_msg) LazyTensors.split_tuple((2,), ())
+        @test_throws ArgumentError(err_msg) LazyTensors.split_tuple((1,), (2,))
+        @test_throws ArgumentError(err_msg) LazyTensors.split_tuple((1,2), (1,2))
+        @test_throws ArgumentError(err_msg) LazyTensors.split_tuple((1,2), (1))
 
-    @testset "3 parts" begin
-        @test LazyTensors.split_tuple((),Val(0),Val(0)) == ((),(),())
-        @test LazyTensors.split_tuple((1,2,3),Val(1), Val(1)) == ((1,),(2,),(3,))
-        @test LazyTensors.split_tuple((1,true,3),Val(1), Val(1)) == ((1,),(true,),(3,))
-
-        @test LazyTensors.split_tuple((1,2,3,4,5,6),Val(1),Val(2)) == ((1,),(2,3),(4,5,6))
-        @test LazyTensors.split_tuple((1,2,3,4,5,6),Val(3),Val(2)) == ((1,2,3),(4,5),(6,))
-
-        @inferred LazyTensors.split_tuple((1,2,3,4,5,6),Val(3),Val(2))
-        @inferred LazyTensors.split_tuple((1,true,3),Val(1), Val(1))
+        split_tuple_static(t, ::Val{SZS}) where {SZS} = LazyTensors.split_tuple(t,SZS)
+        @inferred split_tuple_static((1,2,3,4,5,6), Val((3,1,2)))
+        @inferred split_tuple_static((1,2,3,4),Val((3,1)))
+        @inferred split_tuple_static((1,2,true,4),Val((3,1)))
+        @inferred split_tuple_static((1,2,3,4,5,6),Val((3,2,1)))
+        @inferred split_tuple_static((1,true,3),Val((1,1,1)))
     end
 end
 
-@testset "flatten_tuple" begin
-    @test LazyTensors.flatten_tuple((1,)) == (1,)
-    @test LazyTensors.flatten_tuple((1,2,3,4,5,6)) == (1,2,3,4,5,6)
-    @test LazyTensors.flatten_tuple((1,2,(3,4),5,6)) == (1,2,3,4,5,6)
-    @test LazyTensors.flatten_tuple((1,2,(3,(4,5)),6)) == (1,2,3,4,5,6)
-    @test LazyTensors.flatten_tuple(((1,2),(3,4),(5,),6)) == (1,2,3,4,5,6)
+@testset "sizes_to_ranges" begin
+    @test LazyTensors.sizes_to_ranges((1,)) == (1:1,)
+    @test LazyTensors.sizes_to_ranges((2,)) == (1:2,)
+    @test LazyTensors.sizes_to_ranges((2,3)) == (1:2,3:5)
+    @test LazyTensors.sizes_to_ranges((3,2,4)) == (1:3,4:5,6:9)
+    @test LazyTensors.sizes_to_ranges((0,2)) == (1:0,1:2)
+    @test LazyTensors.sizes_to_ranges((2,0)) == (1:2,2:1)
+    @test LazyTensors.sizes_to_ranges((2,0,3)) == (1:2,2:1,3:5)
+end
+
+@testset "concatenate_tuples" begin
+    @test LazyTensors.concatenate_tuples(()) == ()
+    @test LazyTensors.concatenate_tuples((1,)) == (1,)
+    @test LazyTensors.concatenate_tuples((1,), ()) == (1,)
+    @test LazyTensors.concatenate_tuples((),(1,)) == (1,)
+    @test LazyTensors.concatenate_tuples((1,2,3),(4,5)) == (1,2,3,4,5)
+    @test LazyTensors.concatenate_tuples((1,2,3),(4,5),(6,7)) == (1,2,3,4,5,6,7)
 end
 
 @testset "left_pad_tuple" begin