changeset 274:11010bb74260 boundary_conditions

Dispatch getindex for TensorMappingApplication on region indices. Dispatch apply for TensorMappingBinaryOperation on region indices. Update tests. Update todo
author Vidar Stiernström <vidar.stiernstrom@it.uu.se>
date Mon, 06 Jan 2020 10:54:48 +0100
parents babc4288e6a6
children 591609cdcd9b
files LazyTensors/Manifest.toml LazyTensors/Project.toml LazyTensors/src/LazyTensors.jl LazyTensors/src/lazy_tensor_operations.jl LazyTensors/test/runtests.jl Manifest.toml RegionIndices/src/RegionIndices.jl TODO.txt
diffstat 8 files changed, 53 insertions(+), 33 deletions(-) [+]
line wrap: on
line diff
--- a/LazyTensors/Manifest.toml	Mon Jan 06 10:48:38 2020 +0100
+++ b/LazyTensors/Manifest.toml	Mon Jan 06 10:54:48 2020 +0100
@@ -1,2 +1,6 @@
 # This file is machine-generated - editing it directly is not advised
 
+[[RegionIndices]]
+path = "../RegionIndices"
+uuid = "5d527584-97f1-11e9-084c-4540c7ecf219"
+version = "0.1.0"
--- a/LazyTensors/Project.toml	Mon Jan 06 10:48:38 2020 +0100
+++ b/LazyTensors/Project.toml	Mon Jan 06 10:54:48 2020 +0100
@@ -3,6 +3,9 @@
 authors = ["Jonatan Werpers <jonatan.werpers@it.uu.se>"]
 version = "0.1.0"
 
+[deps]
+RegionIndices = "5d527584-97f1-11e9-084c-4540c7ecf219"
+
 [extras]
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
--- a/LazyTensors/src/LazyTensors.jl	Mon Jan 06 10:48:38 2020 +0100
+++ b/LazyTensors/src/LazyTensors.jl	Mon Jan 06 10:54:48 2020 +0100
@@ -1,5 +1,5 @@
 module LazyTensors
-
+using RegionIndices
 include("tensor_mapping.jl")
 include("lazy_array.jl")
 include("lazy_tensor_operations.jl")
--- a/LazyTensors/src/lazy_tensor_operations.jl	Mon Jan 06 10:48:38 2020 +0100
+++ b/LazyTensors/src/lazy_tensor_operations.jl	Mon Jan 06 10:54:48 2020 +0100
@@ -14,14 +14,14 @@
 export LazyTensorMappingApplication
 
 Base.:*(tm::TensorMapping{T,R,D}, o::AbstractArray{T,D}) where {T,R,D} = LazyTensorMappingApplication(tm,o)
-
-Base.getindex(ta::LazyTensorMappingApplication{T,R,D}, I::Vararg{Int,R}) where {T,R,D} = apply(ta.t, ta.o, I)
+Base.getindex(ta::LazyTensorMappingApplication{T,R,D}, I::Vararg{Index{<:Region},R}) where {T,R,D} = apply(ta.t, ta.o, I)
+Base.getindex(ta::LazyTensorMappingApplication{T,R,D}, I::Vararg{Int,R}) where {T,R,D} = apply(ta.t, ta.o, Index{Unknown}.(I))
 Base.size(ta::LazyTensorMappingApplication{T,R,D}) where {T,R,D} = range_size(ta.t,size(ta.o))
 # TODO: What else is needed to implement the AbstractArray interface?
 
 # # We need the associativity to be a→b→c = a→(b→c), which is the case for '→'
 Base.:*(a::TensorMapping{T,R,D}, b::TensorMapping{T,D,K}, args::Union{TensorMapping{T}, AbstractArray{T}}...) where {T,R,D,K} = foldr(*,(a,b,args...))
-# # Should we overload some other infix binary operator?
+# # Should we overload some other infix binary operator?
 # →(tm::TensorMapping{T,R,D}, o::AbstractArray{T,D}) where {T,R,D} = LazyTensorMappingApplication(tm,o)
 # TODO: We need to be really careful about good error messages.
 # For example what happens if you try to multiply LazyTensorMappingApplication with a TensorMapping(wrong order)?
@@ -41,11 +41,11 @@
 export LazyTensorMappingTranspose
 
 # # TBD: Should this be implemented on a type by type basis or through a trait to provide earlier errors?
-Base.adjoint(t::TensorMapping) = LazyTensorMappingTranspose(t)
-Base.adjoint(t::LazyTensorMappingTranspose) = t.tm
+Base.adjoint(tm::TensorMapping) = LazyTensorMappingTranspose(tm)
+Base.adjoint(tmt::LazyTensorMappingTranspose) = tmt.tm
 
-apply(tm::LazyTensorMappingTranspose{T,R,D}, v::AbstractArray{T,R}, I::NTuple{D,Int}) where {T,R,D} = apply_transpose(tm.tm, v, I)
-apply_transpose(tm::LazyTensorMappingTranspose{T,R,D}, v::AbstractArray{T,D}, I::NTuple{R,Int}) where {T,R,D} = apply(tm.tm, v, I)
+apply(tmt::LazyTensorMappingTranspose{T,R,D}, v::AbstractArray{T,R}, I::NTuple{D,Int}) where {T,R,D} = apply_transpose(tmt.tm, v, I)
+apply_transpose(tmt::LazyTensorMappingTranspose{T,R,D}, v::AbstractArray{T,D}, I::NTuple{R,Int}) where {T,R,D} = apply(tmt.tm, v, I)
 
 range_size(tmt::LazyTensorMappingTranspose{T,R,D}, d_size::NTuple{R,Integer}) where {T,R,D} = domain_size(tmt.tm, d_size)
 domain_size(tmt::LazyTensorMappingTranspose{T,R,D}, r_size::NTuple{D,Integer}) where {T,R,D} = range_size(tmt.tm, r_size)
@@ -54,22 +54,22 @@
 
 
 struct LazyTensorMappingBinaryOperation{Op,T,R,D,T1<:TensorMapping{T,R,D},T2<:TensorMapping{T,R,D}} <: TensorMapping{T,D,R}
-    A::T1
-    B::T2
+    tm1::T1
+    tm2::T2
 
-    @inline function LazyTensorMappingBinaryOperation{Op,T,R,D}(A::T1,B::T2) where {Op,T,R,D, T1<:TensorMapping{T,R,D},T2<:TensorMapping{T,R,D}}
-        return new{Op,T,R,D,T1,T2}(A,B)
+    @inline function LazyTensorMappingBinaryOperation{Op,T,R,D}(tm1::T1,tm2::T2) where {Op,T,R,D, T1<:TensorMapping{T,R,D},T2<:TensorMapping{T,R,D}}
+        return new{Op,T,R,D,T1,T2}(tm1,tm2)
     end
 end
 
-apply(mb::LazyTensorMappingBinaryOperation{:+,T,R,D}, v::AbstractArray{T,D}, I::NTuple{R,Int}) where {T,R,D} = apply(mb.A, v, I...) + apply(mb.B,v,I...)
-apply(mb::LazyTensorMappingBinaryOperation{:-,T,R,D}, v::AbstractArray{T,D}, I::NTuple{R,Int}) where {T,R,D} = apply(mb.A, v, I...) - apply(mb.B,v,I...)
+apply(tmBinOp::LazyTensorMappingBinaryOperation{:+,T,R,D}, v::AbstractArray{T,D}, I::NTuple{R,Index{<:Region}}) where {T,R,D} = apply(tmBinOp.tm1, v, I) + apply(tmBinOp.tm2, v, I)
+apply(tmBinOp::LazyTensorMappingBinaryOperation{:-,T,R,D}, v::AbstractArray{T,D}, I::NTuple{R,Index{<:Region}}) where {T,R,D} = apply(tmBinOp.tm1, v, I) - apply(tmBinOp.tm2, v, I)
 
-range_size(mp::LazyTensorMappingBinaryOperation{Op,T,R,D}, domain_size::NTuple{D,Integer}) where {Op,T,R,D} = range_size(mp.A, domain_size)
-domain_size(mp::LazyTensorMappingBinaryOperation{Op,T,R,D}, range_size::NTuple{R,Integer}) where {Op,T,R,D} = domain_size(mp.A, range_size)
+range_size(tmBinOp::LazyTensorMappingBinaryOperation{Op,T,R,D}, domain_size::NTuple{D,Integer}) where {Op,T,R,D} = range_size(tmBinOp.tm1, domain_size)
+domain_size(tmBinOp::LazyTensorMappingBinaryOperation{Op,T,R,D}, range_size::NTuple{R,Integer}) where {Op,T,R,D} = domain_size(tmBinOp.tm2, range_size)
 
-Base.:+(A::TensorMapping{T,R,D}, B::TensorMapping{T,R,D}) where {T,R,D} = LazyTensorMappingBinaryOperation{:+,T,R,D}(A,B)
-Base.:-(A::TensorMapping{T,R,D}, B::TensorMapping{T,R,D}) where {T,R,D} = LazyTensorMappingBinaryOperation{:-,T,R,D}(A,B)
+Base.:+(tm1::TensorMapping{T,R,D}, tm2::TensorMapping{T,R,D}) where {T,R,D} = LazyTensorMappingBinaryOperation{:+,T,R,D}(tm1,tm2)
+Base.:-(tm1::TensorMapping{T,R,D}, tm2::TensorMapping{T,R,D}) where {T,R,D} = LazyTensorMappingBinaryOperation{:-,T,R,D}(tm1,tm2)
 
 
 # TODO: Write tests and documentation for LazyTensorMappingComposition
--- a/LazyTensors/test/runtests.jl	Mon Jan 06 10:48:38 2020 +0100
+++ b/LazyTensors/test/runtests.jl	Mon Jan 06 10:54:48 2020 +0100
@@ -1,12 +1,13 @@
 using Test
 using LazyTensors
+using RegionIndices
 
 @testset "Generic Mapping methods" begin
     struct DummyMapping{T,R,D} <: TensorMapping{T,R,D} end
-    LazyTensors.apply(m::DummyMapping{T,R,D}, v, i::NTuple{R,Int}) where {T,R,D} = :apply
+    LazyTensors.apply(m::DummyMapping{T,R,D}, v, i::NTuple{R,Index{<:Region}}) where {T,R,D} = :apply
     @test range_dim(DummyMapping{Int,2,3}()) == 2
     @test domain_dim(DummyMapping{Int,2,3}()) == 3
-    @test apply(DummyMapping{Int,2,3}(), zeros(Int, (0,0,0)),(0,0)) == :apply
+    @test apply(DummyMapping{Int,2,3}(), zeros(Int, (0,0,0)),(Index{Unknown}(0),Index{Unknown}(0))) == :apply
 end
 
 @testset "Generic Operator methods" begin
@@ -38,7 +39,7 @@
 @testset "TensorApplication" begin
     struct DummyMapping{T,R,D} <: TensorMapping{T,R,D} end
 
-    LazyTensors.apply(m::DummyMapping{T,R,D}, v, i::NTuple{R,Int}) where {T,R,D} = (:apply,v,i)
+    LazyTensors.apply(m::DummyMapping{T,R,D}, v, i::NTuple{R,Index{<:Region}}) where {T,R,D} = (:apply,v,i)
     LazyTensors.apply_transpose(m::DummyMapping{T,R,D}, v, i::NTuple{D,Int}) where {T,R,D} = :apply_transpose
 
     LazyTensors.range_size(m::DummyMapping{T,R,D}, domain_size::NTuple{D,Integer}) where {T,R,D} = 2 .* domain_size
@@ -49,11 +50,15 @@
     v = [0,1,2]
     @test m*v isa AbstractVector{Int}
     @test size(m*v) == 2 .*size(v)
-    @test (m*v)[0] == (:apply,v,(0,))
+    @test (m*v)[Index{Upper}(0)] == (:apply,v,(Index{Upper}(0),))
+    @test (m*v)[0] == (:apply,v,(Index{Unknown}(0),))
     @test m*m*v isa AbstractVector{Int}
-    @test (m*m*v)[1] == (:apply,m*v,(1,))
-    @test (m*m*v)[3] == (:apply,m*v,(3,))
-    @test (m*m*v)[6] == (:apply,m*v,(6,))
+    @test (m*m*v)[Index{Upper}(1)] == (:apply,m*v,(Index{Upper}(1),))
+    @test (m*m*v)[1] == (:apply,m*v,(Index{Unknown}(1),))
+    @test (m*m*v)[Index{Interior}(3)] == (:apply,m*v,(Index{Interior}(3),))
+    @test (m*m*v)[3] == (:apply,m*v,(Index{Unknown}(3),))
+    @test (m*m*v)[Index{Lower}(6)] == (:apply,m*v,(Index{Lower}(6),))
+    @test (m*m*v)[6] == (:apply,m*v,(Index{Unknown}(6),))
     @test_broken BoundsError == (m*m*v)[0]
     @test_broken BoundsError == (m*m*v)[7]
 
@@ -66,7 +71,7 @@
         λ::T
     end
 
-    LazyTensors.apply(m::ScalingOperator{T,D}, v, I::Tuple{Int}) where {T,D} = m.λ*v[I...]
+    LazyTensors.apply(m::ScalingOperator{T,D}, v, I::Tuple{Index{<:Region}}) where {T,D} = m.λ*v[I...]
 
     A = ScalingOperator{Int,1}(2)
 
@@ -79,7 +84,7 @@
         λ::T
     end
 
-    LazyTensors.apply(m::ScalarMapping{T,R,D}, v, i) where {T,R,D} = m.λ*v[i]
+    LazyTensors.apply(m::ScalarMapping{T,R,D}, v, I::Tuple{Index{<:Region}}) where {T,R,D} = m.λ*v[I...]
     LazyTensors.range_size(m::ScalarMapping, domain_size) = domain_size
     LazyTensors.domain_size(m::ScalarMapping, range_sizes) = range_sizes
 
@@ -87,7 +92,6 @@
     B = ScalarMapping{Float64,1,1}(3.0)
 
     v = [1.1,1.2,1.3]
-
     for i ∈ eachindex(v)
         @test ((A+B)*v)[i] == 2*v[i] + 3*v[i]
     end
--- a/Manifest.toml	Mon Jan 06 10:48:38 2020 +0100
+++ b/Manifest.toml	Mon Jan 06 10:54:48 2020 +0100
@@ -24,6 +24,7 @@
 uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
 
 [[LazyTensors]]
+deps = ["RegionIndices"]
 path = "LazyTensors"
 uuid = "62fbed2c-918d-11e9-279b-eb3a325b37d3"
 version = "0.1.0"
@@ -36,16 +37,16 @@
 uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
 
 [[OffsetArrays]]
-git-tree-sha1 = "1af2f79c7eaac3e019a0de41ef63335ff26a0a57"
+git-tree-sha1 = "87d0a91efe29352d5caaa271ae3927083c096e33"
 uuid = "6fe1bfb0-de20-5000-8ca7-80f57d26f881"
-version = "0.11.1"
+version = "0.11.4"
 
 [[Random]]
 deps = ["Serialization"]
 uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 
 [[RegionIndices]]
-path = "RegionIndices"
+path = "/Users/vidar/dev/hg/sbpteam/sbplib_julia/RegionIndices"
 uuid = "5d527584-97f1-11e9-084c-4540c7ecf219"
 version = "0.1.0"
 
--- a/RegionIndices/src/RegionIndices.jl	Mon Jan 06 10:48:38 2020 +0100
+++ b/RegionIndices/src/RegionIndices.jl	Mon Jan 06 10:54:48 2020 +0100
@@ -30,6 +30,7 @@
 Base.convert(::Type{CartesianIndex}, I::NTuple{N,Index} where N) = CartesianIndex(convert.(Int, I))
 
 Base.Int(I::Index) = I.i
+Base.to_index(I::Index) = Int(I) #How to get this to work for all cases??
 
 function Index(i::Integer, boundary_width::Integer, dim_size::Integer)
     return Index{getregion(i,boundary_width,dim_size)}(i)
--- a/TODO.txt	Mon Jan 06 10:48:38 2020 +0100
+++ b/TODO.txt	Mon Jan 06 10:54:48 2020 +0100
@@ -8,9 +8,16 @@
 
 Profilera
 
-Konvertera till paket
 Skriv tester
 
 Specificera operatorer i TOML eller något liknande?
 
-Ska TensorMapping hantera regionerna? Abstraktion utanför?
+Borde det finns motsvarande apply_stencil för apply_quadrature,
+apply_boundary_value och apply_normal_derivative?
+
+Borde man alltid skicka in N som parameter i apply_2nd_derivative, t.ex som i
+apply_quadrature?
+
+Just nu agerar apply_normal_derivative, apply_boundary_value inte på v som
+en vektor, utan randvärdet plockas ut utanför. Känns inte konsistent med övrig
+design