sbplib_julia: changeset 323:b2ddc5e4d41a (Merge)
author   | Vidar Stiernström <vidar.stiernstrom@it.uu.se>
date     | Thu, 24 Sep 2020 21:04:25 +0200
parents  | 777063b6f049 (current diff) 277dff5b071a (diff)
children | 047dee8efaef 4c8f1e9c6d73
diffstat | 5 files changed, 35 insertions(+), 31 deletions(-)
--- a/DiffOps/Manifest.toml	Wed Sep 09 21:42:55 2020 +0200
+++ b/DiffOps/Manifest.toml	Thu Sep 24 21:04:25 2020 +0200
@@ -18,6 +18,7 @@
 uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
 
 [[LazyTensors]]
+deps = ["RegionIndices"]
 path = "../LazyTensors"
 uuid = "62fbed2c-918d-11e9-279b-eb3a325b37d3"
 version = "0.1.0"
--- a/LazyTensors/test/runtests.jl	Wed Sep 09 21:42:55 2020 +0200
+++ b/LazyTensors/test/runtests.jl	Thu Sep 24 21:04:25 2020 +0200
@@ -19,8 +19,8 @@
 @testset "Mapping transpose" begin
     struct DummyMapping{T,R,D} <: TensorMapping{T,R,D} end
 
-    LazyTensors.apply(m::DummyMapping{T,R,D}, v, I::NTuple{R,Index{<:Region}}) where {T,R,D} = :apply
-    LazyTensors.apply_transpose(m::DummyMapping{T,R,D}, v, I::NTuple{D,Index{<:Region}}) where {T,R,D} = :apply_transpose
+    LazyTensors.apply(m::DummyMapping{T,R,D}, v, I::Vararg{Index{<:Region},R}) where {T,R,D} = :apply
+    LazyTensors.apply_transpose(m::DummyMapping{T,R,D}, v, I::Vararg{Index{<:Region},D}) where {T,R,D} = :apply_transpose
 
     LazyTensors.range_size(m::DummyMapping{T,R,D}, domain_size::NTuple{D,Integer}) where {T,R,D} = :range_size
     LazyTensors.domain_size(m::DummyMapping{T,R,D}, range_size::NTuple{R,Integer}) where {T,R,D} = :domain_size
@@ -29,9 +29,9 @@
     I = Index{Unknown}(0)
     @test m' isa TensorMapping{Float64, 3,2}
     @test m'' == m
-    @test apply(m',zeros(Float64,(0,0)), (I,I,I)) == :apply_transpose
-    @test apply(m'',zeros(Float64,(0,0,0)),(I,I)) == :apply
-    @test apply_transpose(m', zeros(Float64,(0,0,0)),(I,I)) == :apply
+    @test apply(m',zeros(Float64,(0,0)), I, I, I) == :apply_transpose
+    @test apply(m'',zeros(Float64,(0,0,0)), I, I) == :apply
+    @test apply_transpose(m', zeros(Float64,(0,0,0)), I, I) == :apply
 
     @test range_size(m', (0,0)) == :domain_size
     @test domain_size(m', (0,0,0)) == :range_size
@@ -40,7 +40,7 @@
 @testset "TensorApplication" begin
     struct DummyMapping{T,R,D} <: TensorMapping{T,R,D} end
 
-    LazyTensors.apply(m::DummyMapping{T,R,D}, v, i::NTuple{R,Index{<:Region}}) where {T,R,D} = (:apply,v,i)
+    LazyTensors.apply(m::DummyMapping{T,R,D}, v, i::Vararg{Index{<:Region},R}) where {T,R,D} = (:apply,v,i)
     LazyTensors.range_size(m::DummyMapping{T,R,D}, domain_size::NTuple{D,Integer}) where {T,R,D} = 2 .* domain_size
     LazyTensors.domain_size(m::DummyMapping{T,R,D}, range_size::NTuple{R,Integer}) where {T,R,D} = range_size.÷2
@@ -75,7 +75,7 @@
         λ::T
     end
 
-    LazyTensors.apply(m::ScalingOperator{T,D}, v, I::NTuple{D, Index}) where {T,D} = m.λ*v[I]
+    LazyTensors.apply(m::ScalingOperator{T,D}, v, I::Vararg{Index,D}) where {T,D} = m.λ*v[I]
 
     m = ScalingOperator{Int,1}(2)
     v = [1,2,3]
@@ -94,7 +94,7 @@
         λ::T
     end
 
-    LazyTensors.apply(m::ScalarMapping{T,R,D}, v, I::Tuple{Index{<:Region}}) where {T,R,D} = m.λ*v[I...]
+    LazyTensors.apply(m::ScalarMapping{T,R,D}, v, I::Vararg{Index{<:Region}}) where {T,R,D} = m.λ*v[I...]
     LazyTensors.range_size(m::ScalarMapping, domain_size) = domain_size
     LazyTensors.domain_size(m::ScalarMapping, range_sizes) = range_sizes
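The recurring change in this file is the switch from tuple-typed index arguments to `Vararg`, matching the TODO item "Redo all Tensor applies to take Vararg instead of a tuple of Index". A minimal self-contained sketch of the dispatch pattern, using plain `Int` indices and a hypothetical `Dummy` type in place of the package's `TensorMapping` and `Index{<:Region}`:

```julia
# Sketch only: `Dummy`, `apply_old` and `apply_new` are illustrative names,
# not part of LazyTensors.
struct Dummy{T,R,D} end

# Old style: the R indices arrive packed in a tuple.
apply_old(m::Dummy{T,R,D}, v, I::NTuple{R,Int}) where {T,R,D} = :apply

# New style: the indices are trailing positional arguments; `Vararg{Int,R}`
# still lets dispatch enforce that exactly R of them are passed.
apply_new(m::Dummy{T,R,D}, v, I::Vararg{Int,R}) where {T,R,D} = :apply

m = Dummy{Float64,2,3}()
apply_old(m, zeros(0, 0), (1, 2))  # tuple built at the call site
apply_new(m, zeros(0, 0), 1, 2)    # plain arguments; `I` is still a tuple in the body
```

The call sites stay flat (`apply(m', v, I, I, I)` instead of `apply(m', v, (I,I,I))`), while the arity check that `NTuple{R,...}` provided is preserved by the `Vararg` length parameter.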
--- a/Manifest.toml	Wed Sep 09 21:42:55 2020 +0200
+++ b/Manifest.toml	Thu Sep 24 21:04:25 2020 +0200
@@ -46,7 +46,7 @@
 uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 
 [[RegionIndices]]
-path = "/Users/vidar/dev/hg/sbpteam/sbplib_julia/RegionIndices"
+path = "RegionIndices"
 uuid = "5d527584-97f1-11e9-084c-4540c7ecf219"
 version = "0.1.0"
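For context on the Manifest fixes above (the added `deps` entry and the machine-specific absolute path replaced by a repo-relative one): re-dev'ing the subpackage from the repository root with standard Pkg commands is one way such an entry gets recorded. This is a hedged sketch, not a command taken from the repo, and it assumes a relative path passed to `Pkg.develop` is recorded as relative in the Manifest:

```julia
# Sketch: re-record the dev dependency so Manifest.toml stays portable
# across machines. Run from the repository root.
using Pkg
Pkg.activate(".")                   # the top-level project of this repo
Pkg.develop(path="RegionIndices")   # a relative path avoids baking in /Users/...
```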
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/Notes.md	Thu Sep 24 21:04:25 2020 +0200
@@ -0,0 +1,22 @@
+# Notes
+
+## Known size of range and domain?
+It might be a good idea to let TensorMappings know the size of their range and domain as a constant. This probably can't be enforced on the abstract type, but maybe we should write our difference operators this way. Having this as the default should clean up the thinking around adjoints of boundary operators. It could also simplify getting high performance out of repeated application of regioned TensorMappings.
+Is there any reason to use a trait to differentiate between fixed size and unknown size?
+
+## Test setup
+Once we figure out how to organize the subpackages we should update the test folders to use Project files. As of writing this there seems to be an issue with this approach combined with dev'ed packages, so we can't do it yet. It seems that Pkg might fix this in the future.
+
+## Research and thinking
+ - [ ] Use a trait to indicate if a TensorMapping uses indices with regions.
+   The default should be that they do NOT.
+ - [ ] What to name this trait? Can we call it IndexStyle but not export it to avoid conflicts with Base.IndexStyle?
+ - [ ] Use a trait to indicate that a TensorMapping has the same range and domain?
+ - [ ] Rename all the Tensor stuff to just LazyOperator, LazyApplication and so on?
+ - [ ] Figure out repeated application of regioned TensorMappings. Maybe an instance of a tensor mapping needs to know the exact size of the range and domain for this to work?
+ - [ ] Check how the native Julia doc generator works
+ - [ ] Check if Vidar's design docs fit in there
+ - [ ] Formalize how range_size() and domain_size() are supposed to work in TensorMappings where dim(domain) != dim(range) (add tests or document)
+ - [ ] Create a macro @lazy which replaces a binary op (+,-) with its lazy equivalent? Would be a neat way to indicate which evaluations are lazy without cluttering/confusing with special characters.
+ - [ ] Specify operators in TOML or something similar?
+   H.. H_gamma etc.)
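The first two items in the new "Research and thinking" list ask for a region trait with a do-NOT default, named so it does not clash with `Base.IndexStyle`. A hypothetical sketch of that idea using Julia's usual trait ("Holy trait") pattern; none of these names (`RegionStyle`, `HasRegions`, `NoRegions`, `region_style`) exist in the package, they only illustrate the mechanism:

```julia
abstract type TensorMapping{T,R,D} end

# Trait types; an unexported name avoids the Base.IndexStyle conflict.
abstract type RegionStyle end
struct HasRegions <: RegionStyle end
struct NoRegions  <: RegionStyle end

# Default, per the note: mappings do NOT use regioned indices.
region_style(::Type{<:TensorMapping}) = NoRegions()

# An operator opts in by overriding the trait for its own type.
struct RegionedOp{T,R,D} <: TensorMapping{T,R,D} end
region_style(::Type{<:RegionedOp}) = HasRegions()

uses_regions(m::TensorMapping) = region_style(typeof(m)) === HasRegions()
```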
--- a/TODO.md	Wed Sep 09 21:42:55 2020 +0200
+++ b/TODO.md	Thu Sep 24 21:04:25 2020 +0200
@@ -8,26 +8,10 @@
  - [ ] Add new Laplace operator to DiffOps, probably named WaveEqOp(?!!?)
  - [ ] Add 1D operators (D1, D2, e, d ... ) as TensorOperators
  - [ ] Create a struct that bundles the necessary Tensor operators for solving the wave equation.
- - [ ] Use traits like IndexStyle, IndexLinear, IndexCartesian to differentiate
-   TensorMappings that are flexible in size and those that are fixed in size
- - [ ] Use traits for symmetric tensor mappings such that apply_transpose = apply for all such mappings
- - [x] Move Laplace tensor operator to a different package
- - [x] Remove grid as a property of the Laplace tensor operator
- - [ ] Update how dependencies are handled for tests. This was updated in Julia v1.2 and would allow us to use test-specific dev packages.
+ - [ ] Add a quick and simple way of running all tests for all subpackages.
 
-## Research and thinking
- - [ ] Redo all Tensor applies to take Vararg instead of a tuple of Index?
-   Have we been down that road before? Is there any reason not to do this?
- - [ ] Check how the native Julia doc generator works
- - [ ] Check if Vidar's design docs fit in there
- - [ ] Formalize how range_size() and domain_size() are supposed to work in TensorMappings where dim(domain) != dim(range) (add tests or document)
- - [x] Should there be some kind of collection struct for SBP operators (as TensorOperators), providing easy access to all parts (D2, e, d, -> YES!
-   H.. H_gamma etc.)
- - [x] Is "missing" a good value for unknown dimension sizes (of `e*g` for example)
- - [ ] Create a macro @lazy which replaces a binary op (+,-) with its lazy equivalent? Would be a neat way to indicate which evaluations are lazy without cluttering/confusing with special characters.
-# Wrap up task
-
+# Wrap up tasks
  - [ ] Check that we have @inbounds and @propagate_inbounds in the right places
  - [ ] Check that we do bounds checks everywhere and that they are marked with @boundscheck
  - [ ] Check that we have @inline in the right places
@@ -42,7 +26,4 @@
 apply_quadrature?
 
 Right now apply_normal_derivative and apply_boundary_value do not act on v as
-a vector; instead the boundary value is extracted outside. Doesn't feel consistent with the rest of the
-design
-
-Specify operators in TOML or something similar?
+a vector; instead the boundary value is extracted outside. Doesn't feel consistent with the rest of the design.
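The first three wrap-up tasks concern Julia's bounds-checking macros. A minimal sketch of the idiom they refer to; `Wrapped` and its `getindex` method are illustrative, not types from the repo:

```julia
struct Wrapped{T}
    data::Vector{T}
end

# @propagate_inbounds lets a caller's @inbounds reach the inner access;
# @boundscheck marks the check so it can be elided in that case.
Base.@propagate_inbounds function Base.getindex(w::Wrapped, i::Int)
    @boundscheck checkbounds(w.data, i)
    return @inbounds w.data[i]
end

w = Wrapped([1.0, 2.0, 3.0])
w[2]                  # checked access
f(x) = @inbounds x[2] # @inbounds only takes effect inside a function
f(w)                  # check elided thanks to @propagate_inbounds
```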