sbplib_julia: comparison DiffOps/src/laplace.jl @ 244:a827568fc251 boundary_conditions
Fix NormalDerivative and add tests
author | Jonatan Werpers <jonatan@werpers.com> |
---|---|
date | Wed, 26 Jun 2019 21:22:36 +0200 |
parents | 9819243102dd |
children | d9e262cb2e8d ed29ee13e92e |
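
The comparison below (243:01a67d1b8b5d on the left, 244:a827568fc251 on the right) adds range_size and domain_size methods for NormalDerivative, replaces the getregion-based early return in LazyTensors.apply with a direct region(d.bId) check, and negates and re-indexes the closure term in the Upper branch (dClosure[N_i-i] with a minus sign, instead of dClosure[N_i-j]). As a rough illustration of that indexing and sign pattern, here is a minimal sketch; it is not part of the changeset, and dclosure, h_inv, N and boundary_term are hypothetical stand-ins for d.op.dClosure, d.grid.inverse_spacing[dim(d.bId)], size(d.grid)[dim(d.bId)] and the per-point kernel.

```julia
# Minimal sketch, not from the repository: how the corrected Upper branch mirrors
# and negates the offset-indexed boundary closure used for the Lower branch.
using OffsetArrays

dclosure = OffsetVector([-3/2, 2.0, -1/2], 0:2)  # hypothetical offset-indexed boundary closure
h_inv = 10.0                                     # hypothetical inverse grid spacing
N = 21                                           # hypothetical grid size in the normal direction

# Contribution at grid index i along the normal direction, scaled by the boundary datum v_j.
function boundary_term(i, v_j, upper::Bool)
    if upper
        return -h_inv * dclosure[N - i] * v_j   # mirrored index and flipped sign, as in the new code
    else
        return  h_inv * dclosure[i - 1] * v_j   # offset-indexed from the lower boundary point
    end
end

boundary_term(1, 1.0, false)   # ≈ -15.0 at the lower boundary
boundary_term(N, 1.0, true)    # ≈  15.0 at the upper boundary
```
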
243:01a67d1b8b5d | 244:a827568fc251 |
---|---|
8 grid::EquidistantGrid | 8 grid::EquidistantGrid |
9 bId::CartesianBoundary | 9 bId::CartesianBoundary |
10 end | 10 end |
11 export NormalDerivative | 11 export NormalDerivative |
12 | 12 |
| 13 # TODO: This is obviouly strange. Is domain_size just discarded? Is there a way to avoid storing grid in BoundaryValue? |
| 14 # Can we give special treatment to TensorMappings that go to a higher dim? |
| 15 LazyTensors.range_size(e::NormalDerivative{T}, domain_size::NTuple{1,Integer}) where T = size(e.grid) |
| 16 LazyTensors.domain_size(e::NormalDerivative{T}, range_size::NTuple{2,Integer}) where T = (range_size[3-dim(e.bId)],) |
| 17 |
13 # Not correct abstraction level | 18 # Not correct abstraction level |
14 # TODO: Not type stable D:< | 19 # TODO: Not type stable D:< |
15 function LazyTensors.apply(d::NormalDerivative, v::AbstractArray, I::NTuple{2,Int}) | 20 function LazyTensors.apply(d::NormalDerivative, v::AbstractArray, I::NTuple{2,Int}) |
16 i = I[dim(d.bId)] | 21 i = I[dim(d.bId)] |
17 j = I[3-dim(d.bId)] | 22 j = I[3-dim(d.bId)] |
18 N_i = d.grid.size[dim(d.bId)] | 23 N_i = size(d.grid)[dim(d.bId)] |
19 | 24 |
20 r = getregion(i, closureSize(d.op), N_i) | 25 if region(d.bId) == Lower |
21 | |
22 if r != region(d.bId) | |
23 return 0 | |
24 end | |
25 | |
26 if r == Lower | |
27 # Note, closures are indexed by offset. Fix this D:< | 26 # Note, closures are indexed by offset. Fix this D:< |
28 return d.grid.inverse_spacing[dim(d.bId)]*d.op.dClosure[i-1]*v[j] | 27 return d.grid.inverse_spacing[dim(d.bId)]*d.op.dClosure[i-1]*v[j] |
29 elseif r == Upper | 28 elseif region(d.bId) == Upper |
30 return d.grid.inverse_spacing[dim(d.bId)]*d.op.dClosure[N_i-j]*v[j] | 29 return -d.grid.inverse_spacing[dim(d.bId)]*d.op.dClosure[N_i-i]*v[j] |
31 end | 30 end |
32 end | 31 end |
33 | 32 |
34 function LazyTensors.apply_transpose(d::NormalDerivative, v::AbstractArray, I::NTuple{1,Int}) | 33 function LazyTensors.apply_transpose(d::NormalDerivative, v::AbstractArray, I::NTuple{1,Int}) |
35 u = selectdim(v,3-dim(d.bId),I) | 34 u = selectdim(v,3-dim(d.bId),I[1]) |
36 return apply_d(d.op, d.grid.inverse_spacing[dim(d.bId)], u, region(d.bId)) | 35 return apply_d(d.op, d.grid.inverse_spacing[dim(d.bId)], u, region(d.bId)) |
37 end | 36 end |
38 | 37 |
39 | 38 |
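
The apply_transpose hunk above changes selectdim(v, 3-dim(d.bId), I) to selectdim(v, 3-dim(d.bId), I[1]). A small, self-contained sketch of why that matters (the data below is illustrative only): Base.selectdim expects a plain index for the selected dimension, so the one-element tuple I has to be unpacked first.

```julia
# Illustrative only, not taken from the repository.
v = reshape(1.0:12.0, 3, 4)   # stand-in for a 2D grid function
I = (2,)                      # NTuple{1,Int}, as apply_transpose receives it
u = selectdim(v, 2, I[1])     # 3-element view of column 2
# selectdim(v, 2, I) would throw, since a Tuple is not a valid array index.
```
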
40 """ | 39 """ |
50 export BoundaryValue | 49 export BoundaryValue |
51 | 50 |
52 # TODO: This is obviouly strange. Is domain_size just discarded? Is there a way to avoid storing grid in BoundaryValue? | 51 # TODO: This is obviouly strange. Is domain_size just discarded? Is there a way to avoid storing grid in BoundaryValue? |
53 # Can we give special treatment to TensorMappings that go to a higher dim? | 52 # Can we give special treatment to TensorMappings that go to a higher dim? |
54 LazyTensors.range_size(e::BoundaryValue{T}, domain_size::NTuple{1,Integer}) where T = size(e.grid) | 53 LazyTensors.range_size(e::BoundaryValue{T}, domain_size::NTuple{1,Integer}) where T = size(e.grid) |
55 LazyTensors.domain_size(e::BoundaryValue{T}, range_size::NTuple{2,Integer}) where T = (range_size[3-dim(e.bId)],); | 54 LazyTensors.domain_size(e::BoundaryValue{T}, range_size::NTuple{2,Integer}) where T = (range_size[3-dim(e.bId)],) |
56 | 55 |
57 function LazyTensors.apply(e::BoundaryValue, v::AbstractArray, I::NTuple{2,Int}) | 56 function LazyTensors.apply(e::BoundaryValue, v::AbstractArray, I::NTuple{2,Int}) |
58 i = I[dim(e.bId)] | 57 i = I[dim(e.bId)] |
59 j = I[3-dim(e.bId)] | 58 j = I[3-dim(e.bId)] |
60 N_i = size(e.grid)[dim(e.bId)] | 59 N_i = size(e.grid)[dim(e.bId)] |