Mercurial > repos > public > sbplib_julia
diff src/LazyTensors/lazy_tensor_operations.jl @ 992:bc384aaade30 refactor/lazy_tensors
Add a bunch of todos and add a ScalingTensor
author | Jonatan Werpers <jonatan@werpers.com> |
---|---|
date | Fri, 18 Mar 2022 17:28:07 +0100 |
parents | 043d13ef8898 |
children | 55ab7801c45f |
line wrap: on
line diff
--- a/src/LazyTensors/lazy_tensor_operations.jl	Wed Mar 16 18:39:00 2022 +0100
+++ b/src/LazyTensors/lazy_tensor_operations.jl	Fri Mar 18 17:28:07 2022 +0100
@@ -1,3 +1,5 @@
+# TBD: Is there a good way to split this file?
+
 """
     LazyTensorMappingApplication{T,R,D} <: LazyArray{T,R}
@@ -186,7 +188,27 @@
     @boundscheck check_domain_size(tm, range_size(tmi))
     return tmi
 end
+# TODO: Implement the above as TensorMappingComposition instead
+# TODO: Move the operator definitions to one place
+"""
+    ScalingTensor{T,D} <: TensorMapping{T,D,D}
+
+A Lazy tensor operator that scales its input with `λ`.
+"""
+struct ScalingTensor{T,D} <: TensorMapping{T,D,D}
+    λ::T
+    size::NTuple{D,Int}
+end
+
+LazyTensors.apply(tm::ScalingTensor{T,D}, v::AbstractArray{<:Any,D}, I::Vararg{Any,D}) where {T,D} = tm.λ*v[I...]
+LazyTensors.apply_transpose(tm::ScalingTensor{T,D}, v::AbstractArray{<:Any,D}, I::Vararg{Any,D}) where {T,D} = tm.λ*v[I...]
+
+LazyTensors.range_size(m::ScalingTensor) = m.size
+LazyTensors.domain_size(m::ScalingTensor) = m.size
+
+# TODO: Rename everything with mapping
+# TODO: Remove ScalingOperator from tests
 """
     InflatedTensorMapping{T,R,D} <: TensorMapping{T,R,D}