
Commit

changed some variable names
sshin23 committed Jul 30, 2023
1 parent a1ce6e7 commit bbf1a6d
Showing 10 changed files with 144 additions and 142 deletions.
8 changes: 4 additions & 4 deletions ext/SIMDiffKernelAbstractions.jl
@@ -250,22 +250,22 @@ end

@kernel function kerh(y1,y2,@Const(f),@Const(itr),@Const(x),@Const(adj1),@Const(adj2))
I = @index(Global)
- SIMDiff.hrpass0(f.f(itr[I], SIMDiff.TripleSource(x)), f.comp2, y1, y2, SIMDiff.offset2(f,I), 0, adj1, adj2)
+ SIMDiff.hrpass0(f.f(itr[I], SIMDiff.SecondAdjointNodeSource(x)), f.comp2, y1, y2, SIMDiff.offset2(f,I), 0, adj1, adj2)
end

@kernel function kerh2(y1,y2,@Const(f),@Const(itr),@Const(x),@Const(adjs1),@Const(adj2))
I = @index(Global)
- SIMDiff.hrpass0(f.f(itr[I], SIMDiff.TripleSource(x)), f.comp2, y1, y2, SIMDiff.offset2(f,I), 0, adjs1[SIMDiff.offset0(f,itr,I)], adj2)
+ SIMDiff.hrpass0(f.f(itr[I], SIMDiff.SecondAdjointNodeSource(x)), f.comp2, y1, y2, SIMDiff.offset2(f,I), 0, adjs1[SIMDiff.offset0(f,itr,I)], adj2)
end

@kernel function kerj(y1,y2,@Const(f),@Const(itr),@Const(x),@Const(adj))
I = @index(Global)
- SIMDiff.jrpass(f.f(itr[I], SIMDiff.DualSource(x)), f.comp1, SIMDiff.offset0(f,itr,I), y1, y2, SIMDiff.offset1(f,I), 0, adj)
+ SIMDiff.jrpass(f.f(itr[I], SIMDiff.AdjointNodeSource(x)), f.comp1, SIMDiff.offset0(f,itr,I), y1, y2, SIMDiff.offset1(f,I), 0, adj)
end

@kernel function kerg(y,@Const(f),@Const(itr),@Const(x),@Const(adj))
I = @index(Global)
- SIMDiff.grpass(f.f(itr[I], SIMDiff.DualSource(x)), f.comp1, y, SIMDiff.offset1(f,I), 0, adj)
+ SIMDiff.grpass(f.f(itr[I], SIMDiff.AdjointNodeSource(x)), f.comp1, y, SIMDiff.offset1(f,I), 0, adj)
end

@kernel function kerf(y,@Const(f),@Const(itr),@Const(x))
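For context, these @kernel definitions only specify the per-element work. Below is a minimal sketch of how one of them might be launched with KernelAbstractions.jl; the backend choice, the preparation of y, f, itr, x, and adj, and the synchronize call are assumptions about the surrounding extension code, not part of this commit.

# Hedged sketch: launching the gradient kernel kerg on a KernelAbstractions backend.
# Assumes y, f, itr, x, and adj have been prepared by the extension's wrapper code.
using KernelAbstractions

backend = CPU()                                           # or a GPU backend such as CUDABackend()
kerg(backend)(y, f, itr, x, adj; ndrange = length(itr))   # one work item per element of itr
KernelAbstractions.synchronize(backend)                   # wait for the launched kernel to finish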
4 changes: 1 addition & 3 deletions src/SIMDiff.jl
@@ -3,8 +3,6 @@ module SIMDiff
import NLPModels

include("graph.jl")
include("dual.jl")
include("triple.jl")
include("register.jl")
include("functionlist.jl")
include("function.jl")
@@ -14,6 +12,6 @@ include("hessian.jl")
include("nlp.jl")
include("templates.jl")

- export data, variable, objective, constraint, constraint!, WrapperModel
+ export data, variable, objective, constraint, constraint!

end # module SIMDiffes
29 changes: 0 additions & 29 deletions src/dual.jl

This file was deleted. (Its Dual* node types are superseded by the AdjointNode* types added to src/graph.jl further down in this commit.)

4 changes: 2 additions & 2 deletions src/function.jl
@@ -19,11 +19,11 @@ function Func(gen::Base.Generator, o0=0, o1=0, o2=0)
f = gen.f(p)


- d = f(Identity(),DualSource(NaNSource{Float16}()))
+ d = f(Identity(),AdjointNodeSource(NaNSource{Float16}()))
y1 = []
SIMDiff.grpass(d,nothing,y1,nothing,0,NaN16)

- t = f(Identity(),TripleSource(NaNSource{Float16}()))
+ t = f(Identity(),SecondAdjointNodeSource(NaNSource{Float16}()))
y2 = []
SIMDiff.hrpass0(t,nothing,y2,nothing,nothing,0,NaN16,NaN16)

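A hedged reading of the probe above: evaluating the generator's function on NaN-valued sources builds an adjoint-node tree whose leaves carry only variable indices, so the despecialized grpass/hrpass0 methods can record the first- and second-order sparsity patterns in y1 and y2 without computing real derivatives. The tree in the sketch below is built by hand purely for illustration (the package would obtain it from its operator-overloading layer), using the node types added to src/graph.jl in this commit.

# Illustrative sketch (not from the repository): sparsity probe for x[1] * x[2].
# All values are NaN16 placeholders; only the variable indices matter here.
d = SIMDiff.AdjointNode2(*, NaN16, NaN16, NaN16,
                         SIMDiff.AdjointNodeVar(1, NaN16), SIMDiff.AdjointNodeVar(2, NaN16))
y1 = []
SIMDiff.grpass(d, nothing, y1, nothing, 0, NaN16)
y1 == [1, 2]   # the gradient structurally involves variables 1 and 2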
20 changes: 10 additions & 10 deletions src/gradient.jl
@@ -1,53 +1,53 @@
- @inbounds @inline function drpass(d::D, y, adj) where D <: Dual1
+ @inbounds @inline function drpass(d::D, y, adj) where D <: AdjointNode1
offset = drpass(d.inner, y, adj * d.y)
nothing
end
- @inbounds @inline function drpass(d::D, y, adj) where D <: Dual2
+ @inbounds @inline function drpass(d::D, y, adj) where D <: AdjointNode2
offset = drpass(d.inner1, y, adj * d.y1)
offset = drpass(d.inner2, y, adj * d.y2)
nothing
end
- @inbounds @inline function drpass(d::D, y, adj) where D <: DualVar
+ @inbounds @inline function drpass(d::D, y, adj) where D <: AdjointNodeVar
y[d.i] += adj
nothing
end
@inbounds @inline function drpass(f::F, x, y, adj) where F <: Func
end
function gradient!(y,f,x, adj)
@simd for k in eachindex(f.itr)
- drpass(f.f.f(f.itr[k],DualSource(x)), y, adj)
+ drpass(f.f.f(f.itr[k],AdjointNodeSource(x)), y, adj)
end
return y
end


- @inbounds @inline function grpass(d::D, comp, y, o1, cnt, adj) where D <: Dual1
+ @inbounds @inline function grpass(d::D, comp, y, o1, cnt, adj) where D <: AdjointNode1
cnt = grpass(d.inner, comp, y, o1, cnt, adj * d.y)
return cnt
end
- @inbounds @inline function grpass(d::D, comp, y, o1, cnt, adj) where D <: Dual2
+ @inbounds @inline function grpass(d::D, comp, y, o1, cnt, adj) where D <: AdjointNode2
cnt = grpass(d.inner1, comp, y, o1, cnt, adj * d.y1)
cnt = grpass(d.inner2, comp, y, o1, cnt, adj * d.y2)
return cnt
end
- @inbounds @inline function grpass(d::D, comp, y, o1, cnt, adj) where D <: DualVar
+ @inbounds @inline function grpass(d::D, comp, y, o1, cnt, adj) where D <: AdjointNodeVar
y[o1 + comp(cnt += 1)] += adj
return cnt
end

- @inbounds @inline function grpass(d::DualVar, comp::Nothing, y, o1, cnt, adj) # despecialization
+ @inbounds @inline function grpass(d::AdjointNodeVar, comp::Nothing, y, o1, cnt, adj) # despecialization
push!(y,d.i)
return (cnt += 1)
end
- @inbounds @inline function grpass(d::D, comp, y::V, o1, cnt, adj) where {D <: DualVar, V <: AbstractVector{Tuple{Int,Int}}}
+ @inbounds @inline function grpass(d::D, comp, y::V, o1, cnt, adj) where {D <: AdjointNodeVar, V <: AbstractVector{Tuple{Int,Int}}}
ind = o1 + comp(cnt += 1)
y[ind] = (d.i,ind)
return cnt
end

function sgradient!(y,f,x, adj)
@simd for k in eachindex(f.itr)
- grpass(f.f.f(f.itr[k], DualSource(x)), f.itr.comp1, y, offset1(f,k), 0, adj)
+ grpass(f.f.f(f.itr[k], AdjointNodeSource(x)), f.itr.comp1, y, offset1(f,k), 0, adj)
end
return y
end
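To make the renamed recursion concrete, here is a small hand-built example, assuming the tree for sin(x[1]) has been produced by the operator-overloading layer (not shown in this diff): each AdjointNode1 stores its local partial in y, and drpass multiplies adjoints down to the AdjointNodeVar leaf, which accumulates into the gradient vector.

# Hedged sketch: reverse pass over a hand-built node for f(x) = sin(x[1]).
x = [0.5]
leaf = SIMDiff.AdjointNodeVar(1, x[1])                          # leaf referencing x[1]
node = SIMDiff.AdjointNode1(sin, sin(x[1]), cos(x[1]), leaf)    # y holds the local partial cos(x[1])
g = zeros(1)
SIMDiff.drpass(node, g, 1.0)    # accumulates g[1] += 1.0 * cos(x[1])
g[1] ≈ cos(0.5)                 # true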
70 changes: 68 additions & 2 deletions src/graph.jl
@@ -1,8 +1,8 @@
abstract type AbstractIndex end
abstract type AbstractNode end
abstract type AbstractPar <: AbstractNode end
- abstract type AbstractDual end
- abstract type AbstractTriple end
+ abstract type AbstractAdjointNode end
+ abstract type AbstractSecondAdjointNode end

struct Var{I} <: AbstractNode
i::I
@@ -58,3 +58,69 @@ struct NaNSource{T} <: AbstractVector{T} end
@inbounds (v::ParIndexed)(i::Identity,x) = NaN16 # despecialized
@inbounds (v::Par)(i::Identity,x) = NaN16 # despecialized
@inbounds (v::Var)(i::Identity,x) = x[v.i] # despecialized


struct AdjointNode1{F, T, I} <: AbstractAdjointNode
x::T
y::T
inner::I
end
struct AdjointNode2{F, T, I1,I2} <: AbstractAdjointNode
x::T
y1::T
y2::T
inner1::I1
inner2::I2
end
struct AdjointNodeVar{I,T} <: AbstractAdjointNode
i::I
x::T
end
struct AdjointNodeSource{T, VT <: AbstractVector{T}}
inner::VT
end
struct AdjointNodeNullSource end

@inline AdjointNode1(f::F,x::T,y,inner::I) where {F,T,I} = AdjointNode1{F,T,I}(x,y,inner)
@inline AdjointNode2(f::F,x::T,y1,y2,inner1::I1,inner2::I2) where {F,T,I1,I2} = AdjointNode2{F,T,I1,I2}(x,y1,y2,inner1,inner2)


AdjointNodeSource(::Nothing) = AdjointNodeNullSource()

@inbounds @inline Base.getindex(x::I,i) where I <: AdjointNodeNullSource = AdjointNodeVar(i,NaN16)
@inbounds @inline Base.getindex(x::I,i) where I <: AdjointNodeSource = AdjointNodeVar(i, x.inner[i])


struct SecondAdjointNode1{F, T, I} <: AbstractSecondAdjointNode
x::T
y::T
h::T
inner::I
end
struct SecondAdjointNode2{F, T, I1,I2} <: AbstractSecondAdjointNode
x::T
y1::T
y2::T
h11::T
h12::T
h22::T
inner1::I1
inner2::I2
end

struct SecondAdjointNodeVar{I,T} <: AbstractSecondAdjointNode
i::I
x::T
end
struct SecondAdjointNodeSource{T, VT <: AbstractVector{T}}
inner::VT
end

@inline SecondAdjointNode1(f::F,x::T,y,h,inner::I) where {F,T,I} = SecondAdjointNode1{F,T,I}(x,y,h,inner)
@inline SecondAdjointNode2(f::F,x::T,y1,y2,h11,h12,h22,inner1::I1,inner2::I2) where {F,T,I1,I2} = SecondAdjointNode2{F,T,I1,I2}(x,y1,y2,h11,h12,h22,inner1,inner2)

struct SecondAdjointNodeNullSource end
SecondAdjointNodeSource(::Nothing) = SecondAdjointNodeNullSource()

@inbounds @inline Base.getindex(x::I,i) where I <: SecondAdjointNodeNullSource = SecondAdjointNodeVar(i,NaN)
@inbounds @inline Base.getindex(x::I,i) where I <: SecondAdjointNodeSource = SecondAdjointNodeVar(i, x.inner[i])
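A short hedged illustration of the source types added above (the concrete values are arbitrary): indexing an AdjointNodeSource wraps the stored value in an AdjointNodeVar leaf, the null source hands back NaN-valued leaves used only for structure detection, and the second-order types behave analogously.

# Hedged illustration of the new source/leaf types (values chosen for illustration only).
xs = SIMDiff.AdjointNodeSource([1.0, 2.0])
xs[2]                                    # AdjointNodeVar(2, 2.0)
SIMDiff.AdjointNodeSource(nothing)[2]    # AdjointNodeVar(2, NaN16), structure-only probe

ys = SIMDiff.SecondAdjointNodeSource([1.0, 2.0])
ys[1]                                    # SecondAdjointNodeVar(1, 1.0)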
(The remaining 4 changed files are not shown in this view.)
