From 7a0ae41e7ad6eee0430122e3104f04e0c57943b0 Mon Sep 17 00:00:00 2001
From: AstitvaAggarwal
Date: Fri, 24 Apr 2026 16:50:04 +0100
Subject: [PATCH 1/4] Add minimal gradient API: GradientOrder trait,
 value_and_gradient!!, value_and_jacobian!!

---
 Project.toml         |   5 ++-
 src/ADTypes.jl       |   7 +++
 src/gradient_api.jl  | 103 +++++++++++++++++++++++++++++++++++++++++++
 test/gradient_api.jl |  23 ++++++++++
 test/public.jl       |   5 +++
 test/runtests.jl     |   3 ++
 6 files changed, 145 insertions(+), 1 deletion(-)
 create mode 100644 src/gradient_api.jl
 create mode 100644 test/gradient_api.jl

diff --git a/Project.toml b/Project.toml
index 6316e86..f5c7648 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,5 +1,6 @@
 name = "ADTypes"
 uuid = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
+version = "1.21.0"
 authors = ["Vaibhav Dixit , Guillaume Dalle and contributors"]
 version = "1.22.0"
 
@@ -17,6 +18,7 @@ ADTypesEnzymeCoreExt = "EnzymeCore"
 ChainRulesCore = "1.0.2"
 ConstructionBase = "1.5"
 EnzymeCore = "0.5.3,0.6,0.7,0.8"
+Mooncake = "0.5.26"
 julia = "1.10"
 
 [extras]
@@ -24,8 +26,9 @@ Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
 ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
 EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869"
 JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b"
+Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6"
 Setfield = "efcf1570-3423-57d1-acb7-fd33fddbac46"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
 [targets]
-test = ["Aqua", "ChainRulesCore", "EnzymeCore", "JET", "Setfield", "Test"]
+test = ["Aqua", "ChainRulesCore", "EnzymeCore", "JET", "Mooncake", "Setfield", "Test"]
diff --git a/src/ADTypes.jl b/src/ADTypes.jl
index de5f756..8f81b1f 100644
--- a/src/ADTypes.jl
+++ b/src/ADTypes.jl
@@ -25,6 +25,7 @@ include("dense.jl")
 include("sparse.jl")
 include("legacy.jl")
 include("symbols.jl")
+include("gradient_api.jl")
 
 # Automatic Differentiation
 export AbstractADType
@@ -55,6 +56,12 @@ export AutoChainRules,
 @public mode
 @public Auto
 
+# Gradient API (minimal interface for backends to implement)
+@public GradientOrder
+@public gradient_order
+@public value_and_gradient!!
+@public value_and_jacobian!!
+
 # Sparse Automatic Differentiation
 export AutoSparse
 @public dense_ad
diff --git a/src/gradient_api.jl b/src/gradient_api.jl
new file mode 100644
index 0000000..5b388b0
--- /dev/null
+++ b/src/gradient_api.jl
@@ -0,0 +1,103 @@
+## Capability trait
+
+"""
+    GradientOrder{K}
+
+Trait indicating that an AD backend supports computing derivatives up to order `K`:
+
+  - `GradientOrder{0}()`: primal evaluation only
+  - `GradientOrder{1}()`: value + gradient / Jacobian
+  - `GradientOrder{2}()`: value + gradient + Hessian
+
+Backends declare their capability by implementing [`gradient_order`](@ref).
+Consumers can compare orders: `GradientOrder{1}() ≤ GradientOrder{2}()`.
+"""
+struct GradientOrder{K}
+    function GradientOrder{K}() where {K}
+        _K = Int(K)
+        _K ≥ 0 || throw(ArgumentError("GradientOrder requires K ≥ 0, got $_K"))
+        new{_K}()
+    end
+end
+
+GradientOrder(K::Integer) = GradientOrder{Int(K)}()
+
+Base.isless(::GradientOrder{J}, ::GradientOrder{K}) where {J, K} = J < K
+
+"""
+    gradient_order(backend::AbstractADType) -> GradientOrder{K} or nothing
+
+Return the [`GradientOrder`](@ref) supported by `backend`, or `nothing` if the backend
+does not implement the ADTypes gradient API.
+
+Backends declare support by adding a method:
+
+    ADTypes.gradient_order(::MyBackend) = GradientOrder{1}()
+"""
+gradient_order(::AbstractADType) = nothing
+
+## Interface functions
+
+"""
+    value_and_gradient!!(f, backend::AbstractADType, x)
+
+Compute the primal value `y = f(x)` and gradient `∇f(x)` for a scalar-valued function `f`.
+
+Returns `(y, g)` where `g` has the same structure as `x`.
+
+The `!!` signals that the backend may mutate internal cache state. The caller owns the
+returned values: mutable components (e.g. gradient arrays) may be overwritten on the next
+call with the same backend, so copy if you need to retain them.
+
+# Interface
+
+Backends supporting first-order derivatives implement:
+
+    ADTypes.value_and_gradient!!(f, ::MyBackend, x) = ...
+
+and declare:
+
+    ADTypes.gradient_order(::MyBackend) = GradientOrder{1}()
+
+See also: [`value_and_jacobian!!`](@ref), [`gradient_order`](@ref).
+"""
+function value_and_gradient!! end
+
+"""
+    value_and_jacobian!!(f, backend::AbstractADType, x)
+
+Compute the primal value `y = f(x)` and the Jacobian `∂f(x)` for a general function `f`.
+
+  - If `f` is scalar-valued, this is equivalent to [`value_and_gradient!!`](@ref).
+  - If `f` is vector-valued (`f : ℝⁿ → ℝᵐ`), returns the full `m × n` Jacobian matrix.
+
+The `!!` signals that the backend may mutate internal cache state. The caller owns the
+returned values.
+
+# Interface
+
+Backends implement:
+
+    ADTypes.value_and_jacobian!!(f, ::MyBackend, x) = ...
+
+See also: [`value_and_gradient!!`](@ref), [`gradient_order`](@ref).
+"""
+function value_and_jacobian!! end
+
+## Error fallbacks
+
+function value_and_gradient!!(f::F, ::T, x) where {F, T<:AbstractADType}
+    throw(ArgumentError(
+        "`ADTypes.value_and_gradient!!` is not implemented for backend `$T`. " *
+        "Add a method:\n    ADTypes.value_and_gradient!!(f, ::$T, x) = ...\n" *
+        "and declare:\n    ADTypes.gradient_order(::$T) = GradientOrder{1}()"
+    ))
+end
+
+function value_and_jacobian!!(f::F, ::T, x) where {F, T<:AbstractADType}
+    throw(ArgumentError(
+        "`ADTypes.value_and_jacobian!!` is not implemented for backend `$T`. " *
+        "Add a method:\n    ADTypes.value_and_jacobian!!(f, ::$T, x) = ...\n" *
+        "and declare:\n    ADTypes.gradient_order(::$T) = GradientOrder{1}()"
+    ))
+end
diff --git a/test/gradient_api.jl b/test/gradient_api.jl
new file mode 100644
index 0000000..4ef6fe6
--- /dev/null
+++ b/test/gradient_api.jl
@@ -0,0 +1,23 @@
+using ADTypes: GradientOrder, gradient_order
+
+struct UnimplementedBackend <: AbstractADType end
+
+@testset "GradientOrder trait" begin
+    @test GradientOrder{0}() isa GradientOrder
+    @test GradientOrder{1}() isa GradientOrder
+    @test GradientOrder{0}() < GradientOrder{1}()
+    @test GradientOrder{1}() < GradientOrder{2}()
+    @test !(GradientOrder{1}() < GradientOrder{1}())
+    @test_throws ArgumentError GradientOrder{-1}()
+end
+
+@testset "gradient_order" begin
+    @test gradient_order(UnimplementedBackend()) === nothing
+end
+
+@testset "Error fallbacks" begin
+    f = x -> x^2
+    backend = UnimplementedBackend()
+    @test_throws ArgumentError ADTypes.value_and_gradient!!(f, backend, 1.0)
+    @test_throws ArgumentError ADTypes.value_and_jacobian!!(f, backend, 1.0)
+end
diff --git a/test/public.jl b/test/public.jl
index b8ed0c6..d890cde 100644
--- a/test/public.jl
+++ b/test/public.jl
@@ -19,5 +19,10 @@ public_symbols = (
     # Matrix coloring
     :coloring_algorithm,
     :NoColoringAlgorithm,
+    # Gradient API
+    :GradientOrder,
+    :gradient_order,
+    :value_and_gradient!!,
+    :value_and_jacobian!!,
 )
 @test public_symbols ⊆ names(ADTypes)
diff --git a/test/runtests.jl b/test/runtests.jl
index c77d322..33171e1 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -115,6 +115,9 @@
     @testset "Miscellaneous" begin
         include("misc.jl")
     end
+    @testset "Gradient API" begin
+        include("gradient_api.jl")
+    end
     if VERSION >= v"1.11.0-DEV.469"
         @testset "Public" begin
             include("public.jl")

From 2acc5d1c3de616107921c2783a8053f6dc42a19e Mon Sep 17 00:00:00 2001
From: AstitvaAggarwal
Date: Sun, 26 Apr 2026 14:08:57 +0100
Subject: [PATCH 2/4] fix vers

---
 Project.toml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/Project.toml b/Project.toml
index f5c7648..f5e7125 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,6 +1,5 @@
 name = "ADTypes"
 uuid = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
-version = "1.21.0"
 authors = ["Vaibhav Dixit , Guillaume Dalle and contributors"]
 version = "1.22.0"
 

From 3bb8b272de87604f2fce478f29f529b15e314e93 Mon Sep 17 00:00:00 2001
From: AstitvaAggarwal
Date: Sun, 26 Apr 2026 14:21:32 +0100
Subject: [PATCH 3/4] fix docs errors

---
 docs/src/index.md | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/docs/src/index.md b/docs/src/index.md
index 85e7e55..867a691 100644
--- a/docs/src/index.md
+++ b/docs/src/index.md
@@ -123,6 +123,15 @@ ADTypes.SymbolicMode
 ADTypes.Auto
 ```
 
+## Gradient API
+
+```@docs
+ADTypes.GradientOrder
+ADTypes.gradient_order
+ADTypes.value_and_gradient!!
+ADTypes.value_and_jacobian!!
+```
+
 ## Deprecated
 
 ```@docs

From 6f2c298fd82d509644bec51bd4af1c9538e735bf Mon Sep 17 00:00:00 2001
From: AstitvaAggarwal
Date: Sun, 26 Apr 2026 15:03:51 +0100
Subject: [PATCH 4/4] remove Mooncake

---
 Project.toml | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/Project.toml b/Project.toml
index f5e7125..6316e86 100644
--- a/Project.toml
+++ b/Project.toml
@@ -17,7 +17,6 @@ ADTypesEnzymeCoreExt = "EnzymeCore"
 ChainRulesCore = "1.0.2"
 ConstructionBase = "1.5"
 EnzymeCore = "0.5.3,0.6,0.7,0.8"
-Mooncake = "0.5.26"
 julia = "1.10"
 
 [extras]
@@ -25,9 +24,8 @@ Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
 ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
 EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869"
 JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b"
-Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6"
 Setfield = "efcf1570-3423-57d1-acb7-fd33fddbac46"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
 [targets]
-test = ["Aqua", "ChainRulesCore", "EnzymeCore", "JET", "Mooncake", "Setfield", "Test"]
+test = ["Aqua", "ChainRulesCore", "EnzymeCore", "JET", "Setfield", "Test"]
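
To make the intended integration concrete, here is a minimal sketch of how a downstream package might adopt this API. The `ToyFiniteDiffBackend` type and its step size `h` are hypothetical and exist only for this illustration; the only pieces taken from the patch series above are `GradientOrder`, `gradient_order`, and `value_and_gradient!!`.

```julia
using ADTypes
using ADTypes: GradientOrder, gradient_order, value_and_gradient!!

# Hypothetical toy backend (illustration only, not part of this patch series):
# central finite differences with step size h.
struct ToyFiniteDiffBackend <: ADTypes.AbstractADType
    h::Float64
end
ToyFiniteDiffBackend() = ToyFiniteDiffBackend(1e-6)

# Declare first-order capability for the new trait.
ADTypes.gradient_order(::ToyFiniteDiffBackend) = GradientOrder{1}()

# Implement value + gradient for a scalar-valued f and a vector input x.
function ADTypes.value_and_gradient!!(f, b::ToyFiniteDiffBackend, x::AbstractVector)
    y = f(x)
    g = similar(x, float(eltype(x)))
    for i in eachindex(x)
        xp = copy(x); xp[i] += b.h
        xm = copy(x); xm[i] -= b.h
        g[i] = (f(xp) - f(xm)) / (2 * b.h)
    end
    return y, g
end

# Consumer side: query the declared capability before calling into the API.
backend = ToyFiniteDiffBackend()
order = gradient_order(backend)
if order !== nothing && GradientOrder{1}() ≤ order
    y, g = value_and_gradient!!(x -> sum(abs2, x), backend, [1.0, 2.0, 3.0])
    @show y g  # y ≈ 14.0, g ≈ [2.0, 4.0, 6.0]
end
```

Because `gradient_order` falls back to `nothing` rather than throwing, consumers can branch on backend capability without try/catch, while the `!!` fallbacks still raise an actionable `ArgumentError` if the functions are called on a backend that never opted in.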