Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
47 commits
Select commit Hold shift + click to select a range
b4ae4d7
Working BP Commit
JoeyT1994 Oct 2, 2025
d77d063
BP Code
JoeyT1994 Oct 23, 2025
b80e36e
Express BP in terms of `SweepIterator` interface
jack-dunham Oct 28, 2025
fe44b80
Add method for `setmessages!` that allows messages from one cache to …
jack-dunham Oct 31, 2025
3ce0898
Network is now passed to `forest_cover_edge_sequence` directly.
jack-dunham Nov 10, 2025
f6e4fd0
test file formatting
jack-dunham Nov 25, 2025
63840a9
Add `DataGraphsPartitionedGraphsExt` glue for `TensorNetwork` type
jack-dunham Nov 25, 2025
ba22ab5
Make abstract tensor network interface more generic.
jack-dunham Nov 25, 2025
49b0870
BP Caching overhauls
jack-dunham Nov 25, 2025
db46c04
Remove dead deps
jack-dunham Nov 25, 2025
400e373
Fix merge
jack-dunham Nov 25, 2025
b9aafe8
Fix type inference in TensorNetwork construction
jack-dunham Nov 25, 2025
4090e61
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Nov 25, 2025
be0750e
Remove `ITensorBase` dep
jack-dunham Nov 25, 2025
b971b89
`forest_cover_edge_sequence` now constructs a temporary `NamedGraph` …
jack-dunham Dec 1, 2025
9ebf031
[LazyNamedDimsArrays] Fix `parenttype` method
jack-dunham Jan 6, 2026
16fe303
BP Cache now uses new `DataGraphs` interface
jack-dunham Jan 6, 2026
24a4335
Adjust `default_message` to take a `message` type as its first argument
jack-dunham Jan 6, 2026
c43884e
Remove unnecessary code and fix ambiguities in `AbstractTensorNetwork`
jack-dunham Jan 6, 2026
dd6f645
`TensorNetwork` type now uses new DataGraphs interface
jack-dunham Jan 6, 2026
7bb579c
Sweeping algorithms based on AlgorithmsInterface.jl (#30)
mtfishman Dec 19, 2025
032447a
Upgrade to NamedDimsArrays.jl v0.11 (#38)
mtfishman Dec 23, 2025
b256d79
[LazyNamedDimsArrays] New `symnameddims` method that pulls out indice…
jack-dunham Jan 9, 2026
b2da9d8
The function `region_scalar` should now return a scalar, rather than …
jack-dunham Jan 9, 2026
8506e26
Fix double counting in `edge_scalars` function
jack-dunham Jan 9, 2026
938180a
Minor code formatting
jack-dunham Jan 9, 2026
4461967
Expressed belief propagation in terms of AlgorithmsInterface
jack-dunham Jan 9, 2026
d68860a
Fixes to TensorNetwork construction from tensor list
jack-dunham Jan 9, 2026
2f5c783
Minor simplifications to `contract_network` interface.
jack-dunham Jan 9, 2026
9a45a5b
Merge branch 'main' into bp
jack-dunham Feb 13, 2026
4eec9b6
Upgrade DataGraphs and NamedGraphs dependencies
jack-dunham Feb 10, 2026
202724c
[AlgorithmsInterfaceExtensions] Allowing mapping over a generic itera…
jack-dunham Feb 10, 2026
69542e3
Upgrade serial BP to use own `<:Algorithm` structs.
jack-dunham Feb 11, 2026
9925069
Simplify BP cache to only store factors
jack-dunham Feb 13, 2026
292f2fa
Upgrade to DataGraphs v0.3.1 and NamedGraphs v0.10
jack-dunham Feb 13, 2026
9d937aa
Fix compat
jack-dunham Feb 13, 2026
5432fe2
Fix broken merge
jack-dunham Feb 13, 2026
c916c84
Bug fix; upgrade tests
jack-dunham Feb 19, 2026
4a511a1
Add 2D TN test
jack-dunham Feb 24, 2026
5b97af3
Formatting
jack-dunham Feb 24, 2026
fef588d
Merge branch 'main' into bp
jack-dunham Feb 24, 2026
3654af6
Working Parallel BP
jack-dunham Feb 13, 2026
3b868f3
Remove basic `Distributed.jl` implementation.
jack-dunham Feb 26, 2026
37fe650
Fix imports.
jack-dunham Feb 26, 2026
3f488fa
Fix Project.toml
jack-dunham Feb 26, 2026
dbc6067
The `NestedAlgorithm` abstract type now takes the type of the child a…
jack-dunham Mar 2, 2026
fe3ac5c
Simplify parallel code.
jack-dunham Mar 2, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 6 additions & 4 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,25 +29,27 @@ WrappedUnions = "325db55a-9c6c-5b90-b1a2-ec87e7a38c44"

[weakdeps]
TensorOperations = "6aa20fa7-93e2-5fca-9bc0-fbd0db3c71a2"
Dagger = "d58978e5-989f-55fb-8d15-ea34adc7bf54"

[extensions]
ITensorNetworksNextTensorOperationsExt = "TensorOperations"
ITensorNetworksNextDaggerExt = "Dagger"

[compat]
AbstractTrees = "0.4.5"
Adapt = "4.3"
AlgorithmsInterface = "0.1"
BackendSelection = "0.1.6"
Combinatorics = "1"
DataGraphs = "0.2.7"
DataGraphs = "0.3.1"
DiagonalArrays = "0.3.31"
Dictionaries = "0.4.5"
FunctionImplementations = "0.4"
FunctionImplementations = "0.4.1"
Graphs = "1.13.1"
LinearAlgebra = "1.10"
MacroTools = "0.5.16"
NamedDimsArrays = "0.14.2"
NamedGraphs = "0.6.9, 0.7, 0.8"
NamedDimsArrays = "0.14.3"
NamedGraphs = "0.10"
SimpleTraits = "0.9.5"
SplitApplyCombine = "1.2.3"
TensorOperations = "5.3.1"
Expand Down
63 changes: 63 additions & 0 deletions ext/ITensorNetworksNextDaggerExt/ITensorNetworksNextDaggerExt.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
module ITensorNetworksNextDaggerExt

import AlgorithmsInterface as AI
import ITensorNetworksNext.AlgorithmsInterfaceExtensions as AIE
import ITensorNetworksNext.ITensorNetworksNextParallel as ITNNP
using Dagger
using Dictionaries: Dictionary, set!
using ITensorNetworksNext.ITensorNetworksNextParallel:
    DaggerNestedAlgorithm, DaggerState, ITensorNetworksNextParallel

function ITNNP.DaggerNestedAlgorithm(f, iterable; kwargs...)
    # Build one child algorithm per element of `iterable` by applying `f`.
    children = map(f, iterable)
    return DaggerNestedAlgorithm(; algorithms = children, kwargs...)
end

# Convenience entry point: construct a `DaggerNestedAlgorithm` from a callable
# and an iterable of child inputs (enables `do`-block syntax at call sites).
function ITNNP.dagger_algorithm(f::Base.Callable, iterable; kwargs...)
    return DaggerNestedAlgorithm(f, iterable; kwargs...)
end

"""
    initialize_dagger_state(problem, algorithm; iterate)

Create the `DaggerState` for running `algorithm` on `problem`, starting from
`iterate`. The state carries an (initially empty) table of in-flight Dagger
tasks keyed by iteration index, plus the stopping-criterion state.
"""
function ITNNP.initialize_dagger_state(
        problem::AIE.Problem, algorithm::AIE.Algorithm; iterate
    )
    stopping_criterion_state = AI.initialize_state(
        problem, algorithm, algorithm.stopping_criterion
    )

    # Maps iteration index -> the DTask spawned for that iteration by `step!`.
    # NOTE: `Dictionary` is now imported explicitly in this file rather than
    # relying on a `using` in a sibling `include`d file.
    remote_results = Dictionary{Int, Dagger.DTask}()

    return ITNNP.DaggerState(;
        iterate,
        remote_results,
        stopping_criterion_state
    )
end

# `DaggerNestedAlgorithm` delegates state construction to
# `initialize_dagger_state` so extensions share a single entry point.
function AI.initialize_state(
        problem::AIE.Problem, algorithm::ITNNP.DaggerNestedAlgorithm; kwargs...
    )
    return ITNNP.initialize_dagger_state(problem, algorithm; kwargs...)
end

# One step = launch the child algorithm for the current iteration as an
# asynchronous Dagger task and record it in the state's result table.
function AI.step!(
        problem::AIE.Problem,
        algorithm::ITNNP.DaggerNestedAlgorithm,
        state::ITNNP.DaggerState;
        kwargs...
    )
    child = algorithm.algorithms[state.iteration]
    child_iterate = ITNNP.get_subiterate(problem, child, state)

    task = Dagger.@spawn AI.solve(problem, child; iterate = child_iterate)
    set!(state.remote_results, state.iteration, task)

    return state
end

include("daggerbeliefpropagation.jl")

end # ITensorNetworksNextDaggerExt
194 changes: 194 additions & 0 deletions ext/ITensorNetworksNextDaggerExt/daggerbeliefpropagation.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,194 @@
import ITensorNetworksNext.ITensorNetworksNextParallel as ITNNP
using Dagger
using DataGraphs: DataGraphs, edge_data, get_edge_data, get_vertex_data, is_edge_assigned,
is_vertex_assigned, set_edge_data!, set_vertex_data!, underlying_graph
using Dictionaries: Dictionary, Indices, getindices
using Graphs: AbstractEdge, AbstractGraph, dst, edges, src, vertices
using ITensorNetworksNext: ITensorNetworksNext, BeliefPropagation, BeliefPropagationCache,
BeliefPropagationProblem, BeliefPropagationState, beliefpropagation,
forest_cover_edge_sequence, select_algorithm, subcache
using NamedGraphs.GraphsExtensions: boundary_edges
using NamedGraphs.PartitionedGraphs: QuotientVertex, quotientedges, quotientvertices
using NamedGraphs: NamedGraphs

# Alias for a belief-propagation algorithm whose sweep is executed as a
# Dagger-parallel nested algorithm. (Removed stray trailing `;` — it is
# meaningless at top level and looked like a REPL-paste leftover.)
const DaggerBeliefPropagation = BeliefPropagation{<:ITNNP.DaggerNestedAlgorithm}

"""
    DaggerBeliefPropagationCache(network; workers = nothing, scopes = nothing)

Wrap a serial `BeliefPropagationCache` built from `network` and pin each
vertex partition's state to a Dagger scope. If `scopes` is not given, the
partitions are assigned round-robin over `workers` (defaulting to all
available Distributed workers). When `scopes` is given explicitly, it must
contain one scope per vertex partition.
"""
function ITNNP.DaggerBeliefPropagationCache(
        network::AbstractGraph;
        workers = nothing,
        scopes = nothing
    )
    underlying_cache = BeliefPropagationCache(network)

    # Renamed from `keys`: avoid shadowing `Base.keys`.
    partitions = Indices(quotientvertices(underlying_cache))

    if isnothing(scopes)
        worker_pool = isnothing(workers) ? Dagger.Distributed.workers() : workers
        # Cycle through the worker pool so partitions are spread round-robin.
        assigned = collect(Iterators.take(Iterators.cycle(worker_pool), length(partitions)))
        scopes = map(Dagger.ProcessScope, assigned)
    elseif length(partitions) != length(scopes)
        throw(
            ArgumentError(
                "Number of provided scopes must match the number of vertex partitions of underlying graph"
            )
        )
    end

    scope_dict = Dictionary(partitions, scopes)

    # One mutable Dagger chunk per partition, holding that partition's
    # `BeliefPropagationState`, pinned to its assigned scope.
    quotient_chunks = map(partitions) do quotient_vertex
        scope = scope_dict[quotient_vertex]
        iterate = subcache(underlying_cache, quotient_vertex)
        return Dagger.@mutable scope = scope BeliefPropagationState(; iterate)
    end

    return ITNNP.DaggerBeliefPropagationCache(underlying_cache, quotient_chunks)
end

# The underlying graph is the one carried by the wrapped serial cache.
DataGraphs.underlying_graph(cache::ITNNP.DaggerBeliefPropagationCache) =
    underlying_graph(cache.underlying_cache)

# Vertex/edge assignment queries delegate to the wrapped serial cache.
DataGraphs.is_vertex_assigned(bpc::ITNNP.DaggerBeliefPropagationCache, vertex) =
    is_vertex_assigned(bpc.underlying_cache, vertex)
DataGraphs.is_edge_assigned(bpc::ITNNP.DaggerBeliefPropagationCache, edge) =
    is_edge_assigned(bpc.underlying_cache, edge)

# Vertex/edge data reads delegate to the wrapped serial cache.
DataGraphs.get_vertex_data(bpc::ITNNP.DaggerBeliefPropagationCache, vertex) =
    get_vertex_data(bpc.underlying_cache, vertex)
DataGraphs.get_edge_data(bpc::ITNNP.DaggerBeliefPropagationCache, edge::AbstractEdge) =
    get_edge_data(bpc.underlying_cache, edge)

# Vertex/edge data writes delegate to the wrapped serial cache.
DataGraphs.set_vertex_data!(bpc::ITNNP.DaggerBeliefPropagationCache, val, vertex) =
    set_vertex_data!(bpc.underlying_cache, val, vertex)
DataGraphs.set_edge_data!(bpc::ITNNP.DaggerBeliefPropagationCache, val, edge) =
    set_edge_data!(bpc.underlying_cache, val, edge)

# A `QuotientVertex` is already a valid graph index for this cache.
NamedGraphs.to_graph_index(::ITNNP.DaggerBeliefPropagationCache, qv::QuotientVertex) = qv

# Indexing by quotient vertex returns the Dagger chunk that holds the
# corresponding partition's state.
DataGraphs.get_index_data(cache::ITNNP.DaggerBeliefPropagationCache, qv::QuotientVertex) =
    cache.quotient_chunks[qv]

# Build one serial-BP child algorithm per vertex partition. Each child sweeps
# only edges interior to its partition; incoming boundary messages are owned
# by neighbouring partitions and therefore excluded from its update sequence.
function ITensorNetworksNext.beliefpropagation_sweep(
        cache::ITNNP.DaggerBeliefPropagationCache;
        edges,
        kwargs...
    )
    return ITNNP.dagger_algorithm(quotientvertices(cache)) do quotient_vertex
        # Renamed from `subcache`: the original local shadowed the imported
        # `subcache` function.
        partition_state = fetch(cache[quotient_vertex])
        partition_cache = partition_state.iterate

        local_edges = forest_cover_edge_sequence(partition_cache) ∩ edges
        incoming = boundary_edges(cache, vertices(cache, quotient_vertex); dir = :in)

        return select_algorithm(
            beliefpropagation,
            partition_cache;
            # Don't update the incoming messages
            edges = setdiff(local_edges, incoming),
            maxiter = 1,
            kwargs...
        )
    end
end

# State construction for Dagger-parallel BP delegates to
# `initialize_dagger_state`. Uses the `DaggerBeliefPropagation` alias defined
# above instead of repeating `BeliefPropagation{<:ITNNP.DaggerNestedAlgorithm}`.
function AI.initialize_state(
        problem::AIE.Problem,
        algorithm::DaggerBeliefPropagation;
        kwargs...
    )
    return ITNNP.initialize_dagger_state(problem, algorithm; kwargs...)
end

function ITNNP.get_subiterate(
        ::BeliefPropagationProblem,
        ::BeliefPropagation, # Our parallel region runs a small BP
        state::ITNNP.DaggerState
    )
    # The k-th parallel step operates on the k-th vertex partition's chunk.
    parent_cache = state.iterate.iterate
    partition = collect(quotientvertices(parent_cache))[state.iteration]
    return parent_cache[partition]
end

"""
    set_substate!(problem, algorithm, state, substate)

Merge the per-partition results spawned by `step!` back into the parent
`state`: (1) reduce the convergence metric `maxdiff` over all partitions, and
(2) copy freshly updated boundary messages from each source partition into its
neighbouring destination partitions.
"""
function AIE.set_substate!(
        problem::BeliefPropagationProblem,
        algorithm::AIE.NestedAlgorithm,
        state::AIE.State,
        substate::ITNNP.DaggerState
    )
    dst_cache = state.iterate.iterate

    # (1) Sweep maxdiff = largest maxdiff over all partition results. Dagger
    # fetches DTask arguments before invoking the closure, so the closure
    # receives the finished partition state, not the task.
    maxdiff_dtasks = map(substate.remote_results) do remote_result
        return Dagger.spawn(partition_state -> partition_state.iterate.maxdiff, remote_result)
    end
    # `init = 0.0` reproduces the original zero-then-max behaviour and keeps
    # this well-defined when there are no remote results.
    state.iterate.maxdiff = maximum(fetch, maxdiff_dtasks; init = 0.0)

    # (2) Ship boundary messages across each quotient edge. The destination
    # stays a mutable chunk so the copy runs where that partition lives; the
    # source state is fetched exactly once (the original fetched it twice).
    transfer_dtasks = map(quotientedges(dst_cache)) do quotient_edge
        src_chunk = dst_cache[src(quotient_edge)]
        dst_chunk = dst_cache[dst(quotient_edge)]

        return Dagger.spawn(
            dst_chunk,
            fetch(src_chunk),
            edges(dst_cache, quotient_edge)
        ) do dst_state, src_state, boundary
            from_cache = src_state.iterate
            to_cache = dst_state.iterate
            for edge in boundary
                to_cache[edge] = from_cache[edge]
            end
        end
    end

    # Block until all message transfers have landed before returning.
    foreach(wait, transfer_dtasks)

    return state
end

# Pull every partition's messages back into the parent cache once the outer
# stopping criterion fires, so the parent cache reflects the converged result.
function ITNNP.finalize_state!(
    ::BeliefPropagationProblem,
    ::BeliefPropagation,
    state::ITNNP.DaggerState
)
    dst_cache = state.iterate.iterate

    for quotient_vertex in quotientvertices(dst_cache)
        # Fetch the (remote) partition state and its local subcache.
        substate = fetch(dst_cache[quotient_vertex])
        subcache = substate.iterate
        # NOTE(review): this broadcasts the subcache's edge data over the
        # *entire* parent edge-data container on every loop iteration. Verify
        # that `edge_data(subcache)` aligns with the parent so each pass only
        # touches that partition's own edges, rather than repeatedly
        # overwriting everything — TODO confirm.
        edge_data(dst_cache) .= edge_data(subcache)
    end

    return state
end
Original file line number Diff line number Diff line change
Expand Up @@ -149,10 +149,10 @@ end

# ============================ NestedAlgorithm =============================================

abstract type NestedAlgorithm <: Algorithm end
abstract type NestedAlgorithm{Child} <: Algorithm end

function nested_algorithm(f::Function, nalgorithms::Int; kwargs...)
return DefaultNestedAlgorithm(f, nalgorithms; kwargs...)
function nested_algorithm(f::Function, iterable; kwargs...)
return DefaultNestedAlgorithm(f, iterable; kwargs...)
end

max_iterations(algorithm::NestedAlgorithm) = length(algorithm.algorithms)
Expand Down Expand Up @@ -202,7 +202,7 @@ from a list of stored algorithms.
ChildAlgorithm <: Algorithm,
Algorithms <: AbstractVector{ChildAlgorithm},
StoppingCriterion <: AI.StoppingCriterion,
} <: NestedAlgorithm
} <: NestedAlgorithm{ChildAlgorithm}
algorithms::Algorithms
stopping_criterion::StoppingCriterion = AI.StopAfterIteration(length(algorithms))
end
Expand Down
6 changes: 6 additions & 0 deletions src/ITensorNetworksNext.jl
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,10 @@ include("contract_network.jl")
include("sweeping/utils.jl")
include("sweeping/eigenproblem.jl")

include("beliefpropagation/abstractbeliefpropagationcache.jl")
include("beliefpropagation/beliefpropagationcache.jl")
include("beliefpropagation/beliefpropagationproblem.jl")

include("ITensorNetworksNextParallel/ITensorNetworksNextParallel.jl")

end
39 changes: 39 additions & 0 deletions src/ITensorNetworksNextParallel/ITensorNetworksNextParallel.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
module ITensorNetworksNextParallel

import AlgorithmsInterface as AI
import ITensorNetworksNext.AlgorithmsInterfaceExtensions as AIE

# A `ParallelAlgorithm` is a nested algorithm whose child algorithms of type
# `Child` are intended to run concurrently (e.g. as Dagger tasks).
abstract type ParallelAlgorithm{Child} <: AIE.NestedAlgorithm{Child} end
# An outer iterative algorithm whose child algorithm is itself parallel;
# used below to hook `finalize_state!` into `is_finished!`.
const IterativeParallelAlgorithm{Child <: ParallelAlgorithm} = AIE.NestedAlgorithm{Child}

"""
get_subiterate(subproblem::AI.Problem, subalgorithm::AI.Algorithm, state::AI.State)

For a given `subproblem` and `subalgorithm` of a parent nested algorithm,
derive (from the parent state `state`) the iterate to be used in the associated sub state.
The returned value of this function is then pass to a remote call of `initialize_state`.
"""
get_subiterate(::AI.Problem, ::AI.Algorithm, state::AI.State) = state.iterate

# Hook invoked once when the outer iteration finishes; the default is a no-op
# that returns `state` unchanged (specialized e.g. by the Dagger extension).
finalize_state!(::AI.Problem, ::AI.Algorithm, state::AI.State) = state

# Wrap the stopping-criterion check so that `finalize_state!` runs exactly
# once, at the moment the outer loop is about to terminate.
function AI.is_finished!(
        problem::AI.Problem,
        algorithm::IterativeParallelAlgorithm,
        state::AI.State
    )
    criterion = algorithm.stopping_criterion
    criterion_state = state.stopping_criterion_state

    done = AI.is_finished!(problem, algorithm, state, criterion, criterion_state)
    done && finalize_state!(problem, algorithm, state)

    return done
end

include("dagger.jl")

end # ITensorNetworksNextParallel
Loading
Loading