Thanks @anon94023334 - I have no memory of commenting on that issue! I saw the filtering in the docs, but was wondering if there’s an indexing method that’s as fast as a dictionary lookup would be. I hadn’t tested it - I just assumed the filtering was slower. Sure enough:
using LightGraphs
using MetaGraphs
using BenchmarkTools
"""
    set_up()

Build the benchmark fixture: a 1000-vertex `MetaGraph` whose first 500
vertices are named `"sp1"…"sp500"` and whose last 500 are named
`"me1"…"me500"`, plus 10 000 random source indices, target indices, and
edge weights.

Returns a tuple `(G, x, y, w)` where `x` draws from `1:500`, `y` from
`501:1000`, and `w` from `[0, 1)`.
"""
function set_up()
    G = MetaGraph(1000)
    # NOTE(review): the original also built `sp`/`me` name→vertex Dicts here,
    # but they were locals that never escaped this function, so they were dead
    # code (and `dict_assign!` could not see them). They have been removed.
    for i in 1:1000
        if i <= 500
            set_prop!(G, i, :name, "sp$i")
        else
            set_prop!(G, i, :name, "me$(i-500)")
        end
    end
    x = rand(1:500, 10000)
    y = rand(501:1000, 10000)
    w = rand(10000)
    return (G, x, y, w)
end
"""
    name_lookup(mgraph::MetaGraph, name::String)

Return the first vertex of `mgraph` whose `:name` property equals `name`.
Throws if no vertex matches.
"""
function name_lookup(mgraph::MetaGraph, name::String)
    # `first` consumes the lazy iterator and stops at the first hit;
    # the original `collect(...)[1]` materialized every match into a
    # temporary array just to index element 1.
    return first(filter_vertices(mgraph, :name, name))
end
"""
    dict_assign!(mgraph, spindex, meindex, weights)

Assign `:weight` edge properties on `mgraph` using O(1) dictionary
name→vertex lookups. For each `i`, the edge connects the vertex named
`"sp\$(spindex[i])"` to the vertex named `"me\$(meindex[i]-500)"`; an
existing weight is only overwritten when the new weight is larger.
"""
function dict_assign!(mgraph::MetaGraph, spindex::Vector{Int}, meindex::Vector{Int}, weights::Vector{Float64})
    # Build the name→vertex tables locally. In the original these were
    # locals of set_up() that never escaped it, so referencing `sp`/`me`
    # here hit undefined globals (UndefVarError at runtime).
    sp = Dict("sp$i" => i for i in 1:500)
    me = Dict("me$i" => i + 500 for i in 1:500)
    for i in eachindex(weights)  # generalized from the hard-coded 1:10000
        spi = sp["sp$(spindex[i])"]
        mei = me["me$(meindex[i]-500)"]
        weight = weights[i]
        e = Edge(spi, mei)  # construct once instead of three times
        (has_prop(mgraph, e, :weight) && weight <= get_prop(mgraph, e, :weight)) && continue
        set_prop!(mgraph, e, :weight, weight)
    end
    return nothing
end
"""
    filter_assign!(mgraph, spindex, meindex, weights)

Same contract as `dict_assign!`, but resolves vertex names with
`name_lookup` (a linear `filter_vertices` scan per lookup) instead of a
dictionary — this is the slow path being benchmarked.
"""
function filter_assign!(mgraph::MetaGraph, spindex::Vector{Int}, meindex::Vector{Int}, weights::Vector{Float64})
    for i in eachindex(weights)  # generalized from the hard-coded 1:10000
        spi = name_lookup(mgraph, "sp$(spindex[i])")
        mei = name_lookup(mgraph, "me$(meindex[i]-500)")
        weight = weights[i]
        e = Edge(spi, mei)  # construct once instead of three times
        (has_prop(mgraph, e, :weight) && weight <= get_prop(mgraph, e, :weight)) && continue
        set_prop!(mgraph, e, :weight, weight)
    end
    return nothing
end
"""
    test_dict()

End-to-end scenario for benchmarking: build the fixture, then assign all
edge weights via the dictionary-lookup path.
"""
function test_dict()
    graph, spidx, meidx, wts = set_up()
    return dict_assign!(graph, spidx, meidx, wts)
end
"""
    test_filter()

End-to-end scenario for benchmarking: build the fixture, then assign all
edge weights via the `filter_vertices` scan path.
"""
function test_filter()
    graph, spidx, meidx, wts = set_up()
    return filter_assign!(graph, spidx, meidx, wts)
end
# Benchmark the dictionary-lookup path. The commented block below is the
# recorded output from the author's run; note timing includes set_up().
@benchmark test_dict()
#=
BenchmarkTools.Trial:
memory estimate: 28.45 MiB
allocs estimate: 377474
--------------
minimum time: 34.503 ms (21.11% GC)
median time: 40.698 ms (24.59% GC)
mean time: 41.800 ms (28.85% GC)
maximum time: 53.724 ms (27.73% GC)
--------------
samples: 120
evals/sample: 1
=#
# Benchmark the filter_vertices-scan path. The commented block below is the
# recorded output from the author's run — ~400x slower and ~800x more memory
# than the dictionary path above.
@benchmark test_filter()
#=
BenchmarkTools.Trial:
memory estimate: 22.68 GiB
allocs estimate: 160517950
--------------
minimum time: 16.994 s (32.95% GC)
median time: 16.994 s (32.95% GC)
mean time: 16.994 s (32.95% GC)
maximum time: 16.994 s (32.95% GC)
--------------
samples: 1
evals/sample: 1
=#
Not sure if I did it quite right, but the filter method looks to be substantially slower. I’m wondering if it might be worth adding a `metaindex` (or something similar) that could be used to index a `MetaGraph` with an arbitrary type (e.g. `String`) that is unique per node. I’d be happy to open an issue or start a PR if that seems like something that would be useful.