How to manage memory when using JuMP+Gurobi?

Observe this

usr@usr:~/julia_projects/uc/Uc$ julia --project=. --threads=1,0
               _
   _       _ _(_)_     |  Documentation: https://docs.julialang.org
  (_)     | (_) (_)    |
   _ _   _| |_  __ _   |  Type "?" for help, "]?" for Pkg help.
  | | | | | | |/ _` |  |
  | | |_| | | | (_| |  |  Version 1.12.6 (2026-04-09)
 _/ |\__'_|_|_|\__'_|  |  Official https://julialang.org release
|__/                   |

julia> import Gurobi

julia> e = Gurobi.Env(); # omit output

julia> Gurobi.GRBfreeenv(e)

julia> sleep(0.001)

julia> exit()

[3525351] signal 11 (1): Segmentation fault
in expression starting at REPL[5]:1
pthread_mutex_lock at /lib/x86_64-linux-gnu/libc.so.6 (unknown line)
GRBfreeenv at /opt/gurobi1301/linux64cuda13/lib/libgurobi130.so (unknown line)
GRBfreeenv at /home/amd/.julia/packages/Gurobi/K2XSK/src/gen130/libgrb_api.jl:914 [inlined]
#4 at /home/amd/.julia/packages/Gurobi/K2XSK/src/MOI_wrapper/MOI_wrapper.jl:158
jfptr_YY.4_5073 at /home/amd/.julia/compiled/v1.12/Gurobi/do9v6_POo9T.so (unknown line)
run_finalizer at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/gc-common.c:180
jl_gc_run_finalizers_in_list at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/gc-common.c:270
run_finalizers at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/gc-common.c:316
ijl_atexit_hook at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/init.c:292
ijl_exit at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/init.c:196
jlplt_ijl_exit_25543.1 at /home/amd/.julia/juliaup/julia-1.12.6+0.x64.linux.gnu/lib/julia/sys.so (unknown line)
exit at ./initdefs.jl:28
exit at ./initdefs.jl:29
jfptr_exit_63577.1 at /home/amd/.julia/juliaup/julia-1.12.6+0.x64.linux.gnu/lib/julia/sys.so (unknown line)
jl_apply at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/julia.h:2391 [inlined]
do_call at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/interpreter.c:123
eval_value at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/interpreter.c:243
eval_stmt_value at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/interpreter.c:194 [inlined]
eval_body at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/interpreter.c:707
jl_interpret_toplevel_thunk at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/interpreter.c:898
jl_toplevel_eval_flex at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/toplevel.c:1035
__repl_entry_eval_expanded_with_loc at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/usr/share/julia/stdlib/v1.12/REPL/src/REPL.jl:301
jl_apply at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/julia.h:2391 [inlined]
jl_f_invokelatest at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/builtins.c:881
toplevel_eval_with_hooks at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/usr/share/julia/stdlib/v1.12/REPL/src/REPL.jl:308
toplevel_eval_with_hooks at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/usr/share/julia/stdlib/v1.12/REPL/src/REPL.jl:312
toplevel_eval_with_hooks at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/usr/share/julia/stdlib/v1.12/REPL/src/REPL.jl:305 [inlined]
eval_user_input at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/usr/share/julia/stdlib/v1.12/REPL/src/REPL.jl:330
repl_backend_loop at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/usr/share/julia/stdlib/v1.12/REPL/src/REPL.jl:452
#start_repl_backend#41 at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/usr/share/julia/stdlib/v1.12/REPL/src/REPL.jl:427
start_repl_backend at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/usr/share/julia/stdlib/v1.12/REPL/src/REPL.jl:424 [inlined]
#run_repl#50 at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/usr/share/julia/stdlib/v1.12/REPL/src/REPL.jl:653
run_repl at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/usr/share/julia/stdlib/v1.12/REPL/src/REPL.jl:639
jfptr_run_repl_19665.1 at /home/amd/.julia/juliaup/julia-1.12.6+0.x64.linux.gnu/share/julia/compiled/v1.12/REPL/u0gqU_E4m7X.so (unknown line)
run_std_repl at ./client.jl:478
jfptr_run_std_repl_24985.1 at /home/amd/.julia/juliaup/julia-1.12.6+0.x64.linux.gnu/lib/julia/sys.so (unknown line)
jl_apply at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/julia.h:2391 [inlined]
jl_f_invokelatest at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/builtins.c:881
run_main_repl at ./client.jl:499
repl_main at ./client.jl:586 [inlined]
_start at ./client.jl:561
jfptr__start_63319.1 at /home/amd/.julia/juliaup/julia-1.12.6+0.x64.linux.gnu/lib/julia/sys.so (unknown line)
jl_apply at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/julia.h:2391 [inlined]
true_main at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/jlapi.c:971
jl_repl_entrypoint at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/src/jlapi.c:1139
main at /cache/build/builder-amdci5-4/julialang/julia-release-1-dot-12/cli/loader_exe.c:58
unknown function (ip: 0x7cfc3c42a1c9) at /lib/x86_64-linux-gnu/libc.so.6
__libc_start_main at /lib/x86_64-linux-gnu/libc.so.6 (unknown line)
unknown function (ip: 0x4010b8) at /workspace/srcdir/glibc-2.17/csu/../sysdeps/x86_64/start.S
Allocations: 2947422 (Pool: 2947289; Big: 133); GC: 4
Segmentation fault (core dumped)

Another question is the one in the title: I notice that Julia's GC does not seem to automatically release all memory, so it appears that within one Julia REPL session I get memory leakage. What should I do to recycle memory?

At the moment it is illegal to call GRBfreeenv on an environment manually, because GRBfreeenv is also called as part of the finaliser — so the environment ends up being freed twice, which is what causes the segfault above.

One option is:

julia> import Gurobi

julia> env = Gurobi.Env();
Set parameter WLSAccessID
Set parameter WLSSecret
Set parameter LicenseID to value 722777
WLS license 722777 - registered to JuMP Development

julia> finalize(env)

julia> env
Gurobi.Env(Ptr{Nothing}(0x0000000000000000), true, 0)

julia> exit()

I have been thinking to add the syntax:

Gurobi.Env() do env
    # use the env
end

which would explicitly scope when the environment is created and destroyed.

Edit: I remember why I didn’t do this before. Consider the situation:

using Gurobi
Gurobi.Env() do env
    model_1 = Gurobi.Optimizer(env)
    # Stuff
    model_2 = Gurobi.Optimizer(env)
    # Stuff
    return
end

Before we can run the finaliser for env we must first have finalised all the models that reference it. If the function runs, we finalise env, and then, sometime later, when the GC finalises model_1, Gurobi crashes.

I think for practical use cases, the Envs are allocated once. (For me, solving Gurobi models in parallel, I’ll have to allocate a vector of independent Envs.)

So the Envs may not need to be freed manually by the user.

But the models built on top of the Envs may need to be cleaned up. Otherwise the only option for the user is to abandon the existing Julia REPL and start a new one, which is not ideal.

A more relevant question is thus:

import JuMP, Gurobi
const S = 3;
const C = Dict{String, Any}("OutputFlag"=>0,"Threads"=>1);
env_vec = [Gurobi.Env(C) for _ = 1:S];
model_vec = map(JuMP.direct_model ∘ Gurobi.Optimizer, env_vec)
for model = model_vec
    JuMP.@variables(model, ...) # add many
    JuMP.@constraints(model, ...) # add many
end; # now the model_vec is huge in memory

how to reclaim memory pertaining to model_vec? The associated memory both in julia and in Gurobi’s C-libs.

You need to ensure that there are no more references to the models in model_vec, nor any of their component parts like variables or constraints. Then, run the GC.

An example would be:

using JuMP, Gurobi
const NUM_SCENARIOS = 3
const GUROBI_CONFIG = Dict{String,Any}("OutputFlag" => 0, "Threads" => 1)
grb_environments = [Gurobi.Env(GUROBI_CONFIG) for s in 1:NUM_SCENARIOS]
models = [direct_model(Gurobi.Optimizer(env)) for env in grb_environments]
for model in models
    @variables(model, ...)
    @constraints(model, ...)
end
models = nothing
GC.gc()

The fundamental issue is that Julia is a GC’d language. We don’t have a strong control around when objects are finalised.

Gurobipy does this: Python Env Class Reference - Gurobi Optimizer Reference Manual

with gp.Env() as env:
    with gp.Model(env=env) as model:
        model.optimize()

so it has explicit control over when the model and env is GC’d.

I guess we could do something like:

Gurobi.Env() do env
    Gurobi.Optimizer(env) do optimizer
        direct_model(optimizer)
        # ...
        return
    end
    return
end

but it doesn’t compose very well with JuMP.Model or your list of models.

Thanks. I’ll do some tests later on to see if that holds in my experiments.

But before that, it appears that creating envs in parallel is significantly faster than an ordinary sequential loop.

julia> import Gurobi

julia> const C = Dict{String, Any}("OutputFlag"=>0,"Threads"=>1)

julia> Env() = Gurobi.Env(C);

julia> function one_th(N)
           v = Vector{Gurobi.Env}(undef, N)
           @time for i=eachindex(v)
               v[i] = Env()
           end
           v
       end;

julia> function multi_th(N) # only adds a `Threads.@threads`
           v = Vector{Gurobi.Env}(undef, N)
           @time Threads.@threads for i=eachindex(v)
               v[i] = Env()
           end
           v
       end;

julia> one_th(3);
  0.518030 seconds (13 allocations: 384 bytes)

julia> multi_th(3);
  0.061154 seconds (13.56 k allocations: 702.922 KiB, 4374.49% compilation time)

julia> one_th(1000);
 10.598288 seconds (3.00 k allocations: 62.500 KiB)

julia> multi_th(1000); # is this safe?
  2.147298 seconds (3.67 k allocations: 102.328 KiB)

Is calling multi_th(1000) here safe? Here is the related link GitHub - jump-dev/Gurobi.jl: A Julia interface to the Gurobi Optimizer · GitHub

It appears I still have memory leakage (I suspect that the memory allocated internally by Gurobi is leaked). Take a look at my experiment:

usr@usr:~/julia_projects/uc/Uc$ ./j
               _
   _       _ _(_)_     |  Documentation: https://docs.julialang.org
  (_)     | (_) (_)    |
   _ _   _| |_  __ _   |  Type "?" for help, "]?" for Pkg help.
  | | | | | | |/ _` |  |
  | | |_| | | | (_| |  |  Version 1.12.6 (2026-04-09)
 _/ |\__'_|_|_|\__'_|  |  Official https://julialang.org release
|__/                   |

julia> import Random, Gurobi, JuMP

julia> Random.seed!(hash(1));

julia> const GrbCfg = Dict{String,Any}("OutputFlag"=>0,"Threads"=>1);

julia> const lus = [-Inf, Inf, Inf];

julia> const S = 50 * Threads.nthreads(); # 50 * 96

julia> const Envs = @time let v = Vector{Gurobi.Env}(undef, S)
           Threads.@threads for i=eachindex(v) v[i]=Gurobi.Env(GrbCfg) end
           v
       end;
 10.505098 seconds (191.14 k allocations: 8.958 MiB, 132.02% compilation time)

julia> const tks = similar(Envs, Task);

julia> t, T = 1, 23;

julia> ################################################
       # if S doesn't need to be modified, reuse Envs
       ################################################
       include("src/Settings.jl");

julia> include("src/Case2383.jl");

julia> include("src/WindGen.jl");

julia> include("src/Static.jl");

julia> include("src/Uvx.jl");

julia> include("src/General.jl");

julia> include("src/TSSP.jl");

julia> Random.seed!(hash(1));

julia> sub = similar(Envs, General.SubType);

julia> mst = TSSP.build_2ssp!(sub, t, T, tks);
[info | PowerModels]: Suppressing information and warning messages for the rest of this session.  Use the Memento package for more fine-grained control of logging.
698.640797 seconds (66.51 G allocations: 1.956 TiB, 80.46% gc time, 52.66% compilation time)

julia> @time TSSP.at_root_lp(mst, sub, tks)
pi_warm_up phase...
subMax:8.286930084228516s, mst:0.02107691764831543s, Perfect_Info_LB = 5994.031439946436
pi_bound added: 310.033105 seconds (178.49 k allocations: 1.172 GiB, 1.19% compilation time)
t=    9.4, lb=5.99403e+03, vio=8.75429e+03, msttime=   0.0, rNcut=  0.0, rNnz=     0.0
t=   18.3, lb=5.99403e+03, vio=8.79989e+03, msttime=   0.2, rNcut=  0.0, rNnz=     2.2
t=   25.7, lb=5.99403e+03, vio=3.32518e+02, msttime=   0.1, rNcut=  0.0, rNnz=     4.4
t=   31.2, lb=5.99403e+03, vio=3.28456e+02, msttime=   0.2, rNcut=  0.1, rNnz=     6.9
t=   36.1, lb=5.99403e+03, vio=8.81109e+01, msttime=   0.2, rNcut=  0.1, rNnz=     9.2
t=   41.9, lb=5.99408e+03, vio=1.26662e+02, msttime=   0.2, rNcut=  0.1, rNnz=    11.8
t=   48.5, lb=5.99412e+03, vio=1.04483e+02, msttime=   0.1, rNcut=  0.1, rNnz=    14.3
t=   55.6, lb=5.99424e+03, vio=1.60475e+02, msttime=   0.2, rNcut=  0.1, rNnz=    16.8
t=   61.7, lb=5.99442e+03, vio=1.77944e+02, msttime=   0.2, rNcut=  0.2, rNnz=    19.3
t=   67.7, lb=5.99456e+03, vio=7.78164e+01, msttime=   0.1, rNcut=  0.2, rNnz=    21.8
t=   72.3, lb=5.99469e+03, vio=3.84470e+01, msttime=   0.2, rNcut=  0.2, rNnz=    24.4
t=   79.0, lb=5.99504e+03, vio=2.94714e+01, msttime=   0.2, rNcut=  0.2, rNnz=    27.0
t=   86.5, lb=5.99543e+03, vio=2.38065e+01, msttime=   0.2, rNcut=  0.2, rNnz=    29.5
t=   94.0, lb=5.99584e+03, vio=2.85124e+01, msttime=   0.3, rNcut=  0.3, rNnz=    32.1
t=  100.5, lb=5.99626e+03, vio=2.83942e+01, msttime=   0.3, rNcut=  0.3, rNnz=    34.7
t=  105.5, lb=5.99675e+03, vio=2.93687e+01, msttime=   0.3, rNcut=  0.3, rNnz=    37.3
t=  111.9, lb=5.99703e+03, vio=2.98261e+01, msttime=   0.2, rNcut=  0.3, rNnz=    39.8
t=  116.2, lb=5.99745e+03, vio=3.16581e+01, msttime=   0.3, rNcut=  0.3, rNnz=    42.4
t=  121.6, lb=5.99790e+03, vio=2.78515e+01, msttime=   0.3, rNcut=  0.4, rNnz=    45.0
t=  126.9, lb=5.99839e+03, vio=2.74544e+01, msttime=   0.3, rNcut=  0.4, rNnz=    47.6
t=  132.2, lb=5.99880e+03, vio=2.68167e+01, msttime=   0.3, rNcut=  0.4, rNnz=    50.1
t=  138.7, lb=5.99922e+03, vio=3.24802e+01, msttime=   0.3, rNcut=  0.4, rNnz=    52.7
t=  143.1, lb=5.99966e+03, vio=2.89852e+01, msttime=   0.3, rNcut=  0.4, rNnz=    55.2
t=  150.6, lb=6.00002e+03, vio=3.03893e+01, msttime=   0.2, rNcut=  0.5, rNnz=    57.8
t=  155.5, lb=6.00040e+03, vio=2.94789e+01, msttime=   0.3, rNcut=  0.5, rNnz=    60.4
t=  161.1, lb=6.00081e+03, vio=2.61429e+01, msttime=   0.2, rNcut=  0.5, rNnz=    63.0
t=  167.1, lb=6.00118e+03, vio=2.44467e+01, msttime=   0.3, rNcut=  0.5, rNnz=    65.5
t=  175.7, lb=6.00156e+03, vio=2.34019e+01, msttime=   0.3, rNcut=  0.5, rNnz=    68.1
t=  182.4, lb=6.00192e+03, vio=2.53204e+01, msttime=   0.2, rNcut=  0.6, rNnz=    70.7
t=  190.5, lb=6.00228e+03, vio=2.96276e+01, msttime=   0.2, rNcut=  0.6, rNnz=    73.3
t=  196.7, lb=6.00267e+03, vio=2.53413e+01, msttime=   0.3, rNcut=  0.6, rNnz=    75.9
t=  202.5, lb=6.00308e+03, vio=2.75397e+01, msttime=   0.3, rNcut=  0.6, rNnz=    78.5
t=  206.5, lb=6.00351e+03, vio=2.76331e+01, msttime=   0.4, rNcut=  0.6, rNnz=    81.1
t=  213.7, lb=6.00390e+03, vio=3.08805e+01, msttime=   0.2, rNcut=  0.7, rNnz=    83.7
t=  218.8, lb=6.00431e+03, vio=2.29920e+01, msttime=   0.4, rNcut=  0.7, rNnz=    86.2
t=  229.1, lb=6.00467e+03, vio=3.02792e+01, msttime=   0.3, rNcut=  0.7, rNnz=    88.8
t=  235.2, lb=6.00512e+03, vio=2.22339e+01, msttime=   0.4, rNcut=  0.7, rNnz=    91.4
t=  242.0, lb=6.00538e+03, vio=2.62785e+01, msttime=   0.4, rNcut=  0.7, rNnz=    94.0
t=  251.0, lb=6.00584e+03, vio=2.95090e+01, msttime=   0.4, rNcut=  0.8, rNnz=    96.6
t=  256.8, lb=6.00632e+03, vio=2.64078e+01, msttime=   0.4, rNcut=  0.8, rNnz=    99.2
t=  262.7, lb=6.00674e+03, vio=2.65341e+01, msttime=   0.3, rNcut=  0.8, rNnz=   101.8
t=  271.6, lb=6.00718e+03, vio=2.52047e+01, msttime=   0.4, rNcut=  0.8, rNnz=   104.4
t=  278.0, lb=6.00760e+03, vio=2.53038e+01, msttime=   0.3, rNcut=  0.8, rNnz=   107.0
t=  283.5, lb=6.00798e+03, vio=2.57696e+01, msttime=   0.2, rNcut=  0.9, rNnz=   109.6
t=  288.6, lb=6.00833e+03, vio=3.04862e+01, msttime=   0.4, rNcut=  0.9, rNnz=   112.2
t=  294.3, lb=6.00881e+03, vio=3.24027e+01, msttime=   0.3, rNcut=  0.9, rNnz=   114.8
t=  305.3, lb=6.00928e+03, vio=2.63615e+01, msttime=   0.4, rNcut=  0.9, rNnz=   117.3
t=  309.4, lb=6.00971e+03, vio=2.42648e+01, msttime=   0.4, rNcut=  0.9, rNnz=   119.9
t=  319.5, lb=6.01005e+03, vio=2.91102e+01, msttime=   0.4, rNcut=  1.0, rNnz=   122.5
t=  325.4, lb=6.01049e+03, vio=2.99882e+01, msttime=   0.5, rNcut=  1.0, rNnz=   125.1
t=  332.0, lb=6.01083e+03, vio=2.35310e+01, msttime=   0.3, rNcut=  1.0, rNnz=   127.7
t=  339.7, lb=6.01115e+03, vio=2.49091e+01, msttime=   0.3, rNcut=  1.0, rNnz=   130.8
t=  342.3, lb=6.01157e+03, vio=2.26405e+01, msttime=   0.5, rNcut=  1.0, rNnz=   133.9
t=  344.1, lb=6.01199e+03, vio=2.72346e+01, msttime=   0.3, rNcut=  1.1, rNnz=   136.6
t=  345.2, lb=6.01241e+03, vio=1.59719e+01, msttime=   0.3, rNcut=  1.1, rNnz=   139.2
t=  346.2, lb=6.01268e+03, vio=1.90922e+01, msttime=   0.3, rNcut=  1.1, rNnz=   141.8
t=  347.3, lb=6.01296e+03, vio=1.80952e+01, msttime=   0.4, rNcut=  1.1, rNnz=   144.5
t=  348.7, lb=6.01325e+03, vio=1.80269e+01, msttime=   0.4, rNcut=  1.1, rNnz=   147.1
t=  350.2, lb=6.01353e+03, vio=2.02718e+01, msttime=   0.2, rNcut=  1.2, rNnz=   149.7
t=  351.2, lb=6.01383e+03, vio=1.86288e+01, msttime=   0.3, rNcut=  1.2, rNnz=   152.3
t=  351.8, lb=6.01402e+03, vio=1.39610e+01, msttime=   0.3, rNcut=  1.2, rNnz=   154.9
t=  352.2, lb=6.01423e+03, vio=1.45608e+01, msttime=   0.3, rNcut=  1.2, rNnz=   157.5
t=  352.6, lb=6.01425e+03, vio=6.49860e+00, msttime=   0.1, rNcut=  1.2, rNnz=   160.1
t=  353.0, lb=6.01430e+03, vio=5.51643e+00, msttime=   0.2, rNcut=  1.3, rNnz=   162.8
t=  353.3, lb=6.01435e+03, vio=3.64135e+00, msttime=   0.2, rNcut=  1.3, rNnz=   165.3
t=  353.8, lb=6.01436e+03, vio=5.94809e+00, msttime=   0.2, rNcut=  1.3, rNnz=   168.0
t=  354.2, lb=6.01444e+03, vio=5.67637e+00, msttime=   0.2, rNcut=  1.3, rNnz=   170.5
t=  354.6, lb=6.01446e+03, vio=9.08315e+00, msttime=   0.2, rNcut=  1.3, rNnz=   173.1
t=  355.0, lb=6.01457e+03, vio=4.84261e+00, msttime=   0.3, rNcut=  1.4, rNnz=   175.7
t=  355.3, lb=6.01459e+03, vio=5.65252e+00, msttime=   0.2, rNcut=  1.4, rNnz=   178.3
t=  355.6, lb=6.01465e+03, vio=3.81513e+00, msttime=   0.3, rNcut=  1.4, rNnz=   180.9
t=  356.1, lb=6.01467e+03, vio=4.27588e+00, msttime=   0.2, rNcut=  1.4, rNnz=   183.5
t=  356.4, lb=6.01472e+03, vio=5.98535e+00, msttime=   0.3, rNcut=  1.4, rNnz=   186.1
t=  356.9, lb=6.01478e+03, vio=4.94240e+00, msttime=   0.3, rNcut=  1.5, rNnz=   188.6
t=  357.4, lb=6.01481e+03, vio=4.35937e+00, msttime=   0.2, rNcut=  1.5, rNnz=   191.2
t=  357.8, lb=6.01487e+03, vio=5.50191e+00, msttime=   0.2, rNcut=  1.5, rNnz=   193.8
t=  358.2, lb=6.01496e+03, vio=4.55966e+00, msttime=   0.2, rNcut=  1.5, rNnz=   196.4
t=  358.8, lb=6.01502e+03, vio=4.00122e+00, msttime=   0.3, rNcut=  1.5, rNnz=   199.0
t=  359.3, lb=6.01510e+03, vio=5.00506e+00, msttime=   0.4, rNcut=  1.6, rNnz=   204.2
t=  359.7, lb=6.01514e+03, vio=2.25313e+00, msttime=   0.3, rNcut=  1.6, rNnz=   206.8
t=  360.1, lb=6.01516e+03, vio=6.36750e+00, msttime=   0.2, rNcut=  1.6, rNnz=   209.4
t=  360.7, lb=6.01526e+03, vio=3.79182e+00, msttime=   0.3, rNcut=  1.6, rNnz=   212.0
t=  361.1, lb=6.01532e+03, vio=3.75685e+00, msttime=   0.3, rNcut=  1.7, rNnz=   217.2
t=  361.4, lb=6.01535e+03, vio=2.78585e+00, msttime=   0.3, rNcut=  1.7, rNnz=   219.9
t=  361.8, lb=6.01538e+03, vio=3.78959e+00, msttime=   0.3, rNcut=  1.7, rNnz=   222.5
t=  362.2, lb=6.01544e+03, vio=2.91214e+00, msttime=   0.3, rNcut=  1.7, rNnz=   225.1
t=  362.9, lb=6.01547e+03, vio=1.91660e+00, msttime=   0.3, rNcut=  1.8, rNnz=   227.6
t=  363.7, lb=6.01551e+03, vio=2.26731e+00, msttime=   0.3, rNcut=  1.8, rNnz=   230.2
t=  364.3, lb=6.01557e+03, vio=1.70657e+00, msttime=   0.4, rNcut=  1.8, rNnz=   235.3
t=  364.9, lb=6.01561e+03, vio=3.11427e+00, msttime=   0.4, rNcut=  1.9, rNnz=   240.6
t=  365.5, lb=6.01569e+03, vio=3.02228e+00, msttime=   0.4, rNcut=  1.9, rNnz=   245.8
t=  366.3, lb=6.01577e+03, vio=2.36111e+00, msttime=   0.5, rNcut=  1.9, rNnz=   251.0
t=  367.0, lb=6.01585e+03, vio=2.73340e+00, msttime=   0.5, rNcut=  2.0, rNnz=   256.2
t=  367.6, lb=6.01594e+03, vio=1.77754e+00, msttime=   0.5, rNcut=  2.0, rNnz=   261.9
t=  368.3, lb=6.01598e+03, vio=3.12344e+00, msttime=   0.4, rNcut=  2.1, rNnz=   267.8
t=  369.1, lb=6.01607e+03, vio=1.12732e+00, msttime=   0.6, rNcut=  2.1, rNnz=   273.0
t=  370.0, lb=6.01613e+03, vio=2.79583e+00, msttime=   0.7, rNcut=  2.2, rNnz=   280.7
t=  370.8, lb=6.01621e+03, vio=2.10980e+00, msttime=   0.6, rNcut=  2.2, rNnz=   288.7
t=  371.5, lb=6.01629e+03, vio=8.79393e-01, msttime=   0.6, rNcut=  2.3, rNnz=   293.9
t=  372.0, lb=6.01631e+03, vio=7.23270e-01, msttime=   0.4, rNcut=  2.3, rNnz=   299.1
t=  372.5, lb=6.01632e+03, vio=8.56182e-01, msttime=   0.4, rNcut=  2.3, rNnz=   304.3
t=  373.0, lb=6.01634e+03, vio=7.64846e-01, msttime=   0.4, rNcut=  2.4, rNnz=   309.5
t=  373.6, lb=6.01635e+03, vio=5.33925e-01, msttime=   0.6, rNcut=  2.4, rNnz=   314.6
t=  374.1, lb=6.01637e+03, vio=7.81306e-01, msttime=   0.5, rNcut=  2.5, rNnz=   319.8
t=  375.0, lb=6.01638e+03, vio=5.41335e-01, msttime=   0.6, rNcut=  2.5, rNnz=   324.9
t=  375.6, lb=6.01640e+03, vio=3.69068e-01, msttime=   0.6, rNcut=  2.6, rNnz=   335.2
t=  376.3, lb=6.01641e+03, vio=4.04529e-01, msttime=   0.5, rNcut=  2.6, rNnz=   340.5
t=  376.9, lb=6.01642e+03, vio=1.95611e-01, msttime=   0.6, rNcut=  2.7, rNnz=   348.2
t=  377.9, lb=6.01643e+03, vio=2.54017e-01, msttime=   0.8, rNcut=  2.8, rNnz=   358.3
t=  378.9, lb=6.01645e+03, vio=8.80731e-02, msttime=   0.9, rNcut=  2.9, rNnz=   371.0
t=  379.9, lb=6.01646e+03, vio=2.63737e-01, msttime=   0.9, rNcut=  3.0, rNnz=   386.5
t=  381.1, lb=6.01648e+03, vio=1.00299e-01, msttime=   1.2, rNcut=  3.1, rNnz=   400.5
t=  382.3, lb=6.01649e+03, vio=9.33022e-02, msttime=   1.0, rNcut=  3.2, rNnz=   418.2
t=  383.7, lb=6.01649e+03, vio=1.43391e-01, msttime=   1.4, rNcut=  3.3, rNnz=   435.7
t=  385.4, lb=6.01650e+03, vio=3.97028e-02, msttime=   1.6, rNcut=  3.5, rNnz=   458.2
t=  386.9, lb=6.01650e+03, vio=3.08636e-02, msttime=   1.4, rNcut=  3.7, rNnz=   481.6
t=  388.6, lb=6.01650e+03, vio=8.14669e-02, msttime=   1.5, rNcut=  3.9, rNnz=   504.1
t=  390.6, lb=6.01651e+03, vio=1.11804e-02, msttime=   1.9, rNcut=  4.1, rNnz=   531.4
t=  393.1, lb=6.01651e+03, vio=6.27422e-03, msttime=   2.3, rNcut=  4.3, rNnz=   559.4
t=  395.4, lb=6.01651e+03, vio=1.89892e-03, msttime=   2.3, rNcut=  4.5, rNnz=   592.9
t=  397.3, lb=6.01651e+03, vio=1.26346e-03, msttime=   1.8, rNcut=  4.6, rNnz=   605.1
t=  398.5, lb=6.01651e+03, vio=2.68449e-04, msttime=   1.0, rNcut=  4.7, rNnz=   613.4
t=  399.0, lb=6.01651e+03, vio=3.20652e-04, msttime=   0.5, rNcut=  4.7, rNnz=   613.6
t=  399.7, lb=6.01651e+03, vio=2.99980e-04, msttime=   0.6, rNcut=  4.7, rNnz=   613.9
t=  400.4, lb=6.01651e+03, vio=3.89617e-04, msttime=   0.6, rNcut=  4.7, rNnz=   614.2
t=  401.1, lb=6.01651e+03, vio=3.74930e-04, msttime=   0.6, rNcut=  4.7, rNnz=   614.5
t=  401.8, lb=6.01651e+03, vio=2.36957e-04, msttime=   0.7, rNcut=  4.7, rNnz=   614.9
t=  402.5, lb=6.01651e+03, vio=1.73493e-04, msttime=   0.6, rNcut=  4.7, rNnz=   615.1
t=  403.0, lb=6.01651e+03, vio=1.73786e-04, msttime=   0.5, rNcut=  4.7, rNnz=   615.1
t=  403.5, lb=6.01651e+03, vio=1.43508e-04, msttime=   0.5, rNcut=  4.7, rNnz=   615.2
t=  404.1, lb=6.01651e+03, vio=1.46482e-04, msttime=   0.5, rNcut=  4.7, rNnz=   615.2
t=  405.0, lb=6.01651e+03, vio=2.04806e-04, msttime=   0.5, rNcut=  4.7, rNnz=   615.3
t=  406.6, lb=6.01651e+03, vio=1.87169e-04, msttime=   0.5, rNcut=  4.7, rNnz=   615.3
t=  408.9, lb=6.01651e+03, vio=1.57621e-04, msttime=   0.5, rNcut=  4.7, rNnz=   615.3
722.679933 seconds (1.64 M allocations: 1.253 GiB, 1.74% compilation time)

julia> Sys.free_memory() / 1024^3
78.28041076660156

julia> sub = mst = nothing;

julia> GC.gc()

julia> Sys.free_memory() / 1024^3
120.69858932495117

julia> GC.gc()

julia> Sys.free_memory() / 1024^3
120.73344421386719

julia> const tks = nothing; GC.gc(); Sys.free_memory() / 1024^3
120.75796127319336

julia> const tks = similar(Envs, Task); # Notice here I start the 2nd run, in an identical style

julia> t, T = 1, 23;

julia> ################################################
       # if S doesn't need to be modified, reuse Envs
       ################################################
       Random.seed!(hash(1));

julia> include("src/Settings.jl");

julia> include("src/Case2383.jl");

julia> include("src/WindGen.jl");

julia> include("src/Static.jl");

julia> include("src/Uvx.jl");

julia> include("src/General.jl");

julia> include("src/TSSP.jl");

julia> sub = similar(Envs, General.SubType);

julia> mst = TSSP.build_2ssp!(sub, t, T, tks);
495.025740 seconds (66.51 G allocations: 1.956 TiB, 79.17% gc time, 56.34% compilation time)

julia> @time TSSP.at_root_lp(mst, sub, tks)
pi_warm_up phase...
./j: line 1: 3559004 Killed                  julia --project=. --threads=96,1
usr@usr:~/julia_projects/uc/Uc$ ./j
               _
   _       _ _(_)_     |  Documentation: https://docs.julialang.org
  (_)     | (_) (_)    |
   _ _   _| |_  __ _   |  Type "?" for help, "]?" for Pkg help.
  | | | | | | |/ _` |  |
  | | |_| | | | (_| |  |  Version 1.12.6 (2026-04-09)
 _/ |\__'_|_|_|\__'_|  |  Official https://julialang.org release
|__/                   |


julia> Sys.free_memory() / 1024^3
496.64697265625

Notice the Out-of-Memory kill just above the second Julia banner.
Note that with GC.gc() I can retrieve at most about 42.4 GiB of memory, which is only a small portion of the memory I had used. (My total free memory is as shown at the end.)

I sent you the source code via e-mail. Just configure
const S = 50 * Threads.nthreads()
properly according to the hardware and then it can be run.

See also your previous question

I tried

julia> ccall(:malloc_trim, Int32, (Int32,), 0)
1

(I had set all names to nothing, then ran GC.gc(), and then executed the malloc_trim above.) But the memory usage cannot be decreased (e.g. as observed in htop).

In my code I built a vector of JuMP Models (via Gurobi's direct_model). Some memory was allocated when building the models, and when the solver starts to run, the memory usage shoots up again aggressively. Finally my algorithm terminates, but then there is no way to reclaim the memory, so if I submit the same code to the REPL a second time, I hit OOM and Julia gets killed.

I did a small test with this toy-example, which shows that GC.gc() is working normally, if all objects are defined locally inside functions.

I guess I just need to write an all-encompassing main() function, like those I learnt in C programming class. :smiling_face_with_tear:

code
# Helpers for constructing JuMP direct-mode models on top of pre-allocated Gurobi Envs.
module Settings
import JuMP, Gurobi

# Build a direct-mode model backed by the i-th existing Env and store it in mv[i].
# String names are disabled on creation to reduce memory use for large models.
function Model!(mv::Vector{JuMP.Model}, i, #=with existing ones=# ev::Vector{Gurobi.Env})
    model = JuMP.direct_model(Gurobi.Optimizer(ev[i]))
    JuMP.set_string_names_on_creation(model, false)
    return mv[i] = model
end

# Fill every slot of mv in parallel; each task writes a distinct index, so no
# synchronization is required.
function Model!(mv::Vector{JuMP.Model}, #=with existing ones=# ev::Vector{Gurobi.Env})
    Threads.@threads for i in eachindex(mv)
        Model!(mv, i, ev)
    end
end

# Print the (default, interactive) thread counts of the current session.
function printinfo()
    th = map(Threads.nthreads, (:default, :interactive))
    return println("Settings> Threads=$th")
end
end;

import Gurobi, JuMP
# Solver parameters applied to every Env: silent output, one solver thread,
# and a 1-second time limit per optimize call.
const CONFIG = Dict{String,Any}("OutputFlag"=>0,"Threads"=>1,"TimeLimit"=>1);
# Convenience constructor: a fresh Gurobi environment pre-loaded with CONFIG.
Env() = Gurobi.Env(CONFIG);
# Grow model `m` with a toy MILP of size N: two binary vectors and a
# nonnegative N×N matrix linked by the standard McCormick linearization of the
# binary product (w <= u, w <= v, w >= u + v - 1), plus a random linear
# objective over the matrix variables. Calling this repeatedly keeps adding
# variables/constraints to the same model.
function build!(m, N)
    u = JuMP.@variable(m, [1:N], binary=true)
    v = JuMP.@variable(m, [1:N], binary=true)
    w = JuMP.@variable(m, [1:N, 1:N], lower_bound=0)
    JuMP.@constraint(m, [a=1:N, b=1:N], w[a, b] <= u[a])
    JuMP.@constraint(m, [a=1:N, b=1:N], w[a, b] <= v[b])
    JuMP.@constraint(m, [a=1:N, b=1:N], w[a, b] >= u[a] + v[b] - 1)
    JuMP.@objective(m, Min, sum(rand(-1:.0017:1) * q for q in w))
end
# Stress-test driver: create S Gurobi Envs in parallel, attach one direct-mode
# JuMP model to each, then repeat K rounds of growing every model with
# build!(·, N) and solving them all in parallel. All state is function-local,
# so it becomes unreachable (and collectible) once main returns.
function main(S, N, K)
    Settings.printinfo()
    envs = @time "Create Gurobi Envs:" begin
        buf = Vector{Gurobi.Env}(undef, S)
        Threads.@threads for idx in eachindex(buf)
            buf[idx] = Env()
        end
        buf
    end
    # Pair each model with its Gurobi backend so the solve loop can call the
    # C API (GRBoptimize) directly.
    models = similar(envs, JuMP.Model)
    Settings.Model!(models, envs)
    sub = map(n -> (m = n, o = n.moi_backend), models)
    for k in 1:K
        @time "($k-th) JuMP modifying models:" Threads.@threads for pair in sub
            build!(pair.m, N)
        end
        @time "Solving MILPs:" Threads.@threads for pair in sub
            Gurobi.GRBoptimize(pair.o)
        end
    end
end;

# run the following code repeatedly
Sys.free_memory()/1024^3  # free RAM in GiB — watch this between runs to spot leaks
main(4, 200, 23)          # S=4 envs/models, size N=200, K=23 build/solve rounds
GC.gc()                   # main's state is now unreachable; force a collection