High Erlang process memory executing mnesia:dirty_select/2
I have an mnesia table gen_saga_persist which is queried by a process every 1 second.
The function used is mnesia:dirty_select(gen_saga_persist, [{#gen_saga_persist{id = '$1', app = undefined, partition = 33, execution_type = auto, _ = '_'}, [], ['$1']}]).
As the table grows, I observed that the memory of the process querying it grows as well. Can anyone help me understand why my process memory is increasing (mnesia newbie here), since I am not even storing the result in my process state or anywhere else?
Mnesia table info:
[{access_mode,read_write},
{active_replicas,[server@localhost]},
{all_nodes,[server@localhost]},
{arity,12},
{attributes,[id,partition,app,type,subtype,name,status,
execution_type,retry_count,update_time,controller_state]},
{checkpoints,[]},
{commit_work,[{index,ordered_set,
[{{9,ordered},{ram,#Ref<0.3472765205.1199964162.76896>}},
{{8,ordered},{ram,#Ref<0.3472765205.1199964162.76882>}},
{{7,ordered},{ram,#Ref<0.3472765205.1199964163.74776>}},
{{6,ordered},{ram,#Ref<0.3472765205.1199964162.76856>}},
{{5,ordered},{ram,#Ref<0.3472765205.1199964161.79429>}},
{{4,ordered},{ram,#Ref<0.3472765205.1199964161.79409>}},
{{3,ordered},{ram,#Ref<0.3472765205.1199964162.76810>}}]}]},
{cookie,{{1632211298276938000,-576460752303418559,1},
server@localhost}},
{cstruct,{cstruct,gen_saga_persist,ordered_set,[],
[server@localhost],
[],[],0,read_write,false,
[{3,ordered},
{4,ordered},
{5,ordered},
{6,ordered},
{7,ordered},
{8,ordered},
{9,ordered}],
[],false,gen_saga_persist,
[id,partition,app,type,subtype,name,status,execution_type,
retry_count,update_time,controller_state],
[],[],[],
{{1632211298276938000,-576460752303418559,1},
server@localhost},
{{2,0},[]}}},
{disc_copies,[server@localhost]},
{disc_only_copies,[]},
{external_copies,[]},
{frag_properties,[]},
{index,[9,8,7,6,5,4,3]},
{index_info,{index,ordered_set,
[{{9,ordered},{ram,#Ref<0.3472765205.1199964162.76896>}},
{{8,ordered},{ram,#Ref<0.3472765205.1199964162.76882>}},
{{7,ordered},{ram,#Ref<0.3472765205.1199964163.74776>}},
{{6,ordered},{ram,#Ref<0.3472765205.1199964162.76856>}},
{{5,ordered},{ram,#Ref<0.3472765205.1199964161.79429>}},
{{4,ordered},{ram,#Ref<0.3472765205.1199964161.79409>}},
{{3,ordered},{ram,#Ref<0.3472765205.1199964162.76810>}}]}},
{load_by_force,false},
{load_node,server@localhost},
{load_order,0},
{load_reason,local_only},
{local_content,false},
{majority,false},
{master_nodes,[]},
{memory,2455944},
{ram_copies,[]},
{record_name,gen_saga_persist},
{record_validation,{gen_saga_persist,12,ordered_set}},
{size,7747},
{snmp,[]},
{storage_properties,[]},
{storage_type,disc_copies},
{subscribers,[]},
{type,ordered_set},
{user_properties,[]},
{version,{{2,0},[]}},
{where_to_commit,[{server@localhost,disc_copies}]},
{where_to_read,server@localhost},
{where_to_wlock,{[server@localhost],false}},
{where_to_write,[server@localhost]},
{wild_pattern,#gen_saga_persist{id = '_',partition = '_',
app = '_',type = '_',subtype = '_',name = '_',status = '_',
execution_type = '_',retry_count = '_',update_time = '_',
controller_state = '_'}},
{{index,3},#Ref<0.3472765205.1199964162.76810>},
{{index,4},#Ref<0.3472765205.1199964161.79409>},
{{index,5},#Ref<0.3472765205.1199964161.79429>},
{{index,6},#Ref<0.3472765205.1199964162.76856>},
{{index,7},#Ref<0.3472765205.1199964163.74776>},
{{index,8},#Ref<0.3472765205.1199964162.76882>},
{{index,9},#Ref<0.3472765205.1199964162.76896>}]
Process info:
[{current_function,{timer,sleep,1}},
{initial_call,{bin_updates_dispatcher,poll_func,3}},
{status,waiting},
{message_queue_len,0},
{links,[]},
{dictionary,[]},
{trap_exit,false},
{error_handler,error_handler},
{priority,normal},
{group_leader,<105603.71.0>},
{total_heap_size,4781410},
{heap_size,1199557},
{stack_size,6},
{reductions,155758726},
{garbage_collection,[{max_heap_size,#{error_logger => true,kill => true,size => 0}},
{min_bin_vheap_size,46422},
{min_heap_size,233},
{fullsweep_after,65535},
{minor_gcs,11}]},
{suspending,[]}]
Process function:
poll_gen_saga_persist_table(Delay) ->
    Pid = spawn(bin_updates_dispatcher, poll_func,
                [Delay, gen_saga_persist,
                 [{#gen_saga_persist{id = '$1', app = undefined, partition = 33,
                                     execution_type = auto, _ = '_'},
                   [], ['$1']}]]),
    unlink(Pid),
    Pid.

poll_func(Delay, TableName, [{MatchSpec, Guards, Return}]) ->
    mnesia:dirty_select(TableName, [{MatchSpec, Guards, Return}]),
    timer:sleep(Delay),
    poll_func(Delay, TableName, [{MatchSpec, Guards, Return}]).
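One way to check whether this growth is just uncollected garbage from the select results is to force a garbage collection on the polling process and compare its heap size before and after; check_heap below is only an illustrative helper, not part of the original code:

    %% Illustrative helper: if total_heap_size drops sharply after a forced GC,
    %% the growth is collectible garbage produced by the dirty_select results
    %% rather than a leak.
    check_heap(Pid) ->
        Before = erlang:process_info(Pid, total_heap_size),
        true = erlang:garbage_collect(Pid),
        After = erlang:process_info(Pid, total_heap_size),
        {Before, After}.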
Solution 1:[1]
I think this is not related to mnesia itself; it is caused by your poll_func, which loops on itself forever.
poll_func(Delay, TableName, [{MatchSpec, Guards, Return}]) ->
    mnesia:dirty_select(TableName, [{MatchSpec, Guards, Return}]),
    timer:sleep(Delay),
    poll_func(Delay, TableName, [{MatchSpec, Guards, Return}]).  %% <--- it cannot release the memory used by each select result
You can rewrite the following code as a gen_server, driving the polling with a timer:send_after message,
Pid = spawn(bin_updates_dispatcher, poll_func,
            [Delay, gen_saga_persist,
             [{#gen_saga_persist{id = '$1', app = undefined, partition = 33,
                                 execution_type = auto, _ = '_'},
               [], ['$1']}]]),
and simplify poll_func so that each invocation performs a single select:
poll_func(Delay, TableName, [{MatchSpec, Guards, Return}]) ->
    mnesia:dirty_select(TableName, [{MatchSpec, Guards, Return}]).
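A minimal sketch of that gen_server approach, assuming a hypothetical module name gen_saga_poller and passing the table name and match spec in from the caller; it re-arms a timer:send_after/2 timer in handle_info/2 instead of sleeping in a tail-recursive loop:

    -module(gen_saga_poller).
    -behaviour(gen_server).

    -export([start_link/3]).
    -export([init/1, handle_call/3, handle_cast/2, handle_info/2]).

    -record(state, {delay, table, match_spec}).

    %% The caller supplies the table name and the same match spec used above,
    %% e.g. built from the #gen_saga_persist{} record.
    start_link(Delay, Table, MatchSpec) ->
        gen_server:start_link(?MODULE, {Delay, Table, MatchSpec}, []).

    init({Delay, Table, MatchSpec}) ->
        %% Arm the first poll; the timer message replaces timer:sleep/1.
        {ok, _TRef} = timer:send_after(Delay, poll),
        {ok, #state{delay = Delay, table = Table, match_spec = MatchSpec}}.

    handle_call(_Request, _From, State) ->
        {reply, ok, State}.

    handle_cast(_Msg, State) ->
        {noreply, State}.

    handle_info(poll, #state{delay = Delay, table = Table,
                             match_spec = MatchSpec} = State) ->
        %% The select result is not kept in the state, so it becomes garbage
        %% as soon as this callback returns and can be collected normally.
        _Ids = mnesia:dirty_select(Table, MatchSpec),
        {ok, _TRef} = timer:send_after(Delay, poll),
        {noreply, State}.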
That way the memory used by each dirty_select result can be released once the call completes, and reused on the next poll.
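A possible usage sketch for that poller from the shell (the module name is the illustrative one from the sketch above, and the record definition must be loaded, e.g. via rr/1):

    MatchSpec = [{#gen_saga_persist{id = '$1', app = undefined, partition = 33,
                                    execution_type = auto, _ = '_'},
                  [], ['$1']}],
    {ok, Pid} = gen_saga_poller:start_link(1000, gen_saga_persist, MatchSpec),
    %% Let a few polls run, then check that the heap stays bounded.
    timer:sleep(10000),
    erlang:process_info(Pid, [memory, total_heap_size]).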
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow
| Solution | Source |
|---|---|
| Solution 1 | |
