[rabbitmq-discuss] Producer eating up RAM
Suhail Doshi
suhail at mixpanel.com
Mon Oct 5 02:08:02 BST 2009
Here's part of the queue code we use:
-export([amqp_lifecycle/0, send_message/5, log/2]).
-include_lib("rabbitmq-erlang-client/rabbitmq_server/include/rabbit.hrl").
-include_lib("rabbitmq-erlang-client/rabbitmq_server/include/rabbit_framing.hrl").
-include("rabbitmq-erlang-client/include/amqp_client.hrl").
-record(rabbit_info, {channel, ticket, exchange, routing_key}).
%% @doc Establish an AMQP connection and channel, obtain an access
%% ticket, declare a durable queue and a durable direct exchange, bind
%% the queue to the exchange, and return a #rabbit_info{} record
%% carrying the handles a producer needs (channel, ticket, exchange,
%% routing key).
%% NOTE(review): this uses the pre-2010 ticket-based Erlang client API
%% (access.request was removed from later AMQP clients) — confirm the
%% client library version before reusing this code.
amqp_lifecycle() ->
    User = "user",
    Password = "pass",
    %% In this client version the "realm" also acts as the virtual host.
    VHost = <<"realm">>,
    Connection = amqp_connection:start(User, Password, "127.0.0.1", VHost),
    Channel = amqp_connection:open_channel(Connection),
    AccessRequest = #'access.request'{realm = VHost,
                                      exclusive = false,
                                      passive = true,
                                      active = true,
                                      write = true,
                                      read = true},
    %% Assertive match: crash here if the broker refuses access.
    #'access.request_ok'{ticket = Ticket} =
        amqp_channel:call(Channel, AccessRequest),
    QueueName = <<"q">>,
    ExchangeName = <<"x">>,
    BindingKey = <<"b">>,
    DeclareQueue = #'queue.declare'{ticket = Ticket,
                                    queue = QueueName,
                                    passive = false,
                                    durable = true,
                                    exclusive = false,
                                    auto_delete = false,
                                    nowait = false,
                                    arguments = []},
    #'queue.declare_ok'{queue = QueueName} =
        amqp_channel:call(Channel, DeclareQueue),
    DeclareExchange = #'exchange.declare'{ticket = Ticket,
                                          exchange = ExchangeName,
                                          type = <<"direct">>,
                                          passive = false,
                                          durable = true,
                                          auto_delete = false,
                                          internal = false,
                                          nowait = false,
                                          arguments = []},
    #'exchange.declare_ok'{} = amqp_channel:call(Channel, DeclareExchange),
    BindQueue = #'queue.bind'{ticket = Ticket,
                              queue = QueueName,
                              exchange = ExchangeName,
                              routing_key = BindingKey,
                              nowait = false,
                              arguments = []},
    #'queue.bind_ok'{} = amqp_channel:call(Channel, BindQueue),
    %% Hand back everything a publisher needs; see send_message/5.
    #rabbit_info{channel = Channel,
                 ticket = Ticket,
                 exchange = ExchangeName,
                 routing_key = BindingKey}.
%% @doc Publish Payload to exchange X with the given RoutingKey over
%% Channel, using the access Ticket obtained in amqp_lifecycle/0.
%% The message is marked persistent (delivery_mode = 2) so the broker
%% writes it to disk for durable queues. Fire-and-forget: uses
%% amqp_channel:cast/3, so no broker confirmation is awaited.
%% Fix: the original had the word "plz" line-wrapped onto its own line
%% after the "%% Persistence" comment, which made the module
%% unparseable; it has been folded back into the comment.
send_message(Channel, Ticket, X, RoutingKey, Payload) ->
    BasicPublish = #'basic.publish'{ticket = Ticket,
                                    exchange = X,
                                    routing_key = RoutingKey,
                                    mandatory = false,
                                    immediate = false},
    BasicProperties = amqp_util:basic_properties(),
    Properties = BasicProperties#'P_basic'{delivery_mode = 2}, %% Persistence plz
    %% class_id 60 is the AMQP "basic" content class.
    Content = #content{class_id = 60,
                       properties = Properties,
                       properties_bin = none,
                       payload_fragments_rev = [Payload]},
    amqp_channel:cast(Channel, BasicPublish, Content).
On Sun, Oct 4, 2009 at 6:03 PM, Suhail Doshi <suhail at mixpanel.com> wrote:
> Problem is, it only happens every 5+ hours and not sure how to duplicate it
> but here is a crash dump:
> =erl_crash_dump:0.1
> Sat Oct 3 20:18:09 2009
> Slogan: eheap_alloc: Cannot allocate 6960012640 bytes of memory (of type
> "heap").
> System version: Erlang R13B01 (erts-5.7.2) [source] [64-bit] [smp:4:4]
> [rq:4] [async-threads:0] [hipe] [kernel-poll:false]
> Compiled: Tue Jun 23 19:56:26 2009
> Atoms: 7552
> =memory
> total: 11040213416
> processes: 10989180488
> processes_used: 10989135536
> system: 51032928
> atom: 507961
> atom_used: 494181
> binary: 43474800
> code: 4668039
> ets: 311600
> =hash_table:atom_tab
> size: 4813
> used: 3776
> objs: 7552
> depth: 8
> =index_table:atom_tab
> size: 8192
> limit: 1048576
> entries: 7552
> =hash_table:module_code
> size: 97
> used: 71
> objs: 119
> depth: 4
> =index_table:module_code
> size: 1024
> limit: 65536
> entries: 119
> =hash_table:export_list
> size: 2411
> used: 1770
> objs: 3269
> depth: 8
> =index_table:export_list
> size: 4096
> limit: 65536
> entries: 3269
> =hash_table:secondary_export_table
> size: 97
> used: 0
> objs: 0
> depth: 0
> =hash_table:process_reg
> size: 47
> used: 31
> objs: 43
> depth: 4
> =hash_table:fun_table
> size: 397
> used: 301
> objs: 566
> depth: 6
> =hash_table:node_table
> size: 11
> used: 1
> objs: 1
> depth: 1
> =hash_table:dist_table
> size: 11
> used: 1
> objs: 1
> depth: 1
> =allocated_areas
> sys_misc: 80890
> static: 991232
> atom_space: 98328 84868
> atom_table: 104153
> module_table: 9084
> export_table: 52172
> register_table: 468
> fun_table: 3266
> module_refs: 2048
> loaded_code: 4228461
> dist_table: 507
> node_table: 227
> bits_bufs_size: 0
> bif_timer: 80192
> link_lh: 0
> proc: 75296 39952
> atom_entry: 305480 305160
> export_entry: 316248 315192
> module_entry: 7784 7720
> reg_proc: 2480 1800
> atom_entry: 305480 305160
> export_entry: 316248 315192
> module_entry: 7784 7720
> reg_proc: 2480 1800
> monitor_sh: 4400 512
> nlink_sh: 11368 6328
> fun_entry: 51328 50096
> db_tab: 6624 5640
> driver_event_data_state: 56 56
> driver_select_data_state: 1352 200
> =allocator:sys_alloc
> option e: true
> option m: libc
> option tt: 131072
> option tp: 0
> =allocator:temp_alloc[0]
> versions: 2.1 2.2
> option e: true
> option t: false
> option ramv: false
> option sbct: 524288
> option asbcst: 4145152
> option rsbcst: 90
> option rsbcmt: 80
> option rmbcmt: 100
> option mmbcs: 131072
> option mmsbc: 256
> option mmmbc: 10
> option lmbcs: 10485760
> option smbcs: 1048576
> option mbcgs: 10
> option mbsd: 3
> option as: gf
> mbcs blocks: 0 136 136
> mbcs blocks size: 0 46568 46568
> mbcs carriers: 1 1 1
> mbcs mseg carriers: 0
> mbcs sys_alloc carriers: 1
> mbcs carriers size: 131112 131112 131112
> mbcs mseg carriers size: 0
> mbcs sys_alloc carriers size: 131112
> sbcs blocks: 0 0 0
> sbcs blocks size: 0 0 0
> sbcs carriers: 0 0 0
> sbcs mseg carriers: 0
>
>
> On Sun, Oct 4, 2009 at 2:17 PM, Ben Hood <0x6e6562 at gmail.com> wrote:
>
>> Suhail,
>>
>> On Sun, Oct 4, 2009 at 8:43 PM, Suhail Doshi <suhail at mixpanel.com> wrote:
>> > Any ideas why my producer would suddenly jump and consume all the RAM
>> > available? I am getting a serious amount of volume of items hitting the
>> > queue.
>>
>> In general, a producer is a client process running outside of
>> RabbitMQ, so it is difficult to see how RabbitMQ is affecting the
>> memory consumption of this process. Maybe you can post a cut down
>> version of your producer application that reproduces the symptoms.
>>
>> Ben
>>
>
>
>
> --
> http://mixpanel.com
> Blog: http://blog.mixpanel.com
>
--
http://mixpanel.com
Blog: http://blog.mixpanel.com
-------------- next part --------------
An HTML attachment was scrubbed...
URL: http://lists.rabbitmq.com/pipermail/rabbitmq-discuss/attachments/20091004/015f5d84/attachment.htm
More information about the rabbitmq-discuss
mailing list