Hello All,

Happy New Year...

I was analyzing the performance of gRPC & Protobuf in my library 
and found the memory leak reported below. Can anyone confirm whether this 
version has a known memory leak, and suggest mitigation steps to avoid it?

CallStack[2]: memory expires with 456 bytes, backtrace:

    0x00007ff07bf23ea0  libjemalloc.so  __libc_calloc()+0

    0x00007fefee6de830  libgpr.so  gpr_zalloc()+43

    0x00007feff07b8a74  libgrpc.so  
_Z37grpc_completion_queue_create_internal23grpc_cq_completion_type20grpc_cq_polling_typeP29grpc_completion_queue_functor()+252

    0x00007feff07bc241  libgrpc.so  
_ZL14default_createPK29grpc_completion_queue_factoryPK32grpc_completion_queue_attributes()+47

    0x00007feff07bc3b2  libgrpc.so  
grpc_completion_queue_create_for_pluck()+129

    0x00007feff0f3c499  libgrpc++.so  
_ZN4grpc6Server11SyncRequestC2EPS0_PNS_8internal16RpcServiceMethodE()+169

    0x00007feff0f3b93e  libgrpc++.so  
_ZN4grpc6Server11SyncRequestC1EPS0_PNS_8internal16RpcServiceMethodEPN9grpc_core6Server24RegisteredCallAllocationE()+48

    0x00007feff0f3c7e2  libgrpc++.so  
_ZZN4grpc6Server24SyncRequestThreadManager13AddSyncMethodEPNS_8internal16RpcServiceMethodEPvENKUlvE_clEv()+66

    0x00007feff0f3f6a4  libgrpc++.so  
_ZNSt17_Function_handlerIFN9grpc_core6Server24RegisteredCallAllocationEvEZN4grpc6Server24SyncRequestThreadManager13AddSyncMethodEPNS4_8internal16RpcServiceMethodEPvEUlvE_E9_M_invokeERKSt9_Any_data()+46

    0x00007feff07c53a5  libgrpc.so  
_ZNKSt8functionIFN9grpc_core6Server24RegisteredCallAllocationEvEEclEv()+61

    0x00007feff07c4705  libgrpc.so  
_ZN9grpc_core6Server34AllocatingRequestMatcherRegistered12MatchOrQueueEmPNS0_8CallDataE()+87

    0x00007feff07c1cec  libgrpc.so  
_ZN9grpc_core6Server8CallData13PublishNewRpcEPvP10grpc_error()+212

    0x00007feff049c26b  libgrpc.so  
_ZN9grpc_core7Closure3RunERKNS_13DebugLocationEP12grpc_closureP10grpc_error()+271

    0x00007feff07ac264  libgrpc.so  
_ZL21post_batch_completionP13batch_control()+687

    0x00007feff07ac2f3  libgrpc.so  
_ZL17finish_batch_stepP13batch_control()+49

    0x00007feff07ac37a  libgrpc.so  
_ZL25continue_receiving_slicesP13batch_control()+132

    0x00007feff07ac7ae  libgrpc.so  
_ZL21process_data_after_mdP13batch_control()+413

    0x00007feff07ac8b2  libgrpc.so  
_ZL22receiving_stream_readyPvP10grpc_error()+252

    0x00007feff07ac90e  libgrpc.so  
_ZL39receiving_stream_ready_in_call_combinerPvP10grpc_error()+89

    0x00007feff049c26b  libgrpc.so  
_ZN9grpc_core7Closure3RunERKNS_13DebugLocationEP12grpc_closureP10grpc_error()+271

    0x00007feff05fdc18  libgrpc.so  
_ZL18recv_message_readyPvP10grpc_error()+638

    0x00007feff049c26b  libgrpc.so  
_ZN9grpc_core7Closure3RunERKNS_13DebugLocationEP12grpc_closureP10grpc_error()+271

    0x00007feff05fb211  libgrpc.so  
_ZL21hs_recv_message_readyPvP10grpc_error()+213

    0x00007feff049c26b  libgrpc.so  
_ZN9grpc_core7Closure3RunERKNS_13DebugLocationEP12grpc_closureP10grpc_error()+271

    0x00007feff05f99df  libgrpc.so  
_ZN9grpc_core12_GLOBAL__N_18CallData32ContinueRecvMessageReadyCallbackEP10grpc_error()+105

    0x00007feff05f94b5  libgrpc.so  
_ZN9grpc_core12_GLOBAL__N_18CallData18OnRecvMessageReadyEPvP10grpc_error()+711

    0x00007feff0723c3b  libgrpc.so  
_ZL12exec_ctx_runP12grpc_closureP10grpc_error()+181

    0x00007feff072416b  libgrpc.so  _ZN9grpc_core7ExecCtx5FlushEv()+139

    0x00007feff071b7d1  libgrpc.so  
_ZL12pollset_workP12grpc_pollsetPP19grpc_pollset_workerl()+406

    0x00007feff0723426  libgrpc.so  
_ZL12pollset_workP12grpc_pollsetPP19grpc_pollset_workerl()+127

    0x00007feff0729927  libgrpc.so  
_Z17grpc_pollset_workP12grpc_pollsetPP19grpc_pollset_workerl()+51

    0x00007feff07ba594  libgrpc.so  
_ZL7cq_nextP21grpc_completion_queue12gpr_timespecPv()+1047

    0x00007feff07baaf2  libgrpc.so  grpc_completion_queue_next()+63

    0x00007feff0f08c3a  libgrpc++.so  
_ZN4grpc15CompletionQueue17AsyncNextInternalEPPvPb12gpr_timespec()+66

    0x00007feff0f3d9fa  libgrpc++.so  
_ZN4grpc15CompletionQueue9AsyncNextI12gpr_timespecEENS0_10NextStatusEPPvPbRKT_()+88

    0x00007feff0f3c689  libgrpc++.so  
_ZN4grpc6Server24SyncRequestThreadManager11PollForWorkEPPvPb()+139

    0x00007feff0f4d20a  libgrpc++.so  
_ZN4grpc13ThreadManager12MainWorkLoopEv()+46

    0x00007feff0f4ca9b  libgrpc++.so  
_ZN4grpc13ThreadManager12WorkerThread3RunEv()+27

    0x00007feff0f4c95a  libgrpc++.so  
_ZZN4grpc13ThreadManager12WorkerThreadC4EPS0_ENKUlPvE_clES3_()+28

    0x00007feff0f4c97a  libgrpc++.so  
_ZZN4grpc13ThreadManager12WorkerThreadC4EPS0_ENUlPvE_4_FUNES3_()+29

    0x00007fefee6ee48b  libgpr.so  
_ZZN9grpc_core12_GLOBAL__N_120ThreadInternalsPosixC4EPKcPFvPvES4_PbRKNS_6Thread7OptionsEENKUlS4_E_clES4_()+287

    0x00007fefee6ee4bc  libgpr.so  
_ZZN9grpc_core12_GLOBAL__N_120ThreadInternalsPosixC4EPKcPFvPvES4_PbRKNS_6Thread7OptionsEENUlS4_E_4_FUNES4_()+29

    0x00007ff07bafb1ca  libpthread-2.28.so  start_thread()+234

    0x00007ff07b7568d3  libc-2.28.so  __GI___clone()+67 

-- 
You received this message because you are subscribed to the Google Groups 
"grpc.io" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to grpc-io+unsubscr...@googlegroups.com.
To view this discussion visit 
https://groups.google.com/d/msgid/grpc-io/6dad3717-9302-4e6c-b921-68155837c309n%40googlegroups.com.

Reply via email to