What was your previous version? 1.89.2 stable or trunk?
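(If you are not sure: web2py keeps its version string in a VERSION file in
its root folder, so from the web2py directory something like

    print open('VERSION').read()

should tell you what you are running now.)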

On Sep 26, 2:47 pm, Phyo Arkar <phyo.arkarl...@gmail.com> wrote:
> Hello Web2py.
>
> The Rocket server was very stable for me until today, two weeks
> after an update, while testing the usual JSON output, which was never
> changed and had worked fine for 8 months.
>
> This happens!
>
> ERROR:Rocket.Errors.Thread-6:Unhandled Error when serving connection:
> Traceback (most recent call last):
>   File "/root/web2py/gluon/rocket.py", line 1064, in run
>     self.run_app(conn)
>   File "/root/web2py/gluon/rocket.py", line 1552, in run_app
>     self.write(data, sections)
>   File "/root/web2py/gluon/rocket.py", line 1472, in write
>     self.send_headers(data, sections)
>   File "/root/web2py/gluon/rocket.py", line 1456, in send_headers
>     self.conn.sendall(b(header_data))
>   File "/usr/lib/python2.7/socket.py", line 224, in meth
>     return getattr(self._sock,name)(*args)
> error: [Errno 32] Broken pipe
>
> and it happens on and off, at random.
>
> Why?
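[Errno 32] on sendall() almost always means the other end hung up first:
the browser (or the grid widget re-fetching your JSON) dropped the
connection before Rocket finished writing the response headers. It is the
client's doing, not the controller's. A minimal sketch with plain sockets,
nothing web2py-specific, that reproduces the same error:

import errno
import socket

# a throwaway "server" and "client" on localhost
server = socket.socket()
server.bind(('127.0.0.1', 0))
server.listen(1)

client = socket.socket()
client.connect(server.getsockname())
conn, _ = server.accept()

client.close()  # the peer disconnects before we respond

try:
    for _ in range(1000):  # the first writes may still succeed...
        conn.sendall('HTTP/1.1 200 OK\r\n' * 100)
except socket.error as e:
    # ...until the dead socket surfaces as EPIPE (or ECONNRESET,
    # depending on timing) -- the same [Errno 32] Rocket logs
    print e.errno in (errno.EPIPE, errno.ECONNRESET)

The first writes can succeed because they only land in the kernel buffer,
which is why the error shows up on and off rather than every time.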
>
> Here's my JSON controller:
>
> def rows():
>     # connect to the per-case database (request.vars wins over session)
>     db_name = request.vars.db_name or session.db_name
>     if db_name:
>         casesdb = DAL('mysql://root@localhost/' + db_name)
>         casesdb.define_table('email_data', migrate=False, *email_halfschema)
>         casesdb.define_table('loosefile_data', migrate=False, *file_halfschema)
>         casesdb.define_table('attach_data', migrate=False, *file_halfschema)
>
>     fields = ['id', 'filePath', 'fileName', 'cus', 'sentto',
>               'emailFrom', 'subject', 'cc', 'bcc', 'extracted', 'hdrs',
>               'DateTime', 'OriginalDateTime', 'TimeZone', 'reply_to',
>               'master', 'duplicated', 'MD5Hash', 'msgID', 'fqdn']
>     rows = []
>     page = int(request.vars.page)
>
>     maxrows = request.vars.totalrows  # renamed: 'max' shadows the builtin
>     if maxrows:
>         pagesize = int(maxrows)
>     else:
>         pagesize = int(request.vars.rows)
>     print pagesize
>
>     limitby = (page * pagesize - pagesize, page * pagesize)
>     # limitby = (1, 25)
>     print "offset " + str(page * pagesize - pagesize) + \
>           " limitby " + str(page * pagesize)
>
>     # orderby = request.vars.sidx
>     orderby = casesdb.email_data[request.vars.sidx]
>     print orderby
>     if request.vars.sord == 'desc':
>         orderby = ~orderby
>
>     query = casesdb.email_data.id > 0
>
>     for r in casesdb(query).select(orderby=orderby, limitby=limitby):
>         vals = []
>         for f in fields:
>             if f == 'extracted':
>                 vals.append(_sentisize(r[f]))
>             else:
>                 vals.append(r[f])
>         rows.append(dict(id=r.id, cell=vals))
>
>     total = casesdb(query).count()
>     print total
>     if total % pagesize > 0:
>         pages = int(total / pagesize) + 1
>     else:
>         pages = int(total / pagesize)
>     # if total % pagesize == 0: pages -= 1
>     data = dict(total=pages, page=page, rows=rows)
>     return data

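Unrelated to the broken pipe, two small notes on the paging math in that
controller. The limitby arithmetic checks out: page=2, pagesize=25 gives
limitby=(25, 50), i.e. records 26-50, which is what the grid expects. And
the page-count branch at the end is just a ceiling division, so (assuming
Python 2 integer division, as in the rest of the code) it collapses to one
line:

# e.g. total = 101, pagesize = 25  ->  (101 + 24) / 25 = 5 pages
pages = (total + pagesize - 1) / pagesize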