Hi, I think web2py should support NDB for the Google Datastore. NDB is a newer Datastore API with automatic caching features built in:
https://developers.google.com/appengine/docs/python/ndb/

I think it would be great for GAE users to be able to use NDB, because it potentially has better performance and can reduce the costs of your app (depending on the type of app, of course).

In fact, I don't think it would be that hard to integrate. I had a go at it and only had to change a couple of lines in dal.py, and it seems to work. You can switch between NDB and DB, because they both should store exactly the same data in the datastore.

I attached a patch made against version 2.6.4. Maybe one of the developers could have a look. I'm not a web2py expert, so this does need to be reviewed by someone with more in-depth knowledge, but maybe it could serve as a starting point.

Thanks!
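To illustrate: the patch registers the new adapter in the ADAPTERS dict under the URI scheme 'google:datastore_ndb', so switching an existing GAE app over is a one-line change. A minimal sketch, assuming the patched dal.py ('person' is just an example table):

    # 'google:datastore' selects the old DB adapter; with the patch
    # applied, 'google:datastore_ndb' selects the NDB one. Both store
    # the same data, so you can switch back and forth.
    db = DAL('google:datastore_ndb')
    db.define_table('person',
                    Field('name'),
                    Field('age', 'integer'))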
270a271
> from google.appengine.ext import ndb
273a275
> from google.appengine.ext.ndb.polymodel import PolyModel as NDBPolyModel
517a520,554
> class NDBDecimalProperty(ndb.StringProperty):
>     """
>     NDB decimal implementation.
>     Extends a concrete property class, as required for NDB custom
>     properties. NDB calls _validate/_to_base_type/_from_base_type
>     along the MRO itself, so they must not call super().
>     """
>     data_type = decimal.Decimal
>
>     def __init__(self, precision, scale, **kwargs):
>         super(NDBDecimalProperty, self).__init__(**kwargs)
>         d = '1.'
>         for x in range(scale):
>             d += '0'
>         self.round = decimal.Decimal(d)
>
>     def _to_base_type(self, value):
>         if value is None or value == '':
>             return None
>         else:
>             return str(value)
>
>     def _from_base_type(self, value):
>         if value is None or value == '':
>             return None
>         else:
>             return decimal.Decimal(value).quantize(self.round)
>
>     def _validate(self, value):
>         if value is None or isinstance(value, decimal.Decimal):
>             return value
>         elif isinstance(value, basestring):
>             return decimal.Decimal(value)
>         raise TypeError("Property %s must be a Decimal or string."
>                         % self._name)
2118c2155,2158
<             id = value.key().id_or_name()
---
>             if isinstance(self, GoogleDatastoreNDBAdapter):
>                 id = value.key.id()
>             else:
>                 id = value.key().id_or_name()
4451c4491
<         if isinstance(fieldtype, gae.Property):
---
>         if isinstance(fieldtype, (gae.Property, ndb.Property)):
4648a4689
>         self.keyfunc = Key.from_path
4739c4780
<             second = Key.from_path(first._tablename, long(second))
---
>             second = self.keyfunc(first._tablename, long(second))
4746c4787
<             second = Key.from_path(first._tablename, long(second))
---
>             second = self.keyfunc(first._tablename, long(second))
4753c4794
<             second = Key.from_path(first._tablename, long(second))
---
>             second = self.keyfunc(first._tablename, long(second))
4760c4801
<             second = Key.from_path(first._tablename, long(second))
---
>             second = self.keyfunc(first._tablename, long(second))
4767c4808
<             second = Key.from_path(first._tablename, long(second))
---
>             second = self.keyfunc(first._tablename, long(second))
4782c4823
<         second = [Key.from_path(first._tablename, int(i)) for i in second]
---
>         second = [self.keyfunc(first._tablename, int(i)) for i in second]
5020a5062,5350
> class GoogleDatastoreNDBAdapter(GoogleDatastoreAdapter):
>     """
>     Adapter for GAE which uses NDB
>     See: https://developers.google.com/appengine/docs/python/ndb/
>
>     You can pass per-model caching settings like this:
>
>     define a dict in your model:
>     ndb_settings = {<table_name>:{<variable_name>:<variable_value>}}
>
>     and pass it to DAL:
>     db = DAL('google:datastore_ndb', adapter_args={'ndb_settings':ndb_settings})
>
>     See: https://developers.google.com/appengine/docs/python/ndb/cache
>     """
>     def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
>                  credential_decoder=IDENTITY, driver_args={},
>                  adapter_args={}, do_connect=True, after_connection=None):
>         self.types.update({
>             'boolean': ndb.BooleanProperty,
>             'string': (lambda **kwargs: ndb.StringProperty(**kwargs)),
>             'text': ndb.TextProperty,
>             'json': ndb.TextProperty,
>             'password': ndb.StringProperty,
>             'blob': ndb.BlobProperty,
>             'upload': ndb.StringProperty,
>             'integer': ndb.IntegerProperty,
>             'bigint': ndb.IntegerProperty,
>             'float': ndb.FloatProperty,
>             'double': ndb.FloatProperty,
>             'decimal': NDBDecimalProperty,
>             'date': ndb.DateProperty,
>             'time': ndb.TimeProperty,
>             'datetime': ndb.DateTimeProperty,
>             'id': None,
>             'reference': ndb.IntegerProperty,
>             'list:string': (lambda **kwargs: ndb.StringProperty(repeated=True, default=None, **kwargs)),
>             'list:integer': (lambda **kwargs: ndb.IntegerProperty(repeated=True, default=None, **kwargs)),
>             'list:reference': (lambda **kwargs: ndb.IntegerProperty(repeated=True, default=None, **kwargs)),
>             })
>         self.db = db
>         self.uri = uri
>         self.dbengine = 'google:datastore'
>         self.folder = folder
>         db['_lastsql'] = ''
>         self.db_codec = 'UTF-8'
>         self._after_connection = after_connection
>         self.pool_size = 0
>         match = self.REGEX_NAMESPACE.match(uri)
>         if match:
>             namespace_manager.set_namespace(match.group('namespace'))
>         self.keyfunc = ndb.Key
>         self.ndb_settings = adapter_args.get('ndb_settings', {})
>
>     def create_table(self, table, migrate=True, fake_migrate=False,
>                      polymodel=None):
>         myfields = {}
>         for field in table:
>             if isinstance(polymodel, Table) and field.name in polymodel.fields():
>                 continue
>             attr = {}
>             if isinstance(field.custom_qualifier, dict):
>                 # custom properties to add to the GAE field declaration
>                 attr = field.custom_qualifier
>             field_type = field.type
>             if isinstance(field_type, SQLCustomType):
>                 ftype = self.types[field_type.native or field_type.type](**attr)
>             elif isinstance(field_type, ndb.Property):
>                 ftype = field_type
>             elif field_type.startswith('id'):
>                 continue
>             elif field_type.startswith('decimal'):
>                 precision, scale = field_type[7:].strip('()').split(',')
>                 precision = int(precision)
>                 scale = int(scale)
>                 ftype = NDBDecimalProperty(precision, scale, **attr)
>             elif field_type.startswith('reference'):
>                 if field.notnull:
>                     attr = dict(required=True)
>                 ftype = self.types[field_type[:9]](**attr)
>             elif field_type.startswith('list:reference'):
>                 if field.notnull:
>                     attr['required'] = True
>                 ftype = self.types[field_type[:14]](**attr)
>             elif field_type.startswith('list:'):
>                 ftype = self.types[field_type](**attr)
>             elif not field_type in self.types\
>                     or not self.types[field_type]:
>                 raise SyntaxError('Field: unknown field type: %s' % field_type)
>             else:
>                 ftype = self.types[field_type](**attr)
>             myfields[field.name] = ftype
>         if not polymodel:
>             table._tableobj = classobj(table._tablename, (ndb.Model, ), myfields)
>             # Set NDB caching variables for this table
>             if table._tablename in self.ndb_settings:
>                 for k, v in self.ndb_settings[table._tablename].iteritems():
>                     setattr(table._tableobj, k, v)
>         elif polymodel == True:
>             table._tableobj = classobj(table._tablename, (NDBPolyModel, ), myfields)
>         elif isinstance(polymodel, Table):
>             table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
>         else:
>             raise SyntaxError("polymodel must be None, True, a table or a tablename")
>         return None
>
>     def filter(self, query, tableobj, prop, op, value):
>         return {
>             '=': query.filter(getattr(tableobj, prop) == value),
>             '>': query.filter(getattr(tableobj, prop) > value),
>             '<': query.filter(getattr(tableobj, prop) < value),
>             '<=': query.filter(getattr(tableobj, prop) <= value),
>             '>=': query.filter(getattr(tableobj, prop) >= value),
>             }[op]
>
>     def select_raw(self, query, fields=None, attributes=None):
>         db = self.db
>         fields = fields or []
>         attributes = attributes or {}
>         args_get = attributes.get
>         new_fields = []
>         for item in fields:
>             if isinstance(item, SQLALL):
>                 new_fields += item._table
>             else:
>                 new_fields.append(item)
>         fields = new_fields
>         if query:
>             tablename = self.get_table(query)
>         elif fields:
>             tablename = fields[0].tablename
>             query = db._adapter.id_query(fields[0].table)
>         else:
>             raise SyntaxError("Unable to determine a tablename")
>
>         if query:
>             if use_common_filters(query):
>                 query = self.common_filter(query, [tablename])
>
>         # tableobj is a NDB Model class (or subclass)
>         tableobj = db[tablename]._tableobj
>         filters = self.expand(query)
>
>         projection = None
>         if len(db[tablename].fields) == len(fields):
>             # getting all fields, not a projection query
>             projection = None
>         elif args_get('projection') == True:
>             projection = []
>             for f in fields:
>                 if f.type in ['text', 'blob', 'json']:
>                     raise SyntaxError(
>                         "text and blob field types not allowed in projection queries")
>                 else:
>                     projection.append(f.name)
>         elif args_get('filterfields') == True:
>             projection = []
>             for f in fields:
>                 projection.append(f.name)
>
>         # real projections can't include 'id';
>         # it will be added to the result later
>         query_projection = [
>             p for p in projection if
>             p != db[tablename]._id.name] if projection and \
>             args_get('projection') == True \
>             else None
>
>         cursor = None
>         if isinstance(args_get('reusecursor'), str):
>             cursor = args_get('reusecursor')
>         qo = ndb.QueryOptions(projection=query_projection, cursor=cursor)
>         items = tableobj.query(default_options=qo)
>
>         for filter in filters:
>             if args_get('projection') == True and \
>                filter.name in query_projection and \
>                filter.op in ['=', '<=', '>=']:
>                 raise SyntaxError(
>                     "projection fields cannot have equality filters")
>             if filter.name == '__key__' and filter.op == '>' and filter.value == 0:
>                 continue
>             elif filter.name == '__key__' and filter.op == '=':
>                 if filter.value == 0:
>                     items = []
>                 elif isinstance(filter.value, ndb.Key):
>                     # key queries return a class instance,
>                     # can't use projection;
>                     # extra values will be ignored in post-processing later
>                     item = filter.value.get()
>                     items = (item and [item]) or []
>                 else:
>                     # key queries return a class instance,
>                     # can't use projection;
>                     # extra values will be ignored in post-processing later
>                     item = tableobj.get_by_id(filter.value)
>                     items = (item and [item]) or []
>             elif isinstance(items, list):  # i.e. there is a single record!
>                 items = [i for i in items if filter.apply(
>                         getattr(i, filter.name), filter.value)]
>             else:
>                 if filter.name == '__key__' and filter.op != 'in':
>                     items = items.order(tableobj._key)
>                 items = self.filter(items, tableobj, filter.name,
>                                     filter.op, filter.value)
>         if not isinstance(items, list):
>             if args_get('left', None):
>                 raise SyntaxError('Set: no left join in appengine')
>             if args_get('groupby', None):
>                 raise SyntaxError('Set: no groupby in appengine')
>             orderby = args_get('orderby', False)
>             if orderby:
>                 ### THIS REALLY NEEDS IMPROVEMENT !!!
>                 if isinstance(orderby, (list, tuple)):
>                     orderby = xorify(orderby)
>                 if isinstance(orderby, Expression):
>                     orderby = self.expand(orderby)
>                 orders = orderby.split(', ')
>                 for order in orders:
>                     orderprop = None
>                     try:
>                         orderprop = getattr(tableobj, order)
>                     except AttributeError:
>                         pass
>                     order = {'-id': -tableobj._key,
>                              'id': tableobj._key}.get(order, orderprop)
>                     items = items.order(order)
>             if args_get('limitby', None):
>                 (lmin, lmax) = attributes['limitby']
>                 (limit, offset) = (lmax - lmin, lmin)
>                 rows, cursor, more = items.fetch_page(limit, offset=offset)
>                 # the cursor is only useful if there was a limit and we
>                 # didn't return all results
>                 if args_get('reusecursor'):
>                     db['_lastcursor'] = cursor
>                 items = rows
>         return (items, tablename, projection or db[tablename].fields)
>
>     def delete(self, tablename, query):
>         """
>         This function was changed on 2010-05-04 because according to
>         http://code.google.com/p/googleappengine/issues/detail?id=3119
>         GAE no longer supports deleting more than 1000 records.
>         """
>         # self.db['_lastsql'] = self._delete(tablename,query)
>         (items, tablename, fields) = self.select_raw(query)
>         # items can be one item or a query
>         if not isinstance(items, list):
>             # use a keys_only query to ensure that this runs as a
>             # datastore small operation; the fetch returns ndb.Key
>             # instances, which delete_multi takes directly
>             leftitems = items.fetch(1000, keys_only=True)
>             counter = 0
>             while len(leftitems):
>                 counter += len(leftitems)
>                 ndb.delete_multi(leftitems)
>                 leftitems = items.fetch(1000, keys_only=True)
>         else:
>             counter = len(items)
>             ndb.delete_multi([item.key for item in items])
>         return counter
>
>     def update(self, tablename, query, update_fields):
>         # self.db['_lastsql'] = self._update(tablename,query,update_fields)
>         (items, tablename, fields) = self.select_raw(query)
>         counter = 0
>         for item in items:
>             for field, value in update_fields:
>                 setattr(item, field.name, self.represent(value, field.type))
>             item.put()
>             counter += 1
>         LOGGER.info(str(counter))
>         return counter
>
>     def insert(self, table, fields):
>         dfields = dict((f.name, self.represent(v, f.type)) for f, v in fields)
>         # table._db['_lastsql'] = self._insert(table,fields)
>         tmp = table._tableobj(**dfields)
>         tmp.put()
>         rid = Reference(tmp.key.id())
>         (rid._table, rid._record, rid._gaekey) = (table, None, tmp.key)
>         return rid
>
>     def bulk_insert(self, table, items):
>         parsed_items = []
>         for item in items:
>             dfields = dict((f.name, self.represent(v, f.type)) for f, v in item)
>             parsed_items.append(table._tableobj(**dfields))
>         ndb.put_multi(parsed_items)
>         return True
6868a7199
>     'google:datastore_ndb': GoogleDatastoreNDBAdapter,