Source code for gclouddatastore.entity
from datetime import datetime
from gclouddatastore.key import Key
class Entity(dict):
    """An entity in the Cloud Datastore.

    Entities are mutable and act like a subclass of a dictionary,
    so you can take an existing entity, assign it a different key,
    and save it again to duplicate the stored object.
    """

    def __init__(self, dataset, kind):
        self._dataset = dataset
        # Start with a partial key of the given kind (no ID assigned yet).
        self._key = Key(dataset=dataset).kind(kind)

    def dataset(self):
        """Return the dataset this entity belongs to."""
        return self._dataset

    def key(self, key=None):
        """Get this entity's key, or set it and return the entity for chaining."""
        if key:
            self._key = key
            return self
        else:
            return self._key

    def kind(self):
        """Return the kind of this entity's key, if it has a key."""
        if self.key():
            return self.key().kind()

    @classmethod
    def from_key(cls, key, load_properties=True):
        """Build an entity from an existing key, optionally loading its data."""
        entity = cls(dataset=key.dataset(), kind=key.kind())
        if load_properties:
            entity = entity.reload()
        return entity

    @classmethod
    def from_protobuf(cls, pb):
        """Build an entity from an entity protobuf."""
        # This is here to avoid circular imports.
        from gclouddatastore import helpers

        key = Key.from_protobuf(pb.key)
        entity = cls.from_key(key)

        for property_pb in pb.property:
            value = helpers.get_value_from_protobuf(property_pb)
            entity[property_pb.name] = value

        return entity

    def reload(self):
        """Reload the contents of this entity from the datastore."""
        # Note: this requires a complete key; without one there is
        # nothing to reload.
        entity = self.dataset().connection().get_entities(
            self.key().to_protobuf())

        # TODO(jjg): Raise an error if something dumb happens.
        if entity:
            self.update(entity)
        return self
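
    # Illustrative only: reload() is typically paired with from_key() when
    # the properties were not loaded up front, e.g.
    #
    #   entity = Entity.from_key(key, load_properties=False)
    #   entity.reload()  # pulls the stored properties into the dict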

    def save(self):
        """Save this entity's properties to the datastore and update its key."""
        key = self.dataset().connection().save_entity(
            dataset_id=self.dataset().id(), key_pb=self.key().to_protobuf(),
            properties=dict(self))
        self.key(Key.from_protobuf(key))
        return self

    def delete(self):
        """Delete the entity identified by this entity's key from the datastore."""
        response = self.dataset().connection().delete_entity(
            dataset_id=self.dataset().id(), key_pb=self.key().to_protobuf())

    def __repr__(self):
        # TODO: Make sure that this makes sense.
        # An entity should have a key all the time (even if it's partial).
        if self.key():
            return '<Entity%s %s>' % (self.key().path(),
                                      super(Entity, self).__repr__())
        else:
            return '<Entity %s>' % (super(Entity, self).__repr__())
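
# A rough end-to-end sketch of the entity lifecycle, for illustration only.
# It assumes ``dataset`` comes from an authenticated gclouddatastore
# connection; the identifiers below are hypothetical:
#
#   person = Entity(dataset, 'Person')
#   person['name'] = 'Alice'
#   person.save()      # persists the properties and completes the key
#   person.reload()    # refreshes the dict from the datastore
#   person.delete()    # removes the stored entity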