⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 store.py

📁 Python的一个ORM,现在很火
💻 PY
📖 第 1 页 / 共 3 页
字号:
#
# Copyright (c) 2006, 2007 Canonical
#
# Written by Gustavo Niemeyer <gustavo@niemeyer.net>
#
# This file is part of Storm Object Relational Mapper.
#
# Storm is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of
# the License, or (at your option) any later version.
#
# Storm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
from weakref import WeakValueDictionary, WeakKeyDictionary

from storm.info import get_cls_info, get_obj_info, set_obj_info, get_info
from storm.variables import Variable, LazyValue
from storm.expr import (
    Expr, Select, Insert, Update, Delete, Column, JoinExpr, Count, Max, Min,
    Avg, Sum, Eq, And, Asc, Desc, compile_python, compare_columns, SQLRaw,
    Union, Except, Intersect, Alias)
from storm.exceptions import (
    WrongStoreError, NotFlushedError, OrderLoopError, UnorderedError,
    NotOneError, FeatureError, CompileError, LostObjectError, ClassInfoError)
from storm import Undef


__all__ = ["Store", "AutoReload", "EmptyResultSet"]

# Markers stored under obj_info["pending"] while an add/remove awaits flush.
PENDING_ADD = 1
PENDING_REMOVE = 2


class Store(object):
    """The Storm Store: unit-of-work front end over a database connection.

    Tracks object state (clean/dirty/pending add/pending remove), caches
    live objects by primary key, and flushes pending changes as SQL.
    """

    # Set elsewhere (not visible in this chunk) to the ResultSet factory
    # used by find()/using().
    _result_set_factory = None

    def __init__(self, database):
        """Create a store operating on the given database.

        @param database: Database object from which a connection is obtained.
        """
        self._connection = database.connect()
        # Weak cache: objects are dropped once no external reference remains.
        self._cache = WeakValueDictionary()
        self._dirty = {}
        self._order = {}  # (info, info) = count

    @staticmethod
    def of(obj):
        """Return the store the given object is bound to, or None."""
        try:
            return get_obj_info(obj).get("store")
        except (AttributeError, ClassInfoError):
            # Not a Storm-managed object at all.
            return None

    def execute(self, statement, params=None, noresult=False):
        """Flush pending changes, then execute a statement on the connection.

        Flushing first ensures the statement observes all in-memory changes.
        """
        self.flush()
        return self._connection.execute(statement, params, noresult)

    def close(self):
        """Close the underlying database connection."""
        self._connection.close()

    def commit(self):
        """Flush pending changes and commit the current transaction.

        Cached objects are invalidated so later reads re-check the database.
        """
        self.flush()
        self.invalidate()
        self._connection.commit()

    def rollback(self):
        """Roll back the current transaction and discard pending changes."""
        for obj_info in self._dirty:
            pending = obj_info.pop("pending", None)
            if pending is PENDING_ADD:
                # Object never got in the cache, so being "in the store"
                # has no actual meaning for it.
                del obj_info["store"]
            elif pending is PENDING_REMOVE:
                # Object never got removed, so it's still in the cache,
                # and thus should continue to resolve from now on.
                self._enable_lazy_resolving(obj_info)
        self._dirty.clear()
        self.invalidate()
        self._connection.rollback()

    def get(self, cls, key):
        """Get object of type cls with the given primary key from the database.

        If the object is cached the database won't be touched.

        @param cls: Class of the object to be retrieved.
        @param key: Primary key of object. May be a tuple for composed keys.
        """
        self.flush()

        if type(key) != tuple:
            key = (key,)

        cls_info = get_cls_info(cls)

        assert len(key) == len(cls_info.primary_key)

        # Normalize raw key values into Variables via each column's factory.
        primary_vars = []
        for column, variable in zip(cls_info.primary_key, key):
            if not isinstance(variable, Variable):
                variable = column.variable_factory(value=variable)
            primary_vars.append(variable)

        obj_info = self._cache.get((cls_info.cls, tuple(primary_vars)))
        if obj_info is not None:
            if obj_info.get("invalidated"):
                # Cache entry may no longer exist in the database; re-check.
                try:
                    self._fill_missing_values(obj_info, primary_vars)
                except LostObjectError:
                    return None
            obj = obj_info.get_obj()
            if obj is None:
                # The Python object was garbage collected but its info
                # survived; rebuild a live object from it.
                obj = self._rebuild_deleted_object(obj_info)
            return obj

        where = compare_columns(cls_info.primary_key, primary_vars)

        select = Select(cls_info.columns, where,
                        default_tables=cls_info.table, limit=1)

        result = self._connection.execute(select)
        values = result.get_one()
        if values is None:
            return None
        return self._load_object(cls_info, result, values)

    def find(self, cls_spec, *args, **kwargs):
        """Build a result set for objects matching the given criteria.

        @param cls_spec: A class, or a tuple of classes for multi-class
            results.
        @param args: Expressions combined into the WHERE clause.
        @param kwargs: Column-name/value equality constraints.
        """
        self.flush()
        if type(cls_spec) is tuple:
            cls_spec_info = tuple(get_cls_info(cls) for cls in cls_spec)
            where = get_where_for_args(args, kwargs)
        else:
            cls_spec_info = get_cls_info(cls_spec)
            where = get_where_for_args(args, kwargs, cls_spec)
        return self._result_set_factory(self, cls_spec_info, where)

    def using(self, *tables):
        """Return a table set for finds over explicit tables/joins."""
        return self._table_set(self, tables)

    def add(self, obj):
        """Schedule an object for insertion on the next flush.

        @raises WrongStoreError: if the object belongs to another store.
        @return: the object itself, for convenience.
        """
        obj_info = get_obj_info(obj)

        store = obj_info.get("store")
        if store is not None and store is not self:
            raise WrongStoreError("%s is part of another store" % repr(obj))

        pending = obj_info.get("pending")

        if pending is PENDING_ADD:
            # Already scheduled; adding twice is a no-op.
            pass
        elif pending is PENDING_REMOVE:
            # Re-adding a pending removal just cancels the removal.
            del obj_info["pending"]
            self._enable_lazy_resolving(obj_info)
            # obj_info.event.emit("added")
        elif store is None:
            obj_info["store"] = self
            obj_info["pending"] = PENDING_ADD
            self._set_dirty(obj_info)
            self._enable_lazy_resolving(obj_info)
            obj_info.event.emit("added")

        return obj

    def remove(self, obj):
        """Schedule an object for deletion on the next flush.

        @raises WrongStoreError: if the object is not in this store.
        """
        obj_info = get_obj_info(obj)

        if obj_info.get("store") is not self:
            raise WrongStoreError("%s is not in this store" % repr(obj))

        pending = obj_info.get("pending")

        if pending is PENDING_REMOVE:
            # Already scheduled; removing twice is a no-op.
            pass
        elif pending is PENDING_ADD:
            # Never hit the database: cancel the pending add entirely.
            del obj_info["store"]
            del obj_info["pending"]
            self._set_clean(obj_info)
            self._disable_lazy_resolving(obj_info)
        else:
            obj_info["pending"] = PENDING_REMOVE
            self._set_dirty(obj_info)
            self._disable_lazy_resolving(obj_info)

    def reload(self, obj):
        """Reload the object's column values from the database.

        @raises WrongStoreError: if the object is not in this store.
        @raises NotFlushedError: if the object was never flushed, so it
            has no known primary key in the database yet.
        """
        obj_info = get_obj_info(obj)
        cls_info = obj_info.cls_info
        if obj_info.get("store") is not self:
            raise WrongStoreError("%s is not in this store" % repr(obj))
        if "primary_vars" not in obj_info:
            raise NotFlushedError("Can't reload an object if it was "
                                  "never flushed")
        where = compare_columns(cls_info.primary_key, obj_info["primary_vars"])
        select = Select(cls_info.columns, where,
                        default_tables=cls_info.table, limit=1)
        result = self._connection.execute(select)
        values = result.get_one()
        self._set_values(obj_info, cls_info.columns, result, values)
        obj_info.checkpoint()
        self._set_clean(obj_info)

    def autoreload(self, obj=None):
        """Mark the object (or all cached objects) to reload on next access."""
        self._mark_autoreload(obj, False)

    def invalidate(self, obj=None):
        """Like autoreload(), but also mark existence as unverified."""
        self._mark_autoreload(obj, True)

    def _mark_autoreload(self, obj=None, invalidate=False):
        """Set non-primary-key variables to AutoReload on target objects.

        @param obj: A single object, or None for every cached object.
        @param invalidate: If true, also flag objects as 'invalidated' and
            run their __storm_invalidated__ hooks.
        """
        if obj is None:
            obj_infos = self._iter_cached()
        else:
            obj_infos = (get_obj_info(obj),)
        for obj_info in obj_infos:
            cls_info = obj_info.cls_info
            for column in cls_info.columns:
                # Primary key values stay put; only data columns reload.
                if id(column) not in cls_info.primary_key_idx:
                    obj_info.variables[column].set(AutoReload)
            if invalidate:
                # Marking an object with 'invalidated' means that we're
                # not sure if the object is actually in the database
                # anymore, so before the object is returned from the cache
                # (e.g. by a get()), the database should be queried to see
                # if the object's still there.
                obj_info["invalidated"] = True
        # We want to make sure we've marked all objects as invalidated and set
        # up their autoreloads before calling the invalidated hook on *any* of
        # them, because an invalidated hook might use other objects and we want
        # to prevent invalidation ordering issues.
        if invalidate:
            for obj_info in obj_infos:
                self._run_hook(obj_info, "__storm_invalidated__")

    def add_flush_order(self, before, after):
        """Require that `before` is flushed before `after` (refcounted)."""
        pair = (get_obj_info(before), get_obj_info(after))
        try:
            self._order[pair] += 1
        except KeyError:
            self._order[pair] = 1

    def remove_flush_order(self, before, after):
        """Undo one add_flush_order() for the same pair; no-op if absent."""
        pair = (get_obj_info(before), get_obj_info(after))
        try:
            self._order[pair] -= 1
        except KeyError:
            pass

    def flush(self):
        """Write all dirty objects to the database, honoring flush order.

        @raises OrderLoopError: if the requested ordering constraints form
            a cycle among dirty objects.
        """
        for obj_info in self._iter_cached():
            obj_info.event.emit("flush")

        # Build predecessor sets from positive-count ordering constraints.
        predecessors = {}
        for (before_info, after_info), n in self._order.iteritems():
            if n > 0:
                before_set = predecessors.get(after_info)
                if before_set is None:
                    predecessors[after_info] = set((before_info,))
                else:
                    before_set.add(before_info)

        # Repeatedly pick a dirty object with no dirty predecessors
        # (a simple topological order over the dirty set).
        while self._dirty:
            for obj_info in self._dirty:
                for before_info in predecessors.get(obj_info, ()):
                    if before_info in self._dirty:
                        break  # A predecessor is still dirty.
                else:
                    break  # Found an item without dirty predecessors.
            else:
                raise OrderLoopError("Can't flush due to ordering loop")
            self._dirty.pop(obj_info, None)
            self._flush_one(obj_info)

        self._order.clear()

    def _flush_one(self, obj_info):
        """Flush a single object: DELETE, INSERT, or UPDATE as pending."""
        cls_info = obj_info.cls_info

        pending = obj_info.pop("pending", None)

        if pending is PENDING_REMOVE:
            expr = Delete(compare_columns(cls_info.primary_key,
                                          obj_info["primary_vars"]),
                          cls_info.table)
            self._connection.execute(expr, noresult=True)

            # We're sure the cache is valid at this point.
            obj_info.pop("invalidated", None)

            self._disable_change_notification(obj_info)
            self._remove_from_cache(obj_info)
            del obj_info["store"]

        elif pending is PENDING_ADD:
            columns = []
            variables = []

            # Insert defined values; lazy expressions are inserted as SQL
            # expressions, and undefined columns are left to the database.
            for column in cls_info.columns:
                variable = obj_info.variables[column]
                if variable.is_defined():
                    columns.append(column)
                    variables.append(variable)
                else:
                    lazy_value = variable.get_lazy()
                    if isinstance(lazy_value, Expr):
                        columns.append(column)
                        variables.append(lazy_value)

            expr = Insert(columns, variables, cls_info.table)

            result = self._connection.execute(expr)

            # We're sure the cache is valid at this point. We just added
            # the object.
            obj_info.pop("invalidated", None)

            self._fill_missing_values(obj_info, obj_info.primary_vars, result,
                                      checkpoint=False, replace_lazy=True)

            self._enable_change_notification(obj_info)
            self._add_to_cache(obj_info)
            obj_info.checkpoint()

        else:
            # Plain dirty object: update only the changed columns.
            cached_primary_vars = obj_info["primary_vars"]
            primary_key_idx = cls_info.primary_key_idx

            changes = {}
            for column in cls_info.columns:
                variable = obj_info.variables[column]
                if variable.has_changed():
                    if variable.is_defined():
                        changes[column] = variable
                    else:
                        lazy_value = variable.get_lazy()
                        if isinstance(lazy_value, Expr):
                            changes[column] = lazy_value

            if changes:
                expr = Update(changes,
                              compare_columns(cls_info.primary_key,
                                              cached_primary_vars),
                              cls_info.table)
                self._connection.execute(expr, noresult=True)

                self._fill_missing_values(obj_info, obj_info.primary_vars,
                                          checkpoint=False, replace_lazy=True)

                self._add_to_cache(obj_info)

            obj_info.checkpoint()

        self._run_hook(obj_info, "__storm_flushed__")

        obj_info.event.emit("flushed")

    def _fill_missing_values(self, obj_info, primary_vars, result=None,
                             checkpoint=True, replace_lazy=False):
        """Query retrieve from the database any missing values in obj_info.

        This method will verify which values are unset in obj_info,
        and will retrieve them from the database to set them.

        @param obj_info: ObjectInfo to have its values filled.
        @param primary_vars: Variables composing the primary key with
            up-to-date values (cached variables may be out-of-date when
            this method is called).
        @param result: If some value in the set of primary variables
            isn't defined, it must be retrieved from the database
            using database-dependent logic, which is provided by the
            backend in the result of the query which inserted the object.
        @param checkpoint: If true, variables will be checkpointed so that
            they are aware that the value just set is the value currently
            in the database.  Generally this will be false only when
            checkpointing is being done at the calling place.
        @param replace_lazy: If true, lazy values are handled as if they
            were missing, and are replaced by values returned from the
        """
        # NOTE(review): the source chunk ends here (page 1 of 3) — the
        # docstring above is cut mid-sentence and the method body is not
        # visible in this chunk; do not treat this stub as complete.

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -