# ext/serializer.py
# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Serializer/Deserializer objects for usage with SQLAlchemy query structures,
allowing "contextual" deserialization.

Any SQLAlchemy query structure, either based on sqlalchemy.sql.*
or sqlalchemy.orm.* can be used.  The mappers, Tables, Columns, Session
etc. which are referenced by the structure are not persisted in serialized
form, but are instead re-associated with the query structure
when it is deserialized.

Usage is nearly the same as that of the standard Python pickle module::

    from sqlalchemy.ext.serializer import loads, dumps
    metadata = MetaData(bind=some_engine)
    Session = scoped_session(sessionmaker())

    # ... define mappers

    query = Session.query(MyClass).
        filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)

    # pickle the query
    serialized = dumps(query)

    # unpickle.  Pass in metadata + scoped_session
    query2 = loads(serialized, metadata, Session)

    print(query2.all())

Similar restrictions as when using raw pickle apply; mapped classes must
themselves be pickleable, meaning they are importable from a module-level
namespace.

The serializer module is only appropriate for query structures.  It is not
needed for:

* instances of user-defined classes.  These contain no references to engines,
  sessions or expression constructs in the typical case and can be serialized
  directly.

* Table metadata that is to be loaded entirely from the serialized structure
  (i.e. is not already declared in the application).  Regular
  pickle.loads()/dumps() can be used to fully dump any ``MetaData`` object,
  typically one which was reflected from an existing database at some previous
  point in time.  The serializer module is specifically for the opposite case,
  where the Table metadata is already present in memory.

"""
import re

# ``pickle``, ``b64encode``/``b64decode``, ``byte_buffer`` and ``text_type``
# come from sqlalchemy.util so the same code runs across Python versions.
from .. import Column
from .. import Table
from ..engine import Engine
from ..orm import class_mapper
from ..orm.interfaces import MapperProperty
from ..orm.mapper import Mapper
from ..orm.session import Session
from ..util import b64decode
from ..util import b64encode
from ..util import byte_buffer
from ..util import pickle
from ..util import text_type

__all__ = ["Serializer", "Deserializer", "dumps", "loads"]
  55. def Serializer(*args, **kw):
  56. pickler = pickle.Pickler(*args, **kw)
  57. def persistent_id(obj):
  58. # print "serializing:", repr(obj)
  59. if isinstance(obj, Mapper) and not obj.non_primary:
  60. id_ = "mapper:" + b64encode(pickle.dumps(obj.class_))
  61. elif isinstance(obj, MapperProperty) and not obj.parent.non_primary:
  62. id_ = (
  63. "mapperprop:"
  64. + b64encode(pickle.dumps(obj.parent.class_))
  65. + ":"
  66. + obj.key
  67. )
  68. elif isinstance(obj, Table):
  69. if "parententity" in obj._annotations:
  70. id_ = "mapper_selectable:" + b64encode(
  71. pickle.dumps(obj._annotations["parententity"].class_)
  72. )
  73. else:
  74. id_ = "table:" + text_type(obj.key)
  75. elif isinstance(obj, Column) and isinstance(obj.table, Table):
  76. id_ = (
  77. "column:" + text_type(obj.table.key) + ":" + text_type(obj.key)
  78. )
  79. elif isinstance(obj, Session):
  80. id_ = "session:"
  81. elif isinstance(obj, Engine):
  82. id_ = "engine:"
  83. else:
  84. return None
  85. return id_
  86. pickler.persistent_id = persistent_id
  87. return pickler
  88. our_ids = re.compile(
  89. r"(mapperprop|mapper|mapper_selectable|table|column|"
  90. r"session|attribute|engine):(.*)"
  91. )
  92. def Deserializer(file, metadata=None, scoped_session=None, engine=None):
  93. unpickler = pickle.Unpickler(file)
  94. def get_engine():
  95. if engine:
  96. return engine
  97. elif scoped_session and scoped_session().bind:
  98. return scoped_session().bind
  99. elif metadata and metadata.bind:
  100. return metadata.bind
  101. else:
  102. return None
  103. def persistent_load(id_):
  104. m = our_ids.match(text_type(id_))
  105. if not m:
  106. return None
  107. else:
  108. type_, args = m.group(1, 2)
  109. if type_ == "attribute":
  110. key, clsarg = args.split(":")
  111. cls = pickle.loads(b64decode(clsarg))
  112. return getattr(cls, key)
  113. elif type_ == "mapper":
  114. cls = pickle.loads(b64decode(args))
  115. return class_mapper(cls)
  116. elif type_ == "mapper_selectable":
  117. cls = pickle.loads(b64decode(args))
  118. return class_mapper(cls).__clause_element__()
  119. elif type_ == "mapperprop":
  120. mapper, keyname = args.split(":")
  121. cls = pickle.loads(b64decode(mapper))
  122. return class_mapper(cls).attrs[keyname]
  123. elif type_ == "table":
  124. return metadata.tables[args]
  125. elif type_ == "column":
  126. table, colname = args.split(":")
  127. return metadata.tables[table].c[colname]
  128. elif type_ == "session":
  129. return scoped_session()
  130. elif type_ == "engine":
  131. return get_engine()
  132. else:
  133. raise Exception("Unknown token: %s" % type_)
  134. unpickler.persistent_load = persistent_load
  135. return unpickler
  136. def dumps(obj, protocol=pickle.HIGHEST_PROTOCOL):
  137. buf = byte_buffer()
  138. pickler = Serializer(buf, protocol)
  139. pickler.dump(obj)
  140. return buf.getvalue()
  141. def loads(data, metadata=None, scoped_session=None, engine=None):
  142. buf = byte_buffer(data)
  143. unpickler = Deserializer(buf, metadata, scoped_session, engine)
  144. return unpickler.load()