 from beaker.exceptions import InvalidCacheBackendError, MissingCacheParameter
 from beaker.synchronization import file_synchronizer, null_synchronizer
 from beaker.util import verify_directory, SyncDict
+from beaker.ext.sqla import SqlaNamespaceManager

 log = logging.getLogger(__name__)

 sa = None
-pool = None
 types = None


-class DatabaseNamespaceManager(OpenResourceNamespaceManager):
-    metadatas = SyncDict()
-    tables = SyncDict()
+class DatabaseNamespaceManager(SqlaNamespaceManager):

     @classmethod
     def _init_dependencies(cls):
-        global sa, pool, types
+        SqlaNamespaceManager._init_dependencies()
+
+        global sa, types
         if sa is not None:
             return
-        try:
-            import sqlalchemy as sa
-            import sqlalchemy.pool as pool
-            from sqlalchemy import types
-        except ImportError:
-            raise InvalidCacheBackendError("Database cache backend requires "
-                                           "the 'sqlalchemy' library")
-
-    def __init__(self, namespace, url=None, sa_opts=None, optimistic=False,
-                 table_name='beaker_cache', data_dir=None, lock_dir=None,
-                 schema_name=None, **params):
+        # SqlaNamespaceManager will already error
+        import sqlalchemy as sa
+        from sqlalchemy import types
+
+    def __init__(self, namespace, url=None, sa_opts=None, table_name='beaker_cache',
+                 data_dir=None, lock_dir=None, schema_name=None, **params):
         """Creates a database namespace manager

         ``url``
             SQLAlchemy compliant db url
         ``sa_opts``
             A dictionary of SQLAlchemy keyword options to initialize the engine
             with.
-        ``optimistic``
-            Use optimistic session locking, note that this will result in an
-            additional select when updating a cache value to compare version
-            numbers.
         ``table_name``
             The table name to use in the database for the cache.
         ``schema_name``
             The schema name to use in the database for the cache.
         """
-        OpenResourceNamespaceManager.__init__(self, namespace)
-
         if sa_opts is None:
             sa_opts = {}

@@ -67,113 +56,28 @@ def __init__(self, namespace, url=None, sa_opts=None, optimistic=False,
             verify_directory(self.lock_dir)

         # Check to see if the table's been created before
-        url = url or sa_opts['sa.url']
+        sa_opts['sa.url'] = url = url or sa_opts['sa.url']
         table_key = url + table_name

-        def make_cache():
-            # Check to see if we have a connection pool open already
-            meta_key = url + table_name
-
-            def make_meta():
-                # SQLAlchemy pops the url, this ensures it sticks around
-                # later
-                sa_opts['sa.url'] = url
-                engine = sa.engine_from_config(sa_opts, 'sa.')
-                meta = sa.MetaData()
-                meta.bind = engine
-                return meta
-            meta = DatabaseNamespaceManager.metadatas.get(meta_key, make_meta)
-            # Create the table object and cache it now
-            cache = sa.Table(table_name, meta,
-                             sa.Column('id', types.Integer, primary_key=True),
-                             sa.Column('namespace', types.String(255), nullable=False),
-                             sa.Column('accessed', types.DateTime, nullable=False),
-                             sa.Column('created', types.DateTime, nullable=False),
-                             sa.Column('data', types.PickleType, nullable=False),
-                             sa.UniqueConstraint('namespace'),
-                             schema=schema_name if schema_name else meta.schema
-                             )
-            cache.create(checkfirst=True)
-            return cache
-        self.hash = {}
-        self._is_new = False
-        self.loaded = False
-        self.cache = DatabaseNamespaceManager.tables.get(table_key, make_cache)
-
-    def get_access_lock(self):
-        return null_synchronizer()
-
-    def get_creation_lock(self, key):
-        return file_synchronizer(
-            identifier="databasecontainer/funclock/%s/%s" % (
-                self.namespace, key
-            ),
-            lock_dir=self.lock_dir)
-
-    def do_open(self, flags, replace):
-        # If we already loaded the data, don't bother loading it again
-        if self.loaded:
-            self.flags = flags
-            return
-
-        cache = self.cache
-        result_proxy = sa.select([cache.c.data],
-                                 cache.c.namespace == self.namespace
-                                 ).execute()
-        result = result_proxy.fetchone()
-        result_proxy.close()
-
-        if not result:
-            self._is_new = True
-            self.hash = {}
-        else:
-            self._is_new = False
-            try:
-                self.hash = result['data']
-            except (IOError, OSError, EOFError, pickle.PickleError,
-                    pickle.PickleError):
-                log.debug("Couln't load pickle data, creating new storage")
-                self.hash = {}
-                self._is_new = True
-        self.flags = flags
-        self.loaded = True
-
-    def do_close(self):
-        if self.flags is not None and (self.flags == 'c' or self.flags == 'w'):
-            cache = self.cache
-            if self._is_new:
-                cache.insert().execute(namespace=self.namespace, data=self.hash,
-                                       accessed=datetime.now(),
-                                       created=datetime.now())
-                self._is_new = False
-            else:
-                cache.update(cache.c.namespace == self.namespace).execute(
-                    data=self.hash, accessed=datetime.now())
-        self.flags = None
-
-    def do_remove(self):
-        cache = self.cache
-        cache.delete(cache.c.namespace == self.namespace).execute()
-        self.hash = {}
-
-        # We can retain the fact that we did a load attempt, but since the
-        # file is gone this will be a new namespace should it be saved.
-        self._is_new = True
-
-    def __getitem__(self, key):
-        return self.hash[key]
-
-    def __contains__(self, key):
-        return key in self.hash
-
-    def __setitem__(self, key, value):
-        self.hash[key] = value
-
-    def __delitem__(self, key):
-        del self.hash[key]
-
-    def keys(self):
-        return self.hash.keys()
+        def make_table(engine):
+            meta = sa.MetaData()
+            meta.bind = engine
+            cache_table = sa.Table(table_name, meta,
+                                   sa.Column('id', types.Integer, primary_key=True),
+                                   sa.Column('namespace', types.String(255), nullable=False),
+                                   sa.Column('accessed', types.DateTime, nullable=False),
+                                   sa.Column('created', types.DateTime, nullable=False),
+                                   sa.Column('data', types.PickleType, nullable=False),
+                                   sa.UniqueConstraint('namespace'),
+                                   schema=schema_name if schema_name else meta.schema)
+            cache_table.create(bind=engine, checkfirst=True)
+            return cache_table
+
+        engine = self.__class__.binds.get(url, lambda: sa.engine_from_config(sa_opts, 'sa.'))
+        table = self.__class__.tables.get(table_key, lambda: make_table(engine))
+
+        SqlaNamespaceManager.__init__(self, namespace, engine, table,
+                                      data_dir=data_dir, lock_dir=lock_dir)


 class DatabaseContainer(Container):
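
For context, here is a minimal usage sketch (not part of this diff) showing how the refactored backend is typically reached. It assumes beaker's usual CacheManager / parse_cache_config_options configuration interface, a local SQLite URL, and writable /tmp paths; the hypothetical function name compute is for illustration only.

# Hypothetical usage sketch, not part of this commit.
# 'ext:database' maps to DatabaseNamespaceManager; 'cache.url' supplies the
# `url` argument above, and the table name defaults to 'beaker_cache'.
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

config = {
    'cache.type': 'ext:database',
    'cache.url': 'sqlite:///beaker_cache.db',  # any SQLAlchemy-compliant db url
    'cache.data_dir': '/tmp/cache/data',
    'cache.lock_dir': '/tmp/cache/lock',
}
cache = CacheManager(**parse_cache_config_options(config))

@cache.cache('answers', expire=300)
def compute(n):
    # Expensive work; the pickled result is stored in the cache table.
    return n * n

print(compute(7))  # first call computes, later calls read from the database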