author      Jacek Antonelli  2008-08-15 23:45:27 -0500
committer   Jacek Antonelli  2008-08-15 23:45:27 -0500
commit      a8a62201ba762e98dff92cf49033e577fc34d8d4 (patch)
tree        11f8513c5cdc222f2fac0c93eb724c089803c200 /linden/indra/lib/python
parent      Second Life viewer sources 1.18.6.4-RC (diff)
download    meta-impy-a8a62201ba762e98dff92cf49033e577fc34d8d4.tar.gz (also .zip, .tar.bz2, .tar.xz)
Second Life viewer sources 1.19.0.0
Diffstat (limited to 'linden/indra/lib/python')
-rw-r--r--  linden/indra/lib/python/indra/base/lluuid.py       |   7
-rw-r--r--  linden/indra/lib/python/indra/base/metrics.py      |  42
-rw-r--r--  linden/indra/lib/python/indra/ipc/mysql_pool.py    |   2
-rw-r--r--  linden/indra/lib/python/indra/ipc/saranwrap.py     |  28
-rw-r--r--  linden/indra/lib/python/indra/util/llmanifest.py   |   1
-rw-r--r--  linden/indra/lib/python/indra/util/named_query.py  | 387
6 files changed, 412 insertions, 55 deletions
diff --git a/linden/indra/lib/python/indra/base/lluuid.py b/linden/indra/lib/python/indra/base/lluuid.py
index e9916a1..f173310 100644
--- a/linden/indra/lib/python/indra/base/lluuid.py
+++ b/linden/indra/lib/python/indra/base/lluuid.py
@@ -212,6 +212,10 @@ class UUID(object):
         _int2binstr(v3,4) + \
         _int2binstr(v4,4)
 
+
+# module-level null constant
+NULL = UUID()
+
 def printTranslatedMemory(four_hex_uints):
     """
     We expect to get the string as four hex units. eg:
@@ -276,7 +280,8 @@ try:
     from mulib import stacked
     stacked.NoProducer() # just to exercise stacked
 except:
-    print "Couldn't import mulib.stacked, not registering UUID converter"
+    #print "Couldn't import mulib.stacked, not registering UUID converter"
+    pass
 else:
     def convertUUID(uuid, req):
         req.write(str(uuid))
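The lluuid change above adds a shared module-level NULL constant, so callers can compare against a single null UUID instead of constructing UUID() each time. A minimal usage sketch (the surrounding lines are illustrative; only lluuid.NULL comes from this commit):

    from indra.base import lluuid

    # lluuid.NULL is a default-constructed (null) UUID shared by the module,
    # so callers no longer need to build UUID() themselves for comparisons.
    some_id = lluuid.UUID()
    print str(some_id) == str(lluuid.NULL)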
diff --git a/linden/indra/lib/python/indra/base/metrics.py b/linden/indra/lib/python/indra/base/metrics.py
new file mode 100644
index 0000000..e640c45
--- /dev/null
+++ b/linden/indra/lib/python/indra/base/metrics.py
@@ -0,0 +1,42 @@
+"""\
+@file metrics.py
+@author Phoenix
+@date 2007-11-27
+@brief simple interface for logging metrics
+
+$LicenseInfo:firstyear=2007&license=mit$
+
+Copyright (c) 2007-2008, Linden Research, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+$/LicenseInfo$
+"""
+
+import sys
+from indra.base import llsd
+
+def log(location, stats, file=None):
+    "Write a standard llmetrics log"
+    metrics = {'location':location, 'stats':stats}
+    if file is None:
+        # do this check here in case sys.stdout changes at some
+        # point. as a default parameter, it will never be
+        # re-evaluated.
+        file = sys.stdout
+    print >>file, "LLMETRICS:", llsd.format_notation(metrics)
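Since metrics.py is a new module in this commit, a short usage sketch may help; the location name and stats values below are invented for illustration:

    import sys
    from indra.base import metrics

    # Writes one line to sys.stdout (the default destination), roughly:
    #   LLMETRICS: <llsd notation of {'location':..., 'stats':...}>
    metrics.log('login', {'attempts': 10, 'failures': 1})

    # Any file-like object can be passed instead of the default stdout.
    metrics.log('login', {'attempts': 10, 'failures': 1}, file=sys.stderr)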
diff --git a/linden/indra/lib/python/indra/ipc/mysql_pool.py b/linden/indra/lib/python/indra/ipc/mysql_pool.py
index bdc7eff..0a06cdd 100644
--- a/linden/indra/lib/python/indra/ipc/mysql_pool.py
+++ b/linden/indra/lib/python/indra/ipc/mysql_pool.py
@@ -97,7 +97,7 @@ class ConnectionPool(Pool):
         except (AttributeError, DeadProcess), e:
             conn = self.create()
         # TODO figure out if we're still connected to the database
-        if conn:
+        if conn is not None:
             Pool.put(self, conn)
         else:
             self.current_size -= 1
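The one-line mysql_pool change swaps a truthiness test for an explicit None test. The distinction matters because a connection object that happens to evaluate as false would otherwise be dropped instead of returned to the pool. A small, self-contained illustration (the class below is hypothetical, not part of mysql_pool):

    class FalsyConnection(object):
        # a wrapper may legitimately evaluate as false, e.g. by defining
        # __nonzero__ (or __len__) for its own bookkeeping
        def __nonzero__(self):
            return False

    conn = FalsyConnection()
    print bool(conn)          # False -- "if conn:" would take the else branch
    print conn is not None    # True  -- the explicit check keeps the connection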
diff --git a/linden/indra/lib/python/indra/ipc/saranwrap.py b/linden/indra/lib/python/indra/ipc/saranwrap.py
index 5a2cf72..e0205bf 100644
--- a/linden/indra/lib/python/indra/ipc/saranwrap.py
+++ b/linden/indra/lib/python/indra/ipc/saranwrap.py
@@ -110,6 +110,12 @@ _g_debug_mode = False
 if _g_debug_mode:
     import traceback
 
+def pythonpath_sync():
+    """
+    @brief apply the current sys.path to the environment variable PYTHONPATH, so that child processes have the same paths as the caller does.
+    """
+    pypath = os.pathsep.join(sys.path)
+    os.environ['PYTHONPATH'] = pypath
 
 def wrap(obj, dead_callback = None):
     """
@@ -119,6 +125,7 @@ def wrap(obj, dead_callback = None):
 
     if type(obj).__name__ == 'module':
         return wrap_module(obj.__name__, dead_callback)
+    pythonpath_sync()
     p = Process('python', [__file__, '--child'], dead_callback)
     prox = Proxy(p, p)
     prox.obj = obj
@@ -129,6 +136,7 @@ def wrap_module(fqname, dead_callback = None):
     @brief wrap a module in another process through a saranwrap proxy
     @param fqname The fully qualified name of the module.
    @param dead_callback A callable to invoke if the process exits."""
+    pythonpath_sync()
     global _g_debug_mode
     if _g_debug_mode:
         p = Process('python', [__file__, '--module', fqname, '--logfile', '/tmp/saranwrap.log'], dead_callback)
@@ -277,12 +285,13 @@ not supported, so you have to know what has been exported.
         my_in = self.__local_dict['_in']
         my_out = self.__local_dict['_out']
         my_id = self.__local_dict['_id']
+
         _dead_list = self.__local_dict['_dead_list']
-        for dead_object in _dead_list:
+        for dead_object in _dead_list.copy():
             request = Request('del', {'id':dead_object})
             _write_request(request, my_out)
             response = _read_response(my_id, attribute, my_in, my_out, _dead_list)
-        _dead_list.clear()
+            _dead_list.remove(dead_object)
 
         # Pass all public attributes across to find out if it is
         # callable or a simple attribute.
@@ -327,7 +336,7 @@ not need to deal with this class directly."""
 
     def __del__(self):
         my_id = self.__local_dict['_id']
-        #_prnt"ObjectProxy::__del__ %s" % my_id
+        _prnt("ObjectProxy::__del__ %s" % my_id)
         self.__local_dict['_dead_list'].add(my_id)
 
     def __getitem__(self, key):
@@ -369,6 +378,11 @@ not need to deal with this class directly."""
         # tack anything on to the return value here because str values are used as data.
         return self.__str__()
 
+    def __len__(self):
+        # see description for __repr__, len(obj) is the same. Unfortunately, __len__ is also
+        # used when determining whether an object is boolean or not, e.g. if proxied_object:
+        return self.__len__()
+
 def proxied_type(self):
     if type(self) is not ObjectProxy:
         return type(self)
@@ -554,7 +568,7 @@ when the id is None."""
         @param value The value to test.
         @return Returns true if value is a simple serializeable set of data.
         """
-        return type(value) in (str,int,float,long,bool,type(None))
+        return type(value) in (str,unicode,int,float,long,bool,type(None))
 
     def respond(self, body):
         _log("responding with: %s" % body)
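Both wrap() and wrap_module() now call the new pythonpath_sync() helper before spawning the child interpreter, so the child resolves imports against the same paths as the parent. A hedged sketch of the effect (the module name is illustrative):

    import os
    import sys
    from indra.ipc import saranwrap

    # pythonpath_sync() copies the parent's sys.path into PYTHONPATH...
    saranwrap.pythonpath_sync()
    assert os.environ['PYTHONPATH'] == os.pathsep.join(sys.path)

    # ...which wrap_module() now does automatically before launching the
    # child python process that hosts the proxied module.
    proxy = saranwrap.wrap_module('some.module')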
diff --git a/linden/indra/lib/python/indra/util/llmanifest.py b/linden/indra/lib/python/indra/util/llmanifest.py
index 2e6f410..e5a732d 100644
--- a/linden/indra/lib/python/indra/util/llmanifest.py
+++ b/linden/indra/lib/python/indra/util/llmanifest.py
@@ -28,6 +28,7 @@ $/LicenseInfo$
 """
 
 import commands
+import errno
 import filecmp
 import fnmatch
 import getopt
diff --git a/linden/indra/lib/python/indra/util/named_query.py b/linden/indra/lib/python/indra/util/named_query.py
index 1e1701c..c462d9f 100644
--- a/linden/indra/lib/python/indra/util/named_query.py
+++ b/linden/indra/lib/python/indra/util/named_query.py
@@ -28,20 +28,33 @@ THE SOFTWARE.
 $/LicenseInfo$
 """
 
+import errno
 import MySQLdb
+import MySQLdb.cursors
 import os
 import os.path
+import re
 import time
 
+#import sys # *TODO: remove. only used in testing.
+#import pprint # *TODO: remove. only used in testing.
+
+try:
+    set = set
+except NameError:
+    from sets import Set as set
+
 from indra.base import llsd
 from indra.base import config
-from indra.ipc import russ
 
 _g_named_manager = None
 
-# this function is entirely intended for testing purposes,
-# because it's tricky to control the config from inside a test
 def _init_g_named_manager(sql_dir = None):
+    """Initializes a global NamedManager object to point at a
+    specified named queries hierarchy.
+
+    This function is intended entirely for testing purposes,
+    because it's tricky to control the config from inside a test."""
     if sql_dir is None:
         sql_dir = config.get('named-query-base-dir')
     global _g_named_manager
@@ -49,14 +62,14 @@ def _init_g_named_manager(sql_dir = None):
         os.path.abspath(os.path.realpath(sql_dir)))
 
 def get(name):
-    "@brief get the named query object to be used to perform queries"
+    "Get the named query object to be used to perform queries"
     if _g_named_manager is None:
         _init_g_named_manager()
     return _g_named_manager.get(name)
 
-def sql(name, params):
+def sql(connection, name, params):
     # use module-global NamedQuery object to perform default substitution
-    return get(name).sql(params)
+    return get(name).sql(connection, params)
 
 def run(connection, name, params, expect_rows = None):
     """\
@@ -72,66 +85,243 @@ Note that this function will fetch ALL rows.
     return get(name).run(connection, params, expect_rows)
 
 class ExpectationFailed(Exception):
+    """ Exception that is raised when an expectation for an sql query
+    is not met."""
     def __init__(self, message):
+        Exception.__init__(self, message)
         self.message = message
 
 class NamedQuery(object):
     def __init__(self, name, filename):
-        self._stat_interval = 5000 # 5 seconds
+        """ Construct a NamedQuery object. The name argument is an
+        arbitrary name as a handle for the query, and the filename is
+        a path to a file containing an llsd named query document."""
+        self._stat_interval_seconds = 5 # 5 seconds
         self._name = name
         self._location = filename
+        self._alternative = dict()
+        self._last_mod_time = 0
+        self._last_check_time = 0
+        self.deleted = False
         self.load_contents()
 
     def name(self):
+        """ The name of the query. """
         return self._name
 
     def get_modtime(self):
-        return os.path.getmtime(self._location)
+        """ Returns the mtime (last modified time) of the named query
+        file, if such exists."""
+        if self._location:
+            return os.path.getmtime(self._location)
+        return 0
 
     def load_contents(self):
-        self._contents = llsd.parse(open(self._location).read())
+        """ Loads and parses the named query file into self. Does
+        nothing if self.location is nonexistant."""
+        if self._location:
+            self._reference_contents(llsd.parse(open(self._location).read()))
+            # Check for alternative implementations
+            try:
+                for name, alt in self._contents['alternative'].items():
+                    nq = NamedQuery(name, None)
+                    nq._reference_contents(alt)
+                    self._alternative[name] = nq
+            except KeyError, e:
+                pass
+        self._last_mod_time = self.get_modtime()
+        self._last_check_time = time.time()
+
+    def _reference_contents(self, contents):
+        "Helper method which builds internal structure from parsed contents"
+        self._contents = contents
         self._ttl = int(self._contents.get('ttl', 0))
         self._return_as_map = bool(self._contents.get('return_as_map', False))
         self._legacy_dbname = self._contents.get('legacy_dbname', None)
-        self._legacy_query = self._contents.get('legacy_query', None)
-        self._options = self._contents.get('options', {})
-        self._base_query = self._contents['base_query']
 
-        self._last_mod_time = self.get_modtime()
-        self._last_check_time = time.time()
+        # reset these before doing the sql conversion because we will
+        # read them there. reset these while loading so we pick up
+        # changes.
+        self._around = set()
+        self._append = set()
+        self._integer = set()
+        self._options = self._contents.get('dynamic_where', {})
+        for key in self._options:
+            if isinstance(self._options[key], basestring):
+                self._options[key] = self._convert_sql(self._options[key])
+            elif isinstance(self._options[key], list):
+                lines = []
+                for line in self._options[key]:
+                    lines.append(self._convert_sql(line))
+                self._options[key] = lines
+            else:
+                moreopt = {}
+                for kk in self._options[key]:
+                    moreopt[kk] = self._convert_sql(self._options[key][kk])
+                self._options[key] = moreopt
+        self._base_query = self._convert_sql(self._contents['base_query'])
+        self._query_suffix = self._convert_sql(
+            self._contents.get('query_suffix', ''))
+
+    def _convert_sql(self, sql):
+        """convert the parsed sql into a useful internal structure.
+
+        This function has to turn the named query format into a pyformat
+        style. It also has to look for %:name% and :name% and
+        ready them for use in LIKE statements"""
+        if sql:
+            #print >>sys.stderr, "sql:",sql
+            expr = re.compile("(%?):([a-zA-Z][a-zA-Z0-9_-]*)%")
+            sql = expr.sub(self._prepare_like, sql)
+            expr = re.compile("#:([a-zA-Z][a-zA-Z0-9_-]*)")
+            sql = expr.sub(self._prepare_integer, sql)
+            expr = re.compile(":([a-zA-Z][a-zA-Z0-9_-]*)")
+            sql = expr.sub("%(\\1)s", sql)
+        return sql
+
+    def _prepare_like(self, match):
+        """This function changes LIKE statement replace behavior
+
+        It works by turning %:name% to %(_name_around)s and :name% to
+        %(_name_append)s. Since a leading '_' is not a valid keyname
+        input (enforced via unit tests), it will never clash with
+        existing keys. Then, when building the statement, the query
+        runner will generate corrected strings."""
+        if match.group(1) == '%':
+            # there is a leading % so this is treated as prefix/suffix
+            self._around.add(match.group(2))
+            return "%(" + self._build_around_key(match.group(2)) + ")s"
+        else:
+            # there is no leading %, so this is suffix only
+            self._append.add(match.group(2))
+            return "%(" + self._build_append_key(match.group(2)) + ")s"
+
+    def _build_around_key(self, key):
+        return "_" + key + "_around"
+
+    def _build_append_key(self, key):
+        return "_" + key + "_append"
+
+    def _prepare_integer(self, match):
+        """This function adjusts the sql for #:name replacements
+
+        It works by turning #:name to %(_name_as_integer)s. Since a
+        leading '_' is not a valid keyname input (enforced via unit
+        tests), it will never clash with existing keys. Then, when
+        building the statement, the query runner will generate
+        corrected strings."""
+        self._integer.add(match.group(1))
+        return "%(" + self._build_integer_key(match.group(1)) + ")s"
+
+    def _build_integer_key(self, key):
+        return "_" + key + "_as_integer"
+
+    def _strip_wildcards_to_list(self, value):
+        """Take string, and strip out the LIKE special characters.
+
+        Technically, this is database dependant, but postgresql and
+        mysql use the same wildcards, and I am not aware of a general
+        way to handle this. I think you need a sql statement of the
+        form:
+
+        LIKE_STRING( [ANY,ONE,str]... )
+
+        which would treat ANY as their any string, and ONE as their
+        single glyph, and str as something that needs database
+        specific encoding to not allow any % or _ to affect the query.
+
+        As it stands, I believe it's impossible to write a named query
+        style interface which uses like to search the entire space of
+        text available. Imagine the query:
+
+        % of brain used by average linden
+
+        In order to search for %, it must be escaped, so once you have
+        escaped the string to not do wildcard searches, and be escaped
+        for the database, and then prepended the wildcard you come
+        back with one of:
+
+        1) %\% of brain used by average linden
+        2) %%% of brain used by average linden
+
+        Then, when passed to the database to be escaped to be database
+        safe, you get back:
+
+        1) %\\% of brain used by average linden
+        : which means search for any character sequence, followed by a
+        backslash, followed by any sequence, followed by ' of
+        brain...'
+        2) %%% of brain used by average linden
+        : which (I believe) means search for a % followed by any
+        character sequence followed by 'of brain...'
+
+        Neither of which is what we want!
+
+        So, we need a vendor (or extention) for LIKE_STRING. Anyone
+        want to write it?"""
+        utf8_value = unicode(value, "utf-8")
+        esc_list = []
+        remove_chars = set(u"%_")
+        for glyph in utf8_value:
+            if glyph in remove_chars:
+                continue
+            esc_list.append(glyph.encode("utf-8"))
+        return esc_list
+
+    def delete(self):
+        """ Makes this query unusable by deleting all the members and
+        setting the deleted member. This is desired when the on-disk
+        query has been deleted but the in-memory copy remains."""
+        # blow away all members except _name, _location, and deleted
+        name, location = self._name, self._location
+        for key in self.__dict__.keys():
+            del self.__dict__[key]
+        self.deleted = True
+        self._name, self._location = name, location
 
     def ttl(self):
+        """ Estimated time to live of this query. Used for web
+        services to set the Expires header."""
         return self._ttl
 
     def legacy_dbname(self):
         return self._legacy_dbname
 
-    def legacy_query(self):
-        return self._legacy_query
-
     def return_as_map(self):
+        """ Returns true if this query is configured to return its
+        results as a single map (as opposed to a list of maps, the
+        normal behavior)."""
+
         return self._return_as_map
 
-    def run(self, connection, params, expect_rows = None, use_dictcursor = True):
-        """\
-@brief given a connection, run a named query with the params
+    def for_schema(self, db_name):
+        "Look trough the alternates and return the correct query"
+        try:
+            return self._alternative[db_name]
+        except KeyError, e:
+            pass
+        return self
 
-Note that this function will fetch ALL rows. We do this because it
-opens and closes the cursor to generate the values, and this isn't a generator so the
-cursor has no life beyond the method call.
-@param cursor The connection to use (this generates its own cursor for the query)
-@param name The name of the query to run
-@param params The parameters passed into the query
-@param expect_rows The number of rows expected. Set to 1 if return_as_map is true. Raises ExpectationFailed if the number of returned rows doesn't exactly match. Kind of a hack.
-@param use_dictcursor Set to false to use a normal cursor and manually convert the rows to dicts.
-@return Returns the result set as a list of dicts, or, if the named query has return_as_map set to true, returns a single dict.
+    def run(self, connection, params, expect_rows = None, use_dictcursor = True):
+        """given a connection, run a named query with the params
+
+        Note that this function will fetch ALL rows. We do this because it
+        opens and closes the cursor to generate the values, and this
+        isn't a generator so the cursor has no life beyond the method call.
+
+        @param cursor The connection to use (this generates its own cursor for the query)
+        @param name The name of the query to run
+        @param params The parameters passed into the query
+        @param expect_rows The number of rows expected. Set to 1 if return_as_map is true. Raises ExpectationFailed if the number of returned rows doesn't exactly match. Kind of a hack.
+        @param use_dictcursor Set to false to use a normal cursor and manually convert the rows to dicts.
+        @return Returns the result set as a list of dicts, or, if the named query has return_as_map set to true, returns a single dict.
         """
         if use_dictcursor:
             cursor = connection.cursor(MySQLdb.cursors.DictCursor)
         else:
             cursor = connection.cursor()
 
-        statement = self.sql(params)
+        statement = self.sql(connection, params)
         #print "SQL:", statement
         rows = cursor.execute(statement)
 
@@ -169,47 +359,152 @@ cursor has no life beyond the method call.
             return result_set[0]
         return result_set
 
-    def sql(self, params):
+    def sql(self, connection, params):
+        """ Generates an SQL statement from the named query document
+        and a dictionary of parameters.
+
+        """
         self.refresh()
 
         # build the query from the options available and the params
         base_query = []
         base_query.append(self._base_query)
+        #print >>sys.stderr, "base_query:",base_query
         for opt, extra_where in self._options.items():
-            if opt in params and (params[opt] == 0 or params[opt]):
-                if type(extra_where) in (dict, list, tuple):
+            if type(extra_where) in (dict, list, tuple):
+                if opt in params:
                     base_query.append(extra_where[params[opt]])
-                else:
+            else:
+                if opt in params and params[opt]:
                     base_query.append(extra_where)
-
+        if self._query_suffix:
+            base_query.append(self._query_suffix)
+        #print >>sys.stderr, "base_query:",base_query
         full_query = '\n'.join(base_query)
 
-        # do substitution
-        sql = russ.format(full_query, params)
+        # Go through the query and rewrite all of the ones with the
+        # @:name syntax.
+        rewrite = _RewriteQueryForArray(params)
+        expr = re.compile("@%\(([a-zA-Z][a-zA-Z0-9_-]*)\)s")
+        full_query = expr.sub(rewrite.operate, full_query)
+        params.update(rewrite.new_params)
+
+        # build out the params for like. We only have to do this
+        # parameters which were detected to have ued the where syntax
+        # during load.
+        #
+        # * treat the incoming string as utf-8
+        # * strip wildcards
+        # * append or prepend % as appropriate
+        new_params = {}
+        for key in params:
+            if key in self._around:
+                new_value = ['%']
+                new_value.extend(self._strip_wildcards_to_list(params[key]))
+                new_value.append('%')
+                new_params[self._build_around_key(key)] = ''.join(new_value)
+            if key in self._append:
+                new_value = self._strip_wildcards_to_list(params[key])
+                new_value.append('%')
+                new_params[self._build_append_key(key)] = ''.join(new_value)
+            if key in self._integer:
+                new_params[self._build_integer_key(key)] = int(params[key])
+        params.update(new_params)
+
+        # do substitution using the mysql (non-standard) 'literal'
+        # function to do the escaping.
+        sql = full_query % connection.literal(params)
         return sql
 
     def refresh(self):
-        # only stat the file every so often
+        """ Refresh self from the file on the filesystem.
+
+        This is optimized to be callable as frequently as you wish,
+        without adding too much load. It does so by only stat-ing the
+        file every N seconds, where N defaults to 5 and is
+        configurable through the member _stat_interval_seconds. If the stat
+        reveals that the file has changed, refresh will re-parse the
+        contents of the file and use them to update the named query
+        instance. If the stat reveals that the file has been deleted,
+        refresh will call self.delete to make the in-memory
+        representation unusable."""
         now = time.time()
-        if(now - self._last_check_time > self._stat_interval):
+        if(now - self._last_check_time > self._stat_interval_seconds):
             self._last_check_time = now
-            modtime = self.get_modtime()
-            if(modtime > self._last_mod_time):
-                self.load_contents()
+            try:
+                modtime = self.get_modtime()
+                if(modtime > self._last_mod_time):
+                    self.load_contents()
+            except OSError, e:
+                if e.errno == errno.ENOENT: # file not found
+                    self.delete() # clean up self
+                raise # pass the exception along to the caller so they know that this query disappeared
 
 class NamedQueryManager(object):
+    """ Manages the lifespan of NamedQuery objects, drawing from a
+    directory hierarchy of named query documents.
+
+    In practice this amounts to a memory cache of NamedQuery objects."""
+
     def __init__(self, named_queries_dir):
+        """ Initializes a manager to look for named queries in a
+        directory."""
         self._dir = os.path.abspath(os.path.realpath(named_queries_dir))
         self._cached_queries = {}
 
-    def sql(self, name, params):
+    def sql(self, connection, name, params):
         nq = self.get(name)
-        return nq.sql(params)
+        return nq.sql(connection, params)
 
     def get(self, name):
-        # new up/refresh a NamedQuery based on the name
+        """ Returns a NamedQuery instance based on the name, either
+        from memory cache, or by parsing from disk.
+
+        The name is simply a relative path to the directory associated
+        with the manager object. Before returning the instance, the
+        NamedQuery object is cached in memory, so that subsequent
+        accesses don't have to read from disk or do any parsing. This
+        means that NamedQuery objects returned by this method are
+        shared across all users of the manager object.
+        NamedQuery.refresh is used to bring the NamedQuery objects in
+        sync with the actual files on disk."""
         nq = self._cached_queries.get(name)
         if nq is None:
             nq = NamedQuery(name, os.path.join(self._dir, name))
             self._cached_queries[name] = nq
+        else:
+            try:
+                nq.refresh()
+            except OSError, e:
+                if e.errno == errno.ENOENT: # file not found
+                    del self._cached_queries[name]
+                raise # pass exception along to caller so they know that the query disappeared
+
         return nq
+
+class _RewriteQueryForArray(object):
+    "Helper class for rewriting queries with the @:name syntax"
+    def __init__(self, params):
+        self.params = params
+        self.new_params = dict()
+
+    def operate(self, match):
+        "Given a match, return the string that should be in use"
+        key = match.group(1)
+        value = self.params[key]
+        if type(value) in (list,tuple):
+            rv = []
+            for idx in range(len(value)):
+                new_key = "_" + key + "_" + str(idx)
+                self.new_params[new_key] = value[idx]
+                rv.append("%(" + new_key + ")s")
+            return ','.join(rv)
+        else:
+            # not something that can be expanded, so just drop the
+            # leading @ in the front of the match. This will mean that
+            # the single value we have, be it a string, int, whatever
+            # (other than dict) will correctly show up, eg:
+            #
+            # where foo in (@:foobar) -- foobar is a string, so we get
+            # where foo in (:foobar)
+            return match.group(0)[1:]
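The named query substitution rules introduced above can be seen end to end in a standalone sketch; the query text and parameter names are invented, but the regular expressions are the ones compiled in NamedQuery._convert_sql and NamedQuery.sql:

    import re

    # Rewrite rules from _convert_sql:
    #   %:name%  ->  %(_name_around)s      (LIKE, wildcard on both sides)
    #   :name%   ->  %(_name_append)s      (LIKE, wildcard appended)
    #   #:name   ->  %(_name_as_integer)s  (value coerced to int)
    #   :name    ->  %(name)s              (plain pyformat substitution)
    # @:name survives as @%(name)s and is expanded later by
    # _RewriteQueryForArray into a comma-separated list of placeholders
    # when the bound parameter is a list or tuple.
    query = ("SELECT id FROM agents WHERE name LIKE %:name% "
             "AND region IN (@:regions) LIMIT #:max")
    query = re.sub(r"(%?):([a-zA-Z][a-zA-Z0-9_-]*)%",
                   lambda m: "%(_" + m.group(2) +
                             ("_around)s" if m.group(1) else "_append)s"),
                   query)
    query = re.sub(r"#:([a-zA-Z][a-zA-Z0-9_-]*)", r"%(_\1_as_integer)s", query)
    query = re.sub(r":([a-zA-Z][a-zA-Z0-9_-]*)", r"%(\1)s", query)
    print query
    # SELECT id FROM agents WHERE name LIKE %(_name_around)s AND region IN (@%(regions)s) LIMIT %(_max_as_integer)s

The escaped values themselves are filled in at the end of sql() with connection.literal(params), which is why the method now takes the connection as an argument.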