aboutsummaryrefslogtreecommitdiffstatshomepage
path: root/linden/indra/lib
diff options
context:
space:
mode:
authorJacek Antonelli2008-12-01 17:39:58 -0600
committerJacek Antonelli2008-12-01 17:40:06 -0600
commit7abecb48babe6a6f09bf6692ba55076546cfced9 (patch)
tree8d18a88513fb97adf32c10aae78f4be1984942db /linden/indra/lib
parentSecond Life viewer sources 1.21.6 (diff)
downloadmeta-impy-7abecb48babe6a6f09bf6692ba55076546cfced9.zip
meta-impy-7abecb48babe6a6f09bf6692ba55076546cfced9.tar.gz
meta-impy-7abecb48babe6a6f09bf6692ba55076546cfced9.tar.bz2
meta-impy-7abecb48babe6a6f09bf6692ba55076546cfced9.tar.xz
Second Life viewer sources 1.22.0-RC
Diffstat (limited to 'linden/indra/lib')
-rw-r--r--linden/indra/lib/python/indra/base/lllog.py2
-rw-r--r--linden/indra/lib/python/indra/base/llsd.py205
-rw-r--r--linden/indra/lib/python/indra/ipc/llmessage.py2
-rw-r--r--linden/indra/lib/python/indra/ipc/llsdhttp.py2
-rw-r--r--linden/indra/lib/python/indra/ipc/mysql_pool.py23
-rw-r--r--linden/indra/lib/python/indra/ipc/saranwrap.py651
-rw-r--r--linden/indra/lib/python/indra/ipc/siesta.py4
-rw-r--r--linden/indra/lib/python/indra/util/fastest_elementtree.py31
-rw-r--r--linden/indra/lib/python/indra/util/iterators.py63
-rwxr-xr-xlinden/indra/lib/python/indra/util/iterators_test.py72
-rw-r--r--linden/indra/lib/python/indra/util/llmanifest.py3
-rwxr-xr-xlinden/indra/lib/python/indra/util/llperformance.py158
-rw-r--r--linden/indra/lib/python/indra/util/named_query.py20
-rwxr-xr-xlinden/indra/lib/python/indra/util/simperf_host_xml_parser.py338
-rwxr-xr-xlinden/indra/lib/python/indra/util/simperf_oprof_interface.py160
-rwxr-xr-xlinden/indra/lib/python/indra/util/simperf_proc_interface.py164
16 files changed, 1157 insertions, 741 deletions
diff --git a/linden/indra/lib/python/indra/base/lllog.py b/linden/indra/lib/python/indra/base/lllog.py
index 99c50ef..1301894 100644
--- a/linden/indra/lib/python/indra/base/lllog.py
+++ b/linden/indra/lib/python/indra/base/lllog.py
@@ -59,7 +59,7 @@ class Logger(object):
59 return self._sequence 59 return self._sequence
60 60
61 def log(self, msg, llsd): 61 def log(self, msg, llsd):
62 payload = 'LLLOGMESSAGE (%d) %s %s' % (self.next(), msg, 62 payload = 'INFO: log: LLLOGMESSAGE (%d) %s %s' % (self.next(), msg,
63 format_notation(llsd)) 63 format_notation(llsd))
64 syslog.syslog(payload) 64 syslog.syslog(payload)
65 65
diff --git a/linden/indra/lib/python/indra/base/llsd.py b/linden/indra/lib/python/indra/base/llsd.py
index 995ace7..5b8f5d7 100644
--- a/linden/indra/lib/python/indra/base/llsd.py
+++ b/linden/indra/lib/python/indra/base/llsd.py
@@ -33,19 +33,26 @@ import time
33import types 33import types
34import re 34import re
35 35
36from indra.util.fastest_elementtree import fromstring 36from indra.util.fastest_elementtree import ElementTreeError, fromstring
37from indra.base import lluuid 37from indra.base import lluuid
38 38
39int_regex = re.compile("[-+]?\d+") 39try:
40real_regex = re.compile("[-+]?(\d+(\.\d*)?|\d*\.\d+)([eE][-+]?\d+)?") 40 import cllsd
41alpha_regex = re.compile("[a-zA-Z]+") 41except ImportError:
42date_regex = re.compile("(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})T(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})(?P<second_float>\.\d{2})?Z") 42 cllsd = None
43#date: d"YYYY-MM-DDTHH:MM:SS.FFZ" 43
44int_regex = re.compile(r"[-+]?\d+")
45real_regex = re.compile(r"[-+]?(\d+(\.\d*)?|\d*\.\d+)([eE][-+]?\d+)?")
46alpha_regex = re.compile(r"[a-zA-Z]+")
47date_regex = re.compile(r"(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})T"
48 r"(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})"
49 r"(?P<second_float>(\.\d+)?)Z")
50#date: d"YYYY-MM-DDTHH:MM:SS.FFFFFFZ"
44 51
45class LLSDParseError(Exception): 52class LLSDParseError(Exception):
46 pass 53 pass
47 54
48class LLSDSerializationError(Exception): 55class LLSDSerializationError(TypeError):
49 pass 56 pass
50 57
51 58
@@ -62,14 +69,7 @@ BOOL_FALSE = ('0', '0.0', 'false', '')
62 69
63def format_datestr(v): 70def format_datestr(v):
64 """ Formats a datetime object into the string format shared by xml and notation serializations.""" 71 """ Formats a datetime object into the string format shared by xml and notation serializations."""
65 second_str = "" 72 return v.isoformat() + 'Z'
66 if v.microsecond > 0:
67 seconds = v.second + float(v.microsecond) / 1000000
68 second_str = "%05.2f" % seconds
69 else:
70 second_str = "%d" % v.second
71 return '%s%sZ' % (v.strftime('%Y-%m-%dT%H:%M:'), second_str)
72
73 73
74def parse_datestr(datestr): 74def parse_datestr(datestr):
75 """Parses a datetime object from the string format shared by xml and notation serializations.""" 75 """Parses a datetime object from the string format shared by xml and notation serializations."""
@@ -89,7 +89,7 @@ def parse_datestr(datestr):
89 seconds_float = match.group('second_float') 89 seconds_float = match.group('second_float')
90 microsecond = 0 90 microsecond = 0
91 if seconds_float: 91 if seconds_float:
92 microsecond = int(seconds_float[1:]) * 10000 92 microsecond = int(float('0' + seconds_float) * 1e6)
93 return datetime.datetime(year, month, day, hour, minute, second, microsecond) 93 return datetime.datetime(year, month, day, hour, minute, second, microsecond)
94 94
95 95
@@ -116,7 +116,7 @@ def uuid_to_python(node):
116 return lluuid.UUID(node.text) 116 return lluuid.UUID(node.text)
117 117
118def str_to_python(node): 118def str_to_python(node):
119 return unicode(node.text or '').encode('utf8', 'replace') 119 return node.text or ''
120 120
121def bin_to_python(node): 121def bin_to_python(node):
122 return binary(base64.decodestring(node.text or '')) 122 return binary(base64.decodestring(node.text or ''))
@@ -189,9 +189,13 @@ class LLSDXMLFormatter(object):
189 if(contents is None or contents is ''): 189 if(contents is None or contents is ''):
190 return "<%s />" % (name,) 190 return "<%s />" % (name,)
191 else: 191 else:
192 if type(contents) is unicode:
193 contents = contents.encode('utf-8')
192 return "<%s>%s</%s>" % (name, contents, name) 194 return "<%s>%s</%s>" % (name, contents, name)
193 195
194 def xml_esc(self, v): 196 def xml_esc(self, v):
197 if type(v) is unicode:
198 v = v.encode('utf-8')
195 return v.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;') 199 return v.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
196 200
197 def LLSD(self, v): 201 def LLSD(self, v):
@@ -237,9 +241,14 @@ class LLSDXMLFormatter(object):
237 raise LLSDSerializationError("Cannot serialize unknown type: %s (%s)" % ( 241 raise LLSDSerializationError("Cannot serialize unknown type: %s (%s)" % (
238 t, something)) 242 t, something))
239 243
240 def format(self, something): 244 def _format(self, something):
241 return '<?xml version="1.0" ?>' + self.elt("llsd", self.generate(something)) 245 return '<?xml version="1.0" ?>' + self.elt("llsd", self.generate(something))
242 246
247 def format(self, something):
248 if cllsd:
249 return cllsd.llsd_to_xml(something)
250 return self._format(something)
251
243_g_xml_formatter = None 252_g_xml_formatter = None
244def format_xml(something): 253def format_xml(something):
245 global _g_xml_formatter 254 global _g_xml_formatter
@@ -356,8 +365,10 @@ class LLSDNotationFormatter(object):
356 def UUID(self, v): 365 def UUID(self, v):
357 return "u%s" % v 366 return "u%s" % v
358 def BINARY(self, v): 367 def BINARY(self, v):
359 raise LLSDSerializationError("binary notation not yet supported") 368 return 'b64"' + base64.encodestring(v) + '"'
360 def STRING(self, v): 369 def STRING(self, v):
370 if isinstance(v, unicode):
371 v = v.encode('utf-8')
361 return "'%s'" % v.replace("\\", "\\\\").replace("'", "\\'") 372 return "'%s'" % v.replace("\\", "\\\\").replace("'", "\\'")
362 def URI(self, v): 373 def URI(self, v):
363 return 'l"%s"' % str(v).replace("\\", "\\\\").replace('"', '\\"') 374 return 'l"%s"' % str(v).replace("\\", "\\\\").replace('"', '\\"')
@@ -366,16 +377,24 @@ class LLSDNotationFormatter(object):
366 def ARRAY(self, v): 377 def ARRAY(self, v):
367 return "[%s]" % ','.join([self.generate(item) for item in v]) 378 return "[%s]" % ','.join([self.generate(item) for item in v])
368 def MAP(self, v): 379 def MAP(self, v):
369 return "{%s}" % ','.join(["'%s':%s" % (key.replace("\\", "\\\\").replace("'", "\\'"), self.generate(value)) 380 def fix(key):
381 if isinstance(key, unicode):
382 return key.encode('utf-8')
383 return key
384 return "{%s}" % ','.join(["'%s':%s" % (fix(key).replace("\\", "\\\\").replace("'", "\\'"), self.generate(value))
370 for key, value in v.items()]) 385 for key, value in v.items()])
371 386
372 def generate(self, something): 387 def generate(self, something):
373 t = type(something) 388 t = type(something)
374 if self.type_map.has_key(t): 389 handler = self.type_map.get(t)
375 return self.type_map[t](something) 390 if handler:
391 return handler(something)
376 else: 392 else:
377 raise LLSDSerializationError("Cannot serialize unknown type: %s (%s)" % ( 393 try:
378 t, something)) 394 return self.ARRAY(iter(something))
395 except TypeError:
396 raise LLSDSerializationError(
397 "Cannot serialize unknown type: %s (%s)" % (t, something))
379 398
380 def format(self, something): 399 def format(self, something):
381 return self.generate(something) 400 return self.generate(something)
@@ -479,7 +498,6 @@ class LLSDBinaryParser(object):
479 raise LLSDParseError("invalid map key at byte %d." % ( 498 raise LLSDParseError("invalid map key at byte %d." % (
480 self._index - 1,)) 499 self._index - 1,))
481 value = self._parse() 500 value = self._parse()
482 #print "kv:",key,value
483 rv[key] = value 501 rv[key] = value
484 count += 1 502 count += 1
485 cc = self._buffer[self._index] 503 cc = self._buffer[self._index]
@@ -636,11 +654,23 @@ class LLSDNotationParser(object):
636 # 'd' = date in seconds since epoch 654 # 'd' = date in seconds since epoch
637 return self._parse_date() 655 return self._parse_date()
638 elif cc == 'b': 656 elif cc == 'b':
639 raise LLSDParseError("binary notation not yet supported") 657 return self._parse_binary()
640 else: 658 else:
641 raise LLSDParseError("invalid token at index %d: %d" % ( 659 raise LLSDParseError("invalid token at index %d: %d" % (
642 self._index - 1, ord(cc))) 660 self._index - 1, ord(cc)))
643 661
662 def _parse_binary(self):
663 i = self._index
664 if self._buffer[i:i+2] == '64':
665 q = self._buffer[i+2]
666 e = self._buffer.find(q, i+3)
667 try:
668 return base64.decodestring(self._buffer[i+3:e])
669 finally:
670 self._index = e + 1
671 else:
672 raise LLSDParseError('random horrible binary format not supported')
673
644 def _parse_map(self): 674 def _parse_map(self):
645 """ map: { string:object, string:object } """ 675 """ map: { string:object, string:object } """
646 rv = {} 676 rv = {}
@@ -653,30 +683,23 @@ class LLSDNotationParser(object):
653 if cc in ("'", '"', 's'): 683 if cc in ("'", '"', 's'):
654 key = self._parse_string(cc) 684 key = self._parse_string(cc)
655 found_key = True 685 found_key = True
656 #print "key:",key
657 elif cc.isspace() or cc == ',': 686 elif cc.isspace() or cc == ',':
658 cc = self._buffer[self._index] 687 cc = self._buffer[self._index]
659 self._index += 1 688 self._index += 1
660 else: 689 else:
661 raise LLSDParseError("invalid map key at byte %d." % ( 690 raise LLSDParseError("invalid map key at byte %d." % (
662 self._index - 1,)) 691 self._index - 1,))
692 elif cc.isspace() or cc == ':':
693 cc = self._buffer[self._index]
694 self._index += 1
695 continue
663 else: 696 else:
664 if cc.isspace() or cc == ':':
665 #print "skipping whitespace '%s'" % cc
666 cc = self._buffer[self._index]
667 self._index += 1
668 continue
669 self._index += 1 697 self._index += 1
670 value = self._parse() 698 value = self._parse()
671 #print "kv:",key,value
672 rv[key] = value 699 rv[key] = value
673 found_key = False 700 found_key = False
674 cc = self._buffer[self._index] 701 cc = self._buffer[self._index]
675 self._index += 1 702 self._index += 1
676 #if cc == '}':
677 # break
678 #cc = self._buffer[self._index]
679 #self._index += 1
680 703
681 return rv 704 return rv
682 705
@@ -840,6 +863,14 @@ def format_binary(something):
840 return '<?llsd/binary?>\n' + _format_binary_recurse(something) 863 return '<?llsd/binary?>\n' + _format_binary_recurse(something)
841 864
842def _format_binary_recurse(something): 865def _format_binary_recurse(something):
866 def _format_list(something):
867 array_builder = []
868 array_builder.append('[' + struct.pack('!i', len(something)))
869 for item in something:
870 array_builder.append(_format_binary_recurse(item))
871 array_builder.append(']')
872 return ''.join(array_builder)
873
843 if something is None: 874 if something is None:
844 return '!' 875 return '!'
845 elif isinstance(something, LLSD): 876 elif isinstance(something, LLSD):
@@ -857,7 +888,10 @@ def _format_binary_recurse(something):
857 return 'u' + something._bits 888 return 'u' + something._bits
858 elif isinstance(something, binary): 889 elif isinstance(something, binary):
859 return 'b' + struct.pack('!i', len(something)) + something 890 return 'b' + struct.pack('!i', len(something)) + something
860 elif isinstance(something, (str, unicode)): 891 elif isinstance(something, str):
892 return 's' + struct.pack('!i', len(something)) + something
893 elif isinstance(something, unicode):
894 something = something.encode('utf-8')
861 return 's' + struct.pack('!i', len(something)) + something 895 return 's' + struct.pack('!i', len(something)) + something
862 elif isinstance(something, uri): 896 elif isinstance(something, uri):
863 return 'l' + struct.pack('!i', len(something)) + something 897 return 'l' + struct.pack('!i', len(something)) + something
@@ -865,35 +899,50 @@ def _format_binary_recurse(something):
865 seconds_since_epoch = time.mktime(something.timetuple()) 899 seconds_since_epoch = time.mktime(something.timetuple())
866 return 'd' + struct.pack('!d', seconds_since_epoch) 900 return 'd' + struct.pack('!d', seconds_since_epoch)
867 elif isinstance(something, (list, tuple)): 901 elif isinstance(something, (list, tuple)):
868 array_builder = [] 902 return _format_list(something)
869 array_builder.append('[' + struct.pack('!i', len(something)))
870 for item in something:
871 array_builder.append(_format_binary_recurse(item))
872 array_builder.append(']')
873 return ''.join(array_builder)
874 elif isinstance(something, dict): 903 elif isinstance(something, dict):
875 map_builder = [] 904 map_builder = []
876 map_builder.append('{' + struct.pack('!i', len(something))) 905 map_builder.append('{' + struct.pack('!i', len(something)))
877 for key, value in something.items(): 906 for key, value in something.items():
907 if isinstance(key, unicode):
908 key = key.encode('utf-8')
878 map_builder.append('k' + struct.pack('!i', len(key)) + key) 909 map_builder.append('k' + struct.pack('!i', len(key)) + key)
879 map_builder.append(_format_binary_recurse(value)) 910 map_builder.append(_format_binary_recurse(value))
880 map_builder.append('}') 911 map_builder.append('}')
881 return ''.join(map_builder) 912 return ''.join(map_builder)
882 else: 913 else:
883 raise LLSDSerializationError("Cannot serialize unknown type: %s (%s)" % ( 914 try:
884 type(something), something)) 915 return _format_list(list(something))
916 except TypeError:
917 raise LLSDSerializationError(
918 "Cannot serialize unknown type: %s (%s)" %
919 (type(something), something))
920
921
922def parse_binary(something):
923 header = '<?llsd/binary?>\n'
924 if not something.startswith(header):
925 raise LLSDParseError('LLSD binary encoding header not found')
926 return LLSDBinaryParser().parse(something[len(header):])
927
928def parse_xml(something):
929 try:
930 return to_python(fromstring(something)[0])
931 except ElementTreeError, err:
932 raise LLSDParseError(*err.args)
885 933
934def parse_notation(something):
935 return LLSDNotationParser().parse(something)
886 936
887def parse(something): 937def parse(something):
888 try: 938 try:
889 if something.startswith('<?llsd/binary?>'): 939 if something.startswith('<?llsd/binary?>'):
890 just_binary = something.split('\n', 1)[1] 940 return parse_binary(something)
891 return LLSDBinaryParser().parse(just_binary)
892 # This should be better. 941 # This should be better.
893 elif something.startswith('<'): 942 elif something.startswith('<'):
894 return to_python(fromstring(something)[0]) 943 return parse_xml(something)
895 else: 944 else:
896 return LLSDNotationParser().parse(something) 945 return parse_notation(something)
897 except KeyError, e: 946 except KeyError, e:
898 raise Exception('LLSD could not be parsed: %s' % (e,)) 947 raise Exception('LLSD could not be parsed: %s' % (e,))
899 948
@@ -913,6 +962,9 @@ class LLSD(object):
913 962
914undef = LLSD(None) 963undef = LLSD(None)
915 964
965XML_MIME_TYPE = 'application/llsd+xml'
966BINARY_MIME_TYPE = 'application/llsd+binary'
967
916# register converters for llsd in mulib, if it is available 968# register converters for llsd in mulib, if it is available
917try: 969try:
918 from mulib import stacked, mu 970 from mulib import stacked, mu
@@ -922,7 +974,7 @@ except:
922 # mulib not available, don't print an error message since this is normal 974 # mulib not available, don't print an error message since this is normal
923 pass 975 pass
924else: 976else:
925 mu.add_parser(parse, 'application/llsd+xml') 977 mu.add_parser(parse, XML_MIME_TYPE)
926 mu.add_parser(parse, 'application/llsd+binary') 978 mu.add_parser(parse, 'application/llsd+binary')
927 979
928 def llsd_convert_xml(llsd_stuff, request): 980 def llsd_convert_xml(llsd_stuff, request):
@@ -931,11 +983,58 @@ else:
931 def llsd_convert_binary(llsd_stuff, request): 983 def llsd_convert_binary(llsd_stuff, request):
932 request.write(format_binary(llsd_stuff)) 984 request.write(format_binary(llsd_stuff))
933 985
934 for typ in [LLSD, dict, list, tuple, str, int, float, bool, unicode, type(None)]: 986 for typ in [LLSD, dict, list, tuple, str, int, long, float, bool, unicode, type(None)]:
935 stacked.add_producer(typ, llsd_convert_xml, 'application/llsd+xml') 987 stacked.add_producer(typ, llsd_convert_xml, XML_MIME_TYPE)
936 stacked.add_producer(typ, llsd_convert_xml, 'application/xml') 988 stacked.add_producer(typ, llsd_convert_xml, 'application/xml')
937 stacked.add_producer(typ, llsd_convert_xml, 'text/xml') 989 stacked.add_producer(typ, llsd_convert_xml, 'text/xml')
938 990
939 stacked.add_producer(typ, llsd_convert_binary, 'application/llsd+binary') 991 stacked.add_producer(typ, llsd_convert_binary, 'application/llsd+binary')
940 992
941 stacked.add_producer(LLSD, llsd_convert_xml, '*/*') 993 stacked.add_producer(LLSD, llsd_convert_xml, '*/*')
994
995 # in case someone is using the legacy mu.xml wrapper, we need to
996 # tell mu to produce application/xml or application/llsd+xml
997 # (based on the accept header) from raw xml. Phoenix 2008-07-21
998 stacked.add_producer(mu.xml, mu.produce_raw, XML_MIME_TYPE)
999 stacked.add_producer(mu.xml, mu.produce_raw, 'application/xml')
1000
1001
1002
1003# mulib wsgi stuff
1004# try:
1005# from mulib import mu, adapters
1006#
1007# # try some known attributes from mulib to be ultra-sure we've imported it
1008# mu.get_current
1009# adapters.handlers
1010# except:
1011# # mulib not available, don't print an error message since this is normal
1012# pass
1013# else:
1014# def llsd_xml_handler(content_type):
1015# def handle_llsd_xml(env, start_response):
1016# llsd_stuff, _ = mu.get_current(env)
1017# result = format_xml(llsd_stuff)
1018# start_response("200 OK", [('Content-Type', content_type)])
1019# env['mu.negotiated_type'] = content_type
1020# yield result
1021# return handle_llsd_xml
1022#
1023# def llsd_binary_handler(content_type):
1024# def handle_llsd_binary(env, start_response):
1025# llsd_stuff, _ = mu.get_current(env)
1026# result = format_binary(llsd_stuff)
1027# start_response("200 OK", [('Content-Type', content_type)])
1028# env['mu.negotiated_type'] = content_type
1029# yield result
1030# return handle_llsd_binary
1031#
1032# adapters.DEFAULT_PARSERS[XML_MIME_TYPE] = parse
1033
1034# for typ in [LLSD, dict, list, tuple, str, int, float, bool, unicode, type(None)]:
1035# for content_type in (XML_MIME_TYPE, 'application/xml'):
1036# adapters.handlers.set_handler(typ, llsd_xml_handler(content_type), content_type)
1037#
1038# adapters.handlers.set_handler(typ, llsd_binary_handler(BINARY_MIME_TYPE), BINARY_MIME_TYPE)
1039#
1040# adapters.handlers.set_handler(LLSD, llsd_xml_handler(XML_MIME_TYPE), '*/*')
diff --git a/linden/indra/lib/python/indra/ipc/llmessage.py b/linden/indra/lib/python/indra/ipc/llmessage.py
index 446679d..c2f3fd4 100644
--- a/linden/indra/lib/python/indra/ipc/llmessage.py
+++ b/linden/indra/lib/python/indra/ipc/llmessage.py
@@ -88,7 +88,7 @@ class Message:
88 UDPDEPRECATED = "UDPDeprecated" 88 UDPDEPRECATED = "UDPDeprecated"
89 UDPBLACKLISTED = "UDPBlackListed" 89 UDPBLACKLISTED = "UDPBlackListed"
90 deprecations = [ NOTDEPRECATED, UDPDEPRECATED, UDPBLACKLISTED, DEPRECATED ] 90 deprecations = [ NOTDEPRECATED, UDPDEPRECATED, UDPBLACKLISTED, DEPRECATED ]
91 # in order of increasing deprecation 91 # in order of increasing deprecation
92 92
93 def __init__(self, name, number, priority, trust, coding): 93 def __init__(self, name, number, priority, trust, coding):
94 self.name = name 94 self.name = name
diff --git a/linden/indra/lib/python/indra/ipc/llsdhttp.py b/linden/indra/lib/python/indra/ipc/llsdhttp.py
index 1cf1146..ed64899 100644
--- a/linden/indra/lib/python/indra/ipc/llsdhttp.py
+++ b/linden/indra/lib/python/indra/ipc/llsdhttp.py
@@ -51,6 +51,8 @@ request_ = suite.request_
51# import every httpc error exception into our namespace for convenience 51# import every httpc error exception into our namespace for convenience
52for x in httpc.status_to_error_map.itervalues(): 52for x in httpc.status_to_error_map.itervalues():
53 globals()[x.__name__] = x 53 globals()[x.__name__] = x
54ConnectionError = httpc.ConnectionError
55Retriable = httpc.Retriable
54 56
55for x in (httpc.ConnectionError,): 57for x in (httpc.ConnectionError,):
56 globals()[x.__name__] = x 58 globals()[x.__name__] = x
diff --git a/linden/indra/lib/python/indra/ipc/mysql_pool.py b/linden/indra/lib/python/indra/ipc/mysql_pool.py
index 507b185..25a66cf 100644
--- a/linden/indra/lib/python/indra/ipc/mysql_pool.py
+++ b/linden/indra/lib/python/indra/ipc/mysql_pool.py
@@ -30,8 +30,10 @@ import MySQLdb
30from eventlet import db_pool 30from eventlet import db_pool
31 31
32class DatabaseConnector(db_pool.DatabaseConnector): 32class DatabaseConnector(db_pool.DatabaseConnector):
33 def __init__(self, credentials, min_size = 0, max_size = 4, *args, **kwargs): 33 def __init__(self, credentials, *args, **kwargs):
34 super(DatabaseConnector, self).__init__(MySQLdb, credentials, min_size, max_size, conn_pool=db_pool.ConnectionPool, *args, **kwargs) 34 super(DatabaseConnector, self).__init__(MySQLdb, credentials,
35 conn_pool=db_pool.ConnectionPool,
36 *args, **kwargs)
35 37
36 # get is extended relative to eventlet.db_pool to accept a port argument 38 # get is extended relative to eventlet.db_pool to accept a port argument
37 def get(self, host, dbname, port=3306): 39 def get(self, host, dbname, port=3306):
@@ -42,7 +44,7 @@ class DatabaseConnector(db_pool.DatabaseConnector):
42 new_kwargs['host'] = host 44 new_kwargs['host'] = host
43 new_kwargs['port'] = port 45 new_kwargs['port'] = port
44 new_kwargs.update(self.credentials_for(host)) 46 new_kwargs.update(self.credentials_for(host))
45 dbpool = ConnectionPool(self._min_size, self._max_size, *self._args, **new_kwargs) 47 dbpool = ConnectionPool(*self._args, **new_kwargs)
46 self._databases[key] = dbpool 48 self._databases[key] = dbpool
47 49
48 return self._databases[key] 50 return self._databases[key]
@@ -51,8 +53,8 @@ class ConnectionPool(db_pool.TpooledConnectionPool):
51 """A pool which gives out saranwrapped MySQLdb connections from a pool 53 """A pool which gives out saranwrapped MySQLdb connections from a pool
52 """ 54 """
53 55
54 def __init__(self, min_size = 0, max_size = 4, *args, **kwargs): 56 def __init__(self, *args, **kwargs):
55 super(ConnectionPool, self).__init__(MySQLdb, min_size, max_size, *args, **kwargs) 57 super(ConnectionPool, self).__init__(MySQLdb, *args, **kwargs)
56 58
57 def get(self): 59 def get(self):
58 conn = super(ConnectionPool, self).get() 60 conn = super(ConnectionPool, self).get()
@@ -77,14 +79,3 @@ class ConnectionPool(db_pool.TpooledConnectionPool):
77 conn.connection_parameters = converted_kwargs 79 conn.connection_parameters = converted_kwargs
78 return conn 80 return conn
79 81
80 def clear(self):
81 """ Close all connections that this pool still holds a reference to, leaving it empty."""
82 for conn in self.free_items:
83 try:
84 conn.close()
85 except:
86 pass # even if stuff happens here, we still want to at least try to close all the other connections
87 self.free_items.clear()
88
89 def __del__(self):
90 self.clear()
diff --git a/linden/indra/lib/python/indra/ipc/saranwrap.py b/linden/indra/lib/python/indra/ipc/saranwrap.py
deleted file mode 100644
index e0205bf..0000000
--- a/linden/indra/lib/python/indra/ipc/saranwrap.py
+++ /dev/null
@@ -1,651 +0,0 @@
1"""\
2@file saranwrap.py
3@author Phoenix
4@date 2007-07-13
5@brief A simple, pickle based rpc mechanism which reflects python
6objects and callables.
7
8$LicenseInfo:firstyear=2007&license=mit$
9
10Copyright (c) 2007-2008, Linden Research, Inc.
11
12Permission is hereby granted, free of charge, to any person obtaining a copy
13of this software and associated documentation files (the "Software"), to deal
14in the Software without restriction, including without limitation the rights
15to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
16copies of the Software, and to permit persons to whom the Software is
17furnished to do so, subject to the following conditions:
18
19The above copyright notice and this permission notice shall be included in
20all copies or substantial portions of the Software.
21
22THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
23IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
24FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
25AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
26LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
27OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
28THE SOFTWARE.
29$/LicenseInfo$
30
31This file provides classes and exceptions used for simple python level
32remote procedure calls. This is achieved by intercepting the basic
33getattr and setattr calls in a client proxy, which commnicates those
34down to the server which will dispatch them to objects in it's process
35space.
36
37The basic protocol to get and set attributes is for the client proxy
38to issue the command:
39
40getattr $id $name
41setattr $id $name $value
42
43getitem $id $item
44setitem $id $item $value
45eq $id $rhs
46del $id
47
48When the get returns a callable, the client proxy will provide a
49callable proxy which will invoke a remote procedure call. The command
50issued from the callable proxy to server is:
51
52call $id $name $args $kwargs
53
54If the client supplies an id of None, then the get/set/call is applied
55to the object(s) exported from the server.
56
57The server will parse the get/set/call, take the action indicated, and
58return back to the caller one of:
59
60value $val
61callable
62object $id
63exception $excp
64
65To handle object expiration, the proxy will instruct the rpc server to
66discard objects which are no longer in use. This is handled by
67catching proxy deletion and sending the command:
68
69del $id
70
71The server will handle this by removing clearing it's own internal
72references. This does not mean that the object will necessarily be
73cleaned from the server, but no artificial references will remain
74after successfully completing. On completion, the server will return
75one of:
76
77value None
78exception $excp
79
80The server also accepts a special command for debugging purposes:
81
82status
83
84Which will be intercepted by the server to write back:
85
86status {...}
87
88The wire protocol is to pickle the Request class in this file. The
89request class is basically an action and a map of parameters'
90"""
91
92import os
93import cPickle
94import struct
95import sys
96
97try:
98 set = set
99 frozenset = frozenset
100except NameError:
101 from sets import Set as set, ImmutableSet as frozenset
102
103from eventlet.processes import Process
104from eventlet import api
105
106#
107# debugging hooks
108#
109_g_debug_mode = False
110if _g_debug_mode:
111 import traceback
112
113def pythonpath_sync():
114 """
115@brief apply the current sys.path to the environment variable PYTHONPATH, so that child processes have the same paths as the caller does.
116"""
117 pypath = os.pathsep.join(sys.path)
118 os.environ['PYTHONPATH'] = pypath
119
120def wrap(obj, dead_callback = None):
121 """
122@brief wrap in object in another process through a saranwrap proxy
123@param object The object to wrap.
124@param dead_callback A callable to invoke if the process exits."""
125
126 if type(obj).__name__ == 'module':
127 return wrap_module(obj.__name__, dead_callback)
128 pythonpath_sync()
129 p = Process('python', [__file__, '--child'], dead_callback)
130 prox = Proxy(p, p)
131 prox.obj = obj
132 return prox.obj
133
134def wrap_module(fqname, dead_callback = None):
135 """
136@brief wrap a module in another process through a saranwrap proxy
137@param fqname The fully qualified name of the module.
138@param dead_callback A callable to invoke if the process exits."""
139 pythonpath_sync()
140 global _g_debug_mode
141 if _g_debug_mode:
142 p = Process('python', [__file__, '--module', fqname, '--logfile', '/tmp/saranwrap.log'], dead_callback)
143 else:
144 p = Process('python', [__file__, '--module', fqname,], dead_callback)
145 prox = Proxy(p, p)
146 return prox
147
148def status(proxy):
149 """
150@brief get the status from the server through a proxy
151@param proxy a saranwrap.Proxy object connected to a server."""
152 _write_request(Request('status', {}), proxy.__local_dict['_out'])
153 return _read_response(None, None, proxy.__local_dict['_in'], proxy.__local_dict['_out'], None)
154
155class BadResponse(Exception):
156 """"This exception is raised by an saranwrap client when it could
157 parse but cannot understand the response from the server."""
158 pass
159
160class BadRequest(Exception):
161 """"This exception is raised by a saranwrap server when it could parse
162 but cannot understand the response from the server."""
163 pass
164
165class UnrecoverableError(Exception):
166 pass
167
168class Request(object):
169 "@brief A wrapper class for proxy requests to the server."
170 def __init__(self, action, param):
171 self._action = action
172 self._param = param
173 def __str__(self):
174 return "Request `"+self._action+"` "+str(self._param)
175 def __getitem__(self, name):
176 return self._param[name]
177 def action(self):
178 return self._action
179
180def _read_lp_hunk(stream):
181 len_bytes = stream.read(4)
182 length = struct.unpack('I', len_bytes)[0]
183 body = stream.read(length)
184 return body
185
def _read_response(id, attribute, input, output, dead_list):
    """@brief local helper method to read responses from the rpc server.

    @param id remote object id the request was addressed to (None for
    the exported root object).
    @param attribute attribute name the request concerned; used to build
    a CallableProxy when the server says the result is callable.
    @param input a file-like object which supports read().
    @param output a file-like object which supports write() and flush().
    @param dead_list set of remote ids pending deletion, passed through
    to any proxy object constructed from the response.
    @return Returns the plain value, a CallableProxy, or an ObjectProxy
    depending on the response tag.
    @raise UnrecoverableError if the reply stream cannot be read.
    @raise BadResponse if the response tag is not recognized."""
    try:
        # NOTE(review): 'str' shadows the builtin here.
        str = _read_lp_hunk(input)
        _prnt(`str`)
        response = cPickle.loads(str)
    except AttributeError, e:
        # presumably 'input' lost its read() (dead pipe) -- treat the
        # channel as unusable. TODO confirm this is the intended case.
        raise UnrecoverableError(e)
    _prnt("response: %s" % response)
    if response[0] == 'value':
        # Simple picklable value: hand it back directly.
        return response[1]
    elif response[0] == 'callable':
        # Server says the attribute is callable; wrap it so calling it
        # issues a 'call' request.
        return CallableProxy(id, attribute, input, output, dead_list)
    elif response[0] == 'object':
        # Non-value result lives on the server; response[1] is its id.
        return ObjectProxy(input, output, response[1], dead_list)
    elif response[0] == 'exception':
        # The remote call raised; re-raise the unpickled exception here.
        exp = response[1]
        raise exp
    else:
        raise BadResponse(response[0])
206
207def _write_lp_hunk(stream, hunk):
208 write_length = struct.pack('I', len(hunk))
209 stream.write(write_length + hunk)
210 if hasattr(stream, 'flush'):
211 stream.flush()
212
def _write_request(param, output):
    """@brief Pickle a Request and send it as one length-prefixed hunk.

    @param param the Request object to send.
    @param output a file-like object which supports write() and flush()."""
    _prnt("request: %s" % param)
    # NOTE(review): 'str' shadows the builtin.
    str = cPickle.dumps(param)
    _write_lp_hunk(output, str)
217
218def _is_local(attribute):
219 "Return true if the attribute should be handled locally"
220# return attribute in ('_in', '_out', '_id', '__getattribute__', '__setattr__', '__dict__')
221 # good enough for now. :)
222 if '__local_dict' in attribute:
223 return True
224 return False
225
def _prnt(message):
    """@brief Print message to stdout, but only when the module's
    debug flag is set."""
    global _g_debug_mode
    if _g_debug_mode:
        print message
230
231_g_logfile = None
232def _log(message):
233 global _g_logfile
234 if _g_logfile:
235 _g_logfile.write(str(os.getpid()) + ' ' + message)
236 _g_logfile.write('\n')
237 _g_logfile.flush()
238
239def _unmunge_attr_name(name):
240 """ Sometimes attribute names come in with classname prepended, not sure why.
241 This function removes said classname, because we're huge hackers and we didn't
242 find out what the true right thing to do is. *FIX: find out. """
243 if(name.startswith('_Proxy')):
244 name = name[len('_Proxy'):]
245 if(name.startswith('_ObjectProxy')):
246 name = name[len('_ObjectProxy'):]
247 return name
248
249
class Proxy(object):
    """\
@class Proxy
@brief This class wraps a remote python process, presumably available
in an instance of an Server.

This is the class you will typically use as a client to a child
process. Simply instantiate one around a file-like interface and start
calling methods on the thing that is exported. The dir() builtin is
not supported, so you have to know what has been exported.

All local state lives in a single dict stored under the attribute
'__local_dict'; every other attribute access is forwarded to the
remote process via __getattribute__/__setattr__. See _is_local and
_unmunge_attr_name for how the mangled name is resolved.
"""
    def __init__(self, input, output, dead_list = None):
        """\
@param input a file-like object which supports read().
@param output a file-like object which supports write() and flush().
@param dead_list a set of remote object ids pending deletion, shared
with ObjectProxy instances spawned from this proxy. Defaults to a new
empty set.
"""
        # default dead_list inside the function because all objects in method
        # argument lists are init-ed only once globally
        if dead_list is None:
            dead_list = set()
        #_prnt("Proxy::__init__")
        # This assignment is intercepted by __setattr__ below: the
        # mangled name '_Proxy__local_dict' is unmunged and the dict is
        # stored in __dict__ under the plain key '__local_dict'.
        self.__local_dict = dict(
            _in = input,
            _out = output,
            _dead_list = dead_list,
            _id = None)

    def __getattribute__(self, attribute):
        """Resolve proxy-internal names locally; forward everything
        else to the remote process as a 'getattr' request."""
        #_prnt("Proxy::__getattr__: %s" % attribute)
        if _is_local(attribute):
            # call base class getattribute so we actually get the local variable
            attribute = _unmunge_attr_name(attribute)
            return super(Proxy, self).__getattribute__(attribute)
        else:
            my_in = self.__local_dict['_in']
            my_out = self.__local_dict['_out']
            my_id = self.__local_dict['_id']

            # Flush queued deletions first (ids queued by
            # ObjectProxy.__del__) so the server can free those objects.
            _dead_list = self.__local_dict['_dead_list']
            for dead_object in _dead_list.copy():
                request = Request('del', {'id':dead_object})
                _write_request(request, my_out)
                response = _read_response(my_id, attribute, my_in, my_out, _dead_list)
                _dead_list.remove(dead_object)

            # Pass all public attributes across to find out if it is
            # callable or a simple attribute.
            request = Request('getattr', {'id':my_id, 'attribute':attribute})
            _write_request(request, my_out)
            return _read_response(my_id, attribute, my_in, my_out, _dead_list)

    def __setattr__(self, attribute, value):
        """Store proxy-internal names in __dict__; forward everything
        else to the remote process as a 'setattr' request."""
        #_prnt("Proxy::__setattr__: %s" % attribute)
        if _is_local(attribute):
            # It must be local to this actual object, so we have to apply
            # it to the dict in a roundabout way
            attribute = _unmunge_attr_name(attribute)
            super(Proxy, self).__getattribute__('__dict__')[attribute]=value
        else:
            my_in = self.__local_dict['_in']
            my_out = self.__local_dict['_out']
            my_id = self.__local_dict['_id']
            _dead_list = self.__local_dict['_dead_list']
            # Pass the set attribute across
            request = Request('setattr', {'id':my_id, 'attribute':attribute, 'value':value})
            _write_request(request, my_out)
            return _read_response(my_id, attribute, my_in, my_out, _dead_list)
318
class ObjectProxy(Proxy):
    """\
@class ObjectProxy
@brief This class wraps a remote object in the Server

This class will be created during normal operation, and users should
not need to deal with this class directly."""

    def __init__(self, input, output, id, dead_list):
        """\
@param input a file-like object which supports read().
@param output a file-like object which supports write() and flush().
@param id an identifier for the remote object. humans do not provide this.
@param dead_list set of remote ids pending deletion, shared with the
parent Proxy.
"""
        Proxy.__init__(self, input, output, dead_list)
        # The mangled '_ObjectProxy__local_dict' is unmunged by
        # Proxy.__getattribute__ back to the plain '__local_dict' key.
        self.__local_dict['_id'] = id
        #_prnt("ObjectProxy::__init__ %s" % self._id)

    def __del__(self):
        # Queue the remote id for deletion rather than deleting
        # synchronously; the dead list is flushed on the next
        # Proxy.__getattribute__ round trip.
        my_id = self.__local_dict['_id']
        _prnt("ObjectProxy::__del__ %s" % my_id)
        self.__local_dict['_dead_list'].add(my_id)

    def __getitem__(self, key):
        """Forward obj[key] to the remote object as a 'getitem' request."""
        my_in = self.__local_dict['_in']
        my_out = self.__local_dict['_out']
        my_id = self.__local_dict['_id']
        _dead_list = self.__local_dict['_dead_list']
        request = Request('getitem', {'id':my_id, 'key':key})
        _write_request(request, my_out)
        return _read_response(my_id, key, my_in, my_out, _dead_list)

    def __setitem__(self, key, value):
        """Forward obj[key] = value to the remote object as a 'setitem'
        request."""
        my_in = self.__local_dict['_in']
        my_out = self.__local_dict['_out']
        my_id = self.__local_dict['_id']
        _dead_list = self.__local_dict['_dead_list']
        request = Request('setitem', {'id':my_id, 'key':key, 'value':value})
        _write_request(request, my_out)
        return _read_response(my_id, key, my_in, my_out, _dead_list)

    def __eq__(self, rhs):
        """Compare two proxied objects remotely via an 'eq' request.
        NOTE(review): assumes rhs is also an ObjectProxy (reads its
        __local_dict); no __ne__/__hash__ are defined to match."""
        my_in = self.__local_dict['_in']
        my_out = self.__local_dict['_out']
        my_id = self.__local_dict['_id']
        _dead_list = self.__local_dict['_dead_list']
        request = Request('eq', {'id':my_id, 'rhs':rhs.__local_dict['_id']})
        _write_request(request, my_out)
        return _read_response(my_id, None, my_in, my_out, _dead_list)

    def __repr__(self):
        # apparently repr(obj) skips the whole getattribute thing and just calls __repr__
        # directly. Therefore we just pass it through the normal call pipeline, and
        # tack on a little header so that you can tell it's an object proxy.
        # (self.__repr__ is NOT recursion: the attribute lookup goes
        # through Proxy.__getattribute__, which forwards it remotely.)
        val = self.__repr__()
        return "saran:%s" % val

    def __str__(self):
        # see description for __repr__, because str(obj) works the same. We don't
        # tack anything on to the return value here because str values are used as data.
        return self.__str__()

    def __len__(self):
        # see description for __repr__, len(obj) is the same. Unfortunately, __len__ is also
        # used when determining whether an object is boolean or not, e.g. if proxied_object:
        return self.__len__()
385
def proxied_type(self):
    """\
@brief Return the type of the object behind a proxy.

For a non-ObjectProxy value this is just type(self). For an
ObjectProxy it issues a 'type' request so the server reports the
remote object's real type instead of ObjectProxy."""
    if type(self) is not ObjectProxy:
        return type(self)

    # These accesses work because Proxy stores its state under the
    # plain '__local_dict' key (see Proxy.__setattr__), and no class
    # mangling applies at module level.
    my_in = self.__local_dict['_in']
    my_out = self.__local_dict['_out']
    my_id = self.__local_dict['_id']
    request = Request('type', {'id':my_id})
    _write_request(request, my_out)
    # dead list can be none because we know the result will always be
    # a value and not an ObjectProxy itself
    return _read_response(my_id, None, my_in, my_out, None)
398
class CallableProxy(object):
    """\
@class CallableProxy
@brief This class wraps a remote function in the Server

This class will be created by an Proxy during normal operation,
and users should not need to deal with this class directly."""

    def __init__(self, object_id, name, input, output, dead_list):
        """\
@param object_id id of the remote object the callable is bound to
(None presumably means the exported root object -- see Proxy's _id).
@param name the attribute name of the remote callable.
@param input a file-like object which supports read().
@param output a file-like object which supports write() and flush().
@param dead_list set of remote ids pending deletion, shared with the
owning Proxy.
"""
        #_prnt("CallableProxy::__init__: %s, %s" % (object_id, name))
        self._object_id = object_id
        self._name = name
        self._in = input
        self._out = output
        self._dead_list = dead_list

    def __call__(self, *args, **kwargs):
        """Invoke the remote callable via a 'call' request; args and
        kwargs must be picklable."""
        #_prnt("CallableProxy::__call__: %s, %s" % (args, kwargs))

        # Pass the call across. We never build a callable without
        # having already checked if the method starts with '_' so we
        # can safely pass this one to the remote object.
        #_prnt("calling %s %s" % (self._object_id, self._name)
        request = Request('call', {'id':self._object_id, 'name':self._name, 'args':args, 'kwargs':kwargs})
        _write_request(request, self._out)
        return _read_response(self._object_id, self._name, self._in, self._out, self._dead_list)
425
class Server(object):
    """\
@class Server
@brief The remote side of a saranwrap connection: reads pickled
Request hunks from input, dispatches them to handle_<action> methods,
and writes pickled responses to output.

Non-value results are kept in self._objects keyed by an increasing
integer id, so clients can address them in later requests."""

    def __init__(self, input, output, export):
        """\
@param input a file-like object which supports read().
@param output a file-like object which supports write() and flush().
@param export an object, function, or map which is exported to clients
when the id is None."""
        #_log("Server::__init__")
        self._in = input
        self._out = output
        self._export = export
        self._next_id = 1
        self._objects = {}

    def handle_status(self, object, req):
        """Return server statistics as a plain picklable dict."""
        return {
            'object_count':len(self._objects),
            'next_id':self._next_id,
            'pid':os.getpid()}

    def handle_getattr(self, object, req):
        """Return an attribute of object, falling back to item access
        for mapping-like objects."""
        try:
            return getattr(object, req['attribute'])
        except AttributeError, e:
            if hasattr(object, "__getitem__"):
                return object[req['attribute']]
            else:
                raise e
        #_log('getattr: %s' % str(response))

    def handle_setattr(self, object, req):
        """Set an attribute of object, falling back to item assignment
        for mapping-like objects."""
        try:
            return setattr(object, req['attribute'], req['value'])
        except AttributeError, e:
            if hasattr(object, "__setitem__"):
                return object.__setitem__(req['attribute'], req['value'])
            else:
                raise e

    def handle_getitem(self, object, req):
        """Return object[key] for the request's key."""
        return object[req['key']]

    def handle_setitem(self, object, req):
        """Perform object[key] = value for the request."""
        object[req['key']] = req['value']
        return None # *TODO figure out what the actual return value of __setitem__ should be

    def handle_eq(self, object, req):
        """Compare object with another tracked object by id; unknown
        ids compare unequal."""
        #_log("__eq__ %s %s" % (object, req))
        rhs = None
        try:
            rhs = self._objects[req['rhs']]
        except KeyError, e:
            return False
        return (object == rhs)

    def handle_call(self, object, req):
        """Call the named attribute of object with the request's args
        and kwargs, and return the result."""
        #_log("calling %s " % (req['name']))
        try:
            fn = getattr(object, req['name'])
        except AttributeError, e:
            # NOTE(review): this tests __setitem__ but then uses item
            # *access* (object[...]); probably should test __getitem__
            # like handle_getattr does -- confirm.
            if hasattr(object, "__setitem__"):
                fn = object[req['name']]
            else:
                raise e

        return fn(*req['args'],**req['kwargs'])

    def handle_del(self, object, req):
        """Drop the tracked object with the request's id so it can be
        garbage collected server-side."""
        id = req['id']
        _log("del %s from %s" % (id, self._objects))

        # *TODO what does __del__ actually return?
        del self._objects[id]
        return None

    def handle_type(self, object, req):
        """Return the real type of object (pickled as a value, not
        proxied -- see the special case in loop)."""
        return type(object)

    def loop(self):
        """@brief Loop forever and respond to all requests.

        Exits via sys.exit(0) when _read_lp_hunk signals EOF. All other
        exceptions are serialized back to the client instead of killing
        the server. NOTE(review): 'str', 'id' and 'object' shadow
        builtins throughout this method."""
        _log("Server::loop")
        while True:
            try:
                try:
                    str = _read_lp_hunk(self._in)
                except EOFError:
                    sys.exit(0) # normal exit
                request = cPickle.loads(str)
                _log("request: %s (%s)" % (request, self._objects))
                req = request
                id = None
                object = None
                # Resolve the target object from the request id; any
                # failure falls through to the exported root object.
                try:
                    id = req['id']
                    if id:
                        id = int(id)
                        object = self._objects[id]
                    #_log("id, object: %d %s" % (id, object))
                except Exception, e:
                    #_log("Exception %s" % str(e))
                    pass
                if object is None or id is None:
                    id = None
                    object = self._export
                #_log("found object %s" % str(object))

                # Handle the request via a method with a special name on the server
                handler_name = 'handle_%s' % request.action()

                try:
                    handler = getattr(self, handler_name)
                except AttributeError:
                    raise BadRequest, request.action()

                response = handler(object, request)

                # figure out what to do with the response, and respond
                # appropriately.
                if request.action() in ['status', 'type']:
                    # have to handle these specially since we want to
                    # pickle up the actual value and not return a proxy
                    self.respond(['value', response])
                elif callable(response):
                    #_log("callable %s" % response)
                    self.respond(['callable'])
                elif self.is_value(response):
                    self.respond(['value', response])
                else:
                    # Anything else stays server-side; hand the client
                    # an id it can use to build an ObjectProxy.
                    self._objects[self._next_id] = response
                    #_log("objects: %s" % str(self._objects))
                    self.respond(['object', self._next_id])
                    self._next_id += 1
            except SystemExit, e:
                raise e
            except Exception, e:
                self.write_exception(e)
            except:
                # Python 2 string exceptions land here (see
                # raise_a_weird_error below).
                self.write_exception(sys.exc_info()[0])

    def is_value(self, value):
        """\
@brief Test if value should be serialized as a simple dataset.
@param value The value to test.
@return Returns true if value is a simple serializeable set of data.
"""
        return type(value) in (str,unicode,int,float,long,bool,type(None))

    def respond(self, body):
        """Pickle body and write it to the client as one
        length-prefixed hunk."""
        _log("responding with: %s" % body)
        #_log("objects: %s" % self._objects)
        s = cPickle.dumps(body)
        _log(`s`)
        str = _write_lp_hunk(self._out, s)

    def write_exception(self, e):
        """@brief Helper method to respond with an exception."""
        #_log("exception: %s" % sys.exc_info()[0])
        # TODO: serialize traceback using generalization of code from mulib.htmlexception
        self.respond(['exception', e])
        global _g_debug_mode
        if _g_debug_mode:
            _log("traceback: %s" % traceback.format_tb(sys.exc_info()[2]))
588
589
# test function used for testing that final except clause
def raise_a_weird_error():
    # Python 2 allows raising a plain string; this exercises the bare
    # 'except:' fallback in Server.loop, since a string exception is
    # not caught by 'except Exception'.
    raise "oh noes you can raise a string"
593
# test function used for testing return of unpicklable exceptions
def raise_an_unpicklable_error():
    # The exception class is local to this function, so pickle cannot
    # locate it by module path when Server.write_exception serializes it.
    class Unpicklable(Exception):
        pass
    raise Unpicklable()
599
# test function used for testing return of picklable exceptions
def raise_standard_error():
    # A builtin exception type round-trips through pickle cleanly.
    raise FloatingPointError()
603
# test function to make sure print doesn't break the wrapper
def print_string(str):
    # NOTE(review): the parameter name shadows the builtin 'str'.
    print str
607
# test function to make sure printing on stderr doesn't break the
# wrapper
def err_string(str):
    # NOTE(review): the parameter name shadows the builtin 'str'.
    print >>sys.stderr, str
612
def main():
    """@brief Command-line entry point: serve an export over
    stdin/stdout for a saranwrap Proxy on the other end.

    Exactly one of --module or --child must be supplied; otherwise the
    parser exits with a usage error. (Previously neither option left
    'server' unbound and crashed with a NameError at server.loop().)"""
    import optparse
    parser = optparse.OptionParser(
        usage="usage: %prog [options]",
        description="Simple saranwrap.Server wrapper")
    parser.add_option(
        '-c', '--child', default=False, action='store_true',
        help='Wrap an object serialized via setattr.')
    parser.add_option(
        '-m', '--module', type='string', dest='module', default=None,
        help='a module to load and export.')
    parser.add_option(
        '-l', '--logfile', type='string', dest='logfile', default=None,
        help='file to log to.')
    options, args = parser.parse_args()
    global _g_logfile
    if options.logfile:
        _g_logfile = open(options.logfile, 'a')
    if options.module:
        export = api.named(options.module)
        server = Server(sys.stdin, sys.stdout, export)
    elif options.child:
        server = Server(sys.stdin, sys.stdout, {})
    else:
        # No export source was given; fail with a usage error instead
        # of hitting an unbound 'server' below.
        parser.error('specify one of --module or --child')

    # *HACK: some modules may emit on stderr, which breaks everything.
    class NullSTDOut(object):
        def write(self, text):
            # Swallow all output so stray prints cannot corrupt the
            # pickle stream the client reads from our stdout.
            pass
    sys.stderr = NullSTDOut()
    sys.stdout = NullSTDOut()

    # Loop until EOF
    server.loop()
    if _g_logfile:
        _g_logfile.close()
648
649
# Script entry point: run the saranwrap server wrapper over stdin/stdout.
if __name__ == "__main__":
    main()
diff --git a/linden/indra/lib/python/indra/ipc/siesta.py b/linden/indra/lib/python/indra/ipc/siesta.py
index 5fbea29..b206f18 100644
--- a/linden/indra/lib/python/indra/ipc/siesta.py
+++ b/linden/indra/lib/python/indra/ipc/siesta.py
@@ -24,9 +24,9 @@ except ImportError:
24 24
25llsd_parsers = { 25llsd_parsers = {
26 'application/json': json_decode, 26 'application/json': json_decode,
27 'application/llsd+binary': llsd.parse_binary, 27 llsd.BINARY_MIME_TYPE: llsd.parse_binary,
28 'application/llsd+notation': llsd.parse_notation, 28 'application/llsd+notation': llsd.parse_notation,
29 'application/llsd+xml': llsd.parse_xml, 29 llsd.XML_MIME_TYPE: llsd.parse_xml,
30 'application/xml': llsd.parse_xml, 30 'application/xml': llsd.parse_xml,
31 } 31 }
32 32
diff --git a/linden/indra/lib/python/indra/util/fastest_elementtree.py b/linden/indra/lib/python/indra/util/fastest_elementtree.py
index 64aed09..2470143 100644
--- a/linden/indra/lib/python/indra/util/fastest_elementtree.py
+++ b/linden/indra/lib/python/indra/util/fastest_elementtree.py
@@ -2,9 +2,9 @@
2@file fastest_elementtree.py 2@file fastest_elementtree.py
3@brief Concealing some gnarly import logic in here. This should export the interface of elementtree. 3@brief Concealing some gnarly import logic in here. This should export the interface of elementtree.
4 4
5$LicenseInfo:firstyear=2006&license=mit$ 5$LicenseInfo:firstyear=2008&license=mit$
6 6
7Copyright (c) 2006-2008, Linden Research, Inc. 7Copyright (c) 2008, Linden Research, Inc.
8 8
9Permission is hereby granted, free of charge, to any person obtaining a copy 9Permission is hereby granted, free of charge, to any person obtaining a copy
10of this software and associated documentation files (the "Software"), to deal 10of this software and associated documentation files (the "Software"), to deal
@@ -26,27 +26,40 @@ THE SOFTWARE.
26$/LicenseInfo$ 26$/LicenseInfo$
27""" 27"""
28 28
29# Using celementree might cause some unforeseen problems so here's a 29# The parsing exception raised by the underlying library depends
30# on the ElementTree implementation we're using, so we provide an
31# alias here.
32#
33# Use ElementTreeError as the exception type for catching parsing
34# errors.
35
36
37# Using cElementTree might cause some unforeseen problems, so here's a
30# convenient off switch. 38# convenient off switch.
31 39
32# *NOTE: turned off cause of problems. :-( *TODO: debug 40use_celementree = True
33use_celementree = False
34 41
35try: 42try:
36 if not use_celementree: 43 if not use_celementree:
37 raise ImportError() 44 raise ImportError()
38 from cElementTree import * ## This does not work under Windows 45 # Python 2.3 and 2.4.
46 from cElementTree import *
47 ElementTreeError = SyntaxError
39except ImportError: 48except ImportError:
40 try: 49 try:
41 if not use_celementree: 50 if not use_celementree:
42 raise ImportError() 51 raise ImportError()
43 ## This is the name of cElementTree under python 2.5 52 # Python 2.5 and above.
44 from xml.etree.cElementTree import * 53 from xml.etree.cElementTree import *
54 ElementTreeError = SyntaxError
45 except ImportError: 55 except ImportError:
56 # Pure Python code.
46 try: 57 try:
47 ## This is the old name of elementtree, for use with 2.3 58 # Python 2.3 and 2.4.
48 from elementtree.ElementTree import * 59 from elementtree.ElementTree import *
49 except ImportError: 60 except ImportError:
50 ## This is the name of elementtree under python 2.5 61 # Python 2.5 and above.
51 from xml.etree.ElementTree import * 62 from xml.etree.ElementTree import *
52 63
64 # The pure Python ElementTree module uses Expat for parsing.
65 from xml.parsers.expat import ExpatError as ElementTreeError
diff --git a/linden/indra/lib/python/indra/util/iterators.py b/linden/indra/lib/python/indra/util/iterators.py
new file mode 100644
index 0000000..6a98c97
--- /dev/null
+++ b/linden/indra/lib/python/indra/util/iterators.py
@@ -0,0 +1,63 @@
1"""\
2@file iterators.py
3@brief Useful general-purpose iterators.
4
5$LicenseInfo:firstyear=2008&license=mit$
6
7Copyright (c) 2008, Linden Research, Inc.
8
9Permission is hereby granted, free of charge, to any person obtaining a copy
10of this software and associated documentation files (the "Software"), to deal
11in the Software without restriction, including without limitation the rights
12to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
13copies of the Software, and to permit persons to whom the Software is
14furnished to do so, subject to the following conditions:
15
16The above copyright notice and this permission notice shall be included in
17all copies or substantial portions of the Software.
18
19THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25THE SOFTWARE.
26$/LicenseInfo$
27"""
28
29from __future__ import nested_scopes
30
31def iter_chunks(rows, aggregate_size=100):
32 """
33 Given an iterable set of items (@p rows), produces lists of up to @p
34 aggregate_size items at a time, for example:
35
36 iter_chunks([1,2,3,4,5,6,7,8,9,10], 3)
37
38 Values for @p aggregate_size < 1 will raise ValueError.
39
40 Will return a generator that produces, in the following order:
41 - [1, 2, 3]
42 - [4, 5, 6]
43 - [7, 8, 9]
44 - [10]
45 """
46 if aggregate_size < 1:
47 raise ValueError()
48
49 def iter_chunks_inner():
50 row_iter = iter(rows)
51 done = False
52 agg = []
53 while not done:
54 try:
55 row = row_iter.next()
56 agg.append(row)
57 except StopIteration:
58 done = True
59 if agg and (len(agg) >= aggregate_size or done):
60 yield agg
61 agg = []
62
63 return iter_chunks_inner()
diff --git a/linden/indra/lib/python/indra/util/iterators_test.py b/linden/indra/lib/python/indra/util/iterators_test.py
new file mode 100755
index 0000000..7fd9e73
--- /dev/null
+++ b/linden/indra/lib/python/indra/util/iterators_test.py
@@ -0,0 +1,72 @@
1"""\
2@file iterators_test.py
3@brief Test cases for iterators module.
4
5$LicenseInfo:firstyear=2008&license=mit$
6
7Copyright (c) 2008, Linden Research, Inc.
8
9Permission is hereby granted, free of charge, to any person obtaining a copy
10of this software and associated documentation files (the "Software"), to deal
11in the Software without restriction, including without limitation the rights
12to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
13copies of the Software, and to permit persons to whom the Software is
14furnished to do so, subject to the following conditions:
15
16The above copyright notice and this permission notice shall be included in
17all copies or substantial portions of the Software.
18
19THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25THE SOFTWARE.
26$/LicenseInfo$
27"""
28
29import unittest
30
31from indra.util.iterators import iter_chunks
32
33class TestIterChunks(unittest.TestCase):
34 """Unittests for iter_chunks"""
35 def test_bad_agg_size(self):
36 rows = [1,2,3,4]
37 self.assertRaises(ValueError, iter_chunks, rows, 0)
38 self.assertRaises(ValueError, iter_chunks, rows, -1)
39
40 try:
41 for i in iter_chunks(rows, 0):
42 pass
43 except ValueError:
44 pass
45 else:
46 self.fail()
47
48 try:
49 result = list(iter_chunks(rows, 0))
50 except ValueError:
51 pass
52 else:
53 self.fail()
54 def test_empty(self):
55 rows = []
56 result = list(iter_chunks(rows))
57 self.assertEqual(result, [])
58 def test_small(self):
59 rows = [[1]]
60 result = list(iter_chunks(rows, 2))
61 self.assertEqual(result, [[[1]]])
62 def test_size(self):
63 rows = [[1],[2]]
64 result = list(iter_chunks(rows, 2))
65 self.assertEqual(result, [[[1],[2]]])
66 def test_multi_agg(self):
67 rows = [[1],[2],[3],[4],[5]]
68 result = list(iter_chunks(rows, 2))
69 self.assertEqual(result, [[[1],[2]],[[3],[4]],[[5]]])
70
71if __name__ == "__main__":
72 unittest.main()
diff --git a/linden/indra/lib/python/indra/util/llmanifest.py b/linden/indra/lib/python/indra/util/llmanifest.py
index 4675177..a00d242 100644
--- a/linden/indra/lib/python/indra/util/llmanifest.py
+++ b/linden/indra/lib/python/indra/util/llmanifest.py
@@ -584,7 +584,7 @@ class LLManifest(object):
584 584
585 def wildcard_regex(self, src_glob, dst_glob): 585 def wildcard_regex(self, src_glob, dst_glob):
586 src_re = re.escape(src_glob) 586 src_re = re.escape(src_glob)
587 src_re = src_re.replace('\*', '([-a-zA-Z0-9._ ]+)') 587 src_re = src_re.replace('\*', '([-a-zA-Z0-9._ ]*)')
588 dst_temp = dst_glob 588 dst_temp = dst_glob
589 i = 1 589 i = 1
590 while dst_temp.count("*") > 0: 590 while dst_temp.count("*") > 0:
@@ -621,6 +621,7 @@ class LLManifest(object):
621 count = 0 621 count = 0
622 if self.wildcard_pattern.search(src): 622 if self.wildcard_pattern.search(src):
623 for s,d in self.expand_globs(src, dst): 623 for s,d in self.expand_globs(src, dst):
624 assert(s != d)
624 count += self.process_file(s, d) 625 count += self.process_file(s, d)
625 else: 626 else:
626 # if we're specifying a single path (not a glob), 627 # if we're specifying a single path (not a glob),
diff --git a/linden/indra/lib/python/indra/util/llperformance.py b/linden/indra/lib/python/indra/util/llperformance.py
new file mode 100755
index 0000000..7c52730
--- /dev/null
+++ b/linden/indra/lib/python/indra/util/llperformance.py
@@ -0,0 +1,158 @@
1#!/usr/bin/python
2
3# ------------------------------------------------
4# Sim metrics utility functions.
5
6import glob, os, time, sys, stat, exceptions
7
8from indra.base import llsd
9
10gBlockMap = {} #Map of performance metric data with function hierarchy information.
11gCurrentStatPath = ""
12
13gIsLoggingEnabled=False
14
15class LLPerfStat:
16 def __init__(self,key):
17 self.mTotalTime = 0
18 self.mNumRuns = 0
19 self.mName=key
20 self.mTimeStamp = int(time.time()*1000)
21 self.mUTCTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
22
23 def __str__(self):
24 return "%f" % self.mTotalTime
25
26 def start(self):
27 self.mStartTime = int(time.time() * 1000000)
28 self.mNumRuns += 1
29
30 def stop(self):
31 execution_time = int(time.time() * 1000000) - self.mStartTime
32 self.mTotalTime += execution_time
33
34 def get_map(self):
35 results={}
36 results['name']=self.mName
37 results['utc_time']=self.mUTCTime
38 results['timestamp']=self.mTimeStamp
39 results['us']=self.mTotalTime
40 results['count']=self.mNumRuns
41 return results
42
43class PerfError(exceptions.Exception):
44 def __init__(self):
45 return
46
47 def __Str__(self):
48 print "","Unfinished LLPerfBlock"
49
50class LLPerfBlock:
51 def __init__( self, key ):
52 global gBlockMap
53 global gCurrentStatPath
54 global gIsLoggingEnabled
55
56 #Check to see if we're running metrics right now.
57 if gIsLoggingEnabled:
58 self.mRunning = True #Mark myself as running.
59
60 self.mPreviousStatPath = gCurrentStatPath
61 gCurrentStatPath += "/" + key
62 if gCurrentStatPath not in gBlockMap:
63 gBlockMap[gCurrentStatPath] = LLPerfStat(key)
64
65 self.mStat = gBlockMap[gCurrentStatPath]
66 self.mStat.start()
67
68 def finish( self ):
69 global gBlockMap
70 global gIsLoggingEnabled
71
72 if gIsLoggingEnabled:
73 self.mStat.stop()
74 self.mRunning = False
75 gCurrentStatPath = self.mPreviousStatPath
76
77# def __del__( self ):
78# if self.mRunning:
79# #SPATTERS FIXME
80# raise PerfError
81
82class LLPerformance:
83 #--------------------------------------------------
84 # Determine whether or not we want to log statistics
85
86 def __init__( self, process_name = "python" ):
87 self.process_name = process_name
88 self.init_testing()
89 self.mTimeStamp = int(time.time()*1000)
90 self.mUTCTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
91
92 def init_testing( self ):
93 global gIsLoggingEnabled
94
95 host_performance_file = "/dev/shm/simperf/simperf_proc_config.llsd"
96
97 #If file exists, open
98 if os.path.exists(host_performance_file):
99 file = open (host_performance_file,'r')
100
101 #Read serialized LLSD from file.
102 body = llsd.parse(file.read())
103
104 #Calculate time since file last modified.
105 stats = os.stat(host_performance_file)
106 now = time.time()
107 mod = stats[stat.ST_MTIME]
108 age = now - mod
109
110 if age < ( body['duration'] ):
111 gIsLoggingEnabled = True
112
113
114 def get ( self ):
115 global gIsLoggingEnabled
116 return gIsLoggingEnabled
117
118 #def output(self,ptr,path):
119 # if 'stats' in ptr:
120 # stats = ptr['stats']
121 # self.mOutputPtr[path] = stats.get_map()
122
123 # if 'children' in ptr:
124 # children=ptr['children']
125
126 # curptr = self.mOutputPtr
127 # curchildren={}
128 # curptr['children'] = curchildren
129
130 # for key in children:
131 # curchildren[key]={}
132 # self.mOutputPtr = curchildren[key]
133 # self.output(children[key],path + '/' + key)
134
135 def done(self):
136 global gBlockMap
137
138 if not self.get():
139 return
140
141 output_name = "/dev/shm/simperf/%s_proc.%d.llsd" % (self.process_name, os.getpid())
142 output_file = open(output_name, 'w')
143 process_info = {
144 "name" : self.process_name,
145 "pid" : os.getpid(),
146 "ppid" : os.getppid(),
147 "timestamp" : self.mTimeStamp,
148 "utc_time" : self.mUTCTime,
149 }
150 output_file.write(llsd.format_notation(process_info))
151 output_file.write('\n')
152
153 for key in gBlockMap.keys():
154 gBlockMap[key] = gBlockMap[key].get_map()
155 output_file.write(llsd.format_notation(gBlockMap))
156 output_file.write('\n')
157 output_file.close()
158
diff --git a/linden/indra/lib/python/indra/util/named_query.py b/linden/indra/lib/python/indra/util/named_query.py
index 20f2ec7..c5fb498 100644
--- a/linden/indra/lib/python/indra/util/named_query.py
+++ b/linden/indra/lib/python/indra/util/named_query.py
@@ -63,7 +63,7 @@ def _init_g_named_manager(sql_dir = None):
63 63
64 # extra fallback directory in case config doesn't return what we want 64 # extra fallback directory in case config doesn't return what we want
65 if sql_dir is None: 65 if sql_dir is None:
66 sql_dir = os.path.dirname(__file__) + "../../../../web/dataservice/sql" 66 sql_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..", "web", "dataservice", "sql")
67 67
68 global _g_named_manager 68 global _g_named_manager
69 _g_named_manager = NamedQueryManager( 69 _g_named_manager = NamedQueryManager(
@@ -103,11 +103,12 @@ class NamedQuery(object):
103 def __init__(self, name, filename): 103 def __init__(self, name, filename):
104 """ Construct a NamedQuery object. The name argument is an 104 """ Construct a NamedQuery object. The name argument is an
105 arbitrary name as a handle for the query, and the filename is 105 arbitrary name as a handle for the query, and the filename is
106 a path to a file containing an llsd named query document.""" 106 a path to a file or a file-like object containing an llsd named
107 query document."""
107 self._stat_interval_seconds = 5 # 5 seconds 108 self._stat_interval_seconds = 5 # 5 seconds
108 self._name = name 109 self._name = name
109 if (filename is not None) \ 110 if (filename is not None and isinstance(filename, (str, unicode))
110 and (NQ_FILE_SUFFIX != filename[-NQ_FILE_SUFFIX_LEN:]): 111 and NQ_FILE_SUFFIX != filename[-NQ_FILE_SUFFIX_LEN:]):
111 filename = filename + NQ_FILE_SUFFIX 112 filename = filename + NQ_FILE_SUFFIX
112 self._location = filename 113 self._location = filename
113 self._alternative = dict() 114 self._alternative = dict()
@@ -122,8 +123,8 @@ class NamedQuery(object):
122 123
123 def get_modtime(self): 124 def get_modtime(self):
124 """ Returns the mtime (last modified time) of the named query 125 """ Returns the mtime (last modified time) of the named query
125 file, if such exists.""" 126 filename. For file-like objects, expect a modtime of 0"""
126 if self._location: 127 if self._location and isinstance(self._location, (str, unicode)):
127 return os.path.getmtime(self._location) 128 return os.path.getmtime(self._location)
128 return 0 129 return 0
129 130
@@ -131,7 +132,12 @@ class NamedQuery(object):
131 """ Loads and parses the named query file into self. Does 132 """ Loads and parses the named query file into self. Does
132 nothing if self.location is nonexistant.""" 133 nothing if self.location is nonexistant."""
133 if self._location: 134 if self._location:
134 self._reference_contents(llsd.parse(open(self._location).read())) 135 if isinstance(self._location, (str, unicode)):
136 contents = llsd.parse(open(self._location).read())
137 else:
138 # we probably have a file-like object. Godspeed!
139 contents = llsd.parse(self._location.read())
140 self._reference_contents(contents)
135 # Check for alternative implementations 141 # Check for alternative implementations
136 try: 142 try:
137 for name, alt in self._contents['alternative'].items(): 143 for name, alt in self._contents['alternative'].items():
diff --git a/linden/indra/lib/python/indra/util/simperf_host_xml_parser.py b/linden/indra/lib/python/indra/util/simperf_host_xml_parser.py
new file mode 100755
index 0000000..b608415
--- /dev/null
+++ b/linden/indra/lib/python/indra/util/simperf_host_xml_parser.py
@@ -0,0 +1,338 @@
1#!/usr/bin/env python
2"""\
3@file simperf_host_xml_parser.py
4@brief Digest collector's XML dump and convert to simple dict/list structure
5
6$LicenseInfo:firstyear=2008&license=mit$
7
8Copyright (c) 2008, Linden Research, Inc.
9
10Permission is hereby granted, free of charge, to any person obtaining a copy
11of this software and associated documentation files (the "Software"), to deal
12in the Software without restriction, including without limitation the rights
13to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14copies of the Software, and to permit persons to whom the Software is
15furnished to do so, subject to the following conditions:
16
17The above copyright notice and this permission notice shall be included in
18all copies or substantial portions of the Software.
19
20THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
23AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
25OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
26THE SOFTWARE.
27$/LicenseInfo$
28"""
29
30import sys, os, getopt, time
31import simplejson
32from xml import sax
33
34
35def usage():
36 print "Usage:"
37 print sys.argv[0] + " [options]"
38 print " Convert RRD's XML dump to JSON. Script to convert the simperf_host_collector-"
39 print " generated RRD dump into JSON. Steps include converting selected named"
40 print " fields from GAUGE type to COUNTER type by computing delta with preceding"
41 print " values. Top-level named fields are:"
42 print
43 print " lastupdate Time (javascript timestamp) of last data sample"
44 print " step Time in seconds between samples"
45 print " ds Data specification (name/type) for each column"
46 print " database Table of data samples, one time step per row"
47 print
48 print "Options:"
49 print " -i, --in Input settings filename. (Default: stdin)"
50 print " -o, --out Output settings filename. (Default: stdout)"
51 print " -h, --help Print this message and exit."
52 print
53 print "Example: %s -i rrddump.xml -o rrddump.json" % sys.argv[0]
54 print
55 print "Interfaces:"
56 print " class SimPerfHostXMLParser() # SAX content handler"
57 print " def simperf_host_xml_fixup(parser) # post-parse value fixup"
58
class SimPerfHostXMLParser(sax.handler.ContentHandler):
    """SAX content handler that digests an 'rrdtool dump' XML document.

    After a parse, the interesting data is exposed in four public
    attributes:
      rrd_last_update -- <lastupdate> value (int, seconds)
      rrd_step        -- <step> value (int, seconds between samples)
      rrd_ds          -- list of {"name": ..., "type": ...} data sources
      rrd_records     -- list of sample rows (lists of value strings);
                         any row containing a "NaN" sample is dropped
    """

    def __init__(self):
        pass

    def startDocument(self):
        # (Re)initialize everything here so one handler instance can be
        # reused across parses.
        self.rrd_last_update = 0             # public
        self.rrd_step = 0                    # public
        self.rrd_ds = []                     # public
        self.rrd_records = []                # public
        self._rrd_level = 0                  # current element nesting depth
        self._rrd_parse_state = 0            # FSM state; see startElement
        self._rrd_chars = ""                 # accumulated character data
        self._rrd_capture = False            # whether characters() collects
        self._rrd_ds_val = {}                # <ds> entry being built
        self._rrd_data_row = []              # <row> being built
        self._rrd_data_row_has_nan = False   # marks the row for dropping

    def endDocument(self):
        pass

    # Nasty little ad-hoc state machine to extract the elements that are
    # necessary from the 'rrdtool dump' XML output.  The same element
    # name '<ds>' is used for two different data sets so we need to pay
    # some attention to the actual structure to get the ones we want
    # and ignore the ones we don't.

    def startElement(self, name, attrs):
        self._rrd_level = self._rrd_level + 1
        self._rrd_capture = False
        if self._rrd_level == 1:
            if name == "rrd" and self._rrd_parse_state == 0:
                self._rrd_parse_state = 1     # In <rrd>
                self._rrd_capture = True
                self._rrd_chars = ""
        elif self._rrd_level == 2:
            if self._rrd_parse_state == 1:
                if name == "lastupdate":
                    self._rrd_parse_state = 2     # In <rrd><lastupdate>
                    self._rrd_capture = True
                    self._rrd_chars = ""
                elif name == "step":
                    self._rrd_parse_state = 3     # In <rrd><step>
                    self._rrd_capture = True
                    self._rrd_chars = ""
                elif name == "ds":
                    self._rrd_parse_state = 4     # In <rrd><ds>
                    self._rrd_ds_val = {}
                    self._rrd_chars = ""
                elif name == "rra":
                    self._rrd_parse_state = 5     # In <rrd><rra>
        elif self._rrd_level == 3:
            if self._rrd_parse_state == 4:
                if name == "name":
                    self._rrd_parse_state = 6     # In <rrd><ds><name>
                    self._rrd_capture = True
                    self._rrd_chars = ""
                elif name == "type":
                    self._rrd_parse_state = 7     # In <rrd><ds><type>
                    self._rrd_capture = True
                    self._rrd_chars = ""
            elif self._rrd_parse_state == 5:
                if name == "database":
                    self._rrd_parse_state = 8     # In <rrd><rra><database>
        elif self._rrd_level == 4:
            if self._rrd_parse_state == 8:
                if name == "row":
                    self._rrd_parse_state = 9     # In <rrd><rra><database><row>
                    self._rrd_data_row = []
                    self._rrd_data_row_has_nan = False
        elif self._rrd_level == 5:
            if self._rrd_parse_state == 9:
                if name == "v":
                    self._rrd_parse_state = 10    # In <rrd><rra><database><row><v>
                    self._rrd_capture = True
                    self._rrd_chars = ""

    def endElement(self, name):
        self._rrd_capture = False
        if self._rrd_parse_state == 10:
            self._rrd_capture = self._rrd_level == 6
            if self._rrd_level == 5:
                if self._rrd_chars == "NaN":
                    self._rrd_data_row_has_nan = True
                else:
                    self._rrd_data_row.append(self._rrd_chars)
                self._rrd_parse_state = 9     # In <rrd><rra><database><row>
        elif self._rrd_parse_state == 9:
            if self._rrd_level == 4:
                if not self._rrd_data_row_has_nan:
                    self.rrd_records.append(self._rrd_data_row)
                self._rrd_parse_state = 8     # In <rrd><rra><database>
        elif self._rrd_parse_state == 8:
            if self._rrd_level == 3:
                self._rrd_parse_state = 5     # In <rrd><rra>
        elif self._rrd_parse_state == 7:
            if self._rrd_level == 3:
                self._rrd_ds_val["type"] = self._rrd_chars
                self._rrd_parse_state = 4     # In <rrd><ds>
        elif self._rrd_parse_state == 6:
            if self._rrd_level == 3:
                self._rrd_ds_val["name"] = self._rrd_chars
                self._rrd_parse_state = 4     # In <rrd><ds>
        elif self._rrd_parse_state == 5:
            if self._rrd_level == 2:
                self._rrd_parse_state = 1     # In <rrd>
        elif self._rrd_parse_state == 4:
            if self._rrd_level == 2:
                self.rrd_ds.append(self._rrd_ds_val)
                self._rrd_parse_state = 1     # In <rrd>
        elif self._rrd_parse_state == 3:
            if self._rrd_level == 2:
                # int() replaces the Python-2-only long(); Python ints are
                # arbitrary precision, so the value is unchanged.
                self.rrd_step = int(self._rrd_chars)
                self._rrd_parse_state = 1     # In <rrd>
        elif self._rrd_parse_state == 2:
            if self._rrd_level == 2:
                self.rrd_last_update = int(self._rrd_chars)
                self._rrd_parse_state = 1     # In <rrd>
        elif self._rrd_parse_state == 1:
            if self._rrd_level == 1:
                self._rrd_parse_state = 0     # At top

        if self._rrd_level:
            self._rrd_level = self._rrd_level - 1

    def characters(self, content):
        # SAX may deliver text in several chunks; accumulate when capturing.
        if self._rrd_capture:
            self._rrd_chars = self._rrd_chars + content.strip()
187
188def _make_numeric(value):
189 try:
190 value = float(value)
191 except:
192 value = ""
193 return value
194
195def simperf_host_xml_fixup(parser, filter_start_time = None, filter_end_time = None):
196 # Fixup for GAUGE fields that are really COUNTS. They
197 # were forced to GAUGE to try to disable rrdtool's
198 # data interpolation/extrapolation for non-uniform time
199 # samples.
200 fixup_tags = [ "cpu_user",
201 "cpu_nice",
202 "cpu_sys",
203 "cpu_idle",
204 "cpu_waitio",
205 "cpu_intr",
206 # "file_active",
207 # "file_free",
208 # "inode_active",
209 # "inode_free",
210 "netif_in_kb",
211 "netif_in_pkts",
212 "netif_in_errs",
213 "netif_in_drop",
214 "netif_out_kb",
215 "netif_out_pkts",
216 "netif_out_errs",
217 "netif_out_drop",
218 "vm_page_in",
219 "vm_page_out",
220 "vm_swap_in",
221 "vm_swap_out",
222 #"vm_mem_total",
223 #"vm_mem_used",
224 #"vm_mem_active",
225 #"vm_mem_inactive",
226 #"vm_mem_free",
227 #"vm_mem_buffer",
228 #"vm_swap_cache",
229 #"vm_swap_total",
230 #"vm_swap_used",
231 #"vm_swap_free",
232 "cpu_interrupts",
233 "cpu_switches",
234 "cpu_forks" ]
235
236 col_count = len(parser.rrd_ds)
237 row_count = len(parser.rrd_records)
238
239 # Process the last row separately, just to make all values numeric.
240 for j in range(col_count):
241 parser.rrd_records[row_count - 1][j] = _make_numeric(parser.rrd_records[row_count - 1][j])
242
243 # Process all other row/columns.
244 last_different_row = row_count - 1
245 current_row = row_count - 2
246 while current_row >= 0:
247 # Check for a different value than the previous row. If everything is the same
248 # then this is probably just a filler/bogus entry.
249 is_different = False
250 for j in range(col_count):
251 parser.rrd_records[current_row][j] = _make_numeric(parser.rrd_records[current_row][j])
252 if parser.rrd_records[current_row][j] != parser.rrd_records[last_different_row][j]:
253 # We're good. This is a different row.
254 is_different = True
255
256 if not is_different:
257 # This is a filler/bogus entry. Just ignore it.
258 for j in range(col_count):
259 parser.rrd_records[current_row][j] = float('nan')
260 else:
261 # Some tags need to be converted into deltas.
262 for j in range(col_count):
263 if parser.rrd_ds[j]["name"] in fixup_tags:
264 parser.rrd_records[last_different_row][j] = \
265 parser.rrd_records[last_different_row][j] - parser.rrd_records[current_row][j]
266 last_different_row = current_row
267
268 current_row -= 1
269
270 # Set fixup_tags in the first row to 'nan' since they aren't useful anymore.
271 for j in range(col_count):
272 if parser.rrd_ds[j]["name"] in fixup_tags:
273 parser.rrd_records[0][j] = float('nan')
274
275 # Add a timestamp to each row and to the catalog. Format and name
276 # chosen to match other simulator logging (hopefully).
277 start_time = parser.rrd_last_update - (parser.rrd_step * (row_count - 1))
278 # Build a filtered list of rrd_records if we are limited to a time range.
279 filter_records = False
280 if filter_start_time is not None or filter_end_time is not None:
281 filter_records = True
282 filtered_rrd_records = []
283 if filter_start_time is None:
284 filter_start_time = start_time * 1000
285 if filter_end_time is None:
286 filter_end_time = parser.rrd_last_update * 1000
287
288 for i in range(row_count):
289 record_timestamp = (start_time + (i * parser.rrd_step)) * 1000
290 parser.rrd_records[i].insert(0, record_timestamp)
291 if filter_records:
292 if filter_start_time <= record_timestamp and record_timestamp <= filter_end_time:
293 filtered_rrd_records.append(parser.rrd_records[i])
294
295 if filter_records:
296 parser.rrd_records = filtered_rrd_records
297
298 parser.rrd_ds.insert(0, {"type": "GAUGE", "name": "javascript_timestamp"})
299
300
def main(argv=None):
    """Entry point: parse options, digest the RRD XML dump, emit JSON.

    argv -- argument list to parse (defaults to sys.argv[1:]).  The
    original accepted this parameter but ignored it and always read
    sys.argv, which made the function untestable and surprising.
    Returns 0 on success; exceptions percolate up.
    """
    if argv is None:
        argv = sys.argv[1:]
    opts, args = getopt.getopt(argv, "i:o:h", ["in=", "out=", "help"])
    input_file = sys.stdin
    output_file = sys.stdout
    for o, a in opts:
        if o in ("-i", "--in"):
            input_file = open(a, 'r')
        if o in ("-o", "--out"):
            output_file = open(a, 'w')
        if o in ("-h", "--help"):
            usage()
            sys.exit(0)

    # Using the SAX parser as it is at least 4X faster and far, far
    # smaller on this dataset than the DOM-based interface in xml.dom.minidom.
    # With SAX and a 5.4MB xml file, this requires about seven seconds of
    # wall-clock time and 32MB VSZ.  With the DOM interface, about 22 seconds
    # and over 270MB VSZ.

    handler = SimPerfHostXMLParser()
    sax.parse(input_file, handler)
    if input_file != sys.stdin:
        input_file.close()

    # Various format fixups:  string-to-num, gauge-to-counts, add
    # a time stamp, etc.
    simperf_host_xml_fixup(handler)

    # Create JSONable dict with interesting data and format/print it.
    # (write() replaces the Python-2-only 'print >>' syntax.)
    output_file.write(simplejson.dumps({ "step" : handler.rrd_step,
                                         "lastupdate": handler.rrd_last_update * 1000,
                                         "ds" : handler.rrd_ds,
                                         "database" : handler.rrd_records }) + "\n")

    return 0

if __name__ == "__main__":
    sys.exit(main())
diff --git a/linden/indra/lib/python/indra/util/simperf_oprof_interface.py b/linden/indra/lib/python/indra/util/simperf_oprof_interface.py
new file mode 100755
index 0000000..a7e9a4c
--- /dev/null
+++ b/linden/indra/lib/python/indra/util/simperf_oprof_interface.py
@@ -0,0 +1,160 @@
1#!/usr/bin/env python
2"""\
3@file simperf_oprof_interface.py
4@brief Manage OProfile data collection on a host
5
6$LicenseInfo:firstyear=2008&license=internal$
7
8Copyright (c) 2008, Linden Research, Inc.
9
10The following source code is PROPRIETARY AND CONFIDENTIAL. Use of
11this source code is governed by the Linden Lab Source Code Disclosure
12Agreement ("Agreement") previously entered between you and Linden
13Lab. By accessing, using, copying, modifying or distributing this
14software, you acknowledge that you have been informed of your
15obligations under the Agreement and agree to abide by those obligations.
16
17ALL LINDEN LAB SOURCE CODE IS PROVIDED "AS IS." LINDEN LAB MAKES NO
18WARRANTIES, EXPRESS, IMPLIED OR OTHERWISE, REGARDING ITS ACCURACY,
19COMPLETENESS OR PERFORMANCE.
20$/LicenseInfo$
21"""
22
23import sys, os, getopt
24import simplejson
25
26
def usage():
    """Write the command-line help text for this script to stdout."""
    emit = sys.stdout.write
    for text in (
        "Usage:",
        sys.argv[0] + " [options]",
        "  Digest the OProfile report forms that come out of the",
        "  simperf_oprof_ctl program's -r/--report command.  The result",
        "  is an array of dictionaires with the following keys:",
        "",
        "  symbol        Name of sampled, calling, or called procedure",
        "  file          Executable or library where symbol resides",
        "  percentage    Percentage contribution to profile, calls or called",
        "  samples       Sample count",
        "  calls         Methods called by the method in question (full only)",
        "  called_by     Methods calling the method (full only)",
        "",
        "  For 'full' reports the two keys 'calls' and 'called_by' are",
        "  themselves arrays of dictionaries based on the first four keys.",
        "",
        "Return Codes:",
        "  None.  Aggressively digests everything.  Will likely mung results",
        "  if a program or library has whitespace in its name.",
        "",
        "Options:",
        "  -i, --in      Input settings filename.  (Default:  stdin)",
        "  -o, --out     Output settings filename.  (Default:  stdout)",
        "  -h, --help    Print this message and exit.",
        "",
        "Interfaces:",
        "   class SimPerfOProfileInterface()",
    ):
        emit(text + "\n")
55
class SimPerfOProfileInterface:
    """Digest 'opreport' output (brief or full/callgraph form) into a
    list of dictionaries.

    Public attributes after parse():
      isBrief -- True for the brief report form, False for full
      isValid -- True once a report body was found and digested
      result  -- list of {"samples", "percentage", "file", "symbol"}
                 dicts; full reports add "calls" and "called_by" lists
    """
    def __init__(self):
        self.isBrief = True        # public
        self.isValid = False       # public
        self.result = []           # public

    def _entry(self, line):
        # Split a report line into its four fields; None if it doesn't fit.
        try:
            samples, pct, image, symbol = line.split(None, 3)
        except ValueError:
            return None
        return {"samples" : samples,
                "percentage" : pct,
                "file" : image,
                "symbol" : symbol.strip("\n")}

    def parse(self, input):
        # Scan for the column-header line (first field "samples"); the
        # line after it tells brief from full ("------" separator).
        seen_header = False
        for line in input:
            if seen_header:
                if line.startswith("------"):
                    self.isBrief = False
                    self._parseFull(input)
                else:
                    self._parseBrief(input, line)
                self.isValid = True
                return
            try:
                first, _rest = line.split(None, 1)
            except ValueError:
                continue
            if first == "samples":
                seen_header = True

    def _parseBrief(self, input, line1):
        # line1 was already consumed by parse(); digest it first.
        entry = self._entry(line1)
        if entry is not None:
            self.result.append(entry)
        for line in input:
            entry = self._entry(line)
            if entry is not None:
                self.result.append(entry)

    def _parseFull(self, input):
        in_calls = False           # False while in the 'called_by' section
        calls = []
        called_by = []
        current = {}
        for line in input:
            if line.startswith("------"):
                # Separator: flush the completed record, start a new one.
                if len(current):
                    current["calls"] = calls
                    current["called_by"] = called_by
                    self.result.append(current)
                in_calls = False
                calls = []
                called_by = []
                current = {}
                continue
            entry = self._entry(line)
            if entry is None:
                continue
            # The unindented line is the profiled symbol itself; indented
            # lines before it are callers, after it are callees.
            if not line.startswith(" "):
                current = entry
                in_calls = True
            elif in_calls:
                calls.append(entry)
            else:
                called_by.append(entry)
        if len(current):
            current["calls"] = calls
            current["called_by"] = called_by
            self.result.append(current)
134
135
def main(argv=None):
    """Entry point: parse options, digest an OProfile report, emit JSON.

    argv -- argument list to parse (defaults to sys.argv[1:]).  The
    original accepted this parameter but ignored it and always read
    sys.argv.  Returns 0 on success; exceptions percolate up.
    """
    if argv is None:
        argv = sys.argv[1:]
    opts, args = getopt.getopt(argv, "i:o:h", ["in=", "out=", "help"])
    input_file = sys.stdin
    output_file = sys.stdout
    for o, a in opts:
        if o in ("-i", "--in"):
            input_file = open(a, 'r')
        if o in ("-o", "--out"):
            output_file = open(a, 'w')
        if o in ("-h", "--help"):
            usage()
            sys.exit(0)

    oprof = SimPerfOProfileInterface()
    oprof.parse(input_file)
    if input_file != sys.stdin:
        input_file.close()

    # Create JSONable dict with interesting data and format/print it.
    # (write() replaces the Python-2-only 'print >>' syntax.)
    output_file.write(simplejson.dumps(oprof.result) + "\n")

    return 0

if __name__ == "__main__":
    sys.exit(main())
diff --git a/linden/indra/lib/python/indra/util/simperf_proc_interface.py b/linden/indra/lib/python/indra/util/simperf_proc_interface.py
new file mode 100755
index 0000000..62a63fa
--- /dev/null
+++ b/linden/indra/lib/python/indra/util/simperf_proc_interface.py
@@ -0,0 +1,164 @@
1#!/usr/bin/python
2
3# ----------------------------------------------------
4# Utility to extract log messages from *.<pid>.llsd
5# files that contain performance statistics.
6
7# ----------------------------------------------------
8import sys, os
9
10if os.path.exists("setup-path.py"):
11 execfile("setup-path.py")
12
13from indra.base import llsd
14
15DEFAULT_PATH="/dev/shm/simperf/"
16
17
18# ----------------------------------------------------
19# Pull out the stats and return a single document
def parse_logfile(filename, target_column=None, verbose=False):
    """Parse a simperf stats log, one llsd document per line.

    filename      -- path to the log file
    target_column -- if given, keep only that key (plus 'fps',
                     '/total_time' and 'utc_time') from each document
    verbose       -- print progress to stdout
    Returns a list of parsed (or trimmed) documents.
    """
    full_doc = []
    # Open source temp log file.  Let exceptions percolate up.
    sourcefile = open(filename, 'r')
    try:
        if verbose:
            print("Reading " + filename)

        # Parse and collect all lines from the file.  Iterating the file
        # object replaces xreadlines(), which was removed in Python 3.
        for line in sourcefile:
            partial_doc = llsd.parse(line)
            if partial_doc is not None:
                if target_column is None:
                    full_doc.append(partial_doc)
                else:
                    trim_doc = { target_column: partial_doc[target_column] }
                    if target_column != "fps":
                        trim_doc[ 'fps' ] = partial_doc[ 'fps' ]
                    trim_doc[ '/total_time' ] = partial_doc[ '/total_time' ]
                    trim_doc[ 'utc_time' ] = partial_doc[ 'utc_time' ]
                    full_doc.append(trim_doc)
    finally:
        # Close the file even when llsd.parse raises.
        sourcefile.close()
    return full_doc
44
45# Extract just the meta info line, and the timestamp of the first/last frame entry.
def parse_logfile_info(filename, verbose=False):
    """Return the meta-info (first) line of a stats log as a dict.

    Adds 'start_time' and 'end_time' (ints) read from the first and last
    frame entries; either is left as None when a timestamp cannot be
    parsed.  Returns None for an empty file.
    """
    # Open source temp log file.  Let exceptions percolate up.
    sourcefile = open(filename, 'rU') # U is to open with Universal newline support

    if verbose:
        print("Reading " + filename)

    # The first line is the meta info line.
    info_line = sourcefile.readline()
    if not info_line:
        sourcefile.close()
        return None

    # The rest of the lines are frames.  Read the first and last to get the time range.
    info = llsd.parse( info_line )
    info['start_time'] = None
    info['end_time'] = None
    first_frame = sourcefile.readline()
    if first_frame:
        try:
            info['start_time'] = int(llsd.parse(first_frame)['timestamp'])
        except Exception:
            pass

    # Read the file backwards in growing chunks to find the last two lines.
    sourcefile.seek(0, 2)
    file_size = sourcefile.tell()
    offset = 1024
    end_time = None
    if file_size < offset:
        offset = file_size
    while 1:
        sourcefile.seek(-1*offset, 2)
        read_str = sourcefile.read(offset)
        # Remove newline at the end (guard against an empty chunk).
        if read_str and read_str[offset - 1] == '\n':
            read_str = read_str[0:-1]
        lines = read_str.split('\n')
        if len(lines) > 2:  # Got two whole lines
            try:
                end_time = llsd.parse(lines[-1])['timestamp']
            except Exception:
                # We couldn't parse this line.  Try once more.
                try:
                    end_time = llsd.parse(lines[-2])['timestamp']
                except Exception:
                    # Nope.  Just move on.
                    pass
            break
        if len(read_str) == file_size:  # Reached the beginning
            break
        offset += 1024

    # BUG FIX: int(None) used to raise TypeError when no trailing
    # timestamp could be parsed; leave 'end_time' as None instead.
    if end_time is not None:
        info['end_time'] = int(end_time)

    sourcefile.close()
    return info
105
106
def parse_proc_filename(filename):
    """Split a "<type>_proc.<pid>.llsd" filename into (pid, stat_type).

    Returns (None, None) when the filename does not match the pattern.
    """
    try:
        name_as_list = filename.split(".")
        cur_stat_type = name_as_list[0].split("_")[0]
        cur_pid = name_as_list[1]
    except (IndexError, ValueError):
        # BUG FIX: 'except IndexError, ValueError:' bound the caught
        # IndexError to the name ValueError and never caught ValueError.
        return (None, None)
    return (cur_pid, cur_stat_type)
115
116# ----------------------------------------------------
def get_simstats_list(path=None):
    """ Return meta-info dicts for stats listed in <type>_proc.<pid>.llsd
    files under *path* (default DEFAULT_PATH). """
    if path is None:
        path = DEFAULT_PATH
    simstats_list = []
    for file_name in os.listdir(path):
        if file_name.endswith(".llsd") and file_name != "simperf_proc_config.llsd":
            # os.path.join works whether or not *path* ends in a
            # separator; plain 'path + file_name' broke without one.
            simstats_info = parse_logfile_info(os.path.join(path, file_name))
            if simstats_info is not None:
                simstats_list.append(simstats_info)
    return simstats_list
128
def get_log_info_list(pid=None, stat_type=None, path=None, target_column=None, verbose=False):
    """ Return {pid: parsed-log-data} from all llsd files under *path*
    (default DEFAULT_PATH) matching the given pid and stat type. """
    if path is None:
        path = DEFAULT_PATH
    log_info_list = {}
    for file_name in os.listdir(path):
        if file_name.endswith(".llsd") and file_name != "simperf_proc_config.llsd":
            (cur_pid, cur_stat_type) = parse_proc_filename(file_name)
            if cur_pid is None:
                continue
            if pid is not None and pid != cur_pid:
                continue
            if stat_type is not None and stat_type != cur_stat_type:
                continue
            # os.path.join tolerates a *path* without a trailing separator.
            log_info_list[cur_pid] = parse_logfile(os.path.join(path, file_name),
                                                  target_column, verbose)
    return log_info_list
145
def delete_simstats_files(pid=None, stat_type=None, path=None):
    """ Delete matching <type>_proc.<pid>.llsd files under *path*
    (default DEFAULT_PATH); return the list of deleted pids. """
    if path is None:
        path = DEFAULT_PATH
    del_list = []
    for file_name in os.listdir(path):
        if file_name.endswith(".llsd") and file_name != "simperf_proc_config.llsd":
            (cur_pid, cur_stat_type) = parse_proc_filename(file_name)
            if cur_pid is None:
                continue
            if pid is not None and pid != cur_pid:
                continue
            if stat_type is not None and stat_type != cur_stat_type:
                continue
            del_list.append(cur_pid)
            # BUG FIX: the unlink used DEFAULT_PATH even when a custom
            # *path* was supplied, deleting the wrong (or no) files.
            # Allow delete related exceptions to percolate up if this fails.
            os.unlink(os.path.join(path, file_name))
    return del_list
164