From 7abecb48babe6a6f09bf6692ba55076546cfced9 Mon Sep 17 00:00:00 2001
From: Jacek Antonelli
Date: Mon, 1 Dec 2008 17:39:58 -0600
Subject: Second Life viewer sources 1.22.0-RC

---
 linden/indra/lib/python/indra/base/lllog.py        |    2 +-
 linden/indra/lib/python/indra/base/llsd.py         |  205 +++++--
 linden/indra/lib/python/indra/ipc/llmessage.py     |    2 +-
 linden/indra/lib/python/indra/ipc/llsdhttp.py      |    2 +
 linden/indra/lib/python/indra/ipc/mysql_pool.py    |   23 +-
 linden/indra/lib/python/indra/ipc/saranwrap.py     |  651 ---------------------
 linden/indra/lib/python/indra/ipc/siesta.py        |    4 +-
 .../lib/python/indra/util/fastest_elementtree.py   |   31 +-
 linden/indra/lib/python/indra/util/iterators.py    |   63 ++
 .../indra/lib/python/indra/util/iterators_test.py  |   72 +++
 linden/indra/lib/python/indra/util/llmanifest.py   |    3 +-
 .../indra/lib/python/indra/util/llperformance.py   |  158 +++++
 linden/indra/lib/python/indra/util/named_query.py  |   20 +-
 .../python/indra/util/simperf_host_xml_parser.py   |  338 +++++++++++
 .../python/indra/util/simperf_oprof_interface.py   |  160 +++++
 .../python/indra/util/simperf_proc_interface.py    |  164 ++++++
 16 files changed, 1157 insertions(+), 741 deletions(-)
 delete mode 100644 linden/indra/lib/python/indra/ipc/saranwrap.py
 create mode 100644 linden/indra/lib/python/indra/util/iterators.py
 create mode 100755 linden/indra/lib/python/indra/util/iterators_test.py
 create mode 100755 linden/indra/lib/python/indra/util/llperformance.py
 create mode 100755 linden/indra/lib/python/indra/util/simperf_host_xml_parser.py
 create mode 100755 linden/indra/lib/python/indra/util/simperf_oprof_interface.py
 create mode 100755 linden/indra/lib/python/indra/util/simperf_proc_interface.py

diff --git a/linden/indra/lib/python/indra/base/lllog.py b/linden/indra/lib/python/indra/base/lllog.py
index 99c50ef..1301894 100644
--- a/linden/indra/lib/python/indra/base/lllog.py
+++ b/linden/indra/lib/python/indra/base/lllog.py
@@ -59,7 +59,7 @@ class Logger(object):
         return self._sequence
 
     def log(self, msg, llsd):
-        payload = 'LLLOGMESSAGE (%d) %s %s' % (self.next(), msg,
+        payload = 'INFO: log: LLLOGMESSAGE (%d) %s %s' % (self.next(), msg,
                                                format_notation(llsd))
         syslog.syslog(payload)
 
diff --git a/linden/indra/lib/python/indra/base/llsd.py b/linden/indra/lib/python/indra/base/llsd.py
index 995ace7..5b8f5d7 100644
--- a/linden/indra/lib/python/indra/base/llsd.py
+++ b/linden/indra/lib/python/indra/base/llsd.py
@@ -33,19 +33,26 @@ import time
 import types
 import re
 
-from indra.util.fastest_elementtree import fromstring
+from indra.util.fastest_elementtree import ElementTreeError, fromstring
 from indra.base import lluuid
 
-int_regex = re.compile("[-+]?\d+")
-real_regex = re.compile("[-+]?(\d+(\.\d*)?|\d*\.\d+)([eE][-+]?\d+)?")
-alpha_regex = re.compile("[a-zA-Z]+")
-date_regex = re.compile("(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})T(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})(?P<second_float>\.\d{2})?Z")
-#date: d"YYYY-MM-DDTHH:MM:SS.FFZ"
+try:
+    import cllsd
+except ImportError:
+    cllsd = None
+
+int_regex = re.compile(r"[-+]?\d+")
+real_regex = re.compile(r"[-+]?(\d+(\.\d*)?|\d*\.\d+)([eE][-+]?\d+)?")
+alpha_regex = re.compile(r"[a-zA-Z]+")
+date_regex = re.compile(r"(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})T"
+                        r"(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})"
+                        r"(?P<second_float>(\.\d+)?)Z")
+#date: d"YYYY-MM-DDTHH:MM:SS.FFFFFFZ"
 
 class LLSDParseError(Exception):
     pass
 
-class LLSDSerializationError(Exception):
+class LLSDSerializationError(TypeError):
     pass
 
@@ -62,14 +69,7 @@ BOOL_FALSE = ('0', '0.0', 'false', '')
 
 def format_datestr(v):
     """ Formats a datetime object into the string
format shared by xml and notation serializations.""" - second_str = "" - if v.microsecond > 0: - seconds = v.second + float(v.microsecond) / 1000000 - second_str = "%05.2f" % seconds - else: - second_str = "%d" % v.second - return '%s%sZ' % (v.strftime('%Y-%m-%dT%H:%M:'), second_str) - + return v.isoformat() + 'Z' def parse_datestr(datestr): """Parses a datetime object from the string format shared by xml and notation serializations.""" @@ -89,7 +89,7 @@ def parse_datestr(datestr): seconds_float = match.group('second_float') microsecond = 0 if seconds_float: - microsecond = int(seconds_float[1:]) * 10000 + microsecond = int(float('0' + seconds_float) * 1e6) return datetime.datetime(year, month, day, hour, minute, second, microsecond) @@ -116,7 +116,7 @@ def uuid_to_python(node): return lluuid.UUID(node.text) def str_to_python(node): - return unicode(node.text or '').encode('utf8', 'replace') + return node.text or '' def bin_to_python(node): return binary(base64.decodestring(node.text or '')) @@ -189,9 +189,13 @@ class LLSDXMLFormatter(object): if(contents is None or contents is ''): return "<%s />" % (name,) else: + if type(contents) is unicode: + contents = contents.encode('utf-8') return "<%s>%s" % (name, contents, name) def xml_esc(self, v): + if type(v) is unicode: + v = v.encode('utf-8') return v.replace('&', '&').replace('<', '<').replace('>', '>') def LLSD(self, v): @@ -237,9 +241,14 @@ class LLSDXMLFormatter(object): raise LLSDSerializationError("Cannot serialize unknown type: %s (%s)" % ( t, something)) - def format(self, something): + def _format(self, something): return '' + self.elt("llsd", self.generate(something)) + def format(self, something): + if cllsd: + return cllsd.llsd_to_xml(something) + return self._format(something) + _g_xml_formatter = None def format_xml(something): global _g_xml_formatter @@ -356,8 +365,10 @@ class LLSDNotationFormatter(object): def UUID(self, v): return "u%s" % v def BINARY(self, v): - raise LLSDSerializationError("binary notation not yet supported") + return 'b64"' + base64.encodestring(v) + '"' def STRING(self, v): + if isinstance(v, unicode): + v = v.encode('utf-8') return "'%s'" % v.replace("\\", "\\\\").replace("'", "\\'") def URI(self, v): return 'l"%s"' % str(v).replace("\\", "\\\\").replace('"', '\\"') @@ -366,16 +377,24 @@ class LLSDNotationFormatter(object): def ARRAY(self, v): return "[%s]" % ','.join([self.generate(item) for item in v]) def MAP(self, v): - return "{%s}" % ','.join(["'%s':%s" % (key.replace("\\", "\\\\").replace("'", "\\'"), self.generate(value)) + def fix(key): + if isinstance(key, unicode): + return key.encode('utf-8') + return key + return "{%s}" % ','.join(["'%s':%s" % (fix(key).replace("\\", "\\\\").replace("'", "\\'"), self.generate(value)) for key, value in v.items()]) def generate(self, something): t = type(something) - if self.type_map.has_key(t): - return self.type_map[t](something) + handler = self.type_map.get(t) + if handler: + return handler(something) else: - raise LLSDSerializationError("Cannot serialize unknown type: %s (%s)" % ( - t, something)) + try: + return self.ARRAY(iter(something)) + except TypeError: + raise LLSDSerializationError( + "Cannot serialize unknown type: %s (%s)" % (t, something)) def format(self, something): return self.generate(something) @@ -479,7 +498,6 @@ class LLSDBinaryParser(object): raise LLSDParseError("invalid map key at byte %d." 
% ( self._index - 1,))
             value = self._parse()
-            #print "kv:",key,value
             rv[key] = value
             count += 1
             cc = self._buffer[self._index]
@@ -636,11 +654,23 @@ class LLSDNotationParser(object):
             # 'd' = date in seconds since epoch
             return self._parse_date()
         elif cc == 'b':
-            raise LLSDParseError("binary notation not yet supported")
+            return self._parse_binary()
         else:
             raise LLSDParseError("invalid token at index %d: %d" % (
                 self._index - 1, ord(cc)))
 
+    def _parse_binary(self):
+        i = self._index
+        if self._buffer[i:i+2] == '64':
+            q = self._buffer[i+2]
+            e = self._buffer.find(q, i+3)
+            try:
+                return base64.decodestring(self._buffer[i+3:e])
+            finally:
+                self._index = e + 1
+        else:
+            raise LLSDParseError('random horrible binary format not supported')
+
     def _parse_map(self):
         """ map: { string:object, string:object } """
         rv = {}
@@ -653,30 +683,23 @@ class LLSDNotationParser(object):
                 if cc in ("'", '"', 's'):
                     key = self._parse_string(cc)
                     found_key = True
-                    #print "key:",key
                 elif cc.isspace() or cc == ',':
                     cc = self._buffer[self._index]
                     self._index += 1
                 else:
                     raise LLSDParseError("invalid map key at byte %d." % (
                         self._index - 1,))
+            elif cc.isspace() or cc == ':':
+                cc = self._buffer[self._index]
+                self._index += 1
+                continue
             else:
-                if cc.isspace() or cc == ':':
-                    #print "skipping whitespace '%s'" % cc
-                    cc = self._buffer[self._index]
-                    self._index += 1
-                    continue
                 self._index += 1
                 value = self._parse()
-                #print "kv:",key,value
                 rv[key] = value
                 found_key = False
                 cc = self._buffer[self._index]
                 self._index += 1
-                #if cc == '}':
-                #    break
-                #cc = self._buffer[self._index]
-                #self._index += 1
 
         return rv
 
@@ -840,6 +863,14 @@ def format_binary(something):
     return '<?llsd/binary?>\n' + _format_binary_recurse(something)
 
 def _format_binary_recurse(something):
+    def _format_list(something):
+        array_builder = []
+        array_builder.append('[' + struct.pack('!i', len(something)))
+        for item in something:
+            array_builder.append(_format_binary_recurse(item))
+        array_builder.append(']')
+        return ''.join(array_builder)
+
     if something is None:
         return '!'
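    # (Annotation, not part of the original patch: a hedged sketch of the
    # container framing the _format_list helper above emits, namely a '['
    # tag, a big-endian ('!') 32-bit element count, the serialized items,
    # then ']'. The literal below is derived from that code, not captured
    # from a real run.)
    #
    #   import struct
    #   empty = '[' + struct.pack('!i', 0) + ']'      # 6 bytes on the wire
    #   assert struct.unpack('!i', empty[1:5]) == (0,)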
    elif isinstance(something, LLSD):
@@ -857,7 +888,10 @@ def _format_binary_recurse(something):
         return 'u' + something._bits
     elif isinstance(something, binary):
         return 'b' + struct.pack('!i', len(something)) + something
-    elif isinstance(something, (str, unicode)):
+    elif isinstance(something, str):
+        return 's' + struct.pack('!i', len(something)) + something
+    elif isinstance(something, unicode):
+        something = something.encode('utf-8')
         return 's' + struct.pack('!i', len(something)) + something
     elif isinstance(something, uri):
         return 'l' + struct.pack('!i', len(something)) + something
@@ -865,35 +899,50 @@ def _format_binary_recurse(something):
         seconds_since_epoch = time.mktime(something.timetuple())
         return 'd' + struct.pack('!d', seconds_since_epoch)
     elif isinstance(something, (list, tuple)):
-        array_builder = []
-        array_builder.append('[' + struct.pack('!i', len(something)))
-        for item in something:
-            array_builder.append(_format_binary_recurse(item))
-        array_builder.append(']')
-        return ''.join(array_builder)
+        return _format_list(something)
     elif isinstance(something, dict):
         map_builder = []
         map_builder.append('{' + struct.pack('!i', len(something)))
         for key, value in something.items():
+            if isinstance(key, unicode):
+                key = key.encode('utf-8')
             map_builder.append('k' + struct.pack('!i', len(key)) + key)
             map_builder.append(_format_binary_recurse(value))
         map_builder.append('}')
         return ''.join(map_builder)
     else:
-        raise LLSDSerializationError("Cannot serialize unknown type: %s (%s)" % (
-            type(something), something))
+        try:
+            return _format_list(list(something))
+        except TypeError:
+            raise LLSDSerializationError(
+                "Cannot serialize unknown type: %s (%s)" %
+                (type(something), something))
+
+
+def parse_binary(something):
+    header = '<?llsd/binary?>\n'
+    if not something.startswith(header):
+        raise LLSDParseError('LLSD binary encoding header not found')
+    return LLSDBinaryParser().parse(something[len(header):])
+
+def parse_xml(something):
+    try:
+        return to_python(fromstring(something)[0])
+    except ElementTreeError, err:
+        raise LLSDParseError(*err.args)
+
+def parse_notation(something):
+    return LLSDNotationParser().parse(something)
 
 def parse(something):
     try:
         if something.startswith('<?llsd/binary?>'):
-            just_binary = something.split('\n', 1)[1]
-            return LLSDBinaryParser().parse(just_binary)
+            return parse_binary(something)
         # This should be better.
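        # (Annotation, not part of the original patch: a hedged sketch of the
        # dispatch above. The literals are assumptions based on the grammars
        # accepted by the two parsers, not captured output.)
        #
        #   from indra.base import llsd
        #   llsd.parse("{'key':i42}")     # no binary header, no '<': notation
        #   llsd.parse('<llsd><integer>42</integer></llsd>')   # '<': parse_xml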
elif something.startswith('<'): - return to_python(fromstring(something)[0]) + return parse_xml(something) else: - return LLSDNotationParser().parse(something) + return parse_notation(something) except KeyError, e: raise Exception('LLSD could not be parsed: %s' % (e,)) @@ -913,6 +962,9 @@ class LLSD(object): undef = LLSD(None) +XML_MIME_TYPE = 'application/llsd+xml' +BINARY_MIME_TYPE = 'application/llsd+binary' + # register converters for llsd in mulib, if it is available try: from mulib import stacked, mu @@ -922,7 +974,7 @@ except: # mulib not available, don't print an error message since this is normal pass else: - mu.add_parser(parse, 'application/llsd+xml') + mu.add_parser(parse, XML_MIME_TYPE) mu.add_parser(parse, 'application/llsd+binary') def llsd_convert_xml(llsd_stuff, request): @@ -931,11 +983,58 @@ else: def llsd_convert_binary(llsd_stuff, request): request.write(format_binary(llsd_stuff)) - for typ in [LLSD, dict, list, tuple, str, int, float, bool, unicode, type(None)]: - stacked.add_producer(typ, llsd_convert_xml, 'application/llsd+xml') + for typ in [LLSD, dict, list, tuple, str, int, long, float, bool, unicode, type(None)]: + stacked.add_producer(typ, llsd_convert_xml, XML_MIME_TYPE) stacked.add_producer(typ, llsd_convert_xml, 'application/xml') stacked.add_producer(typ, llsd_convert_xml, 'text/xml') stacked.add_producer(typ, llsd_convert_binary, 'application/llsd+binary') stacked.add_producer(LLSD, llsd_convert_xml, '*/*') + + # in case someone is using the legacy mu.xml wrapper, we need to + # tell mu to produce application/xml or application/llsd+xml + # (based on the accept header) from raw xml. Phoenix 2008-07-21 + stacked.add_producer(mu.xml, mu.produce_raw, XML_MIME_TYPE) + stacked.add_producer(mu.xml, mu.produce_raw, 'application/xml') + + + +# mulib wsgi stuff +# try: +# from mulib import mu, adapters +# +# # try some known attributes from mulib to be ultra-sure we've imported it +# mu.get_current +# adapters.handlers +# except: +# # mulib not available, don't print an error message since this is normal +# pass +# else: +# def llsd_xml_handler(content_type): +# def handle_llsd_xml(env, start_response): +# llsd_stuff, _ = mu.get_current(env) +# result = format_xml(llsd_stuff) +# start_response("200 OK", [('Content-Type', content_type)]) +# env['mu.negotiated_type'] = content_type +# yield result +# return handle_llsd_xml +# +# def llsd_binary_handler(content_type): +# def handle_llsd_binary(env, start_response): +# llsd_stuff, _ = mu.get_current(env) +# result = format_binary(llsd_stuff) +# start_response("200 OK", [('Content-Type', content_type)]) +# env['mu.negotiated_type'] = content_type +# yield result +# return handle_llsd_binary +# +# adapters.DEFAULT_PARSERS[XML_MIME_TYPE] = parse + +# for typ in [LLSD, dict, list, tuple, str, int, float, bool, unicode, type(None)]: +# for content_type in (XML_MIME_TYPE, 'application/xml'): +# adapters.handlers.set_handler(typ, llsd_xml_handler(content_type), content_type) +# +# adapters.handlers.set_handler(typ, llsd_binary_handler(BINARY_MIME_TYPE), BINARY_MIME_TYPE) +# +# adapters.handlers.set_handler(LLSD, llsd_xml_handler(XML_MIME_TYPE), '*/*') diff --git a/linden/indra/lib/python/indra/ipc/llmessage.py b/linden/indra/lib/python/indra/ipc/llmessage.py index 446679d..c2f3fd4 100644 --- a/linden/indra/lib/python/indra/ipc/llmessage.py +++ b/linden/indra/lib/python/indra/ipc/llmessage.py @@ -88,7 +88,7 @@ class Message: UDPDEPRECATED = "UDPDeprecated" UDPBLACKLISTED = "UDPBlackListed" deprecations = [ NOTDEPRECATED, 
UDPDEPRECATED, UDPBLACKLISTED, DEPRECATED ] - # in order of increasing deprecation + # in order of increasing deprecation def __init__(self, name, number, priority, trust, coding): self.name = name diff --git a/linden/indra/lib/python/indra/ipc/llsdhttp.py b/linden/indra/lib/python/indra/ipc/llsdhttp.py index 1cf1146..ed64899 100644 --- a/linden/indra/lib/python/indra/ipc/llsdhttp.py +++ b/linden/indra/lib/python/indra/ipc/llsdhttp.py @@ -51,6 +51,8 @@ request_ = suite.request_ # import every httpc error exception into our namespace for convenience for x in httpc.status_to_error_map.itervalues(): globals()[x.__name__] = x +ConnectionError = httpc.ConnectionError +Retriable = httpc.Retriable for x in (httpc.ConnectionError,): globals()[x.__name__] = x diff --git a/linden/indra/lib/python/indra/ipc/mysql_pool.py b/linden/indra/lib/python/indra/ipc/mysql_pool.py index 507b185..25a66cf 100644 --- a/linden/indra/lib/python/indra/ipc/mysql_pool.py +++ b/linden/indra/lib/python/indra/ipc/mysql_pool.py @@ -30,8 +30,10 @@ import MySQLdb from eventlet import db_pool class DatabaseConnector(db_pool.DatabaseConnector): - def __init__(self, credentials, min_size = 0, max_size = 4, *args, **kwargs): - super(DatabaseConnector, self).__init__(MySQLdb, credentials, min_size, max_size, conn_pool=db_pool.ConnectionPool, *args, **kwargs) + def __init__(self, credentials, *args, **kwargs): + super(DatabaseConnector, self).__init__(MySQLdb, credentials, + conn_pool=db_pool.ConnectionPool, + *args, **kwargs) # get is extended relative to eventlet.db_pool to accept a port argument def get(self, host, dbname, port=3306): @@ -42,7 +44,7 @@ class DatabaseConnector(db_pool.DatabaseConnector): new_kwargs['host'] = host new_kwargs['port'] = port new_kwargs.update(self.credentials_for(host)) - dbpool = ConnectionPool(self._min_size, self._max_size, *self._args, **new_kwargs) + dbpool = ConnectionPool(*self._args, **new_kwargs) self._databases[key] = dbpool return self._databases[key] @@ -51,8 +53,8 @@ class ConnectionPool(db_pool.TpooledConnectionPool): """A pool which gives out saranwrapped MySQLdb connections from a pool """ - def __init__(self, min_size = 0, max_size = 4, *args, **kwargs): - super(ConnectionPool, self).__init__(MySQLdb, min_size, max_size, *args, **kwargs) + def __init__(self, *args, **kwargs): + super(ConnectionPool, self).__init__(MySQLdb, *args, **kwargs) def get(self): conn = super(ConnectionPool, self).get() @@ -77,14 +79,3 @@ class ConnectionPool(db_pool.TpooledConnectionPool): conn.connection_parameters = converted_kwargs return conn - def clear(self): - """ Close all connections that this pool still holds a reference to, leaving it empty.""" - for conn in self.free_items: - try: - conn.close() - except: - pass # even if stuff happens here, we still want to at least try to close all the other connections - self.free_items.clear() - - def __del__(self): - self.clear() diff --git a/linden/indra/lib/python/indra/ipc/saranwrap.py b/linden/indra/lib/python/indra/ipc/saranwrap.py deleted file mode 100644 index e0205bf..0000000 --- a/linden/indra/lib/python/indra/ipc/saranwrap.py +++ /dev/null @@ -1,651 +0,0 @@ -"""\ -@file saranwrap.py -@author Phoenix -@date 2007-07-13 -@brief A simple, pickle based rpc mechanism which reflects python -objects and callables. - -$LicenseInfo:firstyear=2007&license=mit$ - -Copyright (c) 2007-2008, Linden Research, Inc. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -$/LicenseInfo$ - -This file provides classes and exceptions used for simple python level -remote procedure calls. This is achieved by intercepting the basic -getattr and setattr calls in a client proxy, which commnicates those -down to the server which will dispatch them to objects in it's process -space. - -The basic protocol to get and set attributes is for the client proxy -to issue the command: - -getattr $id $name -setattr $id $name $value - -getitem $id $item -setitem $id $item $value -eq $id $rhs -del $id - -When the get returns a callable, the client proxy will provide a -callable proxy which will invoke a remote procedure call. The command -issued from the callable proxy to server is: - -call $id $name $args $kwargs - -If the client supplies an id of None, then the get/set/call is applied -to the object(s) exported from the server. - -The server will parse the get/set/call, take the action indicated, and -return back to the caller one of: - -value $val -callable -object $id -exception $excp - -To handle object expiration, the proxy will instruct the rpc server to -discard objects which are no longer in use. This is handled by -catching proxy deletion and sending the command: - -del $id - -The server will handle this by removing clearing it's own internal -references. This does not mean that the object will necessarily be -cleaned from the server, but no artificial references will remain -after successfully completing. On completion, the server will return -one of: - -value None -exception $excp - -The server also accepts a special command for debugging purposes: - -status - -Which will be intercepted by the server to write back: - -status {...} - -The wire protocol is to pickle the Request class in this file. The -request class is basically an action and a map of parameters' -""" - -import os -import cPickle -import struct -import sys - -try: - set = set - frozenset = frozenset -except NameError: - from sets import Set as set, ImmutableSet as frozenset - -from eventlet.processes import Process -from eventlet import api - -# -# debugging hooks -# -_g_debug_mode = False -if _g_debug_mode: - import traceback - -def pythonpath_sync(): - """ -@brief apply the current sys.path to the environment variable PYTHONPATH, so that child processes have the same paths as the caller does. 
-""" - pypath = os.pathsep.join(sys.path) - os.environ['PYTHONPATH'] = pypath - -def wrap(obj, dead_callback = None): - """ -@brief wrap in object in another process through a saranwrap proxy -@param object The object to wrap. -@param dead_callback A callable to invoke if the process exits.""" - - if type(obj).__name__ == 'module': - return wrap_module(obj.__name__, dead_callback) - pythonpath_sync() - p = Process('python', [__file__, '--child'], dead_callback) - prox = Proxy(p, p) - prox.obj = obj - return prox.obj - -def wrap_module(fqname, dead_callback = None): - """ -@brief wrap a module in another process through a saranwrap proxy -@param fqname The fully qualified name of the module. -@param dead_callback A callable to invoke if the process exits.""" - pythonpath_sync() - global _g_debug_mode - if _g_debug_mode: - p = Process('python', [__file__, '--module', fqname, '--logfile', '/tmp/saranwrap.log'], dead_callback) - else: - p = Process('python', [__file__, '--module', fqname,], dead_callback) - prox = Proxy(p, p) - return prox - -def status(proxy): - """ -@brief get the status from the server through a proxy -@param proxy a saranwrap.Proxy object connected to a server.""" - _write_request(Request('status', {}), proxy.__local_dict['_out']) - return _read_response(None, None, proxy.__local_dict['_in'], proxy.__local_dict['_out'], None) - -class BadResponse(Exception): - """"This exception is raised by an saranwrap client when it could - parse but cannot understand the response from the server.""" - pass - -class BadRequest(Exception): - """"This exception is raised by a saranwrap server when it could parse - but cannot understand the response from the server.""" - pass - -class UnrecoverableError(Exception): - pass - -class Request(object): - "@brief A wrapper class for proxy requests to the server." - def __init__(self, action, param): - self._action = action - self._param = param - def __str__(self): - return "Request `"+self._action+"` "+str(self._param) - def __getitem__(self, name): - return self._param[name] - def action(self): - return self._action - -def _read_lp_hunk(stream): - len_bytes = stream.read(4) - length = struct.unpack('I', len_bytes)[0] - body = stream.read(length) - return body - -def _read_response(id, attribute, input, output, dead_list): - """@brief local helper method to read respones from the rpc server.""" - try: - str = _read_lp_hunk(input) - _prnt(`str`) - response = cPickle.loads(str) - except AttributeError, e: - raise UnrecoverableError(e) - _prnt("response: %s" % response) - if response[0] == 'value': - return response[1] - elif response[0] == 'callable': - return CallableProxy(id, attribute, input, output, dead_list) - elif response[0] == 'object': - return ObjectProxy(input, output, response[1], dead_list) - elif response[0] == 'exception': - exp = response[1] - raise exp - else: - raise BadResponse(response[0]) - -def _write_lp_hunk(stream, hunk): - write_length = struct.pack('I', len(hunk)) - stream.write(write_length + hunk) - if hasattr(stream, 'flush'): - stream.flush() - -def _write_request(param, output): - _prnt("request: %s" % param) - str = cPickle.dumps(param) - _write_lp_hunk(output, str) - -def _is_local(attribute): - "Return true if the attribute should be handled locally" -# return attribute in ('_in', '_out', '_id', '__getattribute__', '__setattr__', '__dict__') - # good enough for now. 
:) - if '__local_dict' in attribute: - return True - return False - -def _prnt(message): - global _g_debug_mode - if _g_debug_mode: - print message - -_g_logfile = None -def _log(message): - global _g_logfile - if _g_logfile: - _g_logfile.write(str(os.getpid()) + ' ' + message) - _g_logfile.write('\n') - _g_logfile.flush() - -def _unmunge_attr_name(name): - """ Sometimes attribute names come in with classname prepended, not sure why. - This function removes said classname, because we're huge hackers and we didn't - find out what the true right thing to do is. *FIX: find out. """ - if(name.startswith('_Proxy')): - name = name[len('_Proxy'):] - if(name.startswith('_ObjectProxy')): - name = name[len('_ObjectProxy'):] - return name - - -class Proxy(object): - """\ -@class Proxy -@brief This class wraps a remote python process, presumably available -in an instance of an Server. - -This is the class you will typically use as a client to a child -process. Simply instantiate one around a file-like interface and start -calling methods on the thing that is exported. The dir() builtin is -not supported, so you have to know what has been exported. -""" - def __init__(self, input, output, dead_list = None): - """\ -@param input a file-like object which supports read(). -@param output a file-like object which supports write() and flush(). -@param id an identifier for the remote object. humans do not provide this. -""" - # default dead_list inside the function because all objects in method - # argument lists are init-ed only once globally - if dead_list is None: - dead_list = set() - #_prnt("Proxy::__init__") - self.__local_dict = dict( - _in = input, - _out = output, - _dead_list = dead_list, - _id = None) - - def __getattribute__(self, attribute): - #_prnt("Proxy::__getattr__: %s" % attribute) - if _is_local(attribute): - # call base class getattribute so we actually get the local variable - attribute = _unmunge_attr_name(attribute) - return super(Proxy, self).__getattribute__(attribute) - else: - my_in = self.__local_dict['_in'] - my_out = self.__local_dict['_out'] - my_id = self.__local_dict['_id'] - - _dead_list = self.__local_dict['_dead_list'] - for dead_object in _dead_list.copy(): - request = Request('del', {'id':dead_object}) - _write_request(request, my_out) - response = _read_response(my_id, attribute, my_in, my_out, _dead_list) - _dead_list.remove(dead_object) - - # Pass all public attributes across to find out if it is - # callable or a simple attribute. 
- request = Request('getattr', {'id':my_id, 'attribute':attribute}) - _write_request(request, my_out) - return _read_response(my_id, attribute, my_in, my_out, _dead_list) - - def __setattr__(self, attribute, value): - #_prnt("Proxy::__setattr__: %s" % attribute) - if _is_local(attribute): - # It must be local to this actual object, so we have to apply - # it to the dict in a roundabout way - attribute = _unmunge_attr_name(attribute) - super(Proxy, self).__getattribute__('__dict__')[attribute]=value - else: - my_in = self.__local_dict['_in'] - my_out = self.__local_dict['_out'] - my_id = self.__local_dict['_id'] - _dead_list = self.__local_dict['_dead_list'] - # Pass the set attribute across - request = Request('setattr', {'id':my_id, 'attribute':attribute, 'value':value}) - _write_request(request, my_out) - return _read_response(my_id, attribute, my_in, my_out, _dead_list) - -class ObjectProxy(Proxy): - """\ -@class ObjectProxy -@brief This class wraps a remote object in the Server - -This class will be created during normal operation, and users should -not need to deal with this class directly.""" - - def __init__(self, input, output, id, dead_list): - """\ -@param input a file-like object which supports read(). -@param output a file-like object which supports write() and flush(). -@param id an identifier for the remote object. humans do not provide this. -""" - Proxy.__init__(self, input, output, dead_list) - self.__local_dict['_id'] = id - #_prnt("ObjectProxy::__init__ %s" % self._id) - - def __del__(self): - my_id = self.__local_dict['_id'] - _prnt("ObjectProxy::__del__ %s" % my_id) - self.__local_dict['_dead_list'].add(my_id) - - def __getitem__(self, key): - my_in = self.__local_dict['_in'] - my_out = self.__local_dict['_out'] - my_id = self.__local_dict['_id'] - _dead_list = self.__local_dict['_dead_list'] - request = Request('getitem', {'id':my_id, 'key':key}) - _write_request(request, my_out) - return _read_response(my_id, key, my_in, my_out, _dead_list) - - def __setitem__(self, key, value): - my_in = self.__local_dict['_in'] - my_out = self.__local_dict['_out'] - my_id = self.__local_dict['_id'] - _dead_list = self.__local_dict['_dead_list'] - request = Request('setitem', {'id':my_id, 'key':key, 'value':value}) - _write_request(request, my_out) - return _read_response(my_id, key, my_in, my_out, _dead_list) - - def __eq__(self, rhs): - my_in = self.__local_dict['_in'] - my_out = self.__local_dict['_out'] - my_id = self.__local_dict['_id'] - _dead_list = self.__local_dict['_dead_list'] - request = Request('eq', {'id':my_id, 'rhs':rhs.__local_dict['_id']}) - _write_request(request, my_out) - return _read_response(my_id, None, my_in, my_out, _dead_list) - - def __repr__(self): - # apparently repr(obj) skips the whole getattribute thing and just calls __repr__ - # directly. Therefore we just pass it through the normal call pipeline, and - # tack on a little header so that you can tell it's an object proxy. - val = self.__repr__() - return "saran:%s" % val - - def __str__(self): - # see description for __repr__, because str(obj) works the same. We don't - # tack anything on to the return value here because str values are used as data. - return self.__str__() - - def __len__(self): - # see description for __repr__, len(obj) is the same. Unfortunately, __len__ is also - # used when determining whether an object is boolean or not, e.g. 
if proxied_object: - return self.__len__() - -def proxied_type(self): - if type(self) is not ObjectProxy: - return type(self) - - my_in = self.__local_dict['_in'] - my_out = self.__local_dict['_out'] - my_id = self.__local_dict['_id'] - request = Request('type', {'id':my_id}) - _write_request(request, my_out) - # dead list can be none because we know the result will always be - # a value and not an ObjectProxy itself - return _read_response(my_id, None, my_in, my_out, None) - -class CallableProxy(object): - """\ -@class CallableProxy -@brief This class wraps a remote function in the Server - -This class will be created by an Proxy during normal operation, -and users should not need to deal with this class directly.""" - - def __init__(self, object_id, name, input, output, dead_list): - #_prnt("CallableProxy::__init__: %s, %s" % (object_id, name)) - self._object_id = object_id - self._name = name - self._in = input - self._out = output - self._dead_list = dead_list - - def __call__(self, *args, **kwargs): - #_prnt("CallableProxy::__call__: %s, %s" % (args, kwargs)) - - # Pass the call across. We never build a callable without - # having already checked if the method starts with '_' so we - # can safely pass this one to the remote object. - #_prnt("calling %s %s" % (self._object_id, self._name) - request = Request('call', {'id':self._object_id, 'name':self._name, 'args':args, 'kwargs':kwargs}) - _write_request(request, self._out) - return _read_response(self._object_id, self._name, self._in, self._out, self._dead_list) - -class Server(object): - def __init__(self, input, output, export): - """\ -@param input a file-like object which supports read(). -@param output a file-like object which supports write() and flush(). -@param export an object, function, or map which is exported to clients -when the id is None.""" - #_log("Server::__init__") - self._in = input - self._out = output - self._export = export - self._next_id = 1 - self._objects = {} - - def handle_status(self, object, req): - return { - 'object_count':len(self._objects), - 'next_id':self._next_id, - 'pid':os.getpid()} - - def handle_getattr(self, object, req): - try: - return getattr(object, req['attribute']) - except AttributeError, e: - if hasattr(object, "__getitem__"): - return object[req['attribute']] - else: - raise e - #_log('getattr: %s' % str(response)) - - def handle_setattr(self, object, req): - try: - return setattr(object, req['attribute'], req['value']) - except AttributeError, e: - if hasattr(object, "__setitem__"): - return object.__setitem__(req['attribute'], req['value']) - else: - raise e - - def handle_getitem(self, object, req): - return object[req['key']] - - def handle_setitem(self, object, req): - object[req['key']] = req['value'] - return None # *TODO figure out what the actual return value of __setitem__ should be - - def handle_eq(self, object, req): - #_log("__eq__ %s %s" % (object, req)) - rhs = None - try: - rhs = self._objects[req['rhs']] - except KeyError, e: - return False - return (object == rhs) - - def handle_call(self, object, req): - #_log("calling %s " % (req['name'])) - try: - fn = getattr(object, req['name']) - except AttributeError, e: - if hasattr(object, "__setitem__"): - fn = object[req['name']] - else: - raise e - - return fn(*req['args'],**req['kwargs']) - - def handle_del(self, object, req): - id = req['id'] - _log("del %s from %s" % (id, self._objects)) - - # *TODO what does __del__ actually return? 
- del self._objects[id] - return None - - def handle_type(self, object, req): - return type(object) - - def loop(self): - """@brief Loop forever and respond to all requests.""" - _log("Server::loop") - while True: - try: - try: - str = _read_lp_hunk(self._in) - except EOFError: - sys.exit(0) # normal exit - request = cPickle.loads(str) - _log("request: %s (%s)" % (request, self._objects)) - req = request - id = None - object = None - try: - id = req['id'] - if id: - id = int(id) - object = self._objects[id] - #_log("id, object: %d %s" % (id, object)) - except Exception, e: - #_log("Exception %s" % str(e)) - pass - if object is None or id is None: - id = None - object = self._export - #_log("found object %s" % str(object)) - - # Handle the request via a method with a special name on the server - handler_name = 'handle_%s' % request.action() - - try: - handler = getattr(self, handler_name) - except AttributeError: - raise BadRequest, request.action() - - response = handler(object, request) - - # figure out what to do with the response, and respond - # apprpriately. - if request.action() in ['status', 'type']: - # have to handle these specially since we want to - # pickle up the actual value and not return a proxy - self.respond(['value', response]) - elif callable(response): - #_log("callable %s" % response) - self.respond(['callable']) - elif self.is_value(response): - self.respond(['value', response]) - else: - self._objects[self._next_id] = response - #_log("objects: %s" % str(self._objects)) - self.respond(['object', self._next_id]) - self._next_id += 1 - except SystemExit, e: - raise e - except Exception, e: - self.write_exception(e) - except: - self.write_exception(sys.exc_info()[0]) - - def is_value(self, value): - """\ -@brief Test if value should be serialized as a simple dataset. -@param value The value to test. -@return Returns true if value is a simple serializeable set of data. 
-""" - return type(value) in (str,unicode,int,float,long,bool,type(None)) - - def respond(self, body): - _log("responding with: %s" % body) - #_log("objects: %s" % self._objects) - s = cPickle.dumps(body) - _log(`s`) - str = _write_lp_hunk(self._out, s) - - def write_exception(self, e): - """@brief Helper method to respond with an exception.""" - #_log("exception: %s" % sys.exc_info()[0]) - # TODO: serialize traceback using generalization of code from mulib.htmlexception - self.respond(['exception', e]) - global _g_debug_mode - if _g_debug_mode: - _log("traceback: %s" % traceback.format_tb(sys.exc_info()[2])) - - -# test function used for testing that final except clause -def raise_a_weird_error(): - raise "oh noes you can raise a string" - -# test function used for testing return of unpicklable exceptions -def raise_an_unpicklable_error(): - class Unpicklable(Exception): - pass - raise Unpicklable() - -# test function used for testing return of picklable exceptions -def raise_standard_error(): - raise FloatingPointError() - -# test function to make sure print doesn't break the wrapper -def print_string(str): - print str - -# test function to make sure printing on stdout doesn't break the -# wrapper -def err_string(str): - print >>sys.stderr, str - -def main(): - import optparse - parser = optparse.OptionParser( - usage="usage: %prog [options]", - description="Simple saranwrap.Server wrapper") - parser.add_option( - '-c', '--child', default=False, action='store_true', - help='Wrap an object serialed via setattr.') - parser.add_option( - '-m', '--module', type='string', dest='module', default=None, - help='a module to load and export.') - parser.add_option( - '-l', '--logfile', type='string', dest='logfile', default=None, - help='file to log to.') - options, args = parser.parse_args() - global _g_logfile - if options.logfile: - _g_logfile = open(options.logfile, 'a') - if options.module: - export = api.named(options.module) - server = Server(sys.stdin, sys.stdout, export) - elif options.child: - server = Server(sys.stdin, sys.stdout, {}) - - # *HACK: some modules may emit on stderr, which breaks everything. - class NullSTDOut(object): - def write(a, b): - pass - sys.stderr = NullSTDOut() - sys.stdout = NullSTDOut() - - # Loop until EOF - server.loop() - if _g_logfile: - _g_logfile.close() - - -if __name__ == "__main__": - main() diff --git a/linden/indra/lib/python/indra/ipc/siesta.py b/linden/indra/lib/python/indra/ipc/siesta.py index 5fbea29..b206f18 100644 --- a/linden/indra/lib/python/indra/ipc/siesta.py +++ b/linden/indra/lib/python/indra/ipc/siesta.py @@ -24,9 +24,9 @@ except ImportError: llsd_parsers = { 'application/json': json_decode, - 'application/llsd+binary': llsd.parse_binary, + llsd.BINARY_MIME_TYPE: llsd.parse_binary, 'application/llsd+notation': llsd.parse_notation, - 'application/llsd+xml': llsd.parse_xml, + llsd.XML_MIME_TYPE: llsd.parse_xml, 'application/xml': llsd.parse_xml, } diff --git a/linden/indra/lib/python/indra/util/fastest_elementtree.py b/linden/indra/lib/python/indra/util/fastest_elementtree.py index 64aed09..2470143 100644 --- a/linden/indra/lib/python/indra/util/fastest_elementtree.py +++ b/linden/indra/lib/python/indra/util/fastest_elementtree.py @@ -2,9 +2,9 @@ @file fastest_elementtree.py @brief Concealing some gnarly import logic in here. This should export the interface of elementtree. -$LicenseInfo:firstyear=2006&license=mit$ +$LicenseInfo:firstyear=2008&license=mit$ -Copyright (c) 2006-2008, Linden Research, Inc. 
+Copyright (c) 2008, Linden Research, Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -26,27 +26,40 @@ THE SOFTWARE. $/LicenseInfo$ """ -# Using celementree might cause some unforeseen problems so here's a +# The parsing exception raised by the underlying library depends +# on the ElementTree implementation we're using, so we provide an +# alias here. +# +# Use ElementTreeError as the exception type for catching parsing +# errors. + + +# Using cElementTree might cause some unforeseen problems, so here's a # convenient off switch. -# *NOTE: turned off cause of problems. :-( *TODO: debug -use_celementree = False +use_celementree = True try: if not use_celementree: raise ImportError() - from cElementTree import * ## This does not work under Windows + # Python 2.3 and 2.4. + from cElementTree import * + ElementTreeError = SyntaxError except ImportError: try: if not use_celementree: raise ImportError() - ## This is the name of cElementTree under python 2.5 + # Python 2.5 and above. from xml.etree.cElementTree import * + ElementTreeError = SyntaxError except ImportError: + # Pure Python code. try: - ## This is the old name of elementtree, for use with 2.3 + # Python 2.3 and 2.4. from elementtree.ElementTree import * except ImportError: - ## This is the name of elementtree under python 2.5 + # Python 2.5 and above. from xml.etree.ElementTree import * + # The pure Python ElementTree module uses Expat for parsing. + from xml.parsers.expat import ExpatError as ElementTreeError diff --git a/linden/indra/lib/python/indra/util/iterators.py b/linden/indra/lib/python/indra/util/iterators.py new file mode 100644 index 0000000..6a98c97 --- /dev/null +++ b/linden/indra/lib/python/indra/util/iterators.py @@ -0,0 +1,63 @@ +"""\ +@file iterators.py +@brief Useful general-purpose iterators. + +$LicenseInfo:firstyear=2008&license=mit$ + +Copyright (c) 2008, Linden Research, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +$/LicenseInfo$ +""" + +from __future__ import nested_scopes + +def iter_chunks(rows, aggregate_size=100): + """ + Given an iterable set of items (@p rows), produces lists of up to @p + aggregate_size items at a time, for example: + + iter_chunks([1,2,3,4,5,6,7,8,9,10], 3) + + Values for @p aggregate_size < 1 will raise ValueError. 
+
+    Will return a generator that produces, in the following order:
+    - [1, 2, 3]
+    - [4, 5, 6]
+    - [7, 8, 9]
+    - [10]
+    """
+    if aggregate_size < 1:
+        raise ValueError()
+
+    def iter_chunks_inner():
+        row_iter = iter(rows)
+        done = False
+        agg = []
+        while not done:
+            try:
+                row = row_iter.next()
+                agg.append(row)
+            except StopIteration:
+                done = True
+            if agg and (len(agg) >= aggregate_size or done):
+                yield agg
+                agg = []
+
+    return iter_chunks_inner()
diff --git a/linden/indra/lib/python/indra/util/iterators_test.py b/linden/indra/lib/python/indra/util/iterators_test.py
new file mode 100755
index 0000000..7fd9e73
--- /dev/null
+++ b/linden/indra/lib/python/indra/util/iterators_test.py
@@ -0,0 +1,72 @@
+"""\
+@file iterators_test.py
+@brief Test cases for iterators module.
+
+$LicenseInfo:firstyear=2008&license=mit$
+
+Copyright (c) 2008, Linden Research, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+$/LicenseInfo$ +""" + +import unittest + +from indra.util.iterators import iter_chunks + +class TestIterChunks(unittest.TestCase): + """Unittests for iter_chunks""" + def test_bad_agg_size(self): + rows = [1,2,3,4] + self.assertRaises(ValueError, iter_chunks, rows, 0) + self.assertRaises(ValueError, iter_chunks, rows, -1) + + try: + for i in iter_chunks(rows, 0): + pass + except ValueError: + pass + else: + self.fail() + + try: + result = list(iter_chunks(rows, 0)) + except ValueError: + pass + else: + self.fail() + def test_empty(self): + rows = [] + result = list(iter_chunks(rows)) + self.assertEqual(result, []) + def test_small(self): + rows = [[1]] + result = list(iter_chunks(rows, 2)) + self.assertEqual(result, [[[1]]]) + def test_size(self): + rows = [[1],[2]] + result = list(iter_chunks(rows, 2)) + self.assertEqual(result, [[[1],[2]]]) + def test_multi_agg(self): + rows = [[1],[2],[3],[4],[5]] + result = list(iter_chunks(rows, 2)) + self.assertEqual(result, [[[1],[2]],[[3],[4]],[[5]]]) + +if __name__ == "__main__": + unittest.main() diff --git a/linden/indra/lib/python/indra/util/llmanifest.py b/linden/indra/lib/python/indra/util/llmanifest.py index 4675177..a00d242 100644 --- a/linden/indra/lib/python/indra/util/llmanifest.py +++ b/linden/indra/lib/python/indra/util/llmanifest.py @@ -584,7 +584,7 @@ class LLManifest(object): def wildcard_regex(self, src_glob, dst_glob): src_re = re.escape(src_glob) - src_re = src_re.replace('\*', '([-a-zA-Z0-9._ ]+)') + src_re = src_re.replace('\*', '([-a-zA-Z0-9._ ]*)') dst_temp = dst_glob i = 1 while dst_temp.count("*") > 0: @@ -621,6 +621,7 @@ class LLManifest(object): count = 0 if self.wildcard_pattern.search(src): for s,d in self.expand_globs(src, dst): + assert(s != d) count += self.process_file(s, d) else: # if we're specifying a single path (not a glob), diff --git a/linden/indra/lib/python/indra/util/llperformance.py b/linden/indra/lib/python/indra/util/llperformance.py new file mode 100755 index 0000000..7c52730 --- /dev/null +++ b/linden/indra/lib/python/indra/util/llperformance.py @@ -0,0 +1,158 @@ +#!/usr/bin/python + +# ------------------------------------------------ +# Sim metrics utility functions. + +import glob, os, time, sys, stat, exceptions + +from indra.base import llsd + +gBlockMap = {} #Map of performance metric data with function hierarchy information. +gCurrentStatPath = "" + +gIsLoggingEnabled=False + +class LLPerfStat: + def __init__(self,key): + self.mTotalTime = 0 + self.mNumRuns = 0 + self.mName=key + self.mTimeStamp = int(time.time()*1000) + self.mUTCTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()) + + def __str__(self): + return "%f" % self.mTotalTime + + def start(self): + self.mStartTime = int(time.time() * 1000000) + self.mNumRuns += 1 + + def stop(self): + execution_time = int(time.time() * 1000000) - self.mStartTime + self.mTotalTime += execution_time + + def get_map(self): + results={} + results['name']=self.mName + results['utc_time']=self.mUTCTime + results['timestamp']=self.mTimeStamp + results['us']=self.mTotalTime + results['count']=self.mNumRuns + return results + +class PerfError(exceptions.Exception): + def __init__(self): + return + + def __Str__(self): + print "","Unfinished LLPerfBlock" + +class LLPerfBlock: + def __init__( self, key ): + global gBlockMap + global gCurrentStatPath + global gIsLoggingEnabled + + #Check to see if we're running metrics right now. + if gIsLoggingEnabled: + self.mRunning = True #Mark myself as running. 
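# (Annotation, not part of the original patch: a hedged usage sketch for the
# LLPerfBlock class being defined here. The stat key and the workload function
# are hypothetical names, not part of the library.)
#
#   block = LLPerfBlock('named_query/run')   # starts a timer when logging is on
#   try:
#       do_expensive_work()                  # hypothetical workload
#   finally:
#       block.finish()                       # stops the timer, pops the path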
+ + self.mPreviousStatPath = gCurrentStatPath + gCurrentStatPath += "/" + key + if gCurrentStatPath not in gBlockMap: + gBlockMap[gCurrentStatPath] = LLPerfStat(key) + + self.mStat = gBlockMap[gCurrentStatPath] + self.mStat.start() + + def finish( self ): + global gBlockMap + global gIsLoggingEnabled + + if gIsLoggingEnabled: + self.mStat.stop() + self.mRunning = False + gCurrentStatPath = self.mPreviousStatPath + +# def __del__( self ): +# if self.mRunning: +# #SPATTERS FIXME +# raise PerfError + +class LLPerformance: + #-------------------------------------------------- + # Determine whether or not we want to log statistics + + def __init__( self, process_name = "python" ): + self.process_name = process_name + self.init_testing() + self.mTimeStamp = int(time.time()*1000) + self.mUTCTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()) + + def init_testing( self ): + global gIsLoggingEnabled + + host_performance_file = "/dev/shm/simperf/simperf_proc_config.llsd" + + #If file exists, open + if os.path.exists(host_performance_file): + file = open (host_performance_file,'r') + + #Read serialized LLSD from file. + body = llsd.parse(file.read()) + + #Calculate time since file last modified. + stats = os.stat(host_performance_file) + now = time.time() + mod = stats[stat.ST_MTIME] + age = now - mod + + if age < ( body['duration'] ): + gIsLoggingEnabled = True + + + def get ( self ): + global gIsLoggingEnabled + return gIsLoggingEnabled + + #def output(self,ptr,path): + # if 'stats' in ptr: + # stats = ptr['stats'] + # self.mOutputPtr[path] = stats.get_map() + + # if 'children' in ptr: + # children=ptr['children'] + + # curptr = self.mOutputPtr + # curchildren={} + # curptr['children'] = curchildren + + # for key in children: + # curchildren[key]={} + # self.mOutputPtr = curchildren[key] + # self.output(children[key],path + '/' + key) + + def done(self): + global gBlockMap + + if not self.get(): + return + + output_name = "/dev/shm/simperf/%s_proc.%d.llsd" % (self.process_name, os.getpid()) + output_file = open(output_name, 'w') + process_info = { + "name" : self.process_name, + "pid" : os.getpid(), + "ppid" : os.getppid(), + "timestamp" : self.mTimeStamp, + "utc_time" : self.mUTCTime, + } + output_file.write(llsd.format_notation(process_info)) + output_file.write('\n') + + for key in gBlockMap.keys(): + gBlockMap[key] = gBlockMap[key].get_map() + output_file.write(llsd.format_notation(gBlockMap)) + output_file.write('\n') + output_file.close() + diff --git a/linden/indra/lib/python/indra/util/named_query.py b/linden/indra/lib/python/indra/util/named_query.py index 20f2ec7..c5fb498 100644 --- a/linden/indra/lib/python/indra/util/named_query.py +++ b/linden/indra/lib/python/indra/util/named_query.py @@ -63,7 +63,7 @@ def _init_g_named_manager(sql_dir = None): # extra fallback directory in case config doesn't return what we want if sql_dir is None: - sql_dir = os.path.dirname(__file__) + "../../../../web/dataservice/sql" + sql_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..", "web", "dataservice", "sql") global _g_named_manager _g_named_manager = NamedQueryManager( @@ -103,11 +103,12 @@ class NamedQuery(object): def __init__(self, name, filename): """ Construct a NamedQuery object. 
The name argument is an arbitrary name as a handle for the query, and the filename is - a path to a file containing an llsd named query document.""" + a path to a file or a file-like object containing an llsd named + query document.""" self._stat_interval_seconds = 5 # 5 seconds self._name = name - if (filename is not None) \ - and (NQ_FILE_SUFFIX != filename[-NQ_FILE_SUFFIX_LEN:]): + if (filename is not None and isinstance(filename, (str, unicode)) + and NQ_FILE_SUFFIX != filename[-NQ_FILE_SUFFIX_LEN:]): filename = filename + NQ_FILE_SUFFIX self._location = filename self._alternative = dict() @@ -122,8 +123,8 @@ class NamedQuery(object): def get_modtime(self): """ Returns the mtime (last modified time) of the named query - file, if such exists.""" - if self._location: + filename. For file-like objects, expect a modtime of 0""" + if self._location and isinstance(self._location, (str, unicode)): return os.path.getmtime(self._location) return 0 @@ -131,7 +132,12 @@ class NamedQuery(object): """ Loads and parses the named query file into self. Does nothing if self.location is nonexistant.""" if self._location: - self._reference_contents(llsd.parse(open(self._location).read())) + if isinstance(self._location, (str, unicode)): + contents = llsd.parse(open(self._location).read()) + else: + # we probably have a file-like object. Godspeed! + contents = llsd.parse(self._location.read()) + self._reference_contents(contents) # Check for alternative implementations try: for name, alt in self._contents['alternative'].items(): diff --git a/linden/indra/lib/python/indra/util/simperf_host_xml_parser.py b/linden/indra/lib/python/indra/util/simperf_host_xml_parser.py new file mode 100755 index 0000000..b608415 --- /dev/null +++ b/linden/indra/lib/python/indra/util/simperf_host_xml_parser.py @@ -0,0 +1,338 @@ +#!/usr/bin/env python +"""\ +@file simperf_host_xml_parser.py +@brief Digest collector's XML dump and convert to simple dict/list structure + +$LicenseInfo:firstyear=2008&license=mit$ + +Copyright (c) 2008, Linden Research, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +$/LicenseInfo$ +""" + +import sys, os, getopt, time +import simplejson +from xml import sax + + +def usage(): + print "Usage:" + print sys.argv[0] + " [options]" + print " Convert RRD's XML dump to JSON. Script to convert the simperf_host_collector-" + print " generated RRD dump into JSON. Steps include converting selected named" + print " fields from GAUGE type to COUNTER type by computing delta with preceding" + print " values. 
Top-level named fields are:" + print + print " lastupdate Time (javascript timestamp) of last data sample" + print " step Time in seconds between samples" + print " ds Data specification (name/type) for each column" + print " database Table of data samples, one time step per row" + print + print "Options:" + print " -i, --in Input settings filename. (Default: stdin)" + print " -o, --out Output settings filename. (Default: stdout)" + print " -h, --help Print this message and exit." + print + print "Example: %s -i rrddump.xml -o rrddump.json" % sys.argv[0] + print + print "Interfaces:" + print " class SimPerfHostXMLParser() # SAX content handler" + print " def simperf_host_xml_fixup(parser) # post-parse value fixup" + +class SimPerfHostXMLParser(sax.handler.ContentHandler): + + def __init__(self): + pass + + def startDocument(self): + self.rrd_last_update = 0 # public + self.rrd_step = 0 # public + self.rrd_ds = [] # public + self.rrd_records = [] # public + self._rrd_level = 0 + self._rrd_parse_state = 0 + self._rrd_chars = "" + self._rrd_capture = False + self._rrd_ds_val = {} + self._rrd_data_row = [] + self._rrd_data_row_has_nan = False + + def endDocument(self): + pass + + # Nasty little ad-hoc state machine to extract the elements that are + # necessary from the 'rrdtool dump' XML output. The same element + # name '' is used for two different data sets so we need to pay + # some attention to the actual structure to get the ones we want + # and ignore the ones we don't. + + def startElement(self, name, attrs): + self._rrd_level = self._rrd_level + 1 + self._rrd_capture = False + if self._rrd_level == 1: + if name == "rrd" and self._rrd_parse_state == 0: + self._rrd_parse_state = 1 # In + self._rrd_capture = True + self._rrd_chars = "" + elif self._rrd_level == 2: + if self._rrd_parse_state == 1: + if name == "lastupdate": + self._rrd_parse_state = 2 # In + self._rrd_capture = True + self._rrd_chars = "" + elif name == "step": + self._rrd_parse_state = 3 # In + self._rrd_capture = True + self._rrd_chars = "" + elif name == "ds": + self._rrd_parse_state = 4 # In + self._rrd_ds_val = {} + self._rrd_chars = "" + elif name == "rra": + self._rrd_parse_state = 5 # In + elif self._rrd_level == 3: + if self._rrd_parse_state == 4: + if name == "name": + self._rrd_parse_state = 6 # In + self._rrd_capture = True + self._rrd_chars = "" + elif name == "type": + self._rrd_parse_state = 7 # In + self._rrd_capture = True + self._rrd_chars = "" + elif self._rrd_parse_state == 5: + if name == "database": + self._rrd_parse_state = 8 # In + elif self._rrd_level == 4: + if self._rrd_parse_state == 8: + if name == "row": + self._rrd_parse_state = 9 # In + self._rrd_data_row = [] + self._rrd_data_row_has_nan = False + elif self._rrd_level == 5: + if self._rrd_parse_state == 9: + if name == "v": + self._rrd_parse_state = 10 # In + self._rrd_capture = True + self._rrd_chars = "" + + def endElement(self, name): + self._rrd_capture = False + if self._rrd_parse_state == 10: + self._rrd_capture = self._rrd_level == 6 + if self._rrd_level == 5: + if self._rrd_chars == "NaN": + self._rrd_data_row_has_nan = True + else: + self._rrd_data_row.append(self._rrd_chars) + self._rrd_parse_state = 9 # In + elif self._rrd_parse_state == 9: + if self._rrd_level == 4: + if not self._rrd_data_row_has_nan: + self.rrd_records.append(self._rrd_data_row) + self._rrd_parse_state = 8 # In + elif self._rrd_parse_state == 8: + if self._rrd_level == 3: + self._rrd_parse_state = 5 # In + elif self._rrd_parse_state == 7: + if self._rrd_level 
+    def characters(self, content):
+        if self._rrd_capture:
+            self._rrd_chars = self._rrd_chars + content.strip()
+
+def _make_numeric(value):
+    try:
+        value = float(value)
+    except:
+        value = ""
+    return value
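[Editor's sketch: since every sample funnels through this helper, its coercion rule is worth pinning down — anything float() rejects silently becomes an empty string. The inputs below are invented:]

    _make_numeric("1228150800")   # -> 1228150800.0
    _make_numeric("1.25e-2")      # -> 0.0125
    _make_numeric("bogus")        # -> "" (unparseable samples become empty strings)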
+def simperf_host_xml_fixup(parser, filter_start_time = None, filter_end_time = None):
+    # Fixup for GAUGE fields that are really COUNTS.  They
+    # were forced to GAUGE to try to disable rrdtool's
+    # data interpolation/extrapolation for non-uniform time
+    # samples.
+    fixup_tags = [ "cpu_user",
+                   "cpu_nice",
+                   "cpu_sys",
+                   "cpu_idle",
+                   "cpu_waitio",
+                   "cpu_intr",
+                   # "file_active",
+                   # "file_free",
+                   # "inode_active",
+                   # "inode_free",
+                   "netif_in_kb",
+                   "netif_in_pkts",
+                   "netif_in_errs",
+                   "netif_in_drop",
+                   "netif_out_kb",
+                   "netif_out_pkts",
+                   "netif_out_errs",
+                   "netif_out_drop",
+                   "vm_page_in",
+                   "vm_page_out",
+                   "vm_swap_in",
+                   "vm_swap_out",
+                   #"vm_mem_total",
+                   #"vm_mem_used",
+                   #"vm_mem_active",
+                   #"vm_mem_inactive",
+                   #"vm_mem_free",
+                   #"vm_mem_buffer",
+                   #"vm_swap_cache",
+                   #"vm_swap_total",
+                   #"vm_swap_used",
+                   #"vm_swap_free",
+                   "cpu_interrupts",
+                   "cpu_switches",
+                   "cpu_forks" ]
+
+    col_count = len(parser.rrd_ds)
+    row_count = len(parser.rrd_records)
+
+    # Process the last row separately, just to make all values numeric.
+    for j in range(col_count):
+        parser.rrd_records[row_count - 1][j] = _make_numeric(parser.rrd_records[row_count - 1][j])
+
+    # Process all other rows/columns.
+    last_different_row = row_count - 1
+    current_row = row_count - 2
+    while current_row >= 0:
+        # Check for a different value than the previous row.  If everything
+        # is the same then this is probably just a filler/bogus entry.
+        is_different = False
+        for j in range(col_count):
+            parser.rrd_records[current_row][j] = _make_numeric(parser.rrd_records[current_row][j])
+            if parser.rrd_records[current_row][j] != parser.rrd_records[last_different_row][j]:
+                # We're good.  This is a different row.
+                is_different = True
+
+        if not is_different:
+            # This is a filler/bogus entry.  Just ignore it.
+            for j in range(col_count):
+                parser.rrd_records[current_row][j] = float('nan')
+        else:
+            # Some tags need to be converted into deltas.
+            for j in range(col_count):
+                if parser.rrd_ds[j]["name"] in fixup_tags:
+                    parser.rrd_records[last_different_row][j] = \
+                        parser.rrd_records[last_different_row][j] - parser.rrd_records[current_row][j]
+            last_different_row = current_row
+
+        current_row -= 1
+
+    # Set fixup_tags in the first row to 'nan' since they aren't useful anymore.
+    for j in range(col_count):
+        if parser.rrd_ds[j]["name"] in fixup_tags:
+            parser.rrd_records[0][j] = float('nan')
+
+    # Add a timestamp to each row and to the catalog.  Format and name
+    # chosen to match other simulator logging (hopefully).
+    start_time = parser.rrd_last_update - (parser.rrd_step * (row_count - 1))
+    # Build a filtered list of rrd_records if we are limited to a time range.
+    filter_records = False
+    if filter_start_time is not None or filter_end_time is not None:
+        filter_records = True
+        filtered_rrd_records = []
+        if filter_start_time is None:
+            filter_start_time = start_time * 1000
+        if filter_end_time is None:
+            filter_end_time = parser.rrd_last_update * 1000
+
+    for i in range(row_count):
+        record_timestamp = (start_time + (i * parser.rrd_step)) * 1000
+        parser.rrd_records[i].insert(0, record_timestamp)
+        if filter_records:
+            if filter_start_time <= record_timestamp and record_timestamp <= filter_end_time:
+                filtered_rrd_records.append(parser.rrd_records[i])
+
+    if filter_records:
+        parser.rrd_records = filtered_rrd_records
+
+    parser.rrd_ds.insert(0, {"type": "GAUGE", "name": "javascript_timestamp"})
+
+
+def main(argv=None):
+    opts, args = getopt.getopt(sys.argv[1:], "i:o:h", ["in=", "out=", "help"])
+    input_file = sys.stdin
+    output_file = sys.stdout
+    for o, a in opts:
+        if o in ("-i", "--in"):
+            input_file = open(a, 'r')
+        if o in ("-o", "--out"):
+            output_file = open(a, 'w')
+        if o in ("-h", "--help"):
+            usage()
+            sys.exit(0)
+
+    # Using the SAX parser as it is at least 4X faster and far, far
+    # smaller on this dataset than the DOM-based interface in xml.dom.minidom.
+    # With SAX and a 5.4MB xml file, this requires about seven seconds of
+    # wall-clock time and 32MB VSZ.  With the DOM interface, about 22 seconds
+    # and over 270MB VSZ.
+
+    handler = SimPerfHostXMLParser()
+    sax.parse(input_file, handler)
+    if input_file != sys.stdin:
+        input_file.close()
+
+    # Various format fixups:  string-to-num, gauge-to-counts, add
+    # a time stamp, etc.
+    simperf_host_xml_fixup(handler)
+
+    # Create JSONable dict with interesting data and format/print it
+    print >>output_file, simplejson.dumps({ "step" : handler.rrd_step,
+                                            "lastupdate": handler.rrd_last_update * 1000,
+                                            "ds" : handler.rrd_ds,
+                                            "database" : handler.rrd_records })
+
+    return 0
+
+if __name__ == "__main__":
+    sys.exit(main())
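[Editor's sketch to make the gauge-to-delta pass concrete. FakeParser and its numbers are invented stand-ins for a parsed SimPerfHostXMLParser:]

    class FakeParser(object):       # stand-in for a parsed SAX handler
        pass

    p = FakeParser()
    p.rrd_ds = [{"name": "cpu_user", "type": "GAUGE"}]
    p.rrd_records = [["100"], ["250"], ["400"]]     # cumulative counts
    p.rrd_step = 300
    p.rrd_last_update = 1228150800
    simperf_host_xml_fixup(p)
    # p.rrd_records is now [[1228150200000, nan], [1228150500000, 150.0],
    # [1228150800000, 150.0]]: per-step deltas for the fixup column, a
    # javascript_timestamp column in front, and the first (un-diffable)
    # row set to nan.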
diff --git a/linden/indra/lib/python/indra/util/simperf_oprof_interface.py b/linden/indra/lib/python/indra/util/simperf_oprof_interface.py
new file mode 100755
index 0000000..a7e9a4c
--- /dev/null
+++ b/linden/indra/lib/python/indra/util/simperf_oprof_interface.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+"""\
+@file simperf_oprof_interface.py
+@brief Manage OProfile data collection on a host
+
+$LicenseInfo:firstyear=2008&license=internal$
+
+Copyright (c) 2008, Linden Research, Inc.
+
+The following source code is PROPRIETARY AND CONFIDENTIAL. Use of
+this source code is governed by the Linden Lab Source Code Disclosure
+Agreement ("Agreement") previously entered between you and Linden
+Lab. By accessing, using, copying, modifying or distributing this
+software, you acknowledge that you have been informed of your
+obligations under the Agreement and agree to abide by those obligations.
+
+ALL LINDEN LAB SOURCE CODE IS PROVIDED "AS IS." LINDEN LAB MAKES NO
+WARRANTIES, EXPRESS, IMPLIED OR OTHERWISE, REGARDING ITS ACCURACY,
+COMPLETENESS OR PERFORMANCE.
+$/LicenseInfo$
+"""
+
+import sys, os, getopt
+import simplejson
+
+
+def usage():
+    print "Usage:"
+    print sys.argv[0] + " [options]"
+    print "  Digest the OProfile report forms that come out of the"
+    print "  simperf_oprof_ctl program's -r/--report command.  The result"
+    print "  is an array of dictionaries with the following keys:"
+    print
+    print "  symbol        Name of sampled, calling, or called procedure"
+    print "  file          Executable or library where symbol resides"
+    print "  percentage    Percentage contribution to profile, calls or called"
+    print "  samples       Sample count"
+    print "  calls         Methods called by the method in question (full only)"
+    print "  called_by     Methods calling the method (full only)"
+    print
+    print "  For 'full' reports the two keys 'calls' and 'called_by' are"
+    print "  themselves arrays of dictionaries based on the first four keys."
+    print
+    print "Return Codes:"
+    print "  None.  Aggressively digests everything.  Will likely mung results"
+    print "  if a program or library has whitespace in its name."
+    print
+    print "Options:"
+    print "  -i, --in      Input settings filename.  (Default: stdin)"
+    print "  -o, --out     Output settings filename.  (Default: stdout)"
+    print "  -h, --help    Print this message and exit."
+    print
+    print "Interfaces:"
+    print "   class SimPerfOProfileInterface()"
+
+class SimPerfOProfileInterface:
+    def __init__(self):
+        self.isBrief = True             # public
+        self.isValid = False            # public
+        self.result = []                # public
+
+    def parse(self, input):
+        in_samples = False
+        for line in input:
+            if in_samples:
+                if line[0:6] == "------":
+                    self.isBrief = False
+                    self._parseFull(input)
+                else:
+                    self._parseBrief(input, line)
+                self.isValid = True
+                return
+            try:
+                hd1, remain = line.split(None, 1)
+                if hd1 == "samples":
+                    in_samples = True
+            except ValueError:
+                pass
+
+    def _parseBrief(self, input, line1):
+        try:
+            fld1, fld2, fld3, fld4 = line1.split(None, 3)
+            self.result.append({"samples" : fld1,
+                                "percentage" : fld2,
+                                "file" : fld3,
+                                "symbol" : fld4.strip("\n")})
+        except ValueError:
+            pass
+        for line in input:
+            try:
+                fld1, fld2, fld3, fld4 = line.split(None, 3)
+                self.result.append({"samples" : fld1,
+                                    "percentage" : fld2,
+                                    "file" : fld3,
+                                    "symbol" : fld4.strip("\n")})
+            except ValueError:
+                pass
+
+    def _parseFull(self, input):
+        state = 0       # In 'called_by' section
+        calls = []
+        called_by = []
+        current = {}
+        for line in input:
+            if line[0:6] == "------":
+                if len(current):
+                    current["calls"] = calls
+                    current["called_by"] = called_by
+                    self.result.append(current)
+                state = 0
+                calls = []
+                called_by = []
+                current = {}
+            else:
+                try:
+                    fld1, fld2, fld3, fld4 = line.split(None, 3)
+                    tmp = {"samples" : fld1,
+                           "percentage" : fld2,
+                           "file" : fld3,
+                           "symbol" : fld4.strip("\n")}
+                except ValueError:
+                    continue
+                if line[0] != " ":
+                    current = tmp
+                    state = 1       # In 'calls' section
+                elif state == 0:
+                    called_by.append(tmp)
+                else:
+                    calls.append(tmp)
+        if len(current):
+            current["calls"] = calls
+            current["called_by"] = called_by
+            self.result.append(current)
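[Editor's sketch of the brief-report path. The three-line REPORT below is fabricated to show the shape, not real opreport output; note that StringIO, like a file object, shares its read position between parse() and _parseBrief(), which the class relies on:]

    from cStringIO import StringIO

    REPORT = "\n".join([
        "samples  %        image name  symbol name",
        "12000    60.0000  simulator   LLMessageSystem::checkMessages(long long)",
        "8000     40.0000  libc.so     memcpy",
        ""])

    oprof = SimPerfOProfileInterface()
    oprof.parse(StringIO(REPORT))
    # oprof.isBrief stays True; oprof.result holds one dict per data line,
    # e.g. {'samples': '12000', 'percentage': '60.0000', 'file': 'simulator',
    #       'symbol': 'LLMessageSystem::checkMessages(long long)'} --
    # split(None, 3) keeps spaces inside the symbol, but a space inside the
    # image name would shift the fields, hence the caveat in usage().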
+def main(argv=None):
+    opts, args = getopt.getopt(sys.argv[1:], "i:o:h", ["in=", "out=", "help"])
+    input_file = sys.stdin
+    output_file = sys.stdout
+    for o, a in opts:
+        if o in ("-i", "--in"):
+            input_file = open(a, 'r')
+        if o in ("-o", "--out"):
+            output_file = open(a, 'w')
+        if o in ("-h", "--help"):
+            usage()
+            sys.exit(0)
+
+    oprof = SimPerfOProfileInterface()
+    oprof.parse(input_file)
+    if input_file != sys.stdin:
+        input_file.close()
+
+    # Create JSONable dict with interesting data and format/print it
+    print >>output_file, simplejson.dumps(oprof.result)
+
+    return 0
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/linden/indra/lib/python/indra/util/simperf_proc_interface.py b/linden/indra/lib/python/indra/util/simperf_proc_interface.py
new file mode 100755
index 0000000..62a63fa
--- /dev/null
+++ b/linden/indra/lib/python/indra/util/simperf_proc_interface.py
@@ -0,0 +1,164 @@
+#!/usr/bin/python
+
+# ----------------------------------------------------
+# Utility to extract log messages from *.<pid>.llsd
+# files that contain performance statistics.
+
+# ----------------------------------------------------
+import sys, os
+
+if os.path.exists("setup-path.py"):
+    execfile("setup-path.py")
+
+from indra.base import llsd
+
+DEFAULT_PATH="/dev/shm/simperf/"
+
+
+# ----------------------------------------------------
+# Pull out the stats and return a single document
+def parse_logfile(filename, target_column=None, verbose=False):
+    full_doc = []
+    # Open source temp log file.  Let exceptions percolate up.
+    sourcefile = open(filename, 'r')
+
+    if verbose:
+        print "Reading " + filename
+
+    # Parse and output all lines from the temp file
+    for line in sourcefile.xreadlines():
+        partial_doc = llsd.parse(line)
+        if partial_doc is not None:
+            if target_column is None:
+                full_doc.append(partial_doc)
+            else:
+                trim_doc = { target_column: partial_doc[target_column] }
+                if target_column != "fps":
+                    trim_doc[ 'fps' ] = partial_doc[ 'fps' ]
+                trim_doc[ '/total_time' ] = partial_doc[ '/total_time' ]
+                trim_doc[ 'utc_time' ] = partial_doc[ 'utc_time' ]
+                full_doc.append(trim_doc)
+
+    sourcefile.close()
+    return full_doc
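[Editor's sketch: trimming a stats log to one column plus the standard context fields. The path and column name below are hypothetical:]

    doc = parse_logfile("/dev/shm/simperf/cpu_proc.12345.llsd",
                        target_column="cpu_user", verbose=True)
    # Each entry in doc now carries only cpu_user plus the always-kept
    # fps, /total_time and utc_time fields.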
+
+# Extract just the meta info line, and the timestamp of the first/last frame entry.
+def parse_logfile_info(filename, verbose=False):
+    # Open source temp log file.  Let exceptions percolate up.
+    sourcefile = open(filename, 'rU') # U is to open with Universal newline support
+
+    if verbose:
+        print "Reading " + filename
+
+    # The first line is the meta info line.
+    info_line = sourcefile.readline()
+    if not info_line:
+        sourcefile.close()
+        return None
+
+    # The rest of the lines are frames.  Read the first and last to get the time range.
+    info = llsd.parse( info_line )
+    info['start_time'] = None
+    info['end_time'] = None
+    first_frame = sourcefile.readline()
+    if first_frame:
+        try:
+            info['start_time'] = int(llsd.parse(first_frame)['timestamp'])
+        except:
+            pass
+
+    # Read the file backwards to find the last two lines.
+    sourcefile.seek(0, 2)
+    file_size = sourcefile.tell()
+    offset = 1024
+    num_attempts = 0
+    end_time = None
+    if file_size < offset:
+        offset = file_size
+    while 1:
+        sourcefile.seek(-1*offset, 2)
+        read_str = sourcefile.read(offset)
+        # Remove newline at the end
+        if read_str[offset - 1] == '\n':
+            read_str = read_str[0:-1]
+        lines = read_str.split('\n')
+        full_line = None
+        if len(lines) > 2:  # Got two lines
+            try:
+                end_time = llsd.parse(lines[-1])['timestamp']
+            except:
+                # We couldn't parse this line.  Try once more.
+                try:
+                    end_time = llsd.parse(lines[-2])['timestamp']
+                except:
+                    # Nope.  Just move on.
+                    pass
+            break
+        if len(read_str) == file_size:  # Reached the beginning
+            break
+        offset += 1024
+
+    if end_time is not None:
+        info['end_time'] = int(end_time)
+
+    sourcefile.close()
+    return info
+
+
+def parse_proc_filename(filename):
+    try:
+        name_as_list = filename.split(".")
+        cur_stat_type = name_as_list[0].split("_")[0]
+        cur_pid = name_as_list[1]
+    except (IndexError, ValueError):
+        return (None, None)
+    return (cur_pid, cur_stat_type)
+
+# ----------------------------------------------------
+def get_simstats_list(path=None):
+    """ Return stats (pid, type) listed in <type>_proc.<pid>.llsd """
+    if path is None:
+        path = DEFAULT_PATH
+    simstats_list = []
+    for file_name in os.listdir(path):
+        if file_name.endswith(".llsd") and file_name != "simperf_proc_config.llsd":
+            simstats_info = parse_logfile_info(path + file_name)
+            if simstats_info is not None:
+                simstats_list.append(simstats_info)
+    return simstats_list
+
+def get_log_info_list(pid=None, stat_type=None, path=None, target_column=None, verbose=False):
+    """ Return data from all llsd files matching the pid and stat type """
+    if path is None:
+        path = DEFAULT_PATH
+    log_info_list = {}
+    for file_name in os.listdir(path):
+        if file_name.endswith(".llsd") and file_name != "simperf_proc_config.llsd":
+            (cur_pid, cur_stat_type) = parse_proc_filename(file_name)
+            if cur_pid is None:
+                continue
+            if pid is not None and pid != cur_pid:
+                continue
+            if stat_type is not None and stat_type != cur_stat_type:
+                continue
+            log_info_list[cur_pid] = parse_logfile(path + file_name, target_column, verbose)
+    return log_info_list
+
+def delete_simstats_files(pid=None, stat_type=None, path=None):
+    """ Delete *.<pid>.llsd files """
+    if path is None:
+        path = DEFAULT_PATH
+    del_list = []
+    for file_name in os.listdir(path):
+        if file_name.endswith(".llsd") and file_name != "simperf_proc_config.llsd":
+            (cur_pid, cur_stat_type) = parse_proc_filename(file_name)
+            if cur_pid is None:
+                continue
+            if pid is not None and pid != cur_pid:
+                continue
+            if stat_type is not None and stat_type != cur_stat_type:
+                continue
+            del_list.append(cur_pid)
+            # Allow delete related exceptions to percolate up if this fails.
+            os.unlink(os.path.join(path, file_name))
+    return del_list
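[Editor's sketch of how a consumer script might drive these helpers together. The pid, stat type, and column are hypothetical; get_log_info_list returns a dict keyed by pid:]

    infos = get_simstats_list()                       # one info dict per log file
    frames = get_log_info_list(pid="12345", stat_type="cpu",
                               target_column="cpu_user")
    removed = delete_simstats_files(pid="12345")      # clean up afterwards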