desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def test_search(self):
    """Test Tvdb.search method"""
    # Network-backed integration test: search for a known show and check
    # that its TVDB series id is among the results.
    matches = self.t.search('my name is earl')
    found_ids = [entry['seriesid'] for entry in matches]
    self.assertTrue(('75397' in found_ids))
def test_1(self):
    """Tests basic access of series name alias"""
    # The first search result should carry the alias list for the show.
    matches = self.t.search("Don't Trust the B---- in Apartment 23")
    first = matches[0]
    self.assertTrue(('Apartment 23' in first['aliasnames']))
def search(self, term=None, key=None):
    """Search all episodes in this show.

    Can search all data, or a specific key (for example, episodename).
    Always returns a list (possibly empty); each element is a matching
    Episode() instance, so search_results[0]['episodename'] retrieves
    the episode name of the first match.
    """
    matches = []
    for season in self.values():
        # extend() with an empty result list is a no-op, so no length check
        # is needed.
        matches.extend(season.search(term=term, key=key))
    return matches
'The show attribute points to the parent show'
def __init__(self, show=None):
self.show = show
def search(self, term=None, key=None):
    """Search all episodes in this season.

    Returns a list of matching Episode instances; see Show.search
    for further information on search.
    """
    # Episode.search returns the Episode itself on a match, None otherwise.
    found = [ep.search(term=term, key=key) for ep in self.values()]
    return [match for match in found if match is not None]
'The season attribute points to the parent season'
def __init__(self, season=None):
self.season = season
'Search episode data for term, if it matches, return the Episode (self). The key parameter can be used to limit the search to a specific element, for example, episodename. This primarily for use use by Show.search and Season.search. See Show.search for further information on search Simple example: >>> e = Episode() >>>...
def search(self, term=None, key=None):
if (term == None): raise TypeError('must supply string to search for (contents)') term = unicode(term).lower() for (cur_key, cur_value) in self.items(): (cur_key, cur_value) = (unicode(cur_key).lower(), unicode(cur_value).lower()) if ((key is not None) and (cur_key ...
'interactive (True/False): When True, uses built-in console UI is used to select the correct show. When False, the first search result is used. select_first (True/False): Automatically selects the first series search result (rather than showing the user a list of more than one series). Is overridden by interactive = Fa...
def __init__(self, interactive=False, select_first=False, debug=False, cache=True, banners=False, actors=False, custom_ui=None, language=None, search_all_languages=False, apikey=None, forceConnect=False, useZip=False, dvdorder=False):
global lastTimeout if ((not forceConnect) and (lastTimeout != None) and ((datetime.datetime.now() - lastTimeout) < datetime.timedelta(minutes=1))): raise tvdb_error('We recently timed out, so giving up early this time') self.shows = ShowContainer() self.corrections = {...
'Returns the [system temp dir]/tvdb_api-u501 (or tvdb_api-myuser)'
def _getTempDir(self):
if hasattr(os, 'getuid'): uid = ('u%d' % os.getuid()) else: try: uid = getpass.getuser() except ImportError: return os.path.join(tempfile.gettempdir(), 'tvdb_api') return os.path.join(tempfile.gettempdir(), ('tvdb_api-%s' % uid))
'Loads a URL using caching, returns an ElementTree of the source'
def _getetsrc(self, url, language=None):
src = self._loadUrl(url, language=language) try: return ElementTree.fromstring(src.rstrip('\r')) except SyntaxError: src = self._loadUrl(url, recache=True, language=language) try: return ElementTree.fromstring(src.rstrip('\r')) except SyntaxError as exceptionmsg: ...
'Creates a new episode, creating Show(), Season() and Episode()s as required. Called by _getShowData to populate show Since the nice-to-use tvdb[1][24][\'name] interface makes it impossible to do tvdb[1][24][\'name] = "name" and still be capable of checking if an episode exists so we can raise tvdb_shownotfound, we hav...
def _setItem(self, sid, seas, ep, attrib, value):
if (sid not in self.shows): self.shows[sid] = Show() if (seas not in self.shows[sid]): self.shows[sid][seas] = Season(show=self.shows[sid]) if (ep not in self.shows[sid][seas]): self.shows[sid][seas][ep] = Episode(season=self.shows[sid][seas]) self.shows[sid][seas][ep][attrib] = ...
'Sets self.shows[sid] to a new Show instance, or sets the data'
def _setShowData(self, sid, key, value):
if (sid not in self.shows): self.shows[sid] = Show() self.shows[sid].data[key] = value
'Cleans up strings returned by TheTVDB.com Issues corrected: - Replaces &amp; with & - Trailing whitespace'
def _cleanData(self, data):
data = data.replace(u'&amp;', u'&') data = data.strip() return data
'This searches TheTVDB.com for the series name and returns the result list'
def search(self, series):
series = urllib.quote(series.encode('utf-8')) log().debug(('Searching for show %s' % series)) seriesEt = self._getetsrc((self.config['url_getSeries'] % series)) allSeries = [] for series in seriesEt: result = dict(((k.tag.lower(), k.text) for k in series.getchildren())) resu...
'This searches TheTVDB.com for the series name, If a custom_ui UI is configured, it uses this to select the correct series. If not, and interactive == True, ConsoleUI is used, if not BaseUI is used to select the first result.'
def _getSeries(self, series):
allSeries = self.search(series) if (len(allSeries) == 0): log().debug('Series result returned zero') raise tvdb_shownotfound('Show-name search returned zero results (cannot find show on TVDB)') if (self.config['custom_ui'] is not None): log().debug...
'Parses banners XML, from http://thetvdb.com/api/[APIKEY]/series/[SERIES ID]/banners.xml Banners are retrieved using t[\'show name][\'_banners\'], for example: >>> t = Tvdb(banners = True) >>> t[\'scrubs\'][\'_banners\'].keys() [\'fanart\', \'poster\', \'series\', \'season\'] >>> t[\'scrubs\'][\'_banners\'][\'poster\']...
def _parseBanners(self, sid):
log().debug(('Getting season banners for %s' % sid)) bannersEt = self._getetsrc((self.config['url_seriesBanner'] % sid)) banners = {} for cur_banner in bannersEt.findall('Banner'): bid = cur_banner.find('id').text btype = cur_banner.find('BannerType') btype2 = cur_ban...
'Parsers actors XML, from http://thetvdb.com/api/[APIKEY]/series/[SERIES ID]/actors.xml Actors are retrieved using t[\'show name][\'_actors\'], for example: >>> t = Tvdb(actors = True) >>> actors = t[\'scrubs\'][\'_actors\'] >>> type(actors) <class \'tvdb_api.Actors\'> >>> type(actors[0]) <class \'tvdb_api.Actor\'> >>>...
def _parseActors(self, sid):
log().debug(('Getting actors for %s' % sid)) actorsEt = self._getetsrc((self.config['url_actorsInfo'] % sid)) cur_actors = Actors() for curActorItem in actorsEt.findall('Actor'): curActor = Actor() for curInfo in curActorItem: tag = curInfo.tag.lower() va...
'Takes a series ID, gets the epInfo URL and parses the TVDB XML file into the shows dict in layout: shows[series_id][season_number][episode_number]'
def _getShowData(self, sid, language):
if (self.config['language'] is None): log().debug('Config language is none, using show language') if (language is None): raise tvdb_error("config['language'] was None, this should not happen") getShowInLanguage = language else: log(...
'Takes show name, returns the correct series ID (if the show has already been grabbed), or grabs all episodes and returns the correct SID.'
def _nameToSid(self, name):
if (name in self.corrections): log().debug(('Correcting %s to %s' % (name, self.corrections[name]))) sid = self.corrections[name] else: log().debug(('Getting show %s' % name)) selected_series = self._getSeries(name) (sname, sid) = (selected_series['seriesna...
def __getitem__(self, key):
    """Handles tvdb_instance['seriesname'] calls. The dict index should be the show id"""
    # Numeric keys are TVDB series ids: fetch the show on demand.
    if isinstance(key, (int, long)):
        if key not in self.shows:
            self._getShowData(key, self.config['language'])
        return self.shows[key]
    # Otherwise treat the key as a case-insensitive series name.
    sid = self._nameToSid(key.lower())
    log().debug(('Got series id %s' % sid))
    return self.shows[sid]
'The location of the cache directory'
@locked_function def __init__(self, cache_location, max_age=21600):
self.max_age = max_age self.cache_location = cache_location if (not os.path.exists(self.cache_location)): try: os.mkdir(self.cache_location) except OSError as e: if ((e.errno == errno.EEXIST) and os.path.isdir(self.cache_location)): pass el...
def default_open(self, request):
    """Handles GET requests, if the response is cached it returns it

    Returns None for non-GET requests and cache misses so urllib2 falls
    through to the next handler.
    """
    # BUG FIX: the original used "is not 'GET'", an identity comparison
    # that only works because CPython interns short string literals.
    # Compare by value instead.
    if request.get_method() != 'GET':
        return None
    if exists_in_cache(self.cache_location, request.get_full_url(), self.max_age):
        return CachedResponse(self.cache_location, request.get_full_url(), set_cache_header=True)
    return None
'Gets a HTTP response, if it was a GET request and the status code starts with 2 (200 OK etc) it caches it and returns a CachedResponse'
def http_response(self, request, response):
if ((request.get_method() == 'GET') and str(response.code).startswith('2')): if ('x-local-cache' not in response.info()): set_cache_header = store_in_cache(self.cache_location, request.get_full_url(), response) else: set_cache_header = True return CachedResponse(self....
def info(self):
    """Returns headers"""
    # NOTE(review): presumably mimics a urllib2 response object so callers
    # can treat a cached response like a live one -- confirm with class.
    return self.headers
def geturl(self):
    """Returns original URL"""
    return self.url
'Helper function, lists series with corresponding ID'
def _displaySeries(self, allSeries, limit=6):
if (limit is not None): toshow = allSeries[:limit] else: toshow = allSeries print 'TVDB Search Results:' for (i, cshow) in enumerate(toshow): i_show = (i + 1) log().debug(('Showing allSeries[%s], series %s)' % (i_show, allSeries[i]['seriesname']))) ...
'Creates a profiler for a function. Every profiler has its own log file (the name of which is derived from the function name). FuncProfile registers an atexit handler that prints profiling information to sys.stderr when the program terminates.'
def __init__(self, fn, skip=0, filename=None, immediate=False, dirs=False, sort=None, entries=40):
self.fn = fn self.skip = skip self.filename = filename self.immediate = immediate self.dirs = dirs self.sort = (sort or ('cumulative', 'time', 'calls')) if isinstance(self.sort, str): self.sort = (self.sort,) self.entries = entries self.reset_stats() atexit.register(self....
'Profile a singe call to the function.'
def __call__(self, *args, **kw):
self.ncalls += 1 if (self.skip > 0): self.skip -= 1 self.skipped += 1 return self.fn(*args, **kw) if FuncProfile.in_profiler: return self.fn(*args, **kw) profiler = self.Profile() try: FuncProfile.in_profiler = True return profiler.runcall(self.fn, *ar...
'Print profile information to sys.stdout.'
def print_stats(self):
funcname = self.fn.__name__ filename = self.fn.func_code.co_filename lineno = self.fn.func_code.co_firstlineno print print '*** PROFILER RESULTS ***' print ('%s (%s:%s)' % (funcname, filename, lineno)) print ('function called %d times' % self.ncalls), if self.skipped...
def reset_stats(self):
    """Reset accumulated profiler statistics."""
    # Zero the counters and start over with an empty Stats object.
    self.ncalls = 0
    self.skipped = 0
    self.stats = pstats.Stats(Profile())
def atexit(self):
    """Stop profiling and print profile information to sys.stdout.

    This function is registered as an atexit hook.
    """
    if self.immediate:
        # Immediate mode already printed stats after every call.
        return
    self.print_stats()
'Creates a profiler for a function. Every profiler has its own log file (the name of which is derived from the function name). TraceFuncCoverage registers an atexit handler that prints profiling information to sys.stderr when the program terminates. The log file is not removed and remains there to clutter the current w...
def __init__(self, fn):
self.fn = fn self.logfilename = (fn.__name__ + '.cprof') self.ncalls = 0 atexit.register(self.atexit)
def __call__(self, *args, **kw):
    """Profile a single call to the function."""
    self.ncalls += 1
    # Re-entrant calls must not start a second trace.
    if TraceFuncCoverage.tracing:
        return self.fn(*args, **kw)
    TraceFuncCoverage.tracing = True
    try:
        return self.tracer.runfunc(self.fn, *args, **kw)
    finally:
        TraceFuncCoverage.tracing = False
'Stop profiling and print profile information to sys.stderr. This function is registered as an atexit hook.'
def atexit(self):
funcname = self.fn.__name__ filename = self.fn.func_code.co_filename lineno = self.fn.func_code.co_firstlineno print print '*** COVERAGE RESULTS ***' print ('%s (%s:%s)' % (funcname, filename, lineno)) print ('function called %d times' % self.ncalls) print fs = F...
'Mark all executable source lines in fn as executed 0 times.'
def find_source_lines(self):
strs = trace.find_strings(self.filename) lines = trace.find_lines_from_code(self.fn.func_code, strs) self.firstcodelineno = sys.maxint for lineno in lines: self.firstcodelineno = min(self.firstcodelineno, lineno) self.sourcelines.setdefault(lineno, 0) if (self.firstcodelineno == sys....
def mark(self, lineno, count=1):
    """Mark a given source line as executed count times.

    Multiple calls to mark for the same lineno add up.
    """
    previous = self.sourcelines.get(lineno, 0)
    self.sourcelines[lineno] = previous + count
def count_never_executed(self):
    """Count statements that were never executed."""
    total = 0
    for offset, line in enumerate(self.source):
        lineno = self.firstlineno + offset
        # Only non-blank lines with an explicit zero count qualify;
        # lines absent from sourcelines are not executable statements.
        if self.sourcelines.get(lineno) == 0 and not self.blank_rx.match(line):
            total += 1
    return total
'Return annotated source code for the function.'
def __str__(self):
lines = [] lineno = self.firstlineno for line in self.source: counter = self.sourcelines.get(lineno) if (counter is None): prefix = (' ' * 7) elif (counter == 0): if self.blank_rx.match(line): prefix = (' ' * 7) else: ...
'Profile a singe call to the function.'
def __call__(self, *args, **kw):
fn = self.fn timer = self.timer self.ncalls += 1 try: start = timer() return fn(*args, **kw) finally: duration = (timer() - start) self.totaltime += duration if self.immediate: funcname = fn.__name__ filename = fn.func_code.co_filename ...
'handler is only used if value is not string nor unicode, prototype: def handler(value) -> str/unicode'
def __init__(self, key, priority, description, text_handler=None, type=None, filter=None, conversion=None):
assert (MIN_PRIORITY <= priority <= MAX_PRIORITY) assert isinstance(description, unicode) self.metadata = None self.key = key self.description = description self.values = [] if (type and (not isinstance(type, (tuple, list)))): type = (type,) self.type = type self.text_handler...
def __setattr__(self, key, value):
    """Add a new value to data with name 'key'. Skip duplicates."""
    # Only keys pre-declared in __data may be assigned; values go into
    # a set-like container, which handles duplicate suppression.
    if key in self.__data:
        self.__data[key].add(value)
    else:
        raise KeyError((_("%s has no metadata '%s'") % (self.__class__.__name__, key)))
def get(self, key, default=None, index=0):
    """Read first value of tag with name 'key'.

    Returns 'default' when the item is missing and a default was given;
    raises ValueError when missing with no default.
    """
    item = self.getItem(key, index)
    if item is not None:
        return item.value
    if default is not None:
        return default
    raise ValueError(("Metadata has no value '%s' (index %s)" % (key, index)))
def getText(self, key, default=None, index=0):
    """Read first value, as unicode string, of tag with name 'key'.

    Returns 'default' when the item is missing.
    """
    item = self.getItem(key, index)
    if item is None:
        return default
    return item.text
def __str__(self):
    """Create a multi-line ASCII string (end of line is "\\n") which represents all datas.

    @see __unicode__() and exportPlaintext()
    """
    lines = self.exportPlaintext()
    return '\n'.join((makePrintable(entry, 'ASCII') for entry in lines))
'Create a multi-line Unicode string (end of line is "\n") which represents all datas. >>> a = RootMetadata() >>> a.copyright = unicode("© Hachoir", "UTF-8") >>> print repr(unicode(a)) u\'Metadata:\n- Copyright: \xa9 Hachoir\' @see __str__() and exportPlaintext()'
def __unicode__(self):
return '\n'.join(self.exportPlaintext())
'Convert metadata to multi-line Unicode string and skip datas with priority lower than specified priority. Default priority is Metadata.MAX_PRIORITY. If human flag is True, data key are translated to better human name (eg. "bit_rate" becomes "Bit rate") which may be translated using gettext. If priority is too small, m...
def exportPlaintext(self, priority=None, human=True, line_prefix=u'- ', title=None):
if (priority is not None): priority = max(priority, MIN_PRIORITY) priority = min(priority, MAX_PRIORITY) else: priority = MAX_PRIORITY if (not title): title = self.header text = [('%s:' % title)] for data in sorted(self): if (priority < data.priority): ...
'Add a new group (metadata of a sub-document). Returns False if the group is skipped, True if it has been added.'
def addGroup(self, key, metadata, header=None):
if (not metadata): self.warning(('Skip empty group %s' % key)) return False if key.endswith('[]'): key = key[:(-2)] if (key in self.__key_counter): self.__key_counter[key] += 1 else: self.__key_counter[key] = 1 key += ('[%u]' % sel...
'Use different min/max values depending on value type (datetime with timezone, datetime or date).'
def __call__(self, value):
if (not isinstance(value, self.types)): return True if (hasattr(value, 'tzinfo') and value.tzinfo): return (self.min_tz <= value <= self.max_tz) elif isinstance(value, datetime): return (self.min <= value <= self.max) else: return (self.min_date <= value <= self.max_date)...
'Use a file to store all messages. The UTF-8 encoding will be used. Write an informative message if the file can\'t be created. @param filename: C{L{string}}'
def setFilename(self, filename, append=True):
filename = os.path.expanduser(filename) filename = os.path.realpath(filename) append = os.access(filename, os.F_OK) try: import codecs if append: self.__file = codecs.open(filename, 'a', 'utf-8') else: self.__file = codecs.open(filename, 'w', 'utf-8') ...
'Write a new message : append it in the buffer, display it to the screen (if needed), and write it in the log file (if needed). @param level: Message level. @type level: C{int} @param text: Message content. @type text: C{str} @param ctxt: The caller instance.'
def newMessage(self, level, text, ctxt=None):
if (((level < self.LOG_ERROR) and config.quiet) or ((level <= self.LOG_INFO) and (not config.verbose))): return if config.debug: from lib.hachoir_core.error import getBacktrace backtrace = getBacktrace(None) if backtrace: text += ('\n\n' + backtrace) _text = text ...
def info(self, text):
    """New informative message. @type text: C{str}"""
    # Convenience wrapper: dispatch through newMessage at LOG_INFO level.
    self.newMessage(Log.LOG_INFO, text)
def warning(self, text):
    """New warning message. @type text: C{str}"""
    # Convenience wrapper: dispatch through newMessage at LOG_WARN level.
    self.newMessage(Log.LOG_WARN, text)
def error(self, text):
    """New error message. @type text: C{str}"""
    # Convenience wrapper: dispatch through newMessage at LOG_ERROR level.
    self.newMessage(Log.LOG_ERROR, text)
'Constructor: - max_time: Maximum wanted duration of the whole benchmark (default: 5 seconds, minimum: 1 second). - min_count: Minimum number of function calls to get good statistics (defaut: 5, minimum: 1). - progress_time: Time between each "progress" message (default: 1 second, minimum: 250 ms). - max_count: Maximum...
def __init__(self, max_time=5.0, min_count=5, max_count=None, progress_time=1.0):
self.max_time = max(max_time, 1.0) self.min_count = max(min_count, 1) self.max_count = max_count self.progress_time = max(progress_time, 0.25) self.verbose = False self.disable_gc = False
def formatTime(self, value):
    """Format a time delta to string: use humanDurationNanosec()"""
    # 'value' is presumably in seconds (scaled by 1e9 to nanoseconds
    # for the helper) -- confirm against callers.
    return humanDurationNanosec((value * 1000000000))
def displayStat(self, stat):
    """Display statistics to stdout:
    - best time (minimum)
    - average time (arithmetic average)
    - worst time (maximum)
    - total time (sum)
    """
    # Arithmetic average: a geometric mean would collapse to zero if any
    # sample is zero and loses float precision over many multiplications.
    average = (stat.getSum() / len(stat))
    raw = (stat.getMin(), average, stat.getMax(), stat.getSum())
    values = tuple((self.formatTime(item) for item in raw))
    print (_('Benchmark: best=%s average=%s worst=%s total=%s') % values)
'Call func(*args, **kw) as many times as needed to get good statistics. Algorithm: - call the function once - compute needed number of calls - and then call function N times To compute number of calls, parameters are: - time of first function call - minimum number of calls (min_count attribute) - maximum test time (max...
def _run(self, func, args, kw):
stat = BenchmarkStat() diff = self._runOnce(func, args, kw) best = diff stat.append(diff) total_time = diff count = int(floor((self.max_time / diff))) count = max(count, self.min_count) if self.max_count: count = min(count, self.max_count) if (count == 1): return stat...
def validateStat(self, stat):
    """Check statistics and raise a BenchmarkError if they are invalid.

    Rejects empty statistics and statistics containing only nul values.
    """
    if not stat:
        raise BenchmarkError('empty statistics')
    if not stat.getSum():
        raise BenchmarkError('nul statistics')
'Run function func(*args, **kw), validate statistics, and display the result on stdout. Disable garbage collector if asked too.'
def run(self, func, *args, **kw):
if self.disable_gc: try: import gc except ImportError: self.disable_gc = False if self.disable_gc: gc_enabled = gc.isenabled() gc.disable() else: gc_enabled = False stat = self._run(func, args, kw) if gc_enabled: gc.enable() ...
def index(self, key):
    """Search a value by its key and returns its index.

    Returns None if the key doesn't exist.
    """
    try:
        return self._index[key]
    except KeyError:
        return None
'Get item with specified key. To get a value by it\'s index, use mydict.values[index] >>> d=Dict( (("two", "deux"), ("one", "un")) ) >>> d["one"] \'un\''
def __getitem__(self, key):
return self._value_list[self._index[key]]
def append(self, key, value):
    """Append new value"""
    if key in self._index:
        raise UniqKeyError((_("Key '%s' already exists") % key))
    # New entry goes at the end; record its position in the index.
    position = len(self._value_list)
    self._index[key] = position
    self._key_list.append(key)
    self._value_list.append(value)
def iteritems(self):
    """Create a generator to iterate on: (key, value) pairs in insertion order."""
    for position in xrange(len(self)):
        (yield (self._key_list[position], self._value_list[position]))
def itervalues(self):
    """Create an iterator on values"""
    return iter(self._value_list)
def iterkeys(self):
    """Create an iterator on keys"""
    return iter(self._key_list)
def replace(self, oldkey, newkey, new_value):
    """Replace an existing value with another one.

    The entry keeps its position; only the key and value change.
    """
    position = self._index[oldkey]
    self._value_list[position] = new_value
    if oldkey == newkey:
        return
    # Rekey the entry in both the index and the ordered key list.
    del self._index[oldkey]
    self._index[newkey] = position
    self._key_list[position] = newkey
'Delete item at position index. May raise IndexError. >>> d=Dict( ((6, \'six\'), (9, \'neuf\'), (4, \'quatre\')) ) >>> del d[1] >>> d {6: \'six\', 4: \'quatre\'}'
def __delitem__(self, index):
if (index < 0): index += len(self._value_list) if (not (0 <= index < len(self._value_list))): raise IndexError((_('list assignment index out of range (%s/%s)') % (index, len(self._value_list)))) del self._value_list[index] del self._key_list[index] for (key, item_in...
'Insert an item at specified position index. >>> d=Dict( ((6, \'six\'), (9, \'neuf\'), (4, \'quatre\')) ) >>> d.insert(1, \'40\', \'quarante\') >>> d {6: \'six\', \'40\': \'quarante\', 9: \'neuf\', 4: \'quatre\'}'
def insert(self, index, key, value):
if (key in self): raise UniqKeyError((_("Insert error: key '%s' ready exists") % key)) _index = index if (index < 0): index += len(self._value_list) if (not (0 <= index <= len(self._value_list))): raise IndexError((_("Insert error: index '%s' is inva...
def connect(self, event_name, handler):
    """Connect an event handler to an event. Append it to handlers list."""
    # First handler for an event creates its list.
    self.handlers.setdefault(event_name, []).append(handler)
def raiseEvent(self, event_name, *args):
    """Raise an event: call each handler for this event_name."""
    # Unknown events simply have no handlers to call.
    for handler in self.handlers.get(event_name, ()):
        handler(*args)
def __init__(self, parent, name, size, description=None):
    """Constructor: see L{Field.__init__} for parameter description"""
    Field.__init__(self, parent, name, size, description)
def __init__(self, parent, name, description=None):
    """Constructor: see L{Field.__init__} for parameter description"""
    # A single-bit field: delegates to RawBits with a fixed size of 1.
    RawBits.__init__(self, parent, name, 1, description=description)
def readFirstFields(self, number):
    """Read first number fields if they are not read yet.

    Returns number of new added fields.
    """
    missing = number - self.current_length
    if missing <= 0:
        # Already have at least 'number' fields: nothing to do.
        return 0
    return self.readMoreFields(missing)
def __init__(self, stream, description=None):
    """Parser constructor

    @param stream: Data input stream (see L{InputStream})
    @param description: (optional) String description
    """
    # Subclasses must declare a valid 'endian' class attribute before
    # this runs; the root field set spans whatever size the stream reports.
    assert (hasattr(self, 'endian') and (self.endian in (BIG_ENDIAN, LITTLE_ENDIAN)))
    GenericFieldSet.__init__(self, None, 'root', stream, description, stream.askSize(self))
'Try to fix last field when we know current field set size. Returns new added field if any, or None.'
def _fixLastField(self):
assert (self._size is not None) message = ['stop parser'] self._field_generator = None while (self._size < self._current_size): field = self._deleteField((len(self._fields) - 1)) message.append(('delete field %s' % field.path)) assert (self._current_size <= self._size) b...
def __init__(self, parent, name, size=None, description=None):
    """Set default class attributes, set right address if None address is given.

    @param parent: Parent field of this field (must itself be a Field)
    @param name: Name of the field, unique within parent
    @param size: Size in bits, or None if not yet known (must be >= 0)
    @param description: (optional) field description
    """
    assert issubclass(parent.__class__, Field)
    assert ((size is None) or (0 <= size))
    self._parent = parent
    self._name = name
    # Address is assigned by the parent: fields are laid out sequentially.
    self._address = parent.nextFieldAddress()
    self._size = size
    self._description = description
def __nonzero__(self):
    """Method called by code like "if field: (...)". Always returns True"""
    return True
def __init__(self, parent, name, stream, description=None, size=None):
    """Constructor

    @param parent: Parent field set, None for root parser
    @param name: Name of the field, unique within parent
    @param stream: Input stream from which data are read
    @param description: (optional) description
    @param size: (optional) size in bits
    """
    BasicFieldSet.__init__(self, parent, name, stream, description, size)
    self._fields = Dict()
    # Lazy parsing: fields are produced on demand by this generator.
    self._field_generator = self.createFields()
    self._array_cache = {}
    self.__is_feeding = False
def reset(self):
    """Reset a field set:
    * clear fields ;
    * restart field generator ;
    * set current size to zero ;
    * clear field array cache.
    But keep: name, value, description and size.
    """
    BasicFieldSet.reset(self)
    self._fields = Dict()
    self._field_generator = self.createFields()
    self._current_size = 0
    self._array_cache = {}
'Returns number of fields, may need to create all fields if it\'s not done yet.'
def __len__(self):
if (self._field_generator is not None): self._feedAll() return len(self._fields)
'Add a field to the field set: * add it into _fields * update _current_size May raise a StopIteration() on error'
def _addField(self, field):
if (not issubclass(field.__class__, Field)): raise ParserError(("Field type (%s) is not a subclass of 'Field'!" % field.__class__.__name__)) assert isinstance(field._name, str) if field._name.endswith('[]'): self.setUniqueFieldName(field) if config.debug: ...
'Try to fix last field when we know current field set size. Returns new added field if any, or None.'
def _fixLastField(self):
assert (self._size is not None) message = ['stop parser'] self._field_generator = None while (self._size < self._current_size): field = self._deleteField((len(self._fields) - 1)) message.append(('delete field %s' % field.path)) assert (self._current_size <= self._size) s...
def _fixFeedError(self, exception):
    """Try to fix a feeding error.

    Returns False if error can't be fixed, otherwise returns new field
    if any, or None.
    """
    # Fixing needs a known total size and the autofix option enabled.
    if self._size is None:
        return False
    if not self.autofix:
        return False
    self.warning(unicode(exception))
    return self._fixLastField()
'Return the field if it was found, None else'
def _feedUntil(self, field_name):
if (self.__is_feeding or (self._field_generator and self._field_generator.gi_running)): self.warning(('Unable to get %s (and generator is already running)' % field_name)) return None try: while True: field = self._field_generator.next() sel...
'Read more number fields, or do nothing if parsing is done. Returns number of new added fields.'
def readMoreFields(self, number):
if (self._field_generator is None): return 0 oldlen = len(self._fields) try: for index in xrange(number): self._addField(self._field_generator.next()) except HACHOIR_ERRORS as err: if (self._fixFeedError(err) is False): raise except StopIteration: ...
'Create a generator to iterate on each field, may create new fields when needed'
def __iter__(self):
try: done = 0 while True: if (done == len(self._fields)): if (self._field_generator is None): break self._addField(self._field_generator.next()) for field in self._fields.values[done:]: (yield field) ...
'Create a field to seek to specified address, or None if it\'s not needed. May raise an (ParserError) exception if address is invalid.'
def seekBit(self, address, name='padding[]', description=None, relative=True, null=False):
if relative: nbits = (address - self._current_size) else: nbits = (address - (self.absolute_address + self._current_size)) if (nbits < 0): raise ParserError('Seek error, unable to go back!') if (0 < nbits): if null: return createNullField(self, ...
def seekByte(self, address, name='padding[]', description=None, relative=True, null=False):
    """Same as seekBit(), but with address in byte."""
    bit_address = address * 8
    return self.seekBit(bit_address, name, description, relative, null=null)
def getFieldByAddress(self, address, feed=True):
    """Only search in existing fields"""
    if feed and (self._field_generator is not None):
        self._feedAll()
    if address >= self._current_size:
        # Address beyond parsed data: nothing to return.
        return None
    pos = lowerBound(self._fields.values, (lambda field: ((field.address + field.size) <= address)))
    if pos is None:
        return None
    return self._fields.values[pos]
'Can only write in existing fields (address < self._current_size)'
def writeFieldsIn(self, old_field, address, new_fields):
total_size = sum((field.size for field in new_fields)) if (old_field.size < total_size): raise ParserError(('Unable to write fields at address %s (too big)!' % address)) replace = [] size = (address - old_field.address) assert (0 <= size) if (0 < size): pa...
'Is the array empty or not?'
def __nonzero__(self):
if self._cache: return True else: return (0 in self)
'Number of fields in the array'
def __len__(self):
total = (self._max_index + 1) if (not self._known_size): for index in itertools.count(total): try: field = self[index] total += 1 except MissingField: break return total
'Get a field of the array. Returns a field, or raise MissingField exception if the field doesn\'t exist.'
def __getitem__(self, index):
try: value = self._cache[index] except KeyError: try: value = self.fieldset[(self._format % index)] except MissingField: self._known_size = True raise self._cache[index] = value self._max_index = max(index, self._max_index) return v...
'Iterate in the fields in their index order: field[0], field[1], ...'
def __iter__(self):
for index in itertools.count(0): try: (yield self[index]) except MissingField: raise StopIteration()
def __init__(self, parent, name, nbytes, description='Padding', pattern=None):
    """pattern is None or repeated string"""
    assert ((pattern is None) or isinstance(pattern, str))
    Bytes.__init__(self, parent, name, nbytes, description)
    self.pattern = pattern
    # Whether the pattern is shown in the display is decided up front.
    self._display_pattern = self.checkPattern()
def read(self, address, size):
    """Read 'size' bits at position 'address' (in bits) from the beginning of the stream."""
    # Abstract method: concrete stream classes must override it.
    raise NotImplementedError
def readInteger(self, address, signed, nbits, endian):
    """Read an integer number"""
    raw = self.readBits(address, nbits, endian)
    # Reinterpret values with the sign bit set as two's complement.
    if signed and raw >= (1 << (nbits - 1)):
        raw -= (1 << nbits)
    return raw
def searchBytesLength(self, needle, include_needle, start_address=0, end_address=None):
    """If include_needle is True, add its length to the result.

    Returns None if needle can't be found.
    """
    found = self.searchBytes(needle, start_address, end_address)
    if found is None:
        return None
    # Addresses are in bits; the returned length is in bytes.
    nbytes = (found - start_address) // 8
    if include_needle:
        return nbytes + len(needle)
    return nbytes
'Search some bytes in [start_address;end_address[. Addresses must be aligned to byte. Returns the address of the bytes if found, None else.'
def searchBytes(self, needle, start_address=0, end_address=None):
if (start_address % 8): raise InputStreamError('Unable to search bytes with address with bit granularity') length = len(needle) size = max((3 * length), 4096) buffer = '' if (self._size and ((end_address is None) or (self._size < end_address))): end_address = ...
'Read bytes from the stream at specified address (in bits). Address have to be a multiple of 8. nbytes have to in 1..MAX_READ_NBYTES (64 KB). This method is only supported for StringOuputStream (not on FileOutputStream). Return read bytes as byte string.'
def readBytes(self, address, nbytes):
assert ((address % 8) == 0) assert (1 <= nbytes <= MAX_READ_NBYTES) self._output.flush() oldpos = self._output.tell() try: self._output.seek(0) try: return self._output.read(nbytes) except IOError as err: if (err[0] == EBADF): raise Out...