source: trunk/anuga_core/source/anuga/caching/caching.py @ 8050

Last change on this file since 8050 was 7317, checked in by rwilson, 15 years ago

Replaced 'print' statements with log.critical() calls.

File size: 70.4 KB
Line 
1# =============================================================================
2# caching.py - Supervised caching of function results.
3# Copyright (C) 1999, 2000, 2001, 2002 Ole Moller Nielsen
4# Australian National University (1999-2003)
5# Geoscience Australia (2003-present)
6#
7#    This program is free software; you can redistribute it and/or modify
8#    it under the terms of the GNU General Public License as published by
9#    the Free Software Foundation; either version 2 of the License, or
10#    (at your option) any later version.
11#
12#    This program is distributed in the hope that it will be useful,
13#    but WITHOUT ANY WARRANTY; without even the implied warranty of
14#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15#    GNU General Public License (http://www.gnu.org/copyleft/gpl.html)
16#    for more details.
17#
18#    You should have received a copy of the GNU General Public License
19#    along with this program; if not, write to the Free Software
20#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
21#
22#
23# Contact address: Ole.Nielsen@ga.gov.au
24#
25# Version 1.5.6 February 2002
26# =============================================================================
27 
28"""Module caching.py - Supervised caching of function results.
29
30Public functions:
31
32cache(my_F,args) -- Cache values returned from callable object my_F given args.
33cachestat() --      Reports statistics about cache hits and time saved.
34test() --       Conducts a basic test of the caching functionality.
35
36See doc strings of individual functions for detailed documentation.
37"""
38
39# -----------------------------------------------------------------------------
40# Initialisation code
41
42# Determine platform
43#
44from os import getenv
45import types
46
import os

# Flag whether we are running on a unix-like platform.  Anything whose
# os.name is not one of the known DOS/Windows flavours counts as unix.
# (The original Windows name list is kept verbatim.)
unix = os.name not in ['nt', 'dos', 'win32', 'what else?']
52
53import anuga.utilities.log as log
54
55import numpy as num
56
57#from future
58
cache_dir = '.python_cache'

# Make default caching directory name
# We are changing the 'data directory' environment variable from
# INUNDATIONHOME to ANUGADATA - this gives a changeover.
if unix:
    homedir = getenv('ANUGADATA')
    if not homedir:
        homedir = getenv('INUNDATIONHOME')

    if not homedir:
        homedir = '~'
    else:
        # Since homedir will be a group area, individually label the caches
        user = getenv('LOGNAME')
        if user:
            # BUG FIX: this previously read 'if not user:', which raised
            # TypeError (None + str) when LOGNAME was unset and never
            # labelled the cache when LOGNAME actually was set.
            cache_dir += '_' + user
   
    CR = '\n'
else:
    homedir = 'c:'
    CR = '\r\n'  #FIXME: Not tested under windows
 
cachedir = os.path.join(homedir, cache_dir)

# -----------------------------------------------------------------------------
# Options dictionary with default values - to be set by user
#

options = { 
  'cachedir': cachedir,  # Default cache directory
  'maxfiles': 1000000,   # Maximum number of cached files
  'savestat': True,      # Log caching info to stats file
  'verbose': True,       # Write messages to standard output
  'bin': True,           # Use binary format (more efficient)
  'compression': True,   # Use zlib compression
  'bytecode': True,      # Recompute if bytecode has changed
  'expire': False        # Automatically remove files that have been accessed
                         # least recently
}
99
100# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
101
def set_option(key, value):
  """Assign a new value to an entry in the module-level options dictionary.

  USAGE:
    set_option(key, value)

  ARGUMENTS:
    key --   Existing key in the options dictionary. (Required)
    value -- New value to store under key. (Required)

  DESCRIPTION:
    Updates options[key] in place.  A KeyError is raised when key is not
    already present, so unknown option names cannot be created by accident.
  """

  if key not in options:
    raise KeyError(key)  # Refuse to create unknown option keys

  options[key] = value
121
122# -----------------------------------------------------------------------------
123# Function cache - the main routine
124
def cache(my_F, 
          args=(), 
          kwargs={}, 
          dependencies=None, 
          cachedir=None,
          verbose=None, 
          compression=None, 
          evaluate=False, 
          test=False, 
          clear=False,
          return_filename=False):
  """Supervised caching of function results. Also known as memoization.

  USAGE:
    result = cache(my_F, args, kwargs, dependencies, cachedir, verbose,
                   compression, evaluate, test, return_filename)

  ARGUMENTS:
    my_F --            Callable object (Required)
    args --            Arguments to my_F (Default: ())
    kwargs --          Keyword arguments to my_F (Default: {})
    dependencies --    Filenames that my_F depends on (Default: None)
    cachedir --        Directory for cache files (Default: options['cachedir'])
    verbose --         Flag verbose output to stdout
                       (Default: options['verbose'])
    compression --     Flag zlib compression (Default: options['compression'])
    evaluate --        Flag forced evaluation of my_F (Default: False)
    test --            Flag test for cached results (Default: False)
    clear --           Flag delete cached results (Default: False)
    return_filename -- Flag return of cache filename (Default: False)

  DESCRIPTION:
    A Python function call of the form

      result = my_F(arg1,...,argn)

    can be replaced by

      from caching import cache
      result = cache(my_F,(arg1,...,argn))

  The latter form returns the same output as the former but reuses cached
  results if the function has been computed previously in the same context.
  'result' and the arguments can be simple types, tuples, list, dictionaries or
  objects, but not unhashable types such as functions or open file objects.
  The function 'my_F' may be a member function of an object or a module.

  This type of caching is particularly useful for computationally intensive
  functions with few frequently used combinations of input arguments. Note that
  if the inputs or output are very large caching might not save time because
  disc access may dominate the execution time.

  If the function definition changes after a result has been cached it will be
  detected by examining the functions bytecode (co_code, co_consts,
  func_defaults, co_argcount) and it will be recomputed.

  LIMITATIONS:
    1 Caching uses function(*args, **kwargs) to evaluate and will work
      with anything that can be pickled, so any limitation in function(,)
      or pickle extends to caching.
    2 A function to be cached should not depend on global variables
      as wrong results may occur if globals are changed after a result has
      been cached.

  -----------------------------------------------------------------------------
  Additional functionality:

  Keyword args
    Keyword arguments (kwargs) can be added as a dictionary of keyword: value
    pairs, following Python's 'extended call syntax'.

    A Python function call of the form

      result = my_F(arg1,...,argn, kwarg1=val1,...,kwargm=valm)

    is then cached as follows

      from caching import cache
      result = cache(my_F,(arg1,...,argn), {kwarg1:val1,...,kwargm:valm})

    The default value of kwargs is {}

  Explicit dependencies:
    The call
      cache(my_F,(arg1,...,argn), dependencies = <list of filenames>)
    Checks the size, creation time and modification time of each listed file.
    If any file has changed the function is recomputed and the results stored
    again.

  Specify caching directory:
    The call
      cache(my_F,(arg1,...,argn), cachedir = <cachedir>)
    designates <cachedir> where cached data are stored. Use ~ to indicate users
    home directory - not $HOME. The default is ~/.python_cache on a UNIX
    platform and c:/.python_cache on a Win platform.

  Silent operation:
    The call
      cache(my_F,(arg1,...,argn), verbose=False)
    suppresses messages to standard output.

  Compression:
    The call
      cache(my_F,(arg1,...,argn), compression=False)
    disables compression. (Default: compression=True). If the requested compressed
    or uncompressed file is not there, it'll try the other version.

  Forced evaluation:
    The call
      cache(my_F,(arg1,...,argn), evaluate=True)
    forces the function to evaluate even though cached data may exist.

  Testing for presence of cached result:
    The call
      cache(my_F,(arg1,...,argn), test=True)
    retrieves cached result if it exists, otherwise None. The function will not
    be evaluated. If both evaluate and test are switched on, evaluate takes
    precedence.
    ??NOTE: In case of hash collisions, this may return the wrong result as
    ??it only checks if *a* cached result is present.
    # I think this was due to the bytecode option being False for some reason. (23/1/2009).

  Obtain cache filenames:
    The call
      cache(my_F,(arg1,...,argn), return_filename=True)
    returns the hashed base filename under which this function and its
    arguments would be cached

  Clearing cached results:
    The call
      cache(my_F,'clear')
    clears all cached data for 'my_F' and
      cache('clear')
    clears all cached data.

    NOTE: The string 'clear' can be passed an *argument* to my_F using
      cache(my_F,('clear',)) or cache(my_F,tuple(['clear'])).

    New form of clear:
      cache(my_F,(arg1,...,argn), clear=True)
    clears cached data for particular combination my_F and args

  """

  # Imports and input checks
  #
  # NOTE(review): the signature uses a mutable default for kwargs ({}).
  # The body only reads it, but callers must never mutate the returned
  # default - confirm before refactoring.
  import types, time, string

  if not cachedir:
    cachedir = options['cachedir']

  if verbose == None:  # Do NOT write 'if not verbose:', it could be zero.
    verbose = options['verbose']

  if compression == None:  # Do NOT write 'if not compression:',
                           # it could be zero.
    compression = options['compression']

  # Create cache directory if needed
  CD = checkdir(cachedir,verbose)

  # Handle the case cache('clear')
  if type(my_F) == types.StringType:
    if string.lower(my_F) == 'clear':
      clear_cache(CD,verbose=verbose)
      return

  # Handle the case cache(my_F, 'clear')
  if type(args) == types.StringType:
    if string.lower(args) == 'clear':
      clear_cache(CD,my_F,verbose=verbose)
      return

  # Force singleton arg into a tuple
  if type(args) != types.TupleType:
    args = tuple([args])
 
  # Check that kwargs is a dictionary
  if type(kwargs) != types.DictType:
    raise TypeError   
   
  # Hash arguments (and keyword args) to integer
  arghash = myhash((args, kwargs))
 
  # Get sizes and timestamps for files listed in dependencies.
  # Force singletons into a tuple.
  if dependencies and type(dependencies) != types.TupleType \
                  and type(dependencies) != types.ListType:
    dependencies = tuple([dependencies])
  deps = get_depstats(dependencies)

  # Extract function name from my_F object
  funcname = get_funcname(my_F)

  # Create cache filename
  FN = funcname+'['+`arghash`+']'  # The symbol '(' does not work under unix

  if return_filename:
    return(FN)

  if clear:
    # Remove every cache file (Result/Args/Admin, compressed or not)
    # associated with this function/argument combination.
    for file_type in file_types:
      file_name = CD+FN+'_'+file_type
      for fn in [file_name, file_name + '.z']:
        if os.access(fn, os.F_OK):             
          if unix:
            os.remove(fn)
          else:
            # FIXME: os.remove doesn't work under windows       
            os.system('del '+fn)
          if verbose is True:
            log.critical('MESSAGE (caching): File %s deleted' % fn)
        ##else:
        ##  log.critical('%s was not accessed' % fn)
    return None


  #-------------------------------------------------------------------       
 
  # Check if previous computation has been cached
  if evaluate is True:
    Retrieved = None  # Force evaluation of my_F regardless of caching status.
    reason = 5
  else:
    # CacheLookup returns the cached result (or None), the filename under
    # which new results must be stored (hash collisions append 'x'), and a
    # numeric reason code indexing Reason_msg.
    T, FN, Retrieved, reason, comptime, loadtime, compressed = \
        CacheLookup(CD, FN, my_F, 
                    args, kwargs, 
                    deps, 
                    verbose, 
                    compression,
                    dependencies)

  if not Retrieved:
    if test:  # Do not attempt to evaluate function
      T = None
    else:  # Evaluate function and save to cache
      if verbose is True:
       
        msg1(funcname, args, kwargs,reason)

      # Remove expired files automatically
      if options['expire']:
        DeleteOldFiles(CD,verbose)
       
      # Save args before function is evaluated in case
      # they are modified by function
      save_args_to_cache(CD,FN,args,kwargs,compression)

      # Execute and time function with supplied arguments
      t0 = time.time()

      T = my_F(*args, **kwargs) # Built-in 'apply' deprecated in Py3K   
     
      #comptime = round(time.time()-t0)
      comptime = time.time()-t0

      if verbose is True:
        msg2(funcname,args,kwargs,comptime,reason)

      # Save results and estimated loading time to cache
      loadtime = save_results_to_cache(T, CD, FN, my_F, deps, comptime, \
                                       funcname, dependencies, compression)
      if verbose is True:
        msg3(loadtime, CD, FN, deps, compression)
      compressed = compression

  # Record statistics unless this was a failed 'test' probe, in which case
  # comptime/loadtime/compressed were never bound.
  if options['savestat'] and (not test or Retrieved):
  ##if options['savestat']:
    addstatsline(CD,funcname,FN,Retrieved,reason,comptime,loadtime,compressed)

  return(T)  # Return results in all cases
396
397# -----------------------------------------------------------------------------
398
def cachestat(sortidx=4, period=-1, showuser=None, cachedir=None):
  """Report caching efficiency statistics gathered from the stats files.

  USAGE:
    cachestat(sortidx, period, showuser, cachedir)

  ARGUMENTS:
    sortidx --  Index of the field by which the report is sorted
                (default: 4).  Legal values are
                 0: 'Name'
                 1: 'Hits'
                 2: 'CPU'
                 3: 'Time Saved'
                 4: 'Gain(%)'
                 5: 'Size'
    period --   -1 means use all available caching history; 0 restricts
                the report to the current month (default: -1).
    showuser -- If set, an additional per-user statistics table is shown
                (default: None).
    cachedir -- Directory holding the cache files
                (default: options['cachedir']).

  DESCRIPTION:
    Delegates to __cachestat, which converts the logged caching statistics
    into summaries of the form
    --------------------------------------------------------------------------
    Function Name   Hits   Exec(s)  Cache(s)  Saved(s)   Gain(%)      Size
    --------------------------------------------------------------------------
  """

  # All the work happens in the private helper; its return value (if any)
  # is deliberately discarded.
  __cachestat(sortidx, period, showuser, cachedir)
  return
429
430# -----------------------------------------------------------------------------
431
# Has mostly been moved to proper unit test.
# What remains here includes example of the
# cache statistics form.
def test(cachedir=None, verbose=False, compression=None):
  """Test the functionality of caching.

  USAGE:
    test(verbose)

  ARGUMENTS:
    verbose --     Flag whether caching will output its statistics (default=False)
    cachedir --    Directory for cache files (Default: options['cachedir'])
    compression -- Flag zlib compression (Default: options['compression'])

  NOTE:
    This self test exercises cache() end-to-end and therefore creates,
    reads and deletes real files in the chosen cache directory.  The bare
    'except' clauses below are deliberate: any failure is converted into a
    test_error/test_OK report rather than an exception.
  """
   
  import string, time

  # Initialise
  #
  #import caching
  #reload(caching)

  if not cachedir:
    cachedir = options['cachedir']

  if verbose is None:  # Do NOT write 'if not verbose:', it could be zero.
    verbose = options['verbose']
 
  if compression == None:  # Do NOT write 'if not compression:',
                           # it could be zero.
    compression = options['compression']
  else:
    try:
      set_option('compression', compression)
    except:
      test_error('Set option failed')     

  # Fall back to uncompressed caching if zlib is unavailable
  try:
    import zlib
  except:
    log.critical()
    log.critical('*** Could not find zlib, default to no-compression      ***')
    log.critical('*** Installing zlib will improve performance of caching ***')
    log.critical()
    compression = 0       
    set_option('compression', compression)   
 
  log.critical('\nTesting caching module - please stand by\n')

  # Define a test function to be cached
  #
  def f(a,b,c,N,x=0,y='abcdefg'):
    """f(a,b,c,N)
       Do something time consuming and produce a complex result.
    """

    import string

    B = []
    for n in range(N):
      s = str(n+2.0/(n + 4.0))+'.a'*10
      B.append((a,b,c,s,n,x,y))
    return(B)
   
  # Check that default cachedir is OK
  #     
  CD = checkdir(cachedir,verbose)   
   
   
  # Make a dependency file
  #   
  try:
    DepFN = CD + 'testfile.tmp'
    DepFN_wildcard = CD + 'test*.tmp'
    Depfile = open(DepFN,'w')
    Depfile.write('We are the knights who say NI!')
    Depfile.close()
    test_OK('Wrote file %s' %DepFN)
  except:
    test_error('Could not open file %s for writing - check your environment' \
               % DepFN)

  # Check set_option (and switch stats off)
  #   
  try:
    set_option('savestat',0)
    assert(options['savestat'] == 0)
    test_OK('Set option')
  except:
    test_error('Set option failed')   
   
  # Make some test input arguments
  #
  N = 5000  #Make N fairly small here

  a = [1,2]
  b = ('Thou shalt count the number three',4)
  c = {'Five is right out': 6, (7,8): 9}
  x = 3
  y = 'holy hand granate'

  # Test caching
  # (run both compressed and uncompressed variants when zlib is available)
  if compression:
    comprange = 2
  else:
    comprange = 1

  for comp in range(comprange):
 
    # Evaluate and store
    #
    try:
      T1 = cache(f,(a,b,c,N), {'x':x, 'y':y}, evaluate=1, \
                   verbose=verbose, compression=comp)
      if comp:                   
        test_OK('Caching evaluation with compression')
      else:     
        test_OK('Caching evaluation without compression')     
    except:
      if comp:
        test_error('Caching evaluation with compression failed - try caching.test(compression=0)')
      else:
        test_error('Caching evaluation failed - try caching.test(verbose=1)')

    # Retrieve
    #                           
    try:                         
      T2 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose, \
                   compression=comp) 

      if comp:                   
        test_OK('Caching retrieval with compression')
      else:     
        test_OK('Caching retrieval without compression')     
    except:
      if comp:
        test_error('Caching retrieval with compression failed - try caching.test(compression=0)')
      else:                                     
        test_error('Caching retrieval failed - try caching.test(verbose=1)')

    # Reference result
    #   
    T3 = f(a,b,c,N,x=x,y=y)  # Compute without caching
   
    if T1 == T2 and T2 == T3:
      if comp:
        test_OK('Basic caching functionality (with compression)')
      else:
        test_OK('Basic caching functionality (without compression)')
    else:
      test_error('Cached result does not match computed result')


  # Test return_filename
  #   
  try:
    FN = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose, \
                 return_filename=1)   
    assert(FN[:2] == 'f[')
    test_OK('Return of cache filename')
  except:
    test_error('Return of cache filename failed')

  # Test existence of cachefiles
  # (one file per entry in file_types: Result, Args, Admin)
  try:
    (datafile,compressed0) = myopen(CD+FN+'_'+file_types[0],"rb",compression)
    (argsfile,compressed1) = myopen(CD+FN+'_'+file_types[1],"rb",compression)
    (admfile,compressed2) =  myopen(CD+FN+'_'+file_types[2],"rb",compression)
    test_OK('Presence of cache files')
    datafile.close()
    argsfile.close()
    admfile.close()
  except:
    test_error('Expected cache files did not exist') 
             
  # Test 'test' function when cache is present
  #     
  try:
    #T1 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose, \
    #                   evaluate=1) 
    T4 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose, test=1)
    assert(T1 == T4)

    test_OK("Option 'test' when cache file present")
  except:
    test_error("Option 'test' when cache file present failed")     

  # Test that 'clear' works
  #
  #try:
  #  cache(f,'clear',verbose=verbose)
  #  test_OK('Clearing of cache files')
  #except:
  #  test_error('Clear does not work')
  try:
    cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose, clear=1)   
    test_OK('Clearing of cache files')
  except:
    test_error('Clear does not work') 

 

  # Test 'test' function when cache is absent
  #     
  try:
    T4 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose, test=1)
    assert(T4 is None)
    test_OK("Option 'test' when cache absent")
  except:
    test_error("Option 'test' when cache absent failed")     
         
  # Test dependencies
  #
  T1 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose, \
               dependencies=DepFN) 
  T2 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose, \
               dependencies=DepFN)                     
                       
  if T1 == T2:
    test_OK('Basic dependencies functionality')
  else:
    test_error('Dependencies do not work')

  # Test basic wildcard dependency
  #
  T3 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose, \
               dependencies=DepFN_wildcard)                     
   
  if T1 == T3:
    test_OK('Basic dependencies with wildcard functionality')
  else:
    test_error('Dependencies with wildcards do not work')


  # Test that changed timestamp in dependencies triggers recomputation
 
  # Modify dependency file
  Depfile = open(DepFN,'a')
  Depfile.write('You must cut down the mightiest tree in the forest with a Herring')
  Depfile.close()
 
  T3 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose, \
               dependencies=DepFN, test = 1)                     
 
  if T3 is None:
    test_OK('Changed dependencies recognised')
  else:
    test_error('Changed dependencies not recognised')   
 
  # Test recomputation when dependencies have changed
  #
  T3 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose, \
               dependencies=DepFN)                       
  if T1 == T3:
    test_OK('Recomputed value with changed dependencies')
  else:
    test_error('Recomputed value with changed dependencies failed')

  # Performance test (with statistics)
  # Don't really rely on this as it will depend on specific computer.
  #

  set_option('savestat',1)

  N = 20*N   #Should be large on fast computers...
  tt = time.time()
  T1 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose)
  t1 = time.time() - tt
 
  tt = time.time()
  T2 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=verbose)
  t2 = time.time() - tt
 
  if T1 == T2:
    if t1 > t2:
      test_OK('Performance test: relative time saved = %s pct' \
              %str(round((t1-t2)*100/t1,2)))
  else:       
    test_error('Basic caching failed for new problem')
           
  # Test presence of statistics file
  #
  try: 
    DIRLIST = os.listdir(CD)
    SF = []
    for FN in DIRLIST:
      if string.find(FN,statsfile) >= 0:
        fid = open(CD+FN,'r')
        fid.close()
    test_OK('Statistics files present') 
  except:
    test_OK('Statistics files cannot be opened')         
     
  print_header_box('Show sample output of the caching function:')
 
  T2 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=0)
  T2 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=0)
  T2 = cache(f,(a,b,c,N), {'x':x, 'y':y}, verbose=1)
 
  print_header_box('Show sample output of cachestat():')
  if unix:
    cachestat()   
  else:
    try:
      import time
      t = time.strptime('2030','%Y')
      cachestat()
    except: 
      log.critical('cachestat() does not work here, because it relies on '
                   'time.strptime() which is unavailable in Windows')
     
  test_OK('Caching self test completed')   
     
           
  # Test setoption (not yet implemented)
  #
751 
752#==============================================================================
753# Auxiliary functions
754#==============================================================================
755
756# Import pickler
757# cPickle is used by functions mysave, myload, and compare
758#
import cPickle  # 10 to 100 times faster than pickle
pickler = cPickle  # Module used by mysave, myload, and compare for (de)serialisation

# Local immutable constants
#
comp_level = 1              # Compression level for zlib.
                            # comp_level = 1 works well.
textwidth1 = 16             # Text width of key fields in report forms.
#textwidth2 = 132            # Maximal width of textual representation of
textwidth2 = 300            # Maximal width of textual representation of
                            # arguments.
textwidth3 = 16             # Initial width of separation lines. Is modified.
textwidth4 = 50             # Text width in test_OK()
statsfile  = '.cache_stat'  # Basefilename for cached statistics.
                            # It will reside in the chosen cache directory.

file_types = ['Result',     # File name extension for cached function results.
              'Args',       # File name extension for stored function args.
              'Admin']      # File name extension for administrative info.

# Messages indexed by the numeric 'reason' codes returned from CacheLookup
Reason_msg = ['OK',         # Verbose reasons for recomputation
              'No cached result', 
              'Dependencies have changed', 
              'Arguments have changed',
              'Bytecode has changed',
              'Recomputation was requested by caller',
              'Cached file was unreadable']             
786             
787# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
788
def CacheLookup(CD, FN, my_F, args, kwargs, deps, verbose, compression, 
                dependencies):
  """Determine whether cached result exists and return info.

  USAGE:
    (T, FN, Retrieved, reason, comptime, loadtime, compressed) = \ 
    CacheLookup(CD, FN, my_F, args, kwargs, deps, verbose, compression, \
                dependencies)

  INPUT ARGUMENTS:
    CD --            Cache Directory
    FN --            Suggested cache file name
    my_F --          Callable object
    args --          Tuple of arguments
    kwargs --        Dictionary of keyword arguments
    deps --          Dependencies time stamps
    verbose --       Flag text output
    compression --   Flag zlib compression
    dependencies --  Given list of dependencies

  OUTPUT ARGUMENTS:
    T --             Cached result if present otherwise None
    FN --            File name under which new results must be saved
    Retrieved --     True if a valid cached result was found
    reason --        0: OK (if Retrieved),
                     1: No cached result,
                     2: Dependencies have changed,
                     3: Arguments have changed
                     4: Bytecode has changed
                     5: Recomputation was forced
                     6: Unreadable file
    comptime --      Number of seconds it took to compute the cached result
    loadtime --      Number of seconds it took to load cached result
    compressed --    Flag (0,1) if cached results were compressed or not

  DESCRIPTION:
    Determine if cached result exists as follows:
    Load in saved arguments and bytecode stored under hashed filename.
    If they are identical to current arguments and bytecode and if dependencies
    have not changed their time stamp, then return cached result.

    Otherwise return filename under which new results should be cached.
    Hash collisions are handled recursively by calling CacheLookup again with a
    modified filename.
  """

  import time, string, types

  # Assess whether cached result exists - compressed or not.
  #
  if verbose:
    log.critical('Caching: looking for cached files %s_{%s,%s,%s}.z'
                 % (CD+FN, file_types[0], file_types[1], file_types[2]))
  (datafile,compressed0) = myopen(CD+FN+'_'+file_types[0],"rb",compression)
  (argsfile,compressed1) = myopen(CD+FN+'_'+file_types[1],"rb",compression)
  (admfile,compressed2) =  myopen(CD+FN+'_'+file_types[2],"rb",compression)

  if verbose is True and deps is not None:
    log.critical('Caching: Dependencies are %s' % deps.keys())

  if not (argsfile and datafile and admfile) or \
     not (compressed0 == compressed1 and compressed0 == compressed2):
    # Cached result does not exist or files were compressed differently
    #
    # This will ensure that evaluation will take place unless all files are
    # present.

    reason = 1  # 'No cached result' (see Reason_msg)
    return(None,FN,None,reason,None,None,None) #Recompute using same filename

  compressed = compressed0  # Remember if compressed files were actually used
  datafile.close()

  # Retrieve arguments and adm. info
  #
  R, reason = myload(argsfile, compressed)  # The original arguments
  argsfile.close()
   
  if reason > 0:
      # Args file was unreadable - recompute using same filename
      return(None, FN, None, reason, None, None, None)
  else:   
      (argsref, kwargsref) = R

  R, reason = myload(admfile, compressed)
  admfile.close() 

  if reason > 0:
    return(None,FN,None,reason,None,None,None) #Recompute using same filename

  # Unpack the administrative record saved alongside the result
  depsref  = R[0]  # Dependency statistics
  comptime = R[1]  # The computation time
  coderef  = R[2]  # The byte code
  funcname = R[3]  # The function name

  # Check if dependencies have changed
  #
  if dependencies and not compare(depsref, deps):
    if verbose:
      log.critical('Dependencies %s have changed - recomputing' % dependencies)

    # Don't use cached file - recompute
    reason = 2  # 'Dependencies have changed'
    return(None, FN, None, reason, None, None, None)

  # Get bytecode from my_F
  #
  bytecode = get_bytecode(my_F)

  # Check if arguments or bytecode have changed
  # (bytecode comparison is skipped when options['bytecode'] is off)
  if compare(argsref, args) and compare(kwargsref, kwargs) and \
     (not options['bytecode'] or compare(bytecode, coderef)):

    # Arguments and dependencies match. Get cached results
    T, loadtime, compressed, reason = load_from_cache(CD, FN, compressed)
    if reason > 0:
        # Result file was unreadable - recompute using same FN
        return(None, FN, None, reason, None, None, None)

    Retrieved = 1
    reason = 0

    if verbose:
      msg4(funcname,args,kwargs,deps,comptime,loadtime,CD,FN,compressed)

      if loadtime >= comptime:
        log.critical('Caching did not yield any gain.')
        log.critical('Consider executing function %s without caching.'
                     % funcname)
  else:

    # Non matching arguments or bytecodes signify a hash-collision.
    # This is resolved by recursive search of cache filenames
    # until either a matching or an unused filename is found.
    #
    (T, FN, Retrieved, reason, comptime, loadtime, compressed) = \
        CacheLookup(CD, FN+'x', my_F, args, kwargs, deps, 
                    verbose, compression, dependencies)

    # The real reason is that args or bytecodes have changed.
    # Not that the recursive search has found an unused filename
    if not Retrieved:
      if not compare(bytecode, coderef):
        reason = 4 # Bytecode has changed
      else:   
        reason = 3 # Arguments have changed
       
   
  return((T, FN, Retrieved, reason, comptime, loadtime, compressed))
939
940# -----------------------------------------------------------------------------
941
def clear_cache(CD, my_F=None, verbose=None):
  """Clear cache for my_F.

  USAGE:
     clear(CD, my_F, verbose)

  ARGUMENTS:
     CD --       Caching directory (required)
     my_F --     Function object (default: None)
     verbose --  Flag verbose output (default: None, meaning use global option)

  DESCRIPTION:

    If my_F == None, clear everything,
    otherwise clear only files pertaining to my_F.
  """

  import os, re

  # Normalise directory name so file names can be appended directly
  if CD[-1] != os.sep:
    CD = CD+os.sep

  if verbose is None:
    verbose = options['verbose']

  # FIXME: Windows version needs to be tested

  if my_F:
    # Remove only files whose names begin with this function's cache prefix
    funcname = get_funcname(my_F)
    if verbose:
      log.critical('Clearing %s' % CD+funcname+'*')

    file_names = os.listdir(CD)
    for file_name in file_names:
      if file_name.startswith(funcname):
        if unix:
          os.remove(CD+file_name)
        else:
          os.system('del '+CD+file_name)
          # FIXME: os.remove doesn't work under windows
  else:
    # No function given: offer to wipe the whole cache directory
    file_names = os.listdir(CD)
    if len(file_names) > 0:
      if verbose:
        log.critical('Remove the following files:')
        for file_name in file_names:
            log.critical('     ' + file_name)

        A = raw_input('Delete (Y/N)[N] ?')
      else:
        # Non-verbose mode deletes without interactive confirmation
        A = 'Y'

      if A in ('Y', 'y'):
        for file_name in file_names:
          if unix:
            os.remove(CD+file_name)
          else:
            os.system('del '+CD+file_name)
            # FIXME: os.remove doesn't work under windows
          #exitcode=os.system('/bin/rm '+CD+'* 2> /dev/null')
1005# -----------------------------------------------------------------------------
1006
def DeleteOldFiles(CD,verbose=None):
  """Remove expired files

  USAGE:
    DeleteOldFiles(CD,verbose=None)

  ARGUMENTS:
    CD --      Caching directory
    verbose -- Flag verbose output (default: None, meaning use global option)

  DESCRIPTION:
    If the cache directory holds more than options['maxfiles'] files,
    delete the least recently used ones, one block (1000) at a time.
    Unix only; silently does nothing elsewhere.
  """

  if verbose is None:
    verbose = options['verbose']

  maxfiles = options['maxfiles']

  # FIXME: Windows version

  import os
  block = 1000  # How many files to delete per invokation
  Files = os.listdir(CD)
  numfiles = len(Files)
  if not unix: return  # FIXME: Windows case ?

  if numfiles > maxfiles:
    # Remove enough files to get below the limit plus one whole block
    delfiles = numfiles-maxfiles+block
    if verbose:
      log.critical('Deleting %d expired files:' % delfiles)
      # Backtick repr syntax replaced with str() (equivalent for ints)
      os.system('ls -lur '+CD+'* | head -' + str(delfiles))          # List them
    os.system('ls -ur '+CD+'* | head -' + str(delfiles) + ' | xargs /bin/rm')
                                                                  # Delete them
    # FIXME: Replace this with os.listdir and os.remove
1036# -----------------------------------------------------------------------------
1037
def save_args_to_cache(CD, FN, args, kwargs, compression):
  """Save arguments to cache

  USAGE:
    save_args_to_cache(CD,FN,args,kwargs,compression)

  ARGUMENTS:
    CD --          Caching directory
    FN --          Cache filename stem
    args --        Positional arguments of the cached call
    kwargs --      Keyword arguments of the cached call
    compression -- Flag zlib compression

  DESCRIPTION:
    Pickle (args, kwargs) into the arguments file so a later lookup can
    verify that a cached result corresponds to the same inputs.

  RAISES:
    IOError -- if the arguments file could not be opened for writing
  """

  import time, os, sys, types

  argsfile, compressed = myopen(CD+FN+'_'+file_types[1], 'wb', compression)

  if argsfile is None:
    raise IOError('ERROR (caching): Could not open argsfile for writing: %s' %FN)

  # Both positional and keyword arguments go into a single pickle
  mysave((args,kwargs), argsfile, compression)
  argsfile.close()

  # Change access rights if possible
  #
  #if unix:
  #  try:
  #    exitcode=os.system('chmod 666 '+argsfile.name)
  #  except:
  #    pass
  #else:
  #  pass  # FIXME: Take care of access rights under Windows

  return
1067
1068# -----------------------------------------------------------------------------
1069
def save_results_to_cache(T, CD, FN, my_F, deps, comptime, funcname,
                          dependencies, compression):
  """Save computed results T and admin info to cache

  USAGE:
    save_results_to_cache(T, CD, FN, my_F, deps, comptime, funcname,
                          dependencies, compression)

  ARGUMENTS:
    T --            Result object to store
    CD --           Caching directory
    FN --           Cache filename stem
    my_F --         Function object (its bytecode is stored for validation)
    deps --         Dependency statistics dictionary
    comptime --     Time (s) it took to compute T
    funcname --     Name of the cached function
    dependencies -- Dependency file names (kept for interface symmetry)
    compression --  Flag zlib compression

  RETURNS:
    savetime -- Time (s) spent writing the result data to disk

  RAISES:
    IOError -- if either cache file could not be opened for writing
  """

  import time, os, sys, types

  # BUG FIX: 'verbose' was referenced below without ever being defined,
  # which raised NameError instead of the intended error message.
  verbose = options['verbose']

  (datafile, compressed1) = myopen(CD+FN+'_'+file_types[0],'wb',compression)
  (admfile, compressed2) = myopen(CD+FN+'_'+file_types[2],'wb',compression)

  if not datafile:
    if verbose:
        # BUG FIX: datafile is None here, so datafile.name would crash;
        # report the path that failed to open instead.
        log.critical('ERROR: Could not open %s' % (CD+FN+'_'+file_types[0]))
    raise IOError

  if not admfile:
    if verbose:
        log.critical('ERROR: Could not open %s' % (CD+FN+'_'+file_types[2]))
    raise IOError

  t0 = time.time()

  mysave(T,datafile,compression)  # Save data to cache
  datafile.close()
  #savetime = round(time.time()-t0,2)
  savetime = time.time()-t0 

  bytecode = get_bytecode(my_F)  # Get bytecode from function object
  admtup = (deps, comptime, bytecode, funcname)  # Gather admin info

  mysave(admtup,admfile,compression)  # Save admin info to cache
  admfile.close()

  # Change access rights if possible
  #
  #if unix:
  #  try:
  #    exitcode=os.system('chmod 666 '+datafile.name)
  #    exitcode=os.system('chmod 666 '+admfile.name)
  #  except:
  #    pass
  #else:
  #  pass  # FIXME: Take care of access rights under Windows

  return(savetime)
1119
1120# -----------------------------------------------------------------------------
1121
def load_from_cache(CD, FN, compression):
  """Load previously cached data from file FN

  USAGE:
    load_from_cache(CD,FN,compression)

  RETURNS:
    T --          Unpickled result (None on failure)
    loadtime --   Seconds spent reading and unpickling
    compressed -- Whether the file on disk was actually compressed
    reason --     0 on success, otherwise failure code from myload
  """

  import time

  datafile, compressed = myopen(CD+FN+'_'+file_types[0], "rb", compression)

  # Time the retrieval so callers can compare it against recomputation
  start = time.time()
  T, reason = myload(datafile, compressed)
  loadtime = time.time() - start

  datafile.close()

  return T, loadtime, compressed, reason
1139
1140# -----------------------------------------------------------------------------
1141
def myopen(FN, mode, compression=True):
  """Open file FN using given mode

  USAGE:
    myopen(FN, mode, compression=True)

  ARGUMENTS:
    FN --           File name to be opened
    mode --         Open mode (as in open)
    compression --  Flag zlib compression

  RETURNS:
    (fid, compressed) -- Open file object (None if nothing could be
                         opened) and flag 1/0 indicating whether the
                         compressed name FN + '.z' was used.

  DESCRIPTION:
     if compression
       Attempt first to open FN + '.z'
       If this fails try to open FN
     else do the opposite
     Return file handle plus info about whether it was compressed or not.
  """

  # Determine if file exists already (if writing was requested)
  # This info is only used to determine if access modes should be set
  # BUG FIX: previous version shadowed the builtin 'file' and used bare
  # 'except:' clauses that silently swallowed non-I/O errors as well;
  # only IOError (missing/unopenable file) is expected here.
  if 'w' in mode or 'a' in mode:
    new_file = 1
    for name in (FN+'.z', FN):
      try:
        fid = open(name, 'r')
        fid.close()
        new_file = 0
        break
      except IOError:
        pass
  else:
    new_file = 0  # Assume it exists if mode was not 'w' or 'a'

  # Try the compressed and plain names in the order dictated by the flag
  if compression:
    candidates = [(FN+'.z', 1), (FN, 0)]
  else:
    candidates = [(FN, 0), (FN+'.z', 1)]

  fid = None
  compressed = 0
  for name, is_compressed in candidates:
    try:
      fid = open(name, mode)
      compressed = is_compressed
      break
    except IOError:
      fid = None

  # Now set access rights if it is a new file
  #
  if fid and new_file:
    if unix:
      exitcode = os.system('chmod 666 '+fid.name)
    else:
      pass  # FIXME: Take care of access rights under Windows

  return(fid, compressed)
1211
1212# -----------------------------------------------------------------------------
1213
def myload(file, compressed):
  """Load data from file

  USAGE:
    myload(file, compressed)

  RETURNS:
    (R, reason) -- unpickled object and failure code
                   (0 = success, 6 = unreadable/corrupt file)
  """

  reason = 0
  try:
    if compressed:
      import zlib

      raw = file.read()
      try:
        data = zlib.decompress(raw)
      except:
        # Decompression failed (e.g. zlib error -5 on a truncated file).
        # Treat it as an unreadable cache file rather than crashing.
        reason = 6  # Unreadable file
        return None, reason

      del raw  # Free up some space
      R = pickler.loads(data)
    else:
      try:
        R = pickler.load(file)
      except:
        # Catch e.g. a zero-length or corrupted file
        reason = 6  # Unreadable file
        return None, reason

  except MemoryError:
    import sys
    if options['verbose']:
      log.critical('ERROR: Out of memory while loading %s, aborting'
                   % file.name)

    # Raise the error again for now
    #
    raise MemoryError

  return R, reason
1262
1263# -----------------------------------------------------------------------------
1264
def mysave(T, file, compression):
  """Save data T to file

  USAGE:
    mysave(T, file, compression)

  DESCRIPTION:
    Pickle T into the already-open file object. With compression the
    pickled stream is run through zlib at level comp_level first.
  """

  bin = options['bin']

  if not compression:
    # Uncompressed pickling straight into the file.
    #
    # FIXME: This may not work on Windows network drives
    # (IOError: [Errno 22] Invalid argument, raised at the OS level
    # during disk I/O); small files tested OK. See
    # http://www.python.org error code docs for Errno 22 details.
    pickler.dump(T, file, bin)
    return

  try:
    import zlib
  except:
    log.critical()
    log.critical('*** Could not find zlib ***')
    log.critical('*** Try to run caching with compression off ***')
    log.critical("*** caching.set_option('compression', 0) ***")
    raise Exception

  try:
    Ts = pickler.dumps(T, bin)
  except MemoryError:
    msg = '****WARNING (caching.py): Could not pickle data for compression.'
    msg += ' Try using compression = False'
    raise MemoryError(msg)
  else:
    # Compressed pickling
    file.write(zlib.compress(Ts, comp_level))
1334# -----------------------------------------------------------------------------
1335
1336   
def myhash(T, ids=None):
  """Compute hashed integer from a range of inputs.
  If T is not hashable being e.g. a tuple T, myhash will recursively
  hash the values individually

  USAGE:
    myhash(T)

  ARGUMENTS:
    T --   Anything
    ids -- Internal list of id()s already visited on this call chain;
           used only by the recursive calls to break cycles.
  """

  from types import TupleType, ListType, DictType, InstanceType 
   
  # Containers and instances may (indirectly) contain themselves, so
  # record their id()s before descending into them.
  if type(T) in [TupleType, ListType, DictType, InstanceType]: 
      # Keep track of unique id's to protect against infinite recursion
      if ids is None: ids = []

      # Check if T has already been encountered
      i = id(T) 
 
      if i in ids:
          return 0 # T has been hashed already     
      else:
          ids.append(i)
   

   
  # Start hashing 
 
  # On some architectures None, False and True gets different hash values
  if T is None:
      return(-1)
  if T is False:
      return(0)
  if T is True:
      return(1)

  # Get hash values for hashable entries
  if type(T) in [TupleType, ListType]:
      # Hash each element recursively, then hash the tuple of results
      hvals = []
      for t in T:
          h = myhash(t, ids)
          hvals.append(h)
      val = hash(tuple(hvals))
  elif type(T) == DictType:
      # Make dictionary ordering unique 
     
      # FIXME(Ole): Need new way of doing this in Python 3.0
      I = T.items()
      I.sort()   
      val = myhash(I, ids)
  elif isinstance(T, num.ndarray):
      T = num.array(T) # Ensure array is contiguous

      # Use mean value for efficiency
      # NOTE(review): arrays with equal means hash alike; presumably a
      # deliberate speed/collision trade-off, since callers resolve hash
      # collisions by comparing the actual arguments - confirm.
      val = hash(num.average(T.flat))
  elif type(T) == InstanceType:
      # Use the attribute values
      val = myhash(T.__dict__, ids)
  else:
      # Plain hashable types; anything unhashable maps to the constant 1,
      # relying on full argument comparison to disambiguate later.
      try:
          val = hash(T)
      except:
          val = 1

  return(val)
1404
1405
1406
def compare(A, B, ids=None):
    """Safe comparison of general objects

    USAGE:
      compare(A,B)

    ARGUMENTS:
      A, B -- Anything
      ids --  Internal dict mapping (id(A), id(B)) pairs to comparison
              results; used by the recursive calls to break cycles.

    DESCRIPTION:
      Return 1 if A and B they are identical, 0 otherwise
    """

    from types import TupleType, ListType, DictType, InstanceType
   
    # Keep track of unique id's to protect against infinite recursion
    if ids is None: ids = {}

    # Check if T has already been encountered
    iA = id(A) 
    iB = id(B)     
   
    if (iA, iB) in ids:
        # A and B have been compared already
        return ids[(iA, iB)]
    else:
        # Tentatively record this pair as equal so cyclic references
        # terminate; the real verdict overwrites this entry below.
        ids[(iA, iB)] = True
   
   
    # Check if arguments are of same type
    if type(A) != type(B):
        return False
       
 
    # Compare recursively based on argument type
    if type(A) in [TupleType, ListType]:
        # Sequences match iff same length and pairwise equal
        N = len(A)
        if len(B) != N: 
            identical = False
        else:
            identical = True
            for i in range(N):
                if not compare(A[i], B[i], ids): 
                    identical = False; break
                   
    elif type(A) == DictType:
        if len(A) != len(B):
            identical = False
        else:                       
            # Make dictionary ordering unique
            a = A.items(); a.sort()   
            b = B.items(); b.sort()
           
            identical = compare(a, b, ids)
           
    elif isinstance(A, num.ndarray):
        # Use element by element comparison
        identical = num.alltrue(A==B)

    elif type(A) == InstanceType:
        # Take care of special case where elements are instances           
        # Base comparison on attributes     
        identical = compare(A.__dict__, 
                            B.__dict__, 
                            ids)
    else:       
        # Fall back to general code
        try:
            identical = (A == B)
        except:
            import pickle
            # Use pickle to compare data
            # The native pickler must be used
            # since the faster cPickle does not
            # guarantee a unique translation           
            try:
                identical = (pickle.dumps(A,0) == pickle.dumps(B,0))
            except:
                identical = False

    # Record result of comparison and return           
    ids[(iA, iB)] = identical
   
    return(identical)
1488
1489   
1490# -----------------------------------------------------------------------------
1491
def nospace(s):
  """Replace spaces in string s with underscores

  USAGE:
    nospace(s)

  ARGUMENTS:
    s -- string

  RETURNS:
    Copy of s with every ' ' replaced by '_'
  """

  # str.replace does this in one C-level pass; the previous
  # character-by-character concatenation loop was quadratic in len(s).
  return s.replace(' ', '_')
1512
1513# -----------------------------------------------------------------------------
1514
def get_funcname(my_F):
  """Retrieve name of function object func (depending on its type)

  USAGE:
    get_funcname(my_F)

  DESCRIPTION:
    Plain functions and builtins report their own name. Any other
    callable is named from its repr() with angle brackets and quotes
    blanked out and the trailing memory address truncated. The result
    is passed through nospace() so it is usable as a filename stem.
  """

  import types, string

  if type(my_F) == types.FunctionType:
    funcname = my_F.func_name
  elif type(my_F) == types.BuiltinFunctionType:
    funcname = my_F.__name__
  else:
    # Derive a name from the repr, e.g. "class __main__.Dummy at 0x00A915D0"
    blank_table = string.maketrans("<>'","   ")
    words = string.translate(repr(my_F), blank_table).split()
    funcname = ' '.join(words)

    # Truncate memory address as in
    # class __main__.Dummy at 0x00A915D0
    pos = funcname.find('at 0x')
    if pos >= 0:
      funcname = funcname[:pos+5] # Keep info that there is an address

  return nospace(funcname)
1542
1543# -----------------------------------------------------------------------------
1544
def get_bytecode(my_F):
  """ Get bytecode from function object.

  USAGE:
    get_bytecode(my_F)

  RETURNS:
    Tuple of code details from get_func_code_details; for callable
    instances a hash of the instance attributes is appended so that
    attribute changes invalidate the cache.

  RAISES:
    Exception -- if my_F is a non-callable instance or of unknown type.
  """

  import types

  if type(my_F) == types.FunctionType:
    return get_func_code_details(my_F)
  elif type(my_F) == types.MethodType:
    return get_func_code_details(my_F.im_func)
  elif type(my_F) == types.InstanceType:   
    if hasattr(my_F, '__call__'):
      # Get bytecode from __call__ method
      bytecode = get_func_code_details(my_F.__call__.im_func)
     
      # Add hash value of object to detect attribute changes
      return bytecode + (myhash(my_F),) 
    else:
      msg = 'Instance %s was passed into caching in the role of a function ' % str(my_F)
      # BUG FIX: this line used '=' instead of '+=', discarding the
      # first half of the message.
      msg += ' but it was not callable.'
      raise Exception(msg)
  elif type(my_F) in [types.BuiltinFunctionType, types.BuiltinMethodType]:     
    # Built-in functions are assumed not to change 
    return None, 0, 0, 0
  elif type(my_F) == types.ClassType:
      # Get bytecode from __init__ method
      bytecode = get_func_code_details(my_F.__init__.im_func)   
      return bytecode     
  else:
    msg = 'Unknown function type: %s' % type(my_F)
    raise Exception(msg)
1579
1580
1581 
1582 
def get_func_code_details(my_F):
  """Extract co_code, co_consts, co_argcount, func_defaults

  USAGE:
    get_func_code_details(my_F)

  RETURNS:
    (bytecode, consts, argcount, defaults) -- tuple characterising the
    function's compiled code object and its default argument values.
  """

  code = my_F.func_code
  return code.co_code, code.co_consts, code.co_argcount, my_F.func_defaults
1593
1594# -----------------------------------------------------------------------------
1595
def get_depstats(dependencies):
  """ Build dictionary of dependency files and their size, mod. time and ctime.

  USAGE:
    get_depstats(dependencies):

  RETURNS:
    Dictionary mapping each (glob-expanded) dependency filename to a
    (size, mtime) tuple. Empty dict when no dependencies are given.

  RAISES:
    Exception -- for missing dependencies or non-string entries.
  """

  import types

  d = {}
  if not dependencies:
    return d

  # Expand any wildcards first
  import glob
  expanded_dependencies = []
  for FN in dependencies:
    matches = glob.glob(FN)

    if matches == []:
      raise Exception('ERROR (caching.py): Dependency '+FN+' does not exist.')

    expanded_dependencies += matches

  for FN in expanded_dependencies:
    if not type(FN) == types.StringType:
      errmsg = 'ERROR (caching.py): Dependency must be a string.\n'
      errmsg += '                   Dependency given: %s' %FN
      raise Exception(errmsg)
    if not os.access(FN,os.F_OK):
      raise Exception('ERROR (caching.py): Dependency '+FN+' does not exist.')

    (size,atime,mtime,ctime) = filestat(FN)

    # We don't use atime because that would cause recomputation every time.
    # We don't use ctime because that is irrelevant and confusing for users.
    d[FN] = (size,mtime)

  return d
1636
1637# -----------------------------------------------------------------------------
1638
def filestat(FN):
  """A safe wrapper using os.stat to get basic file statistics
     The built-in os.stat breaks down if file sizes are too large (> 2GB ?)

  USAGE:
    filestat(FN)

  RETURNS:
    (size, atime, mtime, ctime) with size converted to a long integer.
    NOTE(review): on the Unix fallback path the times come from parsing
    'ls --full-time' output via get_lsline, not from os.stat.

  DESCRIPTION:
     Must compile Python with
     CFLAGS="`getconf LFS_CFLAGS`" OPT="-g -O2 $CFLAGS" \
              configure
     as given in section 8.1.1 Large File Support in the Libray Reference
  """

  import os, time

  try:
    stats = os.stat(FN)
    size  = stats[6]   # st_size
    atime = stats[7]   # st_atime
    mtime = stats[8]   # st_mtime
    ctime = stats[9]   # st_ctime
  except:

    # Hack to get the results anyway (works only on Unix at the moment)
    #
    log.critical('Hack to get os.stat when files are too large')

    if unix:
      # Scratch file for capturing 'ls' output (pid+timestamp for uniqueness)
      tmp = '/tmp/cach.tmp.'+`time.time()`+`os.getpid()`
      # Unique filename, FIXME: Use random number

      # Get size and access time (atime)
      #
      exitcode=os.system('ls -l --full-time --time=atime '+FN+' > '+tmp)
      (size,atime) = get_lsline(tmp)

      # Get size and modification time (mtime)
      #
      exitcode=os.system('ls -l --full-time '+FN+' > '+tmp)
      (size,mtime) = get_lsline(tmp)

      # Get size and ctime
      #
      exitcode=os.system('ls -l --full-time --time=ctime '+FN+' > '+tmp)
      (size,ctime) = get_lsline(tmp)

      try:
        exitcode=os.system('rm '+tmp)
        # FIXME: Gives error if file doesn't exist
      except:
        pass
    else:
      pass
      raise Exception  # FIXME: Windows case

  return(long(size),atime,mtime,ctime)
1696
1697# -----------------------------------------------------------------------------
1698
def get_lsline(FN):
  """get size and time for filename

  USAGE:
    get_lsline(file_name)

  RETURNS:
    (size, t) -- size as the string taken from the 'ls' field, and t as
    seconds since the epoch (local time).

  DESCRIPTION:
    Read in one line 'ls -la' item from file (generated by filestat) and
    convert time to seconds since epoch. Return file size and time.
  """

  import time

  # BUG FIX: the file handle was never closed (resource leak);
  # also no longer shadows the builtin 'str'.
  f = open(FN,'r')
  try:
    info = f.read().split()
  finally:
    f.close()

  size = info[4]

  # Fields 5..9 form e.g. 'Mon Jan 02 03:04:05 2006', which matches
  # time.strptime's default format '%a %b %d %H:%M:%S %Y'
  timestr = ' '.join(info[5:10])
  timetup = time.strptime(timestr)
  t = time.mktime(timetup)
  return(size, t)
1727
1728# -----------------------------------------------------------------------------
1729
def checkdir(CD, verbose=None, warn=False):
  """Check or create caching directory

  USAGE:
    checkdir(CD,verbose):

  ARGUMENTS:
    CD -- Directory
    verbose -- Flag verbose output (default: None)
    warn -- Flag warnings if directory cannot be created (default: False)

  RETURNS:
    The directory name, normalised to end with os.sep. If CD could not
    be created, a fallback is returned ('/tmp/' on unix, 'C:' otherwise).

  DESCRIPTION:
    If CD does not exist it will be created if possible
  """

  import os
  import os.path

  if CD[-1] != os.sep: 
    CD = CD + os.sep  # Add separator for directories

  CD = os.path.expanduser(CD) # Expand ~ or ~user in pathname
  # BUG FIX: 'os.access(CD, os.R_OK and os.W_OK)' evaluated the boolean
  # expression first, testing only os.W_OK. Use bitwise OR so both read
  # and write access are required.
  if not (os.access(CD, os.R_OK | os.W_OK) or CD == ''):
    try:
      exitcode=os.mkdir(CD)

      # Change access rights if possible
      #
      if unix:
        exitcode=os.system('chmod 777 '+CD)
      else:
        pass  # FIXME: What about acces rights under Windows?
      if verbose: log.critical('MESSAGE: Directory %s created.' % CD)
    except:
      if warn is True:
        log.critical('WARNING: Directory %s could not be created.' % CD)
      if unix:
        CD = '/tmp/'
      else:
        CD = 'C:' 
      if warn is True:
        log.critical('Using directory %s instead' % CD)

  return(CD)
1773
# Ensure the default cache directory exists at import time
# (falls back to /tmp or C: with a warning if it cannot be created)
checkdir(cachedir, warn=True)
1775
1776#==============================================================================
1777# Statistics
1778#==============================================================================
1779
def addstatsline(CD, funcname, FN, Retrieved, reason, comptime, loadtime,
                 compression):
  """Add stats entry

  USAGE:
    addstatsline(CD,funcname,FN,Retrieved,reason,comptime,loadtime,compression)

  ARGUMENTS:
    CD --          Caching directory
    funcname --    Name of the cached function
    FN --          Cache filename stem of the recorded result
    Retrieved --   Truthy if the result came from cache (a hit)
    reason --      Numeric code explaining a cache miss
    comptime --    Computation time (s)
    loadtime --    Time (s) taken to load the cached result
    compression -- Flag zlib compression (selects the '.z' result file)

  DESCRIPTION:
    Make one entry in the stats file about one cache hit recording time saved
    and other statistics. The data are used by the function cachestat.
    All failures here are deliberately non-fatal (broad excepts): writing
    statistics must never break the cached computation itself.
  """

  import os, time

  try:
    # One stats file per calendar month, e.g. '<statsfile>.Jan2010'
    TimeTuple = time.localtime(time.time())
    extension = time.strftime('%b%Y',TimeTuple)
    SFN = CD+statsfile+'.'+extension
    #statfile = open(SFN,'a')
    (statfile, dummy) = myopen(SFN,'a',compression=0)

    # Change access rights if possible
    #
    #if unix:
    #  try:
    #    exitcode=os.system('chmod 666 '+SFN)
    #  except:
    #    pass
  except:
    log.critical('Warning: Stat file could not be opened')

  # NOTE(review): if the block above failed, TimeTuple/statfile are
  # undefined here; the NameError is then swallowed by the except below.
  try:
    if os.environ.has_key('USER'):
      user = os.environ['USER']
    else:
      user = 'Nobody'

    date = time.asctime(TimeTuple)

    if Retrieved:
      hit = '1'
    else:
      hit = '0'

    # Get size of result file
    #   
    if compression:
      stats = os.stat(CD+FN+'_'+file_types[0]+'.z')
    else:
      stats = os.stat(CD+FN+'_'+file_types[0])
 
    if stats: 
      size = stats[6]   # st_size
    else:
      size = -1  # Error condition, but don't crash. This is just statistics 

    # Build entry as a single comma-separated record, one per line
   
    entry = date             + ',' +\
            user             + ',' +\
            FN               + ',' +\
            str(int(size))   + ',' +\
            str(compression) + ',' +\
            hit              + ',' +\
            str(reason)      + ',' +\
            str(round(comptime,4)) + ',' +\
            str(round(loadtime,4)) +\
            CR
           
    statfile.write(entry)
    statfile.close()
  except:
    log.critical('Warning: Writing of stat file failed')
1854# -----------------------------------------------------------------------------
1855
1856# FIXME: should take cachedir as an optional arg
1857#
1858def __cachestat(sortidx=4, period=-1, showuser=None, cachedir=None):
1859  """  List caching statistics.
1860
1861  USAGE:
1862    __cachestat(sortidx=4,period=-1,showuser=None,cachedir=None):
1863
1864      Generate statistics of caching efficiency.
1865      The parameter sortidx determines by what field lists are sorted.
1866      If the optional keyword period is set to -1,
1867      all available caching history is used.
1868      If it is 0 only the current month is used.
1869      Future versions will include more than one month....
1870      OMN 20/8/2000
1871  """
1872
1873  import os
1874  import os.path
1875  from string import split, rstrip, find
1876  from time import strptime, localtime, strftime, mktime, ctime
1877
1878  # sortidx = 4    # Index into Fields[1:]. What to sort by.
1879
1880  Fields = ['Name', 'Hits', 'Exec(s)', \
1881            'Cache(s)', 'Saved(s)', 'Gain(%)', 'Size']
1882  Widths = [25,7,9,9,9,9,13]
1883  #Types = ['s','d','d','d','d','.2f','d']
1884  Types = ['s','d','.2f','.2f','.2f','.2f','d'] 
1885
1886  Dictnames = ['Function', 'User']
1887
1888  if not cachedir:
1889    cachedir = checkdir(options['cachedir'])
1890
1891  SD = os.path.expanduser(cachedir)  # Expand ~ or ~user in pathname
1892
1893  if period == -1:  # Take all available stats
1894    SFILENAME = statsfile
1895  else:  # Only stats from current month 
1896       # MAKE THIS MORE GENERAL SO period > 0 counts several months backwards!
1897    TimeTuple = localtime(time())
1898    extension = strftime('%b%Y',TimeTuple)
1899    SFILENAME = statsfile+'.'+extension
1900
1901  DIRLIST = os.listdir(SD)
1902  SF = []
1903  for FN in DIRLIST:
1904    if find(FN,SFILENAME) >= 0:
1905      SF.append(FN)
1906
1907  blocksize = 15000000
1908  total_read = 0
1909  total_hits = 0
1910  total_discarded = 0
1911  firstday = mktime(strptime('2030','%Y'))
1912             # FIXME: strptime don't exist in WINDOWS ?
1913  lastday = 0
1914
1915  FuncDict = {}
1916  UserDict = {}
1917  for FN in SF:
1918    input = open(SD+FN,'r')
1919    log.critical('Reading file %s' % SD+FN)
1920
1921    while True:
1922      A = input.readlines(blocksize)
1923      if len(A) == 0: break
1924      total_read = total_read + len(A)
1925      for record in A:
1926        record = tuple(split(rstrip(record),','))
1927
1928        if len(record) == 9:
1929          timestamp = record[0]
1930       
1931          try:
1932            t = mktime(strptime(timestamp))
1933          except:
1934            total_discarded = total_discarded + 1         
1935            continue   
1936             
1937          if t > lastday:
1938            lastday = t
1939          if t < firstday:
1940            firstday = t
1941
1942          user     = record[1]
1943          my_F     = record[2]
1944
1945          # Strip hash-stamp off
1946          #
1947          i = find(my_F,'[')
1948          my_F = my_F[:i]
1949
1950          size        = float(record[3])
1951
          # Compression keyword can be Boolean
1953          if record[4] in ['True', '1']:
1954            compression = 1
1955          elif record[4] in ['False', '0']: 
1956            compression = 0
1957          else:
1958            log.critical('Unknown value of compression %s' % str(record[4]))
1959            log.critical(str(record))
1960            total_discarded = total_discarded + 1           
1961            continue
1962
1963          #compression = int(record[4]) # Can be Boolean
1964          hit         = int(record[5])
1965          reason      = int(record[6])   # Not used here   
1966          cputime     = float(record[7])
1967          loadtime    = float(record[8])
1968
1969          if hit:
1970            total_hits = total_hits + 1
1971            saving = cputime-loadtime
1972
1973            if cputime != 0:
1974              rel_saving = round(100.0*saving/cputime,2)
1975            else:
1976              #rel_saving = round(1.0*saving,2)
1977              rel_saving = 100.0 - round(1.0*saving,2)  # A bit of a hack
1978
1979            info = [1,cputime,loadtime,saving,rel_saving,size]
1980
1981            UpdateDict(UserDict,user,info)
1982            UpdateDict(FuncDict,my_F,info)
1983          else:
1984            pass #Stats on recomputations and their reasons could go in here
1985             
1986        else:
1987          total_discarded = total_discarded + 1
1988
1989    input.close()
1990
1991  # Compute averages of all sums and write list
1992  #
1993
1994  if total_read == 0:
1995    printline(Widths,'=')
1996    log.critical('CACHING STATISTICS: No valid records read')
1997    printline(Widths,'=')
1998    return
1999
2000  log.critical()
2001  printline(Widths,'=')
2002  log.critical('CACHING STATISTICS: '+ctime(firstday)+' to '+ctime(lastday))
2003  printline(Widths,'=')
2004  log.critical('  Total number of valid records %d' % total_read)
2005  log.critical('  Total number of discarded records %d' % total_discarded)
2006  log.critical('  Total number of hits %d' % total_hits)
2007  log.critical()
2008
2009  log.critical('  Fields %s are averaged over number of hits' % Fields[2:])
2010  log.critical('  Time is measured in seconds and size in bytes')
2011  log.critical('  Tables are sorted by %s' % Fields[1:][sortidx])
2012
2013  if showuser:
2014    Dictionaries = [FuncDict, UserDict]
2015  else:
2016    Dictionaries = [FuncDict]
2017
2018  i = 0
2019  for Dict in Dictionaries:
2020    for key in Dict.keys():
2021      rec = Dict[key]
2022      for n in range(len(rec)):
2023        if n > 0:
2024          rec[n] = round(1.0*rec[n]/rec[0],2)
2025      Dict[key] = rec
2026
2027    # Sort and output
2028    #
2029    keylist = SortDict(Dict,sortidx)
2030
2031    # Write Header
2032    #
2033    log.critical()
2034    printline(Widths,'-')
2035    n = 0
2036    for s in Fields:
2037      if s == Fields[0]:  # Left justify
2038        s = Dictnames[i] + ' ' + s; i=i+1
2039        #exec "print '%-" + str(Widths[n]) + "s'%s,"; n=n+1
2040        log.critical('%-*s' % (Widths[n], s))
2041        n += 1
2042      else:
2043        #exec "print '%" + str(Widths[n]) + "s'%s,"; n=n+1
2044        log.critical('%*s' % (Widths[n], s))
2045        n += 1
2046    log.critical()
2047    printline(Widths,'-')
2048
2049    # Output Values
2050    #
2051    for key in keylist:
2052      rec = Dict[key]
2053      n = 0
2054      if len(key) > Widths[n]: key = key[:Widths[n]-3] + '...'
2055      #exec "print '%-" + str(Widths[n]) + Types[n]+"'%key,";n=n+1
2056      log.critical('%-*s' % (Widths[n], str(key)))
2057      n += 1
2058      for val in rec:
2059        #exec "print '%" + str(Widths[n]) + Types[n]+"'%val,"; n=n+1
2060        log.critical('%*s' % (Widths[n], str(key)))
2061        n += 1
2062      log.critical()
2063    log.critical()
2064
2065#==============================================================================
2066# Auxiliary stats functions
2067#==============================================================================
2068
def UpdateDict(Dict, key, info):
  """Update dictionary by adding new values to existing.

  USAGE:
    UpdateDict(Dict,key,info)

  DESCRIPTION:
    If key is already present, each field of info is added elementwise
    to the stored record; otherwise a copy of info is stored.
    Dict is mutated in place and also returned for convenience.
  """

  if key in Dict:  # 'in' operator instead of has_key (removed in Python 3)
    dinfo = Dict[key]
    for n in range(len(dinfo)):
      dinfo[n] = info[n] + dinfo[n]
  else:
    dinfo = info[:]  # Make a copy of info list

  Dict[key] = dinfo
  return Dict
2085
2086# -----------------------------------------------------------------------------
2087
def SortDict(Dict, sortidx=0):
  """Sort dictionary

  USAGE:
    SortDict(Dict,sortidx):

  DESCRIPTION:
    Sort dictionary of lists according field number 'sortidx'.
    Returns the keys ordered ascending by the chosen field
    (ties are broken by the key itself).

  RAISES:
    IndexError if sortidx exceeds the length of any record.
  """

  sortlist = []
  keylist = list(Dict.keys())
  for key in keylist:
    rec = Dict[key]
    if not isinstance(rec, (list, tuple)):  # Scalar record - wrap it
      rec = [rec]

    if sortidx > len(rec)-1:
      msg = 'ERROR: Sorting index too large, sortidx = %s' % str(sortidx)
      raise IndexError(msg)  # Parenthesised raise: valid in Python 2 and 3

    sortlist.append(rec[sortidx])

  # Sort (value, key) pairs so ties are broken by key, then keep the keys.
  # Replaces the Python-2-only map(None, ...) pairing idiom.
  A = sorted(zip(sortlist, keylist))
  keylist = [pair[1] for pair in A]

  return keylist
2119
2120# -----------------------------------------------------------------------------
2121
def printline(Widths, char):
  """Print textline in fixed field.

  USAGE:
    printline(Widths,char)
  """

  # One run of 'char' per field width; every field after the first
  # is followed by one extra separator character.
  parts = []
  for n, width in enumerate(Widths):
    parts.append(width * char)
    if n > 0:
      parts.append(char)

  log.critical(''.join(parts))
2136
2137#==============================================================================
2138# Messages
2139#==============================================================================
2140
def msg1(funcname, args, kwargs, reason):
  """Message 1

  USAGE:
    msg1(funcname, args, kwargs, reason):

  DESCRIPTION:
    Report that funcname is being evaluated (no cached result used),
    listing its arguments (msg7) and the reason for recomputation (msg8).
  """

  # Removed unused local 'import string'

  print_header_box('Evaluating function %s' %funcname)
 
  msg7(args, kwargs)
  msg8(reason) 
 
  print_footer()
2156 
2157# -----------------------------------------------------------------------------
2158
def msg2(funcname, args, kwargs, comptime, reason):
  """Message 2

  USAGE:
    msg2(funcname, args, kwargs, comptime, reason)

  DESCRIPTION:
    Report that the result of funcname is being stored in the cache,
    including the CPU time the computation took.
  """

  print_header_box('Caching statistics (storing)') 
 
  msg6(funcname,args,kwargs)
  msg8(reason)

  # str.ljust instead of string.ljust (string module fns removed in Python 3)
  log.critical('| CPU time:'.ljust(textwidth1) +
               str(round(comptime,2)) + ' seconds')
2181
2182# -----------------------------------------------------------------------------
2183
def msg3(savetime, CD, FN, deps, compression):
  """Message 3

  USAGE:
    msg3(savetime, CD, FN, deps, compression)

  DESCRIPTION:
    Report the estimated loading time for a stored result, then print
    dependency statistics (msg5).
  """

  # str.ljust instead of string.ljust (string module fns removed in Python 3)
  log.critical('| Loading time:'.ljust(textwidth1) +
               str(round(savetime,2)) + ' seconds (estimated)')
  msg5(CD,FN,deps,compression)
2195
2196# -----------------------------------------------------------------------------
2197
def msg4(funcname, args, kwargs, deps, comptime, loadtime, CD, FN, compression):
  """Message 4

  USAGE:
    msg4(funcname, args, kwargs, deps, comptime, loadtime, CD, FN, compression)

  DESCRIPTION:
    Report that a result was retrieved from the cache, with timing
    information (computation vs loading time and net saving), then
    print dependency statistics (msg5).
  """

  print_header_box('Caching statistics (retrieving)')
 
  msg6(funcname,args,kwargs)
  # str.ljust instead of string.ljust (string module fns removed in Python 3)
  log.critical('| CPU time:'.ljust(textwidth1) +
               str(round(comptime,2)) + ' seconds')
  log.critical('| Loading time:'.ljust(textwidth1) +
               str(round(loadtime,2)) + ' seconds')
  log.critical('| Time saved:'.ljust(textwidth1) +
               str(round(comptime-loadtime,2)) + ' seconds')
  msg5(CD,FN,deps,compression)
2217
2218# -----------------------------------------------------------------------------
2219
def msg5(CD, FN, deps, compression):
  """Message 5

  USAGE:
    msg5(CD, FN, deps, compression)

  DESCRIPTION:
   Print dependency stats. Used by msg3 and msg4
  """

  import os, time

  log.critical('|')
  # str methods instead of string.ljust/rjust (removed in Python 3)
  log.critical('| Caching dir: '.ljust(textwidth1) + CD)

  if compression:
    suffix = '.z'
    bytetext = 'bytes, compressed'
  else:
    suffix = ''
    bytetext = 'bytes'

  for file_type in file_types:
    file_name = FN + '_' + file_type + suffix
    stats = os.stat(CD+file_name)
    log.critical(('| ' + file_type + ' file: ').ljust(textwidth1) +
                 file_name + '('+ str(stats[6]) + ' ' + bytetext + ')')

  log.critical('|')
  if len(deps) > 0:
    log.critical('| Dependencies:  ')

    # Collect name, timestamp and size columns for each dependency
    dlist = []
    tlist = []
    slist = []
    for d in deps.keys():
      stats = deps[d]
      dlist.append(d)
      tlist.append(time.ctime(stats[1]))
      slist.append(str(stats[0]))

    # Column widths for alignment
    maxd = max(len(d) for d in dlist)
    maxt = max(len(t) for t in tlist)
    maxs = max(len(s) for s in slist)

    for n in range(len(dlist)):
      d = (dlist[n]+':').ljust(maxd+1)
      t = tlist[n].ljust(maxt)
      s = slist[n].rjust(maxs)

      log.critical('| %s %s %s bytes' % (d, t, s))
  else:
    log.critical('| No dependencies')
  print_footer()
2279
2280# -----------------------------------------------------------------------------
2281
def msg6(funcname, args, kwargs):
  """Message 6

  USAGE:
    msg6(funcname, args, kwargs)

  DESCRIPTION:
    Print the function name, then its arguments (via msg7).
  """

  # str.ljust instead of string.ljust (string module fns removed in Python 3)
  log.critical('| Function:'.ljust(textwidth1) + funcname)

  msg7(args, kwargs)
2293 
2294# -----------------------------------------------------------------------------   
2295
def msg7(args, kwargs):
  """Message 7
 
  USAGE:
    msg7(args, kwargs):

  DESCRIPTION:
    Print positional and keyword arguments (truncated via mkargstr),
    or '| No arguments' if both are empty.
  """
 
  # str.ljust instead of string.ljust (string module fns removed in Python 3)
  args_present = 0 
  if args:
    if len(args) == 1:
      log.critical('| Argument:'.ljust(textwidth1) +
                   mkargstr(args[0], textwidth2))
    else:
      log.critical('| Arguments:'.ljust(textwidth1) +
                   mkargstr(args, textwidth2))
    args_present = 1
           
  if kwargs:
    if len(kwargs) == 1:
      log.critical('| Keyword Arg:'.ljust(textwidth1) +
                   mkargstr(kwargs, textwidth2))
    else:
      log.critical('| Keyword Args:'.ljust(textwidth1) +
                   mkargstr(kwargs, textwidth2))
    args_present = 1

  if not args_present:               
    log.critical('| No arguments')    # Default if no args or kwargs present
2326
2327# -----------------------------------------------------------------------------
2328
def msg8(reason):
  """Message 8
 
  USAGE:
    msg8(reason):

  DESCRIPTION:
    Print the human readable text for a recomputation reason code,
    or 'Unknown' if the code is not in Reason_msg.
  """
 
  # Narrowed from bare 'except:': only a failed lookup means 'Unknown'
  try:
    R = Reason_msg[reason]
  except (KeyError, IndexError, TypeError):
    R = 'Unknown' 
 
  # str.ljust instead of string.ljust (string module fns removed in Python 3)
  log.critical('| Reason:'.ljust(textwidth1) + R)
2344   
2345# -----------------------------------------------------------------------------
2346
def print_header_box(line):
  """Print line in a nice box.
 
  USAGE:
    print_header_box(line)

  """
  global textwidth3

  import time

  # Prefix the message with a timestamp
  stamped = time.ctime(time.time()) + '. ' + line

  N = len(stamped) + 1
  border = '+' + N * '-' + CR

  log.critical(border + '| ' + stamped + CR + border)

  # Remember the box width so print_footer can match it
  textwidth3 = N
2367
2368# -----------------------------------------------------------------------------
2369   
def print_footer():
  """Print line same width as that of print_header_box.
  """

  # Width was stored in textwidth3 by the last print_header_box call
  log.critical('+' + '-' * textwidth3 + CR)
2378     
2379# -----------------------------------------------------------------------------
2380
def mkargstr(args, textwidth, argstr = '', level=0):
  """ Generate a string containing first textwidth characters of arguments.

  USAGE:
    mkargstr(args, textwidth, argstr = '', level=0)

  DESCRIPTION:
    Exactly the same as str(args) possibly followed by truncation,
    but faster if args is huge.

    Fixes: empty containers previously produced mangled output
    (e.g. '[]' came out as ']' because the trailing-comma strip ate
    the opening bracket).  isinstance checks replace the Python-2-only
    types.XxxType comparisons.
  """

  if level > 10:
      # Protect against circular structures
      return '...'
 
  WasTruncated = 0

  if not isinstance(args, (tuple, list, dict)):
    if isinstance(args, str):
      argstr = argstr + "'"+str(args)+"'"
    else:
      # Truncate large numeric arrays before using str()
      if isinstance(args, num.ndarray):
#        if len(args.flat) > textwidth: 
#        Changed by Duncan and Nick 21/2/07 .flat has problems with
#        non-contigous arrays and ravel is equal to .flat except it
#        can work with non-contiguous  arrays
        if len(num.ravel(args)) > textwidth:
          args = 'Array: ' + str(args.shape)

      argstr = argstr + str(args)
  else:
    if isinstance(args, dict):
      argstr = argstr + "{"
      for key in args.keys():
        argstr = argstr + mkargstr(key, textwidth, level=level+1) + ": " + \
                 mkargstr(args[key], textwidth, level=level+1) + ", "
        if len(argstr) > textwidth:
          WasTruncated = 1
          break
      if args:
        argstr = argstr[:-2]  # Strip trailing comma only if items were added
      argstr = argstr + "}"

    else:
      if isinstance(args, tuple):
        lc = '('
        rc = ')'
      else:
        lc = '['
        rc = ']'
      argstr = argstr + lc
      for arg in args:
        argstr = argstr + mkargstr(arg, textwidth, level=level+1) + ', '
        if len(argstr) > textwidth:
          WasTruncated = 1
          break

      if args:
        # Strip off trailing comma and space unless singleton tuple,
        # and only if the container actually contributed items
        if isinstance(args, tuple) and len(args) == 1:
          argstr = argstr[:-1]   
        else:
          argstr = argstr[:-2]
      argstr = argstr + rc

  if len(argstr) > textwidth:
    WasTruncated = 1

  if WasTruncated:
    argstr = argstr[:textwidth]+'...'
  return(argstr)
2454
2455# -----------------------------------------------------------------------------
2456
def test_OK(msg):
  """Print OK msg if test is OK.
 
  USAGE
    test_OK(message)
  """

  # str.ljust instead of string.ljust (string module fns removed in Python 3)
  log.critical(msg.ljust(textwidth4) + ' - OK' )
2469 
2470# -----------------------------------------------------------------------------
2471
def test_error(msg):
  """Print error if test fails.
 
  USAGE
    test_error(message)
  """
 
  # Emit the error report line by line, then two blank log entries
  for line in ('ERROR (caching.test): %s' % msg,
               'Please send this code example and output to ',
               'Ole.Nielsen@anu.edu.au'):
    log.critical(line)
  log.critical()
  log.critical()
 
  raise StandardError
2486
2487#-------------------------------------------------------------
if __name__ == "__main__":
  pass  # Module has no standalone behaviour; import it and use cache()
Note: See TracBrowser for help on using the repository browser.