Timestamp: Jun 18, 2010, 4:43:10 PM
Author: hudson
Message: Refactorings to increase code quality; fixed missing log import from sww_interrogate.
File: 1 edited

  • trunk/anuga_core/source/anuga/file/csv_file.py

--- trunk/anuga_core/source/anuga/file/csv_file.py (r7854)
+++ trunk/anuga_core/source/anuga/file/csv_file.py (r7858)

@@ -11,6 +11,8 @@
 """
 
+
 import csv
 import numpy as num
+import anuga.utilities.log as log
 
 
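The added import resolves the module-level `log` name used by the
log.critical() calls later in this file. A minimal sketch of that usage,
assuming only the critical() call that the diff itself shows:

    import anuga.utilities.log as log

    verbose = True
    file_header = 'time,stage\n'
    if verbose:
        log.critical('read file header %s' % file_header)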
     
@@ -168,13 +170,13 @@
     if completed:
         try:
-            file = str(kwargs['file_name'])
+            file_name = str(kwargs['file_name'])
         except:
-            raise 'kwargs must have file_name'
+            raise Exception('kwargs must have file_name')
     else:
         # write temp file in output directory
         try:
-            file = str(kwargs['output_dir']) + 'detail_temp.csv'
+            file_name = str(kwargs['output_dir']) + 'detail_temp.csv'
         except:
-            raise 'kwargs must have output_dir'
+            raise Exception('kwargs must have output_dir')
 
     # extracts the header info and the new line info
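Two things are fixed in this hunk: `file` shadowed the Python 2 builtin of
the same name, and raising a bare string is a string exception, which
Python 2.6 and later reject with a TypeError. A minimal sketch of the
corrected pattern; the function name and the narrower KeyError catch are my
assumptions, not the file's code:

    def get_file_name(**kwargs):
        # raise 'message' fails with TypeError on modern Python;
        # raising an Exception instance works everywhere.
        try:
            return str(kwargs['file_name'])
        except KeyError:
            raise Exception('kwargs must have file_name')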
     
@@ -199,14 +201,15 @@
     # try to open!
     try:
-        fid = open(file, 'r')
+        fid = open(file_name, 'r')
         file_header = fid.readline()
         fid.close()
         if verbose: log.critical('read file header %s' % file_header)
-    except:
-        msg = 'try to create new file: %s' % file
-        if verbose: log.critical(msg)
+    except Exception:
+        msg = 'try to create new file: %s' % file_name
+        if verbose:
+            log.critical(msg)
         #tries to open file, maybe directory is bad
         try:
-            fid = open(file, 'w')
+            fid = open(file_name, 'w')
             fid.write(header)
             fid.close()
     
@@ -218,5 +221,5 @@
     # if header is same or this is a new file
     if file_header == str(header):
-        fid = open(file, 'a')
+        fid = open(file_name, 'a')
         fid.write(line)
         fid.close()
     
@@ -225,6 +228,6 @@
         # if header is different and has completed will append info to
         # end of details_temp.cvs file in output directory
-        file = str(kwargs['output_dir']) + 'detail_temp.csv'
-        fid = open(file, 'a')
+        file_name = str(kwargs['output_dir']) + 'detail_temp.csv'
+        fid = open(file_name, 'a')
         fid.write(header)
         fid.write(line)
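The three hunks above implement an append-or-create pattern: probe the file
for its header line, create it with the header if the probe fails, then
append when the headers match. A simplified sketch of the same idea; using
os.path.exists in place of the try/except probe is my substitution, and
append_with_header is a hypothetical helper, not a function in csv_file.py:

    import os

    def append_with_header(file_name, header, line):
        # Create the file with its header row on first use.
        if not os.path.exists(file_name):
            with open(file_name, 'w') as fid:
                fid.write(header)
        # Then append the new data line.
        with open(file_name, 'a') as fid:
            fid.write(line)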
     
@@ -244,6 +247,5 @@
 
 def load_csv_as_building_polygons(file_name,
-                          floor_height=3,
-                          clipping_polygons=None):
+                          floor_height=3):
     """
     Convert CSV files of the form:
     
@@ -285,7 +287,4 @@
 
 
-##
-# @brief Convert CSV file into a dictionary of polygons and associated values.
-# @param filename The path to the file to read, value_name name for the 4th column
 def load_csv_as_polygons(file_name,
                  value_name='value',
     
@@ -338,5 +337,5 @@
 
     msg = 'Did not find expected column header: northing'
-    assert 'northing' in X.keys(), northing
+    assert 'northing' in X.keys(), msg
 
     msg = 'Did not find expected column header: northing'
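The old assert named the undefined variable `northing` as its message, so a
failing assertion died with a NameError instead of the intended diagnostic.
A minimal sketch of the fixed behaviour, using a toy X dict of my own:

    X = {'easting': [308500.0], 'id': ['P1']}   # no 'northing' column
    msg = 'Did not find expected column header: northing'
    try:
        assert 'northing' in X.keys(), msg
    except AssertionError as e:
        print(e)   # prints the intended message rather than a NameError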
     
@@ -357,16 +356,16 @@
     past_ids = {}
     last_id = None
-    for i, id in enumerate(X['id']):
+    for i, poly_id in enumerate(X['id']):
 
         # Check for duplicate polygons
-        if id in past_ids:
+        if poly_id in past_ids:
             msg = 'Polygon %s was duplicated in line %d' % (id, i)
             raise Exception, msg
 
-        if id not in polygons:
+        if poly_id not in polygons:
             # Start new polygon
-            polygons[id] = []
+            polygons[poly_id] = []
             if values is not None:
-                values[id] = X[value_name][i]
+                values[poly_id] = X[value_name][i]
 
             # Keep track of previous polygon ids
     
@@ -385,18 +384,19 @@
 
         if exclude is True:
-            excluded_polygons[id]=True
-
-        polygons[id].append(point)
+            excluded_polygons[poly_id]=True
+
+        polygons[poly_id].append(point)
 
         # Check that value is the same across each polygon
         msg = 'Values must be the same across each polygon.'
-        msg += 'I got %s in line %d but it should have been %s' % (X[value_name][i], i, values[id])
-        assert values[id] == X[value_name][i], msg
-
-        last_id = id
+        msg += 'I got %s in line %d but it should have been %s' % \
+                    (X[value_name][i], i, values[poly_id])
+        assert values[poly_id] == X[value_name][i], msg
+
+        last_id = poly_id
 
     # Weed out polygons that were not wholly inside clipping polygons
-    for id in excluded_polygons:
-        del polygons[id]
+    for poly_id in excluded_polygons:
+        del polygons[poly_id]
 
     return polygons, values
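The rename from `id` to `poly_id` stops the loop variable from shadowing the
id() builtin for the rest of the function. A self-contained sketch of the
grouping logic the loop implements, under my own simplifications (plain
lists in, no value or clipping handling); group_points_by_polygon is a
hypothetical name, not a function in csv_file.py:

    def group_points_by_polygon(ids, points):
        """Collect points into per-polygon lists, requiring each
        polygon's rows to be contiguous in the input."""
        polygons = {}
        past_ids = set()
        last_id = None
        for i, poly_id in enumerate(ids):
            # A polygon id that reappears after others is a duplicate.
            if poly_id in past_ids:
                raise Exception('Polygon %s was duplicated in line %d'
                                % (poly_id, i))
            if poly_id not in polygons:
                polygons[poly_id] = []        # start a new polygon
                if last_id is not None:
                    past_ids.add(last_id)     # remember finished polygons
            polygons[poly_id].append(points[i])
            last_id = poly_id
        return polygons

    # e.g. group_points_by_polygon(['P1', 'P1', 'P2'],
    #                              [(0, 0), (1, 0), (5, 5)])
    # -> {'P1': [(0, 0), (1, 0)], 'P2': [(5, 5)]}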