Changeset 7743


Timestamp:  May 24, 2010, 5:43:59 PM (14 years ago)
Author:     hudson
Message:    Removed more modules from data_handler: code to do with building destruction.
Location:   anuga_core/source/anuga
Files:      3 added, 1 deleted, 10 edited

  • anuga_core/source/anuga/damage_modelling/inundation_damage.py  (r7317 → r7743)

      31   31
      32   32    from anuga.utilities.numerical_tools import ensure_numeric
      33         - from anuga.shallow_water.data_manager import Exposure_csv
           33    + from exposure import Exposure
      34   34    from anuga.abstract_2d_finite_volumes.util import file_function
      35   35    from anuga.geospatial_data.geospatial_data import ensure_absolute
      …
      73   73
      74   74        for exposure_file_in in exposure_files_in:
      75         -         csv = Exposure_csv(exposure_file_in,
           75    +         csv = Exposure(exposure_file_in,
      76   76                               title_check_list=[SHORE_DIST_LABEL,WALL_TYPE_LABEL,
      77   77                                                 STR_VALUE_LABEL,CONT_VALUE_LABEL])
      …
     118  118        """
     119  119
     120         -     csv = Exposure_csv(exposure_file_in)
          120    +     csv = Exposure(exposure_file_in)
     121  121        geospatial = csv.get_location()
     122  122        max_depths, max_momentums = calc_max_depth_and_momentum(sww_base_name,
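
    The only functional change in this file is the switch from the old Exposure_csv
    class (previously in data_manager) to the relocated Exposure class.  A minimal
    caller-migration sketch follows; the bare "from exposure import Exposure" mirrors
    the import added above, and the CSV content, file handling, and column labels are
    placeholders for this sketch only, not values copied from inundation_damage.py.

        import os
        import tempfile

        # Placeholder exposure data for this sketch only (column labels follow the
        # CSV used by the removed tests further down this changeset).
        handle, exposure_file_in = tempfile.mkstemp(suffix='.csv')
        os.close(handle)
        f = open(exposure_file_in, 'w')
        f.write("LATITUDE, LONGITUDE, SHORE_DIST, WALLS, STR_VALUE, C_VALUE\n"
                "-34.5, 150.9, 20, Timber, 199770, 130000\n")
        f.close()

        # Before r7743:
        #   from anuga.shallow_water.data_manager import Exposure_csv
        #   csv = Exposure_csv(exposure_file_in, title_check_list=[...])
        # From r7743 on, only the import and the class name change:
        from exposure import Exposure

        csv = Exposure(exposure_file_in,
                       title_check_list=['SHORE_DIST', 'WALLS',
                                         'STR_VALUE', 'C_VALUE'])
        print(csv.get_column('WALLS'))    # column values come back as strings

        os.remove(exposure_file_in)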
  • anuga_core/source/anuga/damage_modelling/test_inundation_damage.py  (r7735 → r7743)

     305  305            inundation_damage(sww_file, csv_file, verbose=False)
     306  306
     307         -         csv_handle = Exposure_csv(csv_file)
          307    +         csv_handle = Exposure(csv_file)
     308  308            struct_loss = csv_handle.get_column(EventDamageModel.STRUCT_LOSS_TITLE)
     309  309            #print "struct_loss",struct_loss
      …
     392  392
     393  393            # Test one file
     394         -         csv_handle = Exposure_csv(csv_file[:-4]+marker+extension)
          394    +         csv_handle = Exposure(csv_file[:-4]+marker+extension)
     395  395            struct_loss = csv_handle.get_column(EventDamageModel.STRUCT_LOSS_TITLE)
     396  396            #print "struct_loss",struct_loss
      …
     403  403
     404  404            # Test another file
     405         -         csv_handle = Exposure_csv(csv_fileII[:-4]+marker+extension)
          405    +         csv_handle = Exposure(csv_fileII[:-4]+marker+extension)
     406  406            struct_loss = csv_handle.get_column(EventDamageModel.STRUCT_LOSS_TITLE)
     407  407            #print "struct_loss",struct_loss
  • anuga_core/source/anuga/file_conversion/ferret2sww.py  (r7742 → r7743)

      14   14
      15   15    #local modules
      16         - from file_conversion import get_min_max_indexes
           16    + from anuga.file_conversion.file_conversion import get_min_max_indices
      17   17
      18   18
      …
      97   97        longitudes = file_h.variables[dim_h_longitude]
      98   98
      99         -     kmin, kmax, lmin, lmax = get_min_max_indexes(latitudes[:],
           99    +     kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:],
     100  100                                                       longitudes[:],
     101  101                                                       minlat, maxlat,
      …
     153  153            jmax = int(jmax)
     154  154
     155         -     kmin, kmax, lmin, lmax = get_min_max_indexes(latitudes[:],
          155    +     kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:],
     156  156                                                       longitudes[:],
     157  157                                                       minlat, maxlat,
  • anuga_core/source/anuga/file_conversion/file_conversion.py  (r7742 → r7743)

     416  416        latitudes.reverse()
     417  417
     418         -     kmin, kmax, lmin, lmax = get_min_max_indexes(latitudes[:],longitudes[:],
          418    +     kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:],longitudes[:],
     419  419                                                       minlat=minlat, maxlat=maxlat,
     420  420                                                       minlon=minlon, maxlon=maxlon)
      …
     890  890    # @param maxlon Maximum longitude of specified area.
     891  891    # @return Tuple (lat_min_index, lat_max_index, lon_min_index, lon_max_index)
     892         - def get_min_max_indexes(latitudes_ref, longitudes_ref,
          892    + def get_min_max_indices(latitudes_ref, longitudes_ref,
     893  893                             minlat=None, maxlat=None,
     894  894                             minlon=None, maxlon=None):
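
    Besides the callers being updated, the helper itself is renamed here from
    get_min_max_indexes to get_min_max_indices.  Its contract, per the doc comment in
    the hunk above, is: given reference latitude and longitude arrays and an optional
    bounding box, return (lat_min_index, lat_max_index, lon_min_index, lon_max_index).
    The sketch below is a standalone illustration of that idea only -- it is not the
    ANUGA implementation, which may, for example, widen the selection so the requested
    box is fully bracketed.

        def get_min_max_indices_sketch(latitudes_ref, longitudes_ref,
                                       minlat=None, maxlat=None,
                                       minlon=None, maxlon=None):
            """Illustrative only: index bounds of the points inside the box."""
            def bounds(values, lo, hi):
                inside = [i for i, v in enumerate(values)
                          if (lo is None or v >= lo) and (hi is None or v <= hi)]
                if not inside:             # nothing selected: keep the whole axis
                    return 0, len(values)
                return min(inside), max(inside) + 1    # half-open slice bounds

            kmin, kmax = bounds(latitudes_ref, minlat, maxlat)
            lmin, lmax = bounds(longitudes_ref, minlon, maxlon)
            return kmin, kmax, lmin, lmax

        # Example: clip a 5 x 4 lat/lon grid to lat in [0.5, 2.5], lon in [5, 25].
        lats = [-1.0, 0.0, 1.0, 2.0, 3.0]
        lons = [0.0, 10.0, 20.0, 30.0]
        print(get_min_max_indices_sketch(lats, lons, 0.5, 2.5, 5, 25))   # (2, 4, 1, 3)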
  • anuga_core/source/anuga/file_conversion/test_file_conversion.py  (r7742 → r7743)

    1597 1597            # k - lat
    1598 1598            # l - lon
    1599         -         kmin, kmax, lmin, lmax = get_min_max_indexes(
         1599    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1600 1600                latitudes,longitudes,
    1601 1601                -10,4,-10,31)
      …
    1612 1612
    1613 1613            ## 2nd test
    1614         -         kmin, kmax, lmin, lmax = get_min_max_indexes(
         1614    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1615 1615                latitudes,longitudes,
    1616 1616                0.5,2.5,5,25)
      …
    1627 1627
    1628 1628            ## 3rd test
    1629         -         kmin, kmax, lmin, lmax = get_min_max_indexes(\
         1629    +         kmin, kmax, lmin, lmax = get_min_max_indices(\
    1630 1630                latitudes,
    1631 1631                longitudes,
      …
    1644 1644
    1645 1645            ## 4th test
    1646         -         kmin, kmax, lmin, lmax = get_min_max_indexes(
         1646    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1647 1647                latitudes,longitudes,
    1648 1648                                                          -0.1,1.9,-2,17)
      …
    1658 1658                             'failed')
    1659 1659            ## 5th test
    1660         -         kmin, kmax, lmin, lmax = get_min_max_indexes(
         1660    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1661 1661                latitudes,longitudes,
    1662 1662                0.1,1.9,2,17)
      …
    1674 1674            ## 6th test
    1675 1675
    1676         -         kmin, kmax, lmin, lmax = get_min_max_indexes(
         1676    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1677 1677                latitudes,longitudes,
    1678 1678                1.5,4,18,32)
      …
    1691 1691            ## 7th test
    1692 1692            m2d = num.array([[0,1,2,3],[4,5,6,7],[8,9,10,11],[12,13,14,15]], num.int)    #array default#
    1693         -         kmin, kmax, lmin, lmax = get_min_max_indexes(
         1693    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1694 1694                latitudes,longitudes,
    1695 1695                1.5,1.5,15,15)
      …
    1715 1715            # k - lat
    1716 1716            # l - lon
    1717         -         kmin, kmax, lmin, lmax = get_min_max_indexes(
         1717    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1718 1718                latitudes,longitudes,
    1719 1719                -10,4,-10,31)
      …
    1730 1730
    1731 1731            ## 3rd test
    1732         -         kmin, kmax, lmin, lmax = get_min_max_indexes(\
         1732    +         kmin, kmax, lmin, lmax = get_min_max_indices(\
    1733 1733                latitudes,
    1734 1734                longitudes,
      …
    1753 1753            # k - lat
    1754 1754            # l - lon
    1755         -         kmin, kmax, lmin, lmax = get_min_max_indexes(
         1755    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1756 1756                latitudes,longitudes,
    1757 1757                -37,-27,147,149.5)
      …
    1782 1782            # k - lat
    1783 1783            # l - lon
    1784         -         kmin, kmax, lmin, lmax = get_min_max_indexes(
         1784    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1785 1785                latitudes,longitudes,
    1786 1786                -43,-37,148.5,149.5)
      …
    1806 1806            # k - lat
    1807 1807            # l - lon
    1808         -         kmin, kmax, lmin, lmax = get_min_max_indexes(
         1808    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1809 1809                latitudes,longitudes)
    1810 1810
  • anuga_core/source/anuga/shallow_water/data_manager.py  (r7742 → r7743)

      54   54    import os, sys
      55   55    import csv
      56         - import exceptions
      57   56    import string
      58   57    import shutil
      …
      80   79    from anuga.utilities.numerical_tools import ensure_numeric,  mean
      81   80    from anuga.caching.caching import myhash
      82         - from anuga.utilities.anuga_exceptions import ANUGAError
      83   81    from anuga.shallow_water import Domain
      84   82    from anuga.abstract_2d_finite_volumes.pmesh2domain import \
      …
      98   96    from sww_file import Read_sww, Write_sww
      99   97
           98    + from anuga.anuga_exceptions import DataMissingValuesError, \
           99    +                 DataFileNotOpenError, DataTimeError, DataDomainError, \
          100    +                 NewQuantity
          101    +
     100  102
     101  103    # Default block size for sww2dem()
     102  104    DEFAULT_BLOCK_SIZE = 10000
     103         -
     104         - ######
     105         - # Exception classes
     106         - ######
     107         -
     108         - class TitleValueError(exceptions.Exception): pass
     109         - class DataMissingValuesError(exceptions.Exception): pass
     110         - class DataFileNotOpenError(exceptions.Exception): pass
     111         - class DataTimeError(exceptions.Exception): pass
     112         - class DataDomainError(exceptions.Exception): pass
     113         - class NewQuantity(exceptions.Exception): pass
     114  105
     115  106
      …
     124  115
     125  116
    126 ##
    127 # @brief Class for National Exposure Database storage (NEXIS).
    128 
    129 LAT_TITLE = 'LATITUDE'
    130 LONG_TITLE = 'LONGITUDE'
    131 X_TITLE = 'x'
    132 Y_TITLE = 'y'
    133 
    134 class Exposure_csv:
    135 
    136     ##
    137     # @brief Instantiate this instance.
    138     # @param file_name Name of underlying data file.
    139     # @param latitude_title ??
    140     # @param longitude_title ??
    141     # @param is_x_y_locations ??
    142     # @param x_title ??
    143     # @param y_title ??
    144     # @param refine_polygon ??
    145     # @param title_check_list ??
    146     def __init__(self,file_name, latitude_title=LAT_TITLE,
    147                  longitude_title=LONG_TITLE, is_x_y_locations=None,
    148                  x_title=X_TITLE, y_title=Y_TITLE,
    149                  refine_polygon=None, title_check_list=None):
    150         """
    151         This class is for handling the exposure csv file.
    152         It reads the file in and converts the lats and longs to a geospatial
    153         data object.
    154         Use the methods to read and write columns.
    155 
    156         The format of the csv files it reads is;
    157            The first row is a title row.
    158            comma's are the delimiters
    159            each column is a 'set' of data
    160 
    161         Feel free to use/expand it to read other csv files.
    162 
    163         It is not for adding and deleting rows
    164 
    165         Can geospatial handle string attributes? It's not made for them.
    166         Currently it can't load and save string att's.
    167 
    168         So just use geospatial to hold the x, y and georef? Bad, since
    169         different att's are in diferent structures.  Not so bad, the info
    170         to write if the .csv file is saved is in attribute_dic
    171 
    172         The location info is in the geospatial attribute.
    173         """
    174 
    175         self._file_name = file_name
    176         self._geospatial = None #
    177 
    178         # self._attribute_dic is a dictionary.
    179         #The keys are the column titles.
    180         #The values are lists of column data
    181 
    182         # self._title_index_dic is a dictionary.
    183         #The keys are the column titles.
    184         #The values are the index positions of file columns.
    185         self._attribute_dic, self._title_index_dic = \
    186             load_csv_as_dict(self._file_name, \
    187             title_check_list=title_check_list)
    188         try:
    189             #Have code here that handles caps or lower
    190             lats = self._attribute_dic[latitude_title]
    191             longs = self._attribute_dic[longitude_title]
    192         except KeyError:
    193             # maybe a warning..
    194             #Let's see if this works..
    195             if False != is_x_y_locations:
    196                 is_x_y_locations = True
    197             pass
    198         else:
    199             self._geospatial = Geospatial_data(latitudes=lats,
    200                                                longitudes=longs)
    201 
    202         if is_x_y_locations is True:
    203             if self._geospatial is not None:
    204                 pass #fixme throw an error
    205             try:
    206                 xs = self._attribute_dic[x_title]
    207                 ys = self._attribute_dic[y_title]
    208                 points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
    209             except KeyError:
    210                 # maybe a warning..
    211                 msg = "Could not find location information."
    212                 raise TitleValueError, msg
    213             else:
    214                 self._geospatial = Geospatial_data(data_points=points)
    215 
    216         # create a list of points that are in the refining_polygon
    217         # described by a list of indexes representing the points
    218 
    219     ##
    220     # @brief Create a comparison method.
    221     # @param self This object.
    222     # @param other The other object.
    223     # @return True if objects are 'same'.
    224     def __cmp__(self, other):
    225         #check that 'other' is an instance of this class
    226         if isinstance(self, type(other)):
    227             result = cmp(self._attribute_dic, other._attribute_dic)
    228             if result <> 0:
    229                 return result
    230 
    231             # The order of the columns is important. Therefore..
    232             result = cmp(self._title_index_dic, other._title_index_dic)
    233             if result <> 0:
    234                 return result
    235             for self_ls, other_ls in map(None, self._attribute_dic,
    236                                          other._attribute_dic):
    237                 result = cmp(self._attribute_dic[self_ls],
    238                              other._attribute_dic[other_ls])
    239                 if result <> 0:
    240                     return result
    241             return 0
    242         else:
    243             return 1
    244 
    245     ##
    246     # @brief Get a list of column values given a column name.
    247     # @param column_name The name of the column to get values from.
    248     # @param use_refind_polygon Unused??
    249     def get_column(self, column_name, use_refind_polygon=False):
    250         """
    251         Given a column name return a list of the column values
    252 
    253         Note, the type of the values will be String!
    254         do this to change a list of strings to a list of floats
    255         time = [float(x) for x in time]
    256 
    257         Not implemented:
    258         if use_refind_polygon is True, only return values in the
    259         refined polygon
    260         """
    261 
    262         if not self._attribute_dic.has_key(column_name):
    263             msg = 'There is no column called %s!' % column_name
    264             raise TitleValueError, msg
    265 
    266         return self._attribute_dic[column_name]
    267 
    268     ##
    269     # @brief ??
    270     # @param value_column_name ??
    271     # @param known_column_name ??
    272     # @param known_values ??
    273     # @param use_refind_polygon ??
    274     def get_value(self, value_column_name, known_column_name,
    275                   known_values, use_refind_polygon=False):
    276         """
    277         Do linear interpolation on the known_colum, using the known_value,
    278         to return a value of the column_value_name.
    279         """
    280 
    281         pass
    282 
    283     ##
    284     # @brief Get a geospatial object that describes the locations.
    285     # @param use_refind_polygon Unused??
    286     def get_location(self, use_refind_polygon=False):
    287         """
    288         Return a geospatial object which describes the
    289         locations of the location file.
    290 
    291         Note, if there is not location info, this returns None.
    292 
    293         Not implemented:
    294         if use_refind_polygon is True, only return values in the
    295         refined polygon
    296         """
    297 
    298         return self._geospatial
    299 
    300     ##
    301     # @brief Add column to 'end' of CSV data.
    302     # @param column_name The new column name.
    303     # @param column_values The new column values.
    304     # @param overwrite If True, overwrites last column, doesn't add at end.
    305     def set_column(self, column_name, column_values, overwrite=False):
    306         """
    307         Add a column to the 'end' (with the right most column being the end)
    308         of the csv file.
    309 
    310         Set overwrite to True if you want to overwrite a column.
    311 
    312         Note, in column_name white space is removed and case is not checked.
    313         Precondition
    314         The column_name and column_values cannot have comma's in it.
    315         """
    316 
    317         # sanity checks
    318         value_row_count = \
    319                 len(self._attribute_dic[self._title_index_dic.keys()[0]])
    320         if len(column_values) <> value_row_count:
    321             msg = 'The number of column values must equal the number of rows.'
    322             raise DataMissingValuesError, msg
    323 
    324         # check new column name isn't already used, and we aren't overwriting
    325         if self._attribute_dic.has_key(column_name):
    326             if not overwrite:
    327                 msg = 'Column name %s already in use!' % column_name
    328                 raise TitleValueError, msg
    329         else:
    330             # New title.  Add it to the title index.
    331             self._title_index_dic[column_name] = len(self._title_index_dic)
    332 
    333         self._attribute_dic[column_name] = column_values
    334 
    335     ##
    336     # @brief Save the exposure CSV  file.
    337     # @param file_name If supplied, use this filename, not original.
    338     def save(self, file_name=None):
    339         """
    340         Save the exposure csv file
    341         """
    342 
    343         if file_name is None:
    344             file_name = self._file_name
    345 
    346         fd = open(file_name, 'wb')
    347         writer = csv.writer(fd)
    348 
    349         #Write the title to a cvs file
    350         line = [None] * len(self._title_index_dic)
    351         for title in self._title_index_dic.iterkeys():
    352             line[self._title_index_dic[title]] = title
    353         writer.writerow(line)
    354 
    355         # Write the values to a cvs file
    356         value_row_count = \
    357                 len(self._attribute_dic[self._title_index_dic.keys()[0]])
    358         for row_i in range(value_row_count):
    359             line = [None] * len(self._title_index_dic)
    360             for title in self._title_index_dic.iterkeys():
    361                 line[self._title_index_dic[title]] = \
    362                      self._attribute_dic[title][row_i]
    363             writer.writerow(line)
          117    +
     364  118
     365  119
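
    Two things leave data_manager.py in the hunk above: the ad-hoc exception classes
    (now pulled in from anuga.anuga_exceptions, as the added import shows) and the
    whole Exposure_csv class (renamed Exposure, per the damage_modelling hunks
    earlier).  TitleValueError is absent from the new import list, presumably because
    it travels with the Exposure code.  A short sketch of how code that depended on
    data_manager picks up the relocated exceptions follows; the import list is taken
    verbatim from the hunk above, while the helper function and its use are
    illustrative only, not part of ANUGA.

        from anuga.anuga_exceptions import DataMissingValuesError, \
                        DataFileNotOpenError, DataTimeError, DataDomainError, \
                        NewQuantity

        def check_row_count(column_values, expected_rows):
            # The same kind of check Exposure_csv.set_column() used to make.
            if len(column_values) != expected_rows:
                msg = 'The number of column values must equal the number of rows.'
                raise DataMissingValuesError(msg)

        try:
            check_row_count(['only one value'], 3)
        except DataMissingValuesError as e:
            print('caught: %s' % e)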
  • anuga_core/source/anuga/shallow_water/data_manager_joaquims_patch.py  (r7711 → r7743)

    2716 2716        #print "latitudes[:]",latitudes[:]
    2717 2717        #print "longitudes[:]",longitudes [:]
    2718         -     kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
         2718    +     kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:],
    2719 2719                                                      longitudes[:],
    2720 2720                                                      minlat, maxlat,
      …
    3649 3649        latitudes.reverse()
    3650 3650
    3651         -     kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
         3651    +     kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:],longitudes[:],
    3652 3652                                                     minlat=minlat, maxlat=maxlat,
    3653 3653                                                     minlon=minlon, maxlon=maxlon)
      …
    3850 3850        outfile.close()
    3851 3851
    3852         - def _get_min_max_indexes(latitudes_ref,longitudes_ref,
         3852    + def get_min_max_indices(latitudes_ref,longitudes_ref,
    3853 3853                            minlat=None, maxlat=None,
    3854 3854                            minlon=None, maxlon=None):
  • anuga_core/source/anuga/shallow_water/test_data_manager.py  (r7742 → r7743)

      25   25    from anuga.utilities.system_tools import get_pathname_from_package
      26   26    from anuga.utilities.file_utils import del_dir, load_csv_as_dict
      27         - from anuga.utilities.anuga_exceptions import ANUGAError
           27    + from anuga.anuga_exceptions import ANUGAError
      28   28    from anuga.utilities.numerical_tools import ensure_numeric, mean
      29   29    from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
      …
    3404 3404            os.remove(filename)
    3405 3405
    3406         -
    3407         -
    3408         - ########## testing nbed class ##################
    3409     def test_exposure_csv_loading(self):
    3410         file_name = tempfile.mktemp(".csv")
    3411         file = open(file_name,"w")
    3412         file.write("LATITUDE, LONGITUDE ,sound  , speed \n\
    3413 115.0, -21.0, splat, 0.0\n\
    3414 114.0, -21.7, pow, 10.0\n\
    3415 114.5, -21.4, bang, 40.0\n")
    3416         file.close()
    3417         exposure = Exposure_csv(file_name, title_check_list = ['speed','sound'])
    3418         exposure.get_column("sound")
    3419        
    3420         self.failUnless(exposure._attribute_dic['sound'][2]==' bang',
    3421                         'FAILED!')
    3422         self.failUnless(exposure._attribute_dic['speed'][2]==' 40.0',
    3423                         'FAILED!')
    3424        
    3425         os.remove(file_name)
    3426        
    3427     def test_exposure_csv_loadingII(self):
    3428        
    3429 
    3430         file_name = tempfile.mktemp(".txt")
    3431         file = open(file_name,"w")
    3432         file.write("LATITUDE, LONGITUDE ,sound  , speed \n\
    3433 115.0, -21.0, splat, 0.0\n\
    3434 114.0, -21.7, pow, 10.0\n\
    3435 114.5, -21.4, bang, 40.0\n")
    3436         file.close()
    3437         exposure = Exposure_csv(file_name)
    3438         exposure.get_column("sound")
    3439        
    3440         self.failUnless(exposure._attribute_dic['sound'][2]==' bang',
    3441                         'FAILED!')
    3442         self.failUnless(exposure._attribute_dic['speed'][2]==' 40.0',
    3443                         'FAILED!')
    3444        
    3445         os.remove(file_name)
    3446        
    3447     def test_exposure_csv_loading_title_check_list(self):
    3448 
    3449         # I can't get cvs.reader to close the exposure file
    3450         # The hacks below are to get around this.       
    3451         if sys.platform == 'win32':
    3452             file_name = tempfile.gettempdir() + \
    3453                     "test_exposure_csv_loading_title_check_list.csv"
    3454         else:
    3455             file_name = tempfile.mktemp(".csv")
    3456         file = open(file_name,"w")
    3457         file.write("LATITUDE, LONGITUDE ,sound  , speed \n\
    3458 115.0, -21.0, splat, 0.0\n\
    3459 114.0, -21.7, pow, 10.0\n\
    3460 114.5, -21.4, bang, 40.0\n")
    3461         file.close()
    3462         try:
    3463             exposure = Exposure_csv(file_name, title_check_list = ['SOUND'])
    3464         except IOError:
    3465             pass
    3466         else:
    3467             self.failUnless(0 ==1,  'Assertion not thrown error!')
    3468            
    3469         if not sys.platform == 'win32':
    3470             os.remove(file_name)
    3471        
    3472     def test_exposure_csv_cmp(self):
    3473         file_name = tempfile.mktemp(".csv")
    3474         file = open(file_name,"w")
    3475         file.write("LATITUDE, LONGITUDE ,sound  , speed \n\
    3476 115.0, -21.0, splat, 0.0\n\
    3477 114.0, -21.7, pow, 10.0\n\
    3478 114.5, -21.4, bang, 40.0\n")
    3479         file.close()
    3480        
    3481         e1 = Exposure_csv(file_name)
    3482         e2 = Exposure_csv(file_name)
    3483         os.remove(file_name)
    3484 
    3485         self.failUnless(cmp(e1,e2)==0,
    3486                         'FAILED!')
    3487        
    3488         self.failUnless(cmp(e1,"hey")==1,
    3489                         'FAILED!')
    3490        
    3491         file_name = tempfile.mktemp(".csv")
    3492         file = open(file_name,"w")
    3493         # Note, this has less spaces in the title,
    3494         # the instances will be the same.
    3495         file.write("LATITUDE,LONGITUDE ,sound, speed \n\
    3496 115.0, -21.0, splat, 0.0\n\
    3497 114.0, -21.7, pow, 10.0\n\
    3498 114.5, -21.4, bang, 40.0\n")
    3499         file.close()
    3500         e3 = Exposure_csv(file_name)
    3501         os.remove(file_name)
    3502 
    3503         self.failUnless(cmp(e3,e2)==0,
    3504                         'FAILED!')
    3505        
    3506         file_name = tempfile.mktemp(".csv")
    3507         file = open(file_name,"w")
    3508         # Note, 40 changed to 44 .
    3509         file.write("LATITUDE,LONGITUDE ,sound, speed \n\
    3510 115.0, -21.0, splat, 0.0\n\
    3511 114.0, -21.7, pow, 10.0\n\
    3512 114.5, -21.4, bang, 44.0\n")
    3513         file.close()
    3514         e4 = Exposure_csv(file_name)
    3515         os.remove(file_name)
    3516         #print "e4",e4._attribute_dic
    3517         #print "e2",e2._attribute_dic
    3518         self.failUnless(cmp(e4,e2)<>0,
    3519                         'FAILED!')
    3520        
    3521         file_name = tempfile.mktemp(".csv")
    3522         file = open(file_name,"w")
    3523         # Note, the first two columns are swapped.
    3524         file.write("LONGITUDE,LATITUDE ,sound, speed \n\
    3525  -21.0,115.0, splat, 0.0\n\
    3526  -21.7,114.0, pow, 10.0\n\
    3527  -21.4,114.5, bang, 40.0\n")
    3528         file.close()
    3529         e5 = Exposure_csv(file_name)
    3530         os.remove(file_name)
    3531 
    3532         self.failUnless(cmp(e3,e5)<>0,
    3533                         'FAILED!')
    3534        
    3535     def test_exposure_csv_saving(self):
    3536        
    3537 
    3538         file_name = tempfile.mktemp(".csv")
    3539         file = open(file_name,"w")
    3540         file.write("LATITUDE, LONGITUDE ,sound  , speed \n\
    3541 115.0, -21.0, splat, 0.0\n\
    3542 114.0, -21.7, pow, 10.0\n\
    3543 114.5, -21.4, bang, 40.0\n")
    3544         file.close()
    3545         e1 = Exposure_csv(file_name)
    3546        
    3547         file_name2 = tempfile.mktemp(".csv")
    3548         e1.save(file_name = file_name2)
    3549         e2 = Exposure_csv(file_name2)
    3550        
    3551         self.failUnless(cmp(e1,e2)==0,
    3552                         'FAILED!')
    3553         os.remove(file_name)
    3554         os.remove(file_name2)
    3555 
    3556     def test_exposure_csv_get_location(self):
    3557         file_name = tempfile.mktemp(".csv")
    3558         file = open(file_name,"w")
    3559         file.write("LONGITUDE , LATITUDE, sound  , speed \n\
    3560 150.916666667, -34.5, splat, 0.0\n\
    3561 150.0, -34.0, pow, 10.0\n")
    3562         file.close()
    3563         e1 = Exposure_csv(file_name)
    3564 
    3565         gsd = e1.get_location()
    3566        
    3567         points = gsd.get_data_points(absolute=True)
    3568        
    3569         assert num.allclose(points[0][0], 308728.009)
    3570         assert num.allclose(points[0][1], 6180432.601)
    3571         assert num.allclose(points[1][0],  222908.705)
    3572         assert num.allclose(points[1][1], 6233785.284)
    3573         self.failUnless(gsd.get_geo_reference().get_zone() == 56,
    3574                         'Bad zone error!')
    3575 
    3576         os.remove(file_name)
    3577        
    3578     def test_exposure_csv_set_column_get_column(self):
    3579         file_name = tempfile.mktemp(".csv")
    3580         file = open(file_name,"w")
    3581         file.write("LONGITUDE , LATITUDE, sound  , speed \n\
    3582 150.916666667, -34.5, splat, 0.0\n\
    3583 150.0, -34.0, pow, 10.0\n")
    3584         file.close()
    3585         e1 = Exposure_csv(file_name)     
    3586         os.remove(file_name)
    3587 
    3588         new_title = "feast"
    3589         new_values = ["chicken","soup"]
    3590         e1.set_column(new_title, new_values)
    3591         returned_values = e1.get_column(new_title)
    3592         self.failUnless(returned_values == new_values,
    3593                         ' Error!')
    3594        
    3595         file_name2 = tempfile.mktemp(".csv")
    3596         e1.save(file_name = file_name2)
    3597         e2 = Exposure_csv(file_name2)
    3598         returned_values = e2.get_column(new_title)
    3599         self.failUnless(returned_values == new_values,
    3600                         ' Error!')       
    3601         os.remove(file_name2)
    3602 
    3603     def test_exposure_csv_set_column_get_column_error_checking(self):
    3604         file_name = tempfile.mktemp(".csv")
    3605         file = open(file_name,"w")
    3606         file.write("LONGITUDE , LATITUDE, sound  , speed \n\
    3607 150.916666667, -34.5, splat, 0.0\n\
    3608 150.0, -34.0, pow, 10.0\n")
    3609         file.close()
    3610         e1 = Exposure_csv(file_name)     
    3611         os.remove(file_name)
    3612 
    3613         new_title = "sound"
    3614         new_values = [12.5,7.6]
    3615         try:
    3616             e1.set_column(new_title, new_values)
    3617         except TitleValueError:
    3618             pass
    3619         else:
    3620             self.failUnless(0 ==1,  'Error not thrown error!')
    3621            
    3622         e1.set_column(new_title, new_values, overwrite=True)
    3623         returned_values = e1.get_column(new_title)
    3624         self.failUnless(returned_values == new_values,
    3625                         ' Error!')       
    3626        
    3627         new2_title = "short list"
    3628         new2_values = [12.5]
    3629         try:
    3630             e1.set_column(new2_title, new2_values)
    3631         except DataMissingValuesError:
    3632             pass
    3633         else:
    3634             self.failUnless(0 ==1,  'Error not thrown error!')
    3635            
    3636         new2_title = "long list"
    3637         new2_values = [12.5, 7,8]
    3638         try:
    3639             e1.set_column(new2_title, new2_values)
    3640         except DataMissingValuesError:
    3641             pass
    3642         else:
    3643             self.failUnless(0 ==1,  'Error not thrown error!')
    3644         file_name2 = tempfile.mktemp(".csv")
    3645         e1.save(file_name = file_name2)
    3646         e2 = Exposure_csv(file_name2)
    3647         returned_values = e2.get_column(new_title)
    3648         for returned, new in map(None, returned_values, new_values):
    3649             self.failUnless(returned == str(new), ' Error!')
    3650         #self.failUnless(returned_values == new_values, ' Error!')       
    3651         os.remove(file_name2)
    3652        
    3653         try:
    3654             e1.get_column("toe jam")
    3655         except TitleValueError:
    3656             pass
    3657         else:
    3658             self.failUnless(0 ==1,  'Error not thrown error!')
    3659            
    3660     def test_exposure_csv_loading_x_y(self):
    3661        
    3662 
    3663         file_name = tempfile.mktemp(".csv")
    3664         file = open(file_name,"w")
    3665         file.write("x, y ,sound  , speed \n\
    3666 115.0, 7, splat, 0.0\n\
    3667 114.0, 8.0, pow, 10.0\n\
    3668 114.5, 9., bang, 40.0\n")
    3669         file.close()
    3670         e1 = Exposure_csv(file_name, is_x_y_locations=True)
    3671         gsd = e1.get_location()
    3672        
    3673         points = gsd.get_data_points(absolute=True)
    3674        
    3675         assert num.allclose(points[0][0], 115)
    3676         assert num.allclose(points[0][1], 7)
    3677         assert num.allclose(points[1][0], 114)
    3678         assert num.allclose(points[1][1], 8)
    3679         assert num.allclose(points[2][0], 114.5)
    3680         assert num.allclose(points[2][1], 9)
    3681         self.failUnless(gsd.get_geo_reference().get_zone() == -1,
    3682                         'Bad zone error!')
    3683 
    3684         os.remove(file_name)
    3685 
    3686            
    3687     def test_exposure_csv_loading_x_y2(self):
    3688        
    3689         csv_file = tempfile.mktemp(".csv")
    3690         fd = open(csv_file,'wb')
    3691         writer = csv.writer(fd)
    3692         writer.writerow(['x','y','STR_VALUE','C_VALUE','ROOF_TYPE','WALLS', 'SHORE_DIST'])
    3693         writer.writerow([5.5,0.5,'199770','130000','Metal','Timber',20])
    3694         writer.writerow([4.5,1.0,'150000','76000','Metal','Double Brick',20])
    3695         writer.writerow([4.5,1.5,'150000','76000','Metal','Brick Veneer',20])
    3696         fd.close()
    3697 
    3698         e1 = Exposure_csv(csv_file)
    3699         gsd = e1.get_location()
    3700        
    3701         points = gsd.get_data_points(absolute=True)
    3702         assert num.allclose(points[0][0], 5.5)
    3703         assert num.allclose(points[0][1], 0.5)
    3704         assert num.allclose(points[1][0], 4.5)
    3705         assert num.allclose(points[1][1], 1.0)
    3706         assert num.allclose(points[2][0], 4.5)
    3707         assert num.allclose(points[2][1], 1.5)
    3708         self.failUnless(gsd.get_geo_reference().get_zone() == -1,
    3709                         'Bad zone error!')
    3710 
    3711         os.remove(csv_file)
    3712 3406
    3713 3407        #### TESTS FOR URS 2 SWW  ###
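
    The block above removes the Exposure_csv tests from test_data_manager.py; this
    changeset's "3 added" files presumably include the new exposure module and its own
    test file, which are not shown in this view.  A minimal sketch of the first removed
    test rewritten against the renamed class is below; the import path is an assumption
    and should match wherever exposure.py now lives.

        import os
        import tempfile
        import unittest

        from exposure import Exposure    # assumed location of the new module


        class TestExposureLoading(unittest.TestCase):
            def test_exposure_csv_loading(self):
                # Mirrors the removed test_exposure_csv_loading above,
                # with Exposure_csv replaced by Exposure.
                handle, file_name = tempfile.mkstemp(suffix='.csv')
                os.close(handle)
                f = open(file_name, 'w')
                f.write("LATITUDE, LONGITUDE ,sound  , speed \n"
                        "115.0, -21.0, splat, 0.0\n"
                        "114.0, -21.7, pow, 10.0\n"
                        "114.5, -21.4, bang, 40.0\n")
                f.close()
                try:
                    exposure = Exposure(file_name,
                                        title_check_list=['speed', 'sound'])
                    # Column values are read back as strings, spaces included.
                    self.assertEqual(exposure.get_column('sound')[2], ' bang')
                    self.assertEqual(exposure.get_column('speed')[2], ' 40.0')
                finally:
                    os.remove(file_name)


        if __name__ == '__main__':
            unittest.main()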
  • anuga_core/source/anuga/shallow_water/test_file_conversion.py  (r7737 → r7743)

    1597 1597            # k - lat
    1598 1598            # l - lon
    1599         -         kmin, kmax, lmin, lmax = _get_min_max_indexes(
         1599    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1600 1600                latitudes,longitudes,
    1601 1601                -10,4,-10,31)
      …
    1612 1612
    1613 1613            ## 2nd test
    1614         -         kmin, kmax, lmin, lmax = _get_min_max_indexes(
         1614    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1615 1615                latitudes,longitudes,
    1616 1616                0.5,2.5,5,25)
      …
    1627 1627
    1628 1628            ## 3rd test
    1629         -         kmin, kmax, lmin, lmax = _get_min_max_indexes(\
         1629    +         kmin, kmax, lmin, lmax = get_min_max_indices(\
    1630 1630                latitudes,
    1631 1631                longitudes,
      …
    1644 1644
    1645 1645            ## 4th test
    1646         -         kmin, kmax, lmin, lmax = _get_min_max_indexes(
         1646    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1647 1647                latitudes,longitudes,
    1648 1648                                                          -0.1,1.9,-2,17)
      …
    1658 1658                             'failed')
    1659 1659            ## 5th test
    1660         -         kmin, kmax, lmin, lmax = _get_min_max_indexes(
         1660    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1661 1661                latitudes,longitudes,
    1662 1662                0.1,1.9,2,17)
      …
    1674 1674            ## 6th test
    1675 1675
    1676         -         kmin, kmax, lmin, lmax = _get_min_max_indexes(
         1676    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1677 1677                latitudes,longitudes,
    1678 1678                1.5,4,18,32)
      …
    1691 1691            ## 7th test
    1692 1692            m2d = num.array([[0,1,2,3],[4,5,6,7],[8,9,10,11],[12,13,14,15]], num.int)    #array default#
    1693         -         kmin, kmax, lmin, lmax = _get_min_max_indexes(
         1693    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1694 1694                latitudes,longitudes,
    1695 1695                1.5,1.5,15,15)
      …
    1715 1715            # k - lat
    1716 1716            # l - lon
    1717         -         kmin, kmax, lmin, lmax = _get_min_max_indexes(
         1717    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1718 1718                latitudes,longitudes,
    1719 1719                -10,4,-10,31)
      …
    1730 1730
    1731 1731            ## 3rd test
    1732         -         kmin, kmax, lmin, lmax = _get_min_max_indexes(\
         1732    +         kmin, kmax, lmin, lmax = get_min_max_indices(\
    1733 1733                latitudes,
    1734 1734                longitudes,
      …
    1753 1753            # k - lat
    1754 1754            # l - lon
    1755         -         kmin, kmax, lmin, lmax = _get_min_max_indexes(
         1755    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1756 1756                latitudes,longitudes,
    1757 1757                -37,-27,147,149.5)
      …
    1782 1782            # k - lat
    1783 1783            # l - lon
    1784         -         kmin, kmax, lmin, lmax = _get_min_max_indexes(
         1784    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1785 1785                latitudes,longitudes,
    1786 1786                -43,-37,148.5,149.5)
      …
    1806 1806            # k - lat
    1807 1807            # l - lon
    1808         -         kmin, kmax, lmin, lmax = _get_min_max_indexes(
         1808    +         kmin, kmax, lmin, lmax = get_min_max_indices(
    1809 1809                latitudes,longitudes)
    1810 1810
  • anuga_core/source/anuga/shallow_water/test_forcing_terms.py  (r7736 → r7743)

      20   20    from boundaries import Reflective_boundary
      21   21    from forcing import Wind_stress, Inflow, Rainfall
      22         - from file_conversion import timefile2netcdf
           22    + from anuga.file_conversion.file_conversion import timefile2netcdf
      23   23
      24   24    import numpy as num
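
    The same relocation pattern closes the changeset: timefile2netcdf is now imported
    from the fully qualified anuga.file_conversion.file_conversion module rather than
    from a bare file_conversion.  For scripts outside the package, the module path used
    throughout these hunks is the one to rely on; whether the names are also re-exported
    at the anuga.file_conversion package level is not shown in this view.

        # Module path as used in the hunks above.
        from anuga.file_conversion.file_conversion import (timefile2netcdf,
                                                           get_min_max_indices)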