Changeset 7276 for anuga_core/source/anuga/geospatial_data
Timestamp: Jun 30, 2009, 2:07:41 PM (16 years ago)
Location:  anuga_core/source/anuga/geospatial_data
Files:     2 edited
anuga_core/source/anuga/geospatial_data/geospatial_data.py
r6438 r7276 1 """Class Geospatial_data - Manipulation of locations on the planet and2 associated attributes. 3 1 """Class Geospatial_data 2 3 Manipulation of locations on the planet and associated attributes. 4 4 """ 5 5 … … 9 9 from warnings import warn 10 10 from string import lower 11 from RandomArray import randint, seed, get_seed12 11 from copy import deepcopy 12 import copy 13 13 14 from Scientific.IO.NetCDF import NetCDFFile 14 15 import Numeric as num 15 import numpy as num 16 from numpy.random import randint, seed 16 17 17 18 from anuga.coordinate_transforms.lat_long_UTM_conversion import UTMtoLL … … 20 21 TitleError, DEFAULT_ZONE, ensure_geo_reference, write_NetCDF_georeference 21 22 from anuga.coordinate_transforms.redfearn import convert_from_latlon_to_utm 23 from anuga.utilities.system_tools import clean_line 22 24 from anuga.utilities.anuga_exceptions import ANUGAError 23 25 from anuga.config import points_file_block_line_size as MAX_READ_LINES 24 26 from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a 27 from anuga.config import netcdf_float 28 25 29 26 30 DEFAULT_ATTRIBUTE = 'elevation' … … 56 60 load_file_now=True, 57 61 verbose=False): 58 """ 59 Create instance from data points and associated attributes 62 """Create instance from data points and associated attributes 60 63 61 64 data_points: x,y coordinates in meters. Type must be either a 62 sequence of 2-tuples or an Mx2 Numeric array of floats. A file name65 sequence of 2-tuples or an Mx2 numeric array of floats. A file name 63 66 with extension .txt, .cvs or .pts can also be passed in here. 64 67 … … 131 134 132 135 verbose: 133 134 136 """ 135 137 136 138 if isinstance(data_points, basestring): 137 # assume data pointis really a file name139 # assume data_points is really a file name 138 140 file_name = data_points 139 141 140 142 self.set_verbose(verbose) 141 self.geo_reference = None #create the attribute143 self.geo_reference = None 142 144 self.file_name = file_name 143 145 … … 148 150 149 151 if file_name is None: 150 if latitudes is not None \ 151 or longitudes is not None \ 152 or points_are_lats_longs: 152 if (latitudes is not None or longitudes is not None 153 or points_are_lats_longs): 153 154 data_points, geo_reference = \ 154 155 _set_using_lat_long(latitudes=latitudes, … … 156 157 geo_reference=geo_reference, 157 158 data_points=data_points, 158 points_are_lats_longs= \159 points_are_lats_longs= 159 160 points_are_lats_longs) 160 161 self.check_data_points(data_points) … … 188 189 # This message was misleading. 189 190 # FIXME (Ole): Are we blocking here or not? 190 # print 'ASCII formats are not that great for '191 # print 'blockwise reading. Consider storing this'192 # print 'data as a pts NetCDF format'191 # print 'ASCII formats are not that great for ' 192 # print 'blockwise reading. Consider storing this' 193 # print 'data as a pts NetCDF format' 193 194 194 195 ## … … 203 204 204 205 ## 205 # @brief Save points data in instance.206 # @param data_points Points data to store in instance and check.206 # @brief Check data points. 207 # @param data_points Points data to check and store in instance. 207 208 # @note Throws ValueError exception if no data. 208 209 def check_data_points(self, data_points): 209 """Checks data points 210 """ 210 """Checks data points""" 211 211 212 212 if data_points is None: … … 214 214 msg = 'There is no data or file provided!' 
Attribute and georeference handling:
- In set_attributes the docstring becomes one line, comments are re-indented, and a failed conversion now raises a proper exception ("raise Exception, msg") whose message includes the offending attribute name and value, instead of raising the message string itself.
- The commented-out backwards-compatibility stub for set_geo_reference is deleted.
- set_geo_reference drops its function-local import of Geo_reference and builds its error message as a single parenthesised string; the message is now raised as an exception rather than a bare string (the new code spells the class "Expection", a typo carried in this revision).  Replacing the georeference still leaves absolute coordinates unchanged (see the sketch below).
- The get_data_points signature is reflowed with one keyword argument per line.
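A small sketch of the behaviour preserved by set_geo_reference, mirroring test_set_geo_reference in the test file further down: the stored relative points are shifted so that the absolute coordinates do not change.

    from anuga.coordinate_transforms.geo_reference import Geo_reference
    from anuga.geospatial_data.geospatial_data import Geospatial_data

    geo_ref = Geo_reference(56, -10.0, -40.0)
    points_rel = geo_ref.change_points_geo_ref([[12.5, 34.7], [-4.5, -60.0]])
    G = Geospatial_data(points_rel, geo_reference=geo_ref)

    G.set_geo_reference(Geo_reference(56, 10.0, 10.0))   # new origin, same zone
    print G.get_data_points(absolute=True)               # still [[12.5, 34.7], [-4.5, -60.0]]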
Addition of objects and file import:
- __add__ has its docstring typo fixed ("geospatial" for "geospati cal"), its error messages parenthesised, and now calls num.concatenate((attrib1, attrib2), axis=0) with the axis given explicitly (flagged "#??default#" in the source); __radd__ keeps a one-line docstring.  A usage sketch follows this list.
- The "IMPORT/EXPORT POINTS FILES" banner is rebuilt as a full-width comment block.
- import_points_file gets normalised spacing and clearer error messages: a missing file reports "Could not open file %s. Check the file location." and a malformed file reports "Problem with format of file %s." followed by the shared format help text.
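A usage sketch of __add__ as exercised by test_addII in the test file below: points are concatenated and same-named attributes are joined.

    from anuga.geospatial_data.geospatial_data import Geospatial_data

    G1 = Geospatial_data([[1.0, 2.1], [3.0, 5.3]], {'depth': [2, 4]})
    G2 = Geospatial_data([[5.0, 2.1], [3.0, 50.3]], {'depth': [200, 400]})
    G = G1 + G2
    print G.get_attributes('depth')    # 2, 4, 200, 400
    print G.get_data_points()          # [[1.0, 2.1], [3.0, 5.3], [5.0, 2.1], [3.0, 50.3]]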
Export, sampling and splitting:
- export_points_file keeps its behaviour; its docstring is compressed to "write a points file as a text (.csv) or binary (.pts) file", stray blank lines are removed, and the isSouthHemisphere arguments in the .csv/.txt and .urs branches are re-indented.
- get_sample now passes axis=0 to num.take when extracting both the sampled points and each attribute array, which is what row selection requires under numpy (see the illustration below).
- split() has its docstring corrected ("the two new objects are disjoint sets of each other"), its spacing normalised, and builds its index array with num.int instead of Numeric's num.Int.
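Why the new axis argument matters: Numeric's take defaulted to axis 0, whereas numpy's take operates on the flattened array when no axis is given. A standalone illustration:

    import numpy as num

    points = num.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]], num.float)
    print num.take(points, [0, 2], axis=0)   # rows 0 and 2 -> [[ 0.  0.]  [ 2.  2.]]
    print num.take(points, [0, 2])           # no axis: flat elements 0 and 2 -> [ 0.  1.]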
Random seeding and blocked-reader set-up:
- The seeding in split() is ported to numpy.random: "seed(seed_num)" now takes a single argument (RandomArray's two-argument form is gone) and the verbose "print 'seed:', get_seed()" line is commented out because numpy offers no get_seed.  A short sketch of the new calls follows.
- Remaining split() cleanups: comments re-indented, "random lenght" corrected to "random length", and the list-consistency assertion message built as one parenthesised string.
- In the blocked-reader set-up, the function-local NetCDFFile import is removed (the module-level import suffices), the "won't be ircluded" typo becomes "included", and the verbose progress message is rewrapped.
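The replacement numpy.random calls in one place; note that numpy's seed takes a single argument and randint excludes its upper bound.

    from numpy.random import seed, randint

    seed(100)                        # reproducible, single-argument form
    idx = randint(0, 10 - 1, (4,))   # four indices drawn from 0..8 inclusive
    print idx.tolist()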
Blockwise iteration and error text:
- The verbose messages printed while reading blocks ("Reading block %d (points %d to %d) out of %d") are rewrapped, the surrounding conditions are parenthesised, and the _read_csv_file_blocking call in next() is reflowed over several lines.  (A hedged sketch of reading a file block by block follows.)
- The "Geo reference given, then not given. This should not happen." and "Could not open file ..." messages become single parenthesised strings.
- The module-level Error_message['IOError'] help text (the expected layout of a comma separated .txt/.csv file: a header line of column names followed by x, y and numeric attribute values) is assembled as one string literal instead of repeated += concatenation.
- _set_using_lat_long gains a "@note IS THIS USED???" comment and a one-line docstring.
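A hedged sketch of the blockwise reading those messages describe. It assumes max_read_lines and load_file_now are constructor arguments (load_file_now appears in the signature earlier in this diff) and that iterating the object yields one Geospatial_data block per pass; the file name is made up.

    from anuga.geospatial_data.geospatial_data import Geospatial_data

    G = Geospatial_data(file_name='large_survey.pts',   # hypothetical file
                        load_file_now=False,
                        max_read_lines=1000,
                        verbose=True)
    for block in G:
        points = block.get_data_points(absolute=True)
        # ... work on this block of at most ~max_read_lines points ...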
945 """ 927 """If the points has lat long info, assume it is in (lat, long) order.""" 946 928 947 929 if geo_reference is not None: 948 msg = "A georeference is specified yet latitude and longitude " \949 "are also specified!"930 msg = ('A georeference is specified yet latitude and longitude ' 931 'are also specified!') 950 932 raise ValueError, msg 951 933 952 934 if data_points is not None and not points_are_lats_longs: 953 msg = "Data points are specified yet latitude and longitude are " \954 "also specified."935 msg = ('Data points are specified yet latitude and longitude are ' 936 'also specified.') 955 937 raise ValueError, msg 956 938 … … 988 970 """Read .pts NetCDF file 989 971 990 Return a dic of array of points, and dic of array of attribute972 Return a (dict_points, dict_attribute, geo_ref) 991 973 eg 992 dic ['points'] = [[1.0,2.0],[3.0,5.0]]993 dic ['attributelist']['elevation'] = [[7.0,5.0]]974 dict['points'] = [[1.0,2.0],[3.0,5.0]] 975 dict['attributelist']['elevation'] = [[7.0,5.0]] 994 976 """ 995 996 from Scientific.IO.NetCDF import NetCDFFile997 977 998 978 if verbose: print 'Reading ', file_name … … 1055 1035 header, 1056 1036 max_read_lines=1e30) 1057 # If the file is bigger that this, block..1037 # If the file is bigger that this, block.. 1058 1038 # FIXME (Ole) What's up here? 1059 1039 except ANUGAError: … … 1095 1075 # @param verbose True if this function is to be verbose. 1096 1076 # @note Will throw IndexError, SyntaxError exceptions. 1097 def _read_csv_file_blocking(file_pointer, header, 1077 def _read_csv_file_blocking(file_pointer, 1078 header, 1098 1079 delimiter=CSV_DELIMITER, 1099 1080 max_read_lines=MAX_READ_LINES, … … 1136 1117 if len(header) != len(numbers): 1137 1118 file_pointer.close() 1138 msg = "File load error. " \1139 "There might be a problem with the file header."1119 msg = ('File load error. ' 1120 'There might be a problem with the file header.') 1140 1121 raise SyntaxError, msg 1141 1122 for i,n in enumerate(numbers): 1142 1123 n.strip() 1143 1124 if n != '\n' and n != '': 1144 #attributes.append(float(n))1145 1125 att_dict.setdefault(header[i],[]).append(float(n)) 1146 1126 except ValueError: … … 1150 1130 raise StopIteration 1151 1131 1152 pointlist = num.array(points, num. Float)1132 pointlist = num.array(points, num.float) 1153 1133 for key in att_dict.keys(): 1154 att_dict[key] = num.array(att_dict[key], num. Float)1134 att_dict[key] = num.array(att_dict[key], num.float) 1155 1135 1156 1136 # Do stuff here so the info is in lat's and longs … … 1183 1163 # @note Will throw IOError and AttributeError exceptions. 1184 1164 def _read_pts_file_header(fid, verbose=False): 1185 """Read the geo_reference and number_of_points from a .pts file 1186 """ 1165 '''Read the geo_reference and number_of_points from a .pts file''' 1187 1166 1188 1167 keys = fid.variables.keys() … … 1205 1184 1206 1185 ## 1207 # @brief Read the body of a . csffile, with blocking.1186 # @brief Read the body of a .pts file, with blocking. 1208 1187 # @param fid Handle to already open file. 1209 1188 # @param start_row Start row index of points to return. … … 1212 1191 # @return Tuple of (pointlist, attributes). 1213 1192 def _read_pts_file_blocking(fid, start_row, fin_row, keys): 1214 """Read the body of a .csv file. 
1215 """ 1193 '''Read the body of a .pts file.''' 1216 1194 1217 1195 pointlist = num.array(fid.variables['points'][start_row:fin_row]) … … 1239 1217 1240 1218 WARNING: This function mangles the point_atts data structure 1241 # F??ME: (DSG)This format has issues.1219 # F??ME: (DSG)This format has issues. 1242 1220 # There can't be an attribute called points 1243 1221 # consider format change … … 1251 1229 """ 1252 1230 1253 from Scientific.IO.NetCDF import NetCDFFile1254 1255 1231 # NetCDF file definition 1256 1232 outfile = NetCDFFile(file_name, netcdf_mode_w) … … 1258 1234 # Create new file 1259 1235 outfile.institution = 'Geoscience Australia' 1260 outfile.description = 'NetCDF format for compact and portable storage ' \1261 'of spatial point data'1236 outfile.description = ('NetCDF format for compact and portable storage ' 1237 'of spatial point data') 1262 1238 1263 1239 # Dimension definitions 1264 1240 shape = write_data_points.shape[0] 1265 1241 outfile.createDimension('number_of_points', shape) 1266 outfile.createDimension('number_of_dimensions', 2) # This is 2d data1242 outfile.createDimension('number_of_dimensions', 2) # This is 2d data 1267 1243 1268 1244 # Variable definition 1269 outfile.createVariable('points', n um.Float, ('number_of_points',1270 1271 1272 # create variables1273 outfile.variables['points'][:] = write_data_points #.astype(Float32)1245 outfile.createVariable('points', netcdf_float, 1246 ('number_of_points', 'number_of_dimensions')) 1247 1248 # create variables 1249 outfile.variables['points'][:] = write_data_points 1274 1250 1275 1251 if write_attributes is not None: 1276 1252 for key in write_attributes.keys(): 1277 outfile.createVariable(key, n um.Float, ('number_of_points',))1278 outfile.variables[key][:] = write_attributes[key] #.astype(Float32)1253 outfile.createVariable(key, netcdf_float, ('number_of_points',)) 1254 outfile.variables[key][:] = write_attributes[key] 1279 1255 1280 1256 if write_geo_reference is not None: … … 1296 1272 as_lat_long=False, 1297 1273 delimiter=','): 1298 """Write a .csv file. 1299 """ 1274 """Write a .csv file.""" 1300 1275 1301 1276 points = write_data_points … … 1361 1336 # @return ?? 1362 1337 def _point_atts2array(point_atts): 1363 point_atts['pointlist'] = num.array(point_atts['pointlist'], num. Float)1338 point_atts['pointlist'] = num.array(point_atts['pointlist'], num.float) 1364 1339 1365 1340 for key in point_atts['attributelist'].keys(): 1366 1341 point_atts['attributelist'][key] = \ 1367 num.array(point_atts['attributelist'][key], num. Float)1342 num.array(point_atts['attributelist'][key], num.float) 1368 1343 1369 1344 return point_atts … … 1375 1350 # @return A points dictionary. 1376 1351 def geospatial_data2points_dictionary(geospatial_data): 1377 """Convert geospatial data to points_dictionary 1378 """ 1352 """Convert geospatial data to points_dictionary""" 1379 1353 1380 1354 points_dictionary = {} … … 1396 1370 # @param points_dictionary A points dictionary to convert. 
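Round-tripping through the writers just described: export_points_file picks the writer from the extension, so these calls (with made-up file names) exercise _write_pts_file and _write_csv_file.

    from anuga.geospatial_data.geospatial_data import Geospatial_data

    G = Geospatial_data([[1.0, 2.1], [3.0, 5.3]], {'elevation': [6.1, 5.0]})
    G.export_points_file('survey.pts')   # binary NetCDF, written with netcdf_float
    G.export_points_file('survey.csv')   # text: x, y and attribute columns
    # as_lat_long=True would write latitude/longitude columns instead
    # (assumes the object carries a real UTM zone).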
Helper relocation and the ensure_* functions:
- points_dictionary2geospatial_data quotes the required dictionary keys ('pointlist', 'attributelist') in its assertion messages.
- The module-local clean_line() helper (strip a delimited line, drop empty fields, strip the remainder) is deleted; callers now rely on the version imported from anuga.utilities.system_tools (see the example below).
- ensure_absolute and ensure_geospatial lose their function-local "import copy" (copy is imported at module level), convert with num.float instead of num.Float, and ensure_geospatial's docstring is rewritten as "Convert various data formats to a geospatial_data instance."
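clean_line now comes from anuga.utilities.system_tools; assuming the relocated function keeps the behaviour of the local version removed here, it works like this:

    from anuga.utilities.system_tools import clean_line

    print clean_line(' 0.6, 0.7 , , 4.9\n', ',')   # ['0.6', '0.7', '4.9']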
find_optimal_smoothing_parameter:
- Its docstring is rewrapped and corrected ("it must be either a pts, txt or csv file"); the all-boundaries-defined check becomes a single parenthesised condition and, on failure, raises an exception instead of a bare string (again spelled "Expection" in this revision); poly_topo, the mesh-file existence check and the topo-splitting comments are re-indented.
- The working arrays are created with dtype=num.float instead of Numeric's typecode=num.Float, the per-alpha verbose messages are tidied, and the sorted covariance table is built with num.take(normal_cov, num.argsort(normal_cov0), axis=0) so that whole rows are selected under numpy.
- The final report (per-alpha covariance and "Optimal alpha is: ...") is printed with parenthesised format strings; the normalisation and selection logic itself is unchanged.  A small standalone illustration follows.
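A standalone illustration of that selection step: rows of [alpha, normalised covariance] are ordered by alpha with argsort plus take(axis=0), and the alpha with the smallest covariance is picked. The numbers are made up.

    import numpy as num

    # each row is [alpha, ele_cov / sample_cov]
    normal_cov = num.array([[0.01, 0.4], [100.0, 0.9], [0.001, 0.6]], num.float)
    order = num.argsort(normal_cov[:, 0])
    normal_cov_new = num.take(normal_cov, order, axis=0)   # rows sorted by alpha
    best_alpha = normal_cov_new[num.argmin(normal_cov_new, axis=0)[1], 0]
    print best_alpha                                       # 0.01 for this data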
old_find_optimal_smoothing_parameter:
- The deprecated variant receives the same treatment: a parenthesised boundary check that raises an exception instead of a bare string, re-indented poly_topo and comments, arrays created with dtype=num.float, and the covariance table sorted with num.take(..., num.argsort(...), axis=0).
- A large block of commented-out code for removing points outside the boundary polygon (an old FIXME) is deleted, and the commented-out alternative alpha list is rewrapped.
anuga_core/source/anuga/geospatial_data/test_geospatial_data.py
Changes from r7012 to r7276:

Imports and the basic tests:
- The test module now imports numpy as num instead of Numeric and adds "from anuga.config import netcdf_float"; the os/tempfile/math imports are regrouped and stray blank lines removed.
- test_0 through test_get_attributes_2 are whitespace-cleaned; the invalid-attribute check ends with "raise Exception, 'Should have raised exception'" instead of raising a bare string, together with a new "# FIXME: use failUnlessRaises()" note (a sketch of that style follows).
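A sketch of the assertion style that new FIXME points to, using unittest's failUnlessRaises; the test name and the assumption that get_attributes raises a standard exception class for an unknown name are illustrative, not part of the changeset.

    def test_get_attributes_invalid(self):
        G = Geospatial_data([[1.0, 2.1], [3.0, 5.3]], {'a1': [2, 4]})
        # assumes an ordinary exception class is raised for unknown attribute names
        self.failUnlessRaises(Exception, G.get_attributes, 'hdnoatedu')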
Data-point, georeference and addition tests:
- test_get_data_points and the lat/long tests lose their debug prints and get a consistent comment style; test_get_data_points_lat_longIII is laid out afresh, and docstrings throughout the file switch to ''' quoting.
- test_set_geo_reference, test_add_with_geo, test_add_with_geo_absolute and test_add_with_None add explanatory msg arguments to their num.allclose assertions and wrap long constructor calls; stray commented-out assertions and debug lines in the conversion and addition tests are removed.
- Wherever point arrays are joined, num.concatenate is called with an explicit axis=0 (flagged "#??default#"), matching the change to __add__ in the module.
- test_clip0 and the start of test_clip0_with_attributes are rewrapped; their behaviour is unchanged -- clipping by the unit square keeps only the points inside it, as sketched below.
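The clipping behaviour those tests pin down, in one short sketch (points, polygon and expected result taken from test_clip0; Geospatial_data is already imported inside the test module):

    G = Geospatial_data([[-1, 4], [0.2, 0.5], [1.0, 2.1], [0.4, 0.3],
                         [3.0, 5.3], [0, 0], [2.4, 3.3]])
    U = [[0, 0], [1, 0], [1, 1], [0, 1]]      # the unit square
    print G.clip(U).get_data_points()         # [[0.2, 0.5], [0.4, 0.3], [0, 0]]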
'''test_clip0_with_attributes(self): 512 569 513 Test that point sets with attributes can be clipped by a polygon 570 """571 514 ''' 515 572 516 from anuga.coordinate_transforms.geo_reference import Geo_reference 573 517 574 518 points = [[-1, 4], [0.2, 0.5], [1.0, 2.1], [0.4, 0.3], [3.0, 5.3], 575 519 [0, 0], [2.4, 3.3]] … … 578 522 att_dict = {'att1': attributes, 579 523 'att2': num.array(attributes)+1} 580 524 581 525 G = Geospatial_data(points, att_dict) 582 526 583 # First try the unit square 584 U = [[0,0], [1,0], [1,1], [0,1]] 585 assert num.allclose(G.clip(U).get_data_points(), [[0.2, 0.5], [0.4, 0.3], [0, 0]]) 527 # First try the unit square 528 U = [[0,0], [1,0], [1,1], [0,1]] 529 assert num.allclose(G.clip(U).get_data_points(), 530 [[0.2, 0.5], [0.4, 0.3], [0, 0]]) 586 531 assert num.allclose(G.clip(U).get_attributes('att1'), [-4, 76, 0.1]) 587 assert num.allclose(G.clip(U).get_attributes('att2'), [-3, 77, 1.1]) 532 assert num.allclose(G.clip(U).get_attributes('att2'), [-3, 77, 1.1]) 588 533 589 534 # Then a more complex polygon 590 535 polygon = [[0,0], [1,0], [0.5,-1], [2, -1], [2,1], [0,1]] 591 points = [ [0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], [0.5, 1.5], [0.5, -0.5]] 536 points = [[0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], 537 [0.5, 1.5], [0.5, -0.5]] 592 538 593 539 # This time just one attribute 594 540 attributes = [2, -4, 5, 76, -2, 0.1] 595 541 G = Geospatial_data(points, attributes) 596 597 542 assert num.allclose(G.clip(polygon).get_data_points(), 598 [[0.5, 0.5], [1, -0.5], [1.5, 0]])543 [[0.5, 0.5], [1, -0.5], [1.5, 0]]) 599 544 assert num.allclose(G.clip(polygon).get_attributes(), [-4, 5, 76]) 600 601 545 602 546 def test_clip1(self): 603 """test_clip1(self):604 547 '''test_clip1(self): 548 605 549 Test that point sets can be clipped by a polygon given as 606 550 another Geospatial dataset 607 """608 551 ''' 552 609 553 from anuga.coordinate_transforms.geo_reference import Geo_reference 610 554 611 555 points = [[-1, 4], [0.2, 0.5], [1.0, 2.1], [0.4, 0.3], [3.0, 5.3], 612 556 [0, 0], [2.4, 3.3]] … … 615 559 'att2': num.array(attributes)+1} 616 560 G = Geospatial_data(points, att_dict) 617 618 # First try the unit square 619 U = Geospatial_data([[0,0], [1,0], [1,1], [0,1]]) 561 562 # First try the unit square 563 U = Geospatial_data([[0,0], [1,0], [1,1], [0,1]]) 620 564 assert num.allclose(G.clip(U).get_data_points(), 621 [[0.2, 0.5], [0.4, 0.3], [0, 0]]) 622 565 [[0.2, 0.5], [0.4, 0.3], [0, 0]]) 623 566 assert num.allclose(G.clip(U).get_attributes('att1'), [-4, 76, 0.1]) 624 assert num.allclose(G.clip(U).get_attributes('att2'), [-3, 77, 1.1]) 625 567 assert num.allclose(G.clip(U).get_attributes('att2'), [-3, 77, 1.1]) 568 626 569 # Then a more complex polygon 627 points = [ [0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], [0.5, 1.5], [0.5, -0.5]] 628 attributes = [2, -4, 5, 76, -2, 0.1] 570 points = [[0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], 571 [0.5, 1.5], [0.5, -0.5]] 572 attributes = [2, -4, 5, 76, -2, 0.1] 629 573 G = Geospatial_data(points, attributes) 630 polygon = Geospatial_data([[0,0], [1,0], [0.5,-1], [2, -1], [2,1], [0,1]])631 574 polygon = Geospatial_data([[0,0], [1,0], [0.5,-1], [2, -1], 575 [2,1], [0,1]]) 632 576 633 577 assert num.allclose(G.clip(polygon).get_data_points(), 634 578 [[0.5, 0.5], [1, -0.5], [1.5, 0]]) 635 579 assert num.allclose(G.clip(polygon).get_attributes(), [-4, 5, 76]) 636 637 580 638 581 def test_clip0_outside(self): 639 """test_clip0_outside(self):640 582 '''test_clip0_outside(self): 583 641 584 Test that point sets can 
be clipped outside of a polygon 642 """643 585 ''' 586 644 587 from anuga.coordinate_transforms.geo_reference import Geo_reference 645 588 646 589 points = [[-1, 4], [0.2, 0.5], [1.0, 2.1], [0.4, 0.3], [3.0, 5.3], 647 590 [0, 0], [2.4, 3.3]] 648 attributes = [2, -4, 5, 76, -2, 0.1, 3] 591 attributes = [2, -4, 5, 76, -2, 0.1, 3] 649 592 G = Geospatial_data(points, attributes) 650 593 651 # First try the unit square 594 # First try the unit square 652 595 U = [[0,0], [1,0], [1,1], [0,1]] 653 596 assert num.allclose(G.clip_outside(U).get_data_points(), 654 597 [[-1, 4], [1.0, 2.1], [3.0, 5.3], [2.4, 3.3]]) 655 #print G.clip_outside(U).get_attributes() 656 assert num.allclose(G.clip_outside(U).get_attributes(), [2, 5, -2, 3]) 657 598 assert num.allclose(G.clip_outside(U).get_attributes(), [2, 5, -2, 3]) 658 599 659 600 # Then a more complex polygon 660 601 polygon = [[0,0], [1,0], [0.5,-1], [2, -1], [2,1], [0,1]] 661 points = [ [0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], [0.5, 1.5], [0.5, -0.5]] 662 attributes = [2, -4, 5, 76, -2, 0.1] 602 points = [[0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], 603 [0.5, 1.5], [0.5, -0.5]] 604 attributes = [2, -4, 5, 76, -2, 0.1] 663 605 G = Geospatial_data(points, attributes) 664 665 606 assert num.allclose(G.clip_outside(polygon).get_data_points(), 666 607 [[0.5, 1.4], [0.5, 1.5], [0.5, -0.5]]) 667 assert num.allclose(G.clip_outside(polygon).get_attributes(), [2, -2, 0.1])668 608 assert num.allclose(G.clip_outside(polygon).get_attributes(), 609 [2, -2, 0.1]) 669 610 670 611 def test_clip1_outside(self): 671 """test_clip1_outside(self):672 612 '''test_clip1_outside(self): 613 673 614 Test that point sets can be clipped outside of a polygon given as 674 615 another Geospatial dataset 675 """676 616 ''' 617 677 618 from anuga.coordinate_transforms.geo_reference import Geo_reference 678 619 679 620 points = [[-1, 4], [0.2, 0.5], [1.0, 2.1], [0.4, 0.3], [3.0, 5.3], 680 621 [0, 0], [2.4, 3.3]] 681 attributes = [2, -4, 5, 76, -2, 0.1, 3] 682 G = Geospatial_data(points, attributes) 683 684 # First try the unit square 685 U = Geospatial_data([[0,0], [1,0], [1,1], [0,1]]) 622 attributes = [2, -4, 5, 76, -2, 0.1, 3] 623 G = Geospatial_data(points, attributes) 624 625 # First try the unit square 626 U = Geospatial_data([[0,0], [1,0], [1,1], [0,1]]) 686 627 assert num.allclose(G.clip_outside(U).get_data_points(), 687 628 [[-1, 4], [1.0, 2.1], [3.0, 5.3], [2.4, 3.3]]) 688 assert num.allclose(G.clip(U).get_attributes(), [-4, 76, 0.1]) 629 assert num.allclose(G.clip(U).get_attributes(), [-4, 76, 0.1]) 689 630 690 631 # Then a more complex polygon 691 points = [ [0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], [0.5, 1.5], [0.5, -0.5]] 692 attributes = [2, -4, 5, 76, -2, 0.1] 632 points = [[0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], 633 [0.5, 1.5], [0.5, -0.5]] 634 attributes = [2, -4, 5, 76, -2, 0.1] 693 635 G = Geospatial_data(points, attributes) 694 695 polygon = Geospatial_data([[0,0], [1,0], [0.5,-1], [2, -1], [2,1], [0,1]]) 696 697 636 polygon = Geospatial_data([[0, 0], [1, 0], [0.5, -1], [2, -1], 637 [2, 1], [0, 1]]) 698 638 assert num.allclose(G.clip_outside(polygon).get_data_points(), 699 639 [[0.5, 1.4], [0.5, 1.5], [0.5, -0.5]]) 700 assert num.allclose(G.clip_outside(polygon).get_attributes(), [2, -2, 0.1]) 701 702 640 assert num.allclose(G.clip_outside(polygon).get_attributes(), 641 [2, -2, 0.1]) 703 642 704 643 def test_clip1_inside_outside(self): 705 """test_clip1_inside_outside(self):706 644 '''test_clip1_inside_outside(self): 645 707 646 Test that point sets can 
be clipped outside of a polygon given as 708 647 another Geospatial dataset 709 """710 648 ''' 649 711 650 from anuga.coordinate_transforms.geo_reference import Geo_reference 712 651 713 652 points = [[-1, 4], [0.2, 0.5], [1.0, 2.1], [0.4, 0.3], [3.0, 5.3], 714 653 [0, 0], [2.4, 3.3]] 715 attributes = [2, -4, 5, 76, -2, 0.1, 3] 654 attributes = [2, -4, 5, 76, -2, 0.1, 3] 716 655 G = Geospatial_data(points, attributes) 717 656 718 # First try the unit square 719 U = Geospatial_data([[0,0], [1,0], [1,1], [0,1]]) 657 # First try the unit square 658 U = Geospatial_data([[0,0], [1,0], [1,1], [0,1]]) 720 659 G1 = G.clip(U) 721 assert num.allclose(G1.get_data_points(),[[0.2, 0.5], [0.4, 0.3], [0, 0]]) 660 assert num.allclose(G1.get_data_points(), 661 [[0.2, 0.5], [0.4, 0.3], [0, 0]]) 722 662 assert num.allclose(G.clip(U).get_attributes(), [-4, 76, 0.1]) 723 724 663 G2 = G.clip_outside(U) 725 664 assert num.allclose(G2.get_data_points(),[[-1, 4], [1.0, 2.1], 726 665 [3.0, 5.3], [2.4, 3.3]]) 727 assert num.allclose(G.clip_outside(U).get_attributes(), [2, 5, -2, 3]) 728 729 666 assert num.allclose(G.clip_outside(U).get_attributes(), [2, 5, -2, 3]) 667 730 668 # New ordering 731 new_points = [[0.2, 0.5], [0.4, 0.3], [0, 0]] +\ 732 [[-1, 4], [1.0, 2.1], [3.0, 5.3], [2.4, 3.3]] 733 734 new_attributes = [-4, 76, 0.1, 2, 5, -2, 3] 735 669 new_points = [[0.2, 0.5], [0.4, 0.3], [0, 0], [-1, 4], 670 [1.0, 2.1], [3.0, 5.3], [2.4, 3.3]] 671 new_attributes = [-4, 76, 0.1, 2, 5, -2, 3] 672 736 673 assert num.allclose((G1+G2).get_data_points(), new_points) 737 674 assert num.allclose((G1+G2).get_attributes(), new_attributes) … … 741 678 G.export_points_file(FN) 742 679 743 744 680 # Read it back in 745 681 G3 = Geospatial_data(FN) 746 682 747 748 683 # Check result 749 assert num.allclose(G3.get_data_points(), new_points) 750 assert num.allclose(G3.get_attributes(), new_attributes) 751 684 assert num.allclose(G3.get_data_points(), new_points) 685 assert num.allclose(G3.get_attributes(), new_attributes) 686 752 687 os.remove(FN) 753 688 754 755 689 def test_load_csv(self): 756 757 import os 758 import tempfile 759 760 fileName = tempfile.mktemp(".csv") 761 file = open(fileName,"w") 762 file.write("x,y,elevation speed \n\ 690 fileName = tempfile.mktemp('.csv') 691 file = open(fileName,'w') 692 file.write('x,y,elevation speed \n\ 763 693 1.0 0.0 10.0 0.0\n\ 764 694 0.0 1.0 0.0 10.0\n\ 765 1.0 0.0 10.4 40.0\n ")695 1.0 0.0 10.4 40.0\n') 766 696 file.close() 767 #print fileName 697 768 698 results = Geospatial_data(fileName) 769 699 os.remove(fileName) 770 # print 'data', results.get_data_points() 771 assert num.allclose(results.get_data_points(), [[1.0, 0.0],[0.0, 1.0], 772 [1.0, 0.0]]) 700 assert num.allclose(results.get_data_points(), 701 [[1.0, 0.0],[0.0, 1.0], [1.0, 0.0]]) 773 702 assert num.allclose(results.get_attributes(attribute_name='elevation'), 774 703 [10.0, 0.0, 10.4]) … … 776 705 [0.0, 10.0, 40.0]) 777 706 778 779 ###################### .CSV ############################## 707 ################################################################################ 708 # Test CSV files 709 ################################################################################ 780 710 781 711 def test_load_csv_lat_long_bad_blocking(self): 782 """ 783 test_load_csv_lat_long_bad_blocking(self): 712 '''test_load_csv_lat_long_bad_blocking(self): 784 713 Different zones in Geo references 785 """ 786 fileName = tempfile.mktemp(".csv") 787 file = open(fileName,"w") 788 file.write("Lati,LONG,z \n\ 714 ''' 715 716 fileName = 
tempfile.mktemp('.csv') 717 file = open(fileName, 'w') 718 file.write('Lati,LONG,z \n\ 789 719 -25.0,180.0,452.688000\n\ 790 -34,150.0,459.126000\n ")720 -34,150.0,459.126000\n') 791 721 file.close() 792 722 793 723 results = Geospatial_data(fileName, max_read_lines=1, 794 724 load_file_now=False) 795 796 #for i in results: 797 # pass 725 798 726 try: 799 727 for i in results: … … 803 731 else: 804 732 msg = 'Different zones in Geo references not caught.' 805 raise msg806 807 os.remove(fileName) 808 733 raise Exception, msg 734 735 os.remove(fileName) 736 809 737 def test_load_csv(self): 810 811 fileName = tempfile.mktemp(".txt") 812 file = open(fileName,"w") 813 file.write(" x,y, elevation , speed \n\ 814 1.0, 0.0, 10.0, 0.0\n\ 815 0.0, 1.0, 0.0, 10.0\n\ 816 1.0, 0.0 ,10.4, 40.0\n") 738 fileName = tempfile.mktemp('.txt') 739 file = open(fileName, 'w') 740 file.write(' x,y, elevation , speed \n\ 741 1.0, 0.0, 10.0, 0.0\n\ 742 0.0, 1.0, 0.0, 10.0\n\ 743 1.0, 0.0 ,10.4, 40.0\n') 817 744 file.close() 818 745 819 746 results = Geospatial_data(fileName, max_read_lines=2) 820 747 821 822 assert num.allclose(results.get_data_points(), [[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]]) 823 assert num.allclose(results.get_attributes(attribute_name='elevation'), [10.0, 0.0, 10.4]) 824 assert num.allclose(results.get_attributes(attribute_name='speed'), [0.0, 10.0, 40.0]) 748 assert num.allclose(results.get_data_points(), 749 [[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]]) 750 assert num.allclose(results.get_attributes(attribute_name='elevation'), 751 [10.0, 0.0, 10.4]) 752 assert num.allclose(results.get_attributes(attribute_name='speed'), 753 [0.0, 10.0, 40.0]) 825 754 826 755 # Blocking … … 828 757 for i in results: 829 758 geo_list.append(i) 830 759 831 760 assert num.allclose(geo_list[0].get_data_points(), 832 761 [[1.0, 0.0],[0.0, 1.0]]) 833 834 762 assert num.allclose(geo_list[0].get_attributes(attribute_name='elevation'), 835 763 [10.0, 0.0]) 836 764 assert num.allclose(geo_list[1].get_data_points(), 837 [[1.0, 0.0]]) 765 [[1.0, 0.0]]) 838 766 assert num.allclose(geo_list[1].get_attributes(attribute_name='elevation'), 839 767 [10.4]) 840 841 os.remove(fileName) 842 768 769 os.remove(fileName) 770 843 771 def test_load_csv_bad(self): 844 """test_load_csv_bad(self):772 '''test_load_csv_bad(self): 845 773 header column, body column missmatch 846 774 (Format error) 847 """ 848 import os 849 850 fileName = tempfile.mktemp(".txt") 851 file = open(fileName,"w") 852 file.write(" elevation , speed \n\ 853 1.0, 0.0, 10.0, 0.0\n\ 854 0.0, 1.0, 0.0, 10.0\n\ 855 1.0, 0.0 ,10.4, 40.0\n") 775 ''' 776 777 fileName = tempfile.mktemp('.txt') 778 file = open(fileName, 'w') 779 file.write(' elevation , speed \n\ 780 1.0, 0.0, 10.0, 0.0\n\ 781 0.0, 1.0, 0.0, 10.0\n\ 782 1.0, 0.0 ,10.4, 40.0\n') 856 783 file.close() 857 784 … … 861 788 # Blocking 862 789 geo_list = [] 863 #for i in results:864 # geo_list.append(i)865 790 try: 866 791 for i in results: … … 869 794 pass 870 795 else: 871 msg = ' bad file did not raise error!'872 raise msg796 msg = 'Bad file did not raise error!' 
797 raise Exception, msg 873 798 os.remove(fileName) 874 799 875 800 def test_load_csv_badII(self): 876 """test_load_csv_bad(self):801 '''test_load_csv_bad(self): 877 802 header column, body column missmatch 878 803 (Format error) 879 """ 880 import os 881 882 fileName = tempfile.mktemp(".txt") 883 file = open(fileName,"w") 884 file.write(" x,y,elevation , speed \n\ 804 ''' 805 806 fileName = tempfile.mktemp('.txt') 807 file = open(fileName, 'w') 808 file.write(' x,y,elevation , speed \n\ 885 809 1.0, 0.0, 10.0, 0.0\n\ 886 810 0.0, 1.0, 0.0, 10\n\ 887 1.0, 0.0 ,10.4 yeah\n ")811 1.0, 0.0 ,10.4 yeah\n') 888 812 file.close() 889 813 … … 893 817 # Blocking 894 818 geo_list = [] 895 #for i in results:896 # geo_list.append(i)897 819 try: 898 820 for i in results: … … 901 823 pass 902 824 else: 903 msg = ' bad file did not raise error!'904 raise msg825 msg = 'Bad file did not raise error!' 826 raise Exception, msg 905 827 os.remove(fileName) 906 828 907 829 def test_load_csv_badIII(self): 908 """test_load_csv_bad(self):830 '''test_load_csv_bad(self): 909 831 space delimited 910 """ 911 import os 912 913 fileName = tempfile.mktemp(".txt") 914 file = open(fileName,"w") 915 file.write(" x y elevation speed \n\ 832 ''' 833 834 fileName = tempfile.mktemp('.txt') 835 file = open(fileName, 'w') 836 file.write(' x y elevation speed \n\ 916 837 1. 0.0 10.0 0.0\n\ 917 838 0.0 1.0 0.0 10.0\n\ 918 1.0 0.0 10.4 40.0\n ")839 1.0 0.0 10.4 40.0\n') 919 840 file.close() 920 841 … … 925 846 pass 926 847 else: 927 msg = ' bad file did not raise error!'928 raise msg929 os.remove(fileName) 930 848 msg = 'Bad file did not raise error!' 849 raise Exception, msg 850 os.remove(fileName) 851 931 852 def test_load_csv_badIV(self): 932 """test_load_csv_bad(self):853 ''' test_load_csv_bad(self): 933 854 header column, body column missmatch 934 855 (Format error) 935 """ 936 import os 937 938 fileName = tempfile.mktemp(".txt") 939 file = open(fileName,"w") 940 file.write(" x,y,elevation , speed \n\ 856 ''' 857 858 fileName = tempfile.mktemp('.txt') 859 file = open(fileName, 'w') 860 file.write(' x,y,elevation , speed \n\ 941 861 1.0, 0.0, 10.0, wow\n\ 942 862 0.0, 1.0, 0.0, ha\n\ 943 1.0, 0.0 ,10.4, yeah\n ")863 1.0, 0.0 ,10.4, yeah\n') 944 864 file.close() 945 865 … … 949 869 # Blocking 950 870 geo_list = [] 951 #for i in results:952 # geo_list.append(i)953 871 try: 954 872 for i in results: … … 957 875 pass 958 876 else: 959 msg = ' bad file did not raise error!'960 raise msg877 msg = 'Bad file did not raise error!' 878 raise Exception, msg 961 879 os.remove(fileName) 962 880 963 881 def test_load_pts_blocking(self): 964 882 #This is pts! 
965 966 import os 967 968 fileName = tempfile.mktemp(".txt") 969 file = open(fileName,"w") 970 file.write(" x,y, elevation , speed \n\ 971 1.0, 0.0, 10.0, 0.0\n\ 972 0.0, 1.0, 0.0, 10.0\n\ 973 1.0, 0.0 ,10.4, 40.0\n") 883 fileName = tempfile.mktemp('.txt') 884 file = open(fileName, 'w') 885 file.write(' x,y, elevation , speed \n\ 886 1.0, 0.0, 10.0, 0.0\n\ 887 0.0, 1.0, 0.0, 10.0\n\ 888 1.0, 0.0 ,10.4, 40.0\n') 974 889 file.close() 975 890 976 pts_file = tempfile.mktemp( ".pts")977 891 pts_file = tempfile.mktemp('.pts') 892 978 893 convert = Geospatial_data(fileName) 979 894 convert.export_points_file(pts_file) 980 895 results = Geospatial_data(pts_file, max_read_lines=2) 981 896 982 assert num.allclose(results.get_data_points(), [[1.0, 0.0],[0.0, 1.0],983 897 assert num.allclose(results.get_data_points(), 898 [[1.0, 0.0],[0.0, 1.0], [1.0, 0.0]]) 984 899 assert num.allclose(results.get_attributes(attribute_name='elevation'), 985 900 [10.0, 0.0, 10.4]) … … 990 905 geo_list = [] 991 906 for i in results: 992 geo_list.append(i) 907 geo_list.append(i) 993 908 assert num.allclose(geo_list[0].get_data_points(), 994 909 [[1.0, 0.0],[0.0, 1.0]]) … … 996 911 [10.0, 0.0]) 997 912 assert num.allclose(geo_list[1].get_data_points(), 998 [[1.0, 0.0]]) 913 [[1.0, 0.0]]) 999 914 assert num.allclose(geo_list[1].get_attributes(attribute_name='elevation'), 1000 915 [10.4]) 1001 1002 os.remove(fileName) 1003 os.remove(pts_file) 916 917 os.remove(fileName) 918 os.remove(pts_file) 1004 919 1005 920 def verbose_test_load_pts_blocking(self): 1006 1007 import os 1008 1009 fileName = tempfile.mktemp(".txt") 1010 file = open(fileName,"w") 1011 file.write(" x,y, elevation , speed \n\ 1012 1.0, 0.0, 10.0, 0.0\n\ 1013 0.0, 1.0, 0.0, 10.0\n\ 1014 1.0, 0.0, 10.0, 0.0\n\ 1015 0.0, 1.0, 0.0, 10.0\n\ 1016 1.0, 0.0, 10.0, 0.0\n\ 1017 0.0, 1.0, 0.0, 10.0\n\ 1018 1.0, 0.0, 10.0, 0.0\n\ 1019 0.0, 1.0, 0.0, 10.0\n\ 1020 1.0, 0.0, 10.0, 0.0\n\ 1021 0.0, 1.0, 0.0, 10.0\n\ 1022 1.0, 0.0, 10.0, 0.0\n\ 1023 0.0, 1.0, 0.0, 10.0\n\ 1024 1.0, 0.0, 10.0, 0.0\n\ 1025 0.0, 1.0, 0.0, 10.0\n\ 1026 1.0, 0.0, 10.0, 0.0\n\ 1027 0.0, 1.0, 0.0, 10.0\n\ 1028 1.0, 0.0, 10.0, 0.0\n\ 1029 0.0, 1.0, 0.0, 10.0\n\ 1030 1.0, 0.0, 10.0, 0.0\n\ 1031 0.0, 1.0, 0.0, 10.0\n\ 1032 1.0, 0.0, 10.0, 0.0\n\ 1033 0.0, 1.0, 0.0, 10.0\n\ 1034 1.0, 0.0, 10.0, 0.0\n\ 1035 0.0, 1.0, 0.0, 10.0\n\ 1036 1.0, 0.0, 10.0, 0.0\n\ 1037 0.0, 1.0, 0.0, 10.0\n\ 1038 1.0, 0.0, 10.0, 0.0\n\ 1039 0.0, 1.0, 0.0, 10.0\n\ 1040 1.0, 0.0, 10.0, 0.0\n\ 1041 0.0, 1.0, 0.0, 10.0\n\ 1042 1.0, 0.0, 10.0, 0.0\n\ 1043 0.0, 1.0, 0.0, 10.0\n\ 1044 1.0, 0.0, 10.0, 0.0\n\ 1045 0.0, 1.0, 0.0, 10.0\n\ 1046 1.0, 0.0, 10.0, 0.0\n\ 1047 0.0, 1.0, 0.0, 10.0\n\ 1048 1.0, 0.0, 10.0, 0.0\n\ 1049 0.0, 1.0, 0.0, 10.0\n\ 1050 1.0, 0.0, 10.0, 0.0\n\ 1051 0.0, 1.0, 0.0, 10.0\n\ 1052 1.0, 0.0, 10.0, 0.0\n\ 1053 0.0, 1.0, 0.0, 10.0\n\ 1054 1.0, 0.0, 10.0, 0.0\n\ 1055 0.0, 1.0, 0.0, 10.0\n\ 1056 1.0, 0.0, 10.0, 0.0\n\ 1057 0.0, 1.0, 0.0, 10.0\n\ 1058 1.0, 0.0 ,10.4, 40.0\n") 921 fileName = tempfile.mktemp('.txt') 922 file = open(fileName, 'w') 923 file.write(' x,y, elevation , speed \n\ 924 1.0, 0.0, 10.0, 0.0\n\ 925 0.0, 1.0, 0.0, 10.0\n\ 926 1.0, 0.0, 10.0, 0.0\n\ 927 0.0, 1.0, 0.0, 10.0\n\ 928 1.0, 0.0, 10.0, 0.0\n\ 929 0.0, 1.0, 0.0, 10.0\n\ 930 1.0, 0.0, 10.0, 0.0\n\ 931 0.0, 1.0, 0.0, 10.0\n\ 932 1.0, 0.0, 10.0, 0.0\n\ 933 0.0, 1.0, 0.0, 10.0\n\ 934 1.0, 0.0, 10.0, 0.0\n\ 935 0.0, 1.0, 0.0, 10.0\n\ 936 1.0, 0.0, 10.0, 0.0\n\ 937 0.0, 1.0, 0.0, 10.0\n\ 938 1.0, 0.0, 10.0, 0.0\n\ 939 0.0, 1.0, 0.0, 10.0\n\ 940 
1.0, 0.0, 10.0, 0.0\n\ 941 0.0, 1.0, 0.0, 10.0\n\ 942 1.0, 0.0, 10.0, 0.0\n\ 943 0.0, 1.0, 0.0, 10.0\n\ 944 1.0, 0.0, 10.0, 0.0\n\ 945 0.0, 1.0, 0.0, 10.0\n\ 946 1.0, 0.0, 10.0, 0.0\n\ 947 0.0, 1.0, 0.0, 10.0\n\ 948 1.0, 0.0, 10.0, 0.0\n\ 949 0.0, 1.0, 0.0, 10.0\n\ 950 1.0, 0.0, 10.0, 0.0\n\ 951 0.0, 1.0, 0.0, 10.0\n\ 952 1.0, 0.0, 10.0, 0.0\n\ 953 0.0, 1.0, 0.0, 10.0\n\ 954 1.0, 0.0, 10.0, 0.0\n\ 955 0.0, 1.0, 0.0, 10.0\n\ 956 1.0, 0.0, 10.0, 0.0\n\ 957 0.0, 1.0, 0.0, 10.0\n\ 958 1.0, 0.0, 10.0, 0.0\n\ 959 0.0, 1.0, 0.0, 10.0\n\ 960 1.0, 0.0, 10.0, 0.0\n\ 961 0.0, 1.0, 0.0, 10.0\n\ 962 1.0, 0.0, 10.0, 0.0\n\ 963 0.0, 1.0, 0.0, 10.0\n\ 964 1.0, 0.0, 10.0, 0.0\n\ 965 0.0, 1.0, 0.0, 10.0\n\ 966 1.0, 0.0, 10.0, 0.0\n\ 967 0.0, 1.0, 0.0, 10.0\n\ 968 1.0, 0.0, 10.0, 0.0\n\ 969 0.0, 1.0, 0.0, 10.0\n\ 970 1.0, 0.0 ,10.4, 40.0\n') 1059 971 file.close() 1060 972 1061 pts_file = tempfile.mktemp(".pts") 1062 973 pts_file = tempfile.mktemp('.pts') 1063 974 convert = Geospatial_data(fileName) 1064 975 convert.export_points_file(pts_file) … … 1068 979 geo_list = [] 1069 980 for i in results: 1070 geo_list.append(i) 981 geo_list.append(i) 1071 982 assert num.allclose(geo_list[0].get_data_points(), 1072 [[1.0, 0.0], [0.0, 1.0]])983 [[1.0, 0.0], [0.0, 1.0]]) 1073 984 assert num.allclose(geo_list[0].get_attributes(attribute_name='elevation'), 1074 985 [10.0, 0.0]) 1075 986 assert num.allclose(geo_list[1].get_data_points(), 1076 [[1.0, 0.0],[0.0, 1.0] ]) 987 [[1.0, 0.0],[0.0, 1.0] ]) 1077 988 assert num.allclose(geo_list[1].get_attributes(attribute_name='elevation'), 1078 989 [10.0, 0.0]) 1079 1080 os.remove(fileName) 990 991 os.remove(fileName) 1081 992 os.remove(pts_file) 1082 1083 1084 993 1085 994 def test_new_export_pts_file(self): … … 1088 997 att_dict['elevation'] = num.array([10.1, 0.0, 10.4]) 1089 998 att_dict['brightness'] = num.array([10.0, 1.0, 10.4]) 1090 1091 fileName = tempfile.mktemp(".pts") 1092 999 1000 fileName = tempfile.mktemp('.pts') 1093 1001 G = Geospatial_data(pointlist, att_dict) 1094 1095 1002 G.export_points_file(fileName) 1096 1097 1003 results = Geospatial_data(file_name = fileName) 1098 1099 os.remove(fileName) 1100 1101 assert num.allclose(results.get_data_points(),[[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]]) 1102 assert num.allclose(results.get_attributes(attribute_name='elevation'), [10.1, 0.0, 10.4]) 1004 os.remove(fileName) 1005 1006 assert num.allclose(results.get_data_points(), 1007 [[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1008 assert num.allclose(results.get_attributes(attribute_name='elevation'), 1009 [10.1, 0.0, 10.4]) 1103 1010 answer = [10.0, 1.0, 10.4] 1104 assert num.allclose(results.get_attributes(attribute_name='brightness'), answer) 1011 assert num.allclose(results.get_attributes(attribute_name='brightness'), 1012 answer) 1105 1013 1106 1014 def test_new_export_absolute_pts_file(self): 1107 1015 att_dict = {} 1108 pointlist = num.array([[1.0, 0.0], [0.0, 1.0],[1.0, 0.0]])1016 pointlist = num.array([[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1109 1017 att_dict['elevation'] = num.array([10.1, 0.0, 10.4]) 1110 1018 att_dict['brightness'] = num.array([10.0, 1.0, 10.4]) 1111 1019 geo_ref = Geo_reference(50, 25, 55) 1112 1113 fileName = tempfile.mktemp(".pts") 1114 1020 1021 fileName = tempfile.mktemp('.pts') 1115 1022 G = Geospatial_data(pointlist, att_dict, geo_ref) 1116 1117 1023 G.export_points_file(fileName, absolute=True) 1118 1119 1024 results = Geospatial_data(file_name = fileName) 1120 1121 os.remove(fileName) 1122 1123 assert num.allclose(results.get_data_points(), 
G.get_data_points(True)) 1124 assert num.allclose(results.get_attributes(attribute_name='elevation'), [10.1, 0.0, 10.4]) 1025 os.remove(fileName) 1026 1027 msg = ('results.get_data_points()=\n%s\nbut G.get_data_points(True)=\n%s' 1028 % (str(results.get_data_points()), str(G.get_data_points(True)))) 1029 assert num.allclose(results.get_data_points(), 1030 G.get_data_points(True)), msg 1031 msg = ("results.get_attributes(attribute_name='elevation')=%s" 1032 % str(results.get_attributes(attribute_name='elevation'))) 1033 assert num.allclose(results.get_attributes(attribute_name='elevation'), 1034 [10.1, 0.0, 10.4]), msg 1125 1035 answer = [10.0, 1.0, 10.4] 1126 assert num.allclose(results.get_attributes(attribute_name='brightness'), answer) 1036 msg = ("results.get_attributes(attribute_name='brightness')=%s, " 1037 'answer=%s' % 1038 (str(results.get_attributes(attribute_name='brightness')), 1039 str(answer))) 1040 assert num.allclose(results.get_attributes(attribute_name='brightness'), 1041 answer), msg 1127 1042 1128 1043 def test_loadpts(self): 1129 1130 1044 from Scientific.IO.NetCDF import NetCDFFile 1131 1045 1132 fileName = tempfile.mktemp( ".pts")1046 fileName = tempfile.mktemp('.pts') 1133 1047 # NetCDF file definition 1134 1048 outfile = NetCDFFile(fileName, netcdf_mode_w) 1135 1049 1136 1050 # dimension definitions 1137 outfile.createDimension('number_of_points', 3) 1138 outfile.createDimension('number_of_dimensions', 2) # This is 2d data1139 1051 outfile.createDimension('number_of_points', 3) 1052 outfile.createDimension('number_of_dimensions', 2) # This is 2d data 1053 1140 1054 # variable definitions 1141 outfile.createVariable('points', n um.Float, ('number_of_points',1142 'number_of_dimensions'))1143 outfile.createVariable('elevation', n um.Float, ('number_of_points',))1144 1055 outfile.createVariable('points', netcdf_float, ('number_of_points', 1056 'number_of_dimensions')) 1057 outfile.createVariable('elevation', netcdf_float, ('number_of_points',)) 1058 1145 1059 # Get handles to the variables 1146 1060 points = outfile.variables['points'] 1147 1061 elevation = outfile.variables['elevation'] 1148 1062 1149 1063 points[0, :] = [1.0,0.0] 1150 elevation[0] = 10.0 1064 elevation[0] = 10.0 1151 1065 points[1, :] = [0.0,1.0] 1152 elevation[1] = 0.0 1066 elevation[1] = 0.0 1153 1067 points[2, :] = [1.0,0.0] 1154 elevation[2] = 10.4 1068 elevation[2] = 10.4 1155 1069 1156 1070 outfile.close() 1157 1071 1158 1072 results = Geospatial_data(file_name = fileName) 1159 1073 os.remove(fileName) 1160 answer = [[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]] 1161 assert num.allclose(results.get_data_points(), [[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]]) 1162 assert num.allclose(results.get_attributes(attribute_name='elevation'), [10.0, 0.0, 10.4]) 1163 1074 1075 answer = [[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]] 1076 assert num.allclose(results.get_data_points(), 1077 [[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1078 assert num.allclose(results.get_attributes(attribute_name='elevation'), 1079 [10.0, 0.0, 10.4]) 1080 1164 1081 def test_writepts(self): 1165 #test_writepts: Test that storage of x,y,attributes works1166 1082 '''Test that storage of x,y,attributes works''' 1083 1167 1084 att_dict = {} 1168 pointlist = num.array([[1.0, 0.0], [0.0, 1.0],[1.0, 0.0]])1085 pointlist = num.array([[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1169 1086 att_dict['elevation'] = num.array([10.0, 0.0, 10.4]) 1170 1087 att_dict['brightness'] = num.array([10.0, 0.0, 10.4]) 1171 geo_reference=Geo_reference(56, 1.9,1.9)1088 
geo_reference=Geo_reference(56, 1.9, 1.9) 1172 1089 1173 1090 # Test pts format 1174 fileName = tempfile.mktemp( ".pts")1091 fileName = tempfile.mktemp('.pts') 1175 1092 G = Geospatial_data(pointlist, att_dict, geo_reference) 1176 1093 G.export_points_file(fileName, False) … … 1178 1095 os.remove(fileName) 1179 1096 1180 assert num.allclose(results.get_data_points(False),[[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]]) 1181 assert num.allclose(results.get_attributes('elevation'), [10.0, 0.0, 10.4]) 1097 assert num.allclose(results.get_data_points(False), 1098 [[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1099 assert num.allclose(results.get_attributes('elevation'), 1100 [10.0, 0.0, 10.4]) 1182 1101 answer = [10.0, 0.0, 10.4] 1183 1102 assert num.allclose(results.get_attributes('brightness'), answer) 1184 1103 self.failUnless(geo_reference == geo_reference, 1185 1104 'test_writepts failed. Test geo_reference') 1186 1105 1187 1106 def test_write_csv_attributes(self): 1188 #test_write : Test that storage of x,y,attributes works1189 1107 '''Test that storage of x,y,attributes works''' 1108 1190 1109 att_dict = {} 1191 pointlist = num.array([[1.0, 0.0], [0.0, 1.0],[1.0, 0.0]])1110 pointlist = num.array([[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1192 1111 att_dict['elevation'] = num.array([10.0, 0.0, 10.4]) 1193 1112 att_dict['brightness'] = num.array([10.0, 0.0, 10.4]) 1194 geo_reference=Geo_reference(56,0,0) 1113 geo_reference=Geo_reference(56, 0, 0) 1114 1195 1115 # Test txt format 1196 fileName = tempfile.mktemp( ".txt")1116 fileName = tempfile.mktemp('.txt') 1197 1117 G = Geospatial_data(pointlist, att_dict, geo_reference) 1198 1118 G.export_points_file(fileName) 1199 #print "fileName", fileName1200 1119 results = Geospatial_data(file_name=fileName) 1201 1120 os.remove(fileName) 1202 assert num.allclose(results.get_data_points(False),[[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]]) 1203 assert num.allclose(results.get_attributes('elevation'), [10.0, 0.0, 10.4]) 1121 1122 assert num.allclose(results.get_data_points(False), 1123 [[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1124 assert num.allclose(results.get_attributes('elevation'), 1125 [10.0, 0.0, 10.4]) 1204 1126 answer = [10.0, 0.0, 10.4] 1205 1127 assert num.allclose(results.get_attributes('brightness'), answer) 1206 1207 1128 1208 1129 def test_write_csv_attributes_lat_long(self): 1209 #test_write : Test that storage of x,y,attributes works1210 1130 '''Test that storage of x,y,attributes works''' 1131 1211 1132 att_dict = {} 1212 pointlist = num.array([[-21.5, 114.5],[-21.6,114.5],[-21.7,114.5]])1133 pointlist = num.array([[-21.5, 114.5], [-21.6, 114.5], [-21.7, 114.5]]) 1213 1134 att_dict['elevation'] = num.array([10.0, 0.0, 10.4]) 1214 1135 att_dict['brightness'] = num.array([10.0, 0.0, 10.4]) 1136 1215 1137 # Test txt format 1216 fileName = tempfile.mktemp( ".txt")1138 fileName = tempfile.mktemp('.txt') 1217 1139 G = Geospatial_data(pointlist, att_dict, points_are_lats_longs=True) 1218 1140 G.export_points_file(fileName, as_lat_long=True) 1219 #print "fileName", fileName1220 1141 results = Geospatial_data(file_name=fileName) 1221 1142 os.remove(fileName) 1143 1222 1144 assert num.allclose(results.get_data_points(False, as_lat_long=True), 1223 pointlist) 1224 assert num.allclose(results.get_attributes('elevation'), [10.0, 0.0, 10.4]) 1145 pointlist) 1146 assert num.allclose(results.get_attributes('elevation'), 1147 [10.0, 0.0, 10.4]) 1225 1148 answer = [10.0, 0.0, 10.4] 1226 1149 assert num.allclose(results.get_attributes('brightness'), answer) 1227 1150 1228 1151 def 
test_writepts_no_attributes(self): 1229 1230 #test_writepts_no_attributes: Test that storage of x,y alone works 1231 1152 '''Test that storage of x,y alone works''' 1153 1232 1154 att_dict = {} 1233 pointlist = num.array([[1.0, 0.0], [0.0, 1.0],[1.0, 0.0]])1234 geo_reference=Geo_reference(56, 1.9,1.9)1155 pointlist = num.array([[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1156 geo_reference=Geo_reference(56, 1.9, 1.9) 1235 1157 1236 1158 # Test pts format 1237 fileName = tempfile.mktemp( ".pts")1159 fileName = tempfile.mktemp('.pts') 1238 1160 G = Geospatial_data(pointlist, None, geo_reference) 1239 1161 G.export_points_file(fileName, False) … … 1241 1163 os.remove(fileName) 1242 1164 1243 assert num.allclose(results.get_data_points(False),[[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]]) 1165 assert num.allclose(results.get_data_points(False), 1166 [[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1244 1167 self.failUnless(geo_reference == geo_reference, 1245 'test_writepts failed. Test geo_reference') 1246 1247 1168 'test_writepts failed. Test geo_reference') 1169 1248 1170 def test_write_csv_no_attributes(self): 1249 #test_write txt _no_attributes: Test that storage of x,y alone works1250 1171 '''Test that storage of x,y alone works''' 1172 1251 1173 att_dict = {} 1252 pointlist = num.array([[1.0, 0.0], [0.0, 1.0],[1.0, 0.0]])1174 pointlist = num.array([[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1253 1175 geo_reference=Geo_reference(56,0,0) 1176 1254 1177 # Test format 1255 fileName = tempfile.mktemp( ".txt")1178 fileName = tempfile.mktemp('.txt') 1256 1179 G = Geospatial_data(pointlist, None, geo_reference) 1257 1180 G.export_points_file(fileName) 1258 1181 results = Geospatial_data(file_name=fileName) 1259 1182 os.remove(fileName) 1260 assert num.allclose(results.get_data_points(False),[[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]]) 1261 1262 1263 1264 ########################## BAD .PTS ########################## 1183 1184 assert num.allclose(results.get_data_points(False), 1185 [[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1186 1187 ################################################################################ 1188 # Check bad PTS files. 1189 ################################################################################ 1265 1190 1266 1191 def test_load_bad_no_file_pts(self): 1267 import os 1268 import tempfile 1269 1270 fileName = tempfile.mktemp(".pts") 1271 #print fileName 1192 fileName = tempfile.mktemp('.pts') 1272 1193 try: 1273 results = Geospatial_data(file_name = fileName) 1274 # dict = import_points_file(fileName) 1194 results = Geospatial_data(file_name=fileName) 1275 1195 except IOError: 1276 1196 pass 1277 1197 else: 1278 1198 msg = 'imaginary file did not raise error!' 
1279 raise msg 1280 # self.failUnless(0 == 1, 1281 # 'imaginary file did not raise error!') 1282 1199 raise Exception, msg 1283 1200 1284 1201 def test_create_from_pts_file(self): 1285 1286 1202 from Scientific.IO.NetCDF import NetCDFFile 1287 1203 1288 # fileName = tempfile.mktemp(".pts") 1204 # NetCDF file definition 1289 1205 FN = 'test_points.pts' 1290 # NetCDF file definition1291 1206 outfile = NetCDFFile(FN, netcdf_mode_w) 1292 1207 1293 1208 # dimension definitions 1294 outfile.createDimension('number_of_points', 3) 1295 outfile.createDimension('number_of_dimensions', 2) # This is 2d data1296 1209 outfile.createDimension('number_of_points', 3) 1210 outfile.createDimension('number_of_dimensions', 2) # This is 2d data 1211 1297 1212 # variable definitions 1298 outfile.createVariable('points', n um.Float, ('number_of_points',1299 'number_of_dimensions'))1300 outfile.createVariable('elevation', n um.Float, ('number_of_points',))1301 1213 outfile.createVariable('points', netcdf_float, ('number_of_points', 1214 'number_of_dimensions')) 1215 outfile.createVariable('elevation', netcdf_float, ('number_of_points',)) 1216 1302 1217 # Get handles to the variables 1303 1218 points = outfile.variables['points'] 1304 1219 elevation = outfile.variables['elevation'] 1305 1220 1306 1221 points[0, :] = [1.0,0.0] 1307 elevation[0] = 10.0 1222 elevation[0] = 10.0 1308 1223 points[1, :] = [0.0,1.0] 1309 elevation[1] = 0.0 1224 elevation[1] = 0.0 1310 1225 points[2, :] = [1.0,0.0] 1311 elevation[2] = 10.4 1226 elevation[2] = 10.4 1312 1227 1313 1228 outfile.close() … … 1317 1232 assert num.allclose(G.get_geo_reference().get_xllcorner(), 0.0) 1318 1233 assert num.allclose(G.get_geo_reference().get_yllcorner(), 0.0) 1319 1320 assert num.allclose(G.get_data_points(), [[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]])1234 assert num.allclose(G.get_data_points(), 1235 [[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1321 1236 assert num.allclose(G.get_attributes(), [10.0, 0.0, 10.4]) 1322 1237 os.remove(FN) 1323 1238 1324 1239 def test_create_from_pts_file_with_geo(self): 1325 """This test reveals if Geospatial data is correctly instantiated from a pts file. 
1326 """ 1327 1240 '''Test if Geospatial data is correctly instantiated from a pts file.''' 1241 1328 1242 from Scientific.IO.NetCDF import NetCDFFile 1329 1243 1244 # NetCDF file definition 1330 1245 FN = 'test_points.pts' 1331 # NetCDF file definition1332 1246 outfile = NetCDFFile(FN, netcdf_mode_w) 1333 1247 … … 1339 1253 1340 1254 # dimension definitions 1341 outfile.createDimension('number_of_points', 3) 1342 outfile.createDimension('number_of_dimensions', 2) # This is 2d data1343 1255 outfile.createDimension('number_of_points', 3) 1256 outfile.createDimension('number_of_dimensions', 2) # This is 2d data 1257 1344 1258 # variable definitions 1345 outfile.createVariable('points', n um.Float, ('number_of_points',1346 'number_of_dimensions'))1347 outfile.createVariable('elevation', n um.Float, ('number_of_points',))1348 1259 outfile.createVariable('points', netcdf_float, ('number_of_points', 1260 'number_of_dimensions')) 1261 outfile.createVariable('elevation', netcdf_float, ('number_of_points',)) 1262 1349 1263 # Get handles to the variables 1350 1264 points = outfile.variables['points'] … … 1352 1266 1353 1267 points[0, :] = [1.0,0.0] 1354 elevation[0] = 10.0 1268 elevation[0] = 10.0 1355 1269 points[1, :] = [0.0,1.0] 1356 elevation[1] = 0.0 1270 elevation[1] = 0.0 1357 1271 points[2, :] = [1.0,0.0] 1358 elevation[2] = 10.4 1272 elevation[2] = 10.4 1359 1273 1360 1274 outfile.close() … … 1364 1278 assert num.allclose(G.get_geo_reference().get_xllcorner(), xll) 1365 1279 assert num.allclose(G.get_geo_reference().get_yllcorner(), yll) 1366 1367 1280 assert num.allclose(G.get_data_points(), [[1.0+xll, 0.0+yll], 1368 1281 [0.0+xll, 1.0+yll], 1369 1282 [1.0+xll, 0.0+yll]]) 1370 1371 1283 assert num.allclose(G.get_attributes(), [10.0, 0.0, 10.4]) 1284 1372 1285 os.remove(FN) 1373 1286 1374 1375 1287 def test_add_(self): 1376 1288 '''test_add_(self): 1377 1289 adds an txt and pts files, reads the files and adds them 1378 1290 checking results are correct 1379 1291 ''' 1292 1380 1293 # create files 1381 1294 att_dict1 = {} 1382 pointlist1 = num.array([[1.0, 0.0], [0.0, 1.0],[1.0, 0.0]])1295 pointlist1 = num.array([[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1383 1296 att_dict1['elevation'] = num.array([-10.0, 0.0, 10.4]) 1384 1297 att_dict1['brightness'] = num.array([10.0, 0.0, 10.4]) 1385 1298 geo_reference1 = Geo_reference(56, 2.0, 1.0) 1386 1299 1387 1300 att_dict2 = {} 1388 pointlist2 = num.array([[2.0, 1.0], [1.0, 2.0],[2.0, 1.0]])1301 pointlist2 = num.array([[2.0, 1.0], [1.0, 2.0], [2.0, 1.0]]) 1389 1302 att_dict2['elevation'] = num.array([1.0, 15.0, 1.4]) 1390 1303 att_dict2['brightness'] = num.array([14.0, 1.0, -12.4]) 1391 geo_reference2 = Geo_reference(56, 1.0, 2.0) 1304 geo_reference2 = Geo_reference(56, 1.0, 2.0) 1392 1305 1393 1306 G1 = Geospatial_data(pointlist1, att_dict1, geo_reference1) 1394 1307 G2 = Geospatial_data(pointlist2, att_dict2, geo_reference2) 1395 1396 fileName1 = tempfile.mktemp( ".txt")1397 fileName2 = tempfile.mktemp( ".pts")1398 1399 # makes files1308 1309 fileName1 = tempfile.mktemp('.txt') 1310 fileName2 = tempfile.mktemp('.pts') 1311 1312 # makes files 1400 1313 G1.export_points_file(fileName1) 1401 1314 G2.export_points_file(fileName2) 1402 1315 1403 1316 # add files 1404 1405 G3 = Geospatial_data(file_name = fileName1) 1406 G4 = Geospatial_data(file_name = fileName2) 1407 1317 G3 = Geospatial_data(file_name=fileName1) 1318 G4 = Geospatial_data(file_name=fileName2) 1408 1319 G = G3 + G4 1409 1320 1410 1411 1321 #read results 1412 # print'res', 
G.get_data_points()1413 # print'res1', G.get_data_points(False)1414 1322 assert num.allclose(G.get_data_points(), 1415 1323 [[ 3.0, 1.0], [ 2.0, 2.0], 1416 1324 [ 3.0, 1.0], [ 3.0, 3.0], 1417 1325 [ 2.0, 4.0], [ 3.0, 3.0]]) 1418 1419 1326 assert num.allclose(G.get_attributes(attribute_name='elevation'), 1420 1327 [-10.0, 0.0, 10.4, 1.0, 15.0, 1.4]) 1421 1422 1328 answer = [10.0, 0.0, 10.4, 14.0, 1.0, -12.4] 1423 assert num.allclose(G.get_attributes(attribute_name='brightness'), answer)1424 1329 assert num.allclose(G.get_attributes(attribute_name='brightness'), 1330 answer) 1425 1331 self.failUnless(G.get_geo_reference() == geo_reference1, 1426 1427 1332 'test_writepts failed. Test geo_reference') 1333 1428 1334 os.remove(fileName1) 1429 1335 os.remove(fileName2) 1430 1336 1431 1337 def test_ensure_absolute(self): 1432 points = [[2.0, 0.0], [1.0, 1.0],1433 [2.0, 0.0],[2.0, 2.0],1434 [1.0, 3.0],[2.0, 2.0]]1338 points = [[2.0, 0.0], [1.0, 1.0], 1339 [2.0, 0.0], [2.0, 2.0], 1340 [1.0, 3.0], [2.0, 2.0]] 1435 1341 new_points = ensure_absolute(points) 1436 1342 1437 1343 assert num.allclose(new_points, points) 1438 1344 1439 points = num.array([[2.0, 0.0], [1.0, 1.0],1440 [2.0, 0.0], [2.0, 2.0],1441 [1.0, 3.0], [2.0, 2.0]])1345 points = num.array([[2.0, 0.0], [1.0, 1.0], 1346 [2.0, 0.0], [2.0, 2.0], 1347 [1.0, 3.0], [2.0, 2.0]]) 1442 1348 new_points = ensure_absolute(points) 1443 1349 1444 1350 assert num.allclose(new_points, points) 1445 1446 ab_points = num.array([[2.0, 0.0], [1.0, 1.0],1447 [2.0, 0.0], [2.0, 2.0],1448 [1.0, 3.0], [2.0, 2.0]])1449 1450 mesh_origin = (56, 290000, 618000) # zone, easting, northing1451 1452 data_points = num.zeros((ab_points.shape), num.Float) 1351 1352 ab_points = num.array([[2.0, 0.0], [1.0, 1.0], 1353 [2.0, 0.0], [2.0, 2.0], 1354 [1.0, 3.0], [2.0, 2.0]]) 1355 1356 mesh_origin = (56, 290000, 618000) # zone, easting, northing 1357 data_points = num.zeros((ab_points.shape), num.float) 1358 1453 1359 #Shift datapoints according to new origins 1454 1360 for k in range(len(ab_points)): 1455 1361 data_points[k][0] = ab_points[k][0] - mesh_origin[1] 1456 1362 data_points[k][1] = ab_points[k][1] - mesh_origin[2] 1457 #print "data_points",data_points 1458 new_points = ensure_absolute(data_points, 1459 geo_reference=mesh_origin) 1460 #print "new_points",new_points 1461 #print "ab_points",ab_points 1462 1363 new_points = ensure_absolute(data_points, geo_reference=mesh_origin) 1364 1463 1365 assert num.allclose(new_points, ab_points) 1464 1366 1465 1367 geo = Geo_reference(56,67,-56) 1466 1467 data_points = geo.change_points_geo_ref(ab_points) 1468 new_points = ensure_absolute(data_points, 1469 geo_reference=geo) 1470 #print "new_points",new_points 1471 #print "ab_points",ab_points 1472 1368 data_points = geo.change_points_geo_ref(ab_points) 1369 new_points = ensure_absolute(data_points, geo_reference=geo) 1370 1473 1371 assert num.allclose(new_points, ab_points) 1474 1475 1372 1476 1373 geo_reference = Geo_reference(56, 100, 200) … … 1478 1375 points = geo_reference.change_points_geo_ref(ab_points) 1479 1376 attributes = [2, 4] 1480 #print "geo in points", points 1481 G = Geospatial_data(points, attributes, 1482 geo_reference=geo_reference) 1483 1377 G = Geospatial_data(points, attributes, geo_reference=geo_reference) 1484 1378 new_points = ensure_absolute(G) 1485 #print "new_points",new_points 1486 #print "ab_points",ab_points 1487 1379 1488 1380 assert num.allclose(new_points, ab_points) 1489 1381 1490 1491 1492 1382 def test_ensure_geospatial(self): 1493 points = [[2.0, 
0.0], [1.0, 1.0],1494 [2.0, 0.0],[2.0, 2.0],1495 [1.0, 3.0],[2.0, 2.0]]1383 points = [[2.0, 0.0], [1.0, 1.0], 1384 [2.0, 0.0], [2.0, 2.0], 1385 [1.0, 3.0], [2.0, 2.0]] 1496 1386 new_points = ensure_geospatial(points) 1497 1498 assert num.allclose(new_points.get_data_points(absolute = True), points) 1499 1500 points = num.array([[2.0, 0.0],[1.0, 1.0], 1501 [2.0, 0.0],[2.0, 2.0], 1502 [1.0, 3.0],[2.0, 2.0]]) 1387 assert num.allclose(new_points.get_data_points(absolute=True), points) 1388 1389 points = num.array([[2.0, 0.0], [1.0, 1.0], 1390 [2.0, 0.0], [2.0, 2.0], 1391 [1.0, 3.0], [2.0, 2.0]]) 1503 1392 new_points = ensure_geospatial(points) 1504 1505 assert num.allclose(new_points.get_data_points(absolute = True), points) 1506 1393 assert num.allclose(new_points.get_data_points(absolute=True), points) 1394 1507 1395 ab_points = num.array([[2.0, 0.0],[1.0, 1.0], 1508 1396 [2.0, 0.0],[2.0, 2.0], 1509 1397 [1.0, 3.0],[2.0, 2.0]]) 1510 1511 mesh_origin = (56, 290000, 618000) #zone, easting, northing 1512 1513 data_points = num.zeros((ab_points.shape), num.Float) 1398 mesh_origin = (56, 290000, 618000) # zone, easting, northing 1399 data_points = num.zeros((ab_points.shape), num.float) 1400 1514 1401 #Shift datapoints according to new origins 1515 1402 for k in range(len(ab_points)): 1516 1403 data_points[k][0] = ab_points[k][0] - mesh_origin[1] 1517 1404 data_points[k][1] = ab_points[k][1] - mesh_origin[2] 1518 #print "data_points",data_points1519 1405 new_geospatial = ensure_geospatial(data_points, 1520 1406 geo_reference=mesh_origin) 1521 1407 new_points = new_geospatial.get_data_points(absolute=True) 1522 #print "new_points",new_points1523 #print "ab_points",ab_points1524 1525 1408 assert num.allclose(new_points, ab_points) 1526 1409 1527 geo = Geo_reference(56,67,-56) 1528 1529 data_points = geo.change_points_geo_ref(ab_points) 1530 new_geospatial = ensure_geospatial(data_points, 1531 geo_reference=geo) 1410 geo = Geo_reference(56, 67, -56) 1411 data_points = geo.change_points_geo_ref(ab_points) 1412 new_geospatial = ensure_geospatial(data_points, geo_reference=geo) 1532 1413 new_points = new_geospatial.get_data_points(absolute=True) 1533 #print "new_points",new_points 1534 #print "ab_points",ab_points 1535 1536 assert num.allclose(new_points, ab_points) 1537 1414 msg = ('new_points=\n%s\nab_points=\n%s' 1415 % (str(new_points), str(ab_points))) 1416 assert num.allclose(new_points, ab_points), msg 1538 1417 1539 1418 geo_reference = Geo_reference(56, 100, 200) … … 1541 1420 points = geo_reference.change_points_geo_ref(ab_points) 1542 1421 attributes = [2, 4] 1543 #print "geo in points", points 1544 G = Geospatial_data(points, attributes, 1545 geo_reference=geo_reference) 1546 1547 new_geospatial = ensure_geospatial(G) 1422 G = Geospatial_data(points, attributes, geo_reference=geo_reference) 1423 new_geospatial = ensure_geospatial(G) 1548 1424 new_points = new_geospatial.get_data_points(absolute=True) 1549 #print "new_points",new_points1550 #print "ab_points",ab_points1551 1552 1425 assert num.allclose(new_points, ab_points) 1553 1426 1554 1427 def test_isinstance(self): 1555 1556 import os 1557 1558 fileName = tempfile.mktemp(".csv") 1559 file = open(fileName,"w") 1560 file.write("x,y, elevation , speed \n\ 1561 1.0, 0.0, 10.0, 0.0\n\ 1562 0.0, 1.0, 0.0, 10.0\n\ 1563 1.0, 0.0, 10.4, 40.0\n") 1428 fileName = tempfile.mktemp('.csv') 1429 file = open(fileName, 'w') 1430 file.write('x,y, elevation , speed \n\ 1431 1.0, 0.0, 10.0, 0.0\n\ 1432 0.0, 1.0, 0.0, 10.0\n\ 1433 1.0, 0.0, 10.4, 40.0\n') 
1564 1434 file.close() 1565 1435 1566 1436 results = Geospatial_data(fileName) 1567 1437 assert num.allclose(results.get_data_points(absolute=True), 1568 [[1.0, 0.0], [0.0, 1.0],[1.0, 0.0]])1438 [[1.0, 0.0], [0.0, 1.0], [1.0, 0.0]]) 1569 1439 assert num.allclose(results.get_attributes(attribute_name='elevation'), 1570 1440 [10.0, 0.0, 10.4]) … … 1573 1443 1574 1444 os.remove(fileName) 1575 1576 1445 1577 1446 def test_no_constructors(self): 1578 1579 1447 try: 1580 1448 G = Geospatial_data() 1581 # results = Geospatial_data(file_name = fileName)1582 # dict = import_points_file(fileName)1583 1449 except ValueError: 1584 1450 pass 1585 1451 else: 1586 1452 msg = 'Instance must have a filename or data points' 1587 raise msg1453 raise Exception, msg 1588 1454 1589 1455 def test_load_csv_lat_long(self): 1590 """ 1591 comma delimited 1592 1593 """ 1594 fileName = tempfile.mktemp(".csv") 1595 file = open(fileName,"w") 1596 file.write("long,lat, elevation, yeah \n\ 1456 '''comma delimited''' 1457 1458 fileName = tempfile.mktemp('.csv') 1459 file = open(fileName, 'w') 1460 file.write('long,lat, elevation, yeah \n\ 1597 1461 150.916666667,-34.50,452.688000, 10\n\ 1598 150.0,-34,459.126000, 10\n ")1462 150.0,-34,459.126000, 10\n') 1599 1463 file.close() 1464 1600 1465 results = Geospatial_data(fileName) 1601 1466 os.remove(fileName) 1602 1467 points = results.get_data_points() 1603 1468 1604 1469 assert num.allclose(points[0][0], 308728.009) 1605 1470 assert num.allclose(points[0][1], 6180432.601) 1606 1471 assert num.allclose(points[1][0], 222908.705) 1607 1472 assert num.allclose(points[1][1], 6233785.284) 1608 1609 1473 1610 1474 def test_load_csv_lat_longII(self): 1611 """ 1612 comma delimited 1613 1614 """ 1615 fileName = tempfile.mktemp(".csv") 1616 file = open(fileName,"w") 1617 file.write("Lati,LONG,z \n\ 1475 '''comma delimited''' 1476 1477 fileName = tempfile.mktemp('.csv') 1478 file = open(fileName, 'w') 1479 file.write('Lati,LONG,z \n\ 1618 1480 -34.50,150.916666667,452.688000\n\ 1619 -34,150.0,459.126000\n ")1481 -34,150.0,459.126000\n') 1620 1482 file.close() 1483 1621 1484 results = Geospatial_data(fileName) 1622 1485 os.remove(fileName) 1623 1486 points = results.get_data_points() 1624 1487 1625 1488 assert num.allclose(points[0][0], 308728.009) 1626 1489 assert num.allclose(points[0][1], 6180432.601) 1627 assert num.allclose(points[1][0], 1490 assert num.allclose(points[1][0], 222908.705) 1628 1491 assert num.allclose(points[1][1], 6233785.284) 1629 1492 1630 1631 1493 def test_load_csv_lat_long_bad(self): 1632 """ 1633 comma delimited 1634 1635 """ 1636 fileName = tempfile.mktemp(".csv") 1637 file = open(fileName,"w") 1638 file.write("Lati,LONG,z \n\ 1494 '''comma delimited''' 1495 1496 fileName = tempfile.mktemp('.csv') 1497 file = open(fileName, 'w') 1498 file.write('Lati,LONG,z \n\ 1639 1499 -25.0,180.0,452.688000\n\ 1640 -34,150.0,459.126000\n ")1500 -34,150.0,459.126000\n') 1641 1501 file.close() 1502 1642 1503 try: 1643 1504 results = Geospatial_data(fileName) … … 1646 1507 else: 1647 1508 msg = 'Different zones in Geo references not caught.' 1648 raise msg1649 1650 os.remove(fileName) 1651 1509 raise Exception, msg 1510 1511 os.remove(fileName) 1512 1652 1513 def test_lat_long(self): 1653 lat_gong = degminsec2decimal_degrees(-34, 30,0.)1654 lon_gong = degminsec2decimal_degrees(150, 55,0.)1655 1656 lat_2 = degminsec2decimal_degrees(-34, 00,0.)1657 lon_2 = degminsec2decimal_degrees(150, 00,0.)1658 1514 lat_gong = degminsec2decimal_degrees(-34, 30, 0.) 
1515 lon_gong = degminsec2decimal_degrees(150, 55, 0.) 1516 1517 lat_2 = degminsec2decimal_degrees(-34, 00, 0.) 1518 lon_2 = degminsec2decimal_degrees(150, 00, 0.) 1519 1659 1520 lats = [lat_gong, lat_2] 1660 1521 longs = [lon_gong, lon_2] … … 1662 1523 1663 1524 points = gsd.get_data_points(absolute=True) 1664 1525 1665 1526 assert num.allclose(points[0][0], 308728.009) 1666 1527 assert num.allclose(points[0][1], 6180432.601) 1667 assert num.allclose(points[1][0], 1528 assert num.allclose(points[1][0], 222908.705) 1668 1529 assert num.allclose(points[1][1], 6233785.284) 1669 1530 self.failUnless(gsd.get_geo_reference().get_zone() == 56, 1670 1531 'Bad zone error!') 1671 1532 1533 # use self.failUnlessRaises(ValueError, Geospatial_data(latitudes=lats)) 1672 1534 try: 1673 1535 results = Geospatial_data(latitudes=lats) … … 1675 1537 pass 1676 1538 else: 1677 self.fail Unless(0 ==1,'Error not thrown error!')1539 self.fail('Error not thrown error!') 1678 1540 try: 1679 1541 results = Geospatial_data(latitudes=lats) … … 1681 1543 pass 1682 1544 else: 1683 self.fail Unless(0 ==1,'Error not thrown error!')1545 self.fail('Error not thrown error!') 1684 1546 try: 1685 1547 results = Geospatial_data(longitudes=lats) … … 1687 1549 pass 1688 1550 else: 1689 self.fail Unless(0 ==1,'Error not thrown error!')1551 self.fail('Error not thrown error!') 1690 1552 try: 1691 1553 results = Geospatial_data(latitudes=lats, longitudes=longs, … … 1694 1556 pass 1695 1557 else: 1696 self.fail Unless(0 ==1,'Error not thrown error!')1697 1558 self.fail('Error not thrown error!') 1559 1698 1560 try: 1699 1561 results = Geospatial_data(latitudes=lats, longitudes=longs, … … 1702 1564 pass 1703 1565 else: 1704 self.fail Unless(0 ==1,'Error not thrown error!')1566 self.fail('Error not thrown error!') 1705 1567 1706 1568 def test_lat_long2(self): 1707 lat_gong = degminsec2decimal_degrees(-34, 30,0.)1708 lon_gong = degminsec2decimal_degrees(150, 55,0.)1709 1710 lat_2 = degminsec2decimal_degrees(-34, 00,0.)1711 lon_2 = degminsec2decimal_degrees(150, 00,0.)1712 1569 lat_gong = degminsec2decimal_degrees(-34, 30, 0.) 1570 lon_gong = degminsec2decimal_degrees(150, 55, 0.) 1571 1572 lat_2 = degminsec2decimal_degrees(-34, 00, 0.) 1573 lon_2 = degminsec2decimal_degrees(150, 00, 0.) 1574 1713 1575 points = [[lat_gong, lon_gong], [lat_2, lon_2]] 1714 1576 gsd = Geospatial_data(data_points=points, points_are_lats_longs=True) 1715 1577 1716 1578 points = gsd.get_data_points(absolute=True) 1717 1579 1718 1580 assert num.allclose(points[0][0], 308728.009) 1719 1581 assert num.allclose(points[0][1], 6180432.601) 1720 assert num.allclose(points[1][0], 1582 assert num.allclose(points[1][0], 222908.705) 1721 1583 assert num.allclose(points[1][1], 6233785.284) 1722 1584 self.failUnless(gsd.get_geo_reference().get_zone() == 56, … … 1728 1590 pass 1729 1591 else: 1730 self.failUnless(0 ==1, 'Error not thrown error!') 1731 1592 self.fail('Error not thrown error!') 1732 1593 1733 1594 def test_write_urs_file(self): 1734 lat_gong = degminsec2decimal_degrees(-34, 00,0)1735 lon_gong = degminsec2decimal_degrees(150, 30,0.)1736 1737 lat_2 = degminsec2decimal_degrees(-34, 00,1)1738 lon_2 = degminsec2decimal_degrees(150, 00,0.)1595 lat_gong = degminsec2decimal_degrees(-34, 00, 0) 1596 lon_gong = degminsec2decimal_degrees(150, 30, 0.) 1597 1598 lat_2 = degminsec2decimal_degrees(-34, 00, 1) 1599 lon_2 = degminsec2decimal_degrees(150, 00, 0.) 
1739 1600 p1 = (lat_gong, lon_gong) 1740 1601 p2 = (lat_2, lon_2) … … 1742 1603 gsd = Geospatial_data(data_points=list(points), 1743 1604 points_are_lats_longs=True) 1744 1745 fn = tempfile.mktemp( ".urs")1605 1606 fn = tempfile.mktemp('.urs') 1746 1607 gsd.export_points_file(fn) 1747 #print "fn", fn1748 1608 handle = open(fn) 1749 1609 lines = handle.readlines() 1750 assert lines[0], '2'1751 assert lines[1], '-34.0002778 150.0 0'1752 assert lines[2], '-34.0 150.5 1'1610 assert lines[0], '2' 1611 assert lines[1], '-34.0002778 150.0 0' 1612 assert lines[2], '-34.0 150.5 1' 1753 1613 handle.close() 1754 1614 os.remove(fn) 1755 1615 1756 1616 def test_lat_long_set(self): 1757 lat_gong = degminsec2decimal_degrees(-34, 30,0.)1758 lon_gong = degminsec2decimal_degrees(150, 55,0.)1759 1760 lat_2 = degminsec2decimal_degrees(-34, 00,0.)1761 lon_2 = degminsec2decimal_degrees(150, 00,0.)1617 lat_gong = degminsec2decimal_degrees(-34, 30, 0.) 1618 lon_gong = degminsec2decimal_degrees(150, 55, 0.) 1619 1620 lat_2 = degminsec2decimal_degrees(-34, 00, 0.) 1621 lon_2 = degminsec2decimal_degrees(150, 00, 0.) 1762 1622 p1 = (lat_gong, lon_gong) 1763 1623 p2 = (lat_2, lon_2) … … 1767 1627 1768 1628 points = gsd.get_data_points(absolute=True) 1769 #print "points[0][0]", points[0][0]1770 1629 #Note the order is unknown, due to using sets 1771 1630 # and it changes from windows to linux … … 1773 1632 assert num.allclose(points[1][0], 308728.009) 1774 1633 assert num.allclose(points[1][1], 6180432.601) 1775 assert num.allclose(points[0][0], 1634 assert num.allclose(points[0][0], 222908.705) 1776 1635 assert num.allclose(points[0][1], 6233785.284) 1777 1636 except AssertionError: 1778 1637 assert num.allclose(points[0][0], 308728.009) 1779 1638 assert num.allclose(points[0][1], 6180432.601) 1780 assert num.allclose(points[1][0], 1639 assert num.allclose(points[1][0], 222908.705) 1781 1640 assert num.allclose(points[1][1], 6233785.284) 1782 1641 1783 1642 self.failUnless(gsd.get_geo_reference().get_zone() == 56, 1784 1643 'Bad zone error!') 1785 1644 points = gsd.get_data_points(as_lat_long=True) 1786 #print "test_lat_long_set points", points1787 1645 try: 1788 1646 assert num.allclose(points[0][0], -34) … … 1793 1651 1794 1652 def test_len(self): 1795 1796 1653 points = [[1.0, 2.1], [3.0, 5.3]] 1797 1654 G = Geospatial_data(points) 1798 self.failUnless(2 == len(G),'Len error!')1799 1655 self.failUnless(2 == len(G), 'Len error!') 1656 1800 1657 points = [[1.0, 2.1]] 1801 1658 G = Geospatial_data(points) 1802 self.failUnless(1 == len(G),'Len error!')1659 self.failUnless(1 == len(G), 'Len error!') 1803 1660 1804 1661 points = [[1.0, 2.1], [3.0, 5.3], [3.0, 5.3], [3.0, 5.3]] 1805 1662 G = Geospatial_data(points) 1806 self.failUnless(4 == len(G),'Len error!')1807 1663 self.failUnless(4 == len(G), 'Len error!') 1664 1808 1665 def test_split(self): 1809 1666 """test if the results from spilt are disjoin sets""" 1810 1811 #below is a work around until the randint works on cyclones compute nodes 1812 if get_host_name()[8:9]!='0': 1813 1814 1667 1668 # below is a workaround until randint works on cyclones compute nodes 1669 if get_host_name()[8:9] != '0': 1815 1670 points = [[1.0, 1.0], [1.0, 2.0],[1.0, 3.0], [1.0, 4.0], [1.0, 5.0], 1816 1671 [2.0, 1.0], [2.0, 2.0],[2.0, 3.0], [2.0, 4.0], [2.0, 5.0], … … 1818 1673 [4.0, 1.0], [4.0, 2.0],[4.0, 3.0], [4.0, 4.0], [4.0, 5.0], 1819 1674 [5.0, 1.0], [5.0, 2.0],[5.0, 3.0], [5.0, 4.0], [5.0, 5.0]] 1820 attributes = {'depth':[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 1821 14, 15, 
16, 17, 18, 19, 20, 21, 22, 23, 24, 25], 1822 'speed':[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 1823 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]} 1675 attributes = {'depth': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1676 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 1677 21, 22, 23, 24, 25], 1678 'speed': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1679 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 1680 21, 22, 23, 24, 25]} 1824 1681 G = Geospatial_data(points, attributes) 1825 1682 1826 1683 factor = 0.21 1827 1828 #will return G1 with 10% of points and G2 with 90% 1829 G1, G2 = G.split(factor,100) 1830 1684 1685 # will return G1 with 10% of points and G2 with 90% 1686 G1, G2 = G.split(factor, 100) 1831 1687 assert num.allclose(len(G), len(G1)+len(G2)) 1832 1688 assert num.allclose(round(len(G)*factor), len(G1)) 1833 1689 1834 1690 P = G1.get_data_points(absolute=False) 1835 assert num.allclose(P, [[5.0,4.0],[4.0,3.0],[4.0,2.0],[3.0,1.0],[2.0,3.0]]) 1836 1691 expected = [[5.,4.], [4.,1.], [2.,4.], [2.,3.], [1.,4.]] 1692 # expected = [[5.0, 4.0], [4.0, 3.0], [4.0, 2.0], 1693 # [3.0, 1.0], [2.0, 3.0]] 1694 msg = 'Expected %s, but\nP=%s' % (str(expected), str(P)) 1695 assert num.allclose(P, expected), msg 1696 1837 1697 A = G1.get_attributes() 1838 assert num.allclose(A,[24, 18, 17, 11, 8]) 1839 1698 expected = [24, 16, 9, 8, 4] 1699 msg = 'expected=%s, but A=%s' % (str(expected), str(A)) 1700 assert num.allclose(A, expected), msg 1701 1840 1702 def test_split1(self): 1841 """test if the results from spilt are disjoin sets""" 1842 #below is a work around until the randint works on cyclones compute nodes 1843 if get_host_name()[8:9]!='0': 1844 1845 from RandomArray import randint,seed 1846 seed(100,100) 1847 a_points = randint(0,999999,(10,2)) 1703 """test if the results from split are disjoint sets""" 1704 1705 # below is a workaround until randint works on cyclones compute nodes 1706 if get_host_name()[8:9] != '0': 1707 from numpy.random import randint, seed 1708 1709 seed((100, 100)) 1710 a_points = randint(0, 999999, (10,2)) 1848 1711 points = a_points.tolist() 1849 # print points 1850 1712 1851 1713 G = Geospatial_data(points) 1852 1714 1853 1715 factor = 0.1 1854 1855 #will return G1 with 10% of points and G2 with 90% 1856 G1, G2 = G.split(factor,100) 1857 1858 # print 'G1',G1 1716 1717 # will return G1 with 10% of points and G2 with 90% 1718 G1, G2 = G.split(factor, 100) 1859 1719 assert num.allclose(len(G), len(G1)+len(G2)) 1860 1720 assert num.allclose(round(len(G)*factor), len(G1)) 1861 1721 1862 1722 P = G1.get_data_points(absolute=False) 1863 assert num.allclose(P, [[982420.,28233.]]) 1864 1865 1723 expected = [[425835., 137518.]] 1724 msg = 'expected=%s, but\nP=%s' % (str(expected), str(P)) 1725 assert num.allclose(P, expected), msg 1726 1866 1727 def test_find_optimal_smoothing_parameter(self): 1867 1728 """ 1868 Creates a elevation file repres ting hill (sort of) and runs1729 Creates a elevation file representing a hill (sort of) and runs 1869 1730 find_optimal_smoothing_parameter for 3 different alphas, 1870 1731 1871 1732 NOTE the random number seed is provided to control the results 1872 1733 """ 1734 1873 1735 from cmath import cos 1874 1736 1875 # below is a work around until therandint works on cyclones compute nodes1737 # below is a workaround until randint works on cyclones compute nodes 1876 1738 if get_host_name()[8:9]!='0': 1877 1878 filename = tempfile.mktemp(".csv") 1879 file = open(filename,"w") 1880 file.write("x,y,elevation \n") 1881 1882 for i in range(-5,6): 1883 for j in range(-5,6): 
1884 #this equation made surface like a circle ripple
1739 filename = tempfile.mktemp('.csv')
1740 file = open(filename, 'w')
1741 file.write('x,y,elevation \n')
1742
1743 for i in range(-5, 6):
1744 for j in range(-5, 6):
1745 # this equation makes a surface like a circle ripple
1885 1746 z = abs(cos(((i*i) + (j*j))*.1)*2)
1886 # print 'x,y,f',i,j,z
1887 file.write("%s, %s, %s\n" %(i, j, z))
1888
1747 file.write("%s, %s, %s\n" % (i, j, z))
1748
1889 1749 file.close()
1890
1891 value, alpha = find_optimal_smoothing_parameter(data_file=filename,
1750
1751 value, alpha = find_optimal_smoothing_parameter(data_file=filename,
1892 1752 alpha_list=[0.0001, 0.01, 1],
1893 1753 mesh_file=None,
… …
1900 1760 seed_num=100000,
1901 1761 verbose=False)
1902 1762
1903 1763 os.remove(filename)
1904 1764
1905 1765 # print value, alpha
1906 assert (alpha==0.01)
1766 assert(alpha == 0.01)
1907 1767
1908 1768 def test_find_optimal_smoothing_parameter1(self):
1909 1769 """
1910 Creates a elevation file represting hill (sort of) and
1770 Creates a elevation file representing a hill (sort of) and
1911 1771 Then creates a mesh file and passes the mesh file and the elevation
1912 1772 file to find_optimal_smoothing_parameter for 3 different alphas,
1913 1773
1914 1774 NOTE the random number seed is provided to control the results
1915 1775 """
1916 #below is a work around until the randint works on cyclones compute nodes
1776
1777 # below is a workaround until randint works on cyclones compute nodes
1917 1778 if get_host_name()[8:9]!='0':
1918
1919 1779 from cmath import cos
1920 1780 from anuga.pmesh.mesh_interface import create_mesh_from_regions
1921
1922 filename = tempfile.mktemp( ".csv")
1923 file = open(filename, "w")
1924 file.write( "x,y,elevation \n")
1925
1926 for i in range(-5 ,6):
1927 for j in range(-5, 6):
1928 # this equation made surface like a circle ripple
1781
1782 filename = tempfile.mktemp('.csv')
1783 file = open(filename, 'w')
1784 file.write('x,y,elevation \n')
1785
1786 for i in range(-5 ,6):
1787 for j in range(-5, 6):
1788 # this equation makes a surface like a circle ripple
1929 1789 z = abs(cos(((i*i) + (j*j))*.1)*2)
1930 # print 'x,y,f',i,j,z
1931 file.write("%s, %s, %s\n" %(i, j, z))
1790 file.write('%s, %s, %s\n' % (i, j, z))
1932 1791
1933 1792 file.close()
1934 poly=[[5,5],[5,-5],[-5,-5],[-5,5]]
1935 internal_poly=[[[[1,1],[1,-1],[-1,-1],[-1,1]],.5]]
1936 mesh_filename= tempfile.mktemp(".msh")
1937
1793
1794 poly=[[5,5], [5,-5], [-5,-5], [-5,5]]
1795 internal_poly=[[[[1,1], [1,-1], [-1,-1], [-1,1]], .5]]
1796 mesh_filename= tempfile.mktemp('.msh')
1797
1938 1798 create_mesh_from_regions(poly,
1939 boundary_tags={'back': [2],
1940 'side': [1,3],
1941 'ocean': [0]},
1942 maximum_triangle_area=3,
1943 interior_regions=internal_poly,
1944 filename=mesh_filename,
1945 use_cache=False,
1946 verbose=False)
1947
1948 value, alpha = find_optimal_smoothing_parameter(data_file=filename,
1799 boundary_tags={'back': [2],
1800 'side': [1,3],
1801 'ocean': [0]},
1802 maximum_triangle_area=3,
1803 interior_regions=internal_poly,
1804 filename=mesh_filename,
1805 use_cache=False,
1806 verbose=False)
1807
1808 value, alpha = find_optimal_smoothing_parameter(data_file=filename,
1949 1809 alpha_list=[0.0001, 0.01, 1],
1950 1810 mesh_file=mesh_filename,
… …
1952 1812 seed_num=174,
1953 1813 verbose=False)
1954 1814
1955 1815 os.remove(filename)
1956 1816 os.remove(mesh_filename)
1957
1958 # print value, alpha
1959 assert (alpha==0.01)
1817
1818 msg = 'alpha=%s' % str(alpha)
1819 # 0.01 was expected with Numeric.RandomArray RNG
1820 assert alpha==1.0, msg
1960 1821
1961 1822 def test_find_optimal_smoothing_parameter2(self):
1962 """
1963 Tests requirement that mesh file must exist or IOError is thrown
1964
1823 '''Tests requirement that mesh file must exist or IOError is thrown
1824
1965 1825 NOTE the random number seed is provided to control the results
1966 """
1826 '''
1827
1967 1828 from cmath import cos
1968 1829 from anuga.pmesh.mesh_interface import create_mesh_from_regions
1969
1970 filename = tempfile.mktemp( ".csv")
1971 mesh_filename= tempfile.mktemp( ".msh")
1972
1830
1831 filename = tempfile.mktemp('.csv')
1832 mesh_filename= tempfile.mktemp('.msh')
1833
1973 1834 try:
1974 value, alpha = find_optimal_smoothing_parameter(data_file=filename,
1835 value, alpha = find_optimal_smoothing_parameter(data_file=filename,
1975 1836 alpha_list=[0.0001, 0.01, 1],
1976 1837 mesh_file=mesh_filename,
… …
1981 1842 pass
1982 1843 else:
1983 self.failUnless(0 ==1, 'Error not thrown error!')
1984
1985
1844 self.fail('Error not thrown error!')
1845
1846 ################################################################################
1847
1986 1848 if __name__ == "__main__":
1987
1988 #suite = unittest.makeSuite(Test_Geospatial_data, 'test_write_csv_attributes_lat_long')
1989 suite = unittest.makeSuite(Test_Geospatial_data, 'test_find_optimal_smoothing_parameter')
1990 #suite = unittest.makeSuite(Test_Geospatial_data, 'test_split1')
1991 #suite = unittest.makeSuite(Test_Geospatial_data, 'test')
1849 suite = unittest.makeSuite(Test_Geospatial_data, 'test')
1992 1850 runner = unittest.TextTestRunner() #verbosity=2)
1993 1851 runner.run(suite)
1994 1852
1995 1853
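Note on the regenerated expected values: the tests above now draw their random points from numpy.random instead of the retired Numeric/RandomArray module, and the two generators produce different sequences for the same seed, so the hard-coded expected points, attributes and optimal alpha had to be regenerated. A minimal sketch of the new idiom, mirroring the calls used in test_split1 (illustrative only, not part of the changeset):

    # Sketch only: seeding and sampling with numpy.random as done in the
    # updated tests.  The old Numeric idiom is kept in comments for contrast.
    from numpy.random import randint, seed

    # Old:  from RandomArray import randint, seed
    #       seed(100, 100)          # RandomArray.seed took two arguments
    seed((100, 100))                # numpy takes a single int or tuple of ints

    # Same (low, high, shape) call shape, but a different random stream,
    # hence the regenerated expected values in test_split/test_split1.
    a_points = randint(0, 999999, (10, 2))
    points = a_points.tolist()      # plain [[x, y], ...] list for Geospatial_data

The split behaviour itself is unchanged: G.split(factor, seed_num) still returns two disjoint Geospatial_data objects whose lengths sum to len(G), which is what the assertions ahead of the hard-coded expected values continue to check.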