Changeset 4776
- Timestamp: Nov 1, 2007, 11:12:32 AM
- Location: anuga_core/source/anuga
- Files: 2 edited
Legend:
- Unmodified lines have no prefix
- Added lines are prefixed with +
- Removed lines are prefixed with -
anuga_core/source/anuga/geospatial_data/geospatial_data.py
r4745 → r4776

@@ -655 +655 @@
         new_size = round(factor*self_size)

-        # find unique random numbers
+        # Find unique random numbers
         if verbose: print "make unique random number list and get indices"

@@ -662 +662 @@
         if verbose: print "total list len",len(total_list)

-        # there will be repeated random numbers however will not be a
-        # problem as they are being 'pop'ed out of array so if there
-        # are two numbers the same they will pop different indicies,
-        # still basically random
+        # There will be repeated random numbers however will not be a
+        # problem as they are being 'pop'ed out of array so if there
+        # are two numbers the same they will pop different indicies,
+        # still basically random
         ## create list of non-unquie random numbers
         if verbose: print "create random numbers list %s long" %new_size

-        # set seed if provided, mainly important for unit test!
+        # Set seed if provided, mainly important for unit test!
         if seed_num != None:
             seed(seed_num,seed_num)

@@ -684 +684 @@
         k=1
         remainder_list = total_list[:]
-        # pops array index (random_num) from remainder_list
+        # pops array index (random_num) from remainder_list
         # (which starts as the
-        # total_list and appends to random_list
+        # total_list and appends to random_list
         random_num_len = len(random_num)
         for i in random_num:

@@ -696 +696 @@
             k+=1

-        # FIXME: move to tests, it might take a long time
-        # then create an array of random lenght between 500 and 1000,
-        # and use a random factor between 0 and 1
-        # setup for assertion
+        # FIXME: move to tests, it might take a long time
+        # then create an array of random lenght between 500 and 1000,
+        # and use a random factor between 0 and 1
+        # setup for assertion
         test_total = random_list[:]
         test_total.extend(remainder_list)

@@ -707 +707 @@
         assert (total_list==test_total),msg

-        # get new samples
+        # Get new samples
         if verbose: print "get values of indices for random list"
         G1 = self.get_sample(random_list)

@@ -733 +733 @@
         if self.file_name[-4:] == ".pts":

-            # see if the file is there. Throw a QUIET IO error if it isn't
+            # See if the file is there. Throw a QUIET IO error if it isn't
             fd = open(self.file_name,'r')
             fd.close()

-            # throws prints to screen if file not present
+            # Throws prints to screen if file not present
             self.fid = NetCDFFile(self.file_name, 'r')

@@ -761 +761 @@

         else:
-            # assume the file is a csv file
+            # Assume the file is a csv file
             file_pointer = open(self.file_name)
             self.header, self.file_pointer = \

@@ -775 +775 @@
         if self.file_name[-4:] == ".pts":
             if self.start_row == self.last_row:
-                # read the end of the file last iteration
-                # remove blocking attributes
+                # Read the end of the file last iteration
+                # Remove blocking attributes
                 self.fid.close()
                 del self.max_read_lines

@@ -822 +822 @@

         else:
-            # assume the file is a csv file
+            # Assume the file is a csv file
             try:
                 pointlist, att_dict, geo_ref, self.file_pointer = \

@@ -859 +859 @@
         raise SyntaxError, msg
     return geo
+
+
 ##################### Error messages ###########
 Error_message = {}

@@ -933 +935 @@


-    # see if the file is there. Throw a QUIET IO error if it isn't
+    # See if the file is there. Throw a QUIET IO error if it isn't
     fd = open(file_name,'r')
     fd.close()

-    # throws prints to screen if file not present
+    # Throws prints to screen if file not present
     fid = NetCDFFile(file_name, 'r')

@@ -1016 +1018 @@
     att_dict = {}

-    # This is to remove the x and y headers.
+    # This is to remove the x and y headers.
     header = header[:]
     try:

@@ -1063 +1065 @@
     att_dict[key] = array(att_dict[key]).astype(Float)

-    # Do stuff here so the info is in lat's and longs
+    # Do stuff here so the info is in lat's and longs
     geo_ref = None
     x_header = lower(x_header[:3])

@@ -1146 +1148 @@
     outfile = NetCDFFile(file_name, 'w')

-    # Create new file
+    # Create new file
     outfile.institution = 'Geoscience Australia'
     outfile.description = 'NetCDF format for compact and portable storage ' +\
                           'of spatial point data'

-    # dimension definitions
+    # Dimension definitions
     shape = write_data_points.shape[0]
     outfile.createDimension('number_of_points', shape)
     outfile.createDimension('number_of_dimensions', 2) #This is 2d data

-    # variable definition
+    # Variable definition
     outfile.createVariable('points', Float, ('number_of_points',
                                              'number_of_dimensions'))
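The geospatial_data.py hunks above only touch comments, but the block around lines 664-701 documents a real technique: indices are popped out of a working copy of the full index list, so repeated random draws still remove distinct (effectively random) elements, and the chosen subset plus the remainder must reassemble the original list. A minimal standalone sketch of that idea, assuming Python 3 and a made-up helper name (split_indices); it is not the ANUGA implementation:

    import random

    def split_indices(n, factor, seed_num=None):
        # Set the seed if provided, mainly useful for reproducible unit
        # tests, mirroring the seed handling in the changeset comments.
        if seed_num is not None:
            random.seed(seed_num)

        new_size = int(round(factor * n))
        remainder = list(range(n))   # starts as the full index list
        chosen = []

        # Pre-drawn random numbers may contain repeats; that is harmless
        # because each pop shrinks the remainder list, so a repeated draw
        # removes a different (still essentially random) element.
        draws = [random.randint(0, n - 1) for _ in range(new_size)]
        for r in draws:
            chosen.append(remainder.pop(r % len(remainder)))

        # Same spirit as the assertion in the changeset: the two pieces
        # together must still cover every original index exactly once.
        assert sorted(chosen + remainder) == list(range(n))
        return chosen, remainder

    print(split_indices(10, 0.3, seed_num=52))

Taking each draw modulo the current list length keeps every pop in range while preserving the "duplicates pop different indicies" behaviour the original comment relies on.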
anuga_core/source/anuga/shallow_water/data_manager.py
r4775 → r4776

@@ -1290 +1290 @@
     """

-    # FIXME: Can this be written feasibly using write_pts?
+    # FIXME: Can this be written feasibly using write_pts?

     import os

@@ -1298 +1298 @@
     root = basename_in

-    # Get NetCDF
-    infile = NetCDFFile(root + '.dem', 'r') # Open existing netcdf file for read
+    # Get NetCDF
+    infile = NetCDFFile(root + '.dem', 'r') # Open existing netcdf file for read

     if verbose: print 'Reading DEM from %s' %(root + '.dem')

@@ -1305 +1305 @@
     ncols = infile.ncols[0]
     nrows = infile.nrows[0]
-    xllcorner = infile.xllcorner[0] # Easting of lower left corner
-    yllcorner = infile.yllcorner[0] # Northing of lower left corner
+    xllcorner = infile.xllcorner[0] # Easting of lower left corner
+    yllcorner = infile.yllcorner[0] # Northing of lower left corner
     cellsize = infile.cellsize[0]
     NODATA_value = infile.NODATA_value[0]

@@ -1315 +1315 @@
     false_northing = infile.false_northing[0]

-    # Text strings
+    # Text strings
     projection = infile.projection
     datum = infile.datum

@@ -1321 +1321 @@


-    # Get output file
+    # Get output file
     if basename_out == None:
         ptsname = root + '.pts'

@@ -1331 +1331 @@
     outfile = NetCDFFile(ptsname, 'w')

-    # Create new file
+    # Create new file
     outfile.institution = 'Geoscience Australia'
     outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                           'of spatial point data'
-    # assign default values
+    # Assign default values
     if easting_min is None: easting_min = xllcorner
     if easting_max is None: easting_max = xllcorner + ncols*cellsize

@@ -1341 +1341 @@
     if northing_max is None: northing_max = yllcorner + nrows*cellsize

-    # compute offsets to update georeferencing
+    # Compute offsets to update georeferencing
     easting_offset = xllcorner - easting_min
     northing_offset = yllcorner - northing_min

-    # Georeferencing
+    # Georeferencing
     outfile.zone = zone
-    outfile.xllcorner = easting_min # Easting of lower left corner
-    outfile.yllcorner = northing_min # Northing of lower left corner
+    outfile.xllcorner = easting_min # Easting of lower left corner
+    outfile.yllcorner = northing_min # Northing of lower left corner
     outfile.false_easting = false_easting
     outfile.false_northing = false_northing

@@ -1357 +1357 @@


-    # Grid info (FIXME: probably not going to be used, but heck)
+    # Grid info (FIXME: probably not going to be used, but heck)
     outfile.ncols = ncols
     outfile.nrows = nrows

@@ -1364 +1364 @@
     totalnopoints = nrows*ncols

-    # calculating number of NODATA_values for each row in clipped region
-    # FIXME: use array operations to do faster
+    # Calculating number of NODATA_values for each row in clipped region
+    # FIXME: use array operations to do faster
     nn = 0
     k = 0

@@ -1390 +1390 @@
     index2 = thisj

-    # dimension definitions
+    # Dimension definitions
     nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
     ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

@@ -1407 +1407 @@
     outfile.createDimension('number_of_dimensions', 2) #This is 2d data

-    # variable definitions
+    # Variable definitions
     outfile.createVariable('points', Float, ('number_of_points',
                                              'number_of_dimensions'))

@@ -1417 +1417 @@

     lenv = index2-index1+1
-    # Store data
+    # Store data
     global_index = 0
-    # for i in range(nrows):
+    # for i in range(nrows):
     for i in range(i1_0,thisi+1,1):
         if verbose and i%((nrows+10)/10)==0:

@@ -1429 +1429 @@
         no_NODATA = sum(v == NODATA_value)
         if no_NODATA > 0:
-            newcols = lenv - no_NODATA # ncols_in_bounding_box - no_NODATA
+            newcols = lenv - no_NODATA # ncols_in_bounding_box - no_NODATA
         else:
-            newcols = lenv # ncols_in_bounding_box
+            newcols = lenv # ncols_in_bounding_box

         telev = zeros(newcols, Float)
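The data_manager.py hunks all sit in the DEM-to-pts conversion, which defaults the clipping window to the full grid extent, shifts the georeference to the clipped lower-left corner, and counts NODATA cells row by row (a FIXME notes that array operations would be faster). A rough standalone sketch of those bookkeeping steps, assuming NumPy and made-up helper names (default_clip_bounds, nodata_per_row); it is not the actual data_manager.py code:

    import numpy as np

    def default_clip_bounds(xllcorner, yllcorner, ncols, nrows, cellsize,
                            easting_min=None, easting_max=None,
                            northing_min=None, northing_max=None):
        # Default the clipping window to the full grid extent.
        if easting_min is None:  easting_min = xllcorner
        if easting_max is None:  easting_max = xllcorner + ncols*cellsize
        if northing_min is None: northing_min = yllcorner
        if northing_max is None: northing_max = yllcorner + nrows*cellsize

        # Offsets that move the georeference from the original lower left
        # corner to the clipped one.
        easting_offset = xllcorner - easting_min
        northing_offset = yllcorner - northing_min
        return (easting_min, easting_max, northing_min, northing_max,
                easting_offset, northing_offset)

    def nodata_per_row(elevation, NODATA_value):
        # Vectorised per-row NODATA count, the kind of array operation the
        # FIXME in the changeset asks for instead of an explicit loop.
        return np.sum(elevation == NODATA_value, axis=1)

    dem = np.array([[1.0, -9999.0,     2.0],
                    [3.0,     4.0, -9999.0]])
    print(nodata_per_row(dem, -9999.0))                    # -> [1 1]
    print(default_clip_bounds(300000.0, 6180000.0, 3, 2, 10.0))

The offsets are simply the difference between the original and the clipped lower-left corner, which is what the easting_offset and northing_offset lines in the hunk around line 1343 compute.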