Changeset 6304
- Timestamp: Feb 10, 2009, 11:11:04 AM (16 years ago)
- Location: branches/numpy
- Files: 1 deleted, 93 edited, 1 copied
branches/numpy/anuga/abstract_2d_finite_volumes/domain.py
(r6246 -> r6304)
- import Numeric as num -> import numpy as num
- num.Float -> num.float and num.Int -> num.int in the array allocations: the full_send_dict and ghost_recv_dict buffers, tri_full_flag, already_computed_flux, max_speed (both places it is allocated) and the conserved-quantity work vector q
- The speed diagnostics now operate on self.max_speed.flat when taking min(), max() and len()
- Protection against isolated degenerate timesteps: the test self.max_speed > 10.0 becomes num.max(self.max_speed) > 10.0 (sketched below)
- Docstring/comment wording: "Numeric array" -> "numeric array", "speed up using Numeric" -> "speed up using numeric package"
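Under numpy the max_speed change is not cosmetic: comparing a whole array against a scalar yields a boolean array, which cannot be used directly as an if-condition. A minimal sketch of the pattern, with made-up speed values rather than ANUGA data:

    import numpy as num

    max_speed = num.array([0.5, 12.3, 3.1])    # hypothetical per-triangle speeds

    # Elementwise comparison yields an array of booleans ...
    flags = max_speed > 10.0
    print(flags)                               # [False  True False]

    # ... so 'if max_speed > 10.0:' raises "truth value ... is ambiguous".
    # Reducing to a scalar first gives an unambiguous single test.
    if num.max(max_speed) > 10.0:
        print('at least one triangle exceeds the 10.0 threshold')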
branches/numpy/anuga/abstract_2d_finite_volumes/ermapper_grids.py
(r6161 -> r6304)
- import Numeric as num -> import numpy as num
- celltype_map now maps onto num.float32/num.float64 instead of num.Float32/num.Float64 (dtype round-trip sketched below)
- The default data_format of write_ermapper_data() and read_ermapper_data() changes from num.Float32 to num.float32, and the comment about the default is updated to match
- Docstring wording: "2D Numeric array" -> "2D numeric array"
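A small sketch of the dtype mapping and the byte-string round trip that read_ermapper_data relies on. The grid data is made up, and tobytes()/frombuffer() are used here as the current spellings of the tostring()/fromstring() calls in the module:

    import numpy as num

    celltype_map = {'IEEE4ByteReal': num.float32, 'IEEE8ByteReal': num.float64}

    grid = num.arange(6, dtype=celltype_map['IEEE4ByteReal'])   # dummy grid

    # write_ermapper_data() dumps the raw bytes; read_ermapper_data() converts
    # the byte string back using the requested dtype.
    raw = grid.tobytes()
    restored = num.frombuffer(raw, dtype=num.float32)

    print(restored.dtype, num.allclose(grid, restored))         # float32 True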
branches/numpy/anuga/abstract_2d_finite_volumes/general_mesh.py
(r6191 -> r6304)
- import Numeric as num -> import numpy as num
- num.Float -> num.float and num.Int -> num.int in all allocations and conversions: triangles, nodes, xy_extent, normals, areas, edgelengths, the edge vectors n0/n1/n2, the georeference offsets and the returned vertex arrays and vertex_coordinates
- get_triangles(): num.arange(K, typecode=num.Int) -> num.arange(K, dtype=num.int) (sketched below)
- get_unique_vertices(): docstring collapsed to one line and the body re-indented, with a debug print left commented out
- The triangle_list returned per node is built with num.int, and number_of_triangles_per_node and vertex_value_indices are now allocated with an explicit num.int dtype (tagged "#array default#")
- Docstring wording: "Numeric array" -> "numeric array"
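The typecode= to dtype= switch is the general Numeric-to-numpy keyword rename. A minimal sketch, where M is a made-up triangle count and plain int stands in for num.int (which was simply an alias for the builtin):

    import numpy as num

    M = 4                              # hypothetical number of triangles
    K = 3 * M                          # one entry per triangle vertex

    # Numeric:  num.arange(K, typecode=num.Int)
    # numpy:    num.arange(K, dtype=num.int)
    T = num.reshape(num.arange(K, dtype=int), (M, 3))

    print(T)      # rows [0 1 2], [3 4 5], ... : disconnected vertex indices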
branches/numpy/anuga/abstract_2d_finite_volumes/generic_boundary_conditions.py
(r6174 -> r6304)
- import Numeric as num -> import numpy as num
- num.Float -> num.float for the conserved_quantities array, the converted return value q of a time boundary function, the boundary midpoint_coordinates array and the per-edge midpoints m
- Error message wording: "Numeric array of floats" -> "numeric array of floats"
- File boundary NAN test: if res == NAN: -> if num.any(res == NAN):
branches/numpy/anuga/abstract_2d_finite_volumes/mesh_factory.py
(r6145 -> r6304)
- import Numeric as num -> import numpy as num
- points is allocated with num.float and elements with num.int instead of num.Float/num.Int
branches/numpy/anuga/abstract_2d_finite_volumes/neighbour_mesh.py
(r6191 -> r6304)
- import Numeric as num -> import numpy as num
- num.Float -> num.float and num.Int -> num.int in the geometric allocations: centroid_coordinates, radii, neighbours, neighbour_edges, number_of_boundaries, surrogate_neighbours, the centroids and midpoints m0/m1/m2, tagged_elements, the intersection offsets z0/z1, the segment vector and normal, and the segment midpoints
- The "Impossible" sanity check now builds a message first: msg = 'Impossible: p0 is None!?'; raise Exception, msg
- Comment/docstring wording: "Numeric array" -> "numeric array", "De-Numeric each point" -> "De-numeric each point"
branches/numpy/anuga/abstract_2d_finite_volumes/pmesh2domain.py
(r6145 -> r6304)
- Module docstring typo fixed: "Converting .tsh files to do amains" -> "Converting .tsh files to domains"
- import Numeric as num -> import numpy as num
- The module is reformatted: each function gains an ## @brief/@param/@return comment header, docstrings are tidied, long argument lists are joined onto single lines, debug prints are removed and error messages are built with a single % expression
- pmesh_instance_to_domain_instance() is rewritten in the tidier style (keyword arguments, a result variable, stage copied from elevation when absent) while keeping the same behaviour
- pmesh_to_domain_instance() and _pmesh_to_domain_instance() keep the issubclass(DomainClass, Domain) check and the FIXME (Ole) note about the shallow-water-specific stage default
- _pmesh_to_domain(): the vertex attributes are transposed only when present (point_atts = mesh_dict['vertex_attributes']; if point_atts is not None: point_atts = num.transpose(...)), replacing the unconditional transpose and the != None test (sketched below)
- build_tagged_elements_dictionary(), pmesh_dict_to_tag_dict() and calc_sides() get cleaned-up docstrings and spacing only
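The None-guard around the transpose is the one behavioural change worth noting: a mesh dictionary with no vertex attributes stores None there, and transposing None fails. A small sketch with made-up attribute data:

    import numpy as num

    # Hypothetical mesh dictionary: two attributes sampled at three vertices.
    mesh_dict = {'vertex_attributes': [[1.0, 10.0], [2.0, 20.0], [3.0, 30.0]],
                 'vertex_attribute_titles': ['elevation', 'friction']}

    vertex_quantity_dict = {}
    point_atts = mesh_dict['vertex_attributes']
    if point_atts is not None:                   # skip meshes with no attributes
        point_atts = num.transpose(point_atts)   # one row per attribute
        for title, values in zip(mesh_dict['vertex_attribute_titles'], point_atts):
            vertex_quantity_dict[title] = values

    print(vertex_quantity_dict['elevation'])     # [ 1.  2.  3.]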
branches/numpy/anuga/abstract_2d_finite_volumes/quantity.py
(r6244 -> r6304)
- import Numeric as num -> import numpy as num, and anuga.utilities.numerical_tools is imported as aunt
- num.Float -> num.float in every allocation and conversion: vertex/centroid/edge values, gradients, phi, boundary values, explicit and semi-implicit updates, centroid backups, the points/values passed to fitting, the default precision in get_vertex_values() and the smoothed result array A
- Index type checks accept numpy arrays via isinstance(indices, num.ndarray) instead of num.ArrayType (sketched below, together with the flatten() change)
- set_values(): the dispatch on the numeric argument is reworked: numpy arrays and lists go to set_values_from_array(), callables, strings and filenames keep their branches, and anything else is coerced with float() and routed to set_values_from_constant(), raising Exception with a detailed message reporting the offending type on failure; the diff also shows an intervening elif quantity is not None branch that repeats the old constant dispatch before the final set_values_from_quantity branch
- set_values_from_array(): indices are converted with num.int, and values.flat -> values.flatten() when setting vertex values
- get_values(): an invalid location now raises Exception, msg, and vertex averages are built with num.float
- get_vertex_values(): A = self.vertex_values.flat.astype(precision) -> self.vertex_values.flatten().astype(precision); the "current node exceeding" error also raises Exception, msg
- Docstring wording: "Numeric array" -> "numeric array"
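Two of the idioms above recur throughout the changeset: testing for an array with isinstance(..., num.ndarray) rather than comparing against the old ArrayType, and taking a 1-D copy with flatten() rather than using the .flat view. A short sketch with made-up vertex values:

    import numpy as num

    vertex_values = num.array([[1.0, 0.0, 2.0],
                               [1.0, 2.0, 4.0]])   # hypothetical 2-triangle data

    # Numeric:  type(vertex_values) == num.ArrayType
    # numpy:    isinstance(vertex_values, num.ndarray)
    assert isinstance(vertex_values, num.ndarray)

    # .flat is a lazy 1-D iterator over the same buffer; .flatten() returns an
    # independent 1-D copy, which is what the quantity code now hands around.
    A = vertex_values.flatten().astype(num.float64)
    print(A)                                       # [ 1.  0.  2.  1.  2.  4.]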
branches/numpy/anuga/abstract_2d_finite_volumes/quantity_ext.c
(r5897 -> r6304)
- #include "Numeric/arrayobject.h" -> #include "numpy/arrayobject.h"
branches/numpy/anuga/abstract_2d_finite_volumes/region.py
(r6145 -> r6304)
- import Numeric as num -> import numpy as num
branches/numpy/anuga/abstract_2d_finite_volumes/test_domain.py
(r6195 -> r6304)
- import Numeric as num -> import numpy as num
- assert domain.get_conserved_quantities(0, edge=1) == 0. is wrapped as assert num.alltrue(... == 0.) (the pattern is sketched below)
- The test arrays A and B are built with num.float, and denom = num.ones(4, num.Float) becomes num.ones(4, num.float)
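The num.alltrue() wrapper appears in most of the converted tests: asserting directly on an array-valued comparison is ambiguous once the comparison is elementwise. A sketch of the pattern with made-up values:

    import numpy as num

    q = num.zeros(3)                 # e.g. conserved quantities at an edge

    comparison = (q == 0.0)          # elementwise -> [ True  True  True]

    # 'assert comparison' is ambiguous for arrays with more than one element;
    # reduce first, as the updated tests do.  num.any() is the companion used
    # where a single matching element is enough.
    assert num.alltrue(comparison)   # spelled num.all(...) in current numpy
    print('all conserved quantities are zero')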
branches/numpy/anuga/abstract_2d_finite_volumes/test_ermapper.py
(r6145 -> r6304)
- import Numeric as num -> import numpy as num
- The write/read round-trip test uses num.float64 instead of num.Float64
branches/numpy/anuga/abstract_2d_finite_volumes/test_general_mesh.py
(r6174 -> r6304)
- import Numeric as num -> import numpy as num
- The triangles array is built with num.int
- assertEqual comparisons between node coordinate arrays are replaced by self.failUnless(num.alltrue(c == node)) and the equivalents for nodes_absolute[2], with the old assertEqual calls left commented out
branches/numpy/anuga/abstract_2d_finite_volumes/test_generic_boundary_conditions.py
(r6195 -> r6304)
- import Numeric as num -> import numpy as num
branches/numpy/anuga/abstract_2d_finite_volumes/test_ghost.py
(r6195 -> r6304)
- import numpy as num is added
- assert domain.get_conserved_quantities(0, edge=1) == 0. is wrapped as assert num.alltrue(... == 0.)
branches/numpy/anuga/abstract_2d_finite_volumes/test_neighbour_mesh.py
(r6174 -> r6304)
- import Numeric as num -> import numpy as num
- scaled_points = ensure_numeric(points, num.Int)/1000 -> ensure_numeric(points, num.int)/1000
- A commented-out duplicate of the scaling line is removed
branches/numpy/anuga/abstract_2d_finite_volumes/test_pmesh2domain.py
(r6191 -> r6304)
- import Numeric as num -> import numpy as num
branches/numpy/anuga/abstract_2d_finite_volumes/test_quantity.py
(r6195 -> r6304)
- import Numeric as num -> import numpy as num
- num.Float -> num.float for the interpolation test arrays (vertex and edge values), denom, and the stage arrays built from the bed shape
- Q = stage.vertex_values.flat -> stage.vertex_values.flatten()
- Stale commented-out test-suite selection lines at the bottom of the file are removed
branches/numpy/anuga/abstract_2d_finite_volumes/test_region.py
(r6145 -> r6304)
- import Numeric as num -> import numpy as num
- test_region_tags and test_unique_vertices: docstrings compressed to one line and indentation tidied
- The expected friction values are pulled out into an explicit expected list that is also included in the assertion failure message (allclose(..., expected), msg)
- A later friction comparison likewise factors the expected values out and adds them to the assertion messages
- The suite construction at the bottom keeps the same behaviour with minor spacing changes
branches/numpy/anuga/abstract_2d_finite_volumes/test_util.py
(r6175 -> r6304)
- import Numeric as num -> import numpy as num
- NAN handling in the file_function test: if q0 == NAN -> if num.alltrue(q0 == NAN), and failUnless(q == actual) -> failUnless(num.alltrue(q == actual))
- The foo and bar test arrays are built with num.float
- Comment wording: "error in Numeric" -> "error in numeric"
branches/numpy/anuga/abstract_2d_finite_volumes/util.py
(r6194 -> r6304)
- import Numeric as num -> import numpy as num
- Docstring/comment wording: "Numeric" -> "numeric" (the 0/0 limitation note and the geospatial structure note)
- num.Float -> num.float in the gauge/sww processing allocations: model_time, stages, elevations, momenta, xmom, ymom, speed, bearings, due_east, due_west, depths, eastings and the plot3d arrays, the centroid array xc, the tide-adjusted stage values and points_array
branches/numpy/anuga/advection/advection.py
(r6146 -> r6304)
- import Numeric as num -> import numpy as num
- The flux work array is allocated with num.float instead of num.Float
branches/numpy/anuga/advection/advection_ext.c
(r5162 -> r6304)
- #include "Numeric/arrayobject.h" -> #include "numpy/arrayobject.h"
branches/numpy/anuga/advection/test_advection.py
(r6146 -> r6304)
- import Numeric as num -> import numpy as num
- A commented-out debug print of X is added before the assert X[0] == -X[1] check
branches/numpy/anuga/alpha_shape/alpha_shape.py
(r6174 -> r6304)
- import Numeric as num -> import numpy as num; the input conversion reads "Convert input to numeric arrays" and uses num.float
- Zero-denominator handling in the circumradius calculation: the try/except around num.divide_safe() is replaced by an explicit test, if num.any(denom == 0.0): raise AlphaError, followed by plain num.divide() for dx and dy (sketched below)
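numpy has no divide_safe(), so the converted code tests the denominators explicitly before dividing. A minimal sketch with made-up circumcircle terms; ANUGA raises its own AlphaError at this point, a generic ArithmeticError stands in for it here:

    import numpy as num

    denom = num.array([2.5, 0.5, 1.0])    # hypothetical denominators, none zero
    numer = num.array([1.0, 4.0, 3.0])

    # Numeric's divide_safe() raised on a zero denominator; the numpy version
    # checks up front and only then divides.
    if num.any(denom == 0.0):
        raise ArithmeticError('degenerate triangle: zero denominator')

    dx = num.divide(numer, denom)         # same as numer / denom
    print(dx)                             # [ 0.4  8.   3. ]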
branches/numpy/anuga/alpha_shape/test_alpha_shape.py
(r6147 -> r6304)
- import Numeric as num -> import numpy as num
branches/numpy/anuga/caching/caching.py
(r6232 -> r6304)
- import Numeric as num -> import numpy as num
- myhash(): the array branch is selected with isinstance(T, num.ndarray) instead of type(T) == num.ArrayType before the contiguous copy T = num.array(T) (sketched below)
- compare(): the array branch is likewise selected with isinstance(A, num.ndarray); the element-by-element num.alltrue(A == B) comparison is unchanged
- Argument-string truncation: the comment now reads "Truncate large numeric arrays" and the check uses isinstance(args, num.ndarray)
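The diff's own comment at the copy is "Ensure array is contiguous"; numpy arrays can be non-contiguous views, and num.array() returns a fresh contiguous copy. A sketch of the two numpy-facing branches, with made-up data:

    import numpy as num

    A = num.arange(12).reshape(3, 4)[:, ::2]   # a strided, non-contiguous view
    print(A.flags['C_CONTIGUOUS'])             # False

    if isinstance(A, num.ndarray):
        A = num.array(A)                       # fresh contiguous copy
    print(A.flags['C_CONTIGUOUS'])             # True

    # Cache-hit verification compares element by element, not by identity.
    B = num.array(A)
    print(num.alltrue(A == B))                 # True (num.all in current numpy)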
branches/numpy/anuga/caching/test_caching.py
(r6232 -> r6304)
- import Numeric as num -> import numpy as num; docstrings and comments say "numeric arrays"
- Cached-versus-computed comparisons of array results are wrapped in num.alltrue(...)
- test_hash_collision no longer asserts T2 != T1; it computes reference results T1_ref = f_numeric(A0, A0) and T2_ref = f_numeric(A1, A1) and asserts num.alltrue(T1 == T1_ref) and num.alltrue(T2 == T2_ref)
- x = num.arange(10).astype(num.Float) -> .astype(num.float)
branches/numpy/anuga/config.py
r6108 r6304 4 4 import os 5 5 import sys 6 7 ################################################################################ 8 # Numerical constants 6 import numpy as num 7 8 9 ################################################################################ 10 # numerical constants 9 11 ################################################################################ 10 12 … … 160 162 # Too large (100) creates 'flopping' water 161 163 # Too small (0) creates 'creep' 162 164 163 165 maximum_froude_number = 100.0 # To be used in limiters. 164 166 … … 181 183 182 184 ################################################################################ 185 # NetCDF-specific type constants. Used when defining NetCDF file variables. 186 ################################################################################ 187 188 netcdf_char = 'c' 189 netcdf_byte = 'b' 190 netcdf_int = 'i' 191 netcdf_float = 'd' 192 netcdf_float64 = 'd' 193 netcdf_float32 = 'f' 194 195 ################################################################################ 183 196 # Dynamically-defined constants. 184 197 ################################################################################ … … 191 204 # Code to set the write mode depending on 192 205 # whether Scientific.IO supports large NetCDF files 193 s = """from Scientific.IO.NetCDF import NetCDFFile; fid = NetCDFFile('tmpfilenamexx', 'wl')""" 206 s = """ 207 from Scientific.IO.NetCDF import NetCDFFile 208 fid = NetCDFFile('tmpfilenamexx', 'wl') 209 """ 194 210 195 211 # Need to run in a separate process due an -
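The new netcdf_* constants provide the single-character typecodes that Scientific.IO.NetCDF expects when variables are created, used in place of Numeric typecodes such as num.Float. A minimal sketch of how they could be used (file name and dimension sizes are made up):

    from Scientific.IO.NetCDF import NetCDFFile
    from anuga.config import netcdf_float, netcdf_int, netcdf_mode_w

    fid = NetCDFFile('example.nc', netcdf_mode_w)
    fid.createDimension('number_of_points', 10)
    fid.createDimension('number_of_dimensions', 2)
    fid.createVariable('points', netcdf_float,
                       ('number_of_points', 'number_of_dimensions'))
    fid.createVariable('flags', netcdf_int, ('number_of_points',))
    fid.close()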
branches/numpy/anuga/coordinate_transforms/geo_reference.py
r6149 r6304 11 11 from anuga.utilities.anuga_exceptions import ANUGAError, TitleError, ParsingError, \ 12 12 ShapeError 13 14 import Numeric as num 13 from anuga.config import netcdf_float, netcdf_int, netcdf_float32 14 15 import numpy as num 15 16 16 17 … … 64 65 self.xllcorner = xllcorner 65 66 self.yllcorner = yllcorner 67 #self.is_absolute = num.allclose([self.xllcorner, self.yllcorner], 0) 66 68 67 69 if NetCDFObject is not None: … … 99 101 100 102 # Fix some assertion failures 101 if type(self.zone) == num.ArrayTypeand self.zone.shape == ():103 if isinstance(self.zone, num.ndarray) and self.zone.shape == (): 102 104 self.zone = self.zone[0] 103 if type(self.xllcorner) == num.ArrayTypeand self.xllcorner.shape == ():105 if isinstance(self.xllcorner, num.ndarray) and self.xllcorner.shape == (): 104 106 self.xllcorner = self.xllcorner[0] 105 if type(self.yllcorner) == num.ArrayTypeand self.yllcorner.shape == ():107 if isinstance(self.yllcorner, num.ndarray) and self.yllcorner.shape == (): 106 108 self.yllcorner = self.yllcorner[0] 107 109 108 assert ( type(self.xllcorner) == types.FloatType or\109 type(self.xllcorner) == types.IntType)110 assert ( type(self.yllcorner) == types.FloatType or\111 type(self.yllcorner) == types.IntType)112 assert ( type(self.zone) == types.IntType)110 assert (self.xllcorner.dtype.kind in num.typecodes['Float'] or 111 self.xllcorner.dtype.kind in num.typecodes['Integer']) 112 assert (self.yllcorner.dtype.kind in num.typecodes['Float'] or 113 self.yllcorner.dtype.kind in num.typecodes['Integer']) 114 assert (self.zone.dtype.kind in num.typecodes['Integer']) 113 115 114 116 try: … … 173 175 174 176 # Fix some assertion failures 175 if (type(self.zone) == num.ArrayType and self.zone.shape == ()):177 if isinstance(self.zone, num.ndarray) and self.zone.shape == (): 176 178 self.zone = self.zone[0] 177 if type(self.xllcorner) == num.ArrayTypeand self.xllcorner.shape == ():179 if isinstance(self.xllcorner, num.ndarray) and self.xllcorner.shape == (): 178 180 self.xllcorner = self.xllcorner[0] 179 if type(self.yllcorner) == num.ArrayTypeand self.yllcorner.shape == ():181 if isinstance(self.yllcorner, num.ndarray) and self.yllcorner.shape == (): 180 182 self.yllcorner = self.yllcorner[0] 181 182 assert (type(self.xllcorner) == types.FloatType) 183 assert (type(self.yllcorner) == types.FloatType) 184 assert (type(self.zone) == types.IntType) 185 183 184 # useless asserts - see try/except code above 185 # assert (type(self.xllcorner) == types.FloatType) 186 # assert (type(self.yllcorner) == types.FloatType) 187 # assert (type(self.zone) == types.IntType) 188 186 189 187 190 def change_points_geo_ref(self, points, 188 191 points_geo_ref=None): 189 192 """ 190 Change the geo reference of a list or Numeric array of points to193 Change the geo reference of a list or numeric array of points to 191 194 be this reference.(The reference used for this object) 192 195 If the points do not have a geo ref, assume 'absolute' values … … 197 200 is_list = True 198 201 199 points = ensure_numeric(points, num. Float)202 points = ensure_numeric(points, num.float) 200 203 201 204 if len(points.shape) == 1: … … 243 246 244 247 245 248 ## 249 # @brief 250 # @param points 251 # @return 252 # @note 246 253 def get_absolute(self, points): 247 """ 248 Given a set of points geo referenced to this instance, 254 """Given a set of points geo referenced to this instance, 249 255 return the points as absolute values. 
250 256 """ 251 257 252 #if self.is_absolute ():258 #if self.is_absolute: 253 259 # return points 260 254 261 is_list = False 255 262 if type(points) == types.ListType: 256 263 is_list = True 257 264 258 points = ensure_numeric(points, num. Float)265 points = ensure_numeric(points, num.float) 259 266 if len(points.shape) == 1: 260 267 #One point has been passed … … 264 271 #points = reshape(points, (1,2)) 265 272 266 267 273 msg = 'Input must be an N x 2 array or list of (x,y) values. ' 268 274 msg += 'I got an %d x %d array' %points.shape 269 275 if not points.shape[1] == 2: 270 276 raise ShapeError, msg 271 272 277 273 278 # Add geo ref to points 279 #if not self.is_absolute: 274 280 if not self.is_absolute(): 275 281 points[:,0] += self.xllcorner 276 282 points[:,1] += self.yllcorner 277 283 #self.is_absolute = True 278 284 279 285 if is_list: … … 296 302 is_list = True 297 303 298 points = ensure_numeric(points, num. Float)304 points = ensure_numeric(points, num.float) 299 305 if len(points.shape) == 1: 300 306 #One point has been passed -
branches/numpy/anuga/coordinate_transforms/test_geo_reference.py
r6149 r6304 9 9 from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a 10 10 11 import Numericas num11 import numpy as num 12 12 13 13 … … 171 171 #print "4 new_lofl",new_lofl 172 172 173 self.failUnless( type(new_lofl) == num.ArrayType, ' failed')173 self.failUnless(isinstance(new_lofl, num.ndarray), ' failed') 174 174 self.failUnless(type(new_lofl) == type(lofl), ' failed') 175 175 lofl[:,0] -= x … … 189 189 #print "5 new_lofl",new_lofl 190 190 191 self.failUnless( type(new_lofl) == num.ArrayType, ' failed')191 self.failUnless(isinstance(new_lofl, num.ndarray), ' failed') 192 192 self.failUnless(type(new_lofl) == type(lofl), ' failed') 193 193 … … 206 206 #print "new_lofl",new_lofl 207 207 208 self.failUnless( type(new_lofl) == num.ArrayType, ' failed')208 self.failUnless(isinstance(new_lofl, num.ndarray), ' failed') 209 209 self.failUnless(type(new_lofl) == type(lofl), ' failed') 210 210 for point,new_point in map(None,[lofl],new_lofl): … … 229 229 self.failUnless(point[0]+point_x-x==new_point[0], ' failed') 230 230 self.failUnless(point[1]+point_y-y==new_point[1], ' failed') 231 232 233 def test_get_absolute(self):231 232 def test_get_absolute_list(self): 233 # test with supplied offsets 234 234 x = 7.0 235 235 y = 3.0 236 236 237 g = Geo_reference(56,x,y) 238 lofl = [[3.0,34.0], [64.0,6.0]] 239 new_lofl = g.get_absolute(lofl) 240 #print "lofl",lofl 241 #print "new_lofl",new_lofl 242 243 self.failUnless(type(new_lofl) == types.ListType, ' failed') 244 self.failUnless(type(new_lofl) == type(lofl), ' failed') 245 for point,new_point in map(None,lofl,new_lofl): 246 self.failUnless(point[0]+x==new_point[0], ' failed') 247 self.failUnless(point[1]+y==new_point[1], ' failed') 248 237 g = Geo_reference(56, x, y) 238 points = [[3.0,34.0], [64.0,6.0]] 239 new_points = g.get_absolute(points) 240 241 self.failUnless(type(new_points) == types.ListType, 'failed') 242 self.failUnless(type(new_points) == type(points), 'failed') 243 for point, new_point in map(None, points, new_points): 244 self.failUnless(point[0]+x == new_point[0], 'failed') 245 self.failUnless(point[1]+y == new_point[1], 'failed') 246 247 # test with no supplied offsets 248 g = Geo_reference() 249 points = [[3.0,34.0], [64.0,6.0]] 250 new_points = g.get_absolute(points) 251 252 self.failUnless(type(new_points) == types.ListType, 'failed') 253 self.failUnless(type(new_points) == type(points), 'failed') 254 for point, new_point in map(None, points, new_points): 255 self.failUnless(point[0] == new_point[0], 'failed') 256 self.failUnless(point[1] == new_point[1], 'failed') 249 257 258 # test that calling get_absolute twice does the right thing 259 # first call 260 dx = 10.0 261 dy = 10.0 262 g = Geo_reference(56, dx, dy) 263 points = [[3.0,34.0], [64.0,6.0]] 264 expected_new_points = [[3.0+dx,34.0+dy], [64.0+dx,6.0+dy]] 265 new_points = g.get_absolute(points) 266 267 self.failUnless(type(new_points) == types.ListType, 'failed') 268 self.failUnless(type(new_points) == type(points), 'failed') 269 for point, new_point in map(None, expected_new_points, new_points): 270 self.failUnless(point[0] == new_point[0], 'failed') 271 self.failUnless(point[1] == new_point[1], 'failed') 272 273 # and repeat from 'new_points = g.get_absolute(points)' above 274 # to see if second call with same input gives same results. 
275 new_points = g.get_absolute(points) 276 277 self.failUnless(type(new_points) == types.ListType, 'failed') 278 self.failUnless(type(new_points) == type(points), 'failed') 279 for point, new_point in map(None, expected_new_points, new_points): 280 self.failUnless(point[0] == new_point[0], 'failed') 281 self.failUnless(point[1] == new_point[1], 'failed') 282 283 def test_get_absolute_array(self): 284 '''Same test as test_get_absolute_list(), but with numeric arrays.''' 285 286 # test with supplied offsets 287 x = 7.0 288 y = 3.0 289 290 g = Geo_reference(56, x, y) 291 points = num.array([[3.0,34.0], [64.0,6.0]]) 292 new_points = g.get_absolute(points) 293 294 self.failUnless(isinstance(new_points, num.ndarray), 'failed') 295 self.failUnless(type(new_points) == type(points), 'failed') 296 self.failUnless(num.alltrue(points == new_points), 'failed') 297 298 # test with no supplied offsets 250 299 g = Geo_reference() 251 lofl = [[3.0,34.0], [64.0,6.0]] 252 new_lofl = g.get_absolute(lofl) 253 #print "lofl",lofl 254 #print "new_lofl",new_lofl 255 256 self.failUnless(type(new_lofl) == types.ListType, ' failed') 257 self.failUnless(type(new_lofl) == type(lofl), ' failed') 258 for point,new_point in map(None,lofl,new_lofl): 259 self.failUnless(point[0]==new_point[0], ' failed') 260 self.failUnless(point[1]==new_point[1], ' failed') 261 300 points = num.array([[3.0,34.0], [64.0,6.0]]) 301 new_points = g.get_absolute(points) 302 303 self.failUnless(isinstance(new_points, num.ndarray), 'failed') 304 self.failUnless(type(new_points) == type(points), 'failed') 305 self.failUnless(num.alltrue(points == new_points), 'failed') 306 307 # test that calling get_absolute twice does the right thing 308 # first call 309 dx = 10.0 310 dy = 10.0 311 g = Geo_reference(56, dx, dy) 312 points = num.array([[3.0,34.0], [64.0,6.0]]) 313 expected_new_points = num.array([[3.0+dx,34.0+dy], [64.0+dx,6.0+dy]]) 314 new_points = g.get_absolute(points) 315 316 self.failUnless(isinstance(new_points, num.ndarray), 'failed') 317 self.failUnless(type(new_points) == type(points), 'failed') 318 msg = ('First call of .get_absolute() returned %s\nexpected %s' 319 % (str(new_points), str(expected_new_points))) 320 self.failUnless(num.alltrue(expected_new_points == new_points), msg) 321 322 # and repeat from 'new_points = g.get_absolute(points)' above 323 # to see if second call with same input gives same results. 324 new_points = g.get_absolute(points) 325 326 self.failUnless(isinstance(new_points, num.ndarray), 'failed') 327 self.failUnless(type(new_points) == type(points), 'failed') 328 msg = ('Second call of .get_absolute() returned %s\nexpected %s' 329 % (str(new_points), str(expected_new_points))) 330 self.failUnless(num.alltrue(expected_new_points == new_points), msg) 331 332 # and repeat again to see if *third* call with same input 333 # gives same results. 
334 new_points = g.get_absolute(points) 335 336 self.failUnless(isinstance(new_points, num.ndarray), 'failed') 337 self.failUnless(type(new_points) == type(points), 'failed') 338 msg = ('Second call of .get_absolute() returned %s\nexpected %s' 339 % (str(new_points), str(expected_new_points))) 340 self.failUnless(num.alltrue(expected_new_points == new_points), msg) 341 262 342 def test_is_absolute(self): 263 343 … … 427 507 if __name__ == "__main__": 428 508 429 suite = unittest.makeSuite(geo_referenceTestCase,'test') 509 #suite = unittest.makeSuite(geo_referenceTestCase,'test') 510 suite = unittest.makeSuite(geo_referenceTestCase,'test_get_absolute') 430 511 runner = unittest.TextTestRunner() #verbosity=2) 431 512 runner.run(suite) -
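The new test_get_absolute_array test calls get_absolute() repeatedly with the same array to make sure the input is not modified in place. Condensed outside the unittest harness, the check amounts to something like:

    import numpy as num
    from anuga.coordinate_transforms.geo_reference import Geo_reference

    g = Geo_reference(56, 10.0, 10.0)
    points = num.array([[3.0, 34.0], [64.0, 6.0]])
    expected = num.array([[13.0, 44.0], [74.0, 16.0]])

    # Two calls with the same input must give the same (offset) answer;
    # a second shift would indicate that 'points' was mutated in place.
    assert num.alltrue(g.get_absolute(points) == expected)
    assert num.alltrue(g.get_absolute(points) == expected)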
branches/numpy/anuga/coordinate_transforms/test_lat_long_UTM_conversion.py
r6149 r6304 12 12 from anuga.utilities.anuga_exceptions import ANUGAError 13 13 14 import Numeric as num 14 import numpy as num 15 15 16 16 -
branches/numpy/anuga/coordinate_transforms/test_redfearn.py
r6149 r6304 11 11 from anuga.utilities.anuga_exceptions import ANUGAError 12 12 13 import Numeric as num 13 import numpy as num 14 14 15 15 -
branches/numpy/anuga/culvert_flows/Test_Culvert_Flat_Water_Lev.py
r6150 r6304 26 26 from math import pi,pow,sqrt 27 27 28 import Numeric as num 28 import numpy as num 29 29 30 30 -
branches/numpy/anuga/culvert_flows/culvert_class.py
r6150 r6304 14 14 from anuga.config import minimum_allowed_height, velocity_protection 15 15 16 import Numeric as num 16 import numpy as num 17 17 18 18 -
branches/numpy/anuga/culvert_flows/culvert_polygons.py
r6150 r6304 6 6 from anuga.utilities.polygon import inside_polygon, polygon_area 7 7 8 import Numeric as num 8 import numpy as num 9 9 10 10 -
branches/numpy/anuga/culvert_flows/test_culvert_class.py
r6150 r6304 21 21 from math import pi,pow,sqrt 22 22 23 import Numericas num23 import numpy as num 24 24 25 25 … … 470 470 ref_volume = domain.get_quantity('stage').get_integral() 471 471 for t in domain.evolve(yieldstep = 1, finaltime = 25): 472 472 473 #print domain.timestepping_statistics() 473 474 new_volume = domain.get_quantity('stage').get_integral() 474 475 475 476 msg = 'Total volume has changed' 476 477 assert num.allclose(new_volume, ref_volume), msg … … 564 565 #------------------------------------------------------------- 565 566 if __name__ == "__main__": 566 #suite = unittest.makeSuite(Test_Culvert, 'test_that_culvert_rating_limits_flow_in_shallow_inlet_condition')567 567 suite = unittest.makeSuite(Test_Culvert, 'test') 568 runner = unittest.TextTestRunner() 568 runner = unittest.TextTestRunner() #verbosity=2) 569 569 runner.run(suite) 570 570 -
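The retained test loop checks that the integral of 'stage' stays constant while the culvert flow evolves, comparing with num.allclose. A rough sketch of that check as a helper, assuming 'domain' is an already configured anuga Domain like the one built earlier in this test module:

    import numpy as num

    def check_volume_conservation(domain, yieldstep=1, finaltime=25):
        ref_volume = domain.get_quantity('stage').get_integral()
        for t in domain.evolve(yieldstep=yieldstep, finaltime=finaltime):
            new_volume = domain.get_quantity('stage').get_integral()
            msg = 'Total volume has changed'
            assert num.allclose(new_volume, ref_volume), msg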
branches/numpy/anuga/damage_modelling/inundation_damage.py
r6151 r6304 10 10 from types import StringType 11 11 12 import Numericas num12 import numpy as num 13 13 14 14 … … 17 17 except ImportError: 18 18 # Hand-built mockup of the things we need from the kinds package, since it 19 # was recently removed from the standard Numeric distro. Some users may19 # was recently removed from the standard numeric distro. Some users may 20 20 # not have it by default. 21 21 class _bunch: … … 314 314 315 315 # the data being created 316 struct_damage = num.zeros(self.structure_count, num. Float)317 contents_damage = num.zeros(self.structure_count, num. Float)316 struct_damage = num.zeros(self.structure_count, num.float) 317 contents_damage = num.zeros(self.structure_count, num.float) 318 318 self.struct_inundated = ['']* self.structure_count 319 319 -
branches/numpy/anuga/damage_modelling/test_inundation_damage.py
r6151 r6304 17 17 from anuga.shallow_water.data_manager import get_dataobject 18 18 19 import Numericas num19 import numpy as num 20 20 21 21 … … 83 83 #Initial condition - with jumps 84 84 bed = domain.quantities['elevation'].vertex_values 85 stage = num.zeros(bed.shape, num. Float)85 stage = num.zeros(bed.shape, num.float) 86 86 87 87 h = 0.3 … … 153 153 #Initial condition - with jumps 154 154 bed = domain.quantities['elevation'].vertex_values 155 stage = num.zeros(bed.shape, num. Float)155 stage = num.zeros(bed.shape, num.float) 156 156 157 157 h = 30. … … 476 476 edm = EventDamageModel([0.0]*17, [0.0]*17, [0.0]*17, 477 477 [0.0]*17, [0.0]*17) 478 edm.struct_damage = num.zeros(17,num. Float)479 edm.contents_damage = num.zeros(17,num. Float)478 edm.struct_damage = num.zeros(17,num.float) 479 edm.contents_damage = num.zeros(17,num.float) 480 480 collapse_probability = {0.4:[0], #0 481 481 0.6:[1], #1 … … 597 597 pass 598 598 suite = unittest.makeSuite(Test_inundation_damage,'test') 599 #suite = unittest.makeSuite(Test_inundation_damage,'test_inundation_damage_list_as_input')600 599 runner = unittest.TextTestRunner() 601 600 runner.run(suite) -
branches/numpy/anuga/fit_interpolate/fit.py
r6244 r6304 45 45 class VertsWithNoTrianglesError(exceptions.Exception): pass 46 46 47 import Numericas num47 import numpy as num 48 48 49 49 … … 66 66 67 67 vertex_coordinates: List of coordinate pairs [xi, eta] of 68 points constituting a mesh (or an m x 2 Numeric array or68 points constituting a mesh (or an m x 2 numeric array or 69 69 a geospatial object) 70 70 Points may appear multiple times 71 71 (e.g. if vertices have discontinuities) 72 72 73 triangles: List of 3-tuples (or a Numeric array) of73 triangles: List of 3-tuples (or a numeric array) of 74 74 integers representing indices of all vertices in the mesh. 75 75 … … 251 251 if len(z.shape) > 1: 252 252 att_num = z.shape[1] 253 self.Atz = num.zeros((m,att_num), num. Float)253 self.Atz = num.zeros((m,att_num), num.float) 254 254 else: 255 255 att_num = 1 256 self.Atz = num.zeros((m,), num. Float)256 self.Atz = num.zeros((m,), num.float) 257 257 assert z.shape[0] == point_coordinates.shape[0] 258 258 … … 336 336 point_coordinates: The co-ordinates of the data points. 337 337 List of coordinate pairs [x, y] of 338 data points or an nx2 Numeric array or a Geospatial_data object338 data points or an nx2 numeric array or a Geospatial_data object 339 339 or points file filename 340 340 z: Single 1d vector or array of data at the point_coordinates. … … 382 382 if point_coordinates is None: 383 383 if verbose: print 'Warning: no data points in fit' 384 assert self.AtA <> None, 'no interpolation matrix' 385 assert self.Atz <> None 384 #assert self.AtA != None, 'no interpolation matrix' 385 #assert self.Atz != None 386 assert not self.AtA is None, 'no interpolation matrix' 387 assert not self.Atz is None 386 388 387 389 # FIXME (DSG) - do a message … … 433 435 point_coordinates: The co-ordinates of the data points. 434 436 List of coordinate pairs [x, y] of 435 data points or an nx2 Numeric array or a Geospatial_data object437 data points or an nx2 numeric array or a Geospatial_data object 436 438 z: Single 1d vector or array of data at the point_coordinates. 437 439 attribute_name: Used to get the z values from the … … 448 450 absolute = True) 449 451 450 # Convert input to Numeric arrays452 # Convert input to numeric arrays 451 453 if z is not None: 452 z = ensure_numeric(z, num. Float)454 z = ensure_numeric(z, num.float) 453 455 else: 454 456 msg = 'z not specified' … … 456 458 z = point_coordinates.get_attributes(attribute_name) 457 459 458 point_coordinates = ensure_numeric(point_coordinates, num. Float)460 point_coordinates = ensure_numeric(point_coordinates, num.float) 459 461 self._build_matrix_AtA_Atz(point_coordinates, z, verbose) 460 462 … … 556 558 Inputs: 557 559 vertex_coordinates: List of coordinate pairs [xi, eta] of 558 points constituting a mesh (or an m x 2 Numeric array or560 points constituting a mesh (or an m x 2 numeric array or 559 561 a geospatial object) 560 562 Points may appear multiple times 561 563 (e.g. if vertices have discontinuities) 562 564 563 triangles: List of 3-tuples (or a Numeric array) of565 triangles: List of 3-tuples (or a numeric array) of 564 566 integers representing indices of all vertices in the mesh. 565 567 566 568 point_coordinates: List of coordinate pairs [x, y] of data points 567 (or an nx2 Numeric array). This can also be a .csv/.txt/.pts569 (or an nx2 numeric array). This can also be a .csv/.txt/.pts 568 570 file name. 569 571 … … 593 595 # are None 594 596 595 #Convert input to Numeric arrays596 triangles = ensure_numeric(triangles, num. 
Int)597 #Convert input to numeric arrays 598 triangles = ensure_numeric(triangles, num.int) 597 599 vertex_coordinates = ensure_absolute(vertex_coordinates, 598 600 geo_reference = mesh_origin) … … 662 664 vertex_coordinates = mesh_dict['vertices'] 663 665 triangles = mesh_dict['triangles'] 664 if type(mesh_dict['vertex_attributes']) == num.ArrayType:666 if isinstance(mesh_dict['vertex_attributes'], num.ndarray): 665 667 old_point_attributes = mesh_dict['vertex_attributes'].tolist() 666 668 else: 667 669 old_point_attributes = mesh_dict['vertex_attributes'] 668 670 669 if type(mesh_dict['vertex_attribute_titles']) == num.ArrayType:671 if isinstance(mesh_dict['vertex_attribute_titles'], num.ndarray): 670 672 old_title_list = mesh_dict['vertex_attribute_titles'].tolist() 671 673 else: -
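fit.py now allocates its right-hand-side accumulator with num.float and keeps one column per attribute when the data carries several. A small sketch of just that allocation step, using a hypothetical helper name (the full _build_matrix_AtA_Atz() also builds the sparse AtA matrix, omitted here):

    import numpy as num

    def make_Atz(m, z):
        # m: number of mesh vertices; z: attribute values at the data points
        z = num.asarray(z, num.float)
        if len(z.shape) > 1:
            att_num = z.shape[1]
            Atz = num.zeros((m, att_num), num.float)
        else:
            Atz = num.zeros((m,), num.float)
        return Atz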
branches/numpy/anuga/fit_interpolate/general_fit_interpolate.py
r6152 r6304 35 35 from anuga.fit_interpolate.search_functions import set_last_triangle 36 36 37 import Numericas num37 import numpy as num 38 38 39 39 … … 67 67 68 68 vertex_coordinates: List of coordinate pairs [xi, eta] of 69 points constituting a mesh (or an m x 2 Numeric array or69 points constituting a mesh (or an m x 2 numeric array or 70 70 a geospatial object) 71 71 Points may appear multiple times 72 72 (e.g. if vertices have discontinuities) 73 73 74 triangles: List of 3-tuples (or a Numeric array) of74 triangles: List of 3-tuples (or a numeric array) of 75 75 integers representing indices of all vertices in the mesh. 76 76 … … 96 96 # are None 97 97 98 #Convert input to Numeric arrays99 triangles = ensure_numeric(triangles, num. Int)98 #Convert input to numeric arrays 99 triangles = ensure_numeric(triangles, num.int) 100 100 vertex_coordinates = ensure_absolute(vertex_coordinates, 101 101 geo_reference = mesh_origin) -
branches/numpy/anuga/fit_interpolate/interpolate.py
r6223 r6304 41 41 42 42 43 import Numericas num43 import numpy as num 44 44 45 45 … … 77 77 vertex_coordinates: List of coordinate pairs [xi, eta] of 78 78 points constituting a mesh 79 (or an m x 2 Numeric array or79 (or an m x 2 numeric array or 80 80 a geospatial object) 81 81 Points may appear multiple times 82 82 (e.g. if vertices have discontinuities) 83 83 84 triangles: List of 3-tuples (or a Numeric array) of84 triangles: List of 3-tuples (or a numeric array) of 85 85 integers representing indices of all vertices 86 86 in the mesh. … … 92 92 interpolation_points: Interpolate mesh data to these positions. 93 93 List of coordinate pairs [x, y] of 94 data points or an nx2 Numeric array or a94 data points or an nx2 numeric array or a 95 95 Geospatial_data object 96 96 … … 132 132 133 133 # Create interpolation object with matrix 134 args = (ensure_numeric(vertex_coordinates, num. Float),134 args = (ensure_numeric(vertex_coordinates, num.float), 135 135 ensure_numeric(triangles)) 136 136 kwargs = {'mesh_origin': mesh_origin, … … 181 181 Inputs: 182 182 vertex_coordinates: List of coordinate pairs [xi, eta] of 183 points constituting a mesh (or an m x 2 Numeric array or183 points constituting a mesh (or an m x 2 numeric array or 184 184 a geospatial object) 185 185 Points may appear multiple times 186 186 (e.g. if vertices have discontinuities) 187 187 188 triangles: List of 3-tuples (or a Numeric array) of188 triangles: List of 3-tuples (or a numeric array) of 189 189 integers representing indices of all vertices in the mesh. 190 190 … … 243 243 point_coordinates: Interpolate mesh data to these positions. 244 244 List of coordinate pairs [x, y] of 245 data points or an nx2 Numeric array or a Geospatial_data object245 data points or an nx2 numeric array or a Geospatial_data object 246 246 247 247 If point_coordinates is absent, the points inputted last time … … 294 294 # creating a dummy array to concatenate to. 295 295 296 f = ensure_numeric(f, num. Float)296 f = ensure_numeric(f, num.float) 297 297 if len(f.shape) > 1: 298 z = num.zeros((0, f.shape[1]) )298 z = num.zeros((0, f.shape[1]), num.int) #array default# 299 299 else: 300 z = num.zeros((0,) )300 z = num.zeros((0,), num.int) #array default# 301 301 302 302 for end in range(start_blocking_len, … … 340 340 point_coordinates = point_coordinates.get_data_points(absolute=True) 341 341 342 # Convert lists to Numeric arrays if necessary343 point_coordinates = ensure_numeric(point_coordinates, num. Float)344 f = ensure_numeric(f, num. Float)342 # Convert lists to numeric arrays if necessary 343 point_coordinates = ensure_numeric(point_coordinates, num.float) 344 f = ensure_numeric(f, num.float) 345 345 346 346 from anuga.caching import myhash … … 447 447 if verbose: print 'Building interpolation matrix' 448 448 449 # Convert point_coordinates to Numeric arrays, in case it was a list.450 point_coordinates = ensure_numeric(point_coordinates, num. Float)449 # Convert point_coordinates to numeric arrays, in case it was a list. 450 point_coordinates = ensure_numeric(point_coordinates, num.float) 451 451 452 452 if verbose: print 'Getting indices inside mesh boundary' … … 526 526 points: Interpolate mesh data to these positions. 527 527 List of coordinate pairs [x, y] of 528 data points or an nx2 Numeric array or a Geospatial_data object528 data points or an nx2 numeric array or a Geospatial_data object 529 529 530 530 No test for this yet. 
… … 694 694 695 695 Mandatory input 696 time: px1 array of monotonously increasing times ( Float)697 quantities: Dictionary of arrays or 1 array ( Float)696 time: px1 array of monotonously increasing times (float) 697 quantities: Dictionary of arrays or 1 array (float) 698 698 The arrays must either have dimensions pxm or mx1. 699 699 The resulting function will be time dependent in … … 704 704 quantity_names: List of keys into the quantities dictionary for 705 705 imposing a particular order on the output vector. 706 vertex_coordinates: mx2 array of coordinates ( Float)706 vertex_coordinates: mx2 array of coordinates (float) 707 707 triangles: nx3 array of indices into vertex_coordinates (Int) 708 708 interpolation_points: Nx2 array of coordinates to be interpolated to … … 810 810 self.quantities_range = {} 811 811 for name in quantity_names: 812 q = quantities[name][:].flat 812 q = quantities[name][:].flatten() 813 813 self.quantities_range[name] = [min(q), max(q)] 814 814 … … 830 830 interpolation_points = ensure_numeric(interpolation_points) 831 831 except: 832 msg = 'Interpolation points must be an N x 2 Numeric array ' \832 msg = 'Interpolation points must be an N x 2 numeric array ' \ 833 833 'or a list of points\n' 834 834 msg += 'Got: %s.' %(str(self.interpolation_points)[:60] + '...') … … 913 913 914 914 for name in quantity_names: 915 self.precomputed_values[name] = num.zeros((p, m), num. Float)915 self.precomputed_values[name] = num.zeros((p, m), num.float) 916 916 917 917 if verbose is True: … … 1056 1056 1057 1057 # Compute interpolated values 1058 q = num.zeros(len(self.quantity_names), num. Float)1058 q = num.zeros(len(self.quantity_names), num.float) 1059 1059 for i, name in enumerate(self.quantity_names): 1060 1060 Q = self.precomputed_values[name] … … 1105 1105 res = [] 1106 1106 for col in q: 1107 res.append(col*num.ones(N, num. Float))1107 res.append(col*num.ones(N, num.float)) 1108 1108 1109 1109 return res … … 1145 1145 minq, maxq = self.quantities_range[name] 1146 1146 str += ' %s in [%f, %f]\n' %(name, minq, maxq) 1147 #q = quantities[name][:].flat 1147 #q = quantities[name][:].flatten() 1148 1148 #str += ' %s in [%f, %f]\n' %(name, min(q), max(q)) 1149 1149 … … 1158 1158 1159 1159 for name in quantity_names: 1160 q = precomputed_values[name][:].flat 1160 q = precomputed_values[name][:].flatten() 1161 1161 str += ' %s at interpolation points in [%f, %f]\n'\ 1162 1162 %(name, min(q), max(q)) … … 1190 1190 1191 1191 #Add the x and y together 1192 vertex_coordinates = num.concatenate((x[:,num. NewAxis], y[:,num.NewAxis]),axis=1)1192 vertex_coordinates = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]),axis=1) 1193 1193 1194 1194 #Will return the quantity values at the specified times and locations … … 1262 1262 keys.remove("volumes") 1263 1263 keys.remove("time") 1264 #Turn NetCDF objects into Numeric arrays1264 #Turn NetCDF objects into numeric arrays 1265 1265 quantities = {} 1266 1266 for name in keys: -
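Two recurring numpy spellings in this file: num.NewAxis becomes num.newaxis when pairing coordinate vectors into an N x 2 array, and the .flat attribute gives way to .flatten() where a real one-dimensional copy is wanted (for instance the per-quantity [min, max] ranges). A short illustration with made-up data:

    import numpy as num

    x = num.array([0.0, 1.0, 2.0])
    y = num.array([0.0, 0.5, 1.0])

    # column-wise pairing of the coordinate vectors
    vertex_coordinates = num.concatenate((x[:, num.newaxis],
                                          y[:, num.newaxis]), axis=1)
    assert vertex_coordinates.shape == (3, 2)

    # a flattened copy for computing a quantity's overall range
    q = num.array([[1.0, 2.0], [3.0, 4.0]])
    q_range = [min(q.flatten()), max(q.flatten())]
    assert q_range == [1.0, 4.0]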
branches/numpy/anuga/fit_interpolate/search_functions.py
r6174 r6304 11 11 from anuga.config import max_float 12 12 13 import Numericas num13 import numpy as num 14 14 15 15 … … 22 22 num.array([max_float,max_float]), 23 23 num.array([max_float,max_float])), 24 (num.array([1,1] , num.Int), #array default#25 num.array([0,0] , num.Int), #array default#24 (num.array([1,1]), 25 num.array([0,0]), 26 26 num.array([-1.1,-1.1]))]]] 27 27 -
branches/numpy/anuga/fit_interpolate/test_fit.py
r6174 r6304 21 21 from anuga.shallow_water import Domain 22 22 23 import Numericas num23 import numpy as num 24 24 25 25 … … 582 582 583 583 #Define f(x,y) = x 584 f = num.array([0,0,2] , num.Int) #Value at global vertex 2 #array default#584 f = num.array([0,0,2]) #Value at global vertex 2 585 585 586 586 #Check that int (df/dx)**2 + (df/dy)**2 dx dy = … … 589 589 590 590 #Define f(x,y) = y 591 f = num.array([0,2,0] , num.Int) #Value at global vertex 1 #array default#591 f = num.array([0,2,0]) #Value at global vertex 1 592 592 593 593 #Check that int (df/dx)**2 + (df/dy)**2 dx dy = … … 596 596 597 597 #Define f(x,y) = x+y 598 f = num.array([0,2,2] , num.Int) #Values at global vertex 1 and 2 #array default#598 f = num.array([0,2,2]) #Values at global vertex 1 and 2 599 599 600 600 #Check that int (df/dx)**2 + (df/dy)**2 dx dy = … … 623 623 624 624 #Define f(x,y) = x 625 f = num.array([0,0,2,0,2,4] , num.Int) #f evaluated at points a-f #array default#625 f = num.array([0,0,2,0,2,4]) #f evaluated at points a-f 626 626 627 627 #Check that int (df/dx)**2 + (df/dy)**2 dx dy = … … 630 630 631 631 #Define f(x,y) = y 632 f = num.array([0,2,0,4,2,0] , num.Int) #f evaluated at points a-f #array default#632 f = num.array([0,2,0,4,2,0]) #f evaluated at points a-f 633 633 634 634 #Check that int (df/dx)**2 + (df/dy)**2 dx dy = … … 637 637 638 638 #Define f(x,y) = x+y 639 f = num.array([0,2,2,4,4,4] , num.Int) #f evaluated at points a-f #array default#639 f = num.array([0,2,2,4,4,4]) #f evaluated at points a-f 640 640 641 641 #Check that int (df/dx)**2 + (df/dy)**2 dx dy = … … 1099 1099 if __name__ == "__main__": 1100 1100 suite = unittest.makeSuite(Test_Fit,'test') 1101 #suite = unittest.makeSuite(Test_Fit,'test_smooth_attributes_to_mesh_function') 1102 #suite = unittest.makeSuite(Test_Fit,'') 1103 runner = unittest.TextTestRunner(verbosity=1) 1101 runner = unittest.TextTestRunner() #verbosity=1) 1104 1102 runner.run(suite) 1105 1103 -
branches/numpy/anuga/fit_interpolate/test_interpolate.py
r6189 r6304 15 15 from Scientific.IO.NetCDF import NetCDFFile 16 16 17 import Numericas num17 import numpy as num 18 18 19 19 … … 67 67 68 68 bed = domain.quantities['elevation'].vertex_values 69 stage = num.zeros(bed.shape, num. Float)69 stage = num.zeros(bed.shape, num.float) 70 70 71 71 h = 0.3 … … 129 129 130 130 x, y, vertex_values, triangles = quantity.get_vertex_values(xy=True, smooth=False) 131 vertex_coordinates = num.concatenate( (x[:, num. NewAxis], y[:, num.NewAxis]), axis=1 )131 vertex_coordinates = num.concatenate( (x[:, num.newaxis], y[:, num.newaxis]), axis=1 ) 132 132 # FIXME: This concat should roll into get_vertex_values 133 133 … … 149 149 150 150 x, y, vertex_values, triangles = quantity.get_vertex_values(xy=True, smooth=False) 151 vertex_coordinates = num.concatenate( (x[:, num. NewAxis], y[:, num.NewAxis]), axis=1 )151 vertex_coordinates = num.concatenate( (x[:, num.newaxis], y[:, num.newaxis]), axis=1 ) 152 152 # FIXME: This concat should roll into get_vertex_values 153 153 … … 182 182 183 183 x, y, vertex_values, triangles = quantity.get_vertex_values(xy=True, smooth=False) 184 vertex_coordinates = num.concatenate( (x[:, num. NewAxis], y[:, num.NewAxis]), axis=1 )184 vertex_coordinates = num.concatenate( (x[:, num.newaxis], y[:, num.newaxis]), axis=1 ) 185 185 # FIXME: This concat should roll into get_vertex_values 186 186 … … 201 201 202 202 x, y, vertex_values, triangles = quantity.get_vertex_values(xy=True, smooth=False) 203 vertex_coordinates = num.concatenate( (x[:, num. NewAxis], y[:, num.NewAxis]), axis=1 )203 vertex_coordinates = num.concatenate( (x[:, num.newaxis], y[:, num.newaxis]), axis=1 ) 204 204 # FIXME: This concat should roll into get_vertex_values 205 205 … … 233 233 234 234 x, y, vertex_values, triangles = quantity.get_vertex_values(xy=True, smooth=False) 235 vertex_coordinates = num.concatenate( (x[:, num. NewAxis], y[:, num.NewAxis]), axis=1 )235 vertex_coordinates = num.concatenate( (x[:, num.newaxis], y[:, num.newaxis]), axis=1 ) 236 236 # FIXME: This concat should roll into get_vertex_values 237 237 … … 1078 1078 1079 1079 #One quantity 1080 Q = num.zeros( (3,6), num. Float )1080 Q = num.zeros( (3,6), num.float ) 1081 1081 1082 1082 #Linear in time and space … … 1225 1225 1226 1226 #One quantity 1227 Q = num.zeros( (3,6), num. Float )1227 Q = num.zeros( (3,6), num.float ) 1228 1228 1229 1229 #Linear in time and space … … 1285 1285 1286 1286 # One quantity 1287 Q = num.zeros((8,6), num. Float)1287 Q = num.zeros((8,6), num.float) 1288 1288 1289 1289 # Linear in time and space … … 1353 1353 1354 1354 #One quantity 1355 Q = num.zeros( (2,6), num. Float )1355 Q = num.zeros( (2,6), num.float ) 1356 1356 1357 1357 #Linear in time and space … … 1419 1419 1420 1420 # One quantity 1421 Q = num.zeros( (3,6), num. Float )1421 Q = num.zeros( (3,6), num.float ) 1422 1422 1423 1423 # Linear in time and space … … 1608 1608 1609 1609 #One quantity 1610 Q = num.zeros( (len(time),6), num. Float )1610 Q = num.zeros( (len(time),6), num.float ) 1611 1611 1612 1612 #Linear in time and space … … 1850 1850 if __name__ == "__main__": 1851 1851 suite = unittest.makeSuite(Test_Interpolate,'test') 1852 #suite = unittest.makeSuite(Test_Interpolate,'test_interpolate_one_point_many_triangles') 1853 runner = unittest.TextTestRunner(verbosity=1) 1852 runner = unittest.TextTestRunner() #verbosity=1) 1854 1853 runner.run(suite) 1855 1854 -
branches/numpy/anuga/fit_interpolate/test_search_functions.py
r6152 r6304 211 211 if __name__ == "__main__": 212 212 suite = unittest.makeSuite(Test_search_functions,'test') 213 #suite = unittest.makeSuite(Test_search_functions,'expanding_search') 214 runner = unittest.TextTestRunner(verbosity=1) 213 runner = unittest.TextTestRunner() #verbosity=1) 215 214 runner.run(suite) 216 215 -
branches/numpy/anuga/geospatial_data/geospatial_data.py
r6166 r6304 13 13 from Scientific.IO.NetCDF import NetCDFFile 14 14 15 import Numericas num15 import numpy as num 16 16 17 17 from anuga.coordinate_transforms.lat_long_UTM_conversion import UTMtoLL … … 23 23 from anuga.config import points_file_block_line_size as MAX_READ_LINES 24 24 from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a 25 from anuga.config import netcdf_float 25 26 26 27 DEFAULT_ATTRIBUTE = 'elevation' … … 56 57 load_file_now=True, 57 58 verbose=False): 58 """ 59 Create instance from data points and associated attributes 59 """Create instance from data points and associated attributes 60 60 61 61 data_points: x,y coordinates in meters. Type must be either a 62 sequence of 2-tuples or an Mx2 Numeric array of floats. A file name62 sequence of 2-tuples or an Mx2 numeric array of floats. A file name 63 63 with extension .txt, .cvs or .pts can also be passed in here. 64 64 … … 131 131 132 132 verbose: 133 134 133 """ 135 134 136 135 if isinstance(data_points, basestring): 137 # assume data pointis really a file name136 # assume data_points is really a file name 138 137 file_name = data_points 139 138 140 139 self.set_verbose(verbose) 141 self.geo_reference = None # create the attribute140 self.geo_reference = None # create the attribute 142 141 self.file_name = file_name 143 142 … … 157 156 data_points=data_points, 158 157 points_are_lats_longs=\ 159 158 points_are_lats_longs) 160 159 self.check_data_points(data_points) 161 160 self.set_attributes(attributes) … … 188 187 # This message was misleading. 189 188 # FIXME (Ole): Are we blocking here or not? 190 # print 'ASCII formats are not that great for '191 # print 'blockwise reading. Consider storing this'192 # print 'data as a pts NetCDF format'189 # print 'ASCII formats are not that great for ' 190 # print 'blockwise reading. Consider storing this' 191 # print 'data as a pts NetCDF format' 193 192 194 193 ## … … 203 202 204 203 ## 205 # @brief Save points data in instance.206 # @param data_points Points data to store in instance and check.204 # @brief Check data points. 205 # @param data_points Points data to check and store in instance. 207 206 # @note Throws ValueError exception if no data. 208 207 def check_data_points(self, data_points): 209 """Checks data points 210 """ 208 """Checks data points""" 211 209 212 210 if data_points is None: … … 217 215 else: 218 216 self.data_points = ensure_numeric(data_points) 217 return 218 219 print 'self.data_points=%s' % str(self.data_points) 220 print 'self.data_points.shape=%s' % str(self.data_points.shape) 219 221 if not (0,) == self.data_points.shape: 220 222 assert len(self.data_points.shape) == 2 … … 226 228 # @note Throws exception if unable to convert dict keys to numeric. 
227 229 def set_attributes(self, attributes): 228 """Check and assign attributes dictionary 229 """ 230 """Check and assign attributes dictionary""" 230 231 231 232 if attributes is None: … … 234 235 235 236 if not isinstance(attributes, DictType): 236 # Convert single attribute into dictionary237 # Convert single attribute into dictionary 237 238 attributes = {DEFAULT_ATTRIBUTE: attributes} 238 239 239 # Check input attributes240 # Check input attributes 240 241 for key in attributes.keys(): 241 242 try: 242 243 attributes[key] = ensure_numeric(attributes[key]) 243 244 except: 244 msg = 'Attribute %s could not be converted' %key245 msg += 'to a numeric vector'246 raise msg245 msg = ("Attribute '%s' (%s) could not be converted to a" 246 "numeric vector" % (str(key), str(attributes[key]))) 247 raise Exception, msg 247 248 248 249 self.attributes = attributes 249 250 250 #def set_geo_reference(self, geo_reference):251 # # FIXME (Ole): Backwards compatibility - deprecate252 # self.setgeo_reference(geo_reference)253 254 251 ## 255 252 # @brief Set the georeference of geospatial data. 256 # @param geo_reference The georeference data 253 # @param geo_reference The georeference data to set. 257 254 # @note Will raise exception if param not instance of Geo_reference. 258 255 def set_geo_reference(self, geo_reference): … … 277 274 msg = 'Argument geo_reference must be a valid Geo_reference ' 278 275 msg += 'object or None.' 279 raise msg276 raise Expection, msg 280 277 281 278 # If a geo_reference already exists, change the point data according to … … 511 508 raise Exception, msg 512 509 else: 513 # other is None:510 # other is None: 514 511 new_points = self.get_data_points(absolute=True) 515 512 new_attributes = self.attributes … … 525 522 # @return The new geospatial object. 526 523 def __radd__(self, other): 527 """Handle cases like None + Geospatial_data(...) 528 """ 524 """Handle cases like None + Geospatial_data(...)""" 529 525 530 526 return self + other … … 542 538 def import_points_file(self, file_name, delimiter=None, verbose=False): 543 539 """ load an .txt, .csv or .pts file 540 544 541 Note: will throw an IOError/SyntaxError if it can't load the file. 545 542 Catch these! … … 571 568 except SyntaxError, e: 572 569 # This should only be if there is a format error 573 msg = ' Could not openfile %s. \n' %file_name570 msg = 'Problem with format of file %s. \n' %file_name 574 571 msg += Error_message['IOError'] 575 572 raise SyntaxError, msg … … 591 588 as_lat_long=False, isSouthHemisphere=True): 592 589 593 """ 594 write a points file, file_name, as a text (.csv) or binary (.pts) file 590 """write a points file as a text (.csv) or binary (.pts) file 591 595 592 file_name is the file name, including the extension 596 593 The point_dict is defined at the top of this file. … … 659 656 """ 660 657 661 # FIXME: add the geo_reference to this658 # FIXME: add the geo_reference to this 662 659 points = self.get_data_points() 663 660 sampled_points = num.take(points, indices) … … 679 676 # @note Points in each result object are selected randomly. 680 677 def split(self, factor=0.5, seed_num=None, verbose=False): 681 """Returns two 682 geospatial_data object, first is the size of the 'factor' 678 """Returns two geospatial_data object, first is the size of the 'factor' 683 679 smaller the original and the second is the remainder. The two 684 new object are disjoin setof each other.680 new objects are disjoint sets of each other. 685 681 686 682 Points of the two new object have selected RANDOMLY. 
… … 707 703 if verbose: print "make unique random number list and get indices" 708 704 709 total=num.array(range(self_size) , num.Int) #array default#705 total=num.array(range(self_size)) 710 706 total_list = total.tolist() 711 707 … … 731 727 random_num = random_num.tolist() 732 728 733 # need to sort and reverse so the pop() works correctly729 # need to sort and reverse so the pop() works correctly 734 730 random_num.sort() 735 731 random_num.reverse() … … 748 744 random_list.append(remainder_list.pop(i)) 749 745 j += 1 750 # prints progress746 # prints progress 751 747 if verbose and round(random_num_len/10*k) == j: 752 748 print '(%s/%s)' % (j, random_num_len) … … 779 775 """ 780 776 from Scientific.IO.NetCDF import NetCDFFile 781 # FIXME - what to do if the file isn't there777 # FIXME - what to do if the file isn't there 782 778 783 779 # FIXME (Ole): Shouldn't this go into the constructor? … … 941 937 data_points, 942 938 points_are_lats_longs): 943 """ 944 if the points has lat long info, assume it is in (lat, long) order. 945 """ 939 """If the points has lat long info, assume it is in (lat, long) order.""" 946 940 947 941 if geo_reference is not None: … … 1055 1049 header, 1056 1050 max_read_lines=1e30) 1057 # If the file is bigger that this, block..1051 # If the file is bigger that this, block.. 1058 1052 # FIXME (Ole) What's up here? 1059 1053 except ANUGAError: … … 1082 1076 """ 1083 1077 1084 line = file_pointer.readline() 1078 line = file_pointer.readline().strip() 1085 1079 header = clean_line(line, delimiter) 1086 1080 … … 1095 1089 # @param verbose True if this function is to be verbose. 1096 1090 # @note Will throw IndexError, SyntaxError exceptions. 1097 def _read_csv_file_blocking(file_pointer, header, 1091 def _read_csv_file_blocking(file_pointer, 1092 header, 1098 1093 delimiter=CSV_DELIMITER, 1099 1094 max_read_lines=MAX_READ_LINES, … … 1150 1145 raise StopIteration 1151 1146 1152 pointlist = num.array(points, num. Float)1147 pointlist = num.array(points, num.float) 1153 1148 for key in att_dict.keys(): 1154 att_dict[key] = num.array(att_dict[key], num. Float)1149 att_dict[key] = num.array(att_dict[key], num.float) 1155 1150 1156 1151 # Do stuff here so the info is in lat's and longs … … 1183 1178 # @note Will throw IOError and AttributeError exceptions. 1184 1179 def _read_pts_file_header(fid, verbose=False): 1185 """Read the geo_reference and number_of_points from a .pts file 1186 """ 1180 """Read the geo_reference and number_of_points from a .pts file""" 1187 1181 1188 1182 keys = fid.variables.keys() … … 1212 1206 # @return Tuple of (pointlist, attributes). 1213 1207 def _read_pts_file_blocking(fid, start_row, fin_row, keys): 1214 """Read the body of a .csv file. 1215 """ 1208 """Read the body of a .csv file.""" 1216 1209 1217 1210 pointlist = num.array(fid.variables['points'][start_row:fin_row]) … … 1239 1232 1240 1233 WARNING: This function mangles the point_atts data structure 1241 # F??ME: (DSG)This format has issues.1234 # F??ME: (DSG)This format has issues. 
1242 1235 # There can't be an attribute called points 1243 1236 # consider format change … … 1264 1257 shape = write_data_points.shape[0] 1265 1258 outfile.createDimension('number_of_points', shape) 1266 outfile.createDimension('number_of_dimensions', 2) # This is 2d data1259 outfile.createDimension('number_of_dimensions', 2) # This is 2d data 1267 1260 1268 1261 # Variable definition 1269 outfile.createVariable('points', n um.Float, ('number_of_points',1270 'number_of_dimensions'))1271 1272 # create variables1273 outfile.variables['points'][:] = write_data_points #.astype(Float32)1262 outfile.createVariable('points', netcdf_float, ('number_of_points', 1263 'number_of_dimensions')) 1264 1265 # create variables 1266 outfile.variables['points'][:] = write_data_points 1274 1267 1275 1268 if write_attributes is not None: 1276 1269 for key in write_attributes.keys(): 1277 outfile.createVariable(key, n um.Float, ('number_of_points',))1278 outfile.variables[key][:] = write_attributes[key] #.astype(Float32)1270 outfile.createVariable(key, netcdf_float, ('number_of_points',)) 1271 outfile.variables[key][:] = write_attributes[key] 1279 1272 1280 1273 if write_geo_reference is not None: … … 1296 1289 as_lat_long=False, 1297 1290 delimiter=','): 1298 """Write a .csv file. 1299 """ 1291 """Write a .csv file.""" 1300 1292 1301 1293 points = write_data_points … … 1361 1353 # @return ?? 1362 1354 def _point_atts2array(point_atts): 1363 point_atts['pointlist'] = num.array(point_atts['pointlist'], num. Float)1355 point_atts['pointlist'] = num.array(point_atts['pointlist'], num.float) 1364 1356 1365 1357 for key in point_atts['attributelist'].keys(): 1366 1358 point_atts['attributelist'][key] = \ 1367 num.array(point_atts['attributelist'][key], num. Float)1359 num.array(point_atts['attributelist'][key], num.float) 1368 1360 1369 1361 return point_atts … … 1375 1367 # @return A points dictionary. 1376 1368 def geospatial_data2points_dictionary(geospatial_data): 1377 """Convert geospatial data to points_dictionary 1378 """ 1369 """Convert geospatial data to points_dictionary""" 1379 1370 1380 1371 points_dictionary = {} … … 1396 1387 # @param points_dictionary A points dictionary to convert. 1397 1388 def points_dictionary2geospatial_data(points_dictionary): 1398 """Convert points_dictionary to geospatial data object 1399 """ 1389 """Convert points_dictionary to geospatial data object""" 1400 1390 1401 1391 msg = 'Points dictionary must have key pointlist' … … 1417 1407 ## 1418 1408 # @brief Split a string into 'clean' fields. 1419 # @param lineThe string to process.1409 # @param str The string to process. 1420 1410 # @param delimiter The delimiter string to split 'line' with. 1421 1411 # @return A list of 'cleaned' field strings. 1422 1412 # @note Any fields that were initially zero length will be removed. 1423 def clean_line(line,delimiter): 1424 """Remove whitespace 1425 """ 1426 1427 line = line.strip() # probably unnecessary RW 1428 numbers = line.split(delimiter) 1429 1430 i = len(numbers) - 1 1431 while i >= 0: 1432 if numbers[i] == '': 1433 numbers.pop(i) 1434 else: 1435 numbers[i] = numbers[i].strip() 1436 i += -1 1437 1438 return numbers 1413 # @note If a field contains '\n' it isn't zero length. 
1414 def clean_line(str, delimiter): 1415 """Split string on given delimiter, remove whitespace from each field.""" 1416 1417 return [x.strip() for x in str.split(delimiter) if x != ''] 1439 1418 1440 1419 … … 1462 1441 # Input check 1463 1442 if isinstance(points, basestring): 1464 # It's a string - assume it is a point file1443 # It's a string - assume it is a point file 1465 1444 points = Geospatial_data(file_name=points) 1466 1445 … … 1470 1449 assert geo_reference == None, msg 1471 1450 else: 1472 points = ensure_numeric(points, num. Float)1451 points = ensure_numeric(points, num.float) 1473 1452 1474 1453 # Sort of geo_reference and convert points 1475 1454 if geo_reference is None: 1476 geo = None # Geo_reference()1455 geo = None # Geo_reference() 1477 1456 else: 1478 1457 if isinstance(geo_reference, Geo_reference): … … 1493 1472 # @return A geospatial object. 1494 1473 def ensure_geospatial(points, geo_reference=None): 1495 """ 1496 This function inputs several formats and 1497 outputs one format. - a geospatial_data instance. 1474 """Convert various data formats to a geospatial_data instance. 1498 1475 1499 1476 Inputed formats are; … … 1514 1491 else: 1515 1492 # List or numeric array of absolute points 1516 points = ensure_numeric(points, num. Float)1493 points = ensure_numeric(points, num.float) 1517 1494 1518 1495 # Sort out geo reference … … 1563 1540 cache=False, 1564 1541 verbose=False): 1565 """ 1566 Removes a small random sample of points from 'data_file'. Then creates 1567 models with different alpha values from 'alpha_list' and cross validates 1568 the predicted value to the previously removed point data. Returns the 1569 alpha value which has the smallest covariance. 1542 """Removes a small random sample of points from 'data_file'. 1543 Then creates models with different alpha values from 'alpha_list' and 1544 cross validates the predicted value to the previously removed point data. 1545 Returns the alpha value which has the smallest covariance. 1570 1546 1571 1547 data_file: must not contain points outside the boundaries defined … … 1628 1604 if no_boundary is True: 1629 1605 msg = 'All boundaries must be defined' 1630 raise msg1606 raise Expection, msg 1631 1607 1632 1608 poly_topo = [[east_boundary,south_boundary], … … 1645 1621 1646 1622 else: # if mesh file provided 1647 # test mesh file exists?1623 # test mesh file exists? 1648 1624 if verbose: "reading from file: %s" % mesh_file 1649 1625 if access(mesh_file,F_OK) == 0: … … 1651 1627 raise IOError, msg 1652 1628 1653 # split topo data1629 # split topo data 1654 1630 if verbose: print 'Reading elevation file: %s' % data_file 1655 1631 G = Geospatial_data(file_name = data_file) … … 1668 1644 alphas = alpha_list 1669 1645 1670 # creates array with columns 1 and 2 are x, y. column 3 is elevation1671 # 4 onwards is the elevation_predicted using the alpha, which will1672 # be compared later against the real removed data1673 data = num.array([], typecode=num.Float)1646 # creates array with columns 1 and 2 are x, y. 
column 3 is elevation 1647 # 4 onwards is the elevation_predicted using the alpha, which will 1648 # be compared later against the real removed data 1649 data = num.array([], dtype=num.float) 1674 1650 1675 1651 data=num.resize(data, (len(points), 3+len(alphas))) 1676 1652 1677 # gets relative point from sample1653 # gets relative point from sample 1678 1654 data[:,0] = points[:,0] 1679 1655 data[:,1] = points[:,1] … … 1681 1657 data[:,2] = elevation_sample 1682 1658 1683 normal_cov=num.array(num.zeros([len(alphas), 2]), typecode=num.Float)1659 normal_cov=num.array(num.zeros([len(alphas), 2]), dtype=num.float) 1684 1660 1685 1661 if verbose: print 'Setup computational domains with different alphas' 1686 1662 1687 1663 for i, alpha in enumerate(alphas): 1688 # add G_other data to domains with different alphas1664 # add G_other data to domains with different alphas 1689 1665 if verbose: 1690 1666 print '\n Calculating domain and mesh for Alpha = ', alpha, '\n' … … 1700 1676 points_geo = Geospatial_data(points, domain.geo_reference) 1701 1677 1702 # returns the predicted elevation of the points that were "split" out1703 # of the original data set for one particular alpha1678 # returns the predicted elevation of the points that were "split" out 1679 # of the original data set for one particular alpha 1704 1680 if verbose: print 'Get predicted elevation for location to be compared' 1705 1681 elevation_predicted = \ … … 1707 1683 get_values(interpolation_points=points_geo) 1708 1684 1709 # add predicted elevation to array that starts with x, y, z...1685 # add predicted elevation to array that starts with x, y, z... 1710 1686 data[:,i+3] = elevation_predicted 1711 1687 … … 1834 1810 if no_boundary is True: 1835 1811 msg = 'All boundaries must be defined' 1836 raise msg1812 raise Expection, msg 1837 1813 1838 1814 poly_topo = [[east_boundary,south_boundary], … … 1851 1827 1852 1828 else: # if mesh file provided 1853 # test mesh file exists?1829 # test mesh file exists? 1854 1830 if access(mesh_file,F_OK) == 0: 1855 1831 msg = "file %s doesn't exist!" % mesh_file 1856 1832 raise IOError, msg 1857 1833 1858 # split topo data1834 # split topo data 1859 1835 G = Geospatial_data(file_name=data_file) 1860 1836 if verbose: print 'start split' … … 1863 1839 points = G_small.get_data_points() 1864 1840 1865 # FIXME: Remove points outside boundary polygon1841 # FIXME: Remove points outside boundary polygon 1866 1842 # print 'new point',len(points) 1867 1843 # 1868 1844 # new_points=[] 1869 # new_points=array([], typecode=Float)1845 # new_points=array([],dtype=float) 1870 1846 # new_points=resize(new_points,(len(points),2)) 1871 1847 # print "BOUNDARY", boundary_poly … … 1890 1866 1891 1867 for alpha in alphas: 1892 # add G_other data to domains with different alphas1868 # add G_other data to domains with different alphas 1893 1869 if verbose: 1894 1870 print '\n Calculating domain and mesh for Alpha = ', alpha, '\n' … … 1902 1878 domains[alpha] = domain 1903 1879 1904 # creates array with columns 1 and 2 are x, y. column 3 is elevation1905 # 4 onwards is the elevation_predicted using the alpha, which will1906 # be compared later against the real removed data1907 data = num.array([], typecode=num.Float)1880 # creates array with columns 1 and 2 are x, y. 
column 3 is elevation 1881 # 4 onwards is the elevation_predicted using the alpha, which will 1882 # be compared later against the real removed data 1883 data = num.array([], dtype=num.float) 1908 1884 1909 1885 data = num.resize(data, (len(points), 3+len(alphas))) 1910 1886 1911 # gets relative point from sample1887 # gets relative point from sample 1912 1888 data[:,0] = points[:,0] 1913 1889 data[:,1] = points[:,1] … … 1915 1891 data[:,2] = elevation_sample 1916 1892 1917 normal_cov = num.array(num.zeros([len(alphas), 2]), typecode=num.Float)1893 normal_cov = num.array(num.zeros([len(alphas), 2]), dtype=num.float) 1918 1894 1919 1895 if verbose: … … 1923 1899 1924 1900 points_geo = domains[alpha].geo_reference.change_points_geo_ref(points) 1925 # returns the predicted elevation of the points that were "split" out1926 # of the original data set for one particular alpha1901 # returns the predicted elevation of the points that were "split" out 1902 # of the original data set for one particular alpha 1927 1903 elevation_predicted = \ 1928 1904 domains[alpha].quantities[attribute_smoothed].\ 1929 1905 get_values(interpolation_points=points_geo) 1930 1906 1931 # add predicted elevation to array that starts with x, y, z...1907 # add predicted elevation to array that starts with x, y, z... 1932 1908 data[:,i+3] = elevation_predicted 1933 1909 -
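clean_line() is rewritten above as a single list comprehension: split on the delimiter, drop fields that are empty before stripping, then strip the survivors. A trailing '\n' is not an empty field, which is why the caller now does readline().strip() first. The behaviour restated with a couple of illustrative calls (the parameter is renamed here to avoid shadowing the built-in str):

    def clean_line(line, delimiter):
        """Split on 'delimiter', strip whitespace from each surviving field."""
        return [x.strip() for x in line.split(delimiter) if x != '']

    assert clean_line(' x, y , elevation', ',') == ['x', 'y', 'elevation']

    # only fields that are empty *before* stripping are dropped
    assert clean_line('x,y,\n', ',') == ['x', 'y', '']
    assert clean_line('x,y,\n'.strip(), ',') == ['x', 'y']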
branches/numpy/anuga/geospatial_data/test_geospatial_data.py
r6153 r6304 1 1 #!/usr/bin/env python 2 3 2 4 3 import unittest 5 4 import os 5 import tempfile 6 6 from math import sqrt, pi 7 import tempfile8 7 from sets import ImmutableSet 9 8 10 import Numericas num9 import numpy as num 11 10 12 11 from anuga.geospatial_data.geospatial_data import * … … 16 15 from anuga.utilities.system_tools import get_host_name 17 16 from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a 17 from anuga.config import netcdf_float 18 18 19 19 20 class Test_Geospatial_data(unittest.TestCase): … … 24 25 pass 25 26 26 27 27 def test_0(self): 28 28 #Basic points 29 29 from anuga.coordinate_transforms.geo_reference import Geo_reference 30 30 31 31 points = [[1.0, 2.1], [3.0, 5.3]] 32 32 G = Geospatial_data(points) 33 34 33 assert num.allclose(G.data_points, [[1.0, 2.1], [3.0, 5.3]]) 35 34 … … 38 37 rep = `G` 39 38 ref = '[[ 1. 2.1]\n [ 3. 5.3]]' 40 41 msg = 'Representation %s is not equal to %s' %(rep, ref) 39 msg = 'Representation %s is not equal to %s' % (rep, ref) 42 40 assert rep == ref, msg 43 41 44 42 #Check getter 45 43 assert num.allclose(G.get_data_points(), [[1.0, 2.1], [3.0, 5.3]]) 46 44 47 45 #Check defaults 48 46 assert G.attributes is None 49 50 47 assert G.geo_reference.zone == Geo_reference().zone 51 48 assert G.geo_reference.xllcorner == Geo_reference().xllcorner 52 49 assert G.geo_reference.yllcorner == Geo_reference().yllcorner 53 54 50 55 51 def test_1(self): 56 52 points = [[1.0, 2.1], [3.0, 5.3]] 57 53 attributes = [2, 4] 58 G = Geospatial_data(points, attributes) 54 G = Geospatial_data(points, attributes) 59 55 assert G.attributes.keys()[0] == DEFAULT_ATTRIBUTE 60 56 assert num.allclose(G.attributes.values()[0], [2, 4]) 61 62 57 63 58 def test_2(self): 64 59 from anuga.coordinate_transforms.geo_reference import Geo_reference 60 65 61 points = [[1.0, 2.1], [3.0, 5.3]] 66 62 attributes = [2, 4] … … 72 68 assert G.geo_reference.yllcorner == 200 73 69 74 75 70 def test_get_attributes_1(self): 76 71 from anuga.coordinate_transforms.geo_reference import Geo_reference 72 77 73 points = [[1.0, 2.1], [3.0, 5.3]] 78 74 attributes = [2, 4] … … 80 76 geo_reference=Geo_reference(56, 100, 200)) 81 77 82 83 78 P = G.get_data_points(absolute=False) 84 assert num.allclose(P, [[1.0, 2.1], [3.0, 5.3]]) 79 assert num.allclose(P, [[1.0, 2.1], [3.0, 5.3]]) 85 80 86 81 P = G.get_data_points(absolute=True) 87 assert num.allclose(P, [[101.0, 202.1], [103.0, 205.3]]) 82 assert num.allclose(P, [[101.0, 202.1], [103.0, 205.3]]) 88 83 89 84 V = G.get_attributes() #Simply get them … … 95 90 def test_get_attributes_2(self): 96 91 #Multiple attributes 97 98 99 92 from anuga.coordinate_transforms.geo_reference import Geo_reference 93 100 94 points = [[1.0, 2.1], [3.0, 5.3]] 101 95 attributes = {'a0': [0, 0], 'a1': [2, 4], 'a2': [79.4, -7]} … … 104 98 default_attribute_name='a1') 105 99 106 107 100 P = G.get_data_points(absolute=False) 108 assert num.allclose(P, [[1.0, 2.1], [3.0, 5.3]]) 109 101 assert num.allclose(P, [[1.0, 2.1], [3.0, 5.3]]) 102 110 103 V = G.get_attributes() #Get default attribute 111 104 assert num.allclose(V, [2, 4]) … … 125 118 pass 126 119 else: 127 raise 'Should have raised exception'120 raise Exception, 'Should have raised exception' 128 121 129 122 def test_get_data_points(self): 130 points_ab = [[12.5, 34.7],[-4.5,-60.0]]123 points_ab = [[12.5, 34.7], [-4.5, -60.0]] 131 124 x_p = -10 132 125 y_p = -40 133 126 geo_ref = Geo_reference(56, x_p, y_p) 134 127 points_rel = geo_ref.change_points_geo_ref(points_ab) 135 128 136 129 spatial = 
Geospatial_data(points_rel, geo_reference=geo_ref) 137 138 130 results = spatial.get_data_points(absolute=False) 139 140 131 assert num.allclose(results, points_rel) 141 132 142 133 x_p = -1770 143 134 y_p = 4.01 144 135 geo_ref = Geo_reference(56, x_p, y_p) 145 136 points_rel = geo_ref.change_points_geo_ref(points_ab) 146 results = spatial.get_data_points \ 147 ( geo_reference=geo_ref) 148 137 results = spatial.get_data_points(geo_reference=geo_ref) 138 149 139 assert num.allclose(results, points_rel) 150 140 151 152 141 def test_get_data_points_lat_long(self): 153 # lat long [-30.],[130] 154 #Zone: 52 155 #Easting: 596450.153 Northing: 6680793.777 156 # lat long [-32.],[131] 157 #Zone: 52 158 #Easting: 688927.638 Northing: 6457816.509 159 160 points_Lat_long = [[-30.,130], [-32,131]] 161 162 spatial = Geospatial_data(latitudes=[-30, -32.], 163 longitudes=[130, 131]) 164 142 # lat long [-30.], [130] 143 # Zone: 52 144 # Easting: 596450.153 Northing: 6680793.777 145 # lat long [-32.], [131] 146 # Zone: 52 147 # Easting: 688927.638 Northing: 6457816.509 148 149 points_Lat_long = [[-30., 130], [-32, 131]] 150 151 spatial = Geospatial_data(latitudes=[-30, -32.], longitudes=[130, 131]) 165 152 results = spatial.get_data_points(as_lat_long=True) 166 #print "test_get_data_points_lat_long - results", results167 #print "points_Lat_long",points_Lat_long168 153 assert num.allclose(results, points_Lat_long) 169 154 170 155 def test_get_data_points_lat_longII(self): 171 156 # x,y North,east long,lat 172 157 boundary_polygon = [[ 250000, 7630000]] 173 158 zone = 50 174 159 175 160 geo_reference = Geo_reference(zone=zone) 176 geo = Geospatial_data(boundary_polygon ,geo_reference=geo_reference)161 geo = Geospatial_data(boundary_polygon ,geo_reference=geo_reference) 177 162 seg_lat_long = geo.get_data_points(as_lat_long=True) 178 lat_result = degminsec2decimal_degrees(-21,24,54) 179 long_result = degminsec2decimal_degrees(114,35,17.89) 180 #print "seg_lat_long", seg_lat_long [0][0] 181 #print "lat_result",lat_result 163 lat_result = degminsec2decimal_degrees(-21, 24, 54) 164 long_result = degminsec2decimal_degrees(114, 35, 17.89) 165 assert num.allclose(seg_lat_long[0][0], lat_result) #lat 166 assert num.allclose(seg_lat_long[0][1], long_result) #long 167 168 def test_get_data_points_lat_longIII(self): 169 # x,y North,east long,lat 170 # for northern hemisphere 171 boundary_polygon = [[419944.8, 918642.4]] 172 zone = 47 173 174 geo_reference = Geo_reference(zone=zone) 175 geo = Geospatial_data(boundary_polygon, geo_reference=geo_reference) 176 seg_lat_long = geo.get_data_points(as_lat_long=True, 177 isSouthHemisphere=False) 178 lat_result = degminsec2decimal_degrees(8.31, 0, 0) 179 long_result = degminsec2decimal_degrees(98.273, 0, 0) 182 180 assert num.allclose(seg_lat_long[0][0], lat_result)#lat 183 181 assert num.allclose(seg_lat_long[0][1], long_result)#long 184 182 185 186 def test_get_data_points_lat_longIII(self):187 # x,y North,east long,lat188 #for northern hemisphere189 boundary_polygon = [[419944.8, 918642.4]]190 zone = 47191 192 geo_reference = Geo_reference(zone=zone)193 geo = Geospatial_data(boundary_polygon,194 geo_reference=geo_reference)195 196 seg_lat_long = geo.get_data_points(as_lat_long=True,197 isSouthHemisphere=False)198 199 lat_result = degminsec2decimal_degrees(8.31,0,0)200 long_result = degminsec2decimal_degrees(98.273,0,0)201 #print "seg_lat_long", seg_lat_long [0]202 #print "lat_result",lat_result203 assert num.allclose(seg_lat_long[0][0], lat_result)#lat204 assert 
num.allclose(seg_lat_long[0][1], long_result)#long205 206 207 208 183 def test_set_geo_reference(self): 209 """test_set_georeference210 211 Test that georeference can be changed without changing the 184 '''test_set_georeference 185 186 Test that georeference can be changed without changing the 212 187 absolute values. 213 """214 215 points_ab = [[12.5, 34.7],[-4.5,-60.0]]188 ''' 189 190 points_ab = [[12.5, 34.7], [-4.5, -60.0]] 216 191 x_p = -10 217 192 y_p = -40 218 193 geo_ref = Geo_reference(56, x_p, y_p) 219 194 points_rel = geo_ref.change_points_geo_ref(points_ab) 220 195 221 196 # Create without geo_ref properly set 222 G = Geospatial_data(points_rel) 197 G = Geospatial_data(points_rel) 223 198 assert not num.allclose(points_ab, G.get_data_points(absolute=True)) 224 199 225 200 # Create the way it should be 226 201 G = Geospatial_data(points_rel, geo_reference=geo_ref) 227 202 assert num.allclose(points_ab, G.get_data_points(absolute=True)) 228 203 229 204 # Change georeference and check that absolute values are unchanged. 230 205 x_p = 10 … … 232 207 new_geo_ref = Geo_reference(56, x_p, y_p) 233 208 G.set_geo_reference(new_geo_ref) 234 assert num.allclose(points_ab, G.get_data_points(absolute=True)) 235 236 237 238 209 msg = ('points_ab=%s, but\nG.get_data_points(absolute=True)=%s' 210 % (str(points_ab), str(G.get_data_points(absolute=True)))) 211 assert num.allclose(points_ab, G.get_data_points(absolute=True)), msg 212 239 213 def test_conversions_to_points_dict(self): 240 214 #test conversions to points_dict 241 242 243 215 from anuga.coordinate_transforms.geo_reference import Geo_reference 216 244 217 points = [[1.0, 2.1], [3.0, 5.3]] 245 218 attributes = {'a0': [0, 0], 'a1': [2, 4], 'a2': [79.4, -7]} … … 248 221 default_attribute_name='a1') 249 222 250 251 223 points_dict = geospatial_data2points_dictionary(G) 252 224 253 225 assert points_dict.has_key('pointlist') 254 assert points_dict.has_key('attributelist') 226 assert points_dict.has_key('attributelist') 255 227 assert points_dict.has_key('geo_reference') 256 228 … … 260 232 assert A.has_key('a0') 261 233 assert A.has_key('a1') 262 assert A.has_key('a2') 234 assert A.has_key('a2') 263 235 264 236 assert num.allclose( A['a0'], [0, 0] ) 265 assert num.allclose( A['a1'], [2, 4] ) 237 assert num.allclose( A['a1'], [2, 4] ) 266 238 assert num.allclose( A['a2'], [79.4, -7] ) 267 268 239 269 240 geo = points_dict['geo_reference'] 270 241 assert geo is G.geo_reference 271 242 272 273 243 def test_conversions_from_points_dict(self): 274 """test conversions from points_dict 275 """ 244 '''test conversions from points_dict''' 276 245 277 246 from anuga.coordinate_transforms.geo_reference import Geo_reference 278 247 279 248 points = [[1.0, 2.1], [3.0, 5.3]] 280 249 attributes = {'a0': [0, 0], 'a1': [2, 4], 'a2': [79.4, -7]} … … 284 253 points_dict['attributelist'] = attributes 285 254 points_dict['geo_reference'] = Geo_reference(56, 100, 200) 286 287 255 288 256 G = points_dictionary2geospatial_data(points_dict) 289 290 257 P = G.get_data_points(absolute=False) 291 assert num.allclose(P, [[1.0, 2.1], [3.0, 5.3]]) 292 293 #V = G.get_attribute_values() #Get default attribute 294 #assert allclose(V, [2, 4]) 258 assert num.allclose(P, [[1.0, 2.1], [3.0, 5.3]]) 295 259 296 260 V = G.get_attributes('a0') #Get by name … … 304 268 305 269 def test_add(self): 306 """ test the addition of two geospatical objects 307 no geo_reference see next test 308 """ 270 '''test the addition of two geospatical objects 271 no geo_reference see next test 272 
''' 273 309 274 points = [[1.0, 2.1], [3.0, 5.3]] 310 attributes = {'depth':[2, 4], 'elevation':[6.1, 5]} 311 attributes1 = {'depth':[2, 4], 'elevation':[2.5, 1]} 312 G1 = Geospatial_data(points, attributes) 313 G2 = Geospatial_data(points, attributes1) 314 315 # g3 = geospatial_data2points_dictionary(G1) 316 # print 'g3=', g3 317 275 attributes = {'depth': [2, 4], 'elevation': [6.1, 5]} 276 attributes1 = {'depth': [2, 4], 'elevation': [2.5, 1]} 277 G1 = Geospatial_data(points, attributes) 278 G2 = Geospatial_data(points, attributes1) 279 318 280 G = G1 + G2 319 281 … … 326 288 327 289 def test_addII(self): 328 """ test the addition of two geospatical objects 329 no geo_reference see next test 330 """ 290 '''test the addition of two geospatical objects 291 no geo_reference see next test 292 ''' 293 331 294 points = [[1.0, 2.1], [3.0, 5.3]] 332 attributes = {'depth': [2, 4]}333 G1 = Geospatial_data(points, attributes) 334 295 attributes = {'depth': [2, 4]} 296 G1 = Geospatial_data(points, attributes) 297 335 298 points = [[5.0, 2.1], [3.0, 50.3]] 336 attributes = {'depth': [200, 400]}299 attributes = {'depth': [200, 400]} 337 300 G2 = Geospatial_data(points, attributes) 338 339 # g3 = geospatial_data2points_dictionary(G1) 340 # print 'g3=', g3 341 301 342 302 G = G1 + G2 343 303 344 assert G.attributes.has_key('depth') 304 assert G.attributes.has_key('depth') 345 305 assert G.attributes.keys(), ['depth'] 346 306 assert num.allclose(G.attributes['depth'], [2, 4, 200, 400]) 347 307 assert num.allclose(G.get_data_points(), [[1.0, 2.1], [3.0, 5.3], 348 308 [5.0, 2.1], [3.0, 50.3]]) 309 349 310 def test_add_with_geo (self): 350 """ 351 Difference in Geo_reference resolved 352 """ 311 '''Difference in Geo_reference resolved''' 312 353 313 points1 = [[1.0, 2.1], [3.0, 5.3]] 354 314 points2 = [[5.0, 6.1], [6.0, 3.3]] … … 362 322 projection='UTM', 363 323 units='m') 364 324 365 325 G1 = Geospatial_data(points1, attributes1, geo_ref1) 366 326 G2 = Geospatial_data(points2, attributes2, geo_ref2) … … 371 331 372 332 P2 = G2.get_data_points(absolute=True) 373 assert num.allclose(P2, [[5.1, 9.1], [6.1, 6.3]]) 374 333 assert num.allclose(P2, [[5.1, 9.1], [6.1, 6.3]]) 334 375 335 G = G1 + G2 376 336 … … 381 341 P = G.get_data_points(absolute=True) 382 342 383 #P_relative = G.get_data_points(absolute=False)384 #385 #assert allclose(P_relative, P - [0.1, 2.0])386 387 343 assert num.allclose(P, num.concatenate( (P1,P2) )) 344 msg = 'P=%s' % str(P) 388 345 assert num.allclose(P, [[2.0, 4.1], [4.0, 7.3], 389 [5.1, 9.1], [6.1, 6.3]]) 390 391 392 393 346 [5.1, 9.1], [6.1, 6.3]]), msg 394 347 395 348 def test_add_with_geo_absolute (self): 396 """ 397 Difference in Geo_reference resolved 398 """ 349 '''Difference in Geo_reference resolved''' 350 399 351 points1 = num.array([[2.0, 4.1], [4.0, 7.3]]) 400 points2 = num.array([[5.1, 9.1], [6.1, 6.3]]) 352 points2 = num.array([[5.1, 9.1], [6.1, 6.3]]) 401 353 attributes1 = [2, 4] 402 354 attributes2 = [5, 76] … … 404 356 geo_ref2 = Geo_reference(55, 2.0, 3.0) 405 357 406 407 408 G1 = Geospatial_data(points1 - [geo_ref1.get_xllcorner(), geo_ref1.get_yllcorner()], 358 G1 = Geospatial_data(points1 - [geo_ref1.get_xllcorner(), 359 geo_ref1.get_yllcorner()], 409 360 attributes1, geo_ref1) 410 411 G2 = Geospatial_data(points2 - [geo_ref2.get_xllcorner(), geo_ref2.get_yllcorner()], 361 362 G2 = Geospatial_data(points2 - [geo_ref2.get_xllcorner(), 363 geo_ref2.get_yllcorner()], 412 364 attributes2, geo_ref2) 413 365 … … 417 369 418 370 P1 = G1.get_data_points(absolute=False) 
419 assert num.allclose(P1, points1 - [geo_ref1.get_xllcorner(), geo_ref1.get_yllcorner()]) 371 msg = ('P1=%s, but\npoints1 - <...>=%s' 372 % (str(P1), 373 str(points1 - [geo_ref1.get_xllcorner(), 374 geo_ref1.get_yllcorner()]))) 375 assert num.allclose(P1, points1 - [geo_ref1.get_xllcorner(), 376 geo_ref1.get_yllcorner()]), msg 420 377 421 378 P2 = G2.get_data_points(absolute=True) … … 423 380 424 381 P2 = G2.get_data_points(absolute=False) 425 assert num.allclose(P2, points2 - [geo_ref2.get_xllcorner(), geo_ref2.get_yllcorner()]) 426 382 assert num.allclose(P2, points2 - [geo_ref2.get_xllcorner(), 383 geo_ref2.get_yllcorner()]) 384 427 385 G = G1 + G2 428 429 #assert allclose(G.get_geo_reference().get_xllcorner(), 1.0)430 #assert allclose(G.get_geo_reference().get_yllcorner(), 2.0)431 432 386 P = G.get_data_points(absolute=True) 433 387 434 #P_relative = G.get_data_points(absolute=False) 435 # 436 #assert allclose(P_relative, [[1.0, 2.1], [3.0, 5.3], [4.1, 7.1], [5.1, 4.3]]) 437 438 assert num.allclose(P, num.concatenate( (points1,points2) )) 439 388 assert num.allclose(P, num.concatenate((points1, points2))) 440 389 441 390 def test_add_with_None(self): 442 """ test that None can be added to a geospatical objects 443 """ 444 391 '''test that None can be added to a geospatical objects''' 392 445 393 points1 = num.array([[2.0, 4.1], [4.0, 7.3]]) 446 points2 = num.array([[5.1, 9.1], [6.1, 6.3]]) 394 points2 = num.array([[5.1, 9.1], [6.1, 6.3]]) 447 395 448 396 geo_ref1= Geo_reference(55, 1.0, 2.0) … … 453 401 projection='UTM', 454 402 units='m') 455 456 457 attributes1 = {'depth':[2, 4.7], 'elevation':[6.1, 5]} 458 attributes2 = {'depth':[-2.3, 4], 'elevation':[2.5, 1]} 459 403 404 attributes1 = {'depth': [2, 4.7], 'elevation': [6.1, 5]} 405 attributes2 = {'depth': [-2.3, 4], 'elevation': [2.5, 1]} 460 406 461 407 G1 = Geospatial_data(points1, attributes1, geo_ref1) … … 465 411 assert G1.attributes.has_key('elevation') 466 412 assert num.allclose(G1.attributes['depth'], [2, 4.7]) 467 assert num.allclose(G1.attributes['elevation'], [6.1, 5]) 468 413 assert num.allclose(G1.attributes['elevation'], [6.1, 5]) 414 469 415 G2 = Geospatial_data(points2, attributes2, geo_ref2) 470 416 assert num.allclose(G2.get_geo_reference().get_xllcorner(), 0.1) … … 473 419 assert G2.attributes.has_key('elevation') 474 420 assert num.allclose(G2.attributes['depth'], [-2.3, 4]) 475 assert num.allclose(G2.attributes['elevation'], [2.5, 1]) 421 assert num.allclose(G2.attributes['elevation'], [2.5, 1]) 476 422 477 423 #Check that absolute values are as expected … … 480 426 481 427 P2 = G2.get_data_points(absolute=True) 482 assert num.allclose(P2, [[5.2, 12.1], [6.2, 9.3]]) 428 assert num.allclose(P2, [[5.2, 12.1], [6.2, 9.3]]) 483 429 484 430 # Normal add 485 431 G = G1 + None 486 487 432 assert G.attributes.has_key('depth') 488 433 assert G.attributes.has_key('elevation') 489 434 assert num.allclose(G.attributes['depth'], [2, 4.7]) 490 assert num.allclose(G.attributes['elevation'], [6.1, 5]) 435 assert num.allclose(G.attributes['elevation'], [6.1, 5]) 491 436 492 437 # Points are now absolute. 
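# ---------------------------------------------------------------------------
# Illustrative sketch (editor's annotation, not a line of this changeset).
# The addition tests around this hunk (test_add_with_geo,
# test_add_with_geo_absolute, test_add_with_None) all exercise the same
# behaviour: adding two Geospatial_data objects reconciles their differing
# Geo_reference offsets, and the sum exposes the concatenated points in
# absolute coordinates.  The calls below are taken from those tests; the
# point values, offsets and expected results are invented for illustration.
import numpy as num
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.coordinate_transforms.geo_reference import Geo_reference

g1 = Geospatial_data([[1.0, 2.1], [3.0, 5.3]], {'depth': [2, 4]},
                     Geo_reference(56, 1.0, 2.0))    # offset (1.0, 2.0)
g2 = Geospatial_data([[5.0, 6.1], [6.0, 3.3]], {'depth': [5, 76]},
                     Geo_reference(56, 2.0, 3.0))    # offset (2.0, 3.0)

g = g1 + g2    # geo references are resolved as part of the addition

# Absolute points are the two absolute point sets concatenated, and the
# attributes follow their points.
assert num.allclose(g.get_data_points(absolute=True),
                    [[2.0, 4.1], [4.0, 7.3], [7.0, 9.1], [8.0, 6.3]])
assert num.allclose(g.get_attributes('depth'), [2, 4, 5, 76])
# ---------------------------------------------------------------------------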
493 438 assert num.allclose(G.get_geo_reference().get_xllcorner(), 0.0) 494 439 assert num.allclose(G.get_geo_reference().get_yllcorner(), 0.0) 495 P = G.get_data_points(absolute=True) 496 assert num.allclose(P, [[3.0, 6.1], [5.0, 9.3]])497 440 P = G.get_data_points(absolute=True) 441 msg = 'P=%s' % str(P) 442 assert num.allclose(P, [[3.0, 6.1], [5.0, 9.3]]), msg 498 443 499 444 G = G2 + None … … 501 446 assert G.attributes.has_key('elevation') 502 447 assert num.allclose(G.attributes['depth'], [-2.3, 4]) 503 assert num.allclose(G.attributes['elevation'], [2.5, 1]) 504 448 assert num.allclose(G.attributes['elevation'], [2.5, 1]) 505 449 assert num.allclose(G.get_geo_reference().get_xllcorner(), 0.0) 506 450 assert num.allclose(G.get_geo_reference().get_yllcorner(), 0.0) 507 P = G.get_data_points(absolute=True) 451 452 P = G.get_data_points(absolute=True) 508 453 assert num.allclose(P, [[5.2, 12.1], [6.2, 9.3]]) 509 510 511 454 512 455 # Reverse add 513 456 G = None + G1 514 515 457 assert G.attributes.has_key('depth') 516 458 assert G.attributes.has_key('elevation') 517 459 assert num.allclose(G.attributes['depth'], [2, 4.7]) 518 assert num.allclose(G.attributes['elevation'], [6.1, 5]) 460 assert num.allclose(G.attributes['elevation'], [6.1, 5]) 519 461 520 462 # Points are now absolute. 521 463 assert num.allclose(G.get_geo_reference().get_xllcorner(), 0.0) 522 464 assert num.allclose(G.get_geo_reference().get_yllcorner(), 0.0) 523 P = G.get_data_points(absolute=True) 524 assert num.allclose(P, [[3.0, 6.1], [5.0, 9.3]])525 465 466 P = G.get_data_points(absolute=True) 467 assert num.allclose(P, [[3.0, 6.1], [5.0, 9.3]]) 526 468 527 469 G = None + G2 … … 529 471 assert G.attributes.has_key('elevation') 530 472 assert num.allclose(G.attributes['depth'], [-2.3, 4]) 531 assert num.allclose(G.attributes['elevation'], [2.5, 1]) 473 assert num.allclose(G.attributes['elevation'], [2.5, 1]) 532 474 533 475 assert num.allclose(G.get_geo_reference().get_xllcorner(), 0.0) 534 476 assert num.allclose(G.get_geo_reference().get_yllcorner(), 0.0) 535 P = G.get_data_points(absolute=True) 477 478 P = G.get_data_points(absolute=True) 536 479 assert num.allclose(P, [[5.2, 12.1], [6.2, 9.3]]) 537 480 538 539 540 541 542 543 481 def test_clip0(self): 544 """test_clip0(self):545 482 '''test_clip0(self): 483 546 484 Test that point sets can be clipped by a polygon 547 """548 485 ''' 486 549 487 from anuga.coordinate_transforms.geo_reference import Geo_reference 550 488 551 489 points = [[-1, 4], [0.2, 0.5], [1.0, 2.1], [0.4, 0.3], [3.0, 5.3], 552 490 [0, 0], [2.4, 3.3]] 553 491 G = Geospatial_data(points) 554 492 555 # First try the unit square 556 U = [[0,0], [1,0], [1,1], [0,1]] 557 assert num.allclose(G.clip(U).get_data_points(), [[0.2, 0.5], [0.4, 0.3], [0, 0]]) 493 # First try the unit square 494 U = [[0,0], [1,0], [1,1], [0,1]] 495 assert num.allclose(G.clip(U).get_data_points(), 496 [[0.2, 0.5], [0.4, 0.3], [0, 0]]) 558 497 559 498 # Then a more complex polygon 560 499 polygon = [[0,0], [1,0], [0.5,-1], [2, -1], [2,1], [0,1]] 561 points = [ [0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], [0.5, 1.5], [0.5, -0.5]] 500 points = [[0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], 501 [0.5, 1.5], [0.5, -0.5]] 562 502 G = Geospatial_data(points) 563 503 564 504 assert num.allclose(G.clip(polygon).get_data_points(), 565 [[0.5, 0.5], [1, -0.5], [1.5, 0]])505 [[0.5, 0.5], [1, -0.5], [1.5, 0]]) 566 506 567 507 def test_clip0_with_attributes(self): 568 """test_clip0_with_attributes(self):569 508 
'''test_clip0_with_attributes(self): 509 570 510 Test that point sets with attributes can be clipped by a polygon 571 """572 511 ''' 512 573 513 from anuga.coordinate_transforms.geo_reference import Geo_reference 574 514 575 515 points = [[-1, 4], [0.2, 0.5], [1.0, 2.1], [0.4, 0.3], [3.0, 5.3], 576 516 [0, 0], [2.4, 3.3]] … … 579 519 att_dict = {'att1': attributes, 580 520 'att2': num.array(attributes)+1} 581 521 582 522 G = Geospatial_data(points, att_dict) 583 523 584 # First try the unit square 585 U = [[0,0], [1,0], [1,1], [0,1]] 586 assert num.allclose(G.clip(U).get_data_points(), [[0.2, 0.5], [0.4, 0.3], [0, 0]]) 524 # First try the unit square 525 U = [[0,0], [1,0], [1,1], [0,1]] 526 assert num.allclose(G.clip(U).get_data_points(), 527 [[0.2, 0.5], [0.4, 0.3], [0, 0]]) 587 528 assert num.allclose(G.clip(U).get_attributes('att1'), [-4, 76, 0.1]) 588 assert num.allclose(G.clip(U).get_attributes('att2'), [-3, 77, 1.1]) 529 assert num.allclose(G.clip(U).get_attributes('att2'), [-3, 77, 1.1]) 589 530 590 531 # Then a more complex polygon 591 532 polygon = [[0,0], [1,0], [0.5,-1], [2, -1], [2,1], [0,1]] 592 points = [ [0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], [0.5, 1.5], [0.5, -0.5]] 533 points = [[0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], 534 [0.5, 1.5], [0.5, -0.5]] 593 535 594 536 # This time just one attribute 595 537 attributes = [2, -4, 5, 76, -2, 0.1] 596 538 G = Geospatial_data(points, attributes) 597 598 539 assert num.allclose(G.clip(polygon).get_data_points(), 599 [[0.5, 0.5], [1, -0.5], [1.5, 0]])540 [[0.5, 0.5], [1, -0.5], [1.5, 0]]) 600 541 assert num.allclose(G.clip(polygon).get_attributes(), [-4, 5, 76]) 601 602 542 603 543 def test_clip1(self): 604 """test_clip1(self):605 544 '''test_clip1(self): 545 606 546 Test that point sets can be clipped by a polygon given as 607 547 another Geospatial dataset 608 """609 548 ''' 549 610 550 from anuga.coordinate_transforms.geo_reference import Geo_reference 611 551 612 552 points = [[-1, 4], [0.2, 0.5], [1.0, 2.1], [0.4, 0.3], [3.0, 5.3], 613 553 [0, 0], [2.4, 3.3]] … … 616 556 'att2': num.array(attributes)+1} 617 557 G = Geospatial_data(points, att_dict) 618 619 # First try the unit square 620 U = Geospatial_data([[0,0], [1,0], [1,1], [0,1]]) 558 559 # First try the unit square 560 U = Geospatial_data([[0,0], [1,0], [1,1], [0,1]]) 621 561 assert num.allclose(G.clip(U).get_data_points(), 622 [[0.2, 0.5], [0.4, 0.3], [0, 0]]) 623 562 [[0.2, 0.5], [0.4, 0.3], [0, 0]]) 624 563 assert num.allclose(G.clip(U).get_attributes('att1'), [-4, 76, 0.1]) 625 assert num.allclose(G.clip(U).get_attributes('att2'), [-3, 77, 1.1]) 626 564 assert num.allclose(G.clip(U).get_attributes('att2'), [-3, 77, 1.1]) 565 627 566 # Then a more complex polygon 628 points = [ [0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], [0.5, 1.5], [0.5, -0.5]] 629 attributes = [2, -4, 5, 76, -2, 0.1] 567 points = [[0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], 568 [0.5, 1.5], [0.5, -0.5]] 569 attributes = [2, -4, 5, 76, -2, 0.1] 630 570 G = Geospatial_data(points, attributes) 631 polygon = Geospatial_data([[0,0], [1,0], [0.5,-1], [2, -1], [2,1], [0,1]])632 571 polygon = Geospatial_data([[0,0], [1,0], [0.5,-1], [2, -1], 572 [2,1], [0,1]]) 633 573 634 574 assert num.allclose(G.clip(polygon).get_data_points(), 635 575 [[0.5, 0.5], [1, -0.5], [1.5, 0]]) 636 576 assert num.allclose(G.clip(polygon).get_attributes(), [-4, 5, 76]) 637 638 577 639 578 def test_clip0_outside(self): 640 """test_clip0_outside(self):641 579 '''test_clip0_outside(self): 580 642 581 Test that point sets can 
be clipped outside of a polygon 643 """644 582 ''' 583 645 584 from anuga.coordinate_transforms.geo_reference import Geo_reference 646 585 647 586 points = [[-1, 4], [0.2, 0.5], [1.0, 2.1], [0.4, 0.3], [3.0, 5.3], 648 587 [0, 0], [2.4, 3.3]] 649 attributes = [2, -4, 5, 76, -2, 0.1, 3] 588 attributes = [2, -4, 5, 76, -2, 0.1, 3] 650 589 G = Geospatial_data(points, attributes) 651 590 652 # First try the unit square 591 # First try the unit square 653 592 U = [[0,0], [1,0], [1,1], [0,1]] 654 593 assert num.allclose(G.clip_outside(U).get_data_points(), 655 594 [[-1, 4], [1.0, 2.1], [3.0, 5.3], [2.4, 3.3]]) 656 #print G.clip_outside(U).get_attributes() 657 assert num.allclose(G.clip_outside(U).get_attributes(), [2, 5, -2, 3]) 658 595 assert num.allclose(G.clip_outside(U).get_attributes(), [2, 5, -2, 3]) 659 596 660 597 # Then a more complex polygon 661 598 polygon = [[0,0], [1,0], [0.5,-1], [2, -1], [2,1], [0,1]] 662 points = [ [0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], [0.5, 1.5], [0.5, -0.5]] 663 attributes = [2, -4, 5, 76, -2, 0.1] 599 points = [[0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], 600 [0.5, 1.5], [0.5, -0.5]] 601 attributes = [2, -4, 5, 76, -2, 0.1] 664 602 G = Geospatial_data(points, attributes) 665 666 603 assert num.allclose(G.clip_outside(polygon).get_data_points(), 667 604 [[0.5, 1.4], [0.5, 1.5], [0.5, -0.5]]) 668 assert num.allclose(G.clip_outside(polygon).get_attributes(), [2, -2, 0.1])669 605 assert num.allclose(G.clip_outside(polygon).get_attributes(), 606 [2, -2, 0.1]) 670 607 671 608 def test_clip1_outside(self): 672 """test_clip1_outside(self):673 609 '''test_clip1_outside(self): 610 674 611 Test that point sets can be clipped outside of a polygon given as 675 612 another Geospatial dataset 676 """677 613 ''' 614 678 615 from anuga.coordinate_transforms.geo_reference import Geo_reference 679 616 680 617 points = [[-1, 4], [0.2, 0.5], [1.0, 2.1], [0.4, 0.3], [3.0, 5.3], 681 618 [0, 0], [2.4, 3.3]] 682 attributes = [2, -4, 5, 76, -2, 0.1, 3] 683 G = Geospatial_data(points, attributes) 684 685 # First try the unit square 686 U = Geospatial_data([[0,0], [1,0], [1,1], [0,1]]) 619 attributes = [2, -4, 5, 76, -2, 0.1, 3] 620 G = Geospatial_data(points, attributes) 621 622 # First try the unit square 623 U = Geospatial_data([[0,0], [1,0], [1,1], [0,1]]) 687 624 assert num.allclose(G.clip_outside(U).get_data_points(), 688 625 [[-1, 4], [1.0, 2.1], [3.0, 5.3], [2.4, 3.3]]) 689 assert num.allclose(G.clip(U).get_attributes(), [-4, 76, 0.1]) 626 assert num.allclose(G.clip(U).get_attributes(), [-4, 76, 0.1]) 690 627 691 628 # Then a more complex polygon 692 points = [ [0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], [0.5, 1.5], [0.5, -0.5]] 693 attributes = [2, -4, 5, 76, -2, 0.1] 629 points = [[0.5, 1.4], [0.5, 0.5], [1, -0.5], [1.5, 0], 630 [0.5, 1.5], [0.5, -0.5]] 631 attributes = [2, -4, 5, 76, -2, 0.1] 694 632 G = Geospatial_data(points, attributes) 695 696 polygon = Geospatial_data([[0,0], [1,0], [0.5,-1], [2, -1], [2,1], [0,1]]) 697 698 633 polygon = Geospatial_data([[0, 0], [1, 0], [0.5, -1], [2, -1], 634 [2, 1], [0, 1]]) 699 635 assert num.allclose(G.clip_outside(polygon).get_data_points(), 700 636 [[0.5, 1.4], [0.5, 1.5], [0.5, -0.5]]) 701 assert num.allclose(G.clip_outside(polygon).get_attributes(), [2, -2, 0.1]) 702 703 637 assert num.allclose(G.clip_outside(polygon).get_attributes(), 638 [2, -2, 0.1]) 704 639 705 640 def test_clip1_inside_outside(self): 706 """test_clip1_inside_outside(self):707 641 '''test_clip1_inside_outside(self): 642 708 643 Test that point sets can 
be clipped outside of a polygon given as 709 644 another Geospatial dataset 710 """711 645 ''' 646 712 647 from anuga.coordinate_transforms.geo_reference import Geo_reference 713 648 714 649 points = [[-1, 4], [0.2, 0.5], [1.0, 2.1], [0.4, 0.3], [3.0, 5.3], 715 650 [0, 0], [2.4, 3.3]] 716 attributes = [2, -4, 5, 76, -2, 0.1, 3] 651 attributes = [2, -4, 5, 76, -2, 0.1, 3] 717 652 G = Geospatial_data(points, attributes) 718 653 719 # First try the unit square 720 U = Geospatial_data([[0,0], [1,0], [1,1], [0,1]]) 654 # First try the unit square 655 U = Geospatial_data([[0,0], [1,0], [1,1], [0,1]]) 721 656 G1 = G.clip(U) 722 assert num.allclose(G1.get_data_points(),[[0.2, 0.5], [0.4, 0.3], [0, 0]]) 657 assert num.allclose(G1.get_data_points(), 658 [[0.2, 0.5], [0.4, 0.3], [0, 0]]) 723 659 assert num.allclose(G.clip(U).get_attributes(), [-4, 76, 0.1]) 724 725 660 G2 = G.clip_outside(U) 726 661 assert num.allclose(G2.get_data_points(),[[-1, 4], [1.0, 2.1], 727 662 [3.0, 5.3], [2.4, 3.3]]) 728 assert num.allclose(G.clip_outside(U).get_attributes(), [2, 5, -2, 3]) 729 730 663 assert num.allclose(G.clip_outside(U).get_attributes(), [2, 5, -2, 3]) 664 731 665 # New ordering 732 new_points = [[0.2, 0.5], [0.4, 0.3], [0, 0]] +\ 733 [[-1, 4], [1.0, 2.1], [3.0, 5.3], [2.4, 3.3]] 734 735 new_attributes = [-4, 76, 0.1, 2, 5, -2, 3] 736 666 new_points = [[0.2, 0.5], [0.4, 0.3], [0, 0], [-1, 4], 667 [1.0, 2.1], [3.0, 5.3], [2.4, 3.3]] 668 new_attributes = [-4, 76, 0.1, 2, 5, -2, 3] 669 737 670 assert num.allclose((G1+G2).get_data_points(), new_points) 738 671 assert num.allclose((G1+G2).get_attributes(), new_attributes) … … 742 675 G.export_points_file(FN) 743 676 744 745 677 # Read it back in 746 678 G3 = Geospatial_data(FN) 747 679 748 749 680 # Check result 750 assert num.allclose(G3.get_data_points(), new_points) 751 assert num.allclose(G3.get_attributes(), new_attributes) 752 681 assert num.allclose(G3.get_data_points(), new_points) 682 assert num.allclose(G3.get_attributes(), new_attributes) 683 753 684 os.remove(FN) 754 685 755 756 686 def test_load_csv(self): 757 758 import os 759 import tempfile 760 761 fileName = tempfile.mktemp(".csv") 762 file = open(fileName,"w") 763 file.write("x,y,elevation speed \n\ 687 fileName = tempfile.mktemp('.csv') 688 file = open(fileName,'w') 689 file.write('x,y,elevation speed \n\ 764 690 1.0 0.0 10.0 0.0\n\ 765 691 0.0 1.0 0.0 10.0\n\ 766 1.0 0.0 10.4 40.0\n ")692 1.0 0.0 10.4 40.0\n') 767 693 file.close() 768 #print fileName 694 769 695 results = Geospatial_data(fileName) 770 696 os.remove(fileName) 771 # print 'data', results.get_data_points() 772 assert num.allclose(results.get_data_points(), [[1.0, 0.0],[0.0, 1.0], 773 [1.0, 0.0]]) 697 assert num.allclose(results.get_data_points(), 698 [[1.0, 0.0],[0.0, 1.0], [1.0, 0.0]]) 774 699 assert num.allclose(results.get_attributes(attribute_name='elevation'), 775 700 [10.0, 0.0, 10.4]) … … 777 702 [0.0, 10.0, 40.0]) 778 703 779 780 ###################### .CSV ############################## 704 ################################################################################ 705 # Test CSV files 706 ################################################################################ 781 707 782 708 def test_load_csv_lat_long_bad_blocking(self): 783 """ 784 test_load_csv_lat_long_bad_blocking(self): 709 '''test_load_csv_lat_long_bad_blocking(self): 785 710 Different zones in Geo references 786 """ 787 fileName = tempfile.mktemp(".csv") 788 file = open(fileName,"w") 789 file.write("Lati,LONG,z \n\ 711 ''' 712 713 fileName = 
tempfile.mktemp('.csv') 714 file = open(fileName, 'w') 715 file.write('Lati,LONG,z \n\ 790 716 -25.0,180.0,452.688000\n\ 791 -34,150.0,459.126000\n ")717 -34,150.0,459.126000\n') 792 718 file.close() 793 719 794 720 results = Geospatial_data(fileName, max_read_lines=1, 795 721 load_file_now=False) 796 797 #for i in results: 798 # pass 722 799 723 try: 800 724 for i in results: … … 804 728 else: 805 729 msg = 'Different zones in Geo references not caught.' 806 raise msg807 808 os.remove(fileName) 809 730 raise Exception, msg 731 732 os.remove(fileName) 733 810 734 def test_load_csv(self): 811 812 fileName = tempfile.mktemp(".txt") 813 file = open(fileName,"w") 814 file.write(" x,y, elevation , speed \n\ 815 1.0, 0.0, 10.0, 0.0\n\ 816 0.0, 1.0, 0.0, 10.0\n\ 817 1.0, 0.0 ,10.4, 40.0\n") 735 fileName = tempfile.mktemp('.txt') 736 file = open(fileName, 'w') 737 file.write(' x,y, elevation , speed \n\ 738 1.0, 0.0, 10.0, 0.0\n\ 739 0.0, 1.0, 0.0, 10.0\n\ 740 1.0, 0.0 ,10.4, 40.0\n') 818 741 file.close() 819 742 820 743 results = Geospatial_data(fileName, max_read_lines=2) 821 744 822 823 assert num.allclose(results.get_data_points(), [[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]]) 824 assert num.allclose(results.get_attributes(attribute_name='elevation'), [10.0, 0.0, 10.4]) 825 assert num.allclose(results.get_attributes(attribute_name='speed'), [0.0, 10.0, 40.0]) 745 assert num.allclose(results.get_data_points(), 746 [[1.0, 0.0],[0.0, 1.0],[1.0, 0.0]]) 747 assert num.allclose(results.get_attributes(attribute_name='elevation'), 748 [10.0, 0.0, 10.4]) 749 assert num.allclose(results.get_attributes(attribute_name='speed'), 750 [0.0, 10.0, 40.0]) 826 751 827 752 # Blocking … … 829 754 for i in results: 830 755 geo_list.append(i) 831 756 832 757 assert num.allclose(geo_list[0].get_data_points(), 833 758 [[1.0, 0.0],[0.0, 1.0]]) 834 835 759 assert num.allclose(geo_list[0].get_attributes(attribute_name='elevation'), 836 760 [10.0, 0.0]) 837 761 assert num.allclose(geo_list[1].get_data_points(), 838 [[1.0, 0.0]]) 762 [[1.0, 0.0]]) 839 763 assert num.allclose(geo_list[1].get_attributes(attribute_name='elevation'), 840 764 [10.4]) 841 842 os.remove(fileName) 843 765 766 os.remove(fileName) 767 844 768 def test_load_csv_bad(self): 845 """test_load_csv_bad(self):769 '''test_load_csv_bad(self): 846 770 header column, body column missmatch 847 771 (Format error) 848 """ 849 import os 850 851 fileName = tempfile.mktemp(".txt") 852 file = open(fileName,"w") 853 file.write(" elevation , speed \n\ 854 1.0, 0.0, 10.0, 0.0\n\ 855 0.0, 1.0, 0.0, 10.0\n\ 856 1.0, 0.0 ,10.4, 40.0\n") 772 ''' 773 774 fileName = tempfile.mktemp('.txt') 775 file = open(fileName, 'w') 776 file.write(' elevation , speed \n\ 777 1.0, 0.0, 10.0, 0.0\n\ 778 0.0, 1.0, 0.0, 10.0\n\ 779 1.0, 0.0 ,10.4, 40.0\n') 857 780 file.close() 858 781 … … 862 785 # Blocking 863 786 geo_list = [] 864 #for i in results:865 # geo_list.append(i)866 787 try: 867 788 for i in results: … … 870 791 pass 871 792 else: 872 msg = ' bad file did not raise error!'873 raise msg793 msg = 'Bad file did not raise error!' 
794 raise Exception, msg 874 795 os.remove(fileName) 875 796 876 797 def test_load_csv_badII(self): 877 """test_load_csv_bad(self):798 '''test_load_csv_bad(self): 878 799 header column, body column missmatch 879 800 (Format error) 880 """ 881 import os 882 883 fileName = tempfile.mktemp(".txt") 884 file = open(fileName,"w") 885 file.write(" x,y,elevation , speed \n\ 801 ''' 802 803 fileName = tempfile.mktemp('.txt') 804 file = open(fileName, 'w') 805 file.write(' x,y,elevation , speed \n\ 886 806 1.0, 0.0, 10.0, 0.0\n\ 887 807 0.0, 1.0, 0.0, 10\n\ 888 1.0, 0.0 ,10.4 yeah\n ")808 1.0, 0.0 ,10.4 yeah\n') 889 809 file.close() 890 810 … … 894 814 # Blocking 895 815 geo_list = [] 896 #for i in results:897 # geo_list.append(i)898 816 try: 899 817 for i in results: … … 902 820 pass 903 821 else: 904 msg = ' bad file did not raise error!'905 raise msg822 msg = 'Bad file did not raise error!' 823 raise Exception, msg 906 824 os.remove(fileName) 907 825 908 826 def test_load_csv_badIII(self): 909 """test_load_csv_bad(self):827 '''test_load_csv_bad(self): 910 828 space delimited 911 """ 912 import os 913 914 fileName = tempfile.mktemp(".txt") 915 file = open(fileName,"w") 916 file.write(" x y elevation speed \n\ 829 ''' 830 831 fileName = tempfile.mktemp('.txt') 832 file = open(fileName, 'w') 833 file.write(' x y elevation speed \n\ 917 834 1. 0.0 10.0 0.0\n\ 918 835 0.0 1.0 0.0 10.0\n\ 919 1.0 0.0 10.4 40.0\n ")836 1.0 0.0 10.4 40.0\n') 920 837 file.close() 921 838 … … 926 843 pass 927 844 else: 928 msg = ' bad file did not raise error!'929 raise msg930 os.remove(fileName) 931 845 msg = 'Bad file did not raise error!' 846 raise Exception, msg 847 os.remove(fileName) 848 932 849 def test_load_csv_badIV(self): 933 """test_load_csv_bad(self):850 ''' test_load_csv_bad(self): 934 851 header column, body column missmatch 935 852 (Format error) 936 """ 937 import os 938 939 fileName = tempfile.mktemp(".txt") 940 file = open(fileName,"w") 941 file.write(" x,y,elevation , speed \n\ 853 ''' 854 855 fileName = tempfile.mktemp('.txt') 856 file = open(fileName, 'w') 857 file.write(' x,y,elevation , speed \n\ 942 858 1.0, 0.0, 10.0, wow\n\ 943 859 0.0, 1.0, 0.0, ha\n\ 944 1.0, 0.0 ,10.4, yeah\n ")860 1.0, 0.0 ,10.4, yeah\n') 945 861 file.close() 946 862 … … 950 866 # Blocking 951 867 geo_list = [] 952 #for i in results:953 # geo_list.append(i)954 868 try: 955 869 for i in results: … … 958 872 pass 959 873 else: 960 msg = ' bad file did not raise error!'961 raise msg874 msg = 'Bad file did not raise error!' 875 raise Exception, msg 962 876 os.remove(fileName) 963 877 964 878 def test_load_pts_blocking(self): 965 879 #This is pts! 
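# ---------------------------------------------------------------------------
# Illustrative sketch (editor's annotation, not a line of this changeset).
# The blocking tests in this file pass max_read_lines and then iterate over
# the Geospatial_data instance; each iteration yields a smaller
# Geospatial_data object holding one block of points.  A hypothetical
# stand-alone example of that pattern (file name and contents invented):
import os
import tempfile

import numpy as num

from anuga.geospatial_data.geospatial_data import Geospatial_data

fileName = tempfile.mktemp('.csv')
fd = open(fileName, 'w')
fd.write('x,y,elevation\n'
         '1.0,0.0,10.0\n'
         '0.0,1.0,0.0\n'
         '1.0,0.0,10.4\n')
fd.close()

blocks = []
for block in Geospatial_data(fileName, max_read_lines=2):
    blocks.append(block)    # each block is itself a Geospatial_data

assert num.allclose(blocks[0].get_data_points(), [[1.0, 0.0], [0.0, 1.0]])
assert num.allclose(blocks[1].get_attributes(attribute_name='elevation'),
                    [10.4])

os.remove(fileName)
# ---------------------------------------------------------------------------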
966 967 import os 968 969 fileName = tempfile.mktemp(".txt") 970 file = open(fileName,"w") 971 file.write(" x,y, elevation , speed \n\ 972 1.0, 0.0, 10.0, 0.0\n\ 973 0.0, 1.0, 0.0, 10.0\n\ 974 1.0, 0.0 ,10.4, 40.0\n") 880 fileName = tempfile.mktemp('.txt') 881 file = open(fileName, 'w') 882 file.write(' x,y, elevation , speed \n\ 883 1.0, 0.0, 10.0, 0.0\n\ 884 0.0, 1.0, 0.0, 10.0\n\ 885 1.0, 0.0 ,10.4, 40.0\n') 975 886 file.close() 976 887 977 pts_file = tempfile.mktemp( ".pts")978 888 pts_file = tempfile.mktemp('.pts') 889 979 890 convert = Geospatial_data(fileName) 980 891 convert.export_points_file(pts_file) 981 892 results = Geospatial_data(pts_file, max_read_lines=2) 982 893 983 assert num.allclose(results.get_data_points(), [[1.0, 0.0],[0.0, 1.0],984 894 assert num.allclose(results.get_data_points(), 895 [[1.0, 0.0],[0.0, 1.0], [1.0, 0.0]]) 985 896 assert num.allclose(results.get_attributes(attribute_name='elevation'), 986 897 [10.0, 0.0, 10.4]) … … 991 902 geo_list = [] 992 903 for i in results: 993 geo_list.append(i) 904 geo_list.append(i) 994 905 assert num.allclose(geo_list[0].get_data_points(), 995 906 [[1.0, 0.0],[0.0, 1.0]]) … … 997 908 [10.0, 0.0]) 998 909 assert num.allclose(geo_list[1].get_data_points(), 999 [[1.0, 0.0]]) 910 [[1.0, 0.0]]) 1000 911 assert num.allclose(geo_list[1].get_attributes(attribute_name='elevation'), 1001 912 [10.4]) 1002 1003 os.remove(fileName) 1004 os.remove(pts_file) 913 914 os.remove(fileName) 915 os.remove(pts_file) 1005 916 1006 917 def verbose_test_load_pts_blocking(self): 1007 1008 import os 1009 1010 fileName = tempfile.mktemp(".txt") 1011 file = open(fileName,"w") 1012 file.write(" x,y, elevation , speed \n\ 1013
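# ---------------------------------------------------------------------------
# Illustrative sketch (editor's annotation, not a line of this changeset).
# Several tests above round-trip data through export_points_file(): a
# Geospatial_data object is written out (for instance to a .pts file) and
# read back with the same points and attributes.  A hypothetical stand-alone
# example of that round trip (values invented for illustration):
import os
import tempfile

import numpy as num

from anuga.geospatial_data.geospatial_data import Geospatial_data

G = Geospatial_data([[1.0, 0.0], [0.0, 1.0]], {'elevation': [10.0, 0.0]})

pts_file = tempfile.mktemp('.pts')
G.export_points_file(pts_file)    # write points and attributes to file

G2 = Geospatial_data(pts_file)    # read them back in
assert num.allclose(G2.get_data_points(), G.get_data_points())
assert num.allclose(G2.get_attributes('elevation'),
                    G.get_attributes('elevation'))

os.remove(pts_file)
# ---------------------------------------------------------------------------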