from struct import pack, unpack
import array as p_array

import numpy as num

from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.coordinate_transforms.redfearn import redfearn
from anuga.geospatial_data.geospatial_data import ensure_absolute, \
     Geospatial_data

# NOTE(review): ANUGAError is raised in Read_urs.__init__ but was never
# imported, so those error paths failed with NameError instead.  Verify
# this module path against the project's exception module.
from anuga.utilities.anuga_exceptions import ANUGAError

class Read_urs:
    """
    Read the info in URS mux files.

    for the quantities here's a correlation between the file names and
    what they mean;
    z-mux is height above sea level, m
    e-mux is velocity is Eastern direction, m/s
    n-mux is velocity is Northern direction, m/s
    """

    def __init__(self, urs_file):
        """Open urs_file and read its header and station table.

        urs_file - path to a binary URS mux file.

        Sets points_num, time_step_count, time_step and the
        (points_num, 3) array lonlatdep (long, lat, depth per station).

        Raises ANUGAError if any header value is negative.
        """
        self.iterated = False
        columns = 3    # long, lat, depth
        mux_file = open(urs_file, 'rb')

        # Number of points/stations
        (self.points_num,) = unpack('i', mux_file.read(4))

        # nt, int - Number of time steps
        (self.time_step_count,) = unpack('i', mux_file.read(4))

        # dt, float - time step, seconds
        (self.time_step,) = unpack('f', mux_file.read(4))

        msg = "Bad data in the urs file."
        if self.points_num < 0 \
               or self.time_step_count < 0 \
               or self.time_step < 0:
            # Close the file before raising so it does not leak.
            mux_file.close()
            raise ANUGAError(msg)

        # The depth is in meters, and it is the distance from the ocean
        # to the sea bottom.
        lonlatdep = p_array.array('f')
        # array.read() was removed in Python 3.9; fromfile() is the
        # long-standing equivalent and works on Python 2 as well.
        lonlatdep.fromfile(mux_file, columns * self.points_num)
        # num.float was removed from numpy; the builtin float is the
        # documented replacement and yields the same float64 dtype.
        lonlatdep = num.array(lonlatdep, dtype=float)
        lonlatdep = num.reshape(lonlatdep, (self.points_num, columns))
        self.lonlatdep = lonlatdep

        self.mux_file = mux_file
        # check this array

    def __iter__(self):
        """
        iterate over quantity data which is with respect to time.

        Note: You can only iterate once over an object

        returns quantity information for each time slice
        """
        msg = "You can only interate once over a urs file."
        assert not self.iterated, msg

        self.iter_time_step = 0
        self.iterated = True

        return self

    def __next__(self):
        """Return the next time slice as a 1D numpy array of floats.

        Closes the file and raises StopIteration once all
        time_step_count slices have been returned.
        """
        if self.time_step_count == self.iter_time_step:
            self.close()
            raise StopIteration

        # Read in a time slice from mux file
        hz_p_array = p_array.array('f')
        hz_p_array.fromfile(self.mux_file, self.points_num)
        hz_p = num.array(hz_p_array, dtype=float)
        self.iter_time_step += 1

        return hz_p

    # Python 2's iterator protocol used 'next'; keep it as an alias so
    # existing callers that invoke .next() explicitly still work.
    next = __next__

    def close(self):
        """Close the underlying mux file."""
        self.mux_file.close()



### PRODUCING THE POINTS NEEDED FILE ###

# Ones used for FESA 2007 results
#LL_LAT = -50.0
#LL_LONG = 80.0
#GRID_SPACING = 1.0/60.0
#LAT_AMOUNT = 4800
#LONG_AMOUNT = 3600

def save_boundary_as_urs(file_name, boundary_polygon, zone,
                         ll_lat, ll_long,
                         grid_spacing,
                         lat_amount, long_amount,
                         isSouthernHemisphere=True,
                         export_csv=False, use_cache=False,
                         verbose=False):
    """
    Replicate the URS grid over a polygon and write the cloud of
    boundary points to a .urs file (first line is the point count; each
    following line is one point as lat, long).

    The polygon cannot cross zones or hemispheres.  To cover several
    zones/hemispheres, run once per zone and combine the outputs.

    file_name        - output name; '.urs' is appended if missing.
    boundary_polygon - list of UTM points; the last point is assumed to
                       join the first.
    ll_lat, ll_long  - lower left corner of the URS grid, decimal degrees.
    grid_spacing     - in decimal degrees.
    lat_amount       - number of latitudes in the URS grid.
    long_amount      - number of longitudes in the URS grid.
    export_csv       - also write the same points as a .csv file.

    Returns the Geospatial_data object that was exported.
    """
    geo = calculate_boundary_points(boundary_polygon, zone, ll_lat, ll_long,
                                    grid_spacing,
                                    lat_amount, long_amount,
                                    isSouthernHemisphere,
                                    use_cache, verbose)

    if not file_name.endswith(".urs"):
        file_name = file_name + ".urs"

    geo.export_points_file(file_name, isSouthHemisphere=isSouthernHemisphere)

    if export_csv:
        csv_name = file_name
        if csv_name.endswith(".urs"):
            csv_name = csv_name[:-4] + ".csv"
        geo.export_points_file(csv_name)

    return geo

def calculate_boundary_points(boundary_polygon, zone, ll_lat,
                              ll_long, grid_spacing,
                              lat_amount, long_amount, isSouthHemisphere=True,
                              use_cache=False, verbose=False):
    """Compute the cloud of URS boundary points for a polygon.

    Thin wrapper around _calculate_boundary_points that optionally
    memoises the result via anuga.caching.

    Returns the Geospatial_data object produced by
    _calculate_boundary_points.

    Raises Exception if use_cache is True but anuga.caching cannot be
    imported.
    """
    args = (boundary_polygon,
            zone, ll_lat,
            ll_long, grid_spacing,
            lat_amount, long_amount, isSouthHemisphere)
    kwargs = {}

    if use_cache is True:
        try:
            from anuga.caching import cache
        except ImportError:
            # Catch only the import failure; the original bare 'except'
            # would also have hidden unrelated errors.
            msg = 'Caching was requested, but caching module ' \
                  'could not be imported'
            raise Exception(msg)

        geo = cache(_calculate_boundary_points,
                    args, kwargs,
                    verbose=verbose,
                    compression=False)
    else:
        # apply() was removed in Python 3; argument unpacking is the
        # direct equivalent.
        geo = _calculate_boundary_points(*args, **kwargs)

    return geo

def _calculate_boundary_points(boundary_polygon,
                               zone, ll_lat,
                               ll_long, grid_spacing,
                               lat_amount, long_amount,
                               isSouthHemisphere):
    """
    boundary_polygon - a list of UTM points describing a polygon; the
                       last point is assumed to join the first.
    ll_lat - lower left latitude, in decimal degrees
    ll-long - lower left longitude, in decimal degrees
    grid_spacing - in decimal degrees

    Returns a Geospatial_data object holding the lat/long grid points
    needed to interpolate anywhere along the polygon's boundary.
    """
    msg = "grid_spacing can not be zero"
    assert not grid_spacing == 0, msg

    vertices = boundary_polygon

    # Closed list of edges: pairing each vertex with its predecessor
    # makes segment 0 join the last vertex back to the first, since
    # vertices[-1] is the final point.
    segs = [[vertices[i - 1], vertices[i]] for i in range(len(vertices))]

    # Convert the segments to lats and longs.  Don't assume the zone of
    # the segments matches the lower-left corner of the lat/long data --
    # they can easily be in different zones.
    lat_long_set = frozenset()
    for seg in segs:
        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing,
                                        lat_amount, long_amount, zone,
                                        isSouthHemisphere)
        lat_long_set |= frozenset(points_lat_long)

    if not lat_long_set:
        msg = "URS region specified and polygon does not overlap."
        raise ValueError(msg)

    # Warning there is no info in geospatial saying the hemisphere of
    # these points. There should be.
    geo = Geospatial_data(data_points=list(lat_long_set),
                          points_are_lats_longs=True)

    return geo

def points_needed(seg, ll_lat, ll_long, grid_spacing,
                  lat_amount, long_amount, zone,
                  isSouthHemisphere):
    """
    seg is two points, in UTM
    return a list of the points, in lats and longs that are needed to
    interpolate any point on the segment.

    lat_amount and long_amount are accepted for interface compatibility
    but are not used here (the grid is bounded by the segment's padded
    bounding box instead).
    """
    from math import floor

    geo_reference = Geo_reference(zone=zone)
    geo = Geospatial_data(seg, geo_reference=geo_reference)
    seg_lat_long = geo.get_data_points(as_lat_long=True,
                                       isSouthHemisphere=isSouthHemisphere)

    # Pad the segment's bounding box by one grid diagonal so every grid
    # point that could influence interpolation on the segment is kept.
    # 1.415 = 2^0.5, rounded up....
    sqrt_2_rounded_up = 1.415
    pad = sqrt_2_rounded_up * grid_spacing    # renamed: 'buffer' shadows a builtin

    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + pad
    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + pad
    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - pad
    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - pad

    # Round the minimum indices up.  The original 'int(round(x + 0.5))'
    # relied on Python 2's round-half-away semantics and is equivalent
    # to floor(x) + 1; Python 3's banker's rounding would give a
    # different (too small) index whenever x lands exactly on the grid.
    first_row = (min_long - ll_long) / grid_spacing
    first_row_long = int(floor(first_row)) + 1

    last_row = (max_long - ll_long) / grid_spacing    # round down
    last_row_long = int(round(last_row))

    first_row = (min_lat - ll_lat) / grid_spacing
    first_row_lat = int(floor(first_row)) + 1

    last_row = (max_lat - ll_lat) / grid_spacing      # round down
    last_row_lat = int(round(last_row))

    # Distance threshold in metres.  The 157147.4112 factor presumably
    # converts one degree of grid spacing to metres times sqrt(2) --
    # TODO confirm its derivation.
    max_distance = 157147.4112 * grid_spacing
    points_lat_long = []

    # Create a list of the lat long points to include.
    for index_lat in range(first_row_lat, last_row_lat + 1):
        for index_long in range(first_row_long, last_row_long + 1):
            lat = ll_lat + index_lat * grid_spacing
            lon = ll_long + index_long * grid_spacing

            # filter here to keep good points
            if keep_point(lat, lon, seg, max_distance):
                points_lat_long.append((lat, lon))    # must be hashable

    return points_lat_long

def keep_point(lat, long, seg, max_distance):
    """
    Return True if the point (lat, long) lies within max_distance
    (metres) of the line through segment seg.

    seg is two points, UTM.  The lat/long point is converted to UTM via
    redfearn before the distance test.
    """
    from math import sqrt

    _, x0, y0 = redfearn(lat, long)
    x1 = seg[0][0]
    y1 = seg[0][1]
    x2 = seg[1][0]
    y2 = seg[1][1]
    x2_1 = x2 - x1
    y2_1 = y2 - y1

    seg_length = sqrt(x2_1 * x2_1 + y2_1 * y2_1)
    if seg_length == 0:
        # Degenerate (zero-length) segment, e.g. a duplicated polygon
        # vertex.  The original except-branch always returned True here
        # (its numerator is necessarily 0 when both deltas are 0), which
        # kept EVERY grid point.  Instead measure the distance to the
        # segment's single location.
        return sqrt((x1 - x0) ** 2 + (y1 - y0) ** 2) <= max_distance

    # Perpendicular distance from (x0, y0) to the infinite line
    # through the segment.
    d = abs(x2_1 * (y1 - y0) - (x1 - x0) * y2_1) / seg_length

    return d <= max_distance
