import os
import array as p_array
from struct import pack, unpack

import numpy as num

import anuga.utilities.log as log
from anuga.caching.caching import myhash
from anuga.file.netcdf import Write_nc, write_elevation_nc
from anuga.utilities.anuga_exceptions import ANUGAError
from anuga.utilities.numerical_tools import ensure_numeric

from mux import WAVEHEIGHT_MUX_LABEL, EAST_VELOCITY_LABEL, \
                NORTH_VELOCITY_LABEL

---|
def urs2nc(basename_in='o', basename_out='urs'):
    """Convert the 3 urs files to 4 nc files.

    The name of the urs file names must be;
    [basename_in]-z-mux
    [basename_in]-e-mux
    [basename_in]-n-mux

    Writes [basename_out]_ha.nc, _ua.nc, _va.nc (one per quantity) plus
    [basename_out]_e.nc holding the shared elevation grid, and returns the
    list of the four output file names.

    Raises IOError if an input mux file is missing, and AssertionError if
    the three mux files disagree on the elevation/position information.
    """
    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
                basename_in + EAST_VELOCITY_LABEL,
                basename_in + NORTH_VELOCITY_LABEL]
    files_out = [basename_out + '_ha.nc',
                 basename_out + '_ua.nc',
                 basename_out + '_va.nc']
    quantities = ['HA', 'UA', 'VA']

    # Accept either the exact file name or the name with a '.mux' extension.
    for i, file_name in enumerate(files_in):
        if os.access(file_name, os.F_OK) == 0:
            if os.access(file_name + '.mux', os.F_OK) == 0:
                msg = 'File %s does not exist or is not accessible' % file_name
                raise IOError(msg)
            else:
                files_in[i] += '.mux'
                log.critical("file_name %s" % file_name)

    # The elevation info is repeated in each mux file; write it once and
    # check (via a hash) that the other two files carry identical data.
    hashed_elevation = None
    # All three lists have length 3, so zip is equivalent to the old
    # Python-2 map(None, ...) idiom used here previously.
    for file_in, file_out, quantity in zip(files_in, files_out, quantities):
        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
                                                  file_out,
                                                  quantity)
        if hashed_elevation is None:
            elevation_file = basename_out + '_e.nc'
            write_elevation_nc(elevation_file,
                               lon,
                               lat,
                               depth)
            hashed_elevation = myhash(lonlatdep)
        else:
            msg = "The elevation information in the mux files is inconsistent"
            assert hashed_elevation == myhash(lonlatdep), msg

    files_out.append(elevation_file)

    return files_out
---|
64 | |
---|
65 | |
---|
def _binary_c2nc(file_in, file_out, quantity):
    """Reads in a quantity urs file and writes a quantity nc file.
    Additionally, returns the depth and lat, long info,
    so it can be written to a file.

    file_in   -- binary mux file: header (num points, num timesteps,
                 timestep size), then lon/lat/depth triples per point,
                 then one float per point per time slice.
    file_out  -- name of the nc file to create.
    quantity  -- quantity label passed through to Write_nc (e.g. 'HA').

    Returns (lonlatdep, lon, lat, depth).
    Raises ANUGAError on a negative header value, IOError if longitudes
    are not ascending.
    """
    columns = 3  # long, lat, depth
    mux_file = open(file_in, 'rb')

    # Header: number of points/stations, number of time steps (ints),
    # then the time step size in seconds (float).
    (points_num,) = unpack('i', mux_file.read(4))
    (time_step_count,) = unpack('i', mux_file.read(4))
    (time_step,) = unpack('f', mux_file.read(4))

    # A negative count or step means a corrupt/garbage header.
    if points_num < 0 or time_step_count < 0 or time_step < 0:
        mux_file.close()
        raise ANUGAError("Bad data in the mux file.")

    # Read the lon/lat/depth table.  fromfile() replaces array.read(),
    # which was removed in Python 3 (they are equivalent).
    lonlatdep = p_array.array('f')
    lonlatdep.fromfile(mux_file, columns * points_num)
    lonlatdep = num.array(lonlatdep, dtype=float)
    lonlatdep = num.reshape(lonlatdep, (points_num, columns))

    lon, lat, depth = lon_lat2grid(lonlatdep)

    lon_sorted = list(lon)
    lon_sorted.sort()
    if not num.all(lon == lon_sorted):
        msg = "Longitudes in mux file are not in ascending order"
        raise IOError(msg)

    # NOTE(review): lat_sorted is computed but never compared against lat —
    # looks like a missing ascending-order check for latitudes; behavior
    # kept as-is pending confirmation.
    lat_sorted = list(lat)
    lat_sorted.sort()

    nc_file = Write_nc(quantity,
                       file_out,
                       time_step_count,
                       time_step,
                       lon,
                       lat)

    for i in range(time_step_count):
        # Read in a time slice from the mux file.
        hz_p_array = p_array.array('f')
        hz_p_array.fromfile(mux_file, points_num)
        hz_p = num.array(hz_p_array, dtype=float)
        hz_p = num.reshape(hz_p, (len(lon), len(lat)))
        hz_p = num.transpose(hz_p)  # mux has lat varying fastest, nc has long v.f.

        # Write the time slice to the nc file.
        nc_file.store_timestep(hz_p)

    mux_file.close()
    nc_file.close()

    return lonlatdep, lon, lat, depth
---|
133 | |
---|
134 | |
---|
135 | |
---|
def lon_lat2grid(long_lat_dep):
    """
    given a list of points that are assumed to be an a grid,
    return the long's and lat's of the grid.
    long_lat_dep is an array where each row is a position.
    The first column is longitudes.
    The second column is latitudes.
    The third column is a quantity (e.g. depth) at that position.

    The latitude is the fastest varying dimension - in mux files

    Returns (longitudes, latitudes, quantity) where quantity has been
    reordered so that longitude is the fastest varying dimension.

    Raises AssertionError if the points do not form a grid or if a
    coordinate is out of range; the caller (urs2nc) converts other
    inconsistencies into IOErrors.
    """
    LONG = 0
    LAT = 1
    QUANTITY = 2

    # num.asarray with dtype=float is equivalent to the previous
    # ensure_numeric(..., num.float) call (num.float was removed in NumPy).
    long_lat_dep = num.asarray(long_lat_dep, dtype=float)

    num_points = long_lat_dep.shape[0]
    first_long = long_lat_dep[0, LONG]

    # Count the length of unique latitudes: the run of leading rows sharing
    # the first longitude.  The bound check must come BEFORE the indexing
    # (bug fix: the original tested long_lat_dep[i, LONG] first and raised
    # IndexError when every row shared a single longitude).
    i = 0
    while i < num_points and long_lat_dep[i, LONG] == first_long:
        i += 1

    # Determine the lats and longs from the grid.
    lat = long_lat_dep[:i, LAT]
    longitudes = long_lat_dep[::i, LONG]

    lenlong = len(longitudes)
    lenlat = len(lat)

    msg = 'Input data is not gridded'
    assert num_points % lenlat == 0, msg
    assert num_points % lenlong == 0, msg

    # Test that data is gridded: each longitude block repeats the same
    # latitude sequence with a constant longitude.
    for j in range(lenlong):
        msg = 'Data is not gridded. It must be for this operation'
        first = j * lenlat
        last = first + lenlat

        assert num.allclose(long_lat_dep[first:last, LAT], lat), msg
        assert num.allclose(long_lat_dep[first:last, LONG], longitudes[j]), msg

    msg = 'Out of range latitudes/longitudes'
    for l in lat:
        assert -90 < l < 90, msg
    for l in longitudes:
        assert -180 < l < 180, msg

    # Change the quantity from lat being the fastest varying dimension to
    # long being the fastest varying dimension.  Vectorized reshape +
    # transpose replaces the previous per-element Python loops (this was
    # the FIXME): row j of the (lenlong, lenlat) view holds longitude j's
    # latitude run, so transposing and flattening yields long-fastest order.
    quantity = long_lat_dep[:, QUANTITY].reshape(lenlong, lenlat).T.flatten()

    return longitudes, lat, quantity
---|
199 | |
---|
200 | |
---|
201 | |
---|