source: trunk/anuga_core/anuga/file_conversion/file_conversion.py @ 9737

Last change on this file since 9737 was 8780, checked in by steve, 12 years ago

Some changes to allow netcdf4 use

File size: 10.8 KB
1""" Conversion routines.
2    ANUGA needs to deal with many different file formats, and this
3    module provides routines for easily converting between them.
4   
5    These routines are necessarily high level, sitting above the various
6    ANUGA modules. They take a file as input, and output a file.
7"""

# Non-ANUGA imports
from anuga.file.netcdf import NetCDFFile
import numpy as num
import os
from os import path, sep

# ANUGA imports
from anuga.coordinate_transforms.geo_reference import Geo_reference, \
     write_NetCDF_georeference, ensure_geo_reference
from anuga.abstract_2d_finite_volumes.pmesh2domain import \
     pmesh_to_domain_instance
from anuga.utilities.numerical_tools import ensure_numeric, mean
from anuga.utilities.file_utils import create_filename  # Assumed import path
import anuga.utilities.log as log
from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a, \
                            netcdf_float

from anuga.anuga_exceptions import *


# Shallow water imports
from anuga.file.sww import Read_sww, Write_sww, SWW_file
from anuga.shallow_water.shallow_water_domain import Domain


def sww2obj(filename, size):
    """ Convert NetCDF-based SWW data output to OBJ format.

        filename    Name of the .sww file to convert.
        size        The number of lines (triangles) to write.
    """

    if filename[-4:] != '.sww':
        raise IOError('Input file %s should be of type .sww.' % filename)

    basefilename = filename[:-4]

    # Get NetCDF
    nc_fname = create_filename('.', basefilename, 'sww', size)
    log.critical('Reading from %s' % nc_fname)
    fid = NetCDFFile(nc_fname, netcdf_mode_r)  # Open existing file for read

    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  # Number of lines
    xx = num.zeros((M,3), num.float)
    yy = num.zeros((M,3), num.float)
    zz = num.zeros((M,3), num.float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    # Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN, xx, yy, zz)

    # Now read all the data with variable information, combine with
    # x,y info and store as obj
    for k in range(len(time)):
        t = time[k]
        log.critical('Processing timestep %f' % t)

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]

        # Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN, xx, yy, zz)

    fid.close()

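# A minimal usage sketch (illustrative only; 'run.sww' is a hypothetical
# output file): write an OBJ file for the bathymetry plus one OBJ file per
# timestep, for the first 1000 triangles.
#
#     sww2obj('run.sww', size=1000)
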
def timefile2netcdf(file_text, file_out=None, quantity_names=None,
                    time_as_seconds=False):
    """Convert a typical text file with time series to a NetCDF tms file.

    The file format is assumed to be two fields separated by a comma,
    with the time given either as a date-time:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    e.g.

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    or, if time_as_seconds is True, as seconds:

      0.0, 1.328223 0 0
      0.1, 1.292912 0 0

    Either form provides a time dependent function f(t) with, in this
    example, three attributes.

    If file_out is not given, it defaults to file_text with its .txt
    extension replaced by .tms.
    """

    import time, calendar
    from anuga.config import time_format
    from anuga.utilities.numerical_tools import ensure_numeric

    if file_text[-4:] != '.txt':
        raise IOError('Input file %s should be of type .txt.' % file_text)

    if file_out is None:
        file_out = file_text[:-4] + '.tms'

    fid = open(file_text)
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    msg = "File %s must have the format 'datetime, value0 value1 value2 ...'" \
          % file_text
    assert len(fields) == 2, msg

    if not time_as_seconds:
        try:
            starttime = calendar.timegm(time.strptime(fields[0], time_format))
        except ValueError:
            msg = 'First field in file %s must be' % file_text
            msg += ' date-time with format %s.\n' % time_format
            msg += 'I got %s instead.' % fields[0]
            raise DataTimeError(msg)
    else:
        try:
            starttime = float(fields[0])
        except ValueError:
            msg = 'Bad time format'
            raise DataTimeError(msg)

    # Split values
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg

    # Read times proper
    fid = open(file_text)
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)

    T = num.zeros(N, num.float)       # Time
    Q = num.zeros((N, d), num.float)  # Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        if not time_as_seconds:
            realtime = calendar.timegm(time.strptime(fields[0], time_format))
        else:
            realtime = float(fields[0])
        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonically ' % file_text
    msg += 'increasing sequence'
    assert num.alltrue(T[1:] - T[:-1] > 0), msg

    # Create NetCDF file
    fid = NetCDFFile(file_out, netcdf_mode_w)

    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'

    # Reference point
    # Start time in seconds since the epoch (midnight 1/1/1970)
    # FIXME: Use Georef
    fid.starttime = starttime

    # Dimension definitions
    #fid.createDimension('number_of_volumes', self.number_of_volumes)
    #fid.createDimension('number_of_vertices', 3)

    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', netcdf_float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            # quantity_names is None or too short; fall back to a default name
            name = 'Attribute%d' % i

        fid.createVariable(name, netcdf_float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()

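# A minimal usage sketch (illustrative only; 'gauge.txt' is a hypothetical
# input file with lines such as '31/08/04 00:00:00, 1.328223 0 0'):
#
#     timefile2netcdf('gauge.txt',
#                     quantity_names=['stage', 'xmomentum', 'ymomentum'])
#
# This writes 'gauge.tms' containing a 'time' variable plus one NetCDF
# variable per value column.
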
def tsh2sww(filename, verbose=False):
    """
    Convert a .tsh or .msh mesh file to an .sww file, mainly as a quick
    way to check that the mesh 'looks' good.
    """

    if filename[-4:] != '.tsh' and filename[-4:] != '.msh':
        raise IOError('Input file %s should be .tsh or .msh.' % filename)

    if verbose: log.critical('Creating domain from %s' % filename)

    domain = pmesh_to_domain_instance(filename, Domain)

    if verbose: log.critical("Number of triangles = %s" % len(domain))

    domain.smooth = True
    domain.format = 'sww'   # Native netcdf visualisation format
    file_path, filename = path.split(filename)
    filename, ext = path.splitext(filename)
    domain.set_name(filename)
    domain.reduction = mean

    if verbose: log.critical("file_path = %s" % file_path)

    if file_path == "":
        file_path = "."
    domain.set_datadir(file_path)

    if verbose:
        log.critical("Output written to %s%s%s.%s"
                     % (domain.get_datadir(), sep, domain.get_name(),
                        domain.format))

    sww = SWW_file(domain)
    sww.store_connectivity()
    sww.store_timestep()

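# A minimal usage sketch (illustrative only; 'mesh.tsh' is a hypothetical
# mesh file):
#
#     tsh2sww('mesh.tsh', verbose=True)
#
# This writes 'mesh.sww' to the directory containing the input mesh.
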
def get_min_max_indices(latitudes_ref, longitudes_ref,
                        minlat=None, maxlat=None,
                        minlon=None, maxlon=None):
    """
    Return min and max indices (for slicing) of the lat and long arrays
    needed to cover the area specified by the min/max lat/long values.

    Think of the latitudes and longitudes as describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assumes longitudes are sorted from low to high (west to east,
    e.g. 148 to 151) and latitudes are sorted, ascending or descending.
    """

    latitudes = latitudes_ref[:]
    longitudes = longitudes_ref[:]

    latitudes = ensure_numeric(latitudes)
    longitudes = ensure_numeric(longitudes)

    assert num.allclose(num.sort(longitudes), longitudes)

    #print latitudes[0],longitudes[0]
    #print len(latitudes),len(longitudes)
    #print latitudes[len(latitudes)-1],longitudes[len(longitudes)-1]

    lat_ascending = True
    if not num.allclose(num.sort(latitudes), latitudes):
        lat_ascending = False
        # Reverse order of lat, so it's in ascending order
        latitudes = latitudes[::-1]
        assert num.allclose(num.sort(latitudes), latitudes)

    largest_lat_index = len(latitudes)-1

    # Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = num.searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0

    if maxlat is None:
        lat_max_index = largest_lat_index #len(latitudes)
    else:
        lat_max_index = num.searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = num.searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = num.searchsorted(longitudes, maxlon)

    # Reverse the indices if the lat array is descending
    if lat_ascending is False:
        lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                       largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1  # Take into account how slicing works
    lon_max_index = lon_max_index + 1  # Take into account how slicing works

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index

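# A minimal usage sketch (illustrative only, with made-up coordinates):
# slice a lat/long grid down to the area 148.2-148.8 E, -35.8 to -35.2 N.
#
#     lats = num.arange(-36.0, -35.0, 0.1)   # Ascending latitudes
#     lons = num.arange(148.0, 149.0, 0.1)   # Ascending longitudes
#     kmin, kmax, lmin, lmax = get_min_max_indices(lats, lons,
#                                                  minlat=-35.8, maxlat=-35.2,
#                                                  minlon=148.2, maxlon=148.8)
#     sub_lats = lats[kmin:kmax]
#     sub_lons = lons[lmin:lmax]
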
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format).

       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]
    """

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'

    outfile = open(FN, 'w')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N == 3  # Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

    for i in range(M):
        base = i * N
        outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))

    outfile.close()
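
# A minimal usage sketch (illustrative only): write two disconnected
# triangles to 'demo.obj'. Each row of x, y and z holds the three
# vertices of one triangle.
#
#     x = num.array([[0.0, 1.0, 0.0], [2.0, 3.0, 2.0]])
#     y = num.array([[0.0, 0.0, 1.0], [0.0, 0.0, 1.0]])
#     z = num.zeros((2, 3), num.float)
#     write_obj('demo', x, y, z)    # Writes demo.obj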