source: anuga_core/source/anuga/fit_interpolate/fit.py @ 6237

Last changed in r6237 by ole: comments regarding ticket:314 and cleanup

1"""Least squares fitting.
2
3   Implements a penalised least-squares fit.
4   putting point data onto the mesh.
5
6   The penalty term (or smoothing term) is controlled by the smoothing
7   parameter alpha.
8   With a value of alpha=0, the fit function will attempt
9   to interpolate as closely as possible in the least-squares sense.
10   With values alpha > 0, a certain amount of smoothing will be applied.
11   A positive alpha is essential in cases where there are too few
12   data points.
13   A negative alpha is not allowed.
14   A typical value of alpha is 1.0e-6
15
16
17   Ole Nielsen, Stephen Roberts, Duncan Gray, Christopher Zoppou
18   Geoscience Australia, 2004.
19
20   TO DO
21   * test geo_ref, geo_spatial
22
23   IDEAS
24   * (DSG-) Change the interface of fit, so a domain object can
25      be passed in. (I don't know if this is feasible). If could
26      save time/memory.
27"""
import types

from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
from anuga.caching import cache
from anuga.geospatial_data.geospatial_data import Geospatial_data, \
     ensure_absolute
from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
from anuga.utilities.sparse import Sparse, Sparse_CSR
from anuga.utilities.polygon import inside_polygon
from anuga.fit_interpolate.search_functions import search_tree_of_vertices

from anuga.utilities.cg_solve import conjugate_gradient
from anuga.utilities.numerical_tools import ensure_numeric, gradient
from anuga.config import default_smoothing_parameter as DEFAULT_ALPHA

import exceptions
class TooFewPointsError(exceptions.Exception): pass
class VertsWithNoTrianglesError(exceptions.Exception): pass

import Numeric as num

class Fit(FitInterpolate):

    def __init__(self,
                 vertex_coordinates=None,
                 triangles=None,
                 mesh=None,
                 mesh_origin=None,
                 alpha=None,
                 verbose=False,
                 max_vertices_per_cell=None):


        """
        Fit data at points to the vertices of a mesh.

        Inputs:

          vertex_coordinates: List of coordinate pairs [xi, eta] of
              points constituting a mesh (or an m x 2 Numeric array or
              a geospatial object)
              Points may appear multiple times
              (e.g. if vertices have discontinuities)

          triangles: List of 3-tuples (or a Numeric array) of
              integers representing indices of all vertices in the mesh.

          mesh_origin: A geo_reference object or 3-tuple consisting of
              UTM zone, easting and northing.
              If specified, vertex coordinates are assumed to be
              relative to this origin.

          max_vertices_per_cell: Number of vertices in a quad tree cell
              at which the cell is split into 4.

          Note: Don't supply vertex coordinates as a geospatial object
              together with a mesh origin, since the geospatial object
              carries its own origin.


        Usage:
        To use this in a blocking way, call build_fit_subset with point
        and attribute (z) data, and then call fit with no point
        coordinates or z data.

        """
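        # Illustrative blocking usage (a hedged sketch, not part of the
        # original code; 'blocks' stands for any iterable of
        # (points, z) chunks supplied by the caller):
        #
        #     fitter = Fit(vertex_coordinates=vertex_coordinates,
        #                  triangles=triangles,
        #                  alpha=1.0e-6)
        #     for points_block, z_block in blocks:
        #         fitter.build_fit_subset(points_block, z_block)
        #     vertex_values = fitter.fit()
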
        # Initialise variables
        if alpha is None:
            self.alpha = DEFAULT_ALPHA
        else:
            self.alpha = alpha

        FitInterpolate.__init__(self,
                 vertex_coordinates,
                 triangles,
                 mesh,
                 mesh_origin,
                 verbose,
                 max_vertices_per_cell)

        m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)

        self.AtA = None
        self.Atz = None

        self.point_count = 0
        if self.alpha != 0:
            if verbose: print 'Building smoothing matrix'
            self._build_smoothing_matrix_D()

        self.mesh_boundary_polygon = self.mesh.get_boundary_polygon()

    def _build_coefficient_matrix_B(self,
                                  verbose = False):
        """
        Build final coefficient matrix

        Preconditions:
        If alpha is not zero, matrix D has been built.
        Matrix AtA has been built.
        """

        if self.alpha != 0:
            #if verbose: print 'Building smoothing matrix'
            #self._build_smoothing_matrix_D()
            self.B = self.AtA + self.alpha*self.D
        else:
            self.B = self.AtA

        # Convert self.B matrix to CSR format for faster matrix-vector products
        self.B = Sparse_CSR(self.B)
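
        # Descriptive note (added, restating what this method and fit() do):
        # the fitted vertex values w are found in fit() by solving the
        # sparse, symmetric system  B w = Atz,  i.e.
        #
        #     (AtA + alpha*D) w = Atz
        #
        # with the conjugate gradient solver.
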
    def _build_smoothing_matrix_D(self):
        """Build m x m smoothing matrix, where
        m is the number of basis functions phi_k (one per vertex)

        The smoothing matrix is defined as

        D = D1 + D2

        where

        [D1]_{k,l} = \int_\Omega
           \frac{\partial \phi_k}{\partial x}
           \frac{\partial \phi_l}{\partial x}\,
           dx dy

        [D2]_{k,l} = \int_\Omega
           \frac{\partial \phi_k}{\partial y}
           \frac{\partial \phi_l}{\partial y}\,
           dx dy


        The derivatives \frac{\partial \phi_k}{\partial x},
        \frac{\partial \phi_k}{\partial y} for a particular triangle
        are obtained by computing the gradient a_k, b_k for basis function k
        """

        # FIXME: algorithm might be optimised by computing local 9x9
        # "element stiffness matrices"

        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)

        self.D = Sparse(m,m)

        # For each triangle compute contributions to D = D1+D2
        for i in range(len(self.mesh)):

            # Get area
            area = self.mesh.areas[i]

            # Get global vertex indices
            v0 = self.mesh.triangles[i,0]
            v1 = self.mesh.triangles[i,1]
            v2 = self.mesh.triangles[i,2]

            # Get the three vertex_points
            xi0 = self.mesh.get_vertex_coordinate(i, 0)
            xi1 = self.mesh.get_vertex_coordinate(i, 1)
            xi2 = self.mesh.get_vertex_coordinate(i, 2)

            # Compute gradients for each vertex
            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              1, 0, 0)

            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              0, 1, 0)

            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              0, 0, 1)

            # Compute diagonal contributions
            self.D[v0,v0] += (a0*a0 + b0*b0)*area
            self.D[v1,v1] += (a1*a1 + b1*b1)*area
            self.D[v2,v2] += (a2*a2 + b2*b2)*area

            # Compute contributions for basis functions sharing edges
            e01 = (a0*a1 + b0*b1)*area
            self.D[v0,v1] += e01
            self.D[v1,v0] += e01

            e12 = (a1*a2 + b1*b2)*area
            self.D[v1,v2] += e12
            self.D[v2,v1] += e12

            e20 = (a2*a0 + b2*b0)*area
            self.D[v2,v0] += e20
            self.D[v0,v2] += e20

    def get_D(self):
        return self.D.todense()
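
    # Worked check (a sketch, not part of the original code): for a single
    # right triangle with vertices (0,0), (1,0) and (0,1) the hat functions
    # are phi_0 = 1 - x - y, phi_1 = x, phi_2 = y, so the gradients are
    # (a0,b0) = (-1,-1), (a1,b1) = (1,0), (a2,b2) = (0,1) and area = 0.5.
    # The loop above then yields
    #
    #     D = [[ 1.0, -0.5, -0.5],
    #          [-0.5,  0.5,  0.0],
    #          [-0.5,  0.0,  0.5]]
    #
    # Each row sums to zero, reflecting that constant functions incur no
    # smoothing penalty.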


    def _build_matrix_AtA_Atz(self,
                              point_coordinates,
                              z,
                              verbose = False):
        """Build:
        AtA  m x m  interpolation matrix, and,
        Atz  m x a  interpolation matrix where,
        m is the number of basis functions phi_k (one per vertex)
        a is the number of data attributes

        This algorithm uses a quad tree data structure for fast binning of
        data points.

        If AtA is None, the matrices AtA and Atz are created.

        This function can be called again and again, with sub-sets of
        the point coordinates.  Call fit to get the results.

        Preconditions
        z and points are numeric
        point_coordinates and mesh vertices have the same origin.

        The number of attributes of the data points does not change
        """

        # Build n x m interpolation matrix
        if self.AtA is None:
            # AtA and Atz need to be initialised.
            m = self.mesh.number_of_nodes
            if len(z.shape) > 1:
                att_num = z.shape[1]
                self.Atz = num.zeros((m,att_num), num.Float)
            else:
                att_num = 1
                self.Atz = num.zeros((m,), num.Float)
            assert z.shape[0] == point_coordinates.shape[0]

            AtA = Sparse(m,m)
            # The memory damage has been done by now.
        else:
            AtA = self.AtA # Did this for speed, did ~nothing
        self.point_count += point_coordinates.shape[0]


        inside_indices = inside_polygon(point_coordinates,
                                        self.mesh_boundary_polygon,
                                        closed=True,
                                        verbose=False) # Too much output if True


        n = len(inside_indices)

        # Compute matrix elements for points inside the mesh
        triangles = self.mesh.triangles # Shorthand
        for d, i in enumerate(inside_indices):
            # For each data_coordinate point
            # if verbose and d%((n+10)/10)==0: print 'Doing %d of %d' %(d, n)
            x = point_coordinates[i]
            element_found, sigma0, sigma1, sigma2, k = \
                           search_tree_of_vertices(self.root, self.mesh, x)

            if element_found is True:
                j0 = triangles[k,0] # Global vertex id for sigma0
                j1 = triangles[k,1] # Global vertex id for sigma1
                j2 = triangles[k,2] # Global vertex id for sigma2

                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
                js     = [j0,j1,j2]

                for j in js:
                    self.Atz[j] += sigmas[j]*z[i]
                    #print "self.Atz building", self.Atz
                    #print "self.Atz[j]", self.Atz[j]
                    #print " sigmas[j]", sigmas[j]
                    #print "z[i]", z[i]
                    #print "result", sigmas[j]*z[i]

                    for k in js:
                        AtA[j,k] += sigmas[j]*sigmas[k]
            else:
                # FIXME(Ole): This is the message referred to in ticket:314
                msg = 'Could not find triangle for point %s. ' % str(x)
                msg += 'Mesh boundary is %s' % str(self.mesh_boundary_polygon)
                raise Exception(msg)
            self.AtA = AtA


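    # Descriptive note (added): in matrix terms, if A is the n x m matrix with
    # A[i,j] = phi_j(x_i), the value of basis function j at data point i, then
    # the loop above accumulates AtA = A^T A and Atz = A^T z without ever
    # forming A itself.
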
    def fit(self, point_coordinates_or_filename=None, z=None,
            verbose=False,
            point_origin=None,
            attribute_name=None,
            max_read_lines=500):
        """Fit a smooth surface to given 1d array of data points z.

        The smooth surface is computed at each vertex in the underlying
        mesh using the formula given in the module doc string.

        Inputs:
        point_coordinates: The co-ordinates of the data points.
              List of coordinate pairs [x, y] of
              data points or an nx2 Numeric array or a Geospatial_data object
              or a points file name
        z: Single 1d vector or array of data at the point_coordinates.

        """

        # Use blocking to load in the point info
        if type(point_coordinates_or_filename) == types.StringType:
            msg = "Don't set a point origin when reading from a file"
            assert point_origin is None, msg
            filename = point_coordinates_or_filename

            G_data = Geospatial_data(filename,
                                     max_read_lines=max_read_lines,
                                     load_file_now=False,
                                     verbose=verbose)

            for i, geo_block in enumerate(G_data):
                if verbose is True and 0 == i%200:
                    # The time this will take
                    # is dependent on the number of triangles

                    print 'Processing Block %d' %i
                    # FIXME (Ole): It would be good to say how many blocks
                    # there are here. But this is no longer necessary
                    # for pts files as they are reported in geospatial_data.
                    # I suggest deleting this verbose output and making
                    # Geospatial_data more informative for txt files.
                    #
                    # I still think so (12/12/7, Ole).


                # Build the array
                points = geo_block.get_data_points(absolute=True)
                z = geo_block.get_attributes(attribute_name=attribute_name)
                self.build_fit_subset(points, z, verbose=verbose)

            point_coordinates = None
        else:
            point_coordinates = point_coordinates_or_filename

        if point_coordinates is None:
            if verbose: print 'Warning: no data points in fit'
            assert self.AtA is not None, 'no interpolation matrix'
            assert self.Atz is not None

            # FIXME (DSG) - do a message
        else:
            point_coordinates = ensure_absolute(point_coordinates,
                                                geo_reference=point_origin)
            # if isinstance(point_coordinates, Geospatial_data) and z is None:
            # z will come from the geo-ref
            self.build_fit_subset(point_coordinates, z, verbose)

        # Check sanity
        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
        n = self.point_count
        if n < m and self.alpha == 0.0:
            msg = 'ERROR (least_squares): Too few data points\n'
            msg += 'There are only %d data points and alpha == 0. ' %n
            msg += 'Need at least %d\n' %m
            msg += 'Alternatively, set smoothing parameter alpha to a small '
            msg += 'positive value,\ne.g. 1.0e-3.'
            raise TooFewPointsError(msg)

        self._build_coefficient_matrix_B(verbose)
        loners = self.mesh.get_lone_vertices()
        # FIXME - make this an error message.
        # Test with
        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
        if len(loners) > 0:
            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
            msg += 'All vertices should be part of a triangle.\n'
            msg += 'In the future this will be enforced.\n'
            msg += 'The following vertices are not part of a triangle:\n'
            msg += str(loners)
            print msg
            #raise VertsWithNoTrianglesError(msg)


        return conjugate_gradient(self.B, self.Atz, self.Atz,
                                  imax=2*len(self.Atz) )


    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
                              verbose=False):
        """Fit a smooth surface to given 1d array of data points z.

        The smooth surface is computed at each vertex in the underlying
        mesh using the formula given in the module doc string.

        Inputs:
        point_coordinates: The co-ordinates of the data points.
              List of coordinate pairs [x, y] of
              data points or an nx2 Numeric array or a Geospatial_data object
        z: Single 1d vector or array of data at the point_coordinates.
        attribute_name: Used to get the z values from the
              geospatial object. If no attribute_name is specified,
              it's a bit of a lucky dip as to which attribute you get.
              If there is only one attribute it will be that one.

        """

        # FIXME(DSG-DSG): Check that the vert and point coords
        # have the same zone.
        if isinstance(point_coordinates, Geospatial_data):
            point_coordinates = point_coordinates.get_data_points( \
                absolute = True)

        # Convert input to Numeric arrays
        if z is not None:
            z = ensure_numeric(z, num.Float)
        else:
            msg = 'z not specified'
            assert isinstance(point_coordinates, Geospatial_data), msg
            z = point_coordinates.get_attributes(attribute_name)

        point_coordinates = ensure_numeric(point_coordinates, num.Float)
        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)


############################################################################

def fit_to_mesh(point_coordinates, # this can also be a points file name
                vertex_coordinates=None,
                triangles=None,
                mesh=None,
                point_attributes=None,
                alpha=DEFAULT_ALPHA,
                verbose=False,
                acceptable_overshoot=1.01,
                # FIXME: Move to config - this value is assumed in caching test
                # FIXME(Ole): Just realised that this was never implemented
                # (29 Jan 2009). I suggest removing it altogether.
                mesh_origin=None,
                data_origin=None,
                max_read_lines=None,
                attribute_name=None,
                use_cache=False):
    """Wrapper around internal function _fit_to_mesh for use with caching.

    """

    args = (point_coordinates, )
    kwargs = {'vertex_coordinates': vertex_coordinates,
              'triangles': triangles,
              'mesh': mesh,
              'point_attributes': point_attributes,
              'alpha': alpha,
              'verbose': verbose,
              'acceptable_overshoot': acceptable_overshoot,
              'mesh_origin': mesh_origin,
              'data_origin': data_origin,
              'max_read_lines': max_read_lines,
              'attribute_name': attribute_name
              }

    if use_cache is True:
        if isinstance(point_coordinates, basestring):
            # We assume that point_coordinates is the name of a .csv/.txt
            # file which must be passed onto caching as a dependency
            # (in case it has changed on disk)
            dep = [point_coordinates]
        else:
            dep = None


        #from caching import myhash
        #import copy
        #print args
        #print kwargs
        #print 'hashing:'
        #print 'args', myhash( (args, kwargs) )
        #print 'again', myhash( copy.deepcopy( (args, kwargs)) )

        #print 'mesh hash', myhash( kwargs['mesh'] )

        #print '-------------------------'
        #print 'vertices hash', myhash( kwargs['mesh'].nodes )
        #print 'triangles hash', myhash( kwargs['mesh'].triangles )
        #print '-------------------------'

        #for key in mesh.__dict__:
        #    print key, myhash(mesh.__dict__[key])

        #for key in mesh.quantities.keys():
        #    print key, myhash(mesh.quantities[key])

        #import sys; sys.exit()

        return cache(_fit_to_mesh,
                     args, kwargs,
                     verbose=verbose,
                     compression=False,
                     dependencies=dep)
    else:
        return _fit_to_mesh(*args, **kwargs)

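# Illustrative call (a hedged sketch, not part of the original code; 'mesh'
# stands for an existing Mesh object and the points file and attribute names
# are hypothetical):
#
#     vertex_values = fit_to_mesh('bathymetry.pts',
#                                 mesh=mesh,
#                                 alpha=1.0e-6,
#                                 attribute_name='elevation',
#                                 use_cache=True,
#                                 verbose=True)
#
# With use_cache=True the file name is passed to the caching module as a
# dependency, so the fit is recomputed if the file changes on disk.
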
def _fit_to_mesh(point_coordinates, # this can also be a points file name
                 vertex_coordinates=None,
                 triangles=None,
                 mesh=None,
                 point_attributes=None,
                 alpha=DEFAULT_ALPHA,
                 verbose=False,
                 acceptable_overshoot=1.01,
                 mesh_origin=None,
                 data_origin=None,
                 max_read_lines=None,
                 attribute_name=None):
    """
    Fit a smooth surface to a triangulation,
    given data points with attributes.


    Inputs:
        vertex_coordinates: List of coordinate pairs [xi, eta] of
              points constituting a mesh (or an m x 2 Numeric array or
              a geospatial object)
              Points may appear multiple times
              (e.g. if vertices have discontinuities)

        triangles: List of 3-tuples (or a Numeric array) of
              integers representing indices of all vertices in the mesh.

        point_coordinates: List of coordinate pairs [x, y] of data points
              (or an nx2 Numeric array). This can also be a .csv/.txt/.pts
              file name.

        alpha: Smoothing parameter.

        acceptable_overshoot: NOT IMPLEMENTED
              Controls the allowed factor by which fitted values
              may exceed the value of input data. The lower limit is defined
              as min(z) - acceptable_overshoot*delta z and the upper limit
              as max(z) + acceptable_overshoot*delta z


        mesh_origin: A geo_reference object or 3-tuple consisting of
              UTM zone, easting and northing.
              If specified, vertex coordinates are assumed to be
              relative to this origin.


        point_attributes: Vector or array of data at the
                          point_coordinates.

    """

    # Duncan and Ole think that this isn't worth caching.
    # Caching happens at the higher level anyway.


    if mesh is None:
        # FIXME(DSG): Throw errors if triangles or vertex_coordinates
        # are None

        # Convert input to Numeric arrays
        triangles = ensure_numeric(triangles, num.Int)
        vertex_coordinates = ensure_absolute(vertex_coordinates,
                                             geo_reference = mesh_origin)

        if verbose: print 'FitInterpolate: Building mesh'
        mesh = Mesh(vertex_coordinates, triangles)
        mesh.check_integrity()

    interp = Fit(mesh=mesh,
                 verbose=verbose,
                 alpha=alpha)

    vertex_attributes = interp.fit(point_coordinates,
                                   point_attributes,
                                   point_origin=data_origin,
                                   max_read_lines=max_read_lines,
                                   attribute_name=attribute_name,
                                   verbose=verbose)


    # Add the value checking stuff that's in least squares.
    # Maybe this stuff should get pushed down into Fit,
    # or at least become a method of Fit.
    # Or integrate it into the fit method, saving the max and min
    # as attributes.

    return vertex_attributes


#def _fit(*args, **kwargs):
#    """Private function for use with caching. Reason is that classes
#    may change their byte code between runs which is annoying.
#    """
#
#    return Fit(*args, **kwargs)


def fit_to_mesh_file(mesh_file, point_file, mesh_output_file,
                     alpha=DEFAULT_ALPHA, verbose=False,
                     expand_search=False,
                     precrop=False,
                     display_errors=True):
    """
    Given a mesh file (tsh) and a point attribute file, fit
    point attributes to the mesh and write a mesh file with the
    results.

    Note: the points file needs titles. If you want anuga to use the tsh file,
    make sure the title is elevation.

    NOTE: Throws IOErrors, for a variety of file problems.

    """

    from load_mesh.loadASCII import import_mesh_file, \
         export_mesh_file, concatinate_attributelist


    try:
        mesh_dict = import_mesh_file(mesh_file)
    except IOError, e:
        if display_errors:
            print "Could not load bad file. ", e
        raise IOError  # Could not load bad mesh file.

    vertex_coordinates = mesh_dict['vertices']
    triangles = mesh_dict['triangles']
    if type(mesh_dict['vertex_attributes']) == num.ArrayType:
        old_point_attributes = mesh_dict['vertex_attributes'].tolist()
    else:
        old_point_attributes = mesh_dict['vertex_attributes']

    if type(mesh_dict['vertex_attribute_titles']) == num.ArrayType:
        old_title_list = mesh_dict['vertex_attribute_titles'].tolist()
    else:
        old_title_list = mesh_dict['vertex_attribute_titles']

    if verbose: print 'tsh file %s loaded' %mesh_file

    # Load in the points file
    try:
        geo = Geospatial_data(point_file, verbose=verbose)
    except IOError, e:
        if display_errors:
            print "Could not load bad file. ", e
        raise IOError  # Re-raise exception

    point_coordinates = geo.get_data_points(absolute=True)
    title_list, point_attributes = concatinate_attributelist( \
        geo.get_all_attributes())

    if mesh_dict.has_key('geo_reference') and \
           not mesh_dict['geo_reference'] is None:
        mesh_origin = mesh_dict['geo_reference'].get_origin()
    else:
        mesh_origin = None

    if verbose: print "points file loaded"
    if verbose: print "fitting to mesh"
    f = fit_to_mesh(point_coordinates,
                    vertex_coordinates,
                    triangles,
                    None,
                    point_attributes,
                    alpha = alpha,
                    verbose = verbose,
                    data_origin = None,
                    mesh_origin = mesh_origin)
    if verbose: print "finished fitting to mesh"

    # Convert array to list of lists
    new_point_attributes = f.tolist()
    # FIXME have this overwrite attributes with the same title - DSG
    # Put the newer attributes last
    if old_title_list != []:
        old_title_list.extend(title_list)
        # FIXME can this be done a faster way? - DSG
        for i in range(len(old_point_attributes)):
            old_point_attributes[i].extend(new_point_attributes[i])
        mesh_dict['vertex_attributes'] = old_point_attributes
        mesh_dict['vertex_attribute_titles'] = old_title_list
    else:
        mesh_dict['vertex_attributes'] = new_point_attributes
        mesh_dict['vertex_attribute_titles'] = title_list

    if verbose: print "exporting to file ", mesh_output_file

    try:
        export_mesh_file(mesh_output_file, mesh_dict)
    except IOError, e:
        if display_errors:
            print "Could not write file. ", e
        raise IOError
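

# Illustrative call (a hedged sketch, not part of the original code; the file
# names are hypothetical):
#
#     fit_to_mesh_file('domain.tsh', 'elevation.csv', 'domain_fitted.tsh',
#                      alpha=1.0e-6, verbose=True)
#
# This reads the mesh and point attribute files, fits each attribute to the
# mesh vertices and writes the augmented mesh to the output file.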