def conjugate_gradient(A, b, x0=None, imax=2000, tol=1.0e-8, iprint=0):
    #
    # Try to solve the linear system Ax = b with the
    # conjugate gradient method.
    #
    # Input
    #  A: matrix, or operator object whose * operator applies a matrix;
    #     assumed symmetric (positive definite)
    #  b: right-hand side
    #  x0: initial guess (defaults to the zero vector)
    #  imax: maximum number of iterations
    #  tol: relative tolerance on the residual norm
    #  iprint: print progress every iprint iterations (0 = silent)
    #
    # Output
    #  x: approximate solution
    #

    from Numeric import dot

    # Default initial guess: the zero vector (the intent of the original
    # MATLAB-style "if nargin<3" fragment).
    if x0 is None:
        x0 = 0.0 * b

    if iprint:
        print "A shape", A.shape
        print "b shape", b.shape
        print "x0 shape", x0.shape

    i = 0
    x = x0
    r = b - A * x              # initial residual
    d = r                      # first search direction is the residual
    rTr = dot(r, r)
    rTr0 = rTr                 # reference value for the relative stopping test

    # Iterate until the squared residual norm has dropped below tol**2 times
    # its initial value, or imax iterations have been taken.
    while i < imax and rTr > tol**2 * rTr0:
        q = A * d
        alpha = rTr / dot(d, q)
        x = x + alpha * d
        if i % 50 == 0:
            # Every 50 iterations recompute the residual from its definition
            # to limit accumulated round-off error.
            r = b - A * x
        else:
            # Otherwise use the cheap recursive residual update.
            r = r - alpha * q
        rTrOld = rTr
        rTr = dot(r, r)
        bt = rTr / rTrOld      # Fletcher-Reeves coefficient

        d = r + bt * d         # new conjugate search direction
        i = i + 1
        if iprint and i % iprint == 0:
            print 'i = %d  rTr = %20.15e' % (i, rTr)
    if i == imax:
        print 'max number of iterations attained'

    return x
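
# ---------------------------------------------------------------------------
# Usage sketch (an addition, not part of the original solver): solve a tiny
# symmetric positive definite system.  The DenseOperator wrapper below is a
# hypothetical helper, assumed here only so that A supports the A*x
# matrix-vector product the solver expects; a plain Numeric array would
# multiply elementwise instead.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    from Numeric import array, dot

    class DenseOperator:
        # Wrap a dense symmetric Numeric array so that * applies the matrix.
        def __init__(self, M):
            self.M = M
            self.shape = M.shape

        def __mul__(self, v):
            return dot(self.M, v)

    A = DenseOperator(array([[4.0, 1.0],
                             [1.0, 3.0]]))
    b = array([1.0, 2.0])

    x = conjugate_gradient(A, b, imax=100, tol=1.0e-10, iprint=1)

    r = b - A * x
    print "solution       ", x
    print "residual norm^2", dot(r, r)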