Conjugate Gradient Example 2:

In this example, two different preconditioners are used to solve a sparse positive definite linear system arising from a finite difference solution of Laplace's equation on a regular c \times c grid, c = 50. The coefficient matrix is A = E(c^2, c). For the first solution, Jacobi preconditioning with M = \mathrm{diag}(A) is used; the number of iterations required and the maximum absolute error are printed. Next, a more elaborate preconditioner, the symmetric tridiagonal part of A, is used, and again the number of iterations and the maximum absolute error are printed. The number of iterations is substantially reduced.
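
For reference, the matrix E(n, c) assembled in the constructor below has five nonzero bands. The following is a sketch of its entries, consistent with the Set calls in the code (0-based indices):

E(n, c)_{ij} =
\begin{cases}
4 & \text{if } i = j, \\
-1 & \text{if } |i - j| = 1 \text{ or } |i - j| = c, \\
0 & \text{otherwise,}
\end{cases}
\qquad 0 \le i, j < n.

The second preconditioner keeps only the tridiagonal part of A, that is, M_{ij} = A_{ij} for |i - j| \le 1 and M_{ij} = 0 otherwise; it is factored once with SparseCholesky in the constructor and reused at every iteration.
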
using System;
using Imsl.Math;

public class ConjugateGradientEx2 : ConjugateGradient.IPreconditioner
{
	private SparseMatrix A;
	private SparseCholesky M;
	
	public ConjugateGradientEx2(int n, int c)
	{
		
		// Create matrix E(n,c), n>1, 1<c<n-1
		// See Osterby and Zlatev(1982), pp. 7-8
		A = new SparseMatrix(n, n);
		for (int j = 0; j < n; j++)
		{
			if (j - c >= 0)
				A.Set(j, j - c, -1.0);
			if (j - 1 >= 0)
				A.Set(j, j - 1, -1.0);
			A.Set(j, j, 4.0);
			if (j + 1 < n)
				A.Set(j, j + 1, -1.0);
			if (j + c < n)
				A.Set(j, j + c, -1.0);
		}
		
		// Create and factor preconditioning matrix
		SparseMatrix C = new SparseMatrix(n, n);
		for (int j = 0; j < n; j++)
		{
			if (j - 1 >= 0)
				C.Set(j, j - 1, -1.0);
			C.Set(j, j, 4.0);
			if (j + 1 < n)
				C.Set(j, j + 1, -1.0);
		}
		M = new SparseCholesky(C);
		M.FactorSymbolically();
		M.FactorNumerically();
	}
	
	// Computes the matrix-vector product z = A*p for the conjugate gradient iteration
	public void Amultp(double[] p, double[] z)
	{
		double[] w = A.Multiply(p);
		Array.Copy(w, 0, z, 0, w.Length);
	}
	
	// Applies the preconditioner by solving M*z = r with the Cholesky factorization
	public void Preconditioner(double[] r, double[] z)
	{
		double[] w = M.Solve(r);
		Array.Copy(w, 0, z, 0, w.Length);
	}
	
	public static void Main(String[] args)
	{
		int n = 2500;
		int c = 50;
		
		ConjugateGradientEx2 atp = new ConjugateGradientEx2(n, c);
		
		// Set a predetermined answer and diagonal
		double[] expected = new double[n];
		double[] diagonal = new double[n];
		for (int i = 0; i < n; i++)
		{
			expected[i] = (double) (i % 5);
			diagonal[i] = 4.0;
		}
		
		// Get right-hand side
		double[] b = new double[n];
		atp.Amultp(expected, b);
		
		// Solve system with Jacobi preconditioning
		ConjugateGradient cgJacobi = new ConjugateGradient(n, atp);
		cgJacobi.SetJacobi(diagonal);
		double[] solution = cgJacobi.Solve(b);
		
		// Compute the infinity norm of (computed solution - exact solution) and print results
		double norm = 0.0;
		for (int i = 0; i < n; i++)
		{
			norm = Math.Max(norm, Math.Abs(solution[i] - expected[i]));
		}
		Console.Out.WriteLine("Jacobi preconditioning");
		Console.Out.WriteLine("Iterations= " + cgJacobi.Iterations + ", norm= "
         + norm);
		Console.Out.WriteLine();
		
		
		// Solve the same system with the Cholesky-factored tridiagonal preconditioner
		ConjugateGradient cgCholesky = new ConjugateGradient(n, atp);
		solution = cgCholesky.Solve(b);

		norm = 0.0;
		for (int i = 0; i < n; i++)
		{
			norm = Math.Max(norm, Math.Abs(solution[i] - expected[i]));
		}
		Console.Out.WriteLine("More general preconditioning");
		Console.Out.WriteLine("Iterations= " + cgCholesky.Iterations + ", norm= "
         + norm);
	}
}

Output

Jacobi preconditioning
Iterations= 187, norm= 4.46344072813076E-10

More general preconditioning
Iterations= 127, norm= 5.13725062489812E-10
