07: Implement the Conjugate Gradient algorithm

Finishes task 5, but adds a TODO!
This commit is contained in:
2022-02-18 17:32:13 +01:00
parent 60e9accc80
commit 1927c04bfb

View File

@ -574,6 +574,7 @@
" assert len(A.shape) == 2 and A.shape[0] == A.shape[1]\n", " assert len(A.shape) == 2 and A.shape[0] == A.shape[1]\n",
" \n", " \n",
" n = len(A)\n", " n = len(A)\n",
" \n",
" x_cur = np.zeros(n)\n", " x_cur = np.zeros(n)\n",
" x_prev = np.zeros(n)\n", " x_prev = np.zeros(n)\n",
" \n", " \n",
@ -583,7 +584,7 @@
" x_prev = x_cur.copy()\n", " x_prev = x_cur.copy()\n",
" \n", " \n",
" v = b - A @ x_prev\n", " v = b - A @ x_prev\n",
" t = np.dot(v,v)/np.dot(v, A @ v)\n", " t = np.dot(v, v)/np.dot(v, A @ v)\n",
" x_cur = x_prev.copy() + t*v\n", " x_cur = x_prev.copy() + t*v\n",
" \n", " \n",
" return x_cur, k" " return x_cur, k"
@ -705,7 +706,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 9,
"metadata": { "metadata": {
"deletable": false, "deletable": false,
"nbgrader": { "nbgrader": {
@ -722,14 +723,41 @@
}, },
"outputs": [], "outputs": [],
"source": [ "source": [
"def CG(A, b, eps):\n", "def CG(A, b, eps, k_max = 10000):\n",
" # YOUR CODE HERE\n", " \"\"\"\n",
" raise NotImplementedError()" " Return the Conjugate Gradient algorithm estimate solution x to the problem\n",
" Ax = b and the number of iterations k it took to decrease maximum\n",
" norm error below eps or to exceed iteration maximum k_max.\n",
" \"\"\"\n",
" \n",
" # Assert n by n matrix.\n",
" assert len(A.shape) == 2 and A.shape[0] == A.shape[1]\n",
" \n",
" n = len(A)\n",
" \n",
" x_cur = np.zeros(n)\n",
" x_prev = x_cur.copy()\n",
" r_cur = b - A @ x_cur\n",
" v = r_cur\n",
" \n",
" k = 0\n",
" while diff(x_cur, x_prev) > eps and k < k_max or k == 0:\n",
" k += 1\n",
" x_prev = x_cur.copy()\n",
" r_prev = r_cur\n",
" \n",
" t = np.dot(r_prev, r_prev)/np.dot(v, A @ v)\n",
" x_cur = x_prev + t*v\n",
" r_cur = r_prev - t*A @ v\n",
" s = np.dot(r_cur, r_cur)/np.dot(r_prev, r_prev)\n",
" v = r_cur + s*v\n",
" \n",
" return x_cur, k"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 10,
"metadata": { "metadata": {
"deletable": false, "deletable": false,
"nbgrader": { "nbgrader": {
@ -744,11 +772,56 @@
"task": false "task": false
} }
}, },
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Starting with A =\n",
"[[10 -1 2 0]\n",
" [-1 11 -1 3]\n",
" [ 2 -1 10 -1]\n",
" [ 0 3 -1 8]]\n",
"and b = [ 6 25 -11 15]\n",
"We apply the Conjugate Gradient algorithm to solve Ax = b.\n",
"\n",
"For eps = 1e-01\tafter k = 4\t iterations:\n",
"x =\t\t\t [ 1. 2. -1. 1.]\n",
"Ax =\t\t\t [ 6. 25. -11. 15.]\n",
"diff(Ax, b) =\t\t 1.7763568394002505e-15\n",
"diff(x, x_exact) =\t 2.220446049250313e-16\n",
"\n",
"For eps = 1e-02\tafter k = 5\t iterations:\n",
"x =\t\t\t [ 1. 2. -1. 1.]\n",
"Ax =\t\t\t [ 6. 25. -11. 15.]\n",
"diff(Ax, b) =\t\t 1.7763568394002505e-15\n",
"diff(x, x_exact) =\t 2.220446049250313e-16\n",
"\n",
"For eps = 1e-03\tafter k = 5\t iterations:\n",
"x =\t\t\t [ 1. 2. -1. 1.]\n",
"Ax =\t\t\t [ 6. 25. -11. 15.]\n",
"diff(Ax, b) =\t\t 1.7763568394002505e-15\n",
"diff(x, x_exact) =\t 2.220446049250313e-16\n",
"\n",
"For eps = 1e-04\tafter k = 5\t iterations:\n",
"x =\t\t\t [ 1. 2. -1. 1.]\n",
"Ax =\t\t\t [ 6. 25. -11. 15.]\n",
"diff(Ax, b) =\t\t 1.7763568394002505e-15\n",
"diff(x, x_exact) =\t 2.220446049250313e-16\n",
"\n"
]
}
],
"source": [ "source": [
"def test_CG():\n", "def test_CG():\n",
" # YOUR CODE HERE\n", " \"\"\"\n",
" raise NotImplementedError()\n", " Check that CG returns solutions for the example system Ax = b\n",
" within the error defined by the same eps as used for the iteration.\n",
" \"\"\"\n",
" \n",
" # TODO: Do we need to plot the error over epsilon like in task 4?\n",
" \n",
" return test_alg(CG, \"Conjugate Gradient\")\n",
" \n", " \n",
"test_CG()" "test_CG()"
] ]