Diffstat (limited to 'notes')
-rw-r--r-- | notes/Sep-11.org | 17
-rw-r--r-- | notes/Sep-13.org | 16
-rw-r--r-- | notes/Sep-15.org | 52
-rw-r--r-- | notes/Sep-20.org | 21
-rw-r--r-- | notes/Sep-22.org | 45
-rw-r--r-- | notes/Sep-25.org | 48
6 files changed, 191 insertions, 8 deletions
diff --git a/notes/Sep-11.org b/notes/Sep-11.org
index 1568618..3d71f2f 100644
--- a/notes/Sep-11.org
+++ b/notes/Sep-11.org
@@ -31,17 +31,18 @@ Table of Errors
   (lizfcm.utils:table
    (:headers '("u" "v" "e_{abs}" "e_{rel}")
     :domain-order (u v)
     :domain-values *u-v*)
-   (fround (eabs u v) 4)
-   (fround (erel u v) 4))
+   (eabs u v)
+   (erel u v))
 #+END_SRC
 
 #+RESULTS:
-| u    | v    | e_{abs} | e_{rel} |
-| 1    | 0.99 | 0.0     | 0.0     |
-| 1    | 1.01 | 0.0     | 0.0     |
-| -1.5 | -1.2 | 0.0     | 0.0     |
-| 100  | 99.9 | 0.0     | 0.0     |
-| 100  | 99   | 0.0     | 0.0     |
+| u    | v    | e_{abs}     | e_{rel}      |
+| 1    | 0.99 | 0.00999999  | 0.00999999   |
+| 1    | 1.01 | 0.00999999  | 0.00999999   |
+| -1.5 | -1.2 | 0.29999995  | 0.19999997   |
+| 100  | 99.9 | 0.099998474 | 0.0009999848 |
+| 100  | 99   | 1           | 1/100        |
+
 Look at $u \approx 0$ then $v \approx 0$: $e_{abs}$ is the better error measure since $e_{rel}$ is high.
 
diff --git a/notes/Sep-13.org b/notes/Sep-13.org
new file mode 100644
index 0000000..0ebff2b
--- /dev/null
+++ b/notes/Sep-13.org
@@ -0,0 +1,16 @@
+* Homework 2
+1. maceps - single precision
+
+2. maceps - double precision
+
+3. 2-norm of a vector
+
+4. 1-norm of a vector
+
+5. infinity-norm of a vector (max-norm)
+
+6. 2-norm distance between 2 vectors
+
+7. 1-norm distance between 2 vectors
+
+8. infinity-norm distance between 2 vectors
diff --git a/notes/Sep-15.org b/notes/Sep-15.org
new file mode 100644
index 0000000..d5bf371
--- /dev/null
+++ b/notes/Sep-15.org
@@ -0,0 +1,52 @@
+* Taylor Series Approx.
+Suppose $f$ has $\infty$-many derivatives near a point $a$. Then the Taylor series is given by
+
+$f(x) = \Sigma_{n=0}^{\infty} \frac{f^{(n)}(a)}{n!}(x-a)^n$
+
+In increment notation we can write
+
+$f(a + h) = f(a) + f'(a)(a + h - a) + \dots$
+
+$= \Sigma_{n=0}^{\infty} \frac{f^{(n)}(a)}{n!} h^n$
+
+Consider the approximation
+
+$e = |f'(a) - \frac{f(a + h) - f(a)}{h}| = |f'(a) - \frac{1}{h}(f(a + h) - f(a))|$
+
+Substituting the series for $f(a + h)$,
+
+$= |f'(a) - \frac{1}{h}((f(a) + f'(a) h + \frac{f''(a)}{2} h^2 + \cdots) - f(a))|$
+
+$f(a) - f(a) = 0$; distribute the $\frac{1}{h}$:
+
+$= |-\frac{1}{2} f''(a) h - \frac{1}{6}f'''(a)h^2 - \cdots|$
+
+** With Remainder
+By Taylor's Theorem with remainder, for some $u$ between $a$ and $a + h$, $f(a + h) = f(a) + f'(a)h + \frac{1}{2}f''(u)h^2$
+
+and so the error is $e = |f'(a) - \frac{f(a + h) - f(a)}{h}| = |\frac{h}{2}f''(u)|$
+
+- [[https://openstax.org/books/calculus-volume-2/pages/6-3-taylor-and-maclaurin-series]]
+
+  > Taylor's Theorem w/ Remainder
+
+
+** Of Derivatives
+
+Again, $f'(a) \approx \frac{f(a+h) - f(a)}{h}$,
+
+$e = |\frac{h}{2} f''(a) + \frac{h^2}{3!} f'''(a) + \cdots|$
+
+$R_2 = \frac{h}{2} f''(u)$
+
+$|\frac{h}{2} f''(u)| \leq M h^1$
+
+$M = \frac{1}{2}|f''(u)|$
+
+*** Another approximation
+
+$\text{err} = |f'(a) - \frac{f(a) - f(a - h)}{h}|$
+
+$= |f'(a) - \frac{1}{h}(f(a) - (f(a) + f'(a)((a - h) - a) + \frac{1}{2}f''(a)((a - h) - a)^2 + \cdots))|$
+
+$= |f'(a) - \frac{1}{h}(f'(a)h - \frac{1}{2}f''(a)h^2 + \cdots)| = |\frac{1}{2}f''(a)h - \cdots|$
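A minimal sketch of the forward-difference bound derived above, assuming $f = \sin$ and $a = 1$ (illustrative choices, not from the notes); the ratio $e/h$ should level off near $\frac{1}{2}|f''(a)| = \frac{1}{2}\sin(1) \approx 0.42$:

#+BEGIN_SRC python
  import math

  def forward_diff(f, a, h):
      """Forward-difference approximation of f'(a)."""
      return (f(a + h) - f(a)) / h

  a = 1.0
  exact = math.cos(a)                      # f'(a) for f = sin
  for h in [10**-k for k in range(1, 6)]:
      e = abs(forward_diff(math.sin, a, h) - exact)
      # e/h should approach |f''(a)|/2 = sin(1)/2 ~ 0.42
      print(f"h = {h:.0e}   e = {e:.3e}   e/h = {e/h:.3f}")
#+END_SRC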
diff --git a/notes/Sep-20.org b/notes/Sep-20.org
new file mode 100644
index 0000000..ba067bb
--- /dev/null
+++ b/notes/Sep-20.org
@@ -0,0 +1,21 @@
+* Review & Summary
+Approximate $f'(a)$ with
+
++ forward difference $f'(a) \approx \frac{f(a+h) - f(a)}{h}$
+
++ backward difference $f'(a) \approx \frac{f(a) - f(a-h)}{h}$
+
++ central difference $f'(a) \approx \frac{f(a+h) - f(a-h)}{2h}$
+
+** Taylor Series
+given $C = \frac{1}{2}|f''(\xi)|$,
+
+with f.d. $e_{\text{abs}} \leq Ch^1$
+
+b.d. $e_{\text{abs}} \leq Ch^1$
+
+c.d. $e_{\text{abs}} \leq Ch^2$ (with a constant involving $f'''$ instead)
+
+in general $e_{\text{abs}} \leq Ch^r$, so
+
+$\log(e(h)) \leq \log(Ch^r) = \log(C) + \log(h^r) = \log(C) + r\log(h)$
diff --git a/notes/Sep-22.org b/notes/Sep-22.org
new file mode 100644
index 0000000..b631e3b
--- /dev/null
+++ b/notes/Sep-22.org
@@ -0,0 +1,45 @@
+* Regression
+Consider the generic problem of fitting a dataset to a linear polynomial:
+
+given discrete data $f: x \rightarrow y$,
+
+interpolation: $y = a + bx$
+
+$\begin{bmatrix} 1 & x_0 \\ 1 & x_1 \\ \vdots & \vdots \\ 1 & x_n \end{bmatrix} \begin{bmatrix} a \\ b \end{bmatrix} = \begin{bmatrix} y_0 \\ y_1 \\ \vdots \\ y_n \end{bmatrix}$
+
+Let $p$ be the projection of $y$ onto $\text{col}(A)$;
+
+then $y = p + q$ for some $q$ orthogonal to $\text{col}(A)$.
+
+Any $n \in \text{col}(A)$ can be written as $Az$, and $n$ must be orthogonal to $q$ as well:
+
+$(Az)^T q = 0 = (Az)^T (y - p)$
+
+With $p = Ax$ (the least-squares fit),
+
+$0 = (z^T A^T)(y - Ax) = z^T (A^T y - A^T A x)$
+
+Since $z$ is arbitrary, the normal equations follow:
+
+$A^T A x = A^T y$
+
+$A^T A = \begin{bmatrix} n+1 & \Sigma_{i=0}^{n} x_i \\ \Sigma_{i=0}^{n} x_i & \Sigma_{i=0}^{n} x_i^2 \end{bmatrix}$
+
+$A^T y = \begin{bmatrix} \Sigma_{i=0}^{n} y_i \\ \Sigma_{i=0}^{n} x_i y_i \end{bmatrix}$
+
+a_{11} = n+1
+a_{12} = \Sigma_{i=0}^{n} x_i
+a_{21} = a_{12}
+a_{22} = \Sigma_{i=0}^{n} x_i^2
+b_1 = \Sigma_{i=0}^{n} y_i
+b_2 = \Sigma_{i=0}^{n} x_i y_i
+
+then apply this with:
+
+$\log(e(h)) \leq \log(C) + r\log(h)$
+
+* Homework 3
+
+two columns (x, y) \Rightarrow coefficients for linear regression
diff --git a/notes/Sep-25.org b/notes/Sep-25.org
new file mode 100644
index 0000000..b2d63e3
--- /dev/null
+++ b/notes/Sep-25.org
@@ -0,0 +1,48 @@
+ex: $\text{erf}(x) = \frac{2}{\sqrt{\pi}} \int_{0}^{x} e^{-t^2}\,dt$
+ex: IVP $\frac{dP}{dt} = \alpha P - \beta P^2$
+    $P(0) = P_0$
+
+Explicit Euler Method
+
+$\frac{P(t + \Delta t) - P(t)}{\Delta t} \approx \alpha P(t) - \beta P^2(t)$
+
+From $0 \rightarrow T$,
+$P(T)$ is reached in $n$ steps of size $\Delta t = T/n$.
+
+* Steps
+** Calculus: difference quotient
+$f'(a) \approx \frac{f(a+h) - f(a)}{h}$
+
+** Test.
+Roundoff error dominates for $h \approx 0$.
+
+** Calculus: Taylor Series w/ Remainder
+$e_{abs}(h) \leq Ch^r$
+
+(see Sep-20, Taylor Series)
+
+* Pseudo Code
+#+BEGIN_SRC python
+  def linear_fit(x, y):
+      """Least-squares fit y ~ c + d*x via the 2x2 normal equations."""
+      n = len(x) - 1                    # points are indexed 0..n
+      a11 = n + 1
+      a12, a22 = x[0], x[0] ** 2
+      b1, b2 = y[0], y[0] * x[0]
+      for i in range(n):
+          a12 = a12 + x[i + 1]
+          a22 = a22 + x[i + 1] ** 2
+          b1 = b1 + y[i + 1]
+          b2 = b2 + y[i + 1] * x[i + 1]
+      a21 = a12
+      # solve [[a11 a12], [a21 a22]] [c d]^T = [b1 b2]^T by Cramer's rule
+      detA = a22 * a11 - a12 * a21
+      c = (a22 * b1 - a12 * b2) / detA
+      d = (-a21 * b1 + a11 * b2) / detA
+      return (c, d)
+#+END_SRC
+
+* Error
+We want
+$e_k = |df(h_k) - f'(a)|$, where $df(h)$ is the difference-quotient approximation with step $h$
+
+$= |df(h_k) - df(h_m) + df(h_m) - f'(a)|$
+
+$\leq |df(h_k) - df(h_m)| + |df(h_m) - f'(a)|$, and $|df(h_m) - f'(a)|$ is negligible for the smallest step $h_m$, so $e_k \approx |df(h_k) - df(h_m)|$
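Tying the pieces together — the forward difference, the surrogate error $e_k \approx |df(h_k) - df(h_m)|$ from the Error section, and the fit of $\log e$ against $\log h$ to read off the order $r$ — here is a self-contained sketch; $f = \sin$, $a = 1$, and the inline linear_fit (which mirrors the Sep-25 pseudocode) are illustrative assumptions, not from the notes:

#+BEGIN_SRC python
  import math

  def df(h, f=math.sin, a=1.0):
      """Forward-difference approximation of f'(a) with step h."""
      return (f(a + h) - f(a)) / h

  def linear_fit(xs, ys):
      """Least-squares line ys ~ c + d*xs via the 2x2 normal equations."""
      m = len(xs)
      sx, sy = sum(xs), sum(ys)
      sxx = sum(x * x for x in xs)
      sxy = sum(x * y for x, y in zip(xs, ys))
      det = m * sxx - sx * sx
      c = (sxx * sy - sx * sxy) / det
      d = (m * sxy - sx * sy) / det
      return c, d

  hs = [10**-k for k in range(1, 6)]                  # h_1 > h_2 > ... > h_m
  errs = [abs(df(h) - df(hs[-1])) for h in hs[:-1]]   # e_k ~ |df(h_k) - df(h_m)|
  logC, r = linear_fit([math.log(h) for h in hs[:-1]],
                       [math.log(e) for e in errs])
  print(f"estimated order r = {r:.2f}")               # forward difference: expect r ~ 1
#+END_SRC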
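The Explicit Euler setup for the logistic IVP at the top of Sep-25 can likewise be sketched; the parameter values $\alpha = 0.1$, $\beta = 0.001$, $P_0 = 10$, $T = 100$ below are purely illustrative, not from the notes:

#+BEGIN_SRC python
  def euler_logistic(alpha, beta, P0, T, n):
      """Explicit Euler for dP/dt = alpha*P - beta*P^2, P(0) = P0, over [0, T] in n steps."""
      dt = T / n
      P = P0
      for _ in range(n):
          P = P + dt * (alpha * P - beta * P * P)
      return P

  # illustrative parameters only; the carrying capacity alpha/beta here is 100,
  # so P(T) should approach 100 as n grows
  print(euler_logistic(alpha=0.1, beta=0.001, P0=10.0, T=100.0, n=1000))
#+END_SRC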