\chapter{Elements of Linear Algebra}
\label{ch:elements-of-linear-algebra}

\begin{remark}[Vector]
  Let $u$ be a vector; we will use the notations $u$ and $\vec{u}$ interchangeably.
\end{remark}

Let $u = \begin{pmatrix} u_1 \\ \vdots \\ u_n \end{pmatrix}$ and $v = \begin{pmatrix} v_1 \\ \vdots \\ v_n \end{pmatrix}$.
Then
\begin{align*}
  \langle u, v\rangle & = \left(u_1, \ldots, u_n\right) \begin{pmatrix} v_1 \\ \vdots \\ v_n \end{pmatrix} \\
  & = u_1 v_1 + u_2 v_2 + \ldots + u_n v_n
\end{align*}

\begin{definition}[Norm]
  The norm of a vector is its length:
  \[ \norm{u} = \sqrt{\scalar{u, u}}, \]
  and $\norm{u} \geq 0$, with equality if and only if $u = 0$.
\end{definition}

\begin{definition}[Distance]
  \[ \operatorname{dist}(u, v) = \norm{u - v} \]
\end{definition}

\begin{definition}[Orthogonality]
  \[ u \perp v \Leftrightarrow \scalar{u, v} = 0 \]
\end{definition}

\begin{remark}
  \[ \operatorname{dist}(u, v)^2 = \norm{u - v}^2 = \scalar{v - u, v - u}. \]
\end{remark}

Scalar product properties:
\begin{itemize}
  \item $\scalar{u, v} = \scalar{v, u}$ (symmetry)
  \item $\scalar{u + v, w} = \scalar{u, w} + \scalar{v, w}$ (additivity)
  \item $\scalar{\lambda u, v} = \lambda \scalar{u, v}$ (homogeneity)
  \item $\scalar{\vec{u}, \vec{v}} = \norm{\vec{u}} \times \norm{\vec{v}} \times \cos(\widehat{\vec{u}, \vec{v}})$
\end{itemize}

Expanding the scalar product of a difference with itself:
\begin{align*}
  \scalar{v - u, v - u} & = \scalar{v, v} + \scalar{u, u} - 2 \scalar{u, v} \\
  & = \norm{v}^2 + \norm{u}^2 - 2 \scalar{u, v}
\end{align*}
More generally,
\begin{align*}
  \norm{u - v}^2 & = \norm{u}^2 + \norm{v}^2 - 2 \scalar{u,v} \\
  \norm{u + v}^2 & = \norm{u}^2 + \norm{v}^2 + 2 \scalar{u,v}
\end{align*}

If $\norm{u - v} = \norm{u + v}$, then $\scalar{u, v} = 0$, that is, $u \perp v$.
\begin{proof}[Indeed]
  Using the two expansions above, $\norm{u - v}^2 = \norm{u + v}^2$
  \begin{align*}
    \Leftrightarrow \ & -2 \scalar{u, v} = 2 \scalar{u, v} \\
    \Leftrightarrow \ & 4 \scalar{u, v} = 0 \\
    \Leftrightarrow \ & \scalar{u, v} = 0 \qedhere
  \end{align*}
\end{proof}

\begin{theorem}[Pythagorean theorem]
  If $u \perp v$, then $\norm{u + v}^2 = \norm{u}^2 + \norm{v}^2$.
\end{theorem}

\begin{definition}[Orthogonal Projection]
\end{definition}

Let $y = \begin{pmatrix} y_1 \\ \vdots
\\ y_n \end{pmatrix} \in \RR[n]$ and $w$ a subspace of $\RR[n]$.
Then $y$ can be written as the orthogonal projection of $y$ on $w$ plus a remainder:
\[ y = \operatorname{proj}^w(y) + z, \]
where
\[ \begin{cases} z \in w^\perp \\ \operatorname{proj}^w(y) \in w \end{cases} \]
This decomposition is unique: there is only one such pair $(\operatorname{proj}^w(y), z)$.
The scalar product between $z$ and any vector of $w$ is zero.

\begin{property}
  $\operatorname{proj}^w(y)$ is the closest vector to $y$ that belongs to $w$.
\end{property}

\begin{definition}[Matrix]
  A matrix is a map (an \emph{application}), that is, a linear function that transforms one vector into another.
\end{definition}

\begin{example}[Matrix application]
  Let $A$ be a matrix:
  \[ A = \begin{pmatrix} a & b \\ c & d \end{pmatrix} \]
  and
  \[ x = \begin{pmatrix} x_1 \\ x_2 \end{pmatrix} \]
  Then,
  \begin{align*}
    Ax & = \begin{pmatrix} a & b \\ c & d \end{pmatrix} \begin{pmatrix} x_1 \\ x_2 \end{pmatrix} \\
    & = \begin{pmatrix} a x_1 + b x_2 \\ c x_1 + d x_2 \end{pmatrix}
  \end{align*}
  Similarly,
  \begin{align*}
    \begin{pmatrix} a & b & c & d \\ e & f & g & h \\ i & j & k & l \end{pmatrix}
    \begin{pmatrix} x_1 \\ x_2 \\ x_3 \\ x_4 \end{pmatrix}
    & = \begin{pmatrix}
      a x_1 + b x_2 + c x_3 + d x_4 \\
      e x_1 + f x_2 + g x_3 + h x_4 \\
      i x_1 + j x_2 + k x_3 + l x_4
    \end{pmatrix}
  \end{align*}
\end{example}

The number of columns of the matrix has to be the same as the dimension of the vector to which the matrix is applied.

\begin{definition}[Transpose of a Matrix]
  Let $A = \begin{pmatrix} a & b \\ c & d \end{pmatrix}$, then $A^\T = \begin{pmatrix} a & c \\ b & d \end{pmatrix}$
\end{definition}

\begin{example}
  \begin{align*}
    Y & = X \beta + \varepsilon \\
    \begin{pmatrix} y_1 \\ y_2 \\ y_3 \\ y_4 \end{pmatrix}
    & = \begin{pmatrix}
      1 & x_{11} & x_{12} \\
      1 & x_{21} & x_{22} \\
      1 & x_{31} & x_{32} \\
      1 & x_{41} & x_{42}
    \end{pmatrix}
    \begin{pmatrix} \beta_0 \\ \beta_1 \\ \beta_2 \end{pmatrix}
    + \begin{pmatrix} \varepsilon_1 \\ \varepsilon_2 \\ \varepsilon_3 \\ \varepsilon_4 \end{pmatrix}
  \end{align*}
\end{example}