\documentclass[prb, notitlepage, aps, 10pt]{revtex4-2}
\usepackage[utf8]{inputenc}
\usepackage{listings}
\usepackage{amssymb}
\usepackage{amsmath}
\usepackage{amsfonts}
\usepackage{graphicx}
\usepackage{enumitem}
\usepackage{nicefrac}
\usepackage{comment}
\usepackage{bm}
\newcommand{\norm}[1]{\left\lVert#1\right\rVert}
\usepackage{hyperref}
\hypersetup{
    colorlinks=true,
    linkcolor=blue,
    filecolor=magenta,
    urlcolor=cyan,
    pdfpagemode=FullScreen,
}
\lstset{language=Python, frame=lines, basicstyle=\ttfamily}

\begin{document}

\title{\texorpdfstring{Numerical Methods, Fall 2022\\ Assignment 2 [SVD decomposition with applications] \\ Total: 40, Deadline: 21 Oct}{Numerical Methods, Fall 2022, Assignment 2}}
\maketitle

\section*{Suggested Reading}
\begin{itemize}
    \item Lectures 4--5 of \cite{trefethen1997numerical}
    \item Lecture 2 of \cite{tyrtyshnikov2012brief}
    \item \href{https://stats.stackexchange.com/questions/2691/making-sense-of-principal-component-analysis-eigenvectors-eigenvalues}{Making sense of principal component analysis, eigenvectors and eigenvalues}
\end{itemize}

\section*{Exercises}
\begin{enumerate}
\begin{comment}
\item (5) Construct (manually) the SVD of the following matrices:
$$
(a)\quad\begin{bmatrix} 3 & 0\\ 0 & -2 \end{bmatrix},\quad
(b)\quad\begin{bmatrix} 0 & 2\\ 0 & 0\\ 0 & 0 \end{bmatrix},\quad
(c)\quad\begin{bmatrix} 1 & 1\\ 1 & 1 \end{bmatrix}.
$$
For case (b), construct both the full and the reduced SVD via \texttt{np.linalg.svd}.
\end{comment}

\item (10) In this exercise, we will explore the three main algorithms available in scientific \lstinline{python} distributions for computing the SVD: \lstinline{numpy.linalg.svd}, \lstinline{scipy.sparse.linalg.svds} and \lstinline{sklearn.utils.extmath.randomized_svd} (a minimal sketch of the corresponding calls is given at the end of this exercise). To this end:
\begin{itemize}
    \item Construct a random $n\times n$ matrix $A$ (with i.i.d.\ elements sampled from the standard normal distribution); take $n=2000$.
    \item Using these three implementations, construct rank-$2$ approximations to $A$. You will thus obtain three rank-$2$ matrices $A_\textrm{svd}$, $A_\textrm{svds}$ and $A_\textrm{rsvd}$. Measure the run time of the three algorithms for this task.
    \item Compute the error norms $\Vert A-A_\textrm{svd}\Vert_F$, $\Vert A-A_\textrm{svds}\Vert_F$ and $\Vert A-A_\textrm{rsvd}\Vert_F$. Explain the results.
\end{itemize}
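For reference, the listing below is one possible way to call the three routines, time them, and compute the Frobenius errors; the fixed random seed, the use of \lstinline{time.perf_counter}, and the variable names are illustrative choices only.
\begin{lstlisting}
import time
import numpy as np
from scipy.sparse.linalg import svds
from sklearn.utils.extmath import randomized_svd

rng = np.random.default_rng(0)      # seed chosen for reproducibility
A = rng.standard_normal((2000, 2000))
k = 2                               # target rank

# Full SVD, then keep the k leading singular triplets.
t0 = time.perf_counter()
U, s, Vt = np.linalg.svd(A, full_matrices=False)
A_svd = U[:, :k] * s[:k] @ Vt[:k, :]
print("svd :", time.perf_counter() - t0, "s")

# Iterative (Lanczos-type) solver; note that svds returns the
# singular values in ascending order.
t0 = time.perf_counter()
Uk, sk, Vtk = svds(A, k=k)
A_svds = Uk * sk @ Vtk
print("svds:", time.perf_counter() - t0, "s")

# Randomized SVD.
t0 = time.perf_counter()
Ur, sr, Vtr = randomized_svd(A, n_components=k)
A_rsvd = Ur * sr @ Vtr
print("rsvd:", time.perf_counter() - t0, "s")

# Frobenius norms of the approximation errors.
for name, Ak in [("svd", A_svd), ("svds", A_svds), ("rsvd", A_rsvd)]:
    print(name, np.linalg.norm(A - Ak, "fro"))
\end{lstlisting}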
\item (5) Let $A$ be an $m\times n$ matrix with full column rank (so that $A^TA$ is invertible) and SVD $A = U\Sigma V^T$. Express the SVDs of the following matrices in terms of $U$, $\Sigma$ and $V$: (i) $\left(A^T A\right)^{-1}$, (ii) $\left(A^T A\right)^{-1}A^T$, (iii) $A\left(A^T A\right)^{-1}$, (iv) $A\left(A^T A\right)^{-1}A^T$.

\item (10) Consider the matrix
$$
A = \begin{bmatrix} -2 & 11\\ -10 & 5 \end{bmatrix}.
$$
\begin{itemize}
    \item List the singular values, left singular vectors and right singular vectors of $A$. The SVD is not unique, so find the one with the minimal number of minus signs in $U$ and $V$.
    \item Draw a labeled picture of the unit ball in $\mathbb{R}^2$ and its image under $A$, together with the singular vectors, with the coordinates of their vertices marked.
    \item What are the $2$-norm and the Frobenius norm of $A$?
    \item Find $A^{-1}$ not directly, but via the SVD.
    \item Find the eigenvalues $\lambda_1, \lambda_2$ of $A$.
\end{itemize}

\item (5) The file \path{A.npy} contains an $n\times n$ matrix $A$. Determine the best approximation of $A_{ij}$ by the following ansatz, in which the variables are separated: $A_{ij}\approx h_i\eta_j$. What is the relative error of this approximation,
$$
\delta_{\textrm{err}}=\frac{\sqrt{\sum_{ij}\left(A_{ij}-h_i\eta_j\right)^2}}{\sqrt{\sum_{ij}A_{ij}^2}}?
$$
How many terms $K$ would an exact representation of the form
$$
A_{ij}=\sum_{\alpha=1}^K h_{\alpha i}\eta_{\alpha j}
$$
require?

\item (10) In this exercise, you will explore the application of the SVD to dimensionality reduction. Start by loading the dataset:
\begin{lstlisting}[label={lst:code_direct}]
from sklearn.datasets import load_digits
digits = load_digits()
A = digits.data
y = digits.target
\end{lstlisting}
so that each row of $A$ contains a monochrome image of a digit ($64$ floating-point values, which can be reshaped into an $8\times 8$ image) and $y$ contains the digit labels.
\begin{itemize}
    \item Inspect the dataset: plot example images corresponding to several digits (say $0$, $3$ and $7$).
    \item Normalize the dataset $A$.
    \item Use the SVD to project the dataset $A$ from $64$ dimensions down to $2$. Show a scatter plot of the projected data, with colors encoding the digit labels (a minimal sketch of one possible projection is given after the exercise list).
\end{itemize}
\end{enumerate}
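For the last exercise, the listing below sketches one possible projection onto two dimensions; the column-wise centering used as normalization and the \lstinline{matplotlib} scatter plot are illustrative choices, not the only acceptable ones.
\begin{lstlisting}
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_digits

digits = load_digits()
A = digits.data          # shape (n_samples, 64)
y = digits.target

# Normalization: here we simply center the columns (one possible choice).
A0 = A - A.mean(axis=0)

# SVD of the centered data; the two leading right singular vectors
# span the 2-D subspace capturing the largest variance.
U, s, Vt = np.linalg.svd(A0, full_matrices=False)
Z = A0 @ Vt[:2].T        # equivalently U[:, :2] * s[:2]

plt.scatter(Z[:, 0], Z[:, 1], c=y, cmap="tab10", s=10)
plt.colorbar(label="digit")
plt.xlabel("component 1")
plt.ylabel("component 2")
plt.show()
\end{lstlisting}

\bibliography{library.bib}

\end{document}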