%% LyX 2.3.3 created this file. For more info, see http://www.lyx.org/.
%% Do not edit unless you really know what you are doing.
\documentclass[oneside,english]{amsbook}
\usepackage[T1]{fontenc}
\usepackage[latin9]{inputenc}
\usepackage{amsthm}
\usepackage{amssymb}
\makeatletter
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Textclass specific LaTeX commands.
\numberwithin{section}{chapter}
\numberwithin{equation}{section}
\numberwithin{figure}{section}
\newenvironment{lyxlist}[1]
{\begin{list}{}
{\settowidth{\labelwidth}{#1}
\setlength{\leftmargin}{\labelwidth}
\addtolength{\leftmargin}{\labelsep}
\renewcommand{\makelabel}[1]{##1\hfil}}}
{\end{list}}
\theoremstyle{definition}
\newtheorem*{defn*}{\protect\definitionname}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% User specified LaTeX commands.
\input{mydefs}
\usepackage{fullpage}
\makeatother
\usepackage{babel}
\providecommand{\definitionname}{Definition}
\begin{document}
\section*{Lior Silberman's Math 223: Problem Set 2 (due 25/1/2021)}
\begin{center}
\textbf{Practice problems (recommended, but do not submit)}
\par\end{center}
\begin{itemize}
\item Study the method of solving linear equations introduced in section
1.4 and use it to solve problem 2 of section 1.4.
\item Section 1.4, problems 1-5 (ignore matrices), 8, 12-13, 17-19.
\item Section 1.5, problems 1,2 (ignore matrices), 4, 9, 10
\end{itemize}
\begin{center}
\textbf{Linear dependence and independence}
\par\end{center}
\begin{lyxlist}{10.}
\item [{1.}] Let $\vu=\left(\begin{array}{c}
a\\
b
\end{array}\right),\vv=\left(\begin{array}{c}
c\\
d
\end{array}\right)\in\R^{2}$ and suppose that $\vu\neq\zv$. Show that $\vv$ is not dependent
on $\vu$ iff $ad-bc\neq0$.\bigskip{}
\item [{2.}] In each of the following problems either exhibit the given
vector as a linear combination of elements of the set or show that
this is impossible (\cf PS1 problem 2).
\begin{lyxlist}{10.}
\item [{(a)}] $V=\R^{3}$, $S=\left\{ \left(\begin{array}{c}
1\\
0\\
1
\end{array}\right),\left(\begin{array}{c}
1\\
1\\
0
\end{array}\right)\right\} $, $\underline{v}=\left(\begin{array}{c}
-4\\
-2\\
0
\end{array}\right)$ ~~~~~(b) Same $V,S$ but $\underline{v}=\left(\begin{array}{c}
-4\\
-2\\
-2
\end{array}\right)$.
\item [{(c)}] $V=\R^{2}$, $S=\left\{ \left(\begin{array}{c}
a\\
b
\end{array}\right),\left(\begin{array}{c}
c\\
d
\end{array}\right)\right\} $ such that $ad-bc\neq0$, $\vv=\left(\begin{array}{c}
e\\
f
\end{array}\right)$. \bigskip{}
\end{lyxlist}
\item [{3.}] More on spans.
\begin{lyxlist}{10.}
\item [{(a)}] Let $W=\Span(S)$ where $S$ is as in 2(a). Identify $W$
as the set of triples which solve a single equation in three variables.
\item [{(b)}] Let $T=\left\{ x^{k+1}-x^{k}\right\} _{k=0}^{\infty}\subset\R[x]$.
Show that $\Span(T)\subset\left\{ p\in\R[x]\mid p(1)=0\right\} $.
\item [{({*}c)}] Show equality in (b).
\item [{(d)}] Let $R=\left\{ 1+x^{k}\right\} _{k=1}^{\infty}\subset\R[x]$
(that is, $R$ is the set of polynomials $1+x,1+x^{2},1+x^{3},\cdots$).
Show that this set is linearly independent.
\item [{(e)}] Give (with proof!) a simple criterion, similar to the one
in part (b), for whether a polynomial is in $\Span(R)$.\bigskip{}
\end{lyxlist}
\item [{4.}] For each vector in the set $S=\left\{ (0,0,0,0),(0,0,3,0),(1,1,0,1),(2,2,0,0),(0,0,0,-1)\right\} \subset\R^{4}$
decide whether that vector is dependent or independent of the other
vectors in $S$.\bigskip{}
\item [{{*}5.}] Let $S\subset\R[x]$ be a set of non-zero polynomials,
no two of which have the same degree. Show that $S$ is linearly independent.\bigskip{}
\end{lyxlist}
\newpage{}
\begin{center}
\textbf{The ``minimal dependent subset'' trick}
\par\end{center}
The following result (6(d)) is a \emph{uniqueness} result, very handy
in proving linear independence.
\begin{lyxlist}{10.}
\item [{6.}] Let $V$ be a vector space, and let $S\subset V$ be linearly
dependent. Let $S'\subset S$ be a linearly dependent subset of the
smallest possible size, and enumerate its elements as $S'=\left\{ \vv_{i}\right\} _{i=1}^{n}$
(so $n$ is the size of $S'$ and the $\vv_{i}$ are distinct, in
particular $n\geq1$).
\begin{lyxlist}{10.}
\item [{(a)}] Show that $S$ contains a finite subset which is linearly
dependent (this is a test of understanding the definitions)
\item [{RMK}] Part (a) justifies the existence of $S'$.
\item [{(b)}] By definition of linear dependence there are scalars $\left\{ a_{i}\right\} _{i=1}^{n}\subset\R$
not all zero so that $\sum_{i=1}^{n}a_{i}\vv_{i}=\zv$. Show that
all the $a_{i}$ are non-zero.
\item [{(c)}] Conclude from (b) that \emph{every} vector of $S'$ depends
on the other vectors.
\item [{({*}d)}] Suppose that there existed other scalars $b_{i}$ so that
also $\sum_{i=1}^{n}b_{i}\vv_{i}=\zv$. Show that there is a single
scalar $t$ such that $b_{i}=ta_{i}$ for all $1\leq i\leq n$.\bigskip{}
\end{lyxlist}
\item [{{*}{*}7.}] (Linear independence of functions) Some differential
calculus will be used here.
\begin{lyxlist}{10.}
\item [{(a)}] Let $r_{1},\ldots,r_{n}$ be distinct real numbers. Show
that the set of functions $\left\{ e^{r_{i}x}\right\} _{i=1}^{n}$
is independent in $\R^{\R}$.
\item [{(b)}] Fix $a<b$ and consider the set $\left\{ \cos(kx)\right\} _{k>0}\cup\left\{ 1\right\} $
of functions on $\left[a,b\right]$ (you can treat $1$ as the function
$\cos(0x)$). Show that this set is linearly independent.
\item [{\bigskip{}
}]~
\end{lyxlist}
\end{lyxlist}
\begin{center}
\textbf{Supplementary problem: Independence in direct sums}
\par\end{center}
\begin{lyxlist}{10.}
\item [{A}] Before thinking more about direct sums, meditate on the following:
by breaking every vector in $\R^{n+m}$ into its first $n$ and last
$m$ coordinates, you can identify $\R^{n+m}$ with $\R^{n}\oplus\R^{m}$.
Now do the same problem twice:
\begin{lyxlist}{10.}
\item [{(a)}] Let $n,m\geq1$ and let $S_{1},S_{2}\subset\R^{n+m}$ be
two linearly independent subsets. Suppose that every vector in $S_{1}$
is supported in the first $n$ coordinates, and that every vector
in $S_{2}$ is supported in the last $m$ coordinates. Show that $S_{1}\cup S_{2}$
is also linearly independent. If $n=2,\,m=1$ this means that vectors
from $S_{1}$ look like $\left(\begin{array}{c}
*\\
*\\
0
\end{array}\right)$ and vectors in $S_{2}$ look like $\left(\begin{array}{c}
0\\
0\\
*
\end{array}\right)$.
\item [{(b)}] Let $V,W$ be two vector spaces. Let $S_{1}\subset V$ and
$S_{2}\subset W$ be linearly independent. Show that $\left\{ \left(\vv,0\right)\mid\vv\in S_{1}\right\} \cup\left\{ \left(0,\vw\right)\mid\vw\in S_{2}\right\} $
is linearly independent in $V\oplus W$.
\item [{RMK}] To understand every problem about direct sums consider it
first in the setting of part (a). Then try the general case.\bigskip{}
\end{lyxlist}
\end{lyxlist}
\newpage{}
Hint for 5: (1) In a linear combination of polynomials from $S$,
consider the polynomial of highest degree appearing with a non-zero
coefficient. (2) Try to see what happens if $S=\left\{ 1+1,1+x,1+x^{2}\right\} $.
\begin{center}
\textbf{Supplementary problem: another construction}
\par\end{center}
\begin{lyxlist}{10.}
\item [{A.}] (Quotient vector spaces) Let $V$ be a vector space, $W$
a subspace.
\begin{lyxlist}{10.}
\item [{(a)}] Define a relation $\cdot\equiv\cdot\,(W)$ (read ``congruent
mod $W$'') on $V$ by $\vv\equiv\vv'\,(W)\iff(\vv-\vv')\in W$.
Show that this relation is an \emph{equivalence relation}, that is
that it is reflexive, symmetric and transitive.
\item [{(b)}] For a vector $\vv\in V$ let $\vv+W$ denote the set of sums
$\left\{ \vv+\vw\mid\vw\in W\right\} $. Show that $\vv+W=\vv'+W$
iff $\vv+W\cap\vv'+W\neq\emptyset$ iff $\vv-\vv'\in W$. In particular
show that if $\vv'\in\vv+W$ then $\vv'+W=\vv+W$. These subsets are
the equivalence classes of the relation from part (a) and are called
\emph{cosets} mod $W$ or \emph{affine subspaces.}
\item [{(c)}] Show that if $\vv\equiv\vv'\,(W)$ and $\vu\equiv\vu'\,(W)$
and $a,b\in\R$ then $a\vv+b\vu\equiv a\vv'+b\vu'\,(W)$.
\item [{DEF}] Let $V/W=\left\{ \vv+W\mid\vv\in V\right\} $ be the set
of cosets mod $W$. Define addition and scalar multiplication on $V/W$
by $\left(\vv+W\right)+\left(\vu+W\right)\eqdef(\vv+\vu)+W$ and $a\left(\vv+W\right)\eqdef(a\vv)+W$.
\item [{(d)}] Use (c) to show that the operation is \emph{well-defined}
-- that if $\vv+W=\vv'+W$ and $\vu+W=\vu'+W$ then $(\vv+\vu)+W=(\vv'+\vu')+W$
so that the sum of two cosets comes out the same no matter which vector
is chosen to represent the coset.
\item [{(e)}] Show that $V/W$ with these operations is a vector space,
known as the \emph{quotient vector space} $V/W$.
\end{lyxlist}
\end{lyxlist}
\begin{center}
\textbf{Supplementary problems: finite fields}
\par\end{center}
Let $p$ be a prime number. Define addition and multiplication on
$\left\{ 0,1,\cdots,p-1\right\} $ as follows: $a+_{p}b=c$ and $a\cdot_{p}b=d$
if $c$ (resp. $d$) is the remainder obtained when dividing $a+b$
(resp. $ab$) by $p$.
\begin{lyxlist}{10.}
\item [{B.}] (Elementary calculations)
\begin{lyxlist}{10.}
\item [{(a)}] Show that these operations are associative and commutative,
that $0$ is neutral for addition, that $1$ is neutral for multiplication.
\item [{(b)}] Show that if $1**