Mathematics for Applied Sciences (Osnabrück 2023-2024)/Part I/Lecture 25/latex
\setcounter{section}{25}
\subtitle {The dimension formula}
The following statement is called \keyword {dimension formula} {.}
\inputfaktbeweisnichtvorgefuehrt
{Linear mapping/Dimension formula/Fact}
{Theorem}
{}
{
\factsituation {Let $K$ denote a
field,
let
\mathcor {} {V} {and} {W} {}
denote
$K$-vector spaces,
and let
\mathdisp {\varphi \colon V \longrightarrow W} { }
denote a
$K$-linear mapping.}
\factcondition {Suppose that $V$ has finite dimension.}
\factconclusion {Then
\mathrelationchaindisplay
{\relationchain
{ \dim_{ K } { \left( V \right) }
}
{ =} { \dim_{ K } { \left( \operatorname{kern} \varphi \right) } + \dim_{ K } { \left( \operatorname{Im} \varphi \right) }
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{}
holds.}
\factextra {}
}
{
Set
\mathrelationchain
{\relationchain
{ n
}
{ = }{ \dim_{ K } { \left( V \right) }
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{.}
Let
\mathrelationchain
{\relationchain
{U
}
{ = }{ \operatorname{kern} \varphi
}
{ \subseteq }{ V
}
{ }{
}
{ }{
}
}
{}{}{}
denote the
kernel
of the mapping and let
\mathrelationchain
{\relationchain
{ k
}
{ = }{ \dim_{ K } { \left( U \right) }
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}
denote its
dimension
\extrabracket {\mathrelationchainb
{\relationchainb
{ k
}
{ \leq }{ n
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}} {} {.}
Let
\mathdisp {u_1 , \ldots , u_k} { }
be a
basis
of $U$. Due to
Theorem 23.23
,
there exist vectors
\mathdisp {v_1 , \ldots , v_{n-k }} { }
such that
\mathdisp {u_1 , \ldots , u_k, \, v_1 , \ldots , v_{n-k }} { }
is a basis of $V$.
We claim that
\mathdisp {w_j = \varphi(v_j), \, j=1 , \ldots , n-k} { , }
is a basis of the image. Let
\mathrelationchain
{\relationchain
{ w
}
{ \in }{ W
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}
be an element of the image \mathl{\varphi(V)}{.} Then there exists a vector
\mathrelationchain
{\relationchain
{ v
}
{ \in }{ V
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}
such that
\mathrelationchain
{\relationchain
{ \varphi(v)
}
{ = }{ w
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{.}
We can write $v$ in terms of the basis as
\mathrelationchaindisplay
{\relationchain
{ v
}
{ =} { \sum_{i = 1}^{ k } s_i u_i + \sum_{ j = 1 }^{ n-k } t_j v_j
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{.}
Then we have
\mathrelationchainalign
{\relationchainalign
{w
}
{ =} { \varphi(v)
}
{ =} { \varphi { \left( \sum_{i=1}^{ k } s_i u_i + \sum_{j = 1}^{n-k } t_j v_j \right) }
}
{ =} { \sum_{i = 1}^{ k } s_i \varphi(u_i) + \sum_{j = 1}^{n- k } t_j \varphi (v_j)
}
{ =} { \sum_{j = 1}^{n-k } t_j w_j
}
}
{}
{}{,}
which means that $w$ is a
linear combination
of the vectors $w_j$.
In order to prove that the family
\mathcond {w_j} {}
{j=1 , \ldots , n-k} {}
{} {} {} {,}
is
linearly independent,
let a representation of zero be given,
\mathrelationchaindisplay
{\relationchain
{ 0
}
{ =} { \sum_{j = 1}^{n-k } t_j w_j
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{.}
Then
\mathrelationchaindisplay
{\relationchain
{ \varphi { \left( \sum_{j = 1}^{n-k } t_j v_j \right) }
}
{ =} { \sum_{j = 1}^{n-k } t_j \varphi { \left( v_j \right) }
}
{ =} { 0
}
{ } {
}
{ } {
}
}
{}{}{.}
Therefore, \mathl{\sum_{j=1}^{n-k } t_j v_j}{} belongs to the kernel of the mapping. Hence, we can write
\mathrelationchaindisplay
{\relationchain
{ \sum_{ j = 1 }^{n-k } t_j v_j
}
{ =} { \sum_{ i = 1 }^{ k } s_i u_i
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{.}
Since the vectors \mathl{u_1 , \ldots , u_k, \, v_1 , \ldots , v_{n-k }}{} together form a basis of $V$, and the representation of a vector with respect to a basis is unique, we can infer that all coefficients are $0$; in particular,
\mathrelationchain
{\relationchain
{ t_j
}
{ = }{ 0
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{.}
}
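As a quick numerical illustration of the dimension formula, the following minimal sketch checks it for a linear mapping given by a matrix. It assumes the Python computer algebra library sympy; the specific matrix is a hypothetical example.
\begin{verbatim}
from sympy import Matrix

# hypothetical example: a linear mapping K^4 -> K^3, given by the
# matrix whose columns are the images of the standard basis vectors
A = Matrix([[1, 2, 0, 1],
            [0, 1, 1, 1],
            [1, 3, 1, 2]])

dim_V      = A.cols                # n = dim(V)
dim_kernel = len(A.nullspace())    # dimension of the kernel of phi
dim_image  = A.rank()              # dimension of the image of phi

assert dim_V == dim_kernel + dim_image   # here: 4 == 2 + 2
\end{verbatim}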
\inputdefinition
{ }
{
Let $K$ denote a
field,
let
\mathcor {} {V} {and} {W} {}
denote
$K$-vector spaces,
and let
\mathdisp {\varphi \colon V \longrightarrow W} { }
denote a
$K$-linear mapping.
Suppose that $V$ has finite dimension. Then we call
\mathrelationchaindisplay
{\relationchain
{ \operatorname{rk} \, \varphi
}
{ \defeq} { \dim_{ K } { \left( \operatorname{Im} \varphi \right) }
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{}
the \definitionword {rank}{} of $\varphi$.
}
The dimension formula can also be expressed as
\mathrelationchaindisplay
{\relationchain
{ \dim_{ K } { \left( V \right) }
}
{ =} { \dim_{ K } { \left( \operatorname{kern} \varphi \right) } + \operatorname{rk} \, \varphi
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{.}
\inputexample{}
{
We consider the
linear mapping
\mathdisp {\varphi \colon \R^3 \longrightarrow \R^4
, \begin{pmatrix} x \\y\\ z \end{pmatrix} \longmapsto M\begin{pmatrix} x \\y\\ z \end{pmatrix} = \begin{pmatrix} y+z \\2y+2z\\ x+3y+4z\\2x+4y+6z \end{pmatrix}} { , }
given by the matrix
\mathrelationchaindisplay
{\relationchain
{ M
}
{ =} { \begin{pmatrix} 0 & 1 & 1 \\ 0 & 2 & 2 \\ 1 & 3 & 4 \\ 2 & 4 & 6 \end{pmatrix}
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{.}
To determine the
kernel,
we have to solve the
homogeneous linear system
\mathrelationchaindisplay
{\relationchain
{ \begin{pmatrix} y+z \\2y+2z\\ x+3y+4z\\2x+4y+6z \end{pmatrix}
}
{ =} { \begin{pmatrix} 0 \\0\\ 0\\0 \end{pmatrix}
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{.}
The solution space is
\mathrelationchaindisplay
{\relationchain
{L
}
{ =} { { \left\{ s \begin{pmatrix} 1 \\1\\ -1 \end{pmatrix} \mid s \in \R \right\} }
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{,}
and this is the kernel of $\varphi$. The kernel has dimension one; therefore, due to the dimension formula, the dimension of the image is $2$.
}
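The kernel computation in this example can be confirmed with a short sketch (again assuming sympy); the method \verb|nullspace| returns a basis of the solution space.
\begin{verbatim}
from sympy import Matrix

M = Matrix([[0, 1, 1],
            [0, 2, 2],
            [1, 3, 4],
            [2, 4, 6]])

kernel_basis = M.nullspace()   # basis of the solution space L
print(kernel_basis)            # [Matrix([[-1], [-1], [1]])], a scalar
                               # multiple of (1, 1, -1)
print(M.rank())                # 2, the dimension of the image
\end{verbatim}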
\inputfactproof
{Vector space/Finite dimensional/Injective surjective bijective/Fact}
{Corollary}
{}
{
\factsituation {Let $K$ denote a
field,
let
\mathcor {} {V} {and} {W} {}
denote
$K$-vector spaces
with the same
dimension
$n$. Let
\mathdisp {\varphi \colon V \longrightarrow W} { }
denote a
linear mapping.}
\factconclusion {Then $\varphi$ is
injective
if and only if $\varphi$ is
surjective.}
\factextra {}
}
{
This follows from the dimension formula and Lemma 24.14.
}
\subtitle {Composition of linear mappings and matrices}
\inputfaktbeweis
{Linear mapping/Matrix/Composition/Fact}
{Lemma}
{}
{
\factsituation {}
\factconclusion {In the
correspondence
between
linear mappings
and
matrices,
the
composition
of linear mappings corresponds to the
matrix multiplication.}
\factextra {More precisely: let \mathl{U,V,W}{} denote
vector spaces
over a
field
$K$ with
bases
\mathdisp {\mathfrak{ u } = u_1 , \ldots , u_p , \, \mathfrak{ v } = v_1 , \ldots , v_n \text{ and } \mathfrak{ w } = w_1 , \ldots , w_m} { . }
Let
\mathdisp {\psi:U \longrightarrow V \text{ and } \varphi: V \longrightarrow W} { }
denote linear mappings. Then, for the describing matrices of \mathl{\psi}{} and \mathl{\varphi}{,} and of the composition \mathl{\varphi \circ \psi}{,} the relation
\mathrelationchaindisplay
{\relationchain
{ M^{ \mathfrak{ u } }_{ \mathfrak{ w } } (\varphi \circ \psi )
}
{ =} { ( M^{ \mathfrak{ v } }_{ \mathfrak{ w } } (\varphi) ) \circ ( M^{ \mathfrak{ u } }_{ \mathfrak{ v } }(\psi) )
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{}
holds.}
}
{
We consider the chain of mappings
\mathdisp {U \stackrel{\psi}{\longrightarrow } V \stackrel{\varphi}{\longrightarrow } W} { . }
Suppose that $\psi$ is described by the \mathl{n \times p}{-}matrix
\mathrelationchain
{\relationchain
{ B
}
{ = }{(b_{jk})_{jk}
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{,}
and that $\varphi$ is described by the \mathl{m \times n}{-}matrix
\mathrelationchain
{\relationchain
{A
}
{ = }{ { \left( a_{ij} \right) }_{ij}
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}
\extrabracket {with respect to the bases} {} {.}
The composition \mathl{\varphi \circ \psi}{} has the following effect on the basis vector $u_k$:
\mathrelationchainalign
{\relationchainalign
{ { \left( \varphi \circ \psi \right) } { \left( u_k \right) }
}
{ =} { \varphi { \left( \psi { \left( u_k \right) } \right) }
}
{ =} { \varphi { \left( \sum_{ j = 1 }^{ n } b_{jk} v_j \right) }
}
{ =} { \sum_{ j = 1 }^{ n } b_{jk} \varphi(v_j)
}
{ =} { \sum_{ j = 1 }^{ n } b_{jk} { \left( \sum_{ i = 1 }^{ m } a_{ij} w_i \right) }
}
}
{
\relationchainextensionalign
{ =} { \sum_{ i = 1 }^{ m } { \left( \sum_{ j = 1 }^{ n } a_{ij} b_{jk} \right) } w_i
}
{ =} { \sum_{ i = 1 }^{ m } c_{ik} w_i
}
{ } {}
{ } {}
}
{}{.}
These coefficients
\mathrelationchain
{\relationchain
{ c_{ik}
}
{ = }{ \sum_{ j = 1 }^{ n } a_{ij} b_{jk}
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}
are just the entries of the
product matrix
\mathl{A \circ B}{.}
Since the composition of mappings is associative, we can conclude from this correspondence that the multiplication of matrices is also associative.
}
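As a minimal numerical check of this correspondence (assuming sympy; the matrices are hypothetical examples), applying the two mappings one after the other to a coordinate vector agrees with applying the product matrix.
\begin{verbatim}
from sympy import Matrix

B = Matrix([[1, 2], [0, 1], [3, 0]])   # 3 x 2 matrix describing psi: K^2 -> K^3
A = Matrix([[1, 0, 2], [2, 1, 0]])     # 2 x 3 matrix describing phi: K^3 -> K^2

x = Matrix([5, -1])                    # an arbitrary coordinate vector

# composition of the mappings versus multiplication of the matrices
assert A * (B * x) == (A * B) * x
\end{verbatim}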
\subtitle {Invertible matrices}
\inputdefinition
{ }
{
Let $K$ be a
field,
and let $M$ denote an
$n \times n$-matrix
over $K$. Then $M$ is called \definitionword {invertible}{,} if there exists a matrix
\mathrelationchain
{\relationchain
{A
}
{ \in }{ \operatorname{Mat}_{ n } (K)
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}
such that
\mathrelationchaindisplay
{\relationchain
{ A \circ M
}
{ =} { E_{ n }
}
{ =} { M \circ A
}
{ } {
}
{ } {
}
}
{}{}{}
holds.
}
\inputdefinition
{ }
{
Let $K$ denote a
field.
For an
invertible matrix
\mathrelationchain
{\relationchain
{M
}
{ \in }{ \operatorname{Mat}_{ n } (K)
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{,}
the matrix
\mathrelationchain
{\relationchain
{A
}
{ \in }{\operatorname{Mat}_{ n } (K)
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}
fulfilling
\mathrelationchaindisplay
{\relationchain
{ A \circ M
}
{ =} { E_{ n }
}
{ =} { M \circ A
}
{ } {
}
{ } {
}
}
{}{}{,}
is called the \definitionword {inverse matrix}{} of $M$. It is denoted by
\mathdisp {M^{-1}} { . }
}
\subtitle {Linear mappings and change of basis}
\inputfaktbeweisnichtvorgefuehrt
{Linear mapping/Finite dimensional/Change of basis/Fact}
{Lemma}
{}
{
\factsituation {Let $K$ denote a
field,
and let
\mathcor {} {V} {and} {W} {}
denote finite-dimensional
$K$-vector spaces.
Let
\mathcor {} {\mathfrak{ v }} {and} {\mathfrak{ u }} {}
be
bases
of $V$ and
\mathcor {} {\mathfrak{ w }} {and} {\mathfrak{ z }} {}
bases of $W$. Let
\mathdisp {\varphi \colon V \longrightarrow W} { }
denote a
linear mapping,
which is described by the
matrix
\mathl{M^ \mathfrak{ v }_ \mathfrak{ w }(\varphi)}{} with respect to the bases
\mathcor {} {\mathfrak{ v }} {and} {\mathfrak{ w }} {.}}
\factconclusion {Then $\varphi$ is described with respect to the bases
\mathcor {} {\mathfrak{ u }} {and} {\mathfrak{ z }} {}
by the matrix
\mathdisp {M^{ \mathfrak{ w } }_{ \mathfrak{ z } } \circ ( M^ \mathfrak{ v }_ \mathfrak{ w }(\varphi) ) \circ ( M^{ \mathfrak{ v } }_{ \mathfrak{ u } })^{-1}} { , }
where
\mathcor {} {M^{ \mathfrak{ v } }_{ \mathfrak{ u } }} {and} {M^{ \mathfrak{ w } }_{ \mathfrak{ z } }} {}
are the
transformation matrices,
which describe the change of basis from
\mathcor {} {\mathfrak{ v }} {to} {\mathfrak{ u }} {}
and from
\mathcor {} {\mathfrak{ w }} {to} {\mathfrak{ z }} {.}}
\factextra {}
}
{
The linear standard mappings
$K^n \rightarrow V$
and
$K^m \rightarrow W$
for the various bases are denoted by \mathl{\Psi_{ \mathfrak{ v } }, \, \Psi_{ \mathfrak{ u } }, \, \Psi_{ \mathfrak{ w } }, \, \Psi_{ \mathfrak{ z } }}{.} We consider the
commutative diagram
\mathdisp {\begin{matrix} K^n & & & \stackrel{ M^{ \mathfrak{ v } }_{ \mathfrak{ w } } (\varphi) }{\longrightarrow} & & & K^m \\
& \searrow \Psi_{ \mathfrak{ v } } \!\!\!\!\! & & & & \Psi_{ \mathfrak{ w } } \swarrow \!\!\!\!\! & \\
\!\!\!\!\! M^{ \mathfrak{ v } }_{ \mathfrak{ u } } \downarrow & & V & \stackrel{ \varphi }{\longrightarrow} & W & & \, \, \, \, \downarrow M^{ \mathfrak{ w } }_{ \mathfrak{ z } } \\
& \nearrow \Psi_{ \mathfrak{ u } } \!\!\!\!\! & & & & \Psi_{ \mathfrak{ z } } \nwarrow \!\!\!\!\! & \\
K^n & & & \stackrel{ M^{ \mathfrak{ u } }_{ \mathfrak{ z } } (\varphi) }{\longrightarrow} & & & K^m ,
\!\!\!\!\!
\end{matrix}} { }
where the commutativity rests on
Fact *****
and
Fact *****.
In this situation, we have altogether
\mathrelationchainalign
{\relationchainalign
{ M^{ \mathfrak{ u } }_{ \mathfrak{ z } } (\varphi)
}
{ =} { \Psi_{ \mathfrak{ z } }^{-1} \circ \varphi \circ \Psi_{ \mathfrak{ u } }
}
{ =} { \Psi_{ \mathfrak{ z } }^{-1} \circ ( \Psi_{ \mathfrak{ w } } \circ M^{ \mathfrak{ v } }_{ \mathfrak{ w } } (\varphi) \circ \Psi_{ \mathfrak{ v } }^{-1} ) \circ \Psi_{ \mathfrak{ u } }
}
{ =} { (\Psi_{ \mathfrak{ z } }^{-1} \circ \Psi_{ \mathfrak{ w } } ) \circ M^{ \mathfrak{ v } }_{ \mathfrak{ w } } (\varphi) \circ ( \Psi_{ \mathfrak{ v } }^{-1} \circ \Psi_{ \mathfrak{ u } } )
}
{ =} { (\Psi_{ \mathfrak{ z } }^{-1} \circ \Psi_{ \mathfrak{ w } } ) \circ M^{ \mathfrak{ v } }_{ \mathfrak{ w } } (\varphi) \circ ( \Psi_{ \mathfrak{ u } }^{-1} \circ \Psi_{ \mathfrak{ v } } )^{-1}
}
}
{
\relationchainextensionalign
{ =} { M^{ \mathfrak{ w } }_{ \mathfrak{ z } } \circ M^{ \mathfrak{ v } }_{ \mathfrak{ w } } (\varphi) \circ( M^{ \mathfrak{ v } }_{ \mathfrak{ u } } )^{-1}
}
{ } {}
{ } {}
{ } {}
}
{}{.}
\inputfactproof
{Endomorphismus/Finite dimensional/Change of basis/Fact}
{Corollary}
{}
{
\factsituation {Let $K$ denote a
field,
and let $V$ denote a
$K$-vector space
of finite dimension. Let
\mathdisp {\varphi \colon V \longrightarrow V} { }
be a
linear mapping.
Let
\mathcor {} {\mathfrak{ u }} {and} {\mathfrak{ v }} {}
denote
bases
of $V$.}
\factconclusion {Then the matrices that describe the linear mapping with respect to
\mathcor {} {\mathfrak{ u }} {and} {\mathfrak{ v }} {}
respectively
\extrabracket {on both sides} {} {,}
fulfil the relation
\mathrelationchaindisplay
{\relationchain
{ M^ \mathfrak{ u }_ \mathfrak{ u }(\varphi)
}
{ =} { M^{ \mathfrak{ v } }_{ \mathfrak{ u } } \circ M^ \mathfrak{ v }_ \mathfrak{ v }(\varphi) \circ ( M^{ \mathfrak{ v } }_{ \mathfrak{ u } })^{-1}
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{.}}
\factextra {}
}
{
This follows directly from Lemma 25.8.
}
\inputdefinition
{ }
{
Two square matrices
\mathrelationchain
{\relationchain
{ M,N
}
{ \in }{ \operatorname{Mat}_{ n } (K)
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}
are called
\definitionword {similar}{,}
if there exists an
invertible matrix
$B$ with
\mathrelationchain
{\relationchain
{M
}
{ = }{ B N B^{-1}
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{.}
}
Due to Corollary 25.9, for a linear mapping $\varphi \colon V \rightarrow V$, the describing matrices with respect to different bases are similar to each other.
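For the endomorphism case, a minimal sketch (assuming sympy; the matrices are hypothetical examples) exhibits this similarity explicitly.
\begin{verbatim}
from sympy import Matrix

F   = Matrix([[2, 1], [0, 3]])   # phi: K^2 -> K^2 w.r.t. the standard basis
V_b = Matrix([[1, 1], [0, 1]])   # basis v (columns = basis vectors)
U_b = Matrix([[1, 0], [1, 1]])   # basis u

M_vv = V_b.inv() * F * V_b       # describing matrix w.r.t. v (on both sides)
M_uu = U_b.inv() * F * U_b       # describing matrix w.r.t. u
B    = U_b.inv() * V_b           # transformation matrix from v to u

assert M_uu == B * M_vv * B.inv()   # the two describing matrices are similar
\end{verbatim}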
\subtitle {Properties of linear mappings}
\inputfactproof
{Linear mapping/Matrix to basis/Several properties/Fact}
{Lemma}
{}
{
\factsituation {Let $K$ be a field, and let
\mathcor {} {V} {and} {W} {}
be vector spaces over $K$ of dimensions
\mathcor {} {n} {and} {m} {.}
Let
\mathdisp {\varphi \colon V \longrightarrow W} { }
be a linear map, described by the matrix
\mathrelationchain
{\relationchain
{ M
}
{ \in }{ \operatorname{Mat}_{ m \times n } (K)
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}
with respect to two bases.}
\factsegue {Then the following properties hold.}
\factconclusion {\enumerationthree {$\varphi$ is
injective
if and only if the columns of the matrix are
linearly independent.
} {$\varphi$ is
surjective
if and only if the columns of the matrix form a
generating system
of $K^m$.
} {Let
\mathrelationchain
{\relationchain
{m
}
{ = }{n
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{.}
Then $\varphi$ is
bijective
if and only if the columns of the matrix form a basis of $K^m$, and this holds if and only if $M$ is
invertible.
}}
\factextra {}
}
{
Let
\mathrelationchain
{\relationchain
{ \mathfrak{ v }
}
{ = }{ v_1 , \ldots , v_n
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}
and
\mathrelationchain
{\relationchain
{ \mathfrak{ w }
}
{ = }{ w_1 , \ldots , w_m
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}
denote the bases of
\mathcor {} {V} {and} {W} {}
respectively, and let \mathl{s_1 , \ldots , s_n}{} denote the column vectors of $M$. (1). The mapping $\varphi$ has the property
\mathrelationchaindisplay
{\relationchain
{ \varphi(v_j)
}
{ =} { \sum_{ i = 1 }^{ m } s_{ij} w_i
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{,}
where \mathl{s_{ij}}{} is the $i$-th entry of the $j$-th column vector. Therefore,
\mathrelationchaindisplay
{\relationchain
{ \varphi { \left( \sum_{ j = 1 }^{ n } a_j v_j \right) }
}
{ =} { \sum_{ j = 1 }^{ n } a_j { \left( \sum_{ i = 1 }^{ m } s_{ij } w_i \right) }
}
{ =} { \sum_{ i = 1 }^{ m } { \left( \sum_{ j = 1 }^{ n } a_j s_{ij} \right) } w_i
}
{ } {
}
{ } {
}
}
{}{}{.}
This is $0$ if and only if
\mathrelationchain
{\relationchain
{ \sum_{ j = 1 }^{ n } a_j s_{ij}
}
{ = }{ 0
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{}
for all $i$, and this is equivalent to
\mathrelationchaindisplay
{\relationchain
{ \sum_{ j = 1 }^{ n } a_js_j
}
{ =} { 0
}
{ } {
}
{ } {
}
{ } {
}
}
{}{}{.}
This vector equation has a nontrivial solution \mathl{{ \left( a_1 , \ldots , a_n \right) }}{} if and only if the columns are linearly dependent, and this holds if and only if $\varphi$ is not injective.
(2). See
Exercise 25.3
.
(3). Let
\mathrelationchain
{\relationchain
{n
}
{ = }{m
}
{ }{
}
{ }{
}
{ }{
}
}
{}{}{.}
The first equivalence follows from (1) and (2). If $\varphi$ is bijective, then there exists a
\extrabracket {linear} {} {}
inverse mapping
\mathl{\varphi^{-1}}{} with
\mathdisp {\varphi \circ \varphi^{-1} =
\operatorname{Id}_{ W } \text{ and } \varphi^{-1} \circ \varphi =
\operatorname{Id}_{ V }} { . }
Let $M$ denote the matrix for $\varphi$, and $N$ the matrix for $\varphi^{-1}$. The matrix for the identity is the
identity matrix.
Because of
Lemma 25.5
,
we have
\mathrelationchaindisplay
{\relationchain
{ M \circ N
}
{ =} { E_{ n }
}
{ =} { N \circ M
}
{ } {
}
{ } {
}
}
{}{}{}
and therefore $M$ is invertible. The reverse implication is proved similarly.
}
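These column criteria are easy to check by machine. A minimal sketch (assuming sympy), applied to the $4 \times 3$ matrix from the kernel example above:
\begin{verbatim}
from sympy import Matrix

M = Matrix([[0, 1, 1],
            [0, 2, 2],
            [1, 3, 4],
            [2, 4, 6]])

n, m = M.cols, M.rows
injective  = (M.rank() == n)   # columns linearly independent
surjective = (M.rank() == m)   # columns generate K^m
print(injective, surjective)   # False False, since the rank is 2
\end{verbatim}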
\subtitle {Finding the inverse matrix}
\inputverfahren{}
{
Let $M$ denote a square matrix. How can we decide whether the matrix is invertible, and how can we find the inverse matrix \mathl{M^{-1}}{?}
For this, we write down a table: on the left-hand side, we write down the matrix $M$, and, on the right-hand side, the identity matrix
\extrabracket {of the right size} {} {.}
Now we apply, step by step, the same elementary row manipulations to both sides. The goal is to transform the matrix on the left-hand side into the identity matrix. This is possible if and only if the matrix is invertible. We claim that, in the end, this method produces the matrix $M^{-1}$ in the right-hand column. This rests on the following \keyword {invariance principle} {:} every elementary row manipulation can be realized as a matrix multiplication with some
elementary matrix
$E$ from the left. If in the table we have somewhere the pair
\mathdisp {(M_1, M_2)} { , }
after the next step
\extrabracket {in the next line} {} {}
we have
\mathdisp {(EM_1,EM_2)} { . }
If we multiply the inverse of the first matrix
\extrabracket {which we do not know yet; however, we do know that it exists, in case the matrix is invertible} {} {}
with the second matrix, then we get
\mathrelationchaindisplay
{\relationchain
{ (EM_1)^{-1} EM_2
}
{ =} { M_1^{-1} E^{-1} E M_2
}
{ =} { M_1^{-1} M_2
}
{ } {
}
{ } {
}
}
{}{}{.}
This means that this expression is not changed by any single step. In the beginning, this expression equals \mathl{M^{-1} E_{ n }}{;} hence, in the end, the pair \mathl{( E_{ n } , N)}{} must fulfil
\mathrelationchaindisplay
{\relationchain
{ N
}
{ =} { E_{ n }^{-1} N
}
{ =} { M^{-1} E_{ n }
}
{ =} { M^{-1}
}
{ } {}
}
{}{}{.}
}
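The following sketch implements this method (assuming sympy; its built-in row reduction \verb|rref| carries out the elementary row manipulations for us, instead of explicit elementary matrices): we row-reduce the table \mathl{(M , E_{ n })}{,} written as one augmented matrix, and read off the inverse on the right-hand side.
\begin{verbatim}
from sympy import Matrix, eye

def inverse_by_row_reduction(M):
    n = M.rows
    # the table (M, E_n), written as the augmented matrix [M | E_n]
    table = M.row_join(eye(n))
    # apply elementary row manipulations until the left block is reduced
    reduced, pivots = table.rref()
    if list(pivots) != list(range(n)):
        # the left block cannot be turned into E_n
        raise ValueError("the matrix is not invertible")
    return reduced[:, n:]   # left block is E_n, right block is M^{-1}
\end{verbatim}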
\inputexample{}
{
We want to find, for the matrix \mathl{M = \begin{pmatrix} 1 & 3 & 1 \\ 4 & 1 & 2 \\0 & 1 & 1 \end{pmatrix}}{,} its
inverse matrix
$M^{-1}$, following
Method 25.12
.
\matabellezweisieben {\leitzeilezwei {} {} } {\mazeileundzwei { \begin{pmatrix} 1 & 3 & 1 \\ 4 & 1 & 2 \\0 & 1 & 1 \end{pmatrix} } { \begin{pmatrix} 1 & 0 & 0 \\ 0 & 1 & 0 \\0 & 0 & 1 \end{pmatrix}
} }
{\mazeileundzwei { \begin{pmatrix} 1 & 3 & 1 \\ 0 & -11 & -2 \\0 & 1 & 1 \end{pmatrix} } { \begin{pmatrix} 1 & 0 & 0 \\ -4 & 1 & 0 \\0 & 0 & 1 \end{pmatrix}
} }
{\mazeileundzwei { \begin{pmatrix} 1 & 3 & 1 \\ 0 & 1 & 1 \\0 & -11 & -2 \end{pmatrix} } { \begin{pmatrix} 1 & 0 & 0 \\ 0 & 0 & 1 \\-4 & 1 & 0 \end{pmatrix}
} }
{\mazeileundzwei { \begin{pmatrix} 1 & 3 & 1 \\ 0 & 1 & 1 \\0 & 0 & 9 \end{pmatrix} } { \begin{pmatrix} 1 & 0 & 0 \\ 0 & 0 & 1 \\-4 & 1 & 11 \end{pmatrix}
} }
{\mazeileundzwei { \begin{pmatrix} 1 & 3 & 1 \\ 0 & 1 & 1 \\0 & 0 & 1 \end{pmatrix} } { \begin{pmatrix} 1 & 0 & 0 \\ 0 & 0 & 1 \\\frac{-4}{9} & \frac{1}{9} & \frac{11}{9} \end{pmatrix}
} }
{\mazeileundzwei { \begin{pmatrix} 1 & 0 & -2 \\ 0 & 1 & 1 \\0 & 0 & 1 \end{pmatrix} } { \begin{pmatrix} 1 & 0 & -3 \\ 0 & 0 & 1 \\\frac{-4}{9} & \frac{1}{9} & \frac{11}{9} \end{pmatrix}
} }
{\mazeileundzwei { \begin{pmatrix} 1 & 0 & 0 \\ 0 & 1 & 0 \\0 & 0 & 1 \end{pmatrix} } { \begin{pmatrix} \frac{1}{9} & \frac{2}{9} & \frac{-5}{9} \\ \frac{4}{9} & \frac{-1}{9} & \frac{-2}{9} \\\frac{-4}{9} & \frac{1}{9} & \frac{11}{9} \end{pmatrix}
} }
}
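The result can be verified (assuming sympy) by checking that the product with $M$ gives the identity matrix, and by comparing with the built-in inverse.
\begin{verbatim}
from sympy import Matrix, Rational, eye

M     = Matrix([[1, 3, 1], [4, 1, 2], [0, 1, 1]])
M_inv = Rational(1, 9) * Matrix([[ 1,  2, -5],
                                 [ 4, -1, -2],
                                 [-4,  1, 11]])

assert M * M_inv == eye(3) and M_inv * M == eye(3)
assert M.inv() == M_inv
\end{verbatim}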