\documentclass[11pt]{article}
\pagestyle{empty}
\newcommand{\sig}{\sigma}
\newcommand{\ra}{\rightarrow}
\newcommand{\eps}{\epsilon}
\newcommand{\gam}{\gamma}
\newcommand{\ah}{\alpha}
\newcommand{\ben}{\begin{enumerate}}
\newcommand{\een}{\end{enumerate}}
\newcommand{\hsa}{\hspace*{1cm}}
\newcommand{\hsb}{\hspace*{2cm}}
\newcommand{\So}{\\ {\tt Solution:}}
\begin{document}
\begin{center} {\Large\bf Fundamental Algorithms, Assignment 4\\ Solutions}
\end{center}
\ben
\item Consider the recursion
$T(n)=9T(n/3) + n^2$ with initial value $T(1)=1$. Calculate the
{\em precise} values of $T(3),T(9),T(27),T(81),T(243)$. Make a good
(and correct) guess as to the general formula for $T(3^i)$ and
write this as $T(n)$. (Don't worry about when $n$ is not a power
of three.) Now use the Master Theorem to give, in Thetaland, the
asymptotics of $T(n)$. Check that the two answers are consistent.
\So $T(3)=9(1)+3^2=18=2(9)$, $T(9)=9(18)+9^2=243=3(81)$, $T(27)=
9(243)+729=2916=4(729)$, $T(81)=32805=5(6561)$, $T(243)=354294=6(59049)$.
In general, $T(3^i)=(i+1)3^{2i}$. With $n=3^i$ we have $3^{2i}=n^2$
and $i=\log_3n$ so the formula is $T(n)=n^2(1+\log_3n)$. In Thetaland,
$T(n)=\Theta(n^2\lg n)$. With the Master Theorem, as $\log_39=2$ we
are in the special case which gives indeed $T(n)=\Theta(n^2\lg n)$.
\par Another approach is via the auxiliary function $S(n)$ discussed
in class. Here $S(n)=T(n)/n^2$. Dividing the original recursion by
$n^2$ gives
\[ \frac{T(n)}{n^2} = \frac{T(n/3)}{(n/3)^2} + 1 \]
so that
\[ S(n) = S(n/3)+1 \mbox{ with initial value } S(1)=T(1)/1^2= 1 \]
so that
\[ S(n) = 1+\log_3n \mbox{ and so } T(n)=n^2(1+\log_3n) \]
\item Use the Master Theorem to give, in Thetaland, the asymptotics of
these recursions:
\ben
\item $T(n)= 6T(n/2) + n\sqrt{n}$
\So As $\log_26=\frac{\ln 6}{\ln 2}=2.58\cdots > 3/2$ we have
Low Overhead and $T(n)=\Theta(n^{\log_26})$.
\item $T(n)= 4T(n/2)+n^5$
\So $\log_24=2<5$ so we have High Overhead and $T(n)=\Theta(n^5)$.
\item $T(n)= 4T(n/2)+7n^2+2n+1$
\So $\log_24=2$ and the Overhead is $\Theta(n^2)$ so $T(n)=\Theta(n^2\lg n)$.
\een
\item {\tt Toom-3} is an algorithm similar to the
Karatsuba algorithm discussed in class. (Don't worry how
{\tt Toom-3} really works, we just want an analysis given the
information below.)
It multiplies two $n$ digit numbers by making five recursive calls
to multiplication of two $n/3$ digit numbers plus thirty additions
and subtractions. Each of the additions and subtractions take
time $O(n)$. Give the recursion for the time $T(n)$ for {\tt Toom-3} and
use the Master Theorem to find the asymptotics of $T(n)$. Compare with
the time $\Theta(n^{\log_23})$ of Karatsuba. Which is faster when $n$
is large?
\So
$T(n)=5T(n/3)+O(n)$ as the thirty is absorbed into the
big oh $n$ term. From the master theorem $T(n)=\Theta(n^{\log_35})$. As
\[ \log_35=\frac{\ln 5}{\ln 3}= 1.46\cdots < 1.58\cdots = \log_23 \]
it is better than
the $\Theta(n^{\log_23})$ of Karatsuba. (In practice unless $n$ is really
large Karatsuba does better because {\tt Toom-3} has large constant factors.)
\item Write the following sums in the form $\Theta(g(n))$ with $g(n)$
one of the standard functions. In each case give reasonable (they
needn't be optimal) positive $c_1,c_2$ so that the sum is between
$c_1g(n)$ and $c_2g(n)$ for $n$ large.
\ben
\item $n^2+(n+1)^2+\ldots + (2n)^2$
\So $\Theta(n^3)$. There are $\sim n$ terms all between $n^2$ and
$4n^2$ so the sum is between $n^3(1+o(1))$ and $4n^3(1+o(1))$.
\item $\lg^2(1)+\lg^2(2)+\ldots + \lg^2(n)$
\So $\Theta(n\lg^2n)$. There are $n$ terms all at most $\lg^2(n)$
so an upper bound is $n\lg^2(n)$. Lopping off the bottom half of the
terms we still have $n/2$ terms and each is at least
$\lg^2(n/2) = (\lg(n)-1)^2 \sim \lg^2n$ so the lower bound is
$(1+o(1))(\frac{n}{2})\lg^2n$.
\item $1^3+\ldots+n^3$.
\So $T(n)=\Theta(n^4)$. Upper bound $n^4$ as $n$ terms, each at most
$n^3$. Lopping off bottom half yields $n/2$ terms, each at least
$(n/2)^3$ giving a lower bound $(n/2)(n/2)^3= n^4/16$.
\een
\item (Just for fun!) Name this Paul Simon tune:
$\forall_x\exists_y \lceil x \rceil = \lfloor y \rfloor$.
\So One Man's Ceiling is another Man's Floor.
\item Give an algorithm for subtracting two $n$-digit decimal numbers.
The numbers will be inputted as $A[0\cdots N]$ and $B[0\cdots N]$
and the output should be $C[0\cdots N]$.
How long does your algorithm take, expressing
your answer in one of the standard $\Theta(g(n))$ forms.
\So Here is one way, the term {\tt BORROW} being the truth value of
whether you have ``borrowed.''
\\ BORROW=false;
\\ FOR I=0 TO N;
\\ IF BORROW=false THEN X=A[I]-B[I];
\\ IF BORROW=true THEN X=A[I]-1-B[I];
\\ IF X $\geq$ 0 THEN
\\ \hsa C[I]=X;
\\ \hsa BORROW=false;
\\ ELSE
\\ \hsa C[I]=X+10;
\\ \hsa BORROW=true;
\\ ENDFOR
\\ IF BORROW=true THEN ERROR;
\\ END
\par This takes only a single pass and so is a linear time, that is $\Theta(N)$
algorithm.
\een
\end{document}