\documentclass[11pt]{article}
\begin{document}
\pagestyle{empty}
\newcommand{\sig}{\sigma}
\newcommand{\ra}{\rightarrow}
\newcommand{\eps}{\epsilon}
\newcommand{\gam}{\gamma}
\newcommand{\ah}{\alpha}
\newcommand{\ben}{\begin{enumerate}}
\newcommand{\noi}{\noindent}
\newcommand{\een}{\end{enumerate}}
\newcommand{\hsa}{\hspace*{1cm}}
\newcommand{\hsb}{\hspace*{2cm}}
\newcommand{\So}{\\ {\tt Solution:}}
\newcommand{\Co}{\\ {\tt Comment:}}
\begin{center}{\Large\bf FUNDAMENTAL ALGORITHMS \\ PRACTICE MIDTERM\\ WITH SOLUTIONS}\end{center}
\ben
\item (35) These questions concern a max-heap $A$ with auxiliary
structures {\tt length[A]}, {\tt Left[i]}, {\tt Right[i]} and
{\tt Parent[i]}.
\ben
\item (5) Give the max-heap property. (That is, what is the
relationship between the values $A[i]$ that one needs for a
heap.)
\So $A[parent[i]]\geq A[i]$
\item (5) Suppose {\tt length[A]}=110 and the values $A[i]$ are
distinct. What are the possible $i$ for which $A[i]$ is
the second largest value? (You must give all possible $i$.)
\So $i=2$ or $i=3$
\item (5) Suppose {\tt length[A]}=110 and the values $A[i]$ are
distinct. What are the possible $i$ for which $A[i]$ is
the smallest value? (You must give all possible $i$.)
\So $i$ must be a leaf so its children $2i,2i+1$ must not exist,
so we need $2i > 110$, so $56 \leq i \leq 110$.
\Co Some students considered only the $i$ in the last row, but some
nodes in the row before that also have no children.
\item (10) Describe the algorithm {\tt MAX-HEAP-INSERT(A,key)}
which adds to heap $A$ a new entry with value {\tt key}.
(Either a description in words or a pseudocode program
would be fine.)
\So First increment $heapsize[A]$ by one. Then set
$A[heapsize(A)]$ to $key$. Set $i=heapsize(A)$. Now
while $i\neq 1$ (the root) and $A(parent(i))< A(i)$
interchange $A(i)$, $A(parent(i))$ and set $i$ to
$parent(i)$.
\item (10) How long (in worst case) does the above algorithm
take as a function of $n=${\tt length[A]}?
Give a {\em reason} for your answer.
\So You are working up the tree (in worst case all the way to
the root) so there would be $\lg n$ interchanges so time
$\Theta(\lg n)$.
\een
\item (25) Write the expressions below as $\Theta(g(n))$
where $g(n)$ is of one of the forms $n^a$, $\ln^bn$, $n^a\ln^bn$.
For the second and third parts give a short argument for your answer.
\ben
\item (5) $10n\lg n + 89 + 3n^2$
\So $\Theta(n^2)$
\item (10) $\sqrt{1}+\sqrt{2}+\ldots+\sqrt{n-1}+\sqrt{n}$
\So From above it is $n\sqrt{n}=n^{3/2}$. From below, cutting at
the half way mark, it is $(n/2)\sqrt{n/2}= \frac{1}{2\sqrt{2}}n^{3/2}$.
Thus it is $\Theta(n^{3/2})$.
\item (10) The average time {\tt QUICKSORT} takes to sort
an array of (caution: curve ball!) size $n^4$.
\So QUICKSORT on $n$ takes average time $\Theta(n\ln n)$ so on $n^4$
it would be $\Theta(n^4\ln (n^4))$ but $\ln(n^4)=4\ln n$ and the $4$
gets absorbed in the $\Theta$ giving $\Theta(n^4\ln n)$.
\een
\item (10) Consider the following fragment of a program:
\\ \hsa J=I
\\ \hsa WHILE J $\leq$ N
\\ \hsb do J=2*J
\\
Give a precise formula for the number of times the third step
is applied. Your answer will be a function of $I$ and $N$.
(Nearly full credit if you are off by one.)
\So (We tacitly assume $I\leq N$ and both are positive integers.)
After applying the third step $t$ times $J=I2^t$ and continue
as long as $I2^t \leq N$ which is $t\leq \lg(N/I)$ which is
up to $t=\lfloor \lg(N/I) \rfloor$.
{\em However,} the actual answer is $1+\lfloor \lg(N/I) \rfloor$.
The third step will be applied when $J=I2^s$ for $0\leq s\leq t$.
\item (15)
Consider the following program:
\\ X=0
\\ FOR I=1 to N
\\ \hsa K=1
\\ \hsa WHILE I*K $\leq$ N
\\ \hsb K ++
\\ Analyze the time this program takes as an asymptotic function
of $N$.
\So For a given $I$, $K$ will run from $1$ to $N/I$ so the time
for the inner loop is $N/I$ and so the total time
is $N+\frac{N}{2}+\frac{N}{3}+\ldots+\frac{N}{N}$ or
$N(1+\frac{1}{2}+\ldots+\frac{1}{N})$. The sum is $\sim N\ln N$
but as we had not yet covered this result, leaving the answer
as a sum was given $13$ of the $15$ points.
\item (25) These questions concern {\tt BUCKET-SORT}. We assume that
the input is an array $A[1\cdots n]$ with all $0\leq A[i]< 1$.
We will assume an auxiliary array $B[0\ldots (n-1)]$ of linked
lists, or buckets. We assume an algorithm $SORT[C]$ which will
sort a linked list $C$. This will be a black box that we will
apply to the linked lists $B[i]$.
\ben
\item (10) Give the algorithm for {\tt BUCKET-SORT[A]}.
\So Assume (or force) each $B[i]$ to be initially empty. Now for
$i=1$ to $n$ set $place$ to be the floor of $nA[i]$, and add
$A[i]$ at the start of linked list $B[place]$. Now for $0\leq i \leq n-1$
$SORT[B[i]]$. Now concatenate the lists $B[i]$.
\item (5) What side assumption do we make, under which {\tt BUCKET-SORT}
is an excellent algorithm?
\So The data is uniform in $[0,1)$.
\item (10) Assume $SORT[C]$ takes time $m^2$ when $C$ is a list of
length $m$. Now assume that the data $A[i]$ are actually uniformly
distributed between $0$ and $n^{-1/2}$. How long will
{\tt BUCKET-SORT[A]} take?
\So Now there will be $n^{1/2}$ buckets each having size $n^{1/2}$. Each
such bucket is sorted in time $n$ ($m^2$ with $m=\sqrt{n}$) so the total
sorting time is $n^{1/2}n = n^{3/2}$. One also needs to go through the empty
buckets but that just takes time $O(n)$ which is a smaller order of
magnitude, so the total time is $\Theta(n^{3/2})$.
\een
\item (20) Consider the recursion
\[ T(n) = 2T(n/2) + 3n \]
with initial condition $T(1)=0$. We shall {\em assume}
$n$ is a power of two.
\ben \item (5) What is $T(8)$ precisely?
\So $T(2)=2\cdot0 + 6 = 6$; $T(4)=2\cdot 6 + 12 = 24$; $T(8)= 2\cdot 24 + 24 = 72$.
\item (5) What does the Master Theorem tell you about the
asymptotic growth of $T(n)$?
\So $\log_2 2=1$ so this is the ``just right'' case and $T(n)=\Theta(n\lg n)$.
\item (5) Making the substitution $S(n)=T(n)/n$ create a recursion
for $S(n)$.
\So Dividing the initial recursion by $n$
\[ \frac{T(n)}{n} = \frac{T(n/2)}{n/2} + 3 \]
so that
\[ S(n) = S(n/2) + 3 \]
\item (5) Solve the recursion for $S(n)$ precisely. Using this,
give the precise value for $T(n)$.
\So $S(1)=T(1)/1=0$. We double $\lg n$ times, each time adding $3$
so that $S(n)= 3\lg n$ (precisely!) and so $T(n)=3n\lg n$.
\een
\een
\end{document}