
typedef string ItemType;

void bubbleSort(ItemType theArray[], int n)
{
    bool sorted = false;
    int pass = 1;
    while (!sorted && (pass < n))
    {
        sorted = true;
        for (int index = 0; index < n - pass; index++)
        {
            int nextIndex = index + 1;
            if (theArray[index] > theArray[nextIndex])
            {
                std::swap(theArray[index], theArray[nextIndex]);
                sorted = false;
            }
        }
        pass++;
    }
}

void insertionSort(ItemType theArray[], int n)
{
    for (int unsorted = 1; unsorted < n; unsorted++)
    {
        ItemType nextItem = theArray[unsorted];
        int loc = unsorted;
        while ((loc > 0) && (theArray[loc - 1] > nextItem))
        {
            theArray[loc] = theArray[loc - 1];
            loc--;
        }
        theArray[loc] = nextItem;
    }
}

int findIndexofLargest(const ItemType theArray[], int size)
{
    int indexSoFar = 0;
    for (int currentIndex = 1; currentIndex < size; currentIndex++)
    {
        if (theArray[currentIndex] > theArray[indexSoFar])
            indexSoFar = currentIndex;
    }
    return indexSoFar;
}

void selectionSort(ItemType theArray[], int n)
{
    for (int last = n - 1; last >= 1; last--)
    {
        int largest = findIndexofLargest(theArray, last + 1);
        std::swap(theArray[largest], theArray[last]);
    }
}

void merge(ItemType theArray[], int first, int mid, int last)
{
    ItemType tempArray[MAX_SIZE];
    int first1 = first;
    int last1 = mid;
    int first2 = mid + 1;
    int last2 = last;
    int index = first1;
    while ((first1 <= last1) && (first2 <= last2))
    {
        if (theArray[first1] <= theArray[first2])
        {
            tempArray[index] = theArray[first1];
            first1++;
        }
        else
        {
            tempArray[index] = theArray[first2];
            first2++;
        }
        index++;
    }
    while (first1 <= last1)
    {
        tempArray[index] = theArray[first1];
        first1++;
        index++;
    }
    while (first2 <= last2)
    {
        tempArray[index] = theArray[first2];
        first2++;
        index++;
    }
    for (index = first; index <= last; index++)
        theArray[index] = tempArray[index];
}

void mergeSort(ItemType theArray[], int first, int last)
{
    if (first < last)
    {
        int mid = first + (last - first) / 2;
        mergeSort(theArray, first, mid);
        mergeSort(theArray, mid + 1, last);
        merge(theArray, first, mid, last);
    }
}

void shellSort(ItemType theArray[], int n)
{
    for (int h = n / 2; h > 0; h = h / 2)
    {
        for (int unsorted = h; unsorted < n; unsorted++)
        {
            ItemType nextItem = theArray[unsorted];
            int loc = unsorted;
            while ((loc >= h) && (theArray[loc - h] > nextItem))
            {
                theArray[loc] = theArray[loc - h];
                loc = loc - h;
            } // end while
            theArray[loc] = nextItem;
        } // end for
    } // end for
} // end shellSort

int split(int a[], int low, int high)
{
    int x = a[low];
    bool highTurn = true;
    while (low < high)
    {
        if (highTurn)
        {
            if (a[high] < x)
            {
                a[low++] = a[high];
                highTurn = false;
            }
            else
                high--;
        }
        else if (a[low] > x)
        {
            a[high--] = a[low];
            highTurn = true;
        }
        else low++;
    }
    a[low] = x;
    return low;
}

void quickSort1(int nums[], int first, int last)
{
    if (first >= last) return;
    int mid = split(nums, first, last);
    quickSort1(nums, first, mid - 1);
    quickSort1(nums, mid + 1, last);
}

void quickSort(int nums[], int length)
{
    quickSort1(nums, 0, length - 1);
}

struct node *newNode(int item)
{
    struct node *temp = (struct node *)malloc(sizeof(struct node));
    temp->key = item;
    temp->left = temp->right = NULL;
    return temp;
}

void inorder(struct node *root)
{
    if (root != NULL)
    {
        inorder(root->left);
        printf("%d \n", root->key);
        inorder(root->right);
    }
}

struct node* insert(struct node* node, int key)
{
    if (node == NULL) return newNode(key);
    if (key < node->key)
        node->left = insert(node->left, key);
    else if (key > node->key)
        node->right = insert(node->right, key);
    return node;
}

struct node * minValueNode(struct node* node)
{
    struct node* current = node;
    while (current->left != NULL)
        current = current->left;
    return current;
}

struct node* deleteNode(struct node* root, int key)
{
    if (root == NULL) return root;
    if (key < root->key)
        root->left = deleteNode(root->left, key);
    else if (key > root->key)
        root->right = deleteNode(root->right, key);
    else
    {
        if (root->left == NULL)
        {
            struct node *temp = root->right;
            free(root);
            return temp;
        }
        else if (root->right == NULL)
        {
            struct node *temp = root->left;
            free(root);
            return temp;
        }
        struct node* temp = minValueNode(root->right);
        root->key = temp->key;
        root->right = deleteNode(root->right, temp->key);
    }
    return root;
}

int main()
{
    struct node *root = NULL;
    root = insert(root, 7);
}
Let T(n) be the number of iterations of the loops in the merge function, totaled over a whole mergeSort of n items. (That will be 1/2 the potential number of item comparisons.) In general, when n is 2^k for some k > 0, we have
T(n) = 2T(n/2) + n
     = 2(2T(n/4) + n/2) + n = 4T(n/4) + 2n
     = 4(2T(n/8) + n/4) + 2n = 8T(n/8) + 3n
     ...
     = nT(n/n) + (log n)·n = nT(1) + n·log(n) = n·0 + n·log(n) = n·log(n)
where log(n) means log base 2 of n. This also happens to be the same as the number of assignments needed to copy the temporary buffer back into the original array.
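As a sanity check, here is a minimal sketch (a stripped-down int version of mergeSort plus a global iteration counter, both introduced only for this experiment) that compares the total merge-loop iterations against n·log2(n):

// Hedged sketch: count how many times the merge loops run and check the
// T(n) = n log2(n) claim above. mergeCount is introduced just for this test.
#include <iostream>
#include <vector>
#include <cmath>
using namespace std;

long mergeCount = 0;

void countingMergeSort(vector<int>& a, int first, int last) {
    if (first >= last) return;
    int mid = first + (last - first) / 2;
    countingMergeSort(a, first, mid);
    countingMergeSort(a, mid + 1, last);
    vector<int> temp;
    int i = first, j = mid + 1;
    while (i <= mid && j <= last) {           // same structure as merge() above
        ++mergeCount;
        temp.push_back(a[i] <= a[j] ? a[i++] : a[j++]);
    }
    while (i <= mid)  { ++mergeCount; temp.push_back(a[i++]); }
    while (j <= last) { ++mergeCount; temp.push_back(a[j++]); }
    for (int k = first; k <= last; k++) a[k] = temp[k - first];
}

int main() {
    for (int n : {8, 64, 1024}) {
        vector<int> a(n);
        for (int k = 0; k < n; k++) a[k] = n - k;   // descending input
        mergeCount = 0;
        countingMergeSort(a, 0, n - 1);
        cout << "n=" << n << "  iterations=" << mergeCount
             << "  n*log2(n)=" << n * log2(n) << endl;
    }
}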
bool canMake(int nums[], int size, int t){
    return t==0 || (size-- > 0 &&
        (canMake(nums,size,t+nums[size]) ||
         canMake(nums,size,t-nums[size]) ||
         canMake(nums,size,t)));
}

#2 binary search
int Jessie(int nums[], int first, int last, int target)
{
    if (first == last)
        return nums[first] == target ? first : -1;
    int mid = (first + last) / 2;
    if (target <= nums[mid])
        return Jessie(nums, first, mid, target);
    else
        return Jessie(nums, mid+1, last, target);
}
// pre for initial call: first==0, last==(length of nums - 1)
int Riley(int nums[], int first, int last, int target) {
if (first >= last)
return (first > last || nums[first] != target) ? -1 : first;
int mid = (first + last) / 2;
if (target == nums[mid])
return mid;
if (target < nums[mid])
return Riley(nums, first, mid-1, target);
return Riley(nums, mid+1, last, target);
}

/*
Notice: selectionsort does O(n²) item comparisons but only O(n) swaps.
That makes it faster than bubbleSort and insertionSort when the array elements
are large chunks of data (e.g. long strings).
insertionSort is O(n²) for both item comparisons & assignments in the average
case but O(n) in the best case where the array is already sorted or close to
being sorted.
*/
Analysis for selectionsort
The minIndex function does O(n) item comparisons but no item assignments.
minIndex is called O(n) times from selectionSort.
Therefore selectionSort does O(n²) item comparisons.
Each call to minIndex is followed by one swap (= 3 assignments).
Therefore selectionSort does O(n) item assignments.
Even though each item comparison takes less time than an item assignment, the time
is not negligible.
As the array gets big the bulk of the runtime will be taken by item comparisons,
not item assignments.
That's why selection sort is an O(n²) algorithm, in both the best case and the
worst case.
For insertionSort, in the worst case the array items start out in descending order.
Therefore each insertion will involve O(n) item assignments (via the shifts).
Since there are O(n) insertions, the worst case runtime is O(n²).
In the best case the array items start out in ascending order.
Therefore only O(1) comparisons and item assignments are required for each insertion.
So insertionSort is O(n) in the best case. Also, when the array is "almost sorted" in the sense that
a value is never far from its correct position, insertionSort will have O(n) performance.
That makes insertionSort better in some cases than an O(n log n) algorithm like quicksort.
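A hedged sketch that instruments both sorts with counters (the counter names and the descending test input are assumptions for this experiment) to make the comparison/assignment counts visible:

// Hedged sketch: count comparisons and assignments for selection sort and
// insertion sort on a worst-case (descending) array.
#include <iostream>
using namespace std;

int main() {
    const int n = 1000;
    int a[n], b[n];
    for (int i = 0; i < n; i++) a[i] = b[i] = n - i;     // descending input

    long cmp = 0, asg = 0;
    // selection sort
    for (int last = n - 1; last >= 1; last--) {
        int largest = 0;
        for (int i = 1; i <= last; i++) { cmp++; if (a[i] > a[largest]) largest = i; }
        swap(a[largest], a[last]); asg += 3;             // one swap = 3 assignments
    }
    cout << "selection: comparisons=" << cmp << " assignments=" << asg << endl;

    cmp = asg = 0;
    // insertion sort
    for (int unsorted = 1; unsorted < n; unsorted++) {
        int next = b[unsorted], loc = unsorted;
        while (loc > 0) {
            cmp++;
            if (b[loc - 1] <= next) break;
            b[loc] = b[loc - 1]; asg++; loc--;           // shift
        }
        b[loc] = next; asg++;
    }
    cout << "insertion: comparisons=" << cmp << " assignments=" << asg << endl;
}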

Nested loops
count = 0;
for (int i = 1; i < n ; i++)
for (int j = 0; j < i; j++)
{
// some constant time operation
count++; // count gets incremented i times.
}
cout << "The final value of count is " << count << endl;

We can derive a runtime function T(n) by expressing count in terms of n.


count = 1 + 2 + ... + (n-1) = n(n-1)/2 = (1/2)(n² - n).
Since a constant amount of work is being done for each loop iteration, T(n) will be proportional to count, i.e. T(n) = kn² - kn, for some k.
(Note that the 1/2 from (1/2)(n² - n) gets wrapped into k.)
bubblesort, selectionSort and insertionSort have loops similar to the nested loop above. That's why they are each O(n²).
void bubbleSort(int nums[], int n) {
for (int i = n-1; i > 0; i--)
for (int j = 0; j < i; j++)
if (nums[j] > nums[j+1]) // item comparison happens n-1 times when i=n-1, n-2 times when i=n-2,..., on down to 1 time when i=1.
swap(nums[j], nums[j+1]);
}
void selectionSort(int nums[], int n) {
for (int start = 0; start < n-1; start++) { // so n-1 passes
int minIndex = start;
for (int i = start+1; i < n; i++)
if (nums[minIndex] > nums[i]) // happens n-1 times when start=0, down to 1 time when start= n-2
minIndex = i;
swap(nums[start], nums[minIndex]); // only n swaps for any input array, unlike bubbleSort which may do as many swaps as comparisons
}
}
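A tiny sketch, just to confirm the arithmetic, that runs the nested loop from above and compares count against n(n-1)/2:

// Hedged sketch: verify that the nested-loop counter equals n(n-1)/2.
#include <iostream>
using namespace std;

int main() {
    for (int n : {5, 10, 100}) {
        long count = 0;
        for (int i = 1; i < n; i++)
            for (int j = 0; j < i; j++)
                count++;                     // same loop structure as above
        cout << "n=" << n << "  count=" << count
             << "  n(n-1)/2=" << (long)n * (n - 1) / 2 << endl;
    }
}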
Analysis of recursive functions
We posit a runtime function T(n) and characterize it recursively based on the base cases of f and the structure of recursive calls.
For example, let's analyze the following recursive version of Fibonacci:
int f(int n) {
if (n < 2) return n;
return f(n-2) + f(n-1);
}
To approximate the runtime, let T(n) be the number of additions (+) performed for a given n.
Looking at the two base cases (n = 0, n = 1) and the structure of recursive calls, we derive three relationships:
T(0) = 0
T(1) = 0
T(n) = 1 + T(n-2) + T(n-1). (For n > 1).
Working from the bottom up we can calculate the number of additions in terms of n.
T(2) = 1 + 0 + 0 = 1,
T(3) = 1 + 0 + 1 = 2,
T(4) = 1 + 1 + 2 = 4,
T(5) = 1 + 2 + 4 = 7,
T(6) = 1 + 4 + 7 = 12,
T(7) = 1 + 7 + 12 = 20,
T(8) = 1 + 12 + 20 = 33,
T(9) = 1 + 20 + 33 = 54 etc.
It would be nicer to have a formula. Often we need special techniques to find the so-called closed-form solution but in this case it's easy to guess: Noticing that the results are all Fibonacci numbers minus
1 we conjecture that
T(n) = fib(n+1) - 1.
This works beautifully in our three relationships:
fib(1) - 1 = 1 - 1 = 0
fib(2) - 1= 1 - 1 = 0
fib(n+1) - 1 = 1 + fib(n-1) - 1 + fib(n) - 1
Now we can leverage the closed form for computing Fibonacci numbers:
fib(n) = [φ^n - (-φ)^(-n)] / sqrt(5), where φ = the golden ratio = (1+sqrt(5))/2 ≈ 1.6.
So T(n) = [φ^(n+1) - (-φ)^(-(n+1))] / sqrt(5) - 1.

Notice this is O(φ^n), since the leading term can be written (φ/sqrt(5))·φ^n and the term in (-φ)^(-(n+1)) vanishes as n gets big. So adding 1 to the input size should multiply the runtime by
approximately 1.6. You can verify this experimentally.
Of course this means our algorithm is exponentially slow. Remember we took that result on faith a few weeks ago.
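A small sketch, assuming the same naive f above plus a global counter and an iterative helper introduced only for this check, that confirms T(n) = fib(n+1) - 1 experimentally:

// Hedged sketch: count the additions done by the naive recursion and compare
// with the conjectured closed form. addCount and fibIter are just for the test.
#include <iostream>
using namespace std;

long addCount = 0;

int f(int n) {                     // same naive recursion as above
    if (n < 2) return n;
    addCount++;
    return f(n - 2) + f(n - 1);
}

long fibIter(int n) {              // iterative fib, used only for the check
    long a = 0, b = 1;
    for (int i = 0; i < n; i++) { long c = a + b; a = b; b = c; }
    return a;
}

int main() {
    for (int n = 2; n <= 10; n++) {
        addCount = 0;
        f(n);
        cout << "n=" << n << "  T(n)=" << addCount
             << "  fib(n+1)-1=" << fibIter(n + 1) - 1 << endl;
    }
}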

Remember that a heap is a way to implement a priority queue.
The property of a priority queue is: when you get an item, it is the one with the highest priority.
Heaps have the same complexity as a balanced search tree but:
they can easily be kept in an array
they are much simpler than a balanced search tree
they are cheaper to run in practice
A heap is a binary tree that has special structure compared to a general binary tree:
1. The root is greater than any value in a subtree
this means the highest priority is at the root
this is less information than a BST and this is why it can be easier and faster
2. It is a complete tree
the height is always log(n) so all operations are O(log(n))

void fixheap(int a[], int left, int right)
{
    int x;
    x = a[left];
    int i, j;
    for (i = left, j = 2*i + 1; j <= right; i = j, j = 2*i + 1)
    {
        if (j < right && a[j+1] > a[j])
            j++;
        if (x >= a[j])
            break;
        a[i] = a[j];
    }
    a[i] = x;
}

void heapSort(int a[], int n)
{
    if (n > 1)
    {
        for (int left = n/2-1; left >= 0; left--)
            fixheap(a, left, n-1);
        swap(a[0], a[n-1]);
        for (int right = n-2; right >= 1; right--)
        {
            fixheap(a, 0, right);
            swap(a[0], a[right]);
        }
    }
}

The part just shown is very similar to removal from a heap, which is O(log(n)). You do it n-1 times so it is O(nlog(n)). The last steps are cheaper but, for the reverse reason from the building of the heap, most are log(n), so it is O(nlog(n)) overall for this part. The build part was O(n) so it does not dominate. For the whole heap sort you get O(nlog(n)).
There is no extra memory except a few for local temporaries.
Thus, we have finally achieved a comparison sort that uses no extra memory and is O(nlog(n)) in the worst case.
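A minimal driver for the heapSort above, assuming the fixheap/heapSort code is in scope (the test values are arbitrary):

// Hedged sketch: drive the fixheap/heapSort code above on a small array.
#include <iostream>
#include <algorithm>   // std::swap, used unqualified by heapSort above
using namespace std;

// fixheap(...) and heapSort(...) as defined above are assumed to be in scope.

int main() {
    int a[] = {9, 4, 7, 1, 8, 2, 6};
    int n = sizeof(a) / sizeof(a[0]);
    heapSort(a, n);
    for (int i = 0; i < n; i++) cout << a[i] << " ";   // 1 2 4 6 7 8 9
    cout << endl;
}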
T(n) ≈ kn²  =>  T(2n) ≈ k(2n)² = 4kn² = 4T(n)
(If you double the input size then the runtime is quadrupled.)
T(n) ≈ kn³  =>  T(2n) ≈ k(2n)³ = 8kn³ = 8T(n)
(If you double the input size then the runtime is multiplied by eight.)
T(n) ≈ k·2^n  =>  T(n+1) ≈ k·2^(n+1) = 2k(2^n) = 2T(n)
(If you add 1 to the input size then the runtime is doubled.)
T(n) ≈ k·n!  =>  T(n+1) ≈ k(n+1)! = (n+1)·k·n! = (n+1)T(n)
(If you add 1 to the input size then the runtime is multiplied by the approximate input size.)

In many cases people still use quick sort because it uses no extra memory and is usually
O(nlog(n)). Quick sort runs faster than heap sort in practice. The worst case of O(n²) is not
seen in practice.

g) 2^n (exponential) time Hint: 10,000 = 100 + 1 + 1 + 1 + ... + 1 (where we add 1 a total of 9,900 times). Every
+1 means runtime doubles.
i) factorial time Hint: 10,000 = 100 + 1 + 1 + 1 + ... + 1 (where we add 1 a total of 9,900 times). Every +1
means runtime is multiplied by the current array size. When the number of ms gets big, you may want to convert to
appropriate units (seconds, minutes, hours, days, years, centuries, millennia, ..., age_of_universes).

HeapSort
1. Call the buildMaxHeap() function on the list. Also referred to as heapify(), this builds a heap from a list in
O(n) operations.
2. Swap the first element of the list with the final element. Decrease the considered range of the list by one.
3. Call the siftDown() function on the list to sift the new first element to its appropriate index in the heap.
4. Go to step (2) unless the considered range of the list is one element.

Definition:
Edge: lines between 2 nodes.
Leaf: node with 0 children
Ancestor: node along the path from the root to node n
Sibling: 2 children of the same parent
Height: number of nodes in the longest path
A max-heap is a complete binary tree in which the value in each node is greater than or equal to the data in that
node's children

/* The height of a certain complete binary tree is 9. What is the smallest possible size the tree could have: 256
What is the number of interior nodes (non-leaves) in a perfect binary tree with height 21: 1048575
In some order (i.e. not necessarily as listed), the values 1, 2, 3, 4, 5, 6, 7 and 8 are inserted into an empty binary
search tree, producing a degenerate tree (meaning all interior nodes have just one child). How many such trees are
possible? (Hint: one is obtained by inserting in numeric order and another is obtained by inserting in reverse
numeric order... but there are others.): 128
If we consider all the binary trees with 24 nodes, the number of different possible heights is ______.
(height = # nodes in longest path, so an empty tree has height 0 etc.): 20
*/

class Queue
{
private:
    int A[MAX_SIZE];
    int front, rear;
public:
    // Constructor - set front and rear as -1.
    // We are assuming that for an empty Queue, both front and rear will be -1.
    Queue()
    {
        front = -1;
        rear = -1;
    }
    // To check whether Queue is empty or not
    bool IsEmpty()
    {
        return (front == -1 && rear == -1);
    }
    // To check whether Queue is full or not
    bool IsFull()
    {
        return (rear+1)%MAX_SIZE == front ? true : false;
    }
    // Inserts an element in queue at rear end
    void Enqueue(int x)
    {
        cout<<"Enqueuing "<<x<<" \n";
        if(IsFull())
        {
            cout<<"Error: Queue is Full\n";
            return;
        }
        if (IsEmpty())
        {
            front = rear = 0;
        }
        else
        {
            rear = (rear+1)%MAX_SIZE;
        }
        A[rear] = x;
    }
    // Removes an element in Queue from front end.
    void Dequeue()
    {
        cout<<"Dequeuing \n";
        if(IsEmpty())
        {
            cout<<"Error: Queue is Empty\n";
            return;
        }
        else if(front == rear)
        {
            rear = front = -1;
        }
        else
        {
            front = (front+1)%MAX_SIZE;
        }
    }
    // Returns element at front of queue.
    int Front()
    {
        if(front == -1)
        {
            cout<<"Error: cannot return front from empty queue\n";
            return -1;
        }
        return A[front];
    }
    /*
    Printing the elements in queue from front to rear.
    This function is only to test the code. This is not a standard function for Queue implementation.
    */
    void Print()
    {
        // Finding number of elements in queue
        int count = (rear+MAX_SIZE-front)%MAX_SIZE + 1;
        cout<<"Queue : ";
        for(int i = 0; i < count; i++)
        {
            int index = (front+i) % MAX_SIZE; // Index of element while traversing circularly from front
            cout<<A[index]<<" ";
        }
        cout<<"\n\n";
    }
};
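As a quick, hedged usage sketch of the Queue class above (the MAX_SIZE value of 101 is an assumption for this demo):

// Hedged sketch: exercise the circular-array Queue above.
// MAX_SIZE must be defined before the class; 101 is an arbitrary choice.
#include <iostream>
using namespace std;
#define MAX_SIZE 101

// ... class Queue as defined above ...

int main() {
    Queue Q;
    Q.Enqueue(2);  Q.Enqueue(4);  Q.Enqueue(6);
    Q.Print();                                 // Queue : 2 4 6
    Q.Dequeue();
    Q.Print();                                 // Queue : 4 6
    cout << "Front: " << Q.Front() << endl;    // 4
}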

To gauge the complexity of a program we count the operations that are executed most frequently.
For large inputs these operations will eat up the bulk of the clock cycles devoted to running that
program. In fact, we can do our analysis even before implementing the program. By looking at
repeated operations in the algorithm we can get a measure of the efficiency of the algorithm
independent of the implementation or hardware.
Our operation count will generally be a function f that depends on the size of the algorithm's input.
This allows us to characterize an algorithm's complexity by looking at how fast f grows. If f is a linear
function, we say the algorithm is a linear time algorithm. If f is a quadratic function we say the
algorithm has polynomial or quadratic runtime. Here are some of the complexity classes that an
algorithm may belong to:
O(1) Constant time
e.g. stack push, pop, peek (both array and linked implementations)
O(log n) Log Time
e.g. binary search
O(sqrt n) Square root time
e.g. determine if n is prime. (This takes exponential time in terms
of the # of bits in n though, so it depends what we mean by size.)
O(n) Linear Time e.g. linear search. Any algorithm that loops through an array once
and does a constant time op on each iteration.
O(n log n) Linearithmic Time (or "nlogn Time") e.g. quicksort (average case),
mergesort (all cases)
O(n²) Quadratic Time e.g. insertion sort, selection sort, bubble sort, quicksort worst
case
O(n^k) Polynomial Time e.g. optimized slowsort is cubic time,
Strassen's matrix multiplication is n^2.8 time, approximately.
O(2^n), O(3^n) etc. and generally O(b^n) with b > 1. Exponential Time e.g. canMake() was
O(3^n) in worst case. Factoring a number in terms of the number's length in bits.
O(n!) Factorial Time e.g. find shortest path to visit all nodes in a weighted graph
(Traveling salesman) using exhaustive search.
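For instance, a minimal sketch of the O(sqrt n) primality test mentioned in the list, assuming a simple trial-division approach (the name isPrime is introduced here just for illustration):

// Hedged sketch: trial division up to sqrt(n) -- O(sqrt n) in the value of n,
// which is exponential in the number of bits of n, as noted above.
#include <iostream>
using namespace std;

bool isPrime(long n) {
    if (n < 2) return false;
    for (long d = 2; d * d <= n; d++)   // loop runs at most sqrt(n) times
        if (n % d == 0) return false;
    return true;
}

int main() {
    cout << isPrime(97) << " " << isPrime(100) << endl;   // 1 0
}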

Node * makeTree(int maxheight)
{
    if (maxheight <= 0) return nullptr;
    Node * p = rand() % 5 != 0 ? makeTree(maxheight-1) : nullptr;
    Node * q = rand() % 5 != 0 ? makeTree(maxheight-1) : nullptr;
    return createNode(rand() % 15, p, q);
}

void drawTree(Node * p, int indentation=0)


{
int dashes = 3;
for (int i = 0; i < indentation; i++) cout << " ";
if (p != nullptr)
{
for (int i = 0; i < dashes; i++) cout << "-";
cout << p->data << endl;
drawTree(p->left, indentation + 5);
drawTree(p->right, indentation + 5);
}
else cout << "~" << endl;
}
int size (Node*root)
{
if(root == nullptr)
{
return 0;
}
return 1+size(root->right)+size(root->left);
}
int height(Node*root)
{
if(root == nullptr)
{
return 0;
}
return 1 + max(height(root->right),height(root->left));
}
bool isPerfect(Node*root)
{
return ((size(root) == (pow(2,height(root))) - 1)? true : false);
}
bool isComplete(Node*root)
{
if(root == nullptr)
{
return true;
}
if(height(root->left)-height(root->right) == 1)
{
return isComplete(root->left) && isPerfect(root->right);
}
if(height(root->left)-height(root->right) == 0)
{
return isComplete(root->right) && isPerfect(root->left);
}
return false;
}

bool isBalanced(Node*root)
{
    if(root == nullptr)
    {
        return true;
    }
    if((abs(height(root->left)-height(root->right))) <= 1
       && isBalanced(root->left) && isBalanced(root->right))
    {
        return true;
    }
    else
    {
        return false;
    }
}

void preorder(Node *p)
{
    if(!p)
    {
        return;
    }
    cout << p->data << " ";
    preorder(p->left);
    preorder(p->right);
}

void inorder(Node *p)
{
    if(!p)
    {
        return;
    }
    inorder(p->left);
    cout << p->data << " ";
    inorder(p->right);
}

void postorder(Node *p)
{
    if(!p)
    {
        return;
    }
    postorder(p->left);
    postorder(p->right);
    cout << p->data << " ";
}

void deleteTree(Node * p)
{
    if(p == nullptr)
    {
        return;
    }
    deleteTree(p->left);
    deleteTree(p->right);
    delete (p);
}

void levelorder(Node * p)
{
    queue <Node*> myqueue;
    Node *temp;
    myqueue.push(p);
    while(!myqueue.empty())
    {
        temp = myqueue.front();
        myqueue.pop();
        if(temp)
        {
            cout << temp->data << " ";
            myqueue.push(temp->left);
            myqueue.push(temp->right);
        }
    }
}

Producing a degenerate tree: 2^(n-1) insertion orders.
If we consider all the binary trees with 32 nodes, the number of different possible heights:
2^5-1 = 31 => minimum height = 6 => 32 - 6 + 1 = 27
What is the number of interior nodes (non-leaves) in a perfect binary tree with height 24? 2^23 - 1
A "senary tree" is like a binary tree but each node has six child pointers rather than two. What is
the maximum possible number of nodes in a senary tree with height 6? 6^6+6^5
A complete tree with 654,931 nodes: 2^19 = 524288 <= 654,931 < 1048576 = 2^20 => height 20

A binary search tree (BST) is a binary tree such that for each value v in a node N, the value in N's
left child is < v and the value in N's right child is > v. A balanced BST gives O(log n) find, insert and
remove operations. If the BST is not balanced these go down to O(n). (There are various types of
self-balancing BSTs but we won't cover those in CSCI 20.)
BSTs can be used to implement the ADT set, assuming the template type includes a less-than
operation.
To look for a target t, compare t with the current node's value v. If t == v, we found it. If t < v continue
searching in the left subtree. If t > v continue searching in the right subtree. If we arrive at an
empty subtree then t is not in the BST.
To insert a value, first look for it. If found, don't insert anything since there are no duplicates in a
BST. Otherwise add a new leaf at the point where an empty subtree was reached.
To remove a value t there are four cases:
1) t is not found. (Don't alter the tree.)
2) t is found in a leaf N. (Set the parent's pointer to null and delete N.)
3) t is found in a node N with just one child. ("Chop out" N by making N's parent point to N's child.
Then delete N.)
4) t is found in a node N with two children. (Go to the right child, then continue going left as far as
possible to reach a node M. Copy M's value to N, then delete M. This works because M will
necessarily contain the smallest value that's greater than t, so the tree will still be a BST.)
If values being inserted or removed are randomly distributed then the BST tends to stay balanced.
If that fails to happen, then we might end up with a "degenerate" tree containing only one leaf
node. That's a worst case scenario since the height is now equal to the size, rather than the log of the size. (So
formerly O(log n) operations are now O(n).) An example of a degenerate tree is the tree obtained
by inserting 1, 2, 3, 4, 5, 6, 7 into an initially empty BST.
A BST can also be used to implement a priority queue but the front operation will be O(log n)
(assuming balanced) rather than O(1) like in a heap.
A binary heap is a heap data structure that takes the form of a binary tree. Binary heaps are a
common way of implementing priority queues.[1]:162-163
A binary heap is defined as a binary tree with two additional constraints:[2]
Shape property: a binary heap is a complete binary tree; that is, all levels of the tree, except
possibly the last, are fully filled, and, if the last level is not complete, its nodes are filled from left to right.
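The search description above maps directly onto a short loop; a hedged sketch, assuming the same Node type (with int data, left and right pointers) used by the traversals above:

// Hedged sketch of BST lookup as described above: go left when t < v,
// right when t > v, stop on a match or on an empty subtree.
bool bstContains(Node *p, int t) {
    while (p != nullptr) {
        if (t == p->data) return true;            // found it
        p = (t < p->data) ? p->left : p->right;   // descend toward where t must be
    }
    return false;                                 // reached an empty subtree
}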
