You are on page 1 of 10

EXP 8

class AVLNode:
    """A single node of an AVL tree."""

    def __init__(self, key):
        """Create a leaf node holding *key*."""
        self.key = key      # value stored at this node
        self.height = 1     # a freshly inserted node is always a leaf
        self.left = None    # left child subtree
        self.right = None   # right child subtree

class AVLTree:
    """Self-balancing binary search tree (AVL tree); insertion only.

    Keys equal to an existing key are routed to the right subtree
    (see the plain-BST step of ``_insert_helper``).
    """

    def __init__(self):
        # An empty tree has no root node.
        self.root = None

    def insert(self, key):
        """Insert *key* into the tree, rebalancing as needed."""
        self.root = self._insert_helper(self.root, key)

    def _insert_helper(self, node, key):
        """Insert *key* under *node*; return the (possibly new) subtree root."""
        # Step 1: perform the normal BST insertion.
        if not node:
            return AVLNode(key)
        elif key < node.key:
            node.left = self._insert_helper(node.left, key)
        else:
            node.right = self._insert_helper(node.right, key)

        # Step 2: update the height of this ancestor node.
        node.height = 1 + max(self._get_height(node.left),
                              self._get_height(node.right))

        # Step 3: get the balance factor and, if the node is
        # unbalanced, apply one of the 4 rotation cases.
        balance = self._get_balance(node)

        # Left Left case
        if balance > 1 and key < node.left.key:
            return self._right_rotate(node)

        # Right Right case
        if balance < -1 and key > node.right.key:
            return self._left_rotate(node)

        # Left Right case
        if balance > 1 and key > node.left.key:
            node.left = self._left_rotate(node.left)
            return self._right_rotate(node)

        # Right Left case
        if balance < -1 and key < node.right.key:
            node.right = self._right_rotate(node.right)
            return self._left_rotate(node)

        # Still balanced: return the unchanged node pointer.
        return node

    def _get_height(self, node):
        """Height of *node*'s subtree; an empty subtree has height 0.

        NOTE(review): this helper was called throughout but missing from
        the extracted source; reconstructed from the standard AVL definition.
        """
        return node.height if node else 0

    def _get_balance(self, node):
        """Balance factor = height(left) - height(right); 0 for empty.

        NOTE(review): also missing from the extracted source; reconstructed.
        """
        if not node:
            return 0
        return self._get_height(node.left) - self._get_height(node.right)

    def _left_rotate(self, z):
        """Left-rotate the subtree rooted at *z*; return its new root."""
        y = z.right
        t2 = y.left

        # Perform rotation.
        y.left = z
        z.right = t2

        # Update heights — z first, since it is now y's child.
        z.height = 1 + max(self._get_height(z.left), self._get_height(z.right))
        y.height = 1 + max(self._get_height(y.left), self._get_height(y.right))

        # Return the new root of this subtree.
        return y

    def _right_rotate(self, z):
        """Right-rotate the subtree rooted at *z*; return its new root.

        NOTE(review): the extracted source broke off after the
        'Update heights' comment; the height updates and the return
        were reconstructed as the mirror image of ``_left_rotate``.
        """
        y = z.left
        t3 = y.right

        # Perform rotation.
        y.right = z
        z.left = t3

        # Update heights — z first, since it is now y's child.
        z.height = 1 + max(self._get_height(z.left), self._get_height(z.right))
        y.height = 1 + max(self._get_height(y.left), self._get_height(y.right))

        # Return the new root of this subtree.
        return y

EXP 9

def quick_sort(arr):
    """Return a new ascending-sorted list of *arr*'s elements.

    Uses the first element as the pivot and partitions the rest into
    values <= pivot and values > pivot, sorting each side recursively.
    """
    # Base case: zero or one element is already sorted.
    if len(arr) <= 1:
        return arr

    pivot, rest = arr[0], arr[1:]
    smaller = [v for v in rest if v <= pivot]
    larger = [v for v in rest if v > pivot]
    return quick_sort(smaller) + [pivot] + quick_sort(larger)

# Test case
arr = [5, 2, 9, 3, 7, 4, 1, 8, 6, 0]
sorted_arr = quick_sort(arr)
print(sorted_arr)

# Time complexity analysis.
# BUG FIX: `time` was used below without ever being imported.
import time
import sys

# BUG FIX: with a first-element pivot, an already-descending input is the
# worst case — the recursion depth equals the input size (10000), which
# exceeds Python's default recursion limit (1000) and raised RecursionError.
sys.setrecursionlimit(20000)

start_time = time.time()
quick_sort(list(range(10000, 0, -1)))
end_time = time.time()
print("Time taken by Quick Sort Algorithm: ", end_time - start_time)

Pseudocode:

quick_sort(arr)

if length(arr) <= 1:

return arr

else:

pivot = arr[0] # Choose the first element as the pivot

left = [] # Create a list for elements less than or equal to pivot

right = [] # Create a list for elements greater than pivot

for i in range(1, length(arr)):

if arr[i] <= pivot:

append arr[i] to left

else:

append arr[i] to right

# Recursively sort the two subarrays

sorted_left = quick_sort(left)

sorted_right = quick_sort(right)

# Concatenate the sorted subarrays and pivot element

return sorted_left + [pivot] + sorted_right

Choosing a pivot element: This operation takes constant time, i.e., O(1).

Partitioning the array: This operation takes linear time, i.e., O(n), where n is the size of the array. In the worst case, if the
pivot element is either the smallest or largest element in the array, then the partitioning operation will not divide the
array into two subarrays of equal size. In this case, the partitioning operation takes O(n) time.

Recursively sorting the two subarrays: This operation involves recursive calls to the quick_sort function on subarrays of
size less than n. How deep the recursion goes depends on how evenly the pivot splits each array. In the
average case, the subarrays are divided into roughly equal sizes, so the recursion depth is logarithmic, i.e.,
O(log(n)), and the total work across all levels is O(n*log(n)). However, in the worst case (a pivot that is always the
smallest or largest element), the subarrays have sizes n-1 and 0, the recursion depth is n-1, and the total work
summed over all levels is O(n^2).

Concatenating the two sorted subarrays and the pivot element: This operation takes linear time, i.e., O(n), where n is the
size of the array.

Therefore, the overall time complexity of the Quick Sort Algorithm is O(n*log(n)) in the average case and O(n^2) in the
worst case. The worst case occurs when the pivot element is either the smallest or largest element in the array, and the
subarrays are not divided into roughly equal sizes. However, in practice, Quick Sort is a very efficient algorithm and is
widely used in industry.

EXP 10

Here’s the implementation:

def merge_sort(arr):
    """Sort *arr* in ascending order using recursive merge sort."""
    # Zero or one element: nothing to sort.
    if len(arr) <= 1:
        return arr

    # Split the list into two halves around the midpoint.
    midpoint = len(arr) // 2
    sorted_front = merge_sort(arr[:midpoint])
    sorted_back = merge_sort(arr[midpoint:])

    # Combine the two sorted halves into a single sorted list.
    return merge(sorted_front, sorted_back)

def merge(left, right):
    """Merge two sorted lists into one sorted list.

    Args:
        left: a list already sorted in ascending order.
        right: a list already sorted in ascending order.

    Returns:
        A new list containing every element of both inputs, sorted
        ascending. On ties the element from *right* is emitted first
        (the comparison is strict ``<``).
    """
    merged = []
    left_index = right_index = 0

    # Repeatedly take the smaller of the two current head elements.
    while left_index < len(left) and right_index < len(right):
        if left[left_index] < right[right_index]:
            merged.append(left[left_index])
            left_index += 1
        else:
            merged.append(right[right_index])
            right_index += 1

    # One side is exhausted; append whatever remains of the other
    # (at most one of these slices is non-empty).
    merged += left[left_index:]
    merged += right[right_index:]

    # BUG FIX: the original ended with `return merge:` — a syntax error
    # referencing the function itself; the merged list must be returned.
    return merged

# Demonstration: sort a sample list with merge_sort and print the result.
# (Expected output: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
arr = [5, 9, 1, 3, 10, 8, 2, 7, 6, 4]

sorted_arr = merge_sort(arr)

print(sorted_arr)

CODES
EXP 8
EXP 9
EXP 10

You might also like