Professional Documents
Culture Documents
OUTPUT:-
['worker', 'working', 'works']
CODE:-
def tokenize(text):
    """Break *text* into whitespace-separated tokens.

    Uses str.split() with no argument, so any run of whitespace
    (spaces, tabs, newlines) acts as a single delimiter and an
    empty string yields an empty list.
    """
    return text.split()
# Example text: the sentence fed to tokenize() to produce the token
# list shown in the OUTPUT section below.
text = "Tokenization is the process of breaking down text into smaller units."
OUTPUT:-
['Tokenization', 'is', 'the', 'process', 'of', 'breaking', 'down', 'text', 'into', 'smaller', 'units.']
CODE:-
def generate_add_delete_table(word):
    """Build the Add-Delete table for *word*.

    Returns a list of (operation, candidate) tuples:
      - one "ADD <letter>" entry for every lowercase letter inserted at
        every position (len(word) + 1 gaps x 26 letters), then
      - one "DEL <letter>" entry for each single-character deletion.

    Entries appear in insertion-position order first, then deletion
    order, matching a left-to-right scan of the word.
    """
    alphabet = 'abcdefghijklmnopqrstuvwxyz'
    entries = []
    # Insertions: try every letter in every gap, including both ends.
    for pos in range(len(word) + 1):
        entries.extend(
            (f"ADD {letter}", word[:pos] + letter + word[pos:])
            for letter in alphabet
        )
    # Deletions: drop the character at each position in turn.
    entries.extend(
        (f"DEL {word[pos]}", word[:pos] + word[pos + 1:])
        for pos in range(len(word))
    )
    return entries
def print_add_delete_table(word):
    """Print each (operation, candidate) row of the Add-Delete table
    for *word*, preceded by a heading line."""
    print(f"Add-Delete Table for '{word}':")
    for op, candidate in generate_add_delete_table(word):
        print(f"{op}: {candidate}")
# Example word used to demonstrate the Add-Delete table functions above.
word = "example"