
PROGRAM:

from queue import PriorityQueue


def astar(graph, start, goal):
    # Frontier stores (priority, node) tuples so the queue orders by f = g + h
    frontier = PriorityQueue()
    frontier.put((0, start))
    came_from = {}
    cost_so_far = {}
    came_from[start] = None
    cost_so_far[start] = 0
    while not frontier.empty():
        _, current = frontier.get()
        if current == goal:
            break
        for next_node in graph[current]:
            new_cost = cost_so_far[current] + graph[current][next_node]
            if next_node not in cost_so_far or new_cost < cost_so_far[next_node]:
                cost_so_far[next_node] = new_cost
                priority = new_cost + heuristic(goal, next_node)
                frontier.put((priority, next_node))
                came_from[next_node] = current
    return came_from, cost_so_far


def heuristic(goal, node):
    # Manhattan distance between two grid cells
    return abs(goal[0] - node[0]) + abs(goal[1] - node[1])


graph = {
    (0, 0): {(0, 1): 1, (1, 0): 1},
    (0, 1): {(0, 0): 1, (1, 1): 1},
    (1, 0): {(0, 0): 1, (1, 1): 1},
    (1, 1): {(0, 1): 1, (1, 0): 1}
}
start = (0, 0)
goal = (1, 1)
came_from, cost_so_far = astar(graph, start, goal)

# Reconstruct the path by following came_from links back from the goal
path = [goal]
node = goal
while node != start:
    node = came_from[node]
    path.append(node)
path.reverse()
print("Shortest path:", path)

OUTPUT:

RESULT:
PROGRAM:
from pgmpy.models import BayesianModel   # named BayesianNetwork in newer pgmpy releases
from pgmpy.factors.discrete import TabularCPD
from pgmpy.inference import VariableElimination

# Define the structure of the Bayesian network
model = BayesianModel([('A', 'C'), ('B', 'C'), ('B', 'D'), ('C', 'E')])

# Define the conditional probability distributions (CPDs) for each variable
cpd_a = TabularCPD('A', 2, [[0.5], [0.5]])
cpd_b = TabularCPD('B', 2, [[0.5], [0.5]])
cpd_c = TabularCPD('C', 2, [[0.8, 0.4, 0.6, 0.2], [0.2, 0.6, 0.4, 0.8]],
                   evidence=['A', 'B'], evidence_card=[2, 2])
cpd_d = TabularCPD('D', 2, [[0.6, 0.3], [0.4, 0.7]], evidence=['B'], evidence_card=[2])
cpd_e = TabularCPD('E', 2, [[0.9, 0.4], [0.1, 0.6]], evidence=['C'], evidence_card=[2])

# Add the CPDs to the model
model.add_cpds(cpd_a, cpd_b, cpd_c, cpd_d, cpd_e)

# Perform inference using the VariableElimination class
infer = VariableElimination(model)
print(infer.query(['E'], evidence={'A': 0, 'B': 1}))
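As a supplementary check (not part of the recorded program), the sketch below reuses the model and infer objects defined above: check_model() confirms that every CPD matches the graph and that its columns sum to one, and a second query shows the posterior of D given evidence on B.

# Supplementary check, assuming the model and infer objects defined above
print(model.check_model())                    # True when the CPDs are consistent with the structure
print(infer.query(['D'], evidence={'B': 1}))  # posterior distribution of D given B = 1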

OUTPUT:

RESULT:
PROGRAM:
from sklearn.linear_model import LinearRegression
import numpy as np

# Create sample data
x = np.array([1, 2, 3, 4, 5]).reshape((-1, 1))
y = np.array([2, 3, 4, 5, 6])

# Create a model and fit the data
model = LinearRegression().fit(x, y)

# Predict the response for a new observation
x_new = np.array([6]).reshape((-1, 1))
y_new = model.predict(x_new)

# Print the predicted response
print(y_new)
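Because the sample points lie exactly on the line y = x + 1, the fitted parameters can be inspected directly. The snippet below is a supplementary check (not part of the recorded program) that reuses the model, x, and y defined above.

# Supplementary inspection of the fitted model defined above
print(model.coef_)        # slope of the fitted line, expected to be about 1.0
print(model.intercept_)   # intercept, expected to be about 1.0
print(model.score(x, y))  # R^2 on the training data, expected to be 1.0 for a perfect fit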

OUTPUT:

RESULT:
