🔬 Amaliyot: Differensial Hisob Simulyatsiyasi
Laboratoriya maqsadi
Bu laboratoriyada differensial hisob tushunchalarini Python yordamida vizualizatsiya qilamiz: hosilalar, gradientlar, optimizatsiya va Taylor qatorlari.
Kerakli kutubxonalar
pip install numpy matplotlib scipy sympy
1-laboratoriya: Hosilalarni Vizualizatsiya
import numpy as np
import matplotlib.pyplot as plt
def visualize_derivatives():
    """Plot a cubic function together with its first and second derivatives."""
    x = np.linspace(-2, 4, 500)

    # f(x) = x^3 - 6x^2 + 9x + 1 and its analytic derivatives
    f = x**3 - 6*x**2 + 9*x + 1
    f_prime = 3*x**2 - 12*x + 9   # first derivative
    f_double = 6*x - 12           # second derivative

    fig, axes = plt.subplots(3, 1, figsize=(12, 10), sharex=True)

    # --- Function panel ---
    axes[0].plot(x, f, 'b-', linewidth=2, label=r"$f(x) = x^3 - 6x^2 + 9x + 1$")
    axes[0].axhline(y=0, color='k', linestyle='-', linewidth=0.5)
    # Critical points where f'(x) = 0: x = 1 and x = 3
    for cx in (1, 3):
        cy = cx**3 - 6*cx**2 + 9*cx + 1
        axes[0].plot(cx, cy, 'ro', markersize=10)
        axes[0].annotate(f'({cx}, {cy})', (cx, cy), textcoords="offset points",
                         xytext=(10, 10), fontsize=10)
    axes[0].set_ylabel('f(x)', fontsize=12)
    axes[0].set_title("Funksiya va uning hosilalari", fontsize=14)
    axes[0].legend()
    axes[0].grid(True, alpha=0.3)

    # --- First-derivative panel (rate of change; sign marks increase/decrease) ---
    axes[1].plot(x, f_prime, 'r-', linewidth=2, label=r"$f'(x) = 3x^2 - 12x + 9$")
    axes[1].axhline(y=0, color='k', linestyle='-', linewidth=0.5)
    axes[1].fill_between(x, f_prime, where=(f_prime > 0), alpha=0.3, color='green', label='f o\'sadi')
    axes[1].fill_between(x, f_prime, where=(f_prime < 0), alpha=0.3, color='red', label='f kamayadi')
    axes[1].set_ylabel("f'(x)", fontsize=12)
    axes[1].legend()
    axes[1].grid(True, alpha=0.3)

    # --- Second-derivative panel (concavity) ---
    axes[2].plot(x, f_double, 'g-', linewidth=2, label=r"$f''(x) = 6x - 12$")
    axes[2].axhline(y=0, color='k', linestyle='-', linewidth=0.5)
    axes[2].fill_between(x, f_double, where=(f_double > 0), alpha=0.3, color='blue', label='Konkav yuqori')
    axes[2].fill_between(x, f_double, where=(f_double < 0), alpha=0.3, color='orange', label='Konkav pastga')
    axes[2].set_xlabel('x', fontsize=12)
    axes[2].set_ylabel("f''(x)", fontsize=12)
    axes[2].legend()
    axes[2].grid(True, alpha=0.3)

    plt.tight_layout()
    plt.show()
def tangent_line_visualization():
    """Draw f(x) = x^2 with tangent lines at several points."""
    fig, ax = plt.subplots(figsize=(10, 8))
    x = np.linspace(-1, 4, 500)
    ax.plot(x, x**2, 'b-', linewidth=2, label=r'$f(x) = x^2$')

    # Tangent at x = a: y = f(a) + f'(a)(x - a), with f'(x) = 2x
    for a, color in ((0.5, 'red'), (1.5, 'green'), (2.5, 'purple')):
        slope = 2 * a
        ax.plot(x, a**2 + slope * (x - a), color=color, linestyle='--', linewidth=1.5,
                label=f'Urinma x={a}: slope={slope:.1f}')
        ax.plot(a, a**2, 'o', color=color, markersize=10)

    ax.set_xlim(-1, 4)
    ax.set_ylim(-1, 10)
    ax.set_xlabel('x', fontsize=12)
    ax.set_ylabel('y', fontsize=12)
    ax.set_title('Urinma chiziqlar (Tangent Lines)', fontsize=14)
    ax.legend()
    ax.grid(True, alpha=0.3)
    ax.set_aspect('equal')
    plt.show()
if __name__ == "__main__":
    # Lab-1 demos: derivative panels, then tangent lines.
    visualize_derivatives()
    tangent_line_visualization()
2-laboratoriya: Raqamli Differensiallash
import numpy as np
import matplotlib.pyplot as plt
def numerical_derivative(f, x, h=1e-5, method='central'):
    """Approximate f'(x) with a finite-difference scheme.

    Parameters
    ----------
    f : callable
        Scalar function to differentiate.
    x : float
        Point at which the derivative is estimated.
    h : float, optional
        Step size (default 1e-5).
    method : str, optional
        'forward' (O(h)), 'backward' (O(h)) or 'central' (O(h^2), default).

    Returns
    -------
    float
        Finite-difference estimate of f'(x).

    Raises
    ------
    ValueError
        If *method* is not a supported scheme.  The original version
        silently returned None here, hiding typos in the method name.
    """
    if method == 'forward':
        return (f(x + h) - f(x)) / h
    if method == 'backward':
        return (f(x) - f(x - h)) / h
    if method == 'central':
        return (f(x + h) - f(x - h)) / (2 * h)
    raise ValueError(f"unknown method: {method!r}")
def compare_derivative_methods():
    """Compare forward/backward/central difference errors for d/dx sin(x) at x = pi/4.

    Plots the absolute error of each scheme against the step size h on a
    log-log scale, together with the theoretical O(h) and O(h^2) slopes.
    """
    f = np.sin
    f_exact = np.cos        # exact derivative of sin
    x = np.pi / 4           # evaluation point
    h_values = np.logspace(-1, -12, 50)

    forward_errors = []
    backward_errors = []
    central_errors = []
    exact = f_exact(x)
    for h in h_values:
        forward = numerical_derivative(f, x, h, 'forward')
        backward = numerical_derivative(f, x, h, 'backward')
        central = numerical_derivative(f, x, h, 'central')
        forward_errors.append(abs(forward - exact))
        backward_errors.append(abs(backward - exact))
        central_errors.append(abs(central - exact))

    # Log-log plot: the slope of each curve reveals its order of accuracy;
    # round-off error dominates once h becomes too small.
    plt.figure(figsize=(10, 6))
    plt.loglog(h_values, forward_errors, 'r-', label='Forward difference')
    plt.loglog(h_values, backward_errors, 'g--', label='Backward difference')
    plt.loglog(h_values, central_errors, 'b-', label='Central difference')
    # Theoretical convergence orders for reference
    # (mojibake fixed: the superscript in 'O(h²)' was garbled in the original)
    plt.loglog(h_values, h_values, 'k:', alpha=0.5, label='O(h)')
    plt.loglog(h_values, h_values**2, 'k--', alpha=0.5, label='O(h²)')
    plt.xlabel('h (qadam kattaligi)', fontsize=12)
    plt.ylabel('Xato', fontsize=12)
    plt.title('Raqamli differensiallash xatolari', fontsize=14)
    plt.legend()
    plt.grid(True, alpha=0.3)
    plt.show()

    # Mojibake fixed: 'π' was garbled as 'Ο' in the original prints.
    print(f"x = π/4 da sin(x) ning hosilasi:")
    print(f"Aniq: cos(π/4) = {exact:.10f}")
    print(f"Central (h=1e-5): {numerical_derivative(f, x, 1e-5, 'central'):.10f}")
def robot_velocity_from_position():
    """Recover velocity and acceleration numerically from a robot's position signal.

    Compares the analytic derivatives of s(t) = 5 sin(t) + 0.5 t^2 against
    finite-difference estimates computed from sampled positions.
    """
    t = np.linspace(0, 10, 1000)
    dt = t[1] - t[0]

    # Position: s(t) = 5 sin(t) + 0.5 t^2
    s = 5 * np.sin(t) + 0.5 * t**2
    # Exact velocity: v(t) = 5 cos(t) + t
    v_exact = 5 * np.cos(t) + t

    # Numerical velocity: central difference inside, one-sided at the endpoints
    v_numerical = np.zeros_like(t)
    v_numerical[1:-1] = (s[2:] - s[:-2]) / (2 * dt)
    v_numerical[0] = (s[1] - s[0]) / dt
    v_numerical[-1] = (s[-1] - s[-2]) / dt

    # Acceleration: exact a(t) = -5 sin(t) + 1; numerical via np.gradient
    a_exact = -5 * np.sin(t) + 1
    a_numerical = np.gradient(v_numerical, dt)

    fig, axes = plt.subplots(3, 1, figsize=(12, 10), sharex=True)

    axes[0].plot(t, s, 'b-', linewidth=2)
    axes[0].set_ylabel('Pozitsiya (m)', fontsize=12)
    axes[0].set_title('Robot Harakati: Pozitsiyadan Tezlik va Tezlanish', fontsize=14)
    axes[0].grid(True, alpha=0.3)

    axes[1].plot(t, v_exact, 'g-', linewidth=2, label='Aniq')
    axes[1].plot(t, v_numerical, 'r--', linewidth=1, label='Raqamli')
    axes[1].set_ylabel('Tezlik (m/s)', fontsize=12)
    axes[1].legend()
    axes[1].grid(True, alpha=0.3)

    axes[2].plot(t, a_exact, 'g-', linewidth=2, label='Aniq')
    axes[2].plot(t, a_numerical, 'r--', linewidth=1, label='Raqamli')
    axes[2].set_xlabel('Vaqt (s)', fontsize=12)
    # Mojibake fixed: the superscript in 'm/s²' was garbled in the original label.
    axes[2].set_ylabel('Tezlanish (m/s²)', fontsize=12)
    axes[2].legend()
    axes[2].grid(True, alpha=0.3)

    plt.tight_layout()
    plt.show()
if __name__ == "__main__":
    # Lab-2 demos: scheme comparison, then the robot example.
    compare_derivative_methods()
    robot_velocity_from_position()
3-laboratoriya: Gradient va Optimizatsiya
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
def gradient_descent_2d():
    """Visualize gradient descent on the paraboloid f(x, y) = (x-2)^2 + (y-1)^2."""
    def f(x, y):
        return (x - 2)**2 + (y - 1)**2

    def grad_f(x, y):
        # Analytic gradient of f
        return np.array([2*(x - 2), 2*(y - 1)])

    # Evaluation grid for the contour and surface plots
    X, Y = np.meshgrid(np.linspace(-1, 5, 100), np.linspace(-2, 4, 100))
    Z = f(X, Y)

    # Plain gradient descent from (4, 3) with a fixed learning rate
    alpha = 0.1
    p = np.array([4.0, 3.0])
    path = [p.copy()]
    for _ in range(50):
        g = grad_f(p[0], p[1])
        p = p - alpha * g
        path.append(p.copy())
        if np.linalg.norm(g) < 1e-6:
            break
    path = np.array(path)

    fig, axes = plt.subplots(1, 2, figsize=(14, 6))

    # Left: contour view of the descent path
    ax1 = axes[0]
    contour = ax1.contour(X, Y, Z, levels=20)
    ax1.clabel(contour, inline=True, fontsize=8)
    ax1.plot(path[:, 0], path[:, 1], 'ro-', markersize=5, linewidth=1, label='Gradient descent')
    ax1.plot(path[0, 0], path[0, 1], 'go', markersize=15, label='Boshlash')
    ax1.plot(path[-1, 0], path[-1, 1], 'b*', markersize=15, label='Minimum')
    ax1.set_xlabel('x')
    ax1.set_ylabel('y')
    ax1.set_title('Gradient Descent (Contour)', fontsize=14)
    ax1.legend()
    ax1.set_aspect('equal')

    # Right: the same path drawn on the 3D surface
    ax2 = fig.add_subplot(122, projection='3d')
    ax2.plot_surface(X, Y, Z, cmap='viridis', alpha=0.6)
    ax2.plot(path[:, 0], path[:, 1], f(path[:, 0], path[:, 1]),
             'ro-', markersize=5, linewidth=2)
    ax2.set_xlabel('x')
    ax2.set_ylabel('y')
    ax2.set_zlabel('f(x, y)')
    ax2.set_title('Gradient Descent (3D)', fontsize=14)

    plt.tight_layout()
    plt.show()

    print(f"Minimum topildi: ({path[-1, 0]:.4f}, {path[-1, 1]:.4f})")
    print(f"f(min) = {f(path[-1, 0], path[-1, 1]):.6f}")
def gradient_field_visualization():
    """Show the gradient field of f(x, y) = x^2 + 2y^2 and its negative."""
    def f(x, y):
        return x**2 + 2*y**2

    X, Y = np.meshgrid(np.linspace(-3, 3, 30), np.linspace(-3, 3, 30))
    Z = f(X, Y)

    # Analytic gradient components: df/dx = 2x, df/dy = 4y
    Gx = 2 * X
    Gy = 4 * Y
    # Normalize arrow lengths; the small offset avoids division by zero at the origin
    G_mag = np.sqrt(Gx**2 + Gy**2)
    Gx_norm = Gx / (G_mag + 0.1)
    Gy_norm = Gy / (G_mag + 0.1)

    fig, axes = plt.subplots(1, 2, figsize=(14, 6))

    # Left: gradient arrows (steepest ascent) over the contours,
    # drawn on every second grid point to reduce clutter
    ax1 = axes[0]
    contour = ax1.contour(X, Y, Z, levels=15, cmap='plasma')
    ax1.clabel(contour, inline=True, fontsize=8)
    ax1.quiver(X[::2, ::2], Y[::2, ::2], Gx_norm[::2, ::2], Gy_norm[::2, ::2],
               color='blue', alpha=0.6)
    ax1.set_xlabel('x')
    ax1.set_ylabel('y')
    ax1.set_title('Gradient vektorlar maydoni\n(eng tez o\'sish yo\'nalishi)', fontsize=12)
    ax1.set_aspect('equal')

    # Right: negative gradient arrows (steepest descent)
    ax2 = axes[1]
    contour = ax2.contour(X, Y, Z, levels=15, cmap='plasma')
    ax2.clabel(contour, inline=True, fontsize=8)
    ax2.quiver(X[::2, ::2], Y[::2, ::2], -Gx_norm[::2, ::2], -Gy_norm[::2, ::2],
               color='red', alpha=0.6)
    ax2.set_xlabel('x')
    ax2.set_ylabel('y')
    ax2.set_title('Manfiy gradient\n(eng tez pasayish yo\'nalishi)', fontsize=12)
    ax2.set_aspect('equal')

    plt.tight_layout()
    plt.show()
def saddle_point_visualization():
    """Visualize the saddle point of f(x, y) = x^2 - y^2 at the origin.

    Shows both a 3D surface and a contour plot with the saddle point marked.
    """
    def f(x, y):
        return x**2 - y**2

    x = np.linspace(-2, 2, 100)
    y = np.linspace(-2, 2, 100)
    X, Y = np.meshgrid(x, y)
    Z = f(X, Y)

    fig = plt.figure(figsize=(12, 5))

    # Left: 3D surface with the saddle point marked at the origin
    ax1 = fig.add_subplot(121, projection='3d')
    ax1.plot_surface(X, Y, Z, cmap='coolwarm', alpha=0.8)
    ax1.scatter([0], [0], [0], c='black', s=100, marker='*')
    ax1.set_xlabel('x')
    ax1.set_ylabel('y')
    ax1.set_zlabel('f(x, y)')
    # Mojibake fixed: the superscripts in 'x² - y²' were garbled in the original title.
    ax1.set_title('Egar nuqta: f(x,y) = x² - y²', fontsize=14)

    # Right: contour view of the same function
    ax2 = fig.add_subplot(122)
    contour = ax2.contour(X, Y, Z, levels=20, cmap='coolwarm')
    ax2.clabel(contour, inline=True, fontsize=8)
    ax2.plot(0, 0, 'k*', markersize=15, label='Egar nuqta')
    ax2.set_xlabel('x')
    ax2.set_ylabel('y')
    ax2.set_title('Contour ko\'rinishi', fontsize=14)
    ax2.set_aspect('equal')
    ax2.legend()

    plt.tight_layout()
    plt.show()
if __name__ == "__main__":
    # Lab-3 demos in order: descent, gradient field, saddle point.
    gradient_descent_2d()
    gradient_field_visualization()
    saddle_point_visualization()
4-laboratoriya: Taylor Qatorlari
import numpy as np
import matplotlib.pyplot as plt
from math import factorial
def taylor_approximation(f, f_derivs, x0, x, n):
    """Evaluate the degree-n Taylor polynomial of f around x0 at x.

    Parameters:
        f: the underlying function (kept for API symmetry; not used here).
        f_derivs: precomputed derivative values [f(x0), f'(x0), f''(x0), ...].
        x0: expansion point.
        x: evaluation point (scalar or NumPy array).
        n: polynomial order; silently truncated to len(f_derivs) - 1.

    Returns the partial sum  sum_k f_derivs[k] * (x - x0)^k / k!.
    """
    terms = min(n + 1, len(f_derivs))
    return sum(f_derivs[k] * (x - x0)**k / factorial(k) for k in range(terms))
def visualize_taylor_sin():
    """Overlay Taylor polynomials of increasing order on sin(x)."""
    x = np.linspace(-2*np.pi, 2*np.pi, 500)

    fig, ax = plt.subplots(figsize=(12, 8))
    ax.plot(x, np.sin(x), 'k-', linewidth=3, label='sin(x) - aniq')

    # Maclaurin series of sin keeps only odd powers: x - x^3/3! + x^5/5! - ...
    for order, color in zip((1, 3, 5, 7, 9), ('red', 'orange', 'green', 'blue', 'purple')):
        y_taylor = np.zeros_like(x)
        for k in range(1, order + 1, 2):   # odd powers only
            y_taylor += (-1)**((k - 1) // 2) * x**k / factorial(k)
        ax.plot(x, y_taylor, color=color, linestyle='--', linewidth=1.5,
                label=f'Taylor (n={order})')

    ax.set_xlim(-2*np.pi, 2*np.pi)
    ax.set_ylim(-2, 2)
    ax.set_xlabel('x', fontsize=12)
    ax.set_ylabel('y', fontsize=12)
    ax.set_title('sin(x) uchun Taylor qatori taxminlari', fontsize=14)
    ax.legend()
    ax.grid(True, alpha=0.3)
    ax.axhline(y=0, color='k', linewidth=0.5)
    ax.axvline(x=0, color='k', linewidth=0.5)
    plt.show()
def taylor_error_analysis():
    """Show how the Taylor-series error for e^x shrinks with polynomial order."""
    x_test = 1.0
    exact = np.exp(x_test)

    # Error of the degree-n partial sum of the exponential series at x_test
    orders = range(1, 15)
    errors = [abs(exact - sum(x_test**k / factorial(k) for k in range(n + 1)))
              for n in orders]

    fig, axes = plt.subplots(1, 2, figsize=(14, 5))

    # Left: error versus order on a log scale
    axes[0].semilogy(list(orders), errors, 'bo-', linewidth=2, markersize=8)
    axes[0].set_xlabel('Taylor qatori tartibi n', fontsize=12)
    axes[0].set_ylabel('Xato', fontsize=12)
    axes[0].set_title(f'Taylor qatori xatosi (x={x_test})', fontsize=14)
    axes[0].grid(True, alpha=0.3)

    # Right: the approximating polynomials against e^x
    x = np.linspace(-2, 3, 200)
    axes[1].plot(x, np.exp(x), 'k-', linewidth=3, label='e^x')
    for n in (1, 2, 4, 8):
        y_approx = sum(x**k / factorial(k) for k in range(n + 1))
        axes[1].plot(x, y_approx, '--', linewidth=1.5, label=f'n={n}')
    axes[1].set_xlim(-2, 3)
    axes[1].set_ylim(-1, 10)
    axes[1].set_xlabel('x', fontsize=12)
    axes[1].set_ylabel('y', fontsize=12)
    axes[1].set_title('e^x uchun Taylor taxminlari', fontsize=14)
    axes[1].legend()
    axes[1].grid(True, alpha=0.3)

    plt.tight_layout()
    plt.show()
def linearization_example():
    """Linearize f(x) = sqrt(x) around x0 = 4 and show the approximation error.

    Plots the function, its tangent-line approximation L(x), and the gap
    between them at a few test points, then prints numeric comparisons.
    """
    def f(x):
        return np.sqrt(x)

    x0 = 4
    f0 = f(x0)
    f_prime = 1 / (2 * np.sqrt(x0))  # f'(x) = 1 / (2*sqrt(x))

    def linear_approx(x):
        # L(x) = f(x0) + f'(x0) * (x - x0)
        return f0 + f_prime * (x - x0)

    x = np.linspace(0.1, 10, 200)
    fig, ax = plt.subplots(figsize=(10, 6))
    ax.plot(x, f(x), 'b-', linewidth=2, label=r'$f(x) = \sqrt{x}$')
    ax.plot(x, linear_approx(x), 'r--', linewidth=2,
            label=f'Chiziq taxmin: $L(x) = {f0:.2f} + {f_prime:.3f}(x - {x0})$')
    ax.plot(x0, f0, 'ko', markersize=12, label=f'Nuqta ({x0}, {f0})')

    # Vertical green segments show the gap between f and its linearization
    test_points = [3, 3.5, 4.5, 5]
    for xp in test_points:
        actual = f(xp)
        approx = linear_approx(xp)
        ax.plot([xp, xp], [actual, approx], 'g-', linewidth=2, alpha=0.5)
        ax.plot(xp, actual, 'go', markersize=6)
        ax.plot(xp, approx, 'ro', markersize=6)

    ax.set_xlabel('x', fontsize=12)
    ax.set_ylabel('y', fontsize=12)
    # Mojibake fixed: the radical sign '√' was garbled as 'β' in the original strings.
    ax.set_title('Chiziqlashtirish: √x ni x=4 atrofida', fontsize=14)
    ax.legend()
    ax.grid(True, alpha=0.3)
    ax.set_xlim(0, 10)
    ax.set_ylim(0, 4)
    plt.show()

    # Numeric comparison near x0 (mojibake fixed: '√' and '≈' restored)
    print("√x taxmini (x=4 atrofida):")
    for x_val in [3.8, 3.9, 4.0, 4.1, 4.2]:
        exact = np.sqrt(x_val)
        approx = linear_approx(x_val)
        error = abs(exact - approx)
        print(f"√{x_val} ≈ {approx:.4f} (aniq: {exact:.4f}, xato: {error:.6f})")
if __name__ == "__main__":
    # Lab-4 demos: sin series, error analysis, linearization.
    visualize_taylor_sin()
    taylor_error_analysis()
    linearization_example()
5-laboratoriya: Amaliy Optimizatsiya
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import minimize, minimize_scalar
def rocket_trajectory_optimization():
    """Find the launch angle maximizing apex height; also plot range vs angle.

    Simplified drag-free ballistics with launch speed v0 at angle theta.
    """
    g = 9.81
    v0 = 100  # initial speed (m/s)

    def max_height(theta):
        """Negative apex height from the vertical velocity component (negated for minimize)."""
        vy = v0 * np.sin(theta)
        h_max = vy**2 / (2 * g)
        return -h_max

    def range_distance(theta):
        """Horizontal range of the projectile."""
        return v0**2 * np.sin(2 * theta) / g

    theta_values = np.linspace(0, np.pi/2, 100)
    heights = [-max_height(t) for t in theta_values]
    ranges = [range_distance(t) for t in theta_values]

    fig, axes = plt.subplots(1, 2, figsize=(14, 5))

    # Mojibake fixed throughout: 'θ' and '°' were garbled in the original labels.
    # Height vs launch angle (maximized at 90°)
    axes[0].plot(np.degrees(theta_values), heights, 'b-', linewidth=2)
    axes[0].axvline(x=90, color='r', linestyle='--', label='Optimal: θ=90°')
    axes[0].set_xlabel('Uchirish burchagi (°)', fontsize=12)
    axes[0].set_ylabel('Max balandlik (m)', fontsize=12)
    axes[0].set_title('Raketa balandligi vs uchirish burchagi', fontsize=14)
    axes[0].legend()
    axes[0].grid(True, alpha=0.3)

    # Range vs launch angle (maximized at 45°)
    axes[1].plot(np.degrees(theta_values), ranges, 'g-', linewidth=2)
    axes[1].axvline(x=45, color='r', linestyle='--', label='Optimal: θ=45°')
    axes[1].set_xlabel('Uchirish burchagi (°)', fontsize=12)
    axes[1].set_ylabel('Gorizontal masofa (m)', fontsize=12)
    axes[1].set_title('Snaryad masofasi vs uchirish burchagi', fontsize=14)
    axes[1].legend()
    axes[1].grid(True, alpha=0.3)

    plt.tight_layout()
    plt.show()

    # Confirm the optimum numerically with a bounded scalar minimizer
    result = minimize_scalar(max_height, bounds=(0, np.pi/2), method='bounded')
    print(f"Optimal balandlik burchagi: {np.degrees(result.x):.1f}°")
    print(f"Max balandlik: {-result.fun:.1f} m")
def robot_arm_inverse_kinematics():
    """Solve 2-link planar arm inverse kinematics with gradient descent.

    Minimizes the squared distance between the end effector and a fixed
    target using the analytic Jacobian, then plots the final pose and the
    convergence history.
    """
    L1, L2 = 1.0, 0.8              # link lengths
    target = np.array([1.2, 0.6])  # desired end-effector position

    def forward_kinematics(theta):
        """End-effector (x, y) for joint angles theta = (t1, t2)."""
        t1, t2 = theta
        x = L1 * np.cos(t1) + L2 * np.cos(t1 + t2)
        y = L1 * np.sin(t1) + L2 * np.sin(t1 + t2)
        return np.array([x, y])

    def objective(theta):
        """Squared distance from the end effector to the target."""
        pos = forward_kinematics(theta)
        return np.sum((pos - target)**2)

    def gradient(theta):
        """Analytic gradient of the objective via the arm Jacobian."""
        t1, t2 = theta
        x, y = forward_kinematics(theta)
        ex, ey = x - target[0], y - target[1]
        J = np.array([
            [-L1*np.sin(t1) - L2*np.sin(t1+t2), -L2*np.sin(t1+t2)],
            [L1*np.cos(t1) + L2*np.cos(t1+t2), L2*np.cos(t1+t2)]
        ])
        return 2 * J.T @ np.array([ex, ey])

    # Gradient descent on the joint angles
    theta = np.array([0.5, 0.5])
    alpha = 0.1
    history = [theta.copy()]
    for _ in range(100):
        grad = gradient(theta)
        theta = theta - alpha * grad
        history.append(theta.copy())
        if np.linalg.norm(grad) < 1e-6:
            break
    history = np.array(history)

    fig, axes = plt.subplots(1, 2, figsize=(14, 6))

    # Left: final arm pose plus the end-effector trajectory during descent
    ax1 = axes[0]
    ax1.set_xlim(-0.5, 2.5)
    ax1.set_ylim(-0.5, 2)
    ax1.set_aspect('equal')
    final_theta = history[-1]
    x1 = L1 * np.cos(final_theta[0])
    y1 = L1 * np.sin(final_theta[0])
    x2, y2 = forward_kinematics(final_theta)
    ax1.plot([0, x1], [0, y1], 'b-', linewidth=6, label='Link 1')
    ax1.plot([x1, x2], [y1, y2], 'r-', linewidth=4, label='Link 2')
    ax1.plot(0, 0, 'ko', markersize=15)
    ax1.plot(x1, y1, 'ko', markersize=12)
    ax1.plot(x2, y2, 'go', markersize=15, label='End effector')
    ax1.plot(target[0], target[1], 'r*', markersize=20, label='Target')
    for i in range(len(history) - 1):
        pos = forward_kinematics(history[i])
        ax1.plot(pos[0], pos[1], 'g.', alpha=0.3, markersize=8)
    ax1.set_title('Robot IK: Gradient Descent', fontsize=14)
    ax1.legend()
    ax1.grid(True, alpha=0.3)

    # Right: convergence of the objective (log scale)
    ax2 = axes[1]
    errors = [objective(h) for h in history]
    ax2.semilogy(errors, 'b-', linewidth=2)
    ax2.set_xlabel('Iteratsiya', fontsize=12)
    ax2.set_ylabel('Xato (log scale)', fontsize=12)
    ax2.set_title('Konvergensiya', fontsize=14)
    ax2.grid(True, alpha=0.3)

    plt.tight_layout()
    plt.show()

    print(f"Final position: ({x2:.4f}, {y2:.4f})")
    print(f"Target: ({target[0]}, {target[1]})")
    # Mojibake fixed: 'θ₁', 'θ₂' and '°' were garbled in the original print.
    print(f"θ₁ = {np.degrees(final_theta[0]):.2f}°, θ₂ = {np.degrees(final_theta[1]):.2f}°")
if __name__ == "__main__":
    # Lab-5 demos: rocket optimization, then arm IK.
    rocket_trajectory_optimization()
    robot_arm_inverse_kinematics()
Xulosa
Bu laboratoriyalarda biz:
- ✅ Hosilalarni vizualizatsiya qildik
- ✅ Raqamli differensiallash usullarini o'rgandik
- ✅ Gradient va optimizatsiyani amalda ko'rdik
- ✅ Taylor qatorlarini vizualizatsiya qildik
- ✅ Robot va raketa masalalarini differensial hisob bilan yechdik
Keyingi qadamlar
- Integral hisob
- Differensial tenglamalar
- Ilg'or optimizatsiya (Newton-Raphson, BFGS)