import math
import matplotlib.pyplot as plt

def golden_section_search(func, a, b, epsilon):
    """
    Golden-section search for the minimum of a unimodal function on [a, b].

    Parameters
    ----------
    func : callable
        One-dimensional objective function.
    a, b : float
        Bracket endpoints, a < b.
    epsilon : float
        Absolute tolerance on the bracket width at termination.

    Returns
    -------
    float
        Midpoint of the final bracket, approximating the minimizer.
    """
    golden_ratio = (1 + math.sqrt(5)) / 2
    c = b - (b - a) / golden_ratio
    d = a + (b - a) / golden_ratio
    # Cache the interior evaluations: the classic algorithm reuses one
    # interior point each iteration, so only ONE new func call is needed
    # per loop (the original re-evaluated both points every iteration).
    fc = func(c)
    fd = func(d)

    # BUG FIX: the original compared against `epsilon * 10**(-6)`,
    # silently re-scaling the caller's tolerance a second time (callers
    # already scale it). `epsilon` IS the tolerance, per its meaning.
    while abs(d - c) > epsilon:
        if fc < fd:
            # Minimum lies in [a, d]; the old c becomes the new d.
            b = d
            d, fd = c, fc
            c = b - (b - a) / golden_ratio
            fc = func(c)
        else:
            # Minimum lies in [c, b]; the old d becomes the new c.
            a = c
            c, fc = d, fd
            d = a + (b - a) / golden_ratio
            fd = func(d)

    return (a + b) / 2

def gradient_descent(func, gradient, initial_point, epsilon, alpha, max_iterations):
    """
    Steepest descent with a golden-section line search at each step.

    Parameters
    ----------
    func : callable
        Objective function; takes a list of coordinates, returns a float.
    gradient : callable
        Returns the gradient of ``func`` as a list of the same length.
    initial_point : list[float]
        Starting point.
    epsilon : float
        Stop when the Euclidean norm of the gradient drops below this.
    alpha : float
        Unused; kept for backward compatibility. The actual step length
        is chosen by the golden-section line search every iteration.
    max_iterations : int
        Hard cap on the number of descent steps.

    Returns
    -------
    tuple
        ``(x, step, all_iterations)`` — final point, number of steps
        taken, and the list of iterates (excluding the starting point).
    """
    x = initial_point
    step = 0
    all_iterations = []
    while True:
        grad = gradient(x)
        norm = math.sqrt(sum(g**2 for g in grad))
        if norm < epsilon:
            break
        # Unit-length steepest-descent direction.
        direction = [-g / norm for g in grad]

        # Find the step length via golden-section search along `direction`.
        func_alpha = lambda a: func([x[i] + a * direction[i] for i in range(len(x))])
        # BUG FIX: the original rebound the `alpha` PARAMETER here,
        # shadowing it and making it dead on arrival; use a local name.
        step_size = golden_section_search(func_alpha, 0, 100, epsilon*10**(-6))

        x = [x[i] + step_size * direction[i] for i in range(len(x))]
        step += 1

        # Record the iterate for later plotting/inspection.
        all_iterations.append(x)

        print(f"Iteration {step}:")
        # BUG FIX: the original printed x[0] and x[1] unconditionally and
        # crashed for any dimension != 2; print every coordinate instead
        # (output is byte-identical to the original in the 2-D case).
        print(", ".join(f"x[{i}] = {xi}" for i, xi in enumerate(x)))
        print(f"Function value = {func(x)}")
        print(f"Step size = {step_size}\n")

        if step >= max_iterations:
            print(f"Reached maximum number of iterations: {max_iterations}")
            break

    return x, step, all_iterations

def func(x):
    """Test objective: quadratic distances to (2, 7) plus a heavily
    weighted penalty on the line x[1] + 4*x[0] = 6, offset by 3.8."""
    penalty = x[1] + 4 * x[0] - 6
    return (x[0] - 2) ** 2 + (x[1] - 7) ** 2 + 50 * penalty ** 2 + 3.8

def gradient(x):
    """Analytic gradient of ``func`` at the 2-D point ``x``."""
    penalty = x[1] + 4 * x[0] - 6
    df_dx0 = 2 * (x[0] - 2) + 400 * penalty
    df_dx1 = 2 * (x[1] - 7) + 100 * penalty
    return [df_dx0, df_dx1]

# Driver: run steepest descent on the test problem, report the result,
# then plot the path traced by the iterates.
initial_point = [1, 0]
epsilon = 0.01
alpha = 1
max_iterations = 100

minimum, steps, all_iterations = gradient_descent(
    func, gradient, initial_point, epsilon, alpha, max_iterations
)
print("Минимум функции:", minimum)
print("Количество шагов:", steps)

# Trajectory of the iterates in the (x, y) plane.
x_values = [pt[0] for pt in all_iterations]
y_values = [pt[1] for pt in all_iterations]

fig, ax = plt.subplots()
ax.plot(x_values, y_values, 'bo-', markersize=3)
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_title('Gradient Descent Optimization')
plt.show()
