Skip to content

Commit 089623f

Browse files
committedOct 24, 2018
Optimization Algorithm Added
1 parent 2770df1 commit 089623f

File tree

1 file changed

+21
-0
lines changed

1 file changed

+21
-0
lines changed
 

‎Optimization/GradientDescent.py

+21
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
def GradientDescent(inputValue, learningRate, precision, maximumIterations, gradientFunction):
    """Minimize a differentiable function via iterative gradient descent.

    Starting from ``inputValue``, repeatedly steps opposite the gradient
    (scaled by ``learningRate``) until the change between successive
    estimates is no larger than ``precision``, or ``maximumIterations``
    steps have been taken.  Prints the estimate at every iteration.

    Returns the final estimate of the minimizer.
    """
    stepSize = 1    # change between successive estimates; seeded > 0 so the loop runs
    steps = 0       # iterations performed so far
    while stepSize > precision and steps < maximumIterations:
        currentX = inputValue
        # Descend: move against the gradient evaluated at the current point.
        inputValue = currentX - learningRate * gradientFunction(currentX)
        stepSize = abs(inputValue - currentX)
        steps += 1
        print("Iteration",steps,"\nX value is",inputValue)
    return inputValue
11+
12+
# --- Interactive driver: collect parameters, then run gradient descent ---

inputValue = float(input("Initial Value: "))           # starting point x0
learningRate = float(input("Learning Rate Alpha: "))   # step size alpha
precision = float(input("Precision Needed: "))         # stop when |x_new - x_old| <= precision
# Fix: an iteration cap is a count, not a measurement — store it as an int.
# Going through float() first keeps inputs like "100.0" acceptable, as the
# old float-based parsing allowed.
maximumIterations = int(float(input("Maximum Iterations: ")))
print(float(input("Trial Number: ")))  # echoed back for the user's records

gradientFunction = lambda x: 2*(x+5)  # gradient of f(x) = (x+5)^2; minimum at x = -5

minimum = GradientDescent(inputValue, learningRate, precision, maximumIterations, gradientFunction)

print("The local minimum value is", minimum)

0 commit comments

Comments
 (0)
Please sign in to comment.