## Code written by Robert Richardson for Stat 624
## MLE for the "tied" normal model N(theta, theta), where the mean and the variance are both theta

import numpy as np
import matplotlib.pyplot as plt

data =  np.array([3.08, 0.68, 2.09, 0.87, -0.02, 0.25, 1.98, 1.47, 1.95, 0.99])
n = data.size

# Log-likelihood of N(theta, theta): the mean and the variance are both theta
def log_lik(theta):
  return -n/2*np.log(2*np.pi) - n/2*np.log(theta) - 1/(2*theta)*np.sum((data - theta)**2)

# Plot the log-likelihood over a grid of positive theta values
# (the grid starts just above 0 to avoid log(0) and division by zero)
t = np.linspace(0.01, 3, 100)
ft = [log_lik(i) for i in t]
plt.plot(t, ft)
plt.show()
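
# Optional sanity check (a minimal sketch, assuming the N(theta, theta) model
# above; scipy and the test value 1.0 are not part of the original script):
# log_lik should agree with the sum of normal log-densities with mean theta
# and standard deviation sqrt(theta).
from scipy.stats import norm
theta_check = 1.0
print(log_lik(theta_check),
      norm.logpdf(data, loc=theta_check, scale=np.sqrt(theta_check)).sum())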

# First and second derivatives of the log-likelihood (used by Newton-Raphson below)
def first_deriv(theta):
  return -n/(2*theta) + 1/(2*theta**2)*np.sum((data - theta)**2) + 1/theta*np.sum(data - theta)

def second_deriv(theta):
  return n/(2*theta**2) - 1/(theta**3)*np.sum((data - theta)**2) - 2/(theta**2)*np.sum(data - theta) - n/theta
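
# Quick numerical check of the derivatives (a sketch; the step size h and the
# test point below are arbitrary choices, not part of the original analysis):
# compare the analytic derivatives against central finite differences.
h = 1e-4
theta_test = 1.5
fd_first = (log_lik(theta_test + h) - log_lik(theta_test - h)) / (2*h)
fd_second = (log_lik(theta_test + h) - 2*log_lik(theta_test) + log_lik(theta_test - h)) / h**2
print(first_deriv(theta_test), fd_first)
print(second_deriv(theta_test), fd_second)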

# Newton-Raphson: iterate x1 = x0 - l'(x0)/l''(x0) until successive updates
# are within the tolerance
diff = float('inf')
tolerance = 1e-6
x0 = 0.5
count = 0

while diff > tolerance:
  count += 1
  x1 = x0 - first_deriv(x0)/second_deriv(x0)
  diff = abs(x1 - x0)   # absolute change, so the loop does not stop early on a negative step
  x0 = x1
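
# Closed-form cross-check (a sketch; setting first_deriv to zero and simplifying
# gives theta^2 + theta - mean(x^2) = 0, whose positive root is the MLE):
theta_closed = (-1 + np.sqrt(1 + 4*np.mean(data**2))) / 2
print("closed-form MLE:", theta_closed)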

# Report the Newton-Raphson results: the iteration count, the MLE, and a few
# diagnostics (nearby thetas should give a lower log-likelihood, and the
# second derivative should be negative at a maximum)
print("iterations:", count)
print("MLE:", x0)
print("log-lik at MLE:", log_lik(x0))
print("log-lik at MLE + 0.1:", log_lik(x0 + .1))
print("log-lik at MLE - 0.1:", log_lik(x0 - .1))
print("second derivative at MLE:", second_deriv(x0))
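
# Independent cross-check (a sketch; scipy and the search interval below are
# assumptions, not part of the original script): maximize the log-likelihood
# by minimizing its negative over a bounded interval and compare with the
# Newton-Raphson answer.
from scipy.optimize import minimize_scalar
res = minimize_scalar(lambda th: -log_lik(th), bounds=(0.01, 5), method='bounded')
print("scipy bounded-optimization MLE:", res.x)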

