import numpy as np
import pandas as pd
import miceforest as mf
import matplotlib.pyplot as plt
from sklearn.impute import KNNImputer
from sklearn.experimental import enable_iterative_imputer  # required before importing IterativeImputer
from sklearn.impute import IterativeImputer
from sklearn.ensemble import RandomForestRegressor
from sklearn.linear_model import LogisticRegression
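
# Build a synthetic 2-D example: 800 points uniform on [0, 10]^2, labelled by their
# distance from the origin (radius <= 5 -> class 0, between 5 and 9 -> class 1,
# radius >= 9 -> class 2). n1/m1 and n2/m2 trace the two quarter-circle boundaries
# for plotting.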
x1 = np.random.uniform(0,10,800)
x2 = np.random.uniform(0,10,800)
n1 = np.linspace(0,5,1000)
m1 = np.sqrt(25 - n1**2)
n2 = np.linspace(0,9,1000)
m2 = np.sqrt(81 - n2**2)
X = np.vstack((x1,x2)).T
y = np.ones(800)
# Label each point by its squared distance from the origin.
for i in range(X.shape[0]):
    r2 = X[i, 0]**2 + X[i, 1]**2
    if r2 <= 25:
        y[i] = 0
    elif r2 < 81:
        y[i] = 1
    else:
        y[i] = 2
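
# Plot the complete data with the two quarter-circle class boundaries.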
plt.figure(figsize=(6,6))
plt.scatter(x1,x2,c=y,edgecolors='k',linewidths=0.5)
plt.plot(n1,m1,ls='--',lw=1,color='k')
plt.plot(n2,m2,ls='--',lw=1,color='k')
# plt.savefig(r"F:\Latex_IDA\example_orginal.pdf",dpi=400,bbox_inches='tight')
plt.show()
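
# Save the complete data, then ampute roughly 30% of the feature values with
# miceforest to create the incomplete data set (miceforest expects string column names).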
data = np.hstack((X,y[:,None]))
data = pd.DataFrame(data)
data.to_csv(r"F:\Latex_IDA\example_orginal.csv", header=False, index=False)
random_state = np.random.RandomState(np.random.randint(100))
X_DF = pd.DataFrame(data=X)
X_DF.columns = [str(i) for i in X_DF.columns]
X_amp = mf.ampute_data(data=X_DF, perc=0.3, random_state=random_state)
X_amp_df = pd.DataFrame(X_amp)
X_amp_df.to_csv(r"F:\Latex_IDA\example_incomplete.csv", header=False, index=False)
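
# The commented-out block below is an alternative: multiple imputation with
# miceforest's ImputationKernel (5 completed datasets averaged after 10 MICE
# iterations), kept here for reference.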
# X_kernel = mf.ImputationKernel(
# data = X_amp,
# datasets=5,
# save_all_iterations=True,
# random_state=random_state
# )
#
# X_kernel.mice(iterations=10)
# X_complete_0 = X_kernel.complete_data(dataset=0, inplace=False)
# X_complete_1 = X_kernel.complete_data(dataset=1, inplace=False)
# X_complete_2 = X_kernel.complete_data(dataset=2, inplace=False)
# X_complete_3 = X_kernel.complete_data(dataset=3, inplace=False)
# X_complete_4 = X_kernel.complete_data(dataset=4, inplace=False)
# X_complete = (X_complete_0 + X_complete_1 + X_complete_2 + X_complete_3 + X_complete_4)/5
#
# X_complete = np.array(X_complete)
# ---------------- MICE with a Random Forest estimator ----------------
# IterativeImputer regresses each feature on the others (here with a random
# forest) and iterates, mimicking MICE's chained equations.
imp = IterativeImputer(estimator=RandomForestRegressor(),
                       initial_strategy="mean",
                       max_iter=10,
                       tol=1e-10,
                       random_state=0)
Z = imp.fit_transform(X_amp)
plt.figure(figsize=(6,6))
plt.scatter(Z[:,0],Z[:,1],c=y,edgecolors='k',linewidths=0.5)
plt.plot(n1,m1,ls='--',lw=1,color='k')
plt.plot(n2,m2,ls='--',lw=1,color='k')
# plt.savefig(r"F:\Latex_IDA\example_MICE.pdf",dpi=400,bbox_inches='tight')
plt.title("MICE imputation")
plt.show()
# ---------------- ICkNNI (kNN imputation) ----------------
# data = pd.read_csv(r"F:\Latex_IDA\example_incomplete.csv",header=None,index_col=None)
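# KNNImputer fills each missing value with a distance-weighted mean over the k
# nearest rows, where nan_euclidean distance is computed from the coordinates
# observed in both rows.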
imputer = KNNImputer(n_neighbors=5, weights="distance",metric='nan_euclidean')
Z = imputer.fit_transform(X_amp_df)
plt.figure(figsize=(6,6))
plt.scatter(Z[:,0],Z[:,1],c=y,edgecolors='k',linewidths=0.5)
plt.plot(n1,m1,ls='--',lw=1,color='k')
plt.plot(n2,m2,ls='--',lw=1,color='k')
# plt.savefig(r"F:\Latex_IDA\example_ICkNNI.pdf",dpi=400,bbox_inches='tight')
plt.title("ICkNNI imputation")
plt.show()
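
# Rough sketch of a numeric comparison, assuming X_DF (complete) and X_amp_df
# (amputed) from above: refit both imputers and report RMSE on the cells that
# were masked. Parameter choices mirror the plots above; adjust as needed.
mask = X_amp_df.isna().to_numpy()
truth = X_DF.to_numpy()
mice_filled = IterativeImputer(estimator=RandomForestRegressor(),
                               initial_strategy="mean", max_iter=10,
                               tol=1e-10, random_state=0).fit_transform(X_amp_df)
knn_filled = KNNImputer(n_neighbors=5, weights="distance",
                        metric='nan_euclidean').fit_transform(X_amp_df)
rmse_mice = np.sqrt(np.mean((mice_filled[mask] - truth[mask]) ** 2))
rmse_knn = np.sqrt(np.mean((knn_filled[mask] - truth[mask]) ** 2))
print(f"RMSE on masked cells: MICE-RF={rmse_mice:.3f}, kNN={rmse_knn:.3f}")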