churn_k_fold.py
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 19 01:43:45 2018
@author: KERKOURI Mohamed Amine
"""
################################ Part 1: Pre-processing ################################
#import numpy as np
#import matplotlib.pyplot as plt
import pandas as pd
# Importing the dataset
dataset = pd.read_csv('Churn_Modelling.csv')
X = dataset.iloc[:, 3:13].values   # predictors: CreditScore through EstimatedSalary
y = dataset.iloc[:, 13].values     # target: Exited (1 = customer churned)
# Encoding categorical data
# Encoding the independent variables: Geography (column 1) and Gender (column 2)
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
labelencoder_X_1 = LabelEncoder()
X[:, 1] = labelencoder_X_1.fit_transform(X[:, 1])
labelencoder_X_2 = LabelEncoder()
X[:, 2] = labelencoder_X_2.fit_transform(X[:, 2])
# One-hot encode Geography (note: categorical_features was removed in scikit-learn 0.22+, see sketch below)
onehotencoder = OneHotEncoder(categorical_features = [1])
X = onehotencoder.fit_transform(X).toarray()
# Drop one dummy column to avoid the dummy variable trap
X = X[:, 1:]
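# Hedged alternative (not part of the original script): on newer scikit-learn releases,
# where categorical_features is no longer available, an equivalent encoding can be written
# with ColumnTransformer. The sketch below assumes the same raw column layout
# (index 1 = Geography, index 2 = Gender) and is left commented out so the original path runs:
#
# from sklearn.compose import ColumnTransformer
# from sklearn.preprocessing import LabelEncoder, OneHotEncoder
# X = dataset.iloc[:, 3:13].values
# X[:, 2] = LabelEncoder().fit_transform(X[:, 2])                 # Gender -> 0/1
# ct = ColumnTransformer([('geo', OneHotEncoder(drop='first'), [1])],
#                        remainder='passthrough')                 # Geography -> 2 dummy columns
# X = ct.fit_transform(X)                                         # same 11-column layout as above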
# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 0)
# Feature Scaling
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
##################### Part 2: Evaluating the ANN with k-fold cross-validation #####################
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import cross_val_score
from keras.models import Sequential
from keras.layers import Dense
def build_classifier():
    # Two hidden layers of 6 units, then a single sigmoid output unit for the binary churn label
    classifier = Sequential()
    classifier.add(Dense(units=6, kernel_initializer='uniform', activation='relu', input_dim=11))
    classifier.add(Dense(units=6, kernel_initializer='uniform', activation='relu'))
    classifier.add(Dense(units=1, kernel_initializer='uniform', activation='sigmoid'))
    classifier.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
    return classifier
classifier = KerasClassifier(build_fn=build_classifier, batch_size=10, epochs=100)
accuracies = cross_val_score(estimator=classifier, X=X_train, y=y_train, cv=10, n_jobs=-1, pre_dispatch=2)
accuracy = accuracies.mean()    # mean accuracy across the 10 folds
variance = accuracies.std()     # spread (standard deviation) of the fold accuracies
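# Hedged follow-up sketch (not part of the original script): report the cross-validation
# result, then fit the wrapper on the full training split and score it on the held-out
# test set created above, which the k-fold loop never touches.
print('Fold accuracies:', accuracies)
print('Mean accuracy: %.4f (std: %.4f)' % (accuracy, variance))
classifier.fit(X_train, y_train)
print('Held-out test accuracy: %.4f' % classifier.score(X_test, y_test))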