728x90
반응형

데이터 클렌징 및 가공

import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
%matplotlib inline

import warnings
warnings.filterwarnings("ignore", category=RuntimeWarning)

bike_df = pd.read_csv('./bike_train.csv')
print(bike_df.shape)
bike_df.head(3)

# (10886, 12)

	datetime	season	holiday	workingday	weather	temp	atemp	humidity	windspeed	casual	registered	count
0	2011-01-01 00:00:00	1	0	0	1	9.84	14.395	81	0.0	3	13	16
1	2011-01-01 01:00:00	1	0	0	1	9.02	13.635	80	0.0	8	32	40
2	2011-01-01 02:00:00	1	0	0	1	9.02	13.635	80	0.0	5	27	32

datetime: hourly date + timestamp
season: 1 = 봄, 2 = 여름, 3 = 가을, 4 = 겨울
holiday: 1 = 토, 일요일의 주말을 제외한 국경일 등의 휴일, 0 = 휴일이 아닌 날
workingday: 1 = 토, 일요일의 주말 및 휴일이 아닌 주중, 0 = 주말 및 휴일
weather:
• 1 = 맑음, 약간 구름 낀 흐림
• 2 = 안개, 안개 + 흐림
• 3 = 가벼운 눈, 가벼운 비 + 천둥
• 4 = 심한 눈/비, 천둥/번개
temp: 온도(섭씨)
atemp: 체감온도(섭씨)
humidity: 상대습도
windspeed: 풍속
casual: 사전에 등록되지 않는 사용자가 대여한 횟수
registered: 사전에 등록된 사용자가 대여한 횟수
count: 대여 횟수

 

bike_df.info()

<class 'pandas.core.frame.DataFrame'>
RangeIndex: 10886 entries, 0 to 10885
Data columns (total 12 columns):
datetime      10886 non-null object
season        10886 non-null int64
holiday       10886 non-null int64
workingday    10886 non-null int64
weather       10886 non-null int64
temp          10886 non-null float64
atemp         10886 non-null float64
humidity      10886 non-null int64
windspeed     10886 non-null float64
casual        10886 non-null int64
registered    10886 non-null int64
count         10886 non-null int64
dtypes: float64(3), int64(8), object(1)
memory usage: 1020.6+ KB

 

# 문자열을 datetime 타입으로 변경. 
bike_df['datetime'] = bike_df.datetime.apply(pd.to_datetime)
bike_df.info()

<class 'pandas.core.frame.DataFrame'>
RangeIndex: 10886 entries, 0 to 10885
Data columns (total 12 columns):
datetime      10886 non-null datetime64[ns]
season        10886 non-null int64
holiday       10886 non-null int64
workingday    10886 non-null int64
weather       10886 non-null int64
temp          10886 non-null float64
atemp         10886 non-null float64
humidity      10886 non-null int64
windspeed     10886 non-null float64
casual        10886 non-null int64
registered    10886 non-null int64
count         10886 non-null int64
dtypes: datetime64[ns](1), float64(3), int64(8)
memory usage: 1020.6 KB

 

# datetime 타입에서 년, 월, 일, 시간 추출
bike_df['year'] = bike_df.datetime.apply(lambda x : x.year)
bike_df['month'] = bike_df.datetime.apply(lambda x : x.month)
bike_df['day'] = bike_df.datetime.apply(lambda x : x.day)
bike_df['hour'] = bike_df.datetime.apply(lambda x: x.hour)
print(bike_df.info())
bike_df.head(3)


<class 'pandas.core.frame.DataFrame'>
RangeIndex: 10886 entries, 0 to 10885
Data columns (total 16 columns):
datetime      10886 non-null datetime64[ns]
season        10886 non-null int64
holiday       10886 non-null int64
workingday    10886 non-null int64
weather       10886 non-null int64
temp          10886 non-null float64
atemp         10886 non-null float64
humidity      10886 non-null int64
windspeed     10886 non-null float64
casual        10886 non-null int64
registered    10886 non-null int64
count         10886 non-null int64
year          10886 non-null int64
month         10886 non-null int64
day           10886 non-null int64
hour          10886 non-null int64
dtypes: datetime64[ns](1), float64(3), int64(12)
memory usage: 1.3 MB
None

	datetime	season	holiday	workingday	weather	temp	atemp	humidity	windspeed	casual	registered	count	year	month	day	hour
0	2011-01-01 00:00:00	1	0	0	1	9.84	14.395	81	0.0	3	13	16	2011	1	1	0
1	2011-01-01 01:00:00	1	0	0	1	9.02	13.635	80	0.0	8	32	40	2011	1	1	1
2	2011-01-01 02:00:00	1	0	0	1	9.02	13.635	80	0.0	5	27	32	2011	1	1	2

 

drop_columns = ['datetime','casual','registered']
bike_df.drop(drop_columns, axis=1,inplace=True)

 

로그 변환, 피처 인코딩, 모델 학습/예측/평가

from sklearn.metrics import mean_squared_error, mean_absolute_error

# Use log1p() rather than log() so values at/near zero don't underflow.
def rmsle(y, pred):
    """Return the Root Mean Squared Log Error between actuals and predictions."""
    log_diff = np.log1p(y) - np.log1p(pred)
    return np.sqrt((log_diff ** 2).mean())

# Compute RMSE via scikit-learn's mean_squared_error().
def rmse(y,pred):
    """Return the Root Mean Squared Error between actuals and predictions."""
    return mean_squared_error(y, pred) ** 0.5

# NOTE: the book mislabels mean_absolute_error() as MSE; it is MAE.
# Compute MAE, RMSE and RMSLE together.
def evaluate_regr(y,pred):
    """Print RMSLE, RMSE and MAE for actual values *y* vs predictions *pred*."""
    rmsle_val = rmsle(y,pred)
    rmse_val = rmse(y,pred)
    # MAE comes straight from scikit-learn's mean_absolute_error().
    mae_val = mean_absolute_error(y,pred)
    # Consistency fix: the original mixed '.3f' and '.3F' format specs;
    # uppercase 'F' would print 'INF'/'NAN' for non-finite values.
    print('RMSLE: {0:.3f}, RMSE: {1:.3f}, MAE: {2:.3f}'.format(rmsle_val, rmse_val, mae_val))

 

from sklearn.model_selection import train_test_split , GridSearchCV
from sklearn.linear_model import LinearRegression , Ridge , Lasso

y_target = bike_df['count']
X_features = bike_df.drop(['count'],axis=1,inplace=False)

X_train, X_test, y_train, y_test = train_test_split(X_features, y_target, test_size=0.3, random_state=0)

lr_reg = LinearRegression()
lr_reg.fit(X_train, y_train)
pred = lr_reg.predict(X_test)

evaluate_regr(y_test ,pred)

# RMSLE: 1.165, RMSE: 140.900, MAE: 105.924

 

def get_top_error_data(y_test, pred, n_tops = 5):
    # DataFrame에 컬럼들로 실제 대여횟수(count)와 예측 값을 서로 비교 할 수 있도록 생성. 
    result_df = pd.DataFrame(y_test.values, columns=['real_count'])
    result_df['predicted_count']= np.round(pred)
    result_df['diff'] = np.abs(result_df['real_count'] - result_df['predicted_count'])
    # 예측값과 실제값이 가장 큰 데이터 순으로 출력. 
    print(result_df.sort_values('diff', ascending=False)[:n_tops])
    
get_top_error_data(y_test,pred,n_tops=20)



     real_count  predicted_count   diff
1618         890            322.0  568.0
3151         798            241.0  557.0
966          884            327.0  557.0
412          745            194.0  551.0
2817         856            310.0  546.0
2277         813            267.0  546.0
2314         766            222.0  544.0
454          721            177.0  544.0
1003         713            171.0  542.0
2394         684            142.0  542.0
1181         891            357.0  534.0
1379         745            212.0  533.0
2003         770            241.0  529.0
1029         901            378.0  523.0
3227         724            202.0  522.0
1038         873            353.0  520.0
3197         694            176.0  518.0
507          688            174.0  514.0
637          900            393.0  507.0
87           594             95.0  499.0

 

y_target.hist()

 

y_log_transform = np.log1p(y_target)
y_log_transform.hist()

 

# 타겟 컬럼인 count 값을 log1p 로 Log 변환
y_target_log = np.log1p(y_target)

# 로그 변환된 y_target_log를 반영하여 학습/테스트 데이터 셋 분할
X_train, X_test, y_train, y_test = train_test_split(X_features, y_target_log, test_size=0.3, random_state=0)
lr_reg = LinearRegression()
lr_reg.fit(X_train, y_train)
pred = lr_reg.predict(X_test)

# 테스트 데이터 셋의 Target 값은 Log 변환되었으므로 다시 expm1를 이용하여 원래 scale로 변환
y_test_exp = np.expm1(y_test)

# 예측 값 역시 Log 변환된 타겟 기반으로 학습되어 예측되었으므로 다시 expm1으로 scale 변환
pred_exp = np.expm1(pred)

evaluate_regr(y_test_exp ,pred_exp)

# RMSLE: 1.017, RMSE: 162.594, MAE: 109.286

 

coef = pd.Series(lr_reg.coef_, index=X_features.columns)
coef_sort = coef.sort_values(ascending=False)
sns.barplot(x=coef_sort.values, y=coef_sort.index)

 

# 'year','month','hour','season','weather' feature들을 One Hot Encoding
X_features_ohe = pd.get_dummies(X_features, columns=['year','month','hour', 'holiday',
                                              'workingday','season','weather'])

 

# 원-핫 인코딩이 적용된 feature 데이터 세트 기반으로 학습/예측 데이터 분할. 
X_train, X_test, y_train, y_test = train_test_split(X_features_ohe, y_target_log,
                                                    test_size=0.3, random_state=0)

# Fit the model, predict on the test set, and print evaluation metrics.
def get_model_predict(model, X_train, X_test, y_train, y_test, is_expm1=False):
    """Train *model*, predict X_test, and report RMSLE/RMSE/MAE via evaluate_regr().

    When is_expm1 is True the targets/predictions are assumed to be on the
    log1p scale and are inverted with expm1 before scoring.
    """
    model.fit(X_train, y_train)
    predictions = model.predict(X_test)
    if is_expm1:
        y_test, predictions = np.expm1(y_test), np.expm1(predictions)
    print('###', model.__class__.__name__, '###')
    evaluate_regr(y_test, predictions)
# end of function get_model_predict    

# model 별로 평가 수행
lr_reg = LinearRegression()
ridge_reg = Ridge(alpha=10)
lasso_reg = Lasso(alpha=0.01)

for model in [lr_reg, ridge_reg, lasso_reg]:
    get_model_predict(model,X_train, X_test, y_train, y_test,is_expm1=True)
    

### LinearRegression ###
RMSLE: 0.589, RMSE: 97.483, MAE: 63.106
### Ridge ###
RMSLE: 0.589, RMSE: 98.407, MAE: 63.648
### Lasso ###
RMSLE: 0.634, RMSE: 113.031, MAE: 72.658

 

coef = pd.Series(lr_reg.coef_ , index=X_features_ohe.columns)
coef_sort = coef.sort_values(ascending=False)[:10]
sns.barplot(x=coef_sort.values , y=coef_sort.index)

 

from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
from xgboost import XGBRegressor
from lightgbm import LGBMRegressor


# 랜덤 포레스트, GBM, XGBoost, LightGBM model 별로 평가 수행
rf_reg = RandomForestRegressor(n_estimators=500)
gbm_reg = GradientBoostingRegressor(n_estimators=500)
xgb_reg = XGBRegressor(n_estimators=500)
lgbm_reg = LGBMRegressor(n_estimators=500)

for model in [rf_reg, gbm_reg, xgb_reg, lgbm_reg]:
    get_model_predict(model,X_train, X_test, y_train, y_test,is_expm1=True)
    


### RandomForestRegressor ###
RMSLE: 0.353, RMSE: 50.828, MAE: 31.537
### GradientBoostingRegressor ###
RMSLE: 0.340, RMSE: 55.761, MAE: 34.333
### XGBRegressor ###
RMSLE: 0.346, RMSE: 56.474, MAE: 34.917
### LGBMRegressor ###
RMSLE: 0.316, RMSE: 46.473, MAE: 28.777

 

반응형

'Data_Science > ML_Perfect_Guide' 카테고리의 다른 글

6-1. PCA  (0) 2021.12.29
5-8. House Price  (0) 2021.12.27
5-6. 회귀트리  (0) 2021.12.27
5-5. Logistics Regression  (0) 2021.12.27
5-4. Regularized Linear Models – Ridge, Lasso  (0) 2021.12.27
728x90
반응형
from sklearn.datasets import load_boston
from sklearn.model_selection import cross_val_score
from sklearn.ensemble import RandomForestRegressor
import pandas as pd
import numpy as np

# 보스턴 데이터 세트 로드
boston = load_boston()
bostonDF = pd.DataFrame(boston.data, columns = boston.feature_names)

bostonDF['PRICE'] = boston.target
y_target = bostonDF['PRICE']
X_data = bostonDF.drop(['PRICE'], axis=1,inplace=False)

rf = RandomForestRegressor(random_state=0, n_estimators=1000)
neg_mse_scores = cross_val_score(rf, X_data, y_target, scoring="neg_mean_squared_error", cv = 5)
rmse_scores  = np.sqrt(-1 * neg_mse_scores)
avg_rmse = np.mean(rmse_scores)

print(' 5 교차 검증의 개별 Negative MSE scores: ', np.round(neg_mse_scores, 2))
print(' 5 교차 검증의 개별 RMSE scores : ', np.round(rmse_scores, 2))
print(' 5 교차 검증의 평균 RMSE : {0:.3f} '.format(avg_rmse))




5 교차 검증의 개별 Negative MSE scores:  [ -7.88 -13.14 -20.57 -46.23 -18.88]
 5 교차 검증의 개별 RMSE scores :  [2.81 3.63 4.54 6.8  4.34]
 5 교차 검증의 평균 RMSE : 4.423

 

def get_model_cv_prediction(model, X_data, y_target):
    """Run 5-fold cross-validation with negative-MSE scoring and print the
    model name together with its average RMSE."""
    scores = cross_val_score(model, X_data, y_target, scoring="neg_mean_squared_error", cv=5)
    fold_rmses = np.sqrt(-1 * scores)
    print('##### ', model.__class__.__name__, ' #####')
    print(' 5 교차 검증의 평균 RMSE : {0:.3f} '.format(np.mean(fold_rmses)))

 

사이킷런의 여러 회귀 트리 클래스를 이용하여 회귀 예측

from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import GradientBoostingRegressor
from xgboost import XGBRegressor
from lightgbm import LGBMRegressor

dt_reg = DecisionTreeRegressor(random_state=0, max_depth=4)
rf_reg = RandomForestRegressor(random_state=0, n_estimators=1000)
gb_reg = GradientBoostingRegressor(random_state=0, n_estimators=1000)
xgb_reg = XGBRegressor(n_estimators=1000)
lgb_reg = LGBMRegressor(n_estimators=1000)

# 트리 기반의 회귀 모델을 반복하면서 평가 수행 
models = [dt_reg, rf_reg, gb_reg, xgb_reg, lgb_reg]
for model in models:  
    get_model_cv_prediction(model, X_data, y_target)
    
    
    
    
    
#####  DecisionTreeRegressor  #####
 5 교차 검증의 평균 RMSE : 5.978 
#####  RandomForestRegressor  #####
 5 교차 검증의 평균 RMSE : 4.423 
#####  GradientBoostingRegressor  #####
 5 교차 검증의 평균 RMSE : 4.269 
#####  XGBRegressor  #####
 5 교차 검증의 평균 RMSE : 4.251 
#####  LGBMRegressor  #####
 5 교차 검증의 평균 RMSE : 4.646

 

회귀 트리는 선형 회귀의 회귀 계수 대신, 피처 중요도로 피처의 상대적 중요도를 알 수 있습니다

import seaborn as sns
%matplotlib inline

rf_reg = RandomForestRegressor(n_estimators=1000)

# 앞 예제에서 만들어진 X_data, y_target 데이터 셋을 적용하여 학습합니다.   
rf_reg.fit(X_data, y_target)

feature_series = pd.Series(data=rf_reg.feature_importances_, index=X_data.columns )
feature_series = feature_series.sort_values(ascending=False)
sns.barplot(x= feature_series, y=feature_series.index)

 

오버피팅을 시각화 하기 위해 한개의 피처 RM과 타겟값 PRICE기반으로 회귀 예측 수행

import matplotlib.pyplot as plt
%matplotlib inline

bostonDF_sample = bostonDF[['RM','PRICE']]
bostonDF_sample = bostonDF_sample.sample(n=100,random_state=0)
print(bostonDF_sample.shape)
plt.figure()
plt.scatter(bostonDF_sample.RM , bostonDF_sample.PRICE,c="darkorange")

# (100, 2)

 

import numpy as np
from sklearn.linear_model import LinearRegression

# 선형 회귀와 결정 트리 기반의 Regressor 생성. DecisionTreeRegressor의 max_depth는 각각 2, 7
lr_reg = LinearRegression()
rf_reg2 = DecisionTreeRegressor(max_depth=2)
rf_reg7 = DecisionTreeRegressor(max_depth=7)

# 실제 예측을 적용할 테스트용 데이터 셋을 4.5 ~ 8.5 까지 100개 데이터 셋 생성. 
X_test = np.arange(4.5, 8.5, 0.04).reshape(-1, 1)

# 보스턴 주택가격 데이터에서 시각화를 위해 피처는 RM만, 그리고 결정 데이터인 PRICE 추출
X_feature = bostonDF_sample['RM'].values.reshape(-1,1)
y_target = bostonDF_sample['PRICE'].values.reshape(-1,1)

# 학습과 예측 수행. 
lr_reg.fit(X_feature, y_target)
rf_reg2.fit(X_feature, y_target)
rf_reg7.fit(X_feature, y_target)

pred_lr = lr_reg.predict(X_test)
pred_rf2 = rf_reg2.predict(X_test)
pred_rf7 = rf_reg7.predict(X_test)

 

fig , (ax1, ax2, ax3) = plt.subplots(figsize=(14,4), ncols=3)

# Plot the prediction lines over X in [4.5, 8.5) for the linear model and
# the two decision trees, against the sampled RM/PRICE scatter.
# Linear regression prediction line.
ax1.set_title('Linear Regression')
ax1.scatter(bostonDF_sample.RM, bostonDF_sample.PRICE, c="darkorange")
ax1.plot(X_test, pred_lr,label="linear", linewidth=2 )

# Decision tree prediction line with max_depth=2.
ax2.set_title('Decision Tree Regression: \n max_depth=2')
ax2.scatter(bostonDF_sample.RM, bostonDF_sample.PRICE, c="darkorange")
# BUG FIX: the legend label said "max_depth:3" although this model (rf_reg2)
# is a DecisionTreeRegressor(max_depth=2).
ax2.plot(X_test, pred_rf2, label="max_depth:2", linewidth=2 )

# Decision tree prediction line with max_depth=7.
ax3.set_title('Decision Tree Regression: \n max_depth=7')
ax3.scatter(bostonDF_sample.RM, bostonDF_sample.PRICE, c="darkorange")
ax3.plot(X_test, pred_rf7, label="max_depth:7", linewidth=2)

반응형

'Data_Science > ML_Perfect_Guide' 카테고리의 다른 글

5-8. House Price  (0) 2021.12.27
5-7. Bike Sharing Demand  (0) 2021.12.27
5-5. Logistics Regression  (0) 2021.12.27
5-4. Regularized Linear Models – Ridge, Lasso  (0) 2021.12.27
5-3. Polynomial Regression, overfitting  (0) 2021.12.27
728x90
반응형

로지스틱 회귀

import pandas as pd
import matplotlib.pyplot as plt
%matplotlib inline

from sklearn.datasets import load_breast_cancer
from sklearn.linear_model import LogisticRegression

cancer = load_breast_cancer()

 

from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split

# StandardScaler( )로 평균이 0, 분산 1로 데이터 분포도 변환
scaler = StandardScaler()
data_scaled = scaler.fit_transform(cancer.data)

X_train , X_test, y_train , y_test = train_test_split(data_scaled, cancer.target, test_size=0.3, random_state=0)

 

from sklearn.metrics import accuracy_score, roc_auc_score

# 로지스틱 회귀를 이용하여 학습 및 예측 수행. 
lr_clf = LogisticRegression()
lr_clf.fit(X_train, y_train)
lr_preds = lr_clf.predict(X_test)

# accuracy와 roc_auc 측정
print('accuracy: {:0.3f}'.format(accuracy_score(y_test, lr_preds)))
print('roc_auc: {:0.3f}'.format(roc_auc_score(y_test , lr_preds)))

# accuracy: 0.977
# roc_auc: 0.972

 

from sklearn.model_selection import GridSearchCV

params={'penalty':['l2', 'l1'],
        'C':[0.01, 0.1, 1, 1, 5, 10]}

grid_clf = GridSearchCV(lr_clf, param_grid=params, scoring='accuracy', cv=3 )
grid_clf.fit(data_scaled, cancer.target)
print('최적 하이퍼 파라미터:{0}, 최적 평균 정확도:{1:.3f}'.format(grid_clf.best_params_, grid_clf.best_score_))

# 최적 하이퍼 파라미터:{'C': 1, 'penalty': 'l2'}, 최적 평균 정확도:0.975
반응형
728x90
반응형

Regularized Linear Model - Ridge Regression

# 앞의 LinearRegression예제에서 분할한 feature 데이터 셋인 X_data과 Target 데이터 셋인 Y_target 데이터셋을 그대로 이용 
from sklearn.linear_model import Ridge
from sklearn.model_selection import cross_val_score

# boston 데이타셋 로드
boston = load_boston()

# boston 데이타셋 DataFrame 변환 
bostonDF = pd.DataFrame(boston.data , columns = boston.feature_names)

# boston dataset의 target array는 주택 가격임. 이를 PRICE 컬럼으로 DataFrame에 추가함. 
bostonDF['PRICE'] = boston.target
print('Boston 데이타셋 크기 :',bostonDF.shape)

y_target = bostonDF['PRICE']
X_data = bostonDF.drop(['PRICE'],axis=1,inplace=False)


ridge = Ridge(alpha = 10)
neg_mse_scores = cross_val_score(ridge, X_data, y_target, scoring="neg_mean_squared_error", cv = 5)
rmse_scores  = np.sqrt(-1 * neg_mse_scores)
avg_rmse = np.mean(rmse_scores)
print(' 5 folds 의 개별 Negative MSE scores: ', np.round(neg_mse_scores, 3))
print(' 5 folds 의 개별 RMSE scores : ', np.round(rmse_scores,3))
print(' 5 folds 의 평균 RMSE : {0:.3f} '.format(avg_rmse))



Boston 데이타셋 크기 : (506, 14)
 5 folds 의 개별 Negative MSE scores:  [-11.422 -24.294 -28.144 -74.599 -28.517]
 5 folds 의 개별 RMSE scores :  [3.38  4.929 5.305 8.637 5.34 ]
 5 folds 의 평균 RMSE : 5.518

 

alpha값을 0 , 0.1 , 1 , 10 , 100 으로 변경하면서 RMSE 측정

# Ridge에 사용될 alpha 파라미터의 값들을 정의
alphas = [0 , 0.1 , 1 , 10 , 100]

# alphas list 값을 iteration하면서 alpha에 따른 평균 rmse 구함.
for alpha in alphas :
    ridge = Ridge(alpha = alpha)
    
    #cross_val_score를 이용하여 5 fold의 평균 RMSE 계산
    neg_mse_scores = cross_val_score(ridge, X_data, y_target, scoring="neg_mean_squared_error", cv = 5)
    avg_rmse = np.mean(np.sqrt(-1 * neg_mse_scores))
    print('alpha {0} 일 때 5 folds 의 평균 RMSE : {1:.3f} '.format(alpha,avg_rmse))
    
    
    
alpha 0 일 때 5 folds 의 평균 RMSE : 5.829 
alpha 0.1 일 때 5 folds 의 평균 RMSE : 5.788 
alpha 1 일 때 5 folds 의 평균 RMSE : 5.653 
alpha 10 일 때 5 folds 의 평균 RMSE : 5.518 
alpha 100 일 때 5 folds 의 평균 RMSE : 5.330

 

 각 alpha에 따른 회귀 계수 값을 시각화. 각 alpha값 별로 plt.subplots로 맷플롯립 축 생성

# 각 alpha에 따른 회귀 계수 값을 시각화하기 위해 5개의 열로 된 맷플롯립 축 생성  
fig , axs = plt.subplots(figsize=(18,6) , nrows=1 , ncols=5)
# 각 alpha에 따른 회귀 계수 값을 데이터로 저장하기 위한 DataFrame 생성  
coeff_df = pd.DataFrame()

# alphas 리스트 값을 차례로 입력해 회귀 계수 값 시각화 및 데이터 저장. pos는 axis의 위치 지정
for pos , alpha in enumerate(alphas) :
    ridge = Ridge(alpha = alpha)
    ridge.fit(X_data , y_target)
    # alpha에 따른 피처별 회귀 계수를 Series로 변환하고 이를 DataFrame의 컬럼으로 추가.  
    coeff = pd.Series(data=ridge.coef_ , index=X_data.columns )
    colname='alpha:'+str(alpha)
    coeff_df[colname] = coeff
    # 막대 그래프로 각 alpha 값에서의 회귀 계수를 시각화. 회귀 계수값이 높은 순으로 표현
    coeff = coeff.sort_values(ascending=False)
    axs[pos].set_title(colname)
    axs[pos].set_xlim(-3,6)
    sns.barplot(x=coeff.values , y=coeff.index, ax=axs[pos])

# for 문 바깥에서 맷플롯립의 show 호출 및 alpha에 따른 피처별 회귀 계수를 DataFrame으로 표시
plt.show()

 

alpha 값에 따른 컬럼별 회귀계수 출력

ridge_alphas = [0 , 0.1 , 1 , 10 , 100]
sort_column = 'alpha:'+str(ridge_alphas[0])
coeff_df.sort_values(by=sort_column, ascending=False)


	alpha:0	alpha:0.1	alpha:1	alpha:10	alpha:100
RM	3.809865	3.818233	3.854000	3.702272	2.334536
CHAS	2.686734	2.670019	2.552393	1.952021	0.638335
RAD	0.306049	0.303515	0.290142	0.279596	0.315358
ZN	0.046420	0.046572	0.047443	0.049579	0.054496
INDUS	0.020559	0.015999	-0.008805	-0.042962	-0.052826
B	0.009312	0.009368	0.009673	0.010037	0.009393
AGE	0.000692	-0.000269	-0.005415	-0.010707	0.001212
TAX	-0.012335	-0.012421	-0.012912	-0.013993	-0.015856
CRIM	-0.108011	-0.107474	-0.104595	-0.101435	-0.102202
LSTAT	-0.524758	-0.525966	-0.533343	-0.559366	-0.660764
PTRATIO	-0.952747	-0.940759	-0.876074	-0.797945	-0.829218
DIS	-1.475567	-1.459626	-1.372654	-1.248808	-1.153390
NOX	-17.766611	-16.684645	-10.777015	-2.371619	-0.262847

 

라쏘 회귀

from sklearn.linear_model import Lasso, ElasticNet

# For each alpha value, print the 5-fold average RMSE of the chosen
# regularized model and collect its fitted coefficients into a DataFrame.
def get_linear_reg_eval(model_name, params=None, X_data_n=None, y_target_n=None, verbose=True):
    """Evaluate Ridge/Lasso/ElasticNet across a list of alpha values.

    Parameters
    ----------
    model_name : 'Ridge', 'Lasso' or 'ElasticNet'
    params : iterable of alpha values to try
    X_data_n, y_target_n : features / target used for both CV and the refit
    verbose : print a header line when True

    Returns
    -------
    DataFrame with one 'alpha:<value>' column of coefficients per alpha.
    """
    coeff_df = pd.DataFrame()
    if verbose : print('####### ', model_name , '#######')
    for param in params:
        if model_name =='Ridge': model = Ridge(alpha=param)
        elif model_name =='Lasso': model = Lasso(alpha=param)
        elif model_name =='ElasticNet': model = ElasticNet(alpha=param, l1_ratio=0.7)
        # ROBUSTNESS FIX: the original left `model` undefined (NameError) for
        # any other model_name; fail fast with a clear message instead.
        else: raise ValueError('Unsupported model_name: {0}'.format(model_name))
        neg_mse_scores = cross_val_score(model, X_data_n, 
                                             y_target_n, scoring="neg_mean_squared_error", cv = 5)
        avg_rmse = np.mean(np.sqrt(-1 * neg_mse_scores))
        print('alpha {0}일 때 5 폴드 세트의 평균 RMSE: {1:.3f} '.format(param, avg_rmse))
        # cross_val_score only returns metrics, so refit to extract coefficients.
        # BUG FIX: fit on the X_data_n / y_target_n arguments — the original
        # fitted on the module-level globals X_data / y_target, so the returned
        # coefficients ignored any scaling/polynomial transform of X_data_n.
        model.fit(X_data_n, y_target_n)
        # Use column names when the input is a DataFrame; fall back to
        # positional indices for plain ndarrays (e.g. scaler output).
        feature_index = X_data_n.columns if hasattr(X_data_n, 'columns') else range(X_data_n.shape[1])
        coeff = pd.Series(data=model.coef_ , index=feature_index)
        colname='alpha:'+str(param)
        coeff_df[colname] = coeff
    return coeff_df
# end of get_linear_regre_eval

 

# 라쏘에 사용될 alpha 파라미터의 값들을 정의하고 get_linear_reg_eval() 함수 호출
lasso_alphas = [ 0.07, 0.1, 0.5, 1, 3]
coeff_lasso_df =get_linear_reg_eval('Lasso', params=lasso_alphas, X_data_n=X_data, y_target_n=y_target)


#######  Lasso #######
alpha 0.07일 때 5 폴드 세트의 평균 RMSE: 5.612 
alpha 0.1일 때 5 폴드 세트의 평균 RMSE: 5.615 
alpha 0.5일 때 5 폴드 세트의 평균 RMSE: 5.669 
alpha 1일 때 5 폴드 세트의 평균 RMSE: 5.776 
alpha 3일 때 5 폴드 세트의 평균 RMSE: 6.189

 

# 반환된 coeff_lasso_df를 첫번째 컬럼순으로 내림차순 정렬하여 회귀계수 DataFrame출력
sort_column = 'alpha:'+str(lasso_alphas[0])
coeff_lasso_df.sort_values(by=sort_column, ascending=False)


	alpha:0.07	alpha:0.1	alpha:0.5	alpha:1	alpha:3
RM	3.789725	3.703202	2.498212	0.949811	0.000000
CHAS	1.434343	0.955190	0.000000	0.000000	0.000000
RAD	0.270936	0.274707	0.277451	0.264206	0.061864
ZN	0.049059	0.049211	0.049544	0.049165	0.037231
B	0.010248	0.010249	0.009469	0.008247	0.006510
NOX	-0.000000	-0.000000	-0.000000	-0.000000	0.000000
AGE	-0.011706	-0.010037	0.003604	0.020910	0.042495
TAX	-0.014290	-0.014570	-0.015442	-0.015212	-0.008602
INDUS	-0.042120	-0.036619	-0.005253	-0.000000	-0.000000
CRIM	-0.098193	-0.097894	-0.083289	-0.063437	-0.000000
LSTAT	-0.560431	-0.568769	-0.656290	-0.761115	-0.807679
PTRATIO	-0.765107	-0.770654	-0.758752	-0.722966	-0.265072
DIS	-1.176583	-1.160538	-0.936605	-0.668790	-0.000000

 

엘라스틱넷 회귀

# 엘라스틱넷에 사용될 alpha 파라미터의 값들을 정의하고 get_linear_reg_eval() 함수 호출
# l1_ratio는 0.7로 고정
elastic_alphas = [ 0.07, 0.1, 0.5, 1, 3]
coeff_elastic_df =get_linear_reg_eval('ElasticNet', params=elastic_alphas,
                                      X_data_n=X_data, y_target_n=y_target)
                                      


#######  ElasticNet #######
alpha 0.07일 때 5 폴드 세트의 평균 RMSE: 5.542 
alpha 0.1일 때 5 폴드 세트의 평균 RMSE: 5.526 
alpha 0.5일 때 5 폴드 세트의 평균 RMSE: 5.467 
alpha 1일 때 5 폴드 세트의 평균 RMSE: 5.597 
alpha 3일 때 5 폴드 세트의 평균 RMSE: 6.068

 

# 반환된 coeff_elastic_df를 첫번째 컬럼순으로 내림차순 정렬하여 회귀계수 DataFrame출력
sort_column = 'alpha:'+str(elastic_alphas[0])
coeff_elastic_df.sort_values(by=sort_column, ascending=False)



alpha:0.07	alpha:0.1	alpha:0.5	alpha:1	alpha:3
RM	3.574162	3.414154	1.918419	0.938789	0.000000
CHAS	1.330724	0.979706	0.000000	0.000000	0.000000
RAD	0.278880	0.283443	0.300761	0.289299	0.146846
ZN	0.050107	0.050617	0.052878	0.052136	0.038268
B	0.010122	0.010067	0.009114	0.008320	0.007020
AGE	-0.010116	-0.008276	0.007760	0.020348	0.043446
TAX	-0.014522	-0.014814	-0.016046	-0.016218	-0.011417
INDUS	-0.044855	-0.042719	-0.023252	-0.000000	-0.000000
CRIM	-0.099468	-0.099213	-0.089070	-0.073577	-0.019058
NOX	-0.175072	-0.000000	-0.000000	-0.000000	-0.000000
LSTAT	-0.574822	-0.587702	-0.693861	-0.760457	-0.800368
PTRATIO	-0.779498	-0.784725	-0.790969	-0.738672	-0.423065
DIS	-1.189438	-1.173647	-0.975902	-0.725174	-0.031208

 

선형 회귀 모델을 위한 데이터 변환

print(y_target.shape)
plt.hist(y_target, bins=10)

# (506,)

(array([ 21.,  55.,  82., 154.,  84.,  41.,  30.,   8.,  10.,  21.]),
 array([ 5. ,  9.5, 14. , 18.5, 23. , 27.5, 32. , 36.5, 41. , 45.5, 50. ]),
 <BarContainer object of 10 artists>)

 

from sklearn.preprocessing import StandardScaler, MinMaxScaler, PolynomialFeatures

# method selects the transform: 'Standard' (z-score), 'MinMax' (0-1 range),
# 'Log' (log1p); any other value returns the data unchanged.
# p_degree optionally appends polynomial features; keep it <= 2 in practice.
def get_scaled_data(method='None', p_degree=None, input_data=None):
    """Return input_data scaled by the chosen method, optionally expanded
    with polynomial features of degree p_degree (bias column excluded)."""
    if method == 'Standard':
        scaled_data = StandardScaler().fit_transform(input_data)
    elif method == 'MinMax':
        scaled_data = MinMaxScaler().fit_transform(input_data)
    elif method == 'Log':
        scaled_data = np.log1p(input_data)
    else:
        scaled_data = input_data

    # Idiom fix: compare to None with 'is not None', not '!='.
    if p_degree is not None:
        scaled_data = PolynomialFeatures(degree=p_degree, 
                                         include_bias=False).fit_transform(scaled_data)
    
    return scaled_data

 

# Ridge의 alpha값을 다르게 적용하고 다양한 데이터 변환방법에 따른 RMSE 추출. 
alphas = [0.1, 1, 10, 100]
#변환 방법은 모두 6개, 원본 그대로, 표준정규분포, 표준정규분포+다항식 특성
# 최대/최소 정규화, 최대/최소 정규화+다항식 특성, 로그변환 
scale_methods=[(None, None), ('Standard', None), ('Standard', 2), 
               ('MinMax', None), ('MinMax', 2), ('Log', None)]
for scale_method in scale_methods:
    X_data_scaled = get_scaled_data(method=scale_method[0], p_degree=scale_method[1], 
                                    input_data=X_data)
    print('\n## 변환 유형:{0}, Polynomial Degree:{1}'.format(scale_method[0], scale_method[1]))
    get_linear_reg_eval('Ridge', params=alphas, X_data_n=X_data_scaled, 
                        y_target_n=y_target, verbose=False)
                        
                        
                        
                        
## 변환 유형:None, Polynomial Degree:None
alpha 0.1일 때 5 폴드 세트의 평균 RMSE: 5.788 
alpha 1일 때 5 폴드 세트의 평균 RMSE: 5.653 
alpha 10일 때 5 폴드 세트의 평균 RMSE: 5.518 
alpha 100일 때 5 폴드 세트의 평균 RMSE: 5.330 

## 변환 유형:Standard, Polynomial Degree:None
alpha 0.1일 때 5 폴드 세트의 평균 RMSE: 5.826 
alpha 1일 때 5 폴드 세트의 평균 RMSE: 5.803 
alpha 10일 때 5 폴드 세트의 평균 RMSE: 5.637 
alpha 100일 때 5 폴드 세트의 평균 RMSE: 5.421 

## 변환 유형:Standard, Polynomial Degree:2
alpha 0.1일 때 5 폴드 세트의 평균 RMSE: 8.827 
alpha 1일 때 5 폴드 세트의 평균 RMSE: 6.871 
alpha 10일 때 5 폴드 세트의 평균 RMSE: 5.485 
alpha 100일 때 5 폴드 세트의 평균 RMSE: 4.634 

## 변환 유형:MinMax, Polynomial Degree:None
alpha 0.1일 때 5 폴드 세트의 평균 RMSE: 5.764 
alpha 1일 때 5 폴드 세트의 평균 RMSE: 5.465 
alpha 10일 때 5 폴드 세트의 평균 RMSE: 5.754 
alpha 100일 때 5 폴드 세트의 평균 RMSE: 7.635 

## 변환 유형:MinMax, Polynomial Degree:2
alpha 0.1일 때 5 폴드 세트의 평균 RMSE: 5.298 
alpha 1일 때 5 폴드 세트의 평균 RMSE: 4.323 
alpha 10일 때 5 폴드 세트의 평균 RMSE: 5.185 
alpha 100일 때 5 폴드 세트의 평균 RMSE: 6.538 

## 변환 유형:Log, Polynomial Degree:None
alpha 0.1일 때 5 폴드 세트의 평균 RMSE: 4.770 
alpha 1일 때 5 폴드 세트의 평균 RMSE: 4.676 
alpha 10일 때 5 폴드 세트의 평균 RMSE: 4.836 
alpha 100일 때 5 폴드 세트의 평균 RMSE: 6.241

 

X = np.arange(6).reshape(3, 2)
poly = PolynomialFeatures(3)
poly.fit_transform(X)


# array([[  1.,   0.,   1.,   0.,   0.,   1.,   0.,   0.,   0.,   1.],
#        [  1.,   2.,   3.,   4.,   6.,   9.,   8.,  12.,  18.,  27.],
#        [  1.,   4.,   5.,  16.,  20.,  25.,  64.,  80., 100., 125.]])

 

반응형

'Data_Science > ML_Perfect_Guide' 카테고리의 다른 글

5-6. 회귀트리  (0) 2021.12.27
5-5. Logistics Regression  (0) 2021.12.27
5-3. Polynomial Regression, overfitting  (0) 2021.12.27
5-2. 보스턴 주택 가격 예측 || LinearRegression  (0) 2021.12.27
5-1. Gradient Descent  (0) 2021.12.27
728x90
반응형

Polynomial Regression 이해

PolynomialFeatures 클래스로 다항식 변환. 사이킷런은 다항 회귀를 직접 지원하지 않으므로, 피처를 [단항, 단항, ...]**n 형태의 다항 피처로 변환한 후 LinearRegression으로 학습

from sklearn.preprocessing import PolynomialFeatures
import numpy as np

# 다항식으로 변환한 단항식 생성, [[0,1],[2,3]]의 2X2 행렬 생성
X = np.arange(4).reshape(2,2)
print('일차 단항식 계수 feature:\n',X )

# degree = 2 인 2차 다항식으로 변환하기 위해 PolynomialFeatures를 이용하여 변환
poly = PolynomialFeatures(degree=2)
poly.fit(X)
poly_ftr = poly.transform(X)
print('변환된 2차 다항식 계수 feature:\n', poly_ftr)


일차 단항식 계수 feature:
 [[0 1]
 [2 3]]
변환된 2차 다항식 계수 feature:
 [[1. 0. 1. 0. 0. 1.]
 [1. 2. 3. 4. 6. 9.]]

 

3차 다항식 결정값을 구하는 함수 polynomial_func(X) 생성. 즉 회귀식은 결정값 y = 1+ 2x_1 + 3x_1^2 + 4x_2^3

def polynomial_func(X):
    """Return y = 1 + 2*x1 + 3*x1^2 + 4*x2^3 for columns x1, x2 of X."""
    x1, x2 = X[:, 0], X[:, 1]
    return 1 + 2 * x1 + 3 * x1 ** 2 + 4 * x2 ** 3

X = np.arange(0,4).reshape(2,2)

print('일차 단항식 계수 feature: \n' ,X)
y = polynomial_func(X)
print('삼차 다항식 결정값: \n', y)

# 3 차 다항식 변환 
poly_ftr = PolynomialFeatures(degree=3).fit_transform(X)
print('3차 다항식 계수 feature: \n',poly_ftr)

# Linear Regression에 3차 다항식 계수 feature와 3차 다항식 결정값으로 학습 후 회귀 계수 확인
model = LinearRegression()
model.fit(poly_ftr,y)
print('Polynomial 회귀 계수\n' , np.round(model.coef_, 2))
print('Polynomial 회귀 Shape :', model.coef_.shape)


일차 단항식 계수 feature: 
 [[0 1]
 [2 3]]
삼차 다항식 결정값: 
 [  5 125]
3차 다항식 계수 feature: 
 [[ 1.  0.  1.  0.  0.  1.  0.  0.  0.  1.]
 [ 1.  2.  3.  4.  6.  9.  8. 12. 18. 27.]]
Polynomial 회귀 계수
 [0.   0.18 0.18 0.36 0.54 0.72 0.72 1.08 1.62 2.34]
Polynomial 회귀 Shape : (10,)

 

3차 다항식 계수의 피처값과 3차 다항식 결정값으로 학습

** 사이킷런 파이프라인(Pipeline)을 이용하여 3차 다항회귀 학습 **

사이킷런의 Pipeline 객체는 Feature 엔지니어링 변환과 모델 학습/예측을 순차적으로 결합해줍니다.

from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.pipeline import Pipeline
import numpy as np

def polynomial_func(X):
    """Cubic target function: y = 1 + 2*x1 + 3*x1**2 + 4*x2**3."""
    return 1 + 2 * X[:, 0] + 3 * np.square(X[:, 0]) + 4 * np.power(X[:, 1], 3)

# Pipeline 객체로 Streamline 하게 Polynomial Feature변환과 Linear Regression을 연결
model = Pipeline([('poly', PolynomialFeatures(degree=3)),
                  ('linear', LinearRegression())])
X = np.arange(4).reshape(2,2)
y = polynomial_func(X)

model = model.fit(X, y)
print('Polynomial 회귀 계수\n', np.round(model.named_steps['linear'].coef_, 2))


# Polynomial 회귀 계수
#  [0.   0.18 0.18 0.36 0.54 0.72 0.72 1.08 1.62 2.34]

 

다항 회귀를 이용한 보스턴 주택가격 예측

from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error , r2_score
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.pipeline import Pipeline
import numpy as np

# boston 데이타셋 로드
boston = load_boston()

# boston 데이타셋 DataFrame 변환 
bostonDF = pd.DataFrame(boston.data , columns = boston.feature_names)

# boston dataset의 target array는 주택 가격임. 이를 PRICE 컬럼으로 DataFrame에 추가함. 
bostonDF['PRICE'] = boston.target
print('Boston 데이타셋 크기 :',bostonDF.shape)

y_target = bostonDF['PRICE']
X_data = bostonDF.drop(['PRICE'],axis=1,inplace=False)


X_train , X_test , y_train , y_test = train_test_split(X_data , y_target ,test_size=0.3, random_state=156)

## Pipeline을 이용하여 PolynomialFeatures 변환과 LinearRegression 적용을 순차적으로 결합. 
p_model = Pipeline([('poly', PolynomialFeatures(degree=3, include_bias=False)),
                  ('linear', LinearRegression())])

# 다항 회귀는 오버피팅이 일어나기 쉬우므로 degree 조절이 중요함. 학습 데이터에 지나치게 충실하면 예측 곡선이 괴기한 모습이 됨

p_model.fit(X_train, y_train)
y_preds = p_model.predict(X_test)
mse = mean_squared_error(y_test, y_preds)
rmse = np.sqrt(mse)


print('MSE : {0:.3f} , RMSE : {1:.3F}'.format(mse , rmse))
print('Variance score : {0:.3f}'.format(r2_score(y_test, y_preds)))



# Boston 데이타셋 크기 : (506, 14)
# MSE : 79625.594 , RMSE : 282.180
# Variance score : -1116.598

 

X_train_poly= PolynomialFeatures(degree=2, include_bias=False).fit_transform(X_train, y_train)
X_train_poly.shape, X_train.shape

# ((354, 104), (354, 13))

 

Polynomial Regression 을 이용한 Underfitting, Overfitting 이해

cosine 곡선에 약간의 Noise 변동값을 더하여 실제값 곡선을 만듬 **

import numpy as np
import matplotlib.pyplot as plt
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import cross_val_score
%matplotlib inline

# random 값으로 구성된 X값에 대해 Cosine 변환값을 반환. 
def true_fun(X):
    """Ground-truth curve for the demo: cosine evaluated at 1.5*pi*X (no noise)."""
    angle = 1.5 * np.pi * X
    return np.cos(angle)

# X는 0 부터 1까지 30개의 random 값을 순서대로 sampling 한 데이타 입니다.  
np.random.seed(0)
n_samples = 30
X = np.sort(np.random.rand(n_samples))

# y 값은 cosine 기반의 true_fun() 에서 약간의 Noise 변동값을 더한 값입니다. 
y = true_fun(X) + np.random.randn(n_samples) * 0.1

 

plt.scatter(X, y)

 

# Compare polynomial regression with degree 1, 4 and 15 (under/well/over-fit).
plt.figure(figsize=(14, 5))
degrees = [1, 4, 15]

for i in range(len(degrees)):
    ax = plt.subplot(1, len(degrees), i + 1)
    plt.setp(ax, xticks=(), yticks=())

    # Polynomial expansion for the current degree, chained into OLS.
    polynomial_features = PolynomialFeatures(degree=degrees[i], include_bias=False)
    linear_regression = LinearRegression()
    pipeline = Pipeline([("polynomial_features", polynomial_features),
                         ("linear_regression", linear_regression)])
    pipeline.fit(X.reshape(-1, 1), y)

    # Evaluate with 10-fold cross validation (scores are negative MSE).
    scores = cross_val_score(pipeline, X.reshape(-1, 1), y, scoring="neg_mean_squared_error", cv=10)
    coefficients = pipeline.named_steps['linear_regression'].coef_
    # BUGFIX: the 2 belongs inside np.round(); previously it was passed as an
    # unused third argument to str.format(), so coefficients were rounded to 0
    # decimals instead of 2.
    print('\nDegree {0} 회귀 계수는 {1} 입니다.'.format(degrees[i], np.round(coefficients, 2)))
    print('Degree {0} MSE 는 {1:.2f} 입니다.'.format(degrees[i], -1 * np.mean(scores)))

    # Predict on a dense 0..1 grid; plot the model curve vs. the true curve.
    X_test = np.linspace(0, 1, 100)
    # Model prediction curve
    plt.plot(X_test, pipeline.predict(X_test[:, np.newaxis]), label="Model")
    # Noise-free ground-truth curve
    plt.plot(X_test, true_fun(X_test), '--', label="True function")
    plt.scatter(X, y, edgecolor='b', s=20, label="Samples")

    plt.xlabel("x"); plt.ylabel("y"); plt.xlim((0, 1)); plt.ylim((-2, 2)); plt.legend(loc="best")
    plt.title("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(degrees[i], -scores.mean(), scores.std()))

plt.show()


Degree 1 회귀 계수는 [-2.] 입니다.
Degree 1 MSE 는 0.41 입니다.

Degree 4 회귀 계수는 [  0. -18.  24.  -7.] 입니다.
Degree 4 MSE 는 0.04 입니다.

Degree 15 회귀 계수는 [-2.98300000e+03  1.03900000e+05 -1.87417000e+06  2.03717200e+07
 -1.44874017e+08  7.09319141e+08 -2.47067172e+09  6.24564702e+09
 -1.15677216e+10  1.56895933e+10 -1.54007040e+10  1.06457993e+10
 -4.91381016e+09  1.35920642e+09 -1.70382078e+08] 입니다.
Degree 15 MSE 는 182581084.83 입니다.

 

편향분산 트레이드 오프 과소적합(편향높고, 분산낮고 : 평균으로 가려함) > 적합wellfit > 과대적합(분산높고, 편향낮음 : 실측 하나하나에 가까워지려함) bias편향성 X variance분산 : 변동성

=> 그래서 규제를 통해 well fit을 찾아 내려함

 

잔차 오류 최소화 : 구체화만 하면 회귀계수가 커짐, 회귀계수가 커지면 오버핏이 기괴해짐 alpha의 역할 => 0이면 infinite이면 규제 : alpha로 페널티 부여해 회귀계수값을 감소시키는 방식 L1 w절대값에 // L2 w제곱에

 

반응형
728x90
반응형
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
from scipy import stats
from sklearn.datasets import load_boston
%matplotlib inline

# boston 데이타셋 로드
boston = load_boston()

# boston 데이타셋 DataFrame 변환 
bostonDF = pd.DataFrame(boston.data , columns = boston.feature_names)

# boston dataset의 target array는 주택 가격임. 이를 PRICE 컬럼으로 DataFrame에 추가함. 
bostonDF['PRICE'] = boston.target
print('Boston 데이타셋 크기 :',bostonDF.shape)
bostonDF.head()

# Boston 데이타셋 크기 : (506, 14)
CRIM	ZN	INDUS	CHAS	NOX	RM	AGE	DIS	RAD	TAX	PTRATIO	B	LSTAT	PRICE
0	0.00632	18.0	2.31	0.0	0.538	6.575	65.2	4.0900	1.0	296.0	15.3	396.90	4.98	24.0
1	0.02731	0.0	7.07	0.0	0.469	6.421	78.9	4.9671	2.0	242.0	17.8	396.90	9.14	21.6
2	0.02729	0.0	7.07	0.0	0.469	7.185	61.1	4.9671	2.0	242.0	17.8	392.83	4.03	34.7
3	0.03237	0.0	2.18	0.0	0.458	6.998	45.8	6.0622	3.0	222.0	18.7	394.63	2.94	33.4
4	0.06905	0.0	2.18	0.0	0.458	7.147	54.2	6.0622	3.0	222.0	18.7	396.90	5.33	36.2
  • CRIM: 지역별 범죄 발생률
  • ZN: 25,000평방피트를 초과하는 거주 지역의 비율
  • INDUS: 비상업 지역 넓이 비율
  • CHAS: 찰스강에 대한 더미 변수(강의 경계에 위치한 경우는 1, 아니면 0)
  • NOX: 일산화질소 농도
  • RM: 거주할 수 있는 방 개수
  • AGE: 1940년 이전에 건축된 소유 주택의 비율
  • DIS: 5개 주요 고용센터까지의 가중 거리
  • RAD: 고속도로 접근 용이도
  • TAX: 10,000달러당 재산세율
  • PTRATIO: 지역의 교사와 학생 수 비율
  • B: 지역의 흑인 거주 비율
  • LSTAT: 하위 계층의 비율
  • MEDV: 본인 소유의 주택 가격(중앙값)

 

  • 각 컬럼별로 주택가격에 미치는 영향도를 조사
# 2개의 행과 4개의 열을 가진 subplots를 이용. axs는 4x2개의 ax를 가짐.
fig, axs = plt.subplots(figsize=(16,8) , ncols=4 , nrows=2)
lm_features = ['RM','ZN','INDUS','NOX','AGE','PTRATIO','LSTAT','RAD']
for i , feature in enumerate(lm_features):
    row = int(i/4)
    col = i%4
    # 시본의 regplot을 이용해 산점도와 선형 회귀 직선을 함께 표현
    sns.regplot(x=feature , y='PRICE',data=bostonDF , ax=axs[row][col])

 

학습과 테스트 데이터 세트로 분리하고 학습/예측/평가 수행

from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error , r2_score

y_target = bostonDF['PRICE']
X_data = bostonDF.drop(['PRICE'],axis=1,inplace=False)

X_train , X_test , y_train , y_test = train_test_split(X_data , y_target ,test_size=0.3, random_state=156)

# Linear Regression OLS로 학습/예측/평가 수행. 
lr = LinearRegression()
lr.fit(X_train ,y_train )
y_preds = lr.predict(X_test)
mse = mean_squared_error(y_test, y_preds)
rmse = np.sqrt(mse)

print('MSE : {0:.3f} , RMSE : {1:.3F}'.format(mse , rmse))
print('Variance score : {0:.3f}'.format(r2_score(y_test, y_preds)))

# MSE : 17.297 , RMSE : 4.159
# Variance score : 0.757

 

print('절편 값:',lr.intercept_)
print('회귀 계수값:', np.round(lr.coef_, 1))

# 절편 값: 40.995595172164315
# 회귀 계수값: [ -0.1   0.1   0.    3.  -19.8   3.4   0.   -1.7   0.4  -0.   -0.9   0.   -0.6]

 

# 회귀 계수를 큰 값 순으로 정렬하기 위해 Series로 생성. index가 컬럼명에 유의
coeff = pd.Series(data=np.round(lr.coef_, 1), index=X_data.columns )
coeff.sort_values(ascending=False)

RM          3.4
CHAS        3.0
RAD         0.4
ZN          0.1
B           0.0
TAX        -0.0
AGE         0.0
INDUS       0.0
CRIM       -0.1
LSTAT      -0.6
PTRATIO    -0.9
DIS        -1.7
NOX       -19.8
dtype: float64

 

from sklearn.model_selection import cross_val_score

y_target = bostonDF['PRICE']
X_data = bostonDF.drop(['PRICE'],axis=1,inplace=False)
lr = LinearRegression()

# cross_val_score( )로 5 Fold 셋으로 MSE 를 구한 뒤 이를 기반으로 다시  RMSE 구함. // 회귀는 오차 값이 작을수록 좋은 것
neg_mse_scores = cross_val_score(lr, X_data, y_target, scoring="neg_mean_squared_error", cv = 5)
rmse_scores  = np.sqrt(-1 * neg_mse_scores)
avg_rmse = np.mean(rmse_scores)

# cross_val_score(scoring="neg_mean_squared_error")로 반환된 값은 모두 음수 
print(' 5 folds 의 개별 Negative MSE scores: ', np.round(neg_mse_scores, 2))
print(' 5 folds 의 개별 RMSE scores : ', np.round(rmse_scores, 2))
print(' 5 folds 의 평균 RMSE : {0:.3f} '.format(avg_rmse))


#  5 folds 의 개별 Negative MSE scores:  [-12.46 -26.05 -33.07 -80.76 -33.31]
#  5 folds 의 개별 RMSE scores :  [3.53 5.1  5.75 8.99 5.77]
#  5 folds 의 평균 RMSE : 5.829

 

반응형
728x90
반응형

실제값을 Y=4X+6 시뮬레이션하는 데이터 값 생성

import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline

np.random.seed(0)
# y = 4X + 6 식을 근사(w1=4, w0=6). random 값은 Noise를 위해 만듬
X = 2 * np.random.rand(100,1)
y = 6 +4 * X+ np.random.randn(100,1)

# X, y 데이터 셋 scatter plot으로 시각화
plt.scatter(X, y)

 

X.shape, y.shape

# ((100, 1), (100, 1))

 

w0과 w1의 값을 최소화 할 수 있도록 업데이트 수행하는 함수 생성.**

  • 예측 배열 y_pred는 np.dot(X, w1.T) + w0 임 100개의 데이터 X(1,2,...,100)이 있다면 예측값은 w0 + X(1)w1 + X(2)w1 +..+ X(100)*w1이며, 이는 입력 배열 X와 w1 배열의 내적임.
  • 새로운 w1과 w0를 update함
# w1 과 w0 를 업데이트 할 w1_update, w0_update를 반환. 
def get_weight_updates(w1, w0, X, y, learning_rate=0.01):
    """Return the (w1_update, w0_update) gradient-descent deltas for y ~ X*w1 + w0.

    Parameters
    ----------
    w1 : ndarray, shape (1, 1) -- current slope weight
    w0 : ndarray, shape (1, 1) -- current intercept weight
    X  : ndarray, shape (N, 1) -- feature values
    y  : ndarray, shape (N, 1) -- target values
    learning_rate : float -- step size multiplier

    The returned updates carry a leading minus sign, so the caller moves
    against the gradient by computing w1 - w1_update / w0 - w0_update.
    (The original code pre-initialized both updates with np.zeros_like and
    then immediately overwrote them; that dead code is removed here.)
    """
    N = len(y)
    # Prediction with the current weights, and the residuals.
    y_pred = np.dot(X, w1.T) + w0
    diff = y - y_pred

    # All-ones column lets the intercept gradient be written as a dot product.
    w0_factors = np.ones((N, 1))

    # Gradient of the MSE w.r.t. w1 and w0, scaled by the learning rate.
    w1_update = -(2 / N) * learning_rate * np.dot(X.T, diff)
    w0_update = -(2 / N) * learning_rate * np.dot(w0_factors.T, diff)

    return w1_update, w0_update

 

# y_pred = np.dot(X, w1.T) + w0의 풀이 // W1 미분 과정과 .D0T()과 동일함
w0 = np.zeros((1,1))
w1 = np.zeros((1,1))
y_pred = np.dot(X, w1.T) + w0
diff = y-y_pred
print(diff.shape)
w0_factors = np.ones((100,1))
w1_update = -(2/100)*0.01*(np.dot(X.T, diff))
w0_update = -(2/100)*0.01*(np.dot(w0_factors.T, diff))   
print(w1_update.shape, w0_update.shape)
w1, w0

# (100, 1)
# (1, 1) (1, 1)

# (array([[0.]]), array([[0.]]))

 

반복적으로 경사 하강법을 이용하여 get_weight_updates()를 호출하여 w1과 w0를 업데이트 하는 함수 생성

# 입력 인자 iters로 주어진 횟수만큼 반복적으로 w1과 w0를 업데이트 적용함. 
def gradient_descent_steps(X, y, iters=10000, learning_rate=0.01):
    """Run full-batch gradient descent and return the fitted (w1, w0).

    X, y : ndarrays of shape (N, 1).
    iters : number of update steps.
    learning_rate : step size forwarded to get_weight_updates().
        New optional parameter; the default 0.01 keeps the original behavior,
        so existing callers are unaffected.
    """
    # Start both weights at zero.
    w0 = np.zeros((1, 1))
    w1 = np.zeros((1, 1))

    # Repeatedly subtract the gradient-based updates.
    for _ in range(iters):
        w1_update, w0_update = get_weight_updates(w1, w0, X, y,
                                                  learning_rate=learning_rate)
        w1 = w1 - w1_update
        w0 = w0 - w0_update

    return w1, w0

 

예측 오차 비용을 계산을 수행하는 함수 생성 및 경사 하강법 수행

def get_cost(y, y_pred):
    """Mean squared error between actual y and predicted y_pred."""
    residuals = y - y_pred
    return np.mean(np.square(residuals))

w1, w0 = gradient_descent_steps(X, y, iters=1000)
print("w1:{0:.3f} w0:{1:.3f}".format(w1[0,0], w0[0,0]))
y_pred = w1[0,0] * X + w0
print('Gradient Descent Total Cost:{0:.4f}'.format(get_cost(y, y_pred)))


# w1:4.022 w0:6.162
# Gradient Descent Total Cost:0.9935

 

plt.scatter(X, y)
plt.plot(X,y_pred)

 

미니 배치 확률적 경사 하강법을 이용한 최적 비용함수 도출 

FOR문으로 전체를 연산하여 시간이 오래걸리니 BATCH SIZE 만큼 샘플링인덱스

def stochastic_gradient_descent_steps(X, y, batch_size=10, iters=1000):
    """Mini-batch SGD for y ~ X*w1 + w0; returns the fitted (w1, w0).

    Each iteration reseeds the RNG with the iteration index (reproducible
    batches), samples `batch_size` random rows, and applies one gradient
    step on that batch only.

    (The unused locals prev_cost / iter_index from the original were removed.)
    """
    w0 = np.zeros((1, 1))
    w1 = np.zeros((1, 1))

    for ind in range(iters):
        # Per-iteration seed keeps the random batches reproducible.
        np.random.seed(ind)
        # Draw a random batch of batch_size samples from X and y.
        stochastic_random_index = np.random.permutation(X.shape[0])
        sample_X = X[stochastic_random_index[0:batch_size]]
        sample_y = y[stochastic_random_index[0:batch_size]]
        # One gradient step using only the sampled batch.
        w1_update, w0_update = get_weight_updates(w1, w0, sample_X, sample_y, learning_rate=0.01)
        w1 = w1 - w1_update
        w0 = w0 - w0_update

    return w1, w0

 

np.random.permutation(X.shape[0])

array([66, 71, 54, 88, 82, 12, 36, 46, 14, 67, 10,  3, 62, 29, 97, 69, 70,
       93, 31, 73, 60, 96, 28, 27, 21, 19, 33, 78, 32, 94,  1, 41, 40, 76,
       37, 87, 24, 23, 50,  2, 47, 20, 77, 17, 56, 64, 68, 25, 15, 22, 16,
       98, 63, 92, 86, 38,  6, 57, 95, 44,  9, 42, 81, 99, 35, 84, 59, 48,
       75, 65, 85, 90, 55, 43, 58, 89, 30, 80, 34, 18, 51, 49, 52, 74, 26,
       45, 39,  4, 11, 53, 91, 79,  8,  0,  5, 13, 61, 72,  7, 83])

 

w1, w0 = stochastic_gradient_descent_steps(X, y, iters=1000)
print("w1:",round(w1[0,0],3),"w0:",round(w0[0,0],3))
y_pred = w1[0,0] * X + w0
print('Stochastic Gradient Descent Total Cost:{0:.4f}'.format(get_cost(y, y_pred)))

# w1: 4.028 w0: 6.156
# Stochastic Gradient Descent Total Cost:0.9937

 

반응형
728x90
반응형

Basic 스태킹 모델 ** 각 모델로 BAGGING 하고 데이터 눕히고 거기에 또 돌리기

 

데이터 로딩

import numpy as np

from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LogisticRegression

from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

cancer_data = load_breast_cancer()

X_data = cancer_data.data
y_label = cancer_data.target

X_train , X_test , y_train , y_test = train_test_split(X_data , y_label , test_size=0.2 , random_state=0)

 

개별 Classifier와 최종 Stacking 데이터를 학습할 메타 Classifier 생성

# 개별 ML 모델을 위한 Classifier 생성.
knn_clf  = KNeighborsClassifier(n_neighbors=4)
rf_clf = RandomForestClassifier(n_estimators=100, random_state=0)
dt_clf = DecisionTreeClassifier()
ada_clf = AdaBoostClassifier(n_estimators=100)

# 최종 Stacking 모델을 위한 Classifier생성. 
lr_final = LogisticRegression(C=10)

 

개별 Classifier 학습/예측/평가

# 개별 모델들을 학습. 
knn_clf.fit(X_train, y_train)
rf_clf.fit(X_train , y_train)
dt_clf.fit(X_train , y_train)
ada_clf.fit(X_train, y_train)

 

# 학습된 개별 모델들이 각자 반환하는 예측 데이터 셋을 생성하고 개별 모델의 정확도 측정. 
knn_pred = knn_clf.predict(X_test)
rf_pred = rf_clf.predict(X_test)
dt_pred = dt_clf.predict(X_test)
ada_pred = ada_clf.predict(X_test)

print('KNN 정확도: {0:.4f}'.format(accuracy_score(y_test, knn_pred)))
print('랜덤 포레스트 정확도: {0:.4f}'.format(accuracy_score(y_test, rf_pred)))
print('결정 트리 정확도: {0:.4f}'.format(accuracy_score(y_test, dt_pred)))
print('에이다부스트 정확도: {0:.4f} :'.format(accuracy_score(y_test, ada_pred)))

KNN 정확도: 0.9211
랜덤 포레스트 정확도: 0.9649
결정 트리 정확도: 0.8947
에이다부스트 정확도: 0.9561 :

 

개별 모델의 예측 결과를 메타 모델이 학습할 수 있도록 ★스태킹 형태로 재 생성 **

그래야 각각 분석 결과 행이 분석에 용이한 COL이 됨

pred = np.array([knn_pred, rf_pred, dt_pred, ada_pred])
print(pred.shape)

# (4, 114)

 

pred

array([[0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0,
        1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1,
        0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1,
        0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1,
        0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0,
        1, 0, 0, 1],
       [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0,
        1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1,
        1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1,
        0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1,
        0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0,
        1, 0, 0, 1],
       [0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0,
        1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1,
        0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0,
        0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1,
        0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1,
        1, 0, 0, 1],
       [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0,
        1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1,
        1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0,
        0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1,
        0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0,
        1, 0, 0, 1]])

 

# transpose를 이용해 행과 열의 위치 교환. 컬럼 레벨로 각 알고리즘의 예측 결과를 피처로 만듦. 
pred = np.transpose(pred)
print(pred.shape)

# (114, 4)

 

pred

array([[0, 0, 0, 0],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [0, 1, 0, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [0, 0, 0, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [0, 0, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [0, 0, 1, 0],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 0, 0, 0],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [0, 0, 1, 0],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [0, 1, 0, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [1, 0, 0, 0],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 0, 1],
       [1, 1, 1, 1],
       [1, 1, 0, 1],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 0, 0],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [0, 0, 0, 1],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 1, 1],
       [1, 1, 0, 1],
       [0, 0, 0, 0],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [0, 1, 1, 1],
       [0, 0, 0, 1],
       [0, 0, 1, 0],
       [1, 1, 1, 1],
       [0, 0, 0, 0],
       [0, 0, 0, 0],
       [1, 1, 1, 1]])

 

메타 모델 학습/예측/평가

lr_final.fit(pred, y_test)
final = lr_final.predict(pred)

print('최종 메타 모델의 예측 정확도: {0:.4f}'.format(accuracy_score(y_test , final)))

# 최종 메타 모델의 예측 정확도: 0.9737

 

CV 셋 기반의 Stacking

학습세트를 3개 폴드하고 분리한데이터로 2번 학습시키고 1번 검증 한 후 폴드 데이터를 다시합침,

그리고 테스트 세트로 예측하고 결과를 모음 그럼결과값 = 학습 폴드 2, 3, 검증폴드로 예측한 결과값, 테스트세트 예측 결과값 등 4개 + 원본학습 레이블 => 으로 학습 , 또 테스트, 원본테스트 레이블 평가

from sklearn.model_selection import KFold
from sklearn.metrics import mean_absolute_error

# 개별 기반 모델에서 최종 메타 모델이 사용할 학습 및 테스트용 데이터를 생성하기 위한 함수. 
def get_stacking_base_datasets(model, X_train_n, y_train_n, X_test_n, n_folds ):
    """Build the meta-model's train/test features from one base model via CV.

    For each of n_folds splits: fit `model` on the in-fold rows, predict the
    held-out rows (these out-of-fold predictions form the meta training
    column), and predict the full test set (averaged over all folds at the
    end).

    Returns (train_fold_pred, test_pred_mean): column vectors of shape
    (len(X_train_n), 1) and (len(X_test_n), 1).
    """
    # BUGFIX: random_state removed - it has no effect when shuffle=False and
    # raises ValueError in scikit-learn >= 0.24.
    kf = KFold(n_splits=n_folds, shuffle=False)

    # Buffers for the meta-model inputs.
    train_fold_pred = np.zeros((X_train_n.shape[0], 1))   # OOF predictions as a 2-D column
    test_pred = np.zeros((X_test_n.shape[0], n_folds))    # one column per fold, averaged below
    print(model.__class__.__name__ , ' model 시작 ')

    for folder_counter , (train_index, valid_index) in enumerate(kf.split(X_train_n)):
        # Split the training data into this fold's fit / validation parts.
        print('\t 폴드 세트: ',folder_counter,' 시작 ')
        X_tr = X_train_n[train_index] 
        y_tr = y_train_n[train_index] 
        X_te = X_train_n[valid_index]  

        # Fit the base model on the in-fold data.
        model.fit(X_tr , y_tr)

        # Out-of-fold predictions fill this model's meta training column.
        train_fold_pred[valid_index, :] = model.predict(X_te).reshape(-1,1)

        # Predict the original test set with this fold's fitted model.
        test_pred[:, folder_counter] = model.predict(X_test_n)

    # Average the per-fold test predictions into a single column.
    test_pred_mean = np.mean(test_pred, axis=1).reshape(-1,1)

    # train_fold_pred feeds the meta-model's training; test_pred_mean its test.
    return train_fold_pred , test_pred_mean

 

knn_train, knn_test = get_stacking_base_datasets(knn_clf, X_train, y_train, X_test, 7)
rf_train, rf_test = get_stacking_base_datasets(rf_clf, X_train, y_train, X_test, 7)
dt_train, dt_test = get_stacking_base_datasets(dt_clf, X_train, y_train, X_test,  7)    
ada_train, ada_test = get_stacking_base_datasets(ada_clf, X_train, y_train, X_test, 7)

KNeighborsClassifier  model 시작 
	 폴드 세트:  0  시작 
	 폴드 세트:  1  시작 
	 폴드 세트:  2  시작 
	 폴드 세트:  3  시작 
	 폴드 세트:  4  시작 
	 폴드 세트:  5  시작 
	 폴드 세트:  6  시작 
RandomForestClassifier  model 시작 
	 폴드 세트:  0  시작 
	 폴드 세트:  1  시작 
	 폴드 세트:  2  시작 
	 폴드 세트:  3  시작 
	 폴드 세트:  4  시작 
	 폴드 세트:  5  시작 
	 폴드 세트:  6  시작 
DecisionTreeClassifier  model 시작 
	 폴드 세트:  0  시작 
	 폴드 세트:  1  시작 
	 폴드 세트:  2  시작 
	 폴드 세트:  3  시작 
	 폴드 세트:  4  시작 
	 폴드 세트:  5  시작 
	 폴드 세트:  6  시작 
AdaBoostClassifier  model 시작 
	 폴드 세트:  0  시작 
	 폴드 세트:  1  시작 
	 폴드 세트:  2  시작 
	 폴드 세트:  3  시작 
	 폴드 세트:  4  시작 
	 폴드 세트:  5  시작 
	 폴드 세트:  6  시작

 

# 간결하게 수행 CONCATENATE // 2차원인 결과들을 합침
Stack_final_X_train = np.concatenate((knn_train, rf_train, dt_train, ada_train), axis=1)
Stack_final_X_test = np.concatenate((knn_test, rf_test, dt_test, ada_test), axis=1)
print('원본 학습 피처 데이터 Shape:',X_train.shape, '원본 테스트 피처 Shape:',X_test.shape)
print('스태킹 학습 피처 데이터 Shape:', Stack_final_X_train.shape,
      '스태킹 테스트 피처 데이터 Shape:',Stack_final_X_test.shape)
      
원본 학습 피처 데이터 Shape: (455, 30) 원본 테스트 피처 Shape: (114, 30)
스태킹 학습 피처 데이터 Shape: (455, 4) 스태킹 테스트 피처 데이터 Shape: (114, 4)

 

lr_final.fit(Stack_final_X_train, y_train)
stack_final = lr_final.predict(Stack_final_X_test)

print('최종 메타 모델의 예측 정확도: {0:.4f}'.format(accuracy_score(y_test, stack_final)))

# 최종 메타 모델의 예측 정확도: 0.9737
반응형
728x90
반응형

데이터 일차 가공 및 모델 학습/예측/평가

** 데이터 로드 **
imbalanced, 284807건 중 492건 이 fraud 0.172%
feature engineering 다양하게, 중요 feature : 정규분포, log 변환 // 이상치, smote  오버 샘플링 : fraud data 뻥튀기
log 변환은 왜곡된, 치우친 분포도의 데이터를 정규분포화하는 엔지니어링 방식
Inter Quantile Range를 이용한 Outlier Removal 박스플롯
언더 샘플링 많은 레이블을 적은 수준으로 감소 불균형시 해소하려고
오버 샘플링 적은 레이블을 많은 수준으로 증식
SMOTE(Synthetic Minority Over-Sampling Technique) - k최근접이웃으로 사잇값 렌덤 데이터 신규증식, 증식으로 오버 셈플링 완료

import pandas as pd
import numpy as np 
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings("ignore")
%matplotlib inline

card_df = pd.read_csv('./creditcard.csv')
card_df.head(3)
# amount 카드 사용액
# class 0은 정상, 1은 비정상 : 우리는 1을 찾기 위함

	Time	V1	V2	V3	V4	V5	V6	V7	V8	V9	...	V21	V22	V23	V24	V25	V26	V27	V28	Amount	Class
0	0.0	-1.359807	-0.072781	2.536347	1.378155	-0.338321	0.462388	0.239599	0.098698	0.363787	...	-0.018307	0.277838	-0.110474	0.066928	0.128539	-0.189115	0.133558	-0.021053	149.62	0
1	0.0	1.191857	0.266151	0.166480	0.448154	0.060018	-0.082361	-0.078803	0.085102	-0.255425	...	-0.225775	-0.638672	0.101288	-0.339846	0.167170	0.125895	-0.008983	0.014724	2.69	0
2	1.0	-1.358354	-1.340163	1.773209	0.379780	-0.503198	1.800499	0.791461	0.247676	-1.514654	...	0.247998	0.771679	0.909412	-0.689281	-0.327642	-0.139097	-0.055353	-0.059752	378.66	0
3 rows × 31 columns

 

card_df.shape

# (284807, 31)

 

원본 DataFrame은 유지하고 데이터 가공을 위한 DataFrame을 복사하여 반환

from sklearn.model_selection import train_test_split
# 데이터 사전가공 함수 : 데이터 log변환, OD, 각 적용 후 예측 성능 평가
# 인자로 입력받은 DataFrame을 복사 한 뒤 Time 컬럼만 삭제하고 복사된 DataFrame 반환
def get_preprocessed_df(df=None):
    """Return a copy of df with the 'Time' column removed; df itself is untouched."""
    prepared = df.copy()
    return prepared.drop('Time', axis=1)

 

학습과 테스트 데이터 세트를 반환하는 함수 생성. 사전 데이터 처리가 끝난 뒤 해당 함수 호출

# 사전 데이터 가공 후 학습과 테스트 데이터 세트를 반환하는 함수.
def get_train_test_dataset(df=None):
    """Preprocess df and return a stratified 70/30 train/test split.

    Returns (X_train, X_test, y_train, y_test); the last column of the
    preprocessed frame is taken as the label, everything else as features.
    """
    processed = get_preprocessed_df(df)

    # Last column = label, the rest = features.
    features = processed.iloc[:, :-1]
    labels = processed.iloc[:, -1]

    # Stratify on the label so both splits keep the (highly skewed) class ratio.
    return train_test_split(features, labels, test_size=0.3,
                            random_state=0, stratify=labels)

X_train, X_test, y_train, y_test = get_train_test_dataset(card_df)

 

print('학습 데이터 레이블 값 비율')
print(y_train.value_counts()/y_train.shape[0] * 100)
print('테스트 데이터 레이블 값 비율')
print(y_test.value_counts()/y_test.shape[0] * 100)



학습 데이터 레이블 값 비율
0    99.827451
1     0.172549
Name: Class, dtype: float64
테스트 데이터 레이블 값 비율
0    99.826785
1     0.173215
Name: Class, dtype: float64

 

from sklearn.metrics import confusion_matrix, accuracy_score, precision_score, recall_score, f1_score
from sklearn.metrics import roc_auc_score

# 수정된 get_clf_eval() 함수 
def get_clf_eval(y_test, pred=None, pred_proba=None):
    """Print confusion matrix, accuracy, precision, recall, F1 and ROC-AUC."""
    cm = confusion_matrix(y_test, pred)
    acc = accuracy_score(y_test, pred)
    prec = precision_score(y_test, pred)
    rec = recall_score(y_test, pred)
    f1_val = f1_score(y_test, pred)
    # ROC-AUC needs positive-class probabilities, not hard 0/1 predictions.
    auc = roc_auc_score(y_test, pred_proba)
    print('오차 행렬')
    print(cm)
    print('정확도: {0:.4f}, 정밀도: {1:.4f}, 재현율: {2:.4f},\
    F1: {3:.4f}, AUC:{4:.4f}'.format(acc, prec, rec, f1_val, auc))

 

from sklearn.linear_model import LogisticRegression

lr_clf = LogisticRegression()

lr_clf.fit(X_train, y_train)

lr_pred = lr_clf.predict(X_test)
lr_pred_proba = lr_clf.predict_proba(X_test)[:, 1]

# 3장에서 사용한 get_clf_eval() 함수를 이용하여 평가 수행. 
get_clf_eval(y_test, lr_pred, lr_pred_proba)


오차 행렬
[[85282    13]
 [   58    90]]
정확도: 0.9992, 정밀도: 0.8738, 재현율: 0.6081,    F1: 0.7171, AUC:0.9709

 

앞으로 피처 엔지니어링을 수행할 때마다 모델을 학습/예측/평가하므로 이를 위한 함수 생성

# 인자로 사이킷런의 Estimator객체와, 학습/테스트 데이터 세트를 입력 받아서 학습/예측/평가 수행.
def get_model_train_eval(model, ftr_train=None, ftr_test=None, tgt_train=None, tgt_test=None):
    """Fit `model` on the training data, predict the test set, and print the metric report."""
    model.fit(ftr_train, tgt_train)
    test_pred = model.predict(ftr_test)
    # Positive-class probability, required for the ROC-AUC part of the report.
    test_proba = model.predict_proba(ftr_test)[:, 1]
    get_clf_eval(tgt_test, test_pred, test_proba)

 

 LightGBM 학습/예측/평가

(boost_from_average가 True일 경우 레이블 값이 극도로 불균형 분포를 이루는 경우 재현률 및 ROC-AUC 성능이 매우 저하됨.)
LightGBM 2.1.0 이상 버전에서 이와 같은 현상 발생. default가 true가 되면서 오류가 많아짐 // 보통은 좋은데 imbalanced dataset에서는 불리

from lightgbm import LGBMClassifier

lgbm_clf = LGBMClassifier(n_estimators=1000, num_leaves=64, n_jobs=-1, boost_from_average=False)
get_model_train_eval(lgbm_clf, ftr_train=X_train, ftr_test=X_test, tgt_train=y_train, tgt_test=y_test)


오차 행렬
[[85289     6]
 [   36   112]]
정확도: 0.9995, 정밀도: 0.9492, 재현율: 0.7568,    F1: 0.8421, AUC:0.9797

 

중요 데이터 분포도 변환 후 모델 학습/예측/평가

중요 feature의 분포도 확인

import seaborn as sns

plt.figure(figsize=(8, 4))
plt.xticks(range(0, 30000, 1000), rotation=60)
sns.distplot(card_df['Amount'])

 

데이터 사전 가공을 위한 별도의 함수에 StandardScaler를 이용하여 Amount 피처 변환

from sklearn.preprocessing import StandardScaler

# 사이킷런의 StandardScaler를 이용하여 정규분포 형태로 Amount 피처값 변환하는 로직으로 수정. 
def get_preprocessed_df(df=None):
    """Copy df, z-score the Amount column into Amount_Scaled, drop Time/Amount."""
    prepared = df.copy()
    # Standardize Amount to zero mean / unit variance.
    scaled = StandardScaler().fit_transform(prepared['Amount'].values.reshape(-1, 1))
    # New feature goes in front; the raw Time and Amount columns go away.
    prepared.insert(0, 'Amount_Scaled', scaled)
    return prepared.drop(['Time', 'Amount'], axis=1)

 

StandardScaler 변환 후 로지스틱 회귀 및 LightGBM 학습/예측/평가

# Amount를 정규분포 형태로 변환 후 로지스틱 회귀 및 LightGBM 수행. 
X_train, X_test, y_train, y_test = get_train_test_dataset(card_df)

print('### 로지스틱 회귀 예측 성능 ###')
lr_clf = LogisticRegression()
get_model_train_eval(lr_clf, ftr_train=X_train, ftr_test=X_test, tgt_train=y_train, tgt_test=y_test)

print('### LightGBM 예측 성능 ###')
lgbm_clf = LGBMClassifier(n_estimators=1000, num_leaves=64, n_jobs=-1, boost_from_average=False)
get_model_train_eval(lgbm_clf, ftr_train=X_train, ftr_test=X_test, tgt_train=y_train, tgt_test=y_test)



### 로지스틱 회귀 예측 성능 ###
오차 행렬
[[85281    14]
 [   58    90]]
정확도: 0.9992, 정밀도: 0.8654, 재현율: 0.6081,    F1: 0.7143, AUC:0.9707
### LightGBM 예측 성능 ###
오차 행렬
[[85289     6]
 [   36   112]]
정확도: 0.9995, 정밀도: 0.9492, 재현율: 0.7568,    F1: 0.8421, AUC:0.9773

 

Amount를 로그 변환

def get_preprocessed_df(df=None):
    """Copy df, log1p-transform Amount into Amount_Scaled, drop Time/Amount."""
    prepared = df.copy()
    # log1p handles Amount == 0 safely (plain log(0) would be -inf).
    prepared.insert(0, 'Amount_Scaled', np.log1p(prepared['Amount']))
    return prepared.drop(['Time', 'Amount'], axis=1)

 

# log1p 와 expm1 설명 
import numpy as np

print(1e-1000 == 0.0)

print(np.log(1e-1000))

print(np.log(1e-1000 + 1))
print(np.log1p(1e-1000))

# True
# -inf
# 0.0
# 0.0

 

var_1 = np.log1p(100)
var_2 = np.expm1(var_1)
print(var_1, var_2)

# 4.61512051684126 100.00000000000003

 

X_train, X_test, y_train, y_test = get_train_test_dataset(card_df)

print('### 로지스틱 회귀 예측 성능 ###')
get_model_train_eval(lr_clf, ftr_train=X_train, ftr_test=X_test, tgt_train=y_train, tgt_test=y_test)

print('### LightGBM 예측 성능 ###')
get_model_train_eval(lgbm_clf, ftr_train=X_train, ftr_test=X_test, tgt_train=y_train, tgt_test=y_test)

### 로지스틱 회귀 예측 성능 ###
오차 행렬
[[85283    12]
 [   58    90]]
정확도: 0.9992, 정밀도: 0.8824, 재현율: 0.6081,    F1: 0.7200, AUC:0.9721
### LightGBM 예측 성능 ###
오차 행렬
[[85290     5]
 [   35   113]]
정확도: 0.9995, 정밀도: 0.9576, 재현율: 0.7635,    F1: 0.8496, AUC:0.9786

 

이상치 데이터 제거 후 모델 학습/예측/평가

** 각 피처들의 상관 관계를 시각화. 결정 레이블인 class 값과 가장 상관도가 높은 피처 추출 **

import seaborn as sns

plt.figure(figsize=(9, 9))
corr = card_df.corr()
sns.heatmap(corr, cmap='RdBu') # heatmap 가장 높은 상관도를 표시 파란색이 양의 상관관계, 붉은색이 음
# 가운데가 1인 것은 대각선으로 쌍, xy의 시간축이 겹쳐서 당연히 1, class와 상관관계가 높은 것은 v12, 14 등등

 Dataframe에서 outlier에 해당하는 데이터를 필터링하기 위한 함수 생성. outlier 레코드의 index를 반환함. ** 이상치 - 최대값 - 3/4 IQR - 2/4 IQR - 1/4 IQR - 최소값 - 이상치

import numpy as np

def get_outlier(df=None, column=None, weight=1.5):
    """Return the index labels of IQR outliers in `column`, among Class==1 rows only.

    Fences sit at Q1 - weight*IQR and Q3 + weight*IQR; rows strictly outside
    either fence are reported.
    """
    # Only the fraud (Class == 1) rows are inspected.
    fraud = df[df['Class'] == 1][column]
    q1, q3 = np.percentile(fraud.values, [25, 75])

    # Fence distance: weight * inter-quartile range.
    fence = (q3 - q1) * weight
    lower = q1 - fence
    upper = q3 + fence

    # Index labels of the rows falling outside the fences.
    return fraud[(fraud < lower) | (fraud > upper)].index

 

#np.percentile(card_df['V14'].values, 100)
np.max(card_df['V14']) # 같은 계산 결과

# 10.5267660517847

 

outlier_index = get_outlier(df=card_df, column='V14', weight=1.5)
print('이상치 데이터 인덱스:', outlier_index)
# 이상치가 4개 나옴

# 이상치 데이터 인덱스: Int64Index([8296, 8615, 9035, 9252], dtype='int64')

 

로그 변환 후 V14 피처의 이상치 데이터를 삭제한 뒤 모델들을 재 학습/예측/평가

# get_processed_df( )를 로그 변환 후 V14 피처의 이상치 데이터를 삭제하는 로직으로 변경. 
def get_preprocessed_df(df=None):
    """Copy df, log-scale Amount, drop Time/Amount, then remove V14 IQR-outlier rows."""
    prepared = df.copy()
    prepared.insert(0, 'Amount_Scaled', np.log1p(prepared['Amount']))
    prepared.drop(['Time', 'Amount'], axis=1, inplace=True)

    # Drop the rows flagged as V14 outliers among the fraud records.
    bad_rows = get_outlier(df=prepared, column='V14', weight=1.5)
    return prepared.drop(bad_rows, axis=0)

X_train, X_test, y_train, y_test = get_train_test_dataset(card_df)

print('### 로지스틱 회귀 예측 성능 ###')
get_model_train_eval(lr_clf, ftr_train=X_train, ftr_test=X_test, tgt_train=y_train, tgt_test=y_test)

print('### LightGBM 예측 성능 ###')
get_model_train_eval(lgbm_clf, ftr_train=X_train, ftr_test=X_test, tgt_train=y_train, tgt_test=y_test)
# 이전에 비해 높아짐




### 로지스틱 회귀 예측 성능 ###
오차 행렬
[[85282    13]
 [   48    98]]
정확도: 0.9993, 정밀도: 0.8829, 재현율: 0.6712,    F1: 0.7626, AUC:0.9747
### LightGBM 예측 성능 ###
오차 행렬
[[85291     4]
 [   25   121]]
정확도: 0.9997, 정밀도: 0.9680, 재현율: 0.8288,    F1: 0.8930, AUC:0.9831

 

SMOTE 오버 샘플링 적용 후 모델 학습/예측/평가

# conda install -c conda-forge imbalanced-learn
from imblearn.over_sampling import SMOTE

smote = SMOTE(random_state=0)
# BUGFIX: fit_sample() was removed in imbalanced-learn 0.8; fit_resample()
# is the supported name (available since 0.4) and behaves identically.
X_train_over, y_train_over = smote.fit_resample(X_train, y_train)
print('SMOTE 적용 전 학습용 피처/레이블 데이터 세트: ', X_train.shape, y_train.shape)
print('SMOTE 적용 후 학습용 피처/레이블 데이터 세트: ', X_train_over.shape, y_train_over.shape)
print('SMOTE 적용 후 레이블 값 분포: \n', pd.Series(y_train_over).value_counts())


SMOTE 적용 전 학습용 피처/레이블 데이터 세트:  (199362, 29) (199362,)
SMOTE 적용 후 학습용 피처/레이블 데이터 세트:  (398040, 29) (398040,)
SMOTE 적용 후 레이블 값 분포: 
 1    199020
0    199020
dtype: int64

 

y_train.value_counts()

0    199020
1       342
Name: Class, dtype: int64

 

로지스틱 회귀로 학습/예측/평가

lr_clf = LogisticRegression()
# ftr_train과 tgt_train 인자값이 SMOTE 증식된 X_train_over와 y_train_over로 변경됨에 유의
get_model_train_eval(lr_clf, ftr_train=X_train_over, ftr_test=X_test, tgt_train=y_train_over, tgt_test=y_test)
# 이모델은 정밀도, f1 이 너무 낮아서 적용 불가


오차 행렬
[[82932  2363]
 [   11   135]]
정확도: 0.9722, 정밀도: 0.0540, 재현율: 0.9247,    F1: 0.1021, AUC:0.9737

 

Precision-Recall 곡선 시각화

import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
from sklearn.metrics import precision_recall_curve
%matplotlib inline

def precision_recall_curve_plot(y_test , pred_proba_c1):
    """Plot precision and recall as functions of the decision threshold.

    y_test        : true binary labels.
    pred_proba_c1 : predicted probability of the positive class.
    """
    prec, rec, thr = precision_recall_curve(y_test, pred_proba_c1)
    # precisions/recalls carry one extra trailing entry; trim to the thresholds.
    n_thr = thr.shape[0]

    plt.figure(figsize=(8, 6))
    # Precision dashed, recall solid, both versus the threshold value.
    plt.plot(thr, prec[:n_thr], linestyle='--', label='precision')
    plt.plot(thr, rec[:n_thr], label='recall')

    # Tick the x-axis every 0.1 across the current limits.
    lo, hi = plt.xlim()
    plt.xticks(np.round(np.arange(lo, hi, 0.1), 2))

    plt.xlabel('Threshold value')
    plt.ylabel('Precision and Recall value')
    plt.legend()
    plt.grid()
    plt.show()

 

# Visualize the precision/recall trade-off of the oversampled logistic model.
precision_recall_curve_plot( y_test, lr_clf.predict_proba(X_test)[:, 1] )
# Normally the two curves cross in an X shape; here they are badly unbalanced.

 

LightGBM 모델 적용

# LightGBM on the SMOTE data; boost_from_average=False helps with extreme class imbalance.
lgbm_clf = LGBMClassifier(n_estimators=1000, num_leaves=64, n_jobs=-1, boost_from_average=False)
get_model_train_eval(lgbm_clf, ftr_train=X_train_over, ftr_test=X_test,
                  tgt_train=y_train_over, tgt_test=y_test)
# Recall matters most here, so a small precision drop is accepted for higher recall.


오차 행렬
[[85287     8]
 [   22   124]]
정확도: 0.9996, 정밀도: 0.9394, 재현율: 0.8493,    F1: 0.8921, AUC:0.9778
반응형
728x90
반응형

데이터 전처리

import numpy as np 
import pandas as pd 
import matplotlib.pyplot as plt
import matplotlib

# Santander customer-satisfaction dataset; latin-1 decodes its non-UTF-8 bytes.
cust_df = pd.read_csv("./train_santander.csv",encoding='latin-1')
print('dataset shape:', cust_df.shape)
cust_df.head(3)
# The last column is the target: 0 = satisfied, 1 = unsatisfied.

dataset shape: (76020, 371)

	ID	var3	var15	imp_ent_var16_ult1	imp_op_var39_comer_ult1	imp_op_var39_comer_ult3	imp_op_var40_comer_ult1	imp_op_var40_comer_ult3	imp_op_var40_efect_ult1	imp_op_var40_efect_ult3	...	saldo_medio_var33_hace2	saldo_medio_var33_hace3	saldo_medio_var33_ult1	saldo_medio_var33_ult3	saldo_medio_var44_hace2	saldo_medio_var44_hace3	saldo_medio_var44_ult1	saldo_medio_var44_ult3	var38	TARGET
0	1	2	23	0.0	0.0	0.0	0.0	0.0	0.0	0.0	...	0.0	0.0	0.0	0.0	0.0	0.0	0.0	0.0	39205.17	0
1	3	2	34	0.0	0.0	0.0	0.0	0.0	0.0	0.0	...	0.0	0.0	0.0	0.0	0.0	0.0	0.0	0.0	49278.03	0
2	4	2	23	0.0	0.0	0.0	0.0	0.0	0.0	0.0	...	0.0	0.0	0.0	0.0	0.0	0.0	0.0	0.0	67333.77	0
3 rows × 371 columns

 

cust_df.info()
# With 371 columns pandas truncates the per-column summary, so null counts are not listed.

<class 'pandas.core.frame.DataFrame'>
RangeIndex: 76020 entries, 0 to 76019
Columns: 371 entries, ID to TARGET
dtypes: float64(111), int64(260)
memory usage: 215.2 MB

 

# Check how imbalanced the TARGET label is.
print(cust_df['TARGET'].value_counts())  # class 0 dominates by far

# Fraction of unsatisfied customers (TARGET == 1) out of all rows.
total_cnt = cust_df['TARGET'].count()
unsatisfied_cnt = (cust_df['TARGET'] == 1).sum()

print('unsatisfied 비율은 {0:.2f}'.format((unsatisfied_cnt / total_cnt)))

0    73012
1     3008
Name: TARGET, dtype: int64
unsatisfied 비율은 0.04

 

cust_df.describe( )
# var3's min of -999999 is far from its other values — very likely a NaN
# placeholder written back consistently; it needs cleaning before modeling.

	ID	var3	var15	imp_ent_var16_ult1	imp_op_var39_comer_ult1	imp_op_var39_comer_ult3	imp_op_var40_comer_ult1	imp_op_var40_comer_ult3	imp_op_var40_efect_ult1	imp_op_var40_efect_ult3	...	saldo_medio_var33_hace2	saldo_medio_var33_hace3	saldo_medio_var33_ult1	saldo_medio_var33_ult3	saldo_medio_var44_hace2	saldo_medio_var44_hace3	saldo_medio_var44_ult1	saldo_medio_var44_ult3	var38	TARGET
count	76020.000000	76020.000000	76020.000000	76020.000000	76020.000000	76020.000000	76020.000000	76020.000000	76020.000000	76020.000000	...	76020.000000	76020.000000	76020.000000	76020.000000	76020.000000	76020.000000	76020.000000	76020.000000	7.602000e+04	76020.000000
mean	75964.050723	-1523.199277	33.212865	86.208265	72.363067	119.529632	3.559130	6.472698	0.412946	0.567352	...	7.935824	1.365146	12.215580	8.784074	31.505324	1.858575	76.026165	56.614351	1.172358e+05	0.039569
std	43781.947379	39033.462364	12.956486	1614.757313	339.315831	546.266294	93.155749	153.737066	30.604864	36.513513	...	455.887218	113.959637	783.207399	538.439211	2013.125393	147.786584	4040.337842	2852.579397	1.826646e+05	0.194945
min	1.000000	-999999.000000	5.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	...	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	5.163750e+03	0.000000
25%	38104.750000	2.000000	23.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	...	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	6.787061e+04	0.000000
50%	76043.000000	2.000000	28.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	...	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	1.064092e+05	0.000000
75%	113748.750000	2.000000	40.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	...	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	0.000000	1.187563e+05	0.000000
max	151838.000000	238.000000	105.000000	210000.000000	12888.030000	21024.810000	8237.820000	11073.570000	6600.000000	6600.000000	...	50003.880000	20385.720000	138831.630000	91778.730000	438329.220000	24650.010000	681462.900000	397884.300000	2.203474e+07	1.000000
8 rows × 371 columns

 

# Inspect the top-10 var3 values: the outlier -999999 appears 116 times and
# would distort the feature, so it must be replaced.
print(cust_df['var3'].value_counts( )[:10])

 2         74165
 8           138
-999999      116
 9           110
 3           108
 1           105
 13           98
 7            97
 4            86
 12           85
Name: var3, dtype: int64

 

# Replace var3's -999999 placeholder with the mode value (2) and drop the ID
# column, which carries no predictive signal.
# Assigning the result back avoids the chained-assignment pitfall of
# replace(..., inplace=True) on a column selection (a warning/no-op under
# pandas 2.x copy-on-write).
cust_df['var3'] = cust_df['var3'].replace(-999999, 2)
cust_df.drop('ID', axis=1, inplace=True)

# Split features and label; the label is the DataFrame's last column, hence -1.
X_features = cust_df.iloc[:, :-1]
y_labels = cust_df.iloc[:, -1]
print('피처 데이터 shape:{0}'.format(X_features.shape))

# 피처 데이터 shape:(76020, 369)

 

from sklearn.model_selection import train_test_split

# Stratify on the label so train/test keep the same ~96:4 class ratio;
# without it the rare positive class splits unevenly (0.039 vs 0.042 below).
X_train, X_test, y_train, y_test = train_test_split(X_features, y_labels,
                                                    test_size=0.2, random_state=0,
                                                    stratify=y_labels)

train_cnt = y_train.count()
test_cnt = y_test.count()
print('학습 세트 Shape:{0}, 테스트 세트 Shape:{1}'.format(X_train.shape , X_test.shape))

# Verify the label distribution is nearly identical in both splits.
print(' 학습 세트 레이블 값 분포 비율')
print(y_train.value_counts()/train_cnt)
print('\n 테스트 세트 레이블 값 분포 비율')
print(y_test.value_counts()/test_cnt)


학습 세트 Shape:(60816, 369), 테스트 세트 Shape:(15204, 369)
 학습 세트 레이블 값 분포 비율
0    0.960964
1    0.039036
Name: TARGET, dtype: float64

 테스트 세트 레이블 값 분포 비율
0    0.9583
1    0.0417
Name: TARGET, dtype: float64

 

from xgboost import XGBClassifier
from sklearn.metrics import roc_auc_score

# n_estimators=500; random_state fixed so repeated runs give identical results.
xgb_clf = XGBClassifier(n_estimators=500, random_state=156)

# Train with AUC as the eval metric, stopping early after 100 rounds without
# improvement on the last eval set.
# NOTE(review): using (X_test, y_test) as the early-stopping eval set leaks
# test information into training; done here only because positives are too
# scarce to carve out a separate validation set.
xgb_clf.fit(X_train, y_train, early_stopping_rounds=100,
            eval_metric="auc", eval_set=[(X_train, y_train), (X_test, y_test)])

xgb_roc_score = roc_auc_score(y_test, xgb_clf.predict_proba(X_test)[:,1],average='macro')
print('ROC AUC: {0:.4f}'.format(xgb_roc_score))

# Training proceeds in the direction that maximizes validation AUC.

[0]	validation_0-auc:0.799928	validation_1-auc:0.803548
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 100 rounds.
[1]	validation_0-auc:0.802222	validation_1-auc:0.805222
[2]	validation_0-auc:0.80819	validation_1-auc:0.813162
[3]	validation_0-auc:0.8127	validation_1-auc:0.813243
[4]	validation_0-auc:0.81648	validation_1-auc:0.816979
[5]	validation_0-auc:0.816018	validation_1-auc:0.816629
[6]	validation_0-auc:0.816474	validation_1-auc:0.817776
[7]	validation_0-auc:0.818148	validation_1-auc:0.818464
[8]	validation_0-auc:0.81806	validation_1-auc:0.818295
[9]	validation_0-auc:0.817039	validation_1-auc:0.818087
[10]	validation_0-auc:0.818318	validation_1-auc:0.818749
[11]	validation_0-auc:0.818711	validation_1-auc:0.818521
[12]	validation_0-auc:0.818673	validation_1-auc:0.818516
[13]	validation_0-auc:0.819156	validation_1-auc:0.818998
[14]	validation_0-auc:0.819847	validation_1-auc:0.81999
[15]	validation_0-auc:0.822152	validation_1-auc:0.821584
[16]	validation_0-auc:0.822529	validation_1-auc:0.821275
[17]	validation_0-auc:0.822404	validation_1-auc:0.821602
[18]	validation_0-auc:0.825227	validation_1-auc:0.82523
[19]	validation_0-auc:0.82554	validation_1-auc:0.824671
[20]	validation_0-auc:0.826569	validation_1-auc:0.825722
[21]	validation_0-auc:0.827255	validation_1-auc:0.825777
[22]	validation_0-auc:0.82805	validation_1-auc:0.826496
[23]	validation_0-auc:0.827807	validation_1-auc:0.826436
[24]	validation_0-auc:0.828647	validation_1-auc:0.826735
[25]	validation_0-auc:0.830001	validation_1-auc:0.827854
[26]	validation_0-auc:0.83056	validation_1-auc:0.828164
[27]	validation_0-auc:0.830946	validation_1-auc:0.828579
[28]	validation_0-auc:0.831843	validation_1-auc:0.829661
[29]	validation_0-auc:0.833095	validation_1-auc:0.830722
[30]	validation_0-auc:0.833537	validation_1-auc:0.83108
[31]	validation_0-auc:0.834132	validation_1-auc:0.830665
[32]	validation_0-auc:0.83448	validation_1-auc:0.831507
[33]	validation_0-auc:0.83477	validation_1-auc:0.831309
[34]	validation_0-auc:0.835454	validation_1-auc:0.831417
[35]	validation_0-auc:0.83601	validation_1-auc:0.831663
[36]	validation_0-auc:0.836456	validation_1-auc:0.831942
[37]	validation_0-auc:0.836577	validation_1-auc:0.832411
[38]	validation_0-auc:0.836988	validation_1-auc:0.832691
[39]	validation_0-auc:0.837236	validation_1-auc:0.833003
[40]	validation_0-auc:0.837868	validation_1-auc:0.83271
[41]	validation_0-auc:0.838638	validation_1-auc:0.833271
[42]	validation_0-auc:0.839178	validation_1-auc:0.83321
[43]	validation_0-auc:0.839711	validation_1-auc:0.833446
[44]	validation_0-auc:0.840394	validation_1-auc:0.834412
[45]	validation_0-auc:0.841118	validation_1-auc:0.834399
[46]	validation_0-auc:0.841513	validation_1-auc:0.834504
[47]	validation_0-auc:0.842197	validation_1-auc:0.835669
[48]	validation_0-auc:0.842644	validation_1-auc:0.835846
[49]	validation_0-auc:0.843012	validation_1-auc:0.835977
[50]	validation_0-auc:0.843695	validation_1-auc:0.836633
[51]	validation_0-auc:0.84425	validation_1-auc:0.83715
[52]	validation_0-auc:0.844535	validation_1-auc:0.837182
[53]	validation_0-auc:0.84508	validation_1-auc:0.837583
[54]	validation_0-auc:0.845616	validation_1-auc:0.838102
[55]	validation_0-auc:0.84602	validation_1-auc:0.837964
[56]	validation_0-auc:0.846645	validation_1-auc:0.838467
[57]	validation_0-auc:0.846941	validation_1-auc:0.838353
[58]	validation_0-auc:0.847311	validation_1-auc:0.838367
[59]	validation_0-auc:0.847784	validation_1-auc:0.838421
[60]	validation_0-auc:0.848181	validation_1-auc:0.838609
[61]	validation_0-auc:0.848445	validation_1-auc:0.838766
[62]	validation_0-auc:0.848953	validation_1-auc:0.838939
[63]	validation_0-auc:0.849405	validation_1-auc:0.839378
[64]	validation_0-auc:0.849601	validation_1-auc:0.839421
[65]	validation_0-auc:0.849941	validation_1-auc:0.839629
[66]	validation_0-auc:0.850161	validation_1-auc:0.839715
[67]	validation_0-auc:0.850322	validation_1-auc:0.839662
[68]	validation_0-auc:0.850512	validation_1-auc:0.839569
[69]	validation_0-auc:0.850868	validation_1-auc:0.839678
[70]	validation_0-auc:0.850981	validation_1-auc:0.839758
[71]	validation_0-auc:0.851145	validation_1-auc:0.839931
[72]	validation_0-auc:0.851305	validation_1-auc:0.839947
[73]	validation_0-auc:0.851522	validation_1-auc:0.84007
[74]	validation_0-auc:0.851667	validation_1-auc:0.840082
[75]	validation_0-auc:0.851955	validation_1-auc:0.840037
[76]	validation_0-auc:0.852125	validation_1-auc:0.840091
[77]	validation_0-auc:0.852316	validation_1-auc:0.839871
[78]	validation_0-auc:0.852587	validation_1-auc:0.840114
[79]	validation_0-auc:0.852811	validation_1-auc:0.840282
[80]	validation_0-auc:0.853092	validation_1-auc:0.840574
[81]	validation_0-auc:0.853296	validation_1-auc:0.84075
[82]	validation_0-auc:0.853443	validation_1-auc:0.840738
[83]	validation_0-auc:0.8536	validation_1-auc:0.840735
[84]	validation_0-auc:0.853727	validation_1-auc:0.840904
[85]	validation_0-auc:0.853878	validation_1-auc:0.840985
[86]	validation_0-auc:0.853998	validation_1-auc:0.841019
[87]	validation_0-auc:0.854281	validation_1-auc:0.840802
[88]	validation_0-auc:0.85445	validation_1-auc:0.841042
[89]	validation_0-auc:0.854524	validation_1-auc:0.841086
[90]	validation_0-auc:0.854656	validation_1-auc:0.841144
[91]	validation_0-auc:0.854908	validation_1-auc:0.841263
[92]	validation_0-auc:0.855061	validation_1-auc:0.841208
[93]	validation_0-auc:0.855142	validation_1-auc:0.841265
[94]	validation_0-auc:0.855326	validation_1-auc:0.84131
[95]	validation_0-auc:0.855442	validation_1-auc:0.841254
[96]	validation_0-auc:0.855568	validation_1-auc:0.841282
[97]	validation_0-auc:0.855671	validation_1-auc:0.841346
[98]	validation_0-auc:0.855955	validation_1-auc:0.841386
[99]	validation_0-auc:0.856271	validation_1-auc:0.841459
[100]	validation_0-auc:0.856386	validation_1-auc:0.841469
[101]	validation_0-auc:0.856627	validation_1-auc:0.841492
[102]	validation_0-auc:0.856774	validation_1-auc:0.841468
[103]	validation_0-auc:0.856898	validation_1-auc:0.841253
[104]	validation_0-auc:0.857145	validation_1-auc:0.841343
[105]	validation_0-auc:0.857279	validation_1-auc:0.841249
[106]	validation_0-auc:0.857362	validation_1-auc:0.841119
[107]	validation_0-auc:0.857423	validation_1-auc:0.841105
[108]	validation_0-auc:0.857487	validation_1-auc:0.841122
[109]	validation_0-auc:0.857658	validation_1-auc:0.841165
[110]	validation_0-auc:0.857734	validation_1-auc:0.8412
[111]	validation_0-auc:0.85792	validation_1-auc:0.841214
[112]	validation_0-auc:0.858102	validation_1-auc:0.841162
[113]	validation_0-auc:0.858246	validation_1-auc:0.841156
[114]	validation_0-auc:0.858382	validation_1-auc:0.841204
[115]	validation_0-auc:0.85846	validation_1-auc:0.841295
[116]	validation_0-auc:0.858562	validation_1-auc:0.841256
[117]	validation_0-auc:0.858616	validation_1-auc:0.84133
[118]	validation_0-auc:0.858718	validation_1-auc:0.841398
[119]	validation_0-auc:0.858829	validation_1-auc:0.841496
[120]	validation_0-auc:0.859012	validation_1-auc:0.841352
[121]	validation_0-auc:0.859142	validation_1-auc:0.841349
[122]	validation_0-auc:0.859203	validation_1-auc:0.841364
[123]	validation_0-auc:0.859284	validation_1-auc:0.84141
[124]	validation_0-auc:0.859563	validation_1-auc:0.841393
[125]	validation_0-auc:0.859639	validation_1-auc:0.84149
[126]	validation_0-auc:0.859759	validation_1-auc:0.841484
[127]	validation_0-auc:0.859957	validation_1-auc:0.841333
[128]	validation_0-auc:0.860087	validation_1-auc:0.841352
[129]	validation_0-auc:0.860211	validation_1-auc:0.841287
[130]	validation_0-auc:0.860261	validation_1-auc:0.841392
[131]	validation_0-auc:0.860444	validation_1-auc:0.841481
[132]	validation_0-auc:0.860453	validation_1-auc:0.841462
[133]	validation_0-auc:0.860607	validation_1-auc:0.841342
[134]	validation_0-auc:0.860749	validation_1-auc:0.84124
[135]	validation_0-auc:0.86094	validation_1-auc:0.841318
[136]	validation_0-auc:0.861021	validation_1-auc:0.841351
[137]	validation_0-auc:0.861131	validation_1-auc:0.841311
[138]	validation_0-auc:0.861229	validation_1-auc:0.841289
[139]	validation_0-auc:0.861279	validation_1-auc:0.841295
[140]	validation_0-auc:0.861331	validation_1-auc:0.841265
[141]	validation_0-auc:0.861418	validation_1-auc:0.841259
[142]	validation_0-auc:0.861553	validation_1-auc:0.841335
[143]	validation_0-auc:0.861682	validation_1-auc:0.841346
[144]	validation_0-auc:0.86169	validation_1-auc:0.841403
[145]	validation_0-auc:0.861852	validation_1-auc:0.841299
[146]	validation_0-auc:0.861898	validation_1-auc:0.841301
[147]	validation_0-auc:0.861998	validation_1-auc:0.841289
[148]	validation_0-auc:0.862068	validation_1-auc:0.84135
[149]	validation_0-auc:0.862132	validation_1-auc:0.841444
[150]	validation_0-auc:0.862236	validation_1-auc:0.841409
[151]	validation_0-auc:0.862314	validation_1-auc:0.841459
[152]	validation_0-auc:0.862584	validation_1-auc:0.841456
[153]	validation_0-auc:0.862843	validation_1-auc:0.841483
[154]	validation_0-auc:0.863033	validation_1-auc:0.841493
[155]	validation_0-auc:0.863132	validation_1-auc:0.841534
[156]	validation_0-auc:0.863423	validation_1-auc:0.841728
[157]	validation_0-auc:0.863578	validation_1-auc:0.841712
[158]	validation_0-auc:0.863872	validation_1-auc:0.841677
[159]	validation_0-auc:0.863924	validation_1-auc:0.841658
[160]	validation_0-auc:0.863985	validation_1-auc:0.841608
[161]	validation_0-auc:0.864019	validation_1-auc:0.841646
[162]	validation_0-auc:0.864049	validation_1-auc:0.841665
[163]	validation_0-auc:0.864148	validation_1-auc:0.841682
[164]	validation_0-auc:0.864221	validation_1-auc:0.841791
[165]	validation_0-auc:0.86426	validation_1-auc:0.841732
[166]	validation_0-auc:0.864309	validation_1-auc:0.841688
[167]	validation_0-auc:0.864411	validation_1-auc:0.841699
[168]	validation_0-auc:0.864581	validation_1-auc:0.841711
[169]	validation_0-auc:0.864619	validation_1-auc:0.841729
[170]	validation_0-auc:0.864709	validation_1-auc:0.841684
[171]	validation_0-auc:0.864849	validation_1-auc:0.841704
[172]	validation_0-auc:0.865047	validation_1-auc:0.841614
[173]	validation_0-auc:0.865085	validation_1-auc:0.84162
[174]	validation_0-auc:0.865321	validation_1-auc:0.841734
[175]	validation_0-auc:0.865523	validation_1-auc:0.8418
[176]	validation_0-auc:0.865766	validation_1-auc:0.841706
[177]	validation_0-auc:0.865925	validation_1-auc:0.841829
[178]	validation_0-auc:0.866004	validation_1-auc:0.841863
[179]	validation_0-auc:0.866111	validation_1-auc:0.841834
[180]	validation_0-auc:0.866196	validation_1-auc:0.841856
[181]	validation_0-auc:0.866366	validation_1-auc:0.841826
[182]	validation_0-auc:0.86643	validation_1-auc:0.841757
[183]	validation_0-auc:0.866606	validation_1-auc:0.841636
[184]	validation_0-auc:0.866827	validation_1-auc:0.8415
[185]	validation_0-auc:0.86688	validation_1-auc:0.841593
[186]	validation_0-auc:0.866935	validation_1-auc:0.841556
[187]	validation_0-auc:0.867004	validation_1-auc:0.84152
[188]	validation_0-auc:0.867048	validation_1-auc:0.841506
[189]	validation_0-auc:0.867219	validation_1-auc:0.841506
[190]	validation_0-auc:0.867274	validation_1-auc:0.841485
[191]	validation_0-auc:0.867332	validation_1-auc:0.841434
[192]	validation_0-auc:0.867393	validation_1-auc:0.841442
[193]	validation_0-auc:0.867493	validation_1-auc:0.841402
[194]	validation_0-auc:0.867554	validation_1-auc:0.841418
[195]	validation_0-auc:0.867612	validation_1-auc:0.841306
[196]	validation_0-auc:0.867653	validation_1-auc:0.841299
[197]	validation_0-auc:0.867884	validation_1-auc:0.841215
[198]	validation_0-auc:0.86792	validation_1-auc:0.841233
[199]	validation_0-auc:0.868009	validation_1-auc:0.841232
[200]	validation_0-auc:0.868041	validation_1-auc:0.841281
[201]	validation_0-auc:0.868068	validation_1-auc:0.841247
[202]	validation_0-auc:0.868219	validation_1-auc:0.841184
[203]	validation_0-auc:0.868387	validation_1-auc:0.841062
[204]	validation_0-auc:0.868541	validation_1-auc:0.841111
[205]	validation_0-auc:0.868656	validation_1-auc:0.84118
[206]	validation_0-auc:0.86874	validation_1-auc:0.8411
[207]	validation_0-auc:0.86877	validation_1-auc:0.841171
[208]	validation_0-auc:0.869001	validation_1-auc:0.841108
[209]	validation_0-auc:0.869076	validation_1-auc:0.841072
[210]	validation_0-auc:0.869226	validation_1-auc:0.841046
[211]	validation_0-auc:0.869248	validation_1-auc:0.841028
[212]	validation_0-auc:0.86931	validation_1-auc:0.840946
[213]	validation_0-auc:0.869368	validation_1-auc:0.84097
[214]	validation_0-auc:0.869392	validation_1-auc:0.840956
[215]	validation_0-auc:0.869424	validation_1-auc:0.840972
[216]	validation_0-auc:0.869623	validation_1-auc:0.841031
[217]	validation_0-auc:0.869782	validation_1-auc:0.841065
[218]	validation_0-auc:0.869946	validation_1-auc:0.841163
[219]	validation_0-auc:0.870121	validation_1-auc:0.841116
[220]	validation_0-auc:0.870264	validation_1-auc:0.84101
[221]	validation_0-auc:0.87038	validation_1-auc:0.840953
[222]	validation_0-auc:0.870521	validation_1-auc:0.841005
[223]	validation_0-auc:0.870696	validation_1-auc:0.8411
[224]	validation_0-auc:0.870835	validation_1-auc:0.841151
[225]	validation_0-auc:0.870946	validation_1-auc:0.841051
[226]	validation_0-auc:0.870993	validation_1-auc:0.841012
[227]	validation_0-auc:0.871048	validation_1-auc:0.841054
[228]	validation_0-auc:0.871088	validation_1-auc:0.841005
[229]	validation_0-auc:0.871115	validation_1-auc:0.84093
[230]	validation_0-auc:0.871159	validation_1-auc:0.840939
[231]	validation_0-auc:0.871225	validation_1-auc:0.840889
[232]	validation_0-auc:0.871284	validation_1-auc:0.840894
[233]	validation_0-auc:0.871308	validation_1-auc:0.840923
[234]	validation_0-auc:0.871376	validation_1-auc:0.841058
[235]	validation_0-auc:0.871439	validation_1-auc:0.841035
[236]	validation_0-auc:0.871638	validation_1-auc:0.841187
[237]	validation_0-auc:0.871762	validation_1-auc:0.841209
[238]	validation_0-auc:0.87184	validation_1-auc:0.841378
[239]	validation_0-auc:0.871884	validation_1-auc:0.841406
[240]	validation_0-auc:0.871987	validation_1-auc:0.841472
[241]	validation_0-auc:0.872169	validation_1-auc:0.841411
[242]	validation_0-auc:0.872259	validation_1-auc:0.841359
[243]	validation_0-auc:0.872465	validation_1-auc:0.841397
[244]	validation_0-auc:0.872518	validation_1-auc:0.84136
[245]	validation_0-auc:0.872531	validation_1-auc:0.841404
[246]	validation_0-auc:0.872555	validation_1-auc:0.841325
[247]	validation_0-auc:0.872693	validation_1-auc:0.841303
[248]	validation_0-auc:0.872905	validation_1-auc:0.841331
[249]	validation_0-auc:0.872917	validation_1-auc:0.841334
[250]	validation_0-auc:0.872945	validation_1-auc:0.841375
[251]	validation_0-auc:0.873042	validation_1-auc:0.841281
[252]	validation_0-auc:0.873125	validation_1-auc:0.841266
[253]	validation_0-auc:0.87328	validation_1-auc:0.841185
[254]	validation_0-auc:0.873296	validation_1-auc:0.841173
[255]	validation_0-auc:0.87335	validation_1-auc:0.841161
[256]	validation_0-auc:0.873377	validation_1-auc:0.841136
[257]	validation_0-auc:0.873481	validation_1-auc:0.841035
[258]	validation_0-auc:0.873558	validation_1-auc:0.840996
[259]	validation_0-auc:0.873674	validation_1-auc:0.840999
[260]	validation_0-auc:0.873779	validation_1-auc:0.841058
[261]	validation_0-auc:0.874002	validation_1-auc:0.841059
[262]	validation_0-auc:0.874125	validation_1-auc:0.841093
[263]	validation_0-auc:0.874144	validation_1-auc:0.841091
[264]	validation_0-auc:0.87417	validation_1-auc:0.841116
[265]	validation_0-auc:0.874196	validation_1-auc:0.841127
[266]	validation_0-auc:0.874238	validation_1-auc:0.841082
[267]	validation_0-auc:0.874271	validation_1-auc:0.841138
[268]	validation_0-auc:0.874298	validation_1-auc:0.841147
[269]	validation_0-auc:0.874343	validation_1-auc:0.841167
[270]	validation_0-auc:0.874417	validation_1-auc:0.841146
[271]	validation_0-auc:0.87444	validation_1-auc:0.841165
[272]	validation_0-auc:0.874479	validation_1-auc:0.841174
[273]	validation_0-auc:0.874482	validation_1-auc:0.841185
[274]	validation_0-auc:0.874647	validation_1-auc:0.841277
[275]	validation_0-auc:0.8747	validation_1-auc:0.841295
[276]	validation_0-auc:0.874743	validation_1-auc:0.841319
[277]	validation_0-auc:0.874795	validation_1-auc:0.841353
[278]	validation_0-auc:0.874858	validation_1-auc:0.841289
Stopping. Best iteration:
[178]	validation_0-auc:0.866004	validation_1-auc:0.841863

ROC AUC: 0.8419

 

from sklearn.model_selection import GridSearchCV
# GridSearchCV is very slow on a single PC; serious sweeps want many cores.
# n_estimators reduced to 100 to speed up the hyper-parameter search.
xgb_clf = XGBClassifier(n_estimators=100)
# colsample_bytree samples a subset of columns for each tree.
# A well-tuned boosting algorithm rarely gains much AUC from further parameter
# tweaks; feature re-engineering (column mixing, outlier handling, normalizing
# distributions) usually pays off more.
params = {'max_depth':[5, 7] , 'min_child_weight':[1,3] ,'colsample_bytree':[0.5, 0.75] }

# cv is left at GridSearchCV's default (cross-validation still runs) to keep
# the search fast, and early_stopping_rounds is lowered to 30 for the same reason.
gridcv = GridSearchCV(xgb_clf, param_grid=params)
gridcv.fit(X_train, y_train, early_stopping_rounds=30, eval_metric="auc",
           eval_set=[(X_train, y_train), (X_test, y_test)])

print('GridSearchCV 최적 파라미터:',gridcv.best_params_) 

xgb_roc_score = roc_auc_score(y_test, gridcv.predict_proba(X_test)[:,1], average='macro')
print('ROC AUC: {0:.4f}'.format(xgb_roc_score))


[0]	validation_0-auc:0.715421	validation_1-auc:0.722463
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.802656	validation_1-auc:0.807919
[2]	validation_0-auc:0.80013	validation_1-auc:0.804948
[3]	validation_0-auc:0.805843	validation_1-auc:0.809844
[4]	validation_0-auc:0.814728	validation_1-auc:0.817554
[5]	validation_0-auc:0.80989	validation_1-auc:0.812919
[6]	validation_0-auc:0.80994	validation_1-auc:0.813688
[7]	validation_0-auc:0.818246	validation_1-auc:0.820487
[8]	validation_0-auc:0.821681	validation_1-auc:0.823214
[9]	validation_0-auc:0.819933	validation_1-auc:0.821108
[10]	validation_0-auc:0.825898	validation_1-auc:0.825134
[11]	validation_0-auc:0.829776	validation_1-auc:0.827521
[12]	validation_0-auc:0.832087	validation_1-auc:0.828878
[13]	validation_0-auc:0.83185	validation_1-auc:0.827989
[14]	validation_0-auc:0.830253	validation_1-auc:0.826429
[15]	validation_0-auc:0.832652	validation_1-auc:0.828435
[16]	validation_0-auc:0.836279	validation_1-auc:0.83053
[17]	validation_0-auc:0.839277	validation_1-auc:0.833257
[18]	validation_0-auc:0.84086	validation_1-auc:0.83424
[19]	validation_0-auc:0.839642	validation_1-auc:0.833377
[20]	validation_0-auc:0.841644	validation_1-auc:0.834392
[21]	validation_0-auc:0.841097	validation_1-auc:0.833687
[22]	validation_0-auc:0.840319	validation_1-auc:0.833081
[23]	validation_0-auc:0.842612	validation_1-auc:0.835459
[24]	validation_0-auc:0.844954	validation_1-auc:0.836996
[25]	validation_0-auc:0.844977	validation_1-auc:0.835418
[26]	validation_0-auc:0.844566	validation_1-auc:0.835109
[27]	validation_0-auc:0.846626	validation_1-auc:0.836624
[28]	validation_0-auc:0.848197	validation_1-auc:0.837739
[29]	validation_0-auc:0.847941	validation_1-auc:0.838148
[30]	validation_0-auc:0.849192	validation_1-auc:0.839023
[31]	validation_0-auc:0.850516	validation_1-auc:0.840162
[32]	validation_0-auc:0.851015	validation_1-auc:0.839851
[33]	validation_0-auc:0.851686	validation_1-auc:0.839677
[34]	validation_0-auc:0.851543	validation_1-auc:0.838906
[35]	validation_0-auc:0.853218	validation_1-auc:0.839483
[36]	validation_0-auc:0.85443	validation_1-auc:0.840226
[37]	validation_0-auc:0.85449	validation_1-auc:0.839535
[38]	validation_0-auc:0.854966	validation_1-auc:0.839476
[39]	validation_0-auc:0.85524	validation_1-auc:0.839707
[40]	validation_0-auc:0.855582	validation_1-auc:0.839302
[41]	validation_0-auc:0.855556	validation_1-auc:0.838414
[42]	validation_0-auc:0.856681	validation_1-auc:0.839834
[43]	validation_0-auc:0.85673	validation_1-auc:0.83934
[44]	validation_0-auc:0.857906	validation_1-auc:0.839725
[45]	validation_0-auc:0.857764	validation_1-auc:0.839242
[46]	validation_0-auc:0.858701	validation_1-auc:0.839745
[47]	validation_0-auc:0.859505	validation_1-auc:0.840425
[48]	validation_0-auc:0.860027	validation_1-auc:0.840666
[49]	validation_0-auc:0.860516	validation_1-auc:0.840097
[50]	validation_0-auc:0.860554	validation_1-auc:0.839722
[51]	validation_0-auc:0.861609	validation_1-auc:0.840009
[52]	validation_0-auc:0.862108	validation_1-auc:0.840477
[53]	validation_0-auc:0.86259	validation_1-auc:0.840581
[54]	validation_0-auc:0.863038	validation_1-auc:0.84088
[55]	validation_0-auc:0.863586	validation_1-auc:0.841169
[56]	validation_0-auc:0.863972	validation_1-auc:0.841148
[57]	validation_0-auc:0.864433	validation_1-auc:0.841244
[58]	validation_0-auc:0.864711	validation_1-auc:0.841189
[59]	validation_0-auc:0.864751	validation_1-auc:0.841015
[60]	validation_0-auc:0.864933	validation_1-auc:0.840929
[61]	validation_0-auc:0.865206	validation_1-auc:0.840774
[62]	validation_0-auc:0.865544	validation_1-auc:0.84061
[63]	validation_0-auc:0.865914	validation_1-auc:0.840698
[64]	validation_0-auc:0.866114	validation_1-auc:0.840479
[65]	validation_0-auc:0.866552	validation_1-auc:0.84025
[66]	validation_0-auc:0.866627	validation_1-auc:0.840336
[67]	validation_0-auc:0.866908	validation_1-auc:0.840025
[68]	validation_0-auc:0.867304	validation_1-auc:0.839954
[69]	validation_0-auc:0.867472	validation_1-auc:0.839995
[70]	validation_0-auc:0.867565	validation_1-auc:0.840074
[71]	validation_0-auc:0.867673	validation_1-auc:0.839941
[72]	validation_0-auc:0.867836	validation_1-auc:0.839804
[73]	validation_0-auc:0.868074	validation_1-auc:0.839811
[74]	validation_0-auc:0.868191	validation_1-auc:0.839981
[75]	validation_0-auc:0.86867	validation_1-auc:0.839929
[76]	validation_0-auc:0.868765	validation_1-auc:0.839856
[77]	validation_0-auc:0.86878	validation_1-auc:0.839824
[78]	validation_0-auc:0.868876	validation_1-auc:0.839785
[79]	validation_0-auc:0.869176	validation_1-auc:0.839669
[80]	validation_0-auc:0.869244	validation_1-auc:0.839576
[81]	validation_0-auc:0.869393	validation_1-auc:0.839427
[82]	validation_0-auc:0.869596	validation_1-auc:0.839356
[83]	validation_0-auc:0.869877	validation_1-auc:0.839261
[84]	validation_0-auc:0.870082	validation_1-auc:0.838957
[85]	validation_0-auc:0.870146	validation_1-auc:0.838855
[86]	validation_0-auc:0.870503	validation_1-auc:0.838853
[87]	validation_0-auc:0.87104	validation_1-auc:0.839082
Stopping. Best iteration:
[57]	validation_0-auc:0.864433	validation_1-auc:0.841244

[0]	validation_0-auc:0.717372	validation_1-auc:0.725803
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.811453	validation_1-auc:0.811759
[2]	validation_0-auc:0.804735	validation_1-auc:0.808587
[3]	validation_0-auc:0.814898	validation_1-auc:0.814414
[4]	validation_0-auc:0.823423	validation_1-auc:0.822485
[5]	validation_0-auc:0.817417	validation_1-auc:0.817277
[6]	validation_0-auc:0.816492	validation_1-auc:0.816198
[7]	validation_0-auc:0.823936	validation_1-auc:0.820988
[8]	validation_0-auc:0.827756	validation_1-auc:0.82517
[9]	validation_0-auc:0.828462	validation_1-auc:0.827085
[10]	validation_0-auc:0.831469	validation_1-auc:0.828213
[11]	validation_0-auc:0.834412	validation_1-auc:0.830737
[12]	validation_0-auc:0.836393	validation_1-auc:0.832236
[13]	validation_0-auc:0.835472	validation_1-auc:0.831531
[14]	validation_0-auc:0.835833	validation_1-auc:0.830984
[15]	validation_0-auc:0.837834	validation_1-auc:0.831886
[16]	validation_0-auc:0.840135	validation_1-auc:0.834486
[17]	validation_0-auc:0.842056	validation_1-auc:0.83563
[18]	validation_0-auc:0.845173	validation_1-auc:0.83685
[19]	validation_0-auc:0.843186	validation_1-auc:0.835142
[20]	validation_0-auc:0.844921	validation_1-auc:0.83862
[21]	validation_0-auc:0.844619	validation_1-auc:0.837645
[22]	validation_0-auc:0.843271	validation_1-auc:0.836499
[23]	validation_0-auc:0.844968	validation_1-auc:0.8369
[24]	validation_0-auc:0.845573	validation_1-auc:0.837495
[25]	validation_0-auc:0.845383	validation_1-auc:0.83654
[26]	validation_0-auc:0.845436	validation_1-auc:0.836883
[27]	validation_0-auc:0.847526	validation_1-auc:0.83788
[28]	validation_0-auc:0.849183	validation_1-auc:0.838504
[29]	validation_0-auc:0.848971	validation_1-auc:0.838779
[30]	validation_0-auc:0.850182	validation_1-auc:0.839775
[31]	validation_0-auc:0.851207	validation_1-auc:0.840399
[32]	validation_0-auc:0.851972	validation_1-auc:0.840136
[33]	validation_0-auc:0.851672	validation_1-auc:0.840136
[34]	validation_0-auc:0.851647	validation_1-auc:0.839887
[35]	validation_0-auc:0.853923	validation_1-auc:0.840535
[36]	validation_0-auc:0.855327	validation_1-auc:0.841043
[37]	validation_0-auc:0.855806	validation_1-auc:0.841045
[38]	validation_0-auc:0.856156	validation_1-auc:0.840833
[39]	validation_0-auc:0.856263	validation_1-auc:0.840852
[40]	validation_0-auc:0.856429	validation_1-auc:0.840589
[41]	validation_0-auc:0.856511	validation_1-auc:0.840587
[42]	validation_0-auc:0.857765	validation_1-auc:0.841372
[43]	validation_0-auc:0.857773	validation_1-auc:0.841239
[44]	validation_0-auc:0.859167	validation_1-auc:0.841734
[45]	validation_0-auc:0.859191	validation_1-auc:0.841431
[46]	validation_0-auc:0.860261	validation_1-auc:0.842244
[47]	validation_0-auc:0.861032	validation_1-auc:0.842936
[48]	validation_0-auc:0.861347	validation_1-auc:0.842616
[49]	validation_0-auc:0.861843	validation_1-auc:0.842761
[50]	validation_0-auc:0.86211	validation_1-auc:0.842776
[51]	validation_0-auc:0.863213	validation_1-auc:0.843216
[52]	validation_0-auc:0.863963	validation_1-auc:0.844009
[53]	validation_0-auc:0.864482	validation_1-auc:0.844648
[54]	validation_0-auc:0.865133	validation_1-auc:0.844937
[55]	validation_0-auc:0.865492	validation_1-auc:0.844709
[56]	validation_0-auc:0.865993	validation_1-auc:0.844996
[57]	validation_0-auc:0.866332	validation_1-auc:0.845129
[58]	validation_0-auc:0.866537	validation_1-auc:0.844909
[59]	validation_0-auc:0.866757	validation_1-auc:0.844844
[60]	validation_0-auc:0.867255	validation_1-auc:0.844642
[61]	validation_0-auc:0.867647	validation_1-auc:0.844413
[62]	validation_0-auc:0.86778	validation_1-auc:0.844403
[63]	validation_0-auc:0.868042	validation_1-auc:0.844125
[64]	validation_0-auc:0.868285	validation_1-auc:0.844139
[65]	validation_0-auc:0.868816	validation_1-auc:0.844317
[66]	validation_0-auc:0.868901	validation_1-auc:0.844326
[67]	validation_0-auc:0.869074	validation_1-auc:0.844176
[68]	validation_0-auc:0.869285	validation_1-auc:0.843951
[69]	validation_0-auc:0.869452	validation_1-auc:0.844141
[70]	validation_0-auc:0.869665	validation_1-auc:0.844145
[71]	validation_0-auc:0.86996	validation_1-auc:0.844362
[72]	validation_0-auc:0.870279	validation_1-auc:0.84424
[73]	validation_0-auc:0.87081	validation_1-auc:0.844158
[74]	validation_0-auc:0.870916	validation_1-auc:0.844259
[75]	validation_0-auc:0.871189	validation_1-auc:0.84424
[76]	validation_0-auc:0.871421	validation_1-auc:0.844454
[77]	validation_0-auc:0.871554	validation_1-auc:0.84452
[78]	validation_0-auc:0.871838	validation_1-auc:0.844343
[79]	validation_0-auc:0.872084	validation_1-auc:0.844777
[80]	validation_0-auc:0.872198	validation_1-auc:0.844832
[81]	validation_0-auc:0.872395	validation_1-auc:0.844699
[82]	validation_0-auc:0.872528	validation_1-auc:0.844563
[83]	validation_0-auc:0.872857	validation_1-auc:0.844536
[84]	validation_0-auc:0.872873	validation_1-auc:0.844276
[85]	validation_0-auc:0.873233	validation_1-auc:0.844218
[86]	validation_0-auc:0.873572	validation_1-auc:0.844197
[87]	validation_0-auc:0.873905	validation_1-auc:0.844333
Stopping. Best iteration:
[57]	validation_0-auc:0.866332	validation_1-auc:0.845129

[0]	validation_0-auc:0.728604	validation_1-auc:0.729138
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.815073	validation_1-auc:0.81486
[2]	validation_0-auc:0.807764	validation_1-auc:0.806299
[3]	validation_0-auc:0.821197	validation_1-auc:0.820956
[4]	validation_0-auc:0.826503	validation_1-auc:0.824373
[5]	validation_0-auc:0.824434	validation_1-auc:0.821276
[6]	validation_0-auc:0.820665	validation_1-auc:0.815827
[7]	validation_0-auc:0.828395	validation_1-auc:0.826386
[8]	validation_0-auc:0.832078	validation_1-auc:0.828844
[9]	validation_0-auc:0.831194	validation_1-auc:0.828563
[10]	validation_0-auc:0.833759	validation_1-auc:0.82963
[11]	validation_0-auc:0.836244	validation_1-auc:0.832161
[12]	validation_0-auc:0.837833	validation_1-auc:0.832555
[13]	validation_0-auc:0.838108	validation_1-auc:0.832119
[14]	validation_0-auc:0.837637	validation_1-auc:0.830707
[15]	validation_0-auc:0.839684	validation_1-auc:0.833877
[16]	validation_0-auc:0.84178	validation_1-auc:0.834354
[17]	validation_0-auc:0.844027	validation_1-auc:0.836256
[18]	validation_0-auc:0.845397	validation_1-auc:0.836915
[19]	validation_0-auc:0.843901	validation_1-auc:0.835808
[20]	validation_0-auc:0.845076	validation_1-auc:0.837161
[21]	validation_0-auc:0.844337	validation_1-auc:0.836871
[22]	validation_0-auc:0.843858	validation_1-auc:0.836505
[23]	validation_0-auc:0.846581	validation_1-auc:0.837201
[24]	validation_0-auc:0.847991	validation_1-auc:0.838057
[25]	validation_0-auc:0.848381	validation_1-auc:0.836049
[26]	validation_0-auc:0.847758	validation_1-auc:0.835228
[27]	validation_0-auc:0.849366	validation_1-auc:0.836947
[28]	validation_0-auc:0.850606	validation_1-auc:0.836905
[29]	validation_0-auc:0.850292	validation_1-auc:0.836338
[30]	validation_0-auc:0.851466	validation_1-auc:0.837421
[31]	validation_0-auc:0.852847	validation_1-auc:0.83918
[32]	validation_0-auc:0.852769	validation_1-auc:0.837964
[33]	validation_0-auc:0.852843	validation_1-auc:0.83789
[34]	validation_0-auc:0.852906	validation_1-auc:0.837203
[35]	validation_0-auc:0.85458	validation_1-auc:0.838158
[36]	validation_0-auc:0.855423	validation_1-auc:0.839002
[37]	validation_0-auc:0.855633	validation_1-auc:0.838723
[38]	validation_0-auc:0.855635	validation_1-auc:0.838523
[39]	validation_0-auc:0.855794	validation_1-auc:0.838367
[40]	validation_0-auc:0.855913	validation_1-auc:0.838014
[41]	validation_0-auc:0.855953	validation_1-auc:0.837654
[42]	validation_0-auc:0.857512	validation_1-auc:0.839252
[43]	validation_0-auc:0.858137	validation_1-auc:0.83866
[44]	validation_0-auc:0.859013	validation_1-auc:0.839894
[45]	validation_0-auc:0.859183	validation_1-auc:0.839445
[46]	validation_0-auc:0.860201	validation_1-auc:0.841005
[47]	validation_0-auc:0.860864	validation_1-auc:0.841872
[48]	validation_0-auc:0.861408	validation_1-auc:0.842149
[49]	validation_0-auc:0.861814	validation_1-auc:0.842034
[50]	validation_0-auc:0.862022	validation_1-auc:0.841654
[51]	validation_0-auc:0.862978	validation_1-auc:0.842219
[52]	validation_0-auc:0.863668	validation_1-auc:0.843225
[53]	validation_0-auc:0.864105	validation_1-auc:0.843547
[54]	validation_0-auc:0.864461	validation_1-auc:0.844088
[55]	validation_0-auc:0.864768	validation_1-auc:0.843943
[56]	validation_0-auc:0.865241	validation_1-auc:0.84415
[57]	validation_0-auc:0.865541	validation_1-auc:0.844081
[58]	validation_0-auc:0.865738	validation_1-auc:0.843836
[59]	validation_0-auc:0.865894	validation_1-auc:0.843578
[60]	validation_0-auc:0.866407	validation_1-auc:0.843714
[61]	validation_0-auc:0.866793	validation_1-auc:0.843642
[62]	validation_0-auc:0.867025	validation_1-auc:0.843447
[63]	validation_0-auc:0.867428	validation_1-auc:0.843602
[64]	validation_0-auc:0.867808	validation_1-auc:0.8436
[65]	validation_0-auc:0.868301	validation_1-auc:0.84353
[66]	validation_0-auc:0.8684	validation_1-auc:0.843372
[67]	validation_0-auc:0.86873	validation_1-auc:0.84353
[68]	validation_0-auc:0.869015	validation_1-auc:0.843555
[69]	validation_0-auc:0.869285	validation_1-auc:0.843865
[70]	validation_0-auc:0.869509	validation_1-auc:0.843834
[71]	validation_0-auc:0.869666	validation_1-auc:0.843846
[72]	validation_0-auc:0.869821	validation_1-auc:0.843805
[73]	validation_0-auc:0.869965	validation_1-auc:0.843665
[74]	validation_0-auc:0.870209	validation_1-auc:0.843911
[75]	validation_0-auc:0.870388	validation_1-auc:0.843806
[76]	validation_0-auc:0.870568	validation_1-auc:0.844118
[77]	validation_0-auc:0.870765	validation_1-auc:0.843957
[78]	validation_0-auc:0.870968	validation_1-auc:0.844032
[79]	validation_0-auc:0.871265	validation_1-auc:0.843788
[80]	validation_0-auc:0.871501	validation_1-auc:0.843751
[81]	validation_0-auc:0.871686	validation_1-auc:0.843809
[82]	validation_0-auc:0.871848	validation_1-auc:0.843864
[83]	validation_0-auc:0.872375	validation_1-auc:0.843736
[84]	validation_0-auc:0.872854	validation_1-auc:0.843938
[85]	validation_0-auc:0.873012	validation_1-auc:0.843937
[86]	validation_0-auc:0.873159	validation_1-auc:0.843777
Stopping. Best iteration:
[56]	validation_0-auc:0.865241	validation_1-auc:0.84415

[0]	validation_0-auc:0.714006	validation_1-auc:0.723827
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.804177	validation_1-auc:0.810124
[2]	validation_0-auc:0.797681	validation_1-auc:0.806193
[3]	validation_0-auc:0.805563	validation_1-auc:0.8132
[4]	validation_0-auc:0.816182	validation_1-auc:0.818554
[5]	validation_0-auc:0.811689	validation_1-auc:0.81533
[6]	validation_0-auc:0.811593	validation_1-auc:0.815446
[7]	validation_0-auc:0.819138	validation_1-auc:0.821911
[8]	validation_0-auc:0.822456	validation_1-auc:0.825036
[9]	validation_0-auc:0.822175	validation_1-auc:0.824127
[10]	validation_0-auc:0.826339	validation_1-auc:0.827481
[11]	validation_0-auc:0.830609	validation_1-auc:0.830145
[12]	validation_0-auc:0.832194	validation_1-auc:0.830867
[13]	validation_0-auc:0.832881	validation_1-auc:0.830585
[14]	validation_0-auc:0.83105	validation_1-auc:0.829088
[15]	validation_0-auc:0.834064	validation_1-auc:0.830594
[16]	validation_0-auc:0.836488	validation_1-auc:0.832439
[17]	validation_0-auc:0.838566	validation_1-auc:0.834435
[18]	validation_0-auc:0.841291	validation_1-auc:0.836339
[19]	validation_0-auc:0.840159	validation_1-auc:0.835519
[20]	validation_0-auc:0.841893	validation_1-auc:0.836441
[21]	validation_0-auc:0.841405	validation_1-auc:0.836036
[22]	validation_0-auc:0.840321	validation_1-auc:0.834835
[23]	validation_0-auc:0.84251	validation_1-auc:0.837768
[24]	validation_0-auc:0.844119	validation_1-auc:0.839223
[25]	validation_0-auc:0.84386	validation_1-auc:0.83757
[26]	validation_0-auc:0.843624	validation_1-auc:0.836767
[27]	validation_0-auc:0.845274	validation_1-auc:0.838187
[28]	validation_0-auc:0.846582	validation_1-auc:0.839603
[29]	validation_0-auc:0.84632	validation_1-auc:0.83937
[30]	validation_0-auc:0.847536	validation_1-auc:0.840433
[31]	validation_0-auc:0.848681	validation_1-auc:0.841114
[32]	validation_0-auc:0.84912	validation_1-auc:0.840659
[33]	validation_0-auc:0.849533	validation_1-auc:0.840415
[34]	validation_0-auc:0.849601	validation_1-auc:0.839774
[35]	validation_0-auc:0.851658	validation_1-auc:0.84062
[36]	validation_0-auc:0.85292	validation_1-auc:0.84124
[37]	validation_0-auc:0.852735	validation_1-auc:0.841111
[38]	validation_0-auc:0.853273	validation_1-auc:0.840711
[39]	validation_0-auc:0.853404	validation_1-auc:0.840782
[40]	validation_0-auc:0.85357	validation_1-auc:0.8406
[41]	validation_0-auc:0.853778	validation_1-auc:0.83966
[42]	validation_0-auc:0.854987	validation_1-auc:0.840754
[43]	validation_0-auc:0.85516	validation_1-auc:0.84029
[44]	validation_0-auc:0.856332	validation_1-auc:0.840809
[45]	validation_0-auc:0.856144	validation_1-auc:0.840263
[46]	validation_0-auc:0.857333	validation_1-auc:0.840915
[47]	validation_0-auc:0.858082	validation_1-auc:0.841666
[48]	validation_0-auc:0.85842	validation_1-auc:0.841105
[49]	validation_0-auc:0.858715	validation_1-auc:0.841306
[50]	validation_0-auc:0.858809	validation_1-auc:0.840893
[51]	validation_0-auc:0.859444	validation_1-auc:0.841175
[52]	validation_0-auc:0.860043	validation_1-auc:0.841828
[53]	validation_0-auc:0.860604	validation_1-auc:0.841919
[54]	validation_0-auc:0.861165	validation_1-auc:0.842385
[55]	validation_0-auc:0.861594	validation_1-auc:0.842339
[56]	validation_0-auc:0.862039	validation_1-auc:0.842439
[57]	validation_0-auc:0.86256	validation_1-auc:0.842088
[58]	validation_0-auc:0.863139	validation_1-auc:0.841964
[59]	validation_0-auc:0.863195	validation_1-auc:0.841913
[60]	validation_0-auc:0.863386	validation_1-auc:0.841801
[61]	validation_0-auc:0.863476	validation_1-auc:0.841896
[62]	validation_0-auc:0.863764	validation_1-auc:0.841828
[63]	validation_0-auc:0.863996	validation_1-auc:0.841483
[64]	validation_0-auc:0.864188	validation_1-auc:0.841417
[65]	validation_0-auc:0.86447	validation_1-auc:0.841593
[66]	validation_0-auc:0.864566	validation_1-auc:0.841458
[67]	validation_0-auc:0.864617	validation_1-auc:0.841315
[68]	validation_0-auc:0.864806	validation_1-auc:0.841167
[69]	validation_0-auc:0.864969	validation_1-auc:0.84117
[70]	validation_0-auc:0.865178	validation_1-auc:0.84107
[71]	validation_0-auc:0.865355	validation_1-auc:0.841001
[72]	validation_0-auc:0.86548	validation_1-auc:0.841128
[73]	validation_0-auc:0.865794	validation_1-auc:0.840671
[74]	validation_0-auc:0.866089	validation_1-auc:0.840705
[75]	validation_0-auc:0.866319	validation_1-auc:0.840723
[76]	validation_0-auc:0.866483	validation_1-auc:0.840616
[77]	validation_0-auc:0.866564	validation_1-auc:0.840502
[78]	validation_0-auc:0.86659	validation_1-auc:0.840438
[79]	validation_0-auc:0.86674	validation_1-auc:0.840525
[80]	validation_0-auc:0.866877	validation_1-auc:0.840632
[81]	validation_0-auc:0.866951	validation_1-auc:0.840482
[82]	validation_0-auc:0.867071	validation_1-auc:0.840589
[83]	validation_0-auc:0.867311	validation_1-auc:0.840293
[84]	validation_0-auc:0.867423	validation_1-auc:0.840208
[85]	validation_0-auc:0.867531	validation_1-auc:0.840152
[86]	validation_0-auc:0.867661	validation_1-auc:0.839993
Stopping. Best iteration:
[56]	validation_0-auc:0.862039	validation_1-auc:0.842439

[0]	validation_0-auc:0.717533	validation_1-auc:0.726673
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.809734	validation_1-auc:0.81389
[2]	validation_0-auc:0.804202	validation_1-auc:0.809256
[3]	validation_0-auc:0.812949	validation_1-auc:0.814788
[4]	validation_0-auc:0.823785	validation_1-auc:0.82346
[5]	validation_0-auc:0.818969	validation_1-auc:0.820914
[6]	validation_0-auc:0.819058	validation_1-auc:0.821506
[7]	validation_0-auc:0.826851	validation_1-auc:0.82669
[8]	validation_0-auc:0.830179	validation_1-auc:0.828637
[9]	validation_0-auc:0.829568	validation_1-auc:0.828939
[10]	validation_0-auc:0.832504	validation_1-auc:0.829919
[11]	validation_0-auc:0.835181	validation_1-auc:0.832471
[12]	validation_0-auc:0.837596	validation_1-auc:0.833411
[13]	validation_0-auc:0.836468	validation_1-auc:0.8328
[14]	validation_0-auc:0.835572	validation_1-auc:0.830916
[15]	validation_0-auc:0.83788	validation_1-auc:0.832965
[16]	validation_0-auc:0.839275	validation_1-auc:0.834799
[17]	validation_0-auc:0.84088	validation_1-auc:0.836478
[18]	validation_0-auc:0.843418	validation_1-auc:0.837846
[19]	validation_0-auc:0.841999	validation_1-auc:0.836468
[20]	validation_0-auc:0.843241	validation_1-auc:0.83771
[21]	validation_0-auc:0.842973	validation_1-auc:0.83669
[22]	validation_0-auc:0.841968	validation_1-auc:0.835539
[23]	validation_0-auc:0.843748	validation_1-auc:0.83687
[24]	validation_0-auc:0.844952	validation_1-auc:0.838723
[25]	validation_0-auc:0.844679	validation_1-auc:0.83767
[26]	validation_0-auc:0.844359	validation_1-auc:0.837147
[27]	validation_0-auc:0.845869	validation_1-auc:0.838411
[28]	validation_0-auc:0.847594	validation_1-auc:0.839155
[29]	validation_0-auc:0.847181	validation_1-auc:0.839353
[30]	validation_0-auc:0.848175	validation_1-auc:0.839866
[31]	validation_0-auc:0.84937	validation_1-auc:0.840654
[32]	validation_0-auc:0.85055	validation_1-auc:0.840436
[33]	validation_0-auc:0.85021	validation_1-auc:0.840212
[34]	validation_0-auc:0.850325	validation_1-auc:0.839865
[35]	validation_0-auc:0.852574	validation_1-auc:0.840582
[36]	validation_0-auc:0.854312	validation_1-auc:0.840916
[37]	validation_0-auc:0.854618	validation_1-auc:0.841471
[38]	validation_0-auc:0.854714	validation_1-auc:0.841293
[39]	validation_0-auc:0.854831	validation_1-auc:0.841048
[40]	validation_0-auc:0.854839	validation_1-auc:0.840868
[41]	validation_0-auc:0.854947	validation_1-auc:0.840691
[42]	validation_0-auc:0.856121	validation_1-auc:0.841262
[43]	validation_0-auc:0.856168	validation_1-auc:0.841252
[44]	validation_0-auc:0.857454	validation_1-auc:0.841905
[45]	validation_0-auc:0.857409	validation_1-auc:0.841651
[46]	validation_0-auc:0.858264	validation_1-auc:0.842076
[47]	validation_0-auc:0.858972	validation_1-auc:0.842918
[48]	validation_0-auc:0.859624	validation_1-auc:0.842852
[49]	validation_0-auc:0.860071	validation_1-auc:0.843026
[50]	validation_0-auc:0.86012	validation_1-auc:0.843175
[51]	validation_0-auc:0.861097	validation_1-auc:0.843513
[52]	validation_0-auc:0.861507	validation_1-auc:0.844252
[53]	validation_0-auc:0.861959	validation_1-auc:0.844597
[54]	validation_0-auc:0.862642	validation_1-auc:0.844757
[55]	validation_0-auc:0.862863	validation_1-auc:0.844622
[56]	validation_0-auc:0.863279	validation_1-auc:0.844552
[57]	validation_0-auc:0.863689	validation_1-auc:0.844838
[58]	validation_0-auc:0.864037	validation_1-auc:0.844772
[59]	validation_0-auc:0.864241	validation_1-auc:0.844807
[60]	validation_0-auc:0.864474	validation_1-auc:0.844851
[61]	validation_0-auc:0.864937	validation_1-auc:0.844956
[62]	validation_0-auc:0.86506	validation_1-auc:0.84486
[63]	validation_0-auc:0.865367	validation_1-auc:0.844798
[64]	validation_0-auc:0.865488	validation_1-auc:0.844743
[65]	validation_0-auc:0.866056	validation_1-auc:0.844645
[66]	validation_0-auc:0.866101	validation_1-auc:0.844762
[67]	validation_0-auc:0.866303	validation_1-auc:0.844544
[68]	validation_0-auc:0.866493	validation_1-auc:0.844325
[69]	validation_0-auc:0.866651	validation_1-auc:0.844105
[70]	validation_0-auc:0.866761	validation_1-auc:0.84405
[71]	validation_0-auc:0.866972	validation_1-auc:0.844211
[72]	validation_0-auc:0.867151	validation_1-auc:0.844122
[73]	validation_0-auc:0.867376	validation_1-auc:0.844185
[74]	validation_0-auc:0.867442	validation_1-auc:0.844186
[75]	validation_0-auc:0.867606	validation_1-auc:0.84417
[76]	validation_0-auc:0.86786	validation_1-auc:0.844023
[77]	validation_0-auc:0.86797	validation_1-auc:0.844189
[78]	validation_0-auc:0.868345	validation_1-auc:0.844065
[79]	validation_0-auc:0.868528	validation_1-auc:0.844039
[80]	validation_0-auc:0.868618	validation_1-auc:0.844043
[81]	validation_0-auc:0.868874	validation_1-auc:0.844012
[82]	validation_0-auc:0.869039	validation_1-auc:0.84399
[83]	validation_0-auc:0.869377	validation_1-auc:0.844725
[84]	validation_0-auc:0.869521	validation_1-auc:0.844852
[85]	validation_0-auc:0.869774	validation_1-auc:0.844897
[86]	validation_0-auc:0.869911	validation_1-auc:0.844982
[87]	validation_0-auc:0.8703	validation_1-auc:0.844929
[88]	validation_0-auc:0.870419	validation_1-auc:0.844875
[89]	validation_0-auc:0.870459	validation_1-auc:0.84484
[90]	validation_0-auc:0.870516	validation_1-auc:0.844859
[91]	validation_0-auc:0.870733	validation_1-auc:0.844851
[92]	validation_0-auc:0.870864	validation_1-auc:0.844867
[93]	validation_0-auc:0.870989	validation_1-auc:0.844878
[94]	validation_0-auc:0.871077	validation_1-auc:0.844817
[95]	validation_0-auc:0.871415	validation_1-auc:0.844744
[96]	validation_0-auc:0.871584	validation_1-auc:0.844748
[97]	validation_0-auc:0.871635	validation_1-auc:0.844788
[98]	validation_0-auc:0.871774	validation_1-auc:0.844676
[99]	validation_0-auc:0.871842	validation_1-auc:0.844639
[0]	validation_0-auc:0.733623	validation_1-auc:0.734145
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.814232	validation_1-auc:0.811655
[2]	validation_0-auc:0.805433	validation_1-auc:0.803959
[3]	validation_0-auc:0.820416	validation_1-auc:0.819184
[4]	validation_0-auc:0.826708	validation_1-auc:0.824223
[5]	validation_0-auc:0.824914	validation_1-auc:0.822627
[6]	validation_0-auc:0.820232	validation_1-auc:0.816653
[7]	validation_0-auc:0.828122	validation_1-auc:0.826539
[8]	validation_0-auc:0.831957	validation_1-auc:0.827747
[9]	validation_0-auc:0.830333	validation_1-auc:0.827465
[10]	validation_0-auc:0.833473	validation_1-auc:0.828121
[11]	validation_0-auc:0.836154	validation_1-auc:0.83083
[12]	validation_0-auc:0.838005	validation_1-auc:0.831142
[13]	validation_0-auc:0.838252	validation_1-auc:0.830971
[14]	validation_0-auc:0.837533	validation_1-auc:0.830021
[15]	validation_0-auc:0.839397	validation_1-auc:0.832657
[16]	validation_0-auc:0.840795	validation_1-auc:0.833282
[17]	validation_0-auc:0.842479	validation_1-auc:0.835328
[18]	validation_0-auc:0.844374	validation_1-auc:0.83619
[19]	validation_0-auc:0.842834	validation_1-auc:0.834682
[20]	validation_0-auc:0.84381	validation_1-auc:0.836428
[21]	validation_0-auc:0.843498	validation_1-auc:0.835795
[22]	validation_0-auc:0.842433	validation_1-auc:0.834212
[23]	validation_0-auc:0.844346	validation_1-auc:0.83593
[24]	validation_0-auc:0.845928	validation_1-auc:0.836841
[25]	validation_0-auc:0.846023	validation_1-auc:0.835169
[26]	validation_0-auc:0.845485	validation_1-auc:0.83462
[27]	validation_0-auc:0.847301	validation_1-auc:0.835945
[28]	validation_0-auc:0.848602	validation_1-auc:0.836213
[29]	validation_0-auc:0.84829	validation_1-auc:0.83586
[30]	validation_0-auc:0.849511	validation_1-auc:0.836901
[31]	validation_0-auc:0.850757	validation_1-auc:0.838572
[32]	validation_0-auc:0.850616	validation_1-auc:0.837443
[33]	validation_0-auc:0.850696	validation_1-auc:0.836754
[34]	validation_0-auc:0.851041	validation_1-auc:0.836594
[35]	validation_0-auc:0.852538	validation_1-auc:0.83707
[36]	validation_0-auc:0.853569	validation_1-auc:0.838459
[37]	validation_0-auc:0.853913	validation_1-auc:0.838661
[38]	validation_0-auc:0.853993	validation_1-auc:0.838285
[39]	validation_0-auc:0.853844	validation_1-auc:0.838077
[40]	validation_0-auc:0.854138	validation_1-auc:0.83751
[41]	validation_0-auc:0.854157	validation_1-auc:0.837238
[42]	validation_0-auc:0.854984	validation_1-auc:0.838421
[43]	validation_0-auc:0.855198	validation_1-auc:0.838019
[44]	validation_0-auc:0.856383	validation_1-auc:0.839212
[45]	validation_0-auc:0.856338	validation_1-auc:0.838689
[46]	validation_0-auc:0.857215	validation_1-auc:0.839968
[47]	validation_0-auc:0.85793	validation_1-auc:0.840456
[48]	validation_0-auc:0.858093	validation_1-auc:0.840479
[49]	validation_0-auc:0.858635	validation_1-auc:0.840441
[50]	validation_0-auc:0.858817	validation_1-auc:0.840334
[51]	validation_0-auc:0.859512	validation_1-auc:0.841106
[52]	validation_0-auc:0.860354	validation_1-auc:0.841958
[53]	validation_0-auc:0.860969	validation_1-auc:0.842423
[54]	validation_0-auc:0.861555	validation_1-auc:0.842601
[55]	validation_0-auc:0.861944	validation_1-auc:0.842224
[56]	validation_0-auc:0.862347	validation_1-auc:0.842425
[57]	validation_0-auc:0.862652	validation_1-auc:0.842732
[58]	validation_0-auc:0.863034	validation_1-auc:0.842488
[59]	validation_0-auc:0.86303	validation_1-auc:0.842579
[60]	validation_0-auc:0.863458	validation_1-auc:0.842688
[61]	validation_0-auc:0.863564	validation_1-auc:0.842588
[62]	validation_0-auc:0.863722	validation_1-auc:0.842509
[63]	validation_0-auc:0.864069	validation_1-auc:0.842489
[64]	validation_0-auc:0.864452	validation_1-auc:0.842604
[65]	validation_0-auc:0.864718	validation_1-auc:0.842417
[66]	validation_0-auc:0.864811	validation_1-auc:0.842293
[67]	validation_0-auc:0.865309	validation_1-auc:0.842175
[68]	validation_0-auc:0.865585	validation_1-auc:0.842099
[69]	validation_0-auc:0.865867	validation_1-auc:0.842183
[70]	validation_0-auc:0.866239	validation_1-auc:0.842408
[71]	validation_0-auc:0.866501	validation_1-auc:0.842191
[72]	validation_0-auc:0.86666	validation_1-auc:0.841951
[73]	validation_0-auc:0.866731	validation_1-auc:0.84192
[74]	validation_0-auc:0.86688	validation_1-auc:0.841985
[75]	validation_0-auc:0.867218	validation_1-auc:0.841902
[76]	validation_0-auc:0.867373	validation_1-auc:0.841776
[77]	validation_0-auc:0.86768	validation_1-auc:0.84193
[78]	validation_0-auc:0.86781	validation_1-auc:0.842111
[79]	validation_0-auc:0.867876	validation_1-auc:0.842095
[80]	validation_0-auc:0.867981	validation_1-auc:0.842101
[81]	validation_0-auc:0.868122	validation_1-auc:0.842057
[82]	validation_0-auc:0.868368	validation_1-auc:0.842092
[83]	validation_0-auc:0.868809	validation_1-auc:0.841964
[84]	validation_0-auc:0.869083	validation_1-auc:0.841972
[85]	validation_0-auc:0.869285	validation_1-auc:0.842013
[86]	validation_0-auc:0.86939	validation_1-auc:0.841945
[87]	validation_0-auc:0.869506	validation_1-auc:0.841854
Stopping. Best iteration:
[57]	validation_0-auc:0.862652	validation_1-auc:0.842732

[0]	validation_0-auc:0.721496	validation_1-auc:0.726707
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.809727	validation_1-auc:0.80884
[2]	validation_0-auc:0.804309	validation_1-auc:0.807395
[3]	validation_0-auc:0.815093	validation_1-auc:0.816945
[4]	validation_0-auc:0.8242	validation_1-auc:0.82348
[5]	validation_0-auc:0.819516	validation_1-auc:0.819507
[6]	validation_0-auc:0.819212	validation_1-auc:0.818854
[7]	validation_0-auc:0.827414	validation_1-auc:0.826411
[8]	validation_0-auc:0.831965	validation_1-auc:0.829566
[9]	validation_0-auc:0.832264	validation_1-auc:0.827283
[10]	validation_0-auc:0.836521	validation_1-auc:0.829756
[11]	validation_0-auc:0.841098	validation_1-auc:0.831416
[12]	validation_0-auc:0.843738	validation_1-auc:0.83204
[13]	validation_0-auc:0.843719	validation_1-auc:0.832153
[14]	validation_0-auc:0.843353	validation_1-auc:0.830469
[15]	validation_0-auc:0.84668	validation_1-auc:0.83238
[16]	validation_0-auc:0.848975	validation_1-auc:0.835177
[17]	validation_0-auc:0.851268	validation_1-auc:0.836379
[18]	validation_0-auc:0.853478	validation_1-auc:0.836386
[19]	validation_0-auc:0.852335	validation_1-auc:0.835925
[20]	validation_0-auc:0.855544	validation_1-auc:0.836983
[21]	validation_0-auc:0.855553	validation_1-auc:0.836397
[22]	validation_0-auc:0.855033	validation_1-auc:0.834445
[23]	validation_0-auc:0.856963	validation_1-auc:0.836413
[24]	validation_0-auc:0.85907	validation_1-auc:0.838012
[25]	validation_0-auc:0.859496	validation_1-auc:0.836454
[26]	validation_0-auc:0.860395	validation_1-auc:0.836356
[27]	validation_0-auc:0.862315	validation_1-auc:0.837705
[28]	validation_0-auc:0.864206	validation_1-auc:0.837761
[29]	validation_0-auc:0.863938	validation_1-auc:0.837658
[30]	validation_0-auc:0.865498	validation_1-auc:0.83833
[31]	validation_0-auc:0.866862	validation_1-auc:0.839237
[32]	validation_0-auc:0.86727	validation_1-auc:0.837928
[33]	validation_0-auc:0.86829	validation_1-auc:0.837639
[34]	validation_0-auc:0.868446	validation_1-auc:0.837162
[35]	validation_0-auc:0.870655	validation_1-auc:0.837342
[36]	validation_0-auc:0.8727	validation_1-auc:0.836984
[37]	validation_0-auc:0.872556	validation_1-auc:0.836142
[38]	validation_0-auc:0.872834	validation_1-auc:0.835513
[39]	validation_0-auc:0.873296	validation_1-auc:0.835263
[40]	validation_0-auc:0.873191	validation_1-auc:0.834652
[41]	validation_0-auc:0.873219	validation_1-auc:0.833886
[42]	validation_0-auc:0.874688	validation_1-auc:0.835253
[43]	validation_0-auc:0.8752	validation_1-auc:0.834724
[44]	validation_0-auc:0.876505	validation_1-auc:0.836102
[45]	validation_0-auc:0.876402	validation_1-auc:0.835768
[46]	validation_0-auc:0.878151	validation_1-auc:0.83642
[47]	validation_0-auc:0.8787	validation_1-auc:0.836767
[48]	validation_0-auc:0.87914	validation_1-auc:0.836316
[49]	validation_0-auc:0.879509	validation_1-auc:0.836019
[50]	validation_0-auc:0.879609	validation_1-auc:0.835576
[51]	validation_0-auc:0.880392	validation_1-auc:0.83597
[52]	validation_0-auc:0.880906	validation_1-auc:0.836652
[53]	validation_0-auc:0.881196	validation_1-auc:0.837183
[54]	validation_0-auc:0.882012	validation_1-auc:0.837778
[55]	validation_0-auc:0.882738	validation_1-auc:0.837984
[56]	validation_0-auc:0.883416	validation_1-auc:0.838455
[57]	validation_0-auc:0.883959	validation_1-auc:0.838619
[58]	validation_0-auc:0.884248	validation_1-auc:0.838096
[59]	validation_0-auc:0.884295	validation_1-auc:0.838015
[60]	validation_0-auc:0.884566	validation_1-auc:0.837929
[61]	validation_0-auc:0.884832	validation_1-auc:0.838184
Stopping. Best iteration:
[31]	validation_0-auc:0.866862	validation_1-auc:0.839237

[0]	validation_0-auc:0.721845	validation_1-auc:0.729773
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.825015	validation_1-auc:0.81891
[2]	validation_0-auc:0.811637	validation_1-auc:0.807714
[3]	validation_0-auc:0.823134	validation_1-auc:0.817803
[4]	validation_0-auc:0.833902	validation_1-auc:0.827267
[5]	validation_0-auc:0.829359	validation_1-auc:0.823344
[6]	validation_0-auc:0.830639	validation_1-auc:0.823274
[7]	validation_0-auc:0.837037	validation_1-auc:0.828343
[8]	validation_0-auc:0.841772	validation_1-auc:0.831877
[9]	validation_0-auc:0.841507	validation_1-auc:0.830342
[10]	validation_0-auc:0.845543	validation_1-auc:0.832091
[11]	validation_0-auc:0.848388	validation_1-auc:0.833807
[12]	validation_0-auc:0.850064	validation_1-auc:0.835155
[13]	validation_0-auc:0.85001	validation_1-auc:0.83485
[14]	validation_0-auc:0.84984	validation_1-auc:0.834875
[15]	validation_0-auc:0.851484	validation_1-auc:0.835945
[16]	validation_0-auc:0.853356	validation_1-auc:0.837179
[17]	validation_0-auc:0.855071	validation_1-auc:0.838144
[18]	validation_0-auc:0.857018	validation_1-auc:0.839019
[19]	validation_0-auc:0.856393	validation_1-auc:0.837812
[20]	validation_0-auc:0.858267	validation_1-auc:0.839037
[21]	validation_0-auc:0.858189	validation_1-auc:0.83824
[22]	validation_0-auc:0.857283	validation_1-auc:0.837705
[23]	validation_0-auc:0.859071	validation_1-auc:0.837932
[24]	validation_0-auc:0.86031	validation_1-auc:0.839095
[25]	validation_0-auc:0.860258	validation_1-auc:0.838734
[26]	validation_0-auc:0.860613	validation_1-auc:0.838066
[27]	validation_0-auc:0.862691	validation_1-auc:0.838902
[28]	validation_0-auc:0.864773	validation_1-auc:0.839593
[29]	validation_0-auc:0.864699	validation_1-auc:0.839699
[30]	validation_0-auc:0.866348	validation_1-auc:0.840204
[31]	validation_0-auc:0.867754	validation_1-auc:0.840613
[32]	validation_0-auc:0.868087	validation_1-auc:0.840599
[33]	validation_0-auc:0.86853	validation_1-auc:0.839979
[34]	validation_0-auc:0.868825	validation_1-auc:0.839871
[35]	validation_0-auc:0.871375	validation_1-auc:0.840786
[36]	validation_0-auc:0.87348	validation_1-auc:0.84023
[37]	validation_0-auc:0.874177	validation_1-auc:0.839431
[38]	validation_0-auc:0.874546	validation_1-auc:0.839116
[39]	validation_0-auc:0.874621	validation_1-auc:0.838802
[40]	validation_0-auc:0.875086	validation_1-auc:0.838818
[41]	validation_0-auc:0.875383	validation_1-auc:0.838294
[42]	validation_0-auc:0.877071	validation_1-auc:0.839424
[43]	validation_0-auc:0.877365	validation_1-auc:0.839264
[44]	validation_0-auc:0.878608	validation_1-auc:0.840054
[45]	validation_0-auc:0.878694	validation_1-auc:0.839473
[46]	validation_0-auc:0.880073	validation_1-auc:0.839959
[47]	validation_0-auc:0.881035	validation_1-auc:0.841166
[48]	validation_0-auc:0.881436	validation_1-auc:0.840613
[49]	validation_0-auc:0.882167	validation_1-auc:0.840508
[50]	validation_0-auc:0.882248	validation_1-auc:0.840174
[51]	validation_0-auc:0.883353	validation_1-auc:0.840931
[52]	validation_0-auc:0.884076	validation_1-auc:0.841613
[53]	validation_0-auc:0.884554	validation_1-auc:0.842245
[54]	validation_0-auc:0.885151	validation_1-auc:0.842868
[55]	validation_0-auc:0.885573	validation_1-auc:0.842586
[56]	validation_0-auc:0.88615	validation_1-auc:0.842918
[57]	validation_0-auc:0.886603	validation_1-auc:0.843368
[58]	validation_0-auc:0.886698	validation_1-auc:0.843051
[59]	validation_0-auc:0.88684	validation_1-auc:0.843147
[60]	validation_0-auc:0.887338	validation_1-auc:0.843434
[61]	validation_0-auc:0.887574	validation_1-auc:0.843365
[62]	validation_0-auc:0.887893	validation_1-auc:0.843429
[63]	validation_0-auc:0.888288	validation_1-auc:0.843277
[64]	validation_0-auc:0.888505	validation_1-auc:0.843497
[65]	validation_0-auc:0.888902	validation_1-auc:0.843524
[66]	validation_0-auc:0.889002	validation_1-auc:0.843442
[67]	validation_0-auc:0.889172	validation_1-auc:0.843444
[68]	validation_0-auc:0.889505	validation_1-auc:0.84335
[69]	validation_0-auc:0.889678	validation_1-auc:0.843433
[70]	validation_0-auc:0.889832	validation_1-auc:0.843483
[71]	validation_0-auc:0.890227	validation_1-auc:0.843424
[72]	validation_0-auc:0.890328	validation_1-auc:0.843295
[73]	validation_0-auc:0.890488	validation_1-auc:0.843217
[74]	validation_0-auc:0.890532	validation_1-auc:0.843291
[75]	validation_0-auc:0.890771	validation_1-auc:0.842944
[76]	validation_0-auc:0.890979	validation_1-auc:0.843173
[77]	validation_0-auc:0.891137	validation_1-auc:0.842995
[78]	validation_0-auc:0.891391	validation_1-auc:0.843095
[79]	validation_0-auc:0.891539	validation_1-auc:0.842824
[80]	validation_0-auc:0.891743	validation_1-auc:0.843033
[81]	validation_0-auc:0.89187	validation_1-auc:0.843022
[82]	validation_0-auc:0.892165	validation_1-auc:0.843051
[83]	validation_0-auc:0.892317	validation_1-auc:0.842787
[84]	validation_0-auc:0.892581	validation_1-auc:0.842564
[85]	validation_0-auc:0.892614	validation_1-auc:0.84258
[86]	validation_0-auc:0.892827	validation_1-auc:0.842391
[87]	validation_0-auc:0.89297	validation_1-auc:0.842428
[88]	validation_0-auc:0.893057	validation_1-auc:0.842295
[89]	validation_0-auc:0.89308	validation_1-auc:0.842146
[90]	validation_0-auc:0.893569	validation_1-auc:0.842228
[91]	validation_0-auc:0.893742	validation_1-auc:0.842037
[92]	validation_0-auc:0.893831	validation_1-auc:0.842025
[93]	validation_0-auc:0.894054	validation_1-auc:0.841913
[94]	validation_0-auc:0.894161	validation_1-auc:0.841848
[95]	validation_0-auc:0.894718	validation_1-auc:0.841673
Stopping. Best iteration:
[65]	validation_0-auc:0.888902	validation_1-auc:0.843524

[0]	validation_0-auc:0.736837	validation_1-auc:0.734096
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.822074	validation_1-auc:0.815712
[2]	validation_0-auc:0.814373	validation_1-auc:0.806888
[3]	validation_0-auc:0.828324	validation_1-auc:0.821214
[4]	validation_0-auc:0.832905	validation_1-auc:0.825577
[5]	validation_0-auc:0.82953	validation_1-auc:0.822776
[6]	validation_0-auc:0.830211	validation_1-auc:0.822255
[7]	validation_0-auc:0.833664	validation_1-auc:0.825988
[8]	validation_0-auc:0.839	validation_1-auc:0.82913
[9]	validation_0-auc:0.838751	validation_1-auc:0.828431
[10]	validation_0-auc:0.843419	validation_1-auc:0.830844
[11]	validation_0-auc:0.845813	validation_1-auc:0.833661
[12]	validation_0-auc:0.847898	validation_1-auc:0.835004
[13]	validation_0-auc:0.848932	validation_1-auc:0.834419
[14]	validation_0-auc:0.849368	validation_1-auc:0.833846
[15]	validation_0-auc:0.850823	validation_1-auc:0.835582
[16]	validation_0-auc:0.852969	validation_1-auc:0.836422
[17]	validation_0-auc:0.854636	validation_1-auc:0.838088
[18]	validation_0-auc:0.856559	validation_1-auc:0.838071
[19]	validation_0-auc:0.856225	validation_1-auc:0.836999
[20]	validation_0-auc:0.857077	validation_1-auc:0.837472
[21]	validation_0-auc:0.858178	validation_1-auc:0.837106
[22]	validation_0-auc:0.85765	validation_1-auc:0.836876
[23]	validation_0-auc:0.859007	validation_1-auc:0.83715
[24]	validation_0-auc:0.859856	validation_1-auc:0.838029
[25]	validation_0-auc:0.860356	validation_1-auc:0.837686
[26]	validation_0-auc:0.860528	validation_1-auc:0.837073
[27]	validation_0-auc:0.862296	validation_1-auc:0.838256
[28]	validation_0-auc:0.864264	validation_1-auc:0.837744
[29]	validation_0-auc:0.86485	validation_1-auc:0.837597
[30]	validation_0-auc:0.86596	validation_1-auc:0.838473
[31]	validation_0-auc:0.8685	validation_1-auc:0.83968
[32]	validation_0-auc:0.868873	validation_1-auc:0.838396
[33]	validation_0-auc:0.869053	validation_1-auc:0.838304
[34]	validation_0-auc:0.869435	validation_1-auc:0.837779
[35]	validation_0-auc:0.871591	validation_1-auc:0.838891
[36]	validation_0-auc:0.873573	validation_1-auc:0.839362
[37]	validation_0-auc:0.873636	validation_1-auc:0.838777
[38]	validation_0-auc:0.873981	validation_1-auc:0.839223
[39]	validation_0-auc:0.873968	validation_1-auc:0.839078
[40]	validation_0-auc:0.874628	validation_1-auc:0.83853
[41]	validation_0-auc:0.874824	validation_1-auc:0.837975
[42]	validation_0-auc:0.876437	validation_1-auc:0.839473
[43]	validation_0-auc:0.876817	validation_1-auc:0.838447
[44]	validation_0-auc:0.877913	validation_1-auc:0.839642
[45]	validation_0-auc:0.878082	validation_1-auc:0.839044
[46]	validation_0-auc:0.879228	validation_1-auc:0.840144
[47]	validation_0-auc:0.880099	validation_1-auc:0.840838
[48]	validation_0-auc:0.880584	validation_1-auc:0.840936
[49]	validation_0-auc:0.880984	validation_1-auc:0.841541
[50]	validation_0-auc:0.881258	validation_1-auc:0.840748
[51]	validation_0-auc:0.882189	validation_1-auc:0.841425
[52]	validation_0-auc:0.883032	validation_1-auc:0.842079
[53]	validation_0-auc:0.883439	validation_1-auc:0.842692
[54]	validation_0-auc:0.884043	validation_1-auc:0.843499
[55]	validation_0-auc:0.884624	validation_1-auc:0.843216
[56]	validation_0-auc:0.885159	validation_1-auc:0.843123
[57]	validation_0-auc:0.885424	validation_1-auc:0.843506
[58]	validation_0-auc:0.885787	validation_1-auc:0.843427
[59]	validation_0-auc:0.885848	validation_1-auc:0.843482
[60]	validation_0-auc:0.886075	validation_1-auc:0.843796
[61]	validation_0-auc:0.88648	validation_1-auc:0.843758
[62]	validation_0-auc:0.886868	validation_1-auc:0.843708
[63]	validation_0-auc:0.887319	validation_1-auc:0.843567
[64]	validation_0-auc:0.887807	validation_1-auc:0.84377
[65]	validation_0-auc:0.888018	validation_1-auc:0.843609
[66]	validation_0-auc:0.888123	validation_1-auc:0.843312
[67]	validation_0-auc:0.888566	validation_1-auc:0.843011
[68]	validation_0-auc:0.889183	validation_1-auc:0.843144
[69]	validation_0-auc:0.889448	validation_1-auc:0.84345
[70]	validation_0-auc:0.889608	validation_1-auc:0.843503
[71]	validation_0-auc:0.890122	validation_1-auc:0.843639
[72]	validation_0-auc:0.890185	validation_1-auc:0.843574
[73]	validation_0-auc:0.890466	validation_1-auc:0.843559
[74]	validation_0-auc:0.890699	validation_1-auc:0.843461
[75]	validation_0-auc:0.891303	validation_1-auc:0.843439
[76]	validation_0-auc:0.891243	validation_1-auc:0.84361
[77]	validation_0-auc:0.891415	validation_1-auc:0.84368
[78]	validation_0-auc:0.891635	validation_1-auc:0.843646
[79]	validation_0-auc:0.891898	validation_1-auc:0.843598
[80]	validation_0-auc:0.891939	validation_1-auc:0.843504
[81]	validation_0-auc:0.892304	validation_1-auc:0.843254
[82]	validation_0-auc:0.892479	validation_1-auc:0.842822
[83]	validation_0-auc:0.892824	validation_1-auc:0.842662
[84]	validation_0-auc:0.893263	validation_1-auc:0.842552
[85]	validation_0-auc:0.893594	validation_1-auc:0.842592
[86]	validation_0-auc:0.893725	validation_1-auc:0.842361
[87]	validation_0-auc:0.894256	validation_1-auc:0.842304
[88]	validation_0-auc:0.894328	validation_1-auc:0.842297
[89]	validation_0-auc:0.894411	validation_1-auc:0.842242
[90]	validation_0-auc:0.894705	validation_1-auc:0.84218
Stopping. Best iteration:
[60]	validation_0-auc:0.886075	validation_1-auc:0.843796

[0]	validation_0-auc:0.717208	validation_1-auc:0.727812
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.809573	validation_1-auc:0.813331
[2]	validation_0-auc:0.80395	validation_1-auc:0.809303
[3]	validation_0-auc:0.816248	validation_1-auc:0.820938
[4]	validation_0-auc:0.824753	validation_1-auc:0.825794
[5]	validation_0-auc:0.821994	validation_1-auc:0.823793
[6]	validation_0-auc:0.822976	validation_1-auc:0.824266
[7]	validation_0-auc:0.82934	validation_1-auc:0.829688
[8]	validation_0-auc:0.833576	validation_1-auc:0.832642
[9]	validation_0-auc:0.834068	validation_1-auc:0.830283
[10]	validation_0-auc:0.838247	validation_1-auc:0.832031
[11]	validation_0-auc:0.841382	validation_1-auc:0.834056
[12]	validation_0-auc:0.843892	validation_1-auc:0.835138
[13]	validation_0-auc:0.843954	validation_1-auc:0.833652
[14]	validation_0-auc:0.843398	validation_1-auc:0.831399
[15]	validation_0-auc:0.846961	validation_1-auc:0.833504
[16]	validation_0-auc:0.849324	validation_1-auc:0.834163
[17]	validation_0-auc:0.851553	validation_1-auc:0.834923
[18]	validation_0-auc:0.853316	validation_1-auc:0.836029
[19]	validation_0-auc:0.852355	validation_1-auc:0.836569
[20]	validation_0-auc:0.854342	validation_1-auc:0.837467
[21]	validation_0-auc:0.854268	validation_1-auc:0.836994
[22]	validation_0-auc:0.853542	validation_1-auc:0.835193
[23]	validation_0-auc:0.855277	validation_1-auc:0.837018
[24]	validation_0-auc:0.857082	validation_1-auc:0.839001
[25]	validation_0-auc:0.85763	validation_1-auc:0.837799
[26]	validation_0-auc:0.858009	validation_1-auc:0.837573
[27]	validation_0-auc:0.859597	validation_1-auc:0.838584
[28]	validation_0-auc:0.861194	validation_1-auc:0.839271
[29]	validation_0-auc:0.860978	validation_1-auc:0.839415
[30]	validation_0-auc:0.862042	validation_1-auc:0.839836
[31]	validation_0-auc:0.863346	validation_1-auc:0.840379
[32]	validation_0-auc:0.863611	validation_1-auc:0.839452
[33]	validation_0-auc:0.864191	validation_1-auc:0.839541
[34]	validation_0-auc:0.864325	validation_1-auc:0.838546
[35]	validation_0-auc:0.866226	validation_1-auc:0.838866
[36]	validation_0-auc:0.867687	validation_1-auc:0.838601
[37]	validation_0-auc:0.867708	validation_1-auc:0.837832
[38]	validation_0-auc:0.867844	validation_1-auc:0.837074
[39]	validation_0-auc:0.868022	validation_1-auc:0.836798
[40]	validation_0-auc:0.868243	validation_1-auc:0.836399
[41]	validation_0-auc:0.868147	validation_1-auc:0.834995
[42]	validation_0-auc:0.869253	validation_1-auc:0.836406
[43]	validation_0-auc:0.869456	validation_1-auc:0.835879
[44]	validation_0-auc:0.870899	validation_1-auc:0.837103
[45]	validation_0-auc:0.870867	validation_1-auc:0.836073
[46]	validation_0-auc:0.872491	validation_1-auc:0.836406
[47]	validation_0-auc:0.873587	validation_1-auc:0.837012
[48]	validation_0-auc:0.873936	validation_1-auc:0.83661
[49]	validation_0-auc:0.874852	validation_1-auc:0.836541
[50]	validation_0-auc:0.87495	validation_1-auc:0.835912
[51]	validation_0-auc:0.875651	validation_1-auc:0.836387
[52]	validation_0-auc:0.876333	validation_1-auc:0.836816
[53]	validation_0-auc:0.876628	validation_1-auc:0.837388
[54]	validation_0-auc:0.877094	validation_1-auc:0.837832
[55]	validation_0-auc:0.877855	validation_1-auc:0.83797
[56]	validation_0-auc:0.87859	validation_1-auc:0.838514
[57]	validation_0-auc:0.878867	validation_1-auc:0.838492
[58]	validation_0-auc:0.87909	validation_1-auc:0.838433
[59]	validation_0-auc:0.879142	validation_1-auc:0.838469
[60]	validation_0-auc:0.879478	validation_1-auc:0.83843
[61]	validation_0-auc:0.879742	validation_1-auc:0.838421
Stopping. Best iteration:
[31]	validation_0-auc:0.863346	validation_1-auc:0.840379

[0]	validation_0-auc:0.721794	validation_1-auc:0.729972
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.81618	validation_1-auc:0.815586
[2]	validation_0-auc:0.810597	validation_1-auc:0.8122
[3]	validation_0-auc:0.824888	validation_1-auc:0.823809
[4]	validation_0-auc:0.834641	validation_1-auc:0.829674
[5]	validation_0-auc:0.832179	validation_1-auc:0.829251
[6]	validation_0-auc:0.831195	validation_1-auc:0.828099
[7]	validation_0-auc:0.837771	validation_1-auc:0.832911
[8]	validation_0-auc:0.841742	validation_1-auc:0.835194
[9]	validation_0-auc:0.842543	validation_1-auc:0.834109
[10]	validation_0-auc:0.845366	validation_1-auc:0.835114
[11]	validation_0-auc:0.847758	validation_1-auc:0.835784
[12]	validation_0-auc:0.849917	validation_1-auc:0.836811
[13]	validation_0-auc:0.850203	validation_1-auc:0.835643
[14]	validation_0-auc:0.849286	validation_1-auc:0.833685
[15]	validation_0-auc:0.851395	validation_1-auc:0.835117
[16]	validation_0-auc:0.852895	validation_1-auc:0.836239
[17]	validation_0-auc:0.854203	validation_1-auc:0.836926
[18]	validation_0-auc:0.855673	validation_1-auc:0.838517
[19]	validation_0-auc:0.854634	validation_1-auc:0.837588
[20]	validation_0-auc:0.856513	validation_1-auc:0.838383
[21]	validation_0-auc:0.856176	validation_1-auc:0.837248
[22]	validation_0-auc:0.855526	validation_1-auc:0.836975
[23]	validation_0-auc:0.857667	validation_1-auc:0.838367
[24]	validation_0-auc:0.858315	validation_1-auc:0.839262
[25]	validation_0-auc:0.858551	validation_1-auc:0.838832
[26]	validation_0-auc:0.85897	validation_1-auc:0.838158
[27]	validation_0-auc:0.860674	validation_1-auc:0.839041
[28]	validation_0-auc:0.862971	validation_1-auc:0.839673
[29]	validation_0-auc:0.862775	validation_1-auc:0.83966
[30]	validation_0-auc:0.863879	validation_1-auc:0.840524
[31]	validation_0-auc:0.865002	validation_1-auc:0.841838
[32]	validation_0-auc:0.865369	validation_1-auc:0.840705
[33]	validation_0-auc:0.865555	validation_1-auc:0.840092
[34]	validation_0-auc:0.865692	validation_1-auc:0.840111
[35]	validation_0-auc:0.86789	validation_1-auc:0.840698
[36]	validation_0-auc:0.869532	validation_1-auc:0.840125
[37]	validation_0-auc:0.869721	validation_1-auc:0.840024
[38]	validation_0-auc:0.869883	validation_1-auc:0.839827
[39]	validation_0-auc:0.869884	validation_1-auc:0.839855
[40]	validation_0-auc:0.870296	validation_1-auc:0.839853
[41]	validation_0-auc:0.870641	validation_1-auc:0.839354
[42]	validation_0-auc:0.872166	validation_1-auc:0.84049
[43]	validation_0-auc:0.872276	validation_1-auc:0.840176
[44]	validation_0-auc:0.873625	validation_1-auc:0.840769
[45]	validation_0-auc:0.873717	validation_1-auc:0.840161
[46]	validation_0-auc:0.87494	validation_1-auc:0.840237
[47]	validation_0-auc:0.875626	validation_1-auc:0.840903
[48]	validation_0-auc:0.875899	validation_1-auc:0.840165
[49]	validation_0-auc:0.876482	validation_1-auc:0.840187
[50]	validation_0-auc:0.876788	validation_1-auc:0.840117
[51]	validation_0-auc:0.877905	validation_1-auc:0.840337
[52]	validation_0-auc:0.878554	validation_1-auc:0.840965
[53]	validation_0-auc:0.879002	validation_1-auc:0.841274
[54]	validation_0-auc:0.879562	validation_1-auc:0.841813
[55]	validation_0-auc:0.87992	validation_1-auc:0.841831
[56]	validation_0-auc:0.880455	validation_1-auc:0.842249
[57]	validation_0-auc:0.880792	validation_1-auc:0.842402
[58]	validation_0-auc:0.880984	validation_1-auc:0.842092
[59]	validation_0-auc:0.881109	validation_1-auc:0.842093
[60]	validation_0-auc:0.881661	validation_1-auc:0.842264
[61]	validation_0-auc:0.882081	validation_1-auc:0.842373
[62]	validation_0-auc:0.88227	validation_1-auc:0.842639
[63]	validation_0-auc:0.882738	validation_1-auc:0.842615
[64]	validation_0-auc:0.882953	validation_1-auc:0.842832
[65]	validation_0-auc:0.883108	validation_1-auc:0.843105
[66]	validation_0-auc:0.883208	validation_1-auc:0.843231
[67]	validation_0-auc:0.883484	validation_1-auc:0.843256
[68]	validation_0-auc:0.883699	validation_1-auc:0.842875
[69]	validation_0-auc:0.883757	validation_1-auc:0.842887
[70]	validation_0-auc:0.883804	validation_1-auc:0.842764
[71]	validation_0-auc:0.884019	validation_1-auc:0.842942
[72]	validation_0-auc:0.884156	validation_1-auc:0.84296
[73]	validation_0-auc:0.884323	validation_1-auc:0.842994
[74]	validation_0-auc:0.88441	validation_1-auc:0.843076
[75]	validation_0-auc:0.884548	validation_1-auc:0.842907
[76]	validation_0-auc:0.884517	validation_1-auc:0.842987
[77]	validation_0-auc:0.884575	validation_1-auc:0.843024
[78]	validation_0-auc:0.884859	validation_1-auc:0.843041
[79]	validation_0-auc:0.885248	validation_1-auc:0.843286
[80]	validation_0-auc:0.885342	validation_1-auc:0.843256
[81]	validation_0-auc:0.885508	validation_1-auc:0.843218
[82]	validation_0-auc:0.885596	validation_1-auc:0.843142
[83]	validation_0-auc:0.885733	validation_1-auc:0.84328
[84]	validation_0-auc:0.885838	validation_1-auc:0.843147
[85]	validation_0-auc:0.886048	validation_1-auc:0.842863
[86]	validation_0-auc:0.886314	validation_1-auc:0.84272
[87]	validation_0-auc:0.886552	validation_1-auc:0.842872
[88]	validation_0-auc:0.886671	validation_1-auc:0.842898
[89]	validation_0-auc:0.88692	validation_1-auc:0.842702
[90]	validation_0-auc:0.887247	validation_1-auc:0.842357
[91]	validation_0-auc:0.887707	validation_1-auc:0.842458
[92]	validation_0-auc:0.887829	validation_1-auc:0.84241
[93]	validation_0-auc:0.887863	validation_1-auc:0.842457
[94]	validation_0-auc:0.888021	validation_1-auc:0.842359
[95]	validation_0-auc:0.888586	validation_1-auc:0.841931
[96]	validation_0-auc:0.888598	validation_1-auc:0.842038
[97]	validation_0-auc:0.888664	validation_1-auc:0.841914
[98]	validation_0-auc:0.8888	validation_1-auc:0.841935
[99]	validation_0-auc:0.888846	validation_1-auc:0.841805
[0]	validation_0-auc:0.740963	validation_1-auc:0.745305
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.822887	validation_1-auc:0.821497
[2]	validation_0-auc:0.813789	validation_1-auc:0.809612
[3]	validation_0-auc:0.827305	validation_1-auc:0.821955
[4]	validation_0-auc:0.83233	validation_1-auc:0.826429
[5]	validation_0-auc:0.83172	validation_1-auc:0.822129
[6]	validation_0-auc:0.832023	validation_1-auc:0.820981
[7]	validation_0-auc:0.835138	validation_1-auc:0.824732
[8]	validation_0-auc:0.840103	validation_1-auc:0.827515
[9]	validation_0-auc:0.839281	validation_1-auc:0.825906
[10]	validation_0-auc:0.843592	validation_1-auc:0.828638
[11]	validation_0-auc:0.846217	validation_1-auc:0.833174
[12]	validation_0-auc:0.848386	validation_1-auc:0.834588
[13]	validation_0-auc:0.849496	validation_1-auc:0.833773
[14]	validation_0-auc:0.849095	validation_1-auc:0.833212
[15]	validation_0-auc:0.850589	validation_1-auc:0.835387
[16]	validation_0-auc:0.851746	validation_1-auc:0.835725
[17]	validation_0-auc:0.854144	validation_1-auc:0.837466
[18]	validation_0-auc:0.855417	validation_1-auc:0.838066
[19]	validation_0-auc:0.854514	validation_1-auc:0.837763
[20]	validation_0-auc:0.85552	validation_1-auc:0.838079
[21]	validation_0-auc:0.856136	validation_1-auc:0.837891
[22]	validation_0-auc:0.85573	validation_1-auc:0.837673
[23]	validation_0-auc:0.857501	validation_1-auc:0.837776
[24]	validation_0-auc:0.858376	validation_1-auc:0.83902
[25]	validation_0-auc:0.858584	validation_1-auc:0.838329
[26]	validation_0-auc:0.858925	validation_1-auc:0.837917
[27]	validation_0-auc:0.859719	validation_1-auc:0.839459
[28]	validation_0-auc:0.861408	validation_1-auc:0.839827
[29]	validation_0-auc:0.861979	validation_1-auc:0.839394
[30]	validation_0-auc:0.863815	validation_1-auc:0.840433
[31]	validation_0-auc:0.865709	validation_1-auc:0.841593
[32]	validation_0-auc:0.865927	validation_1-auc:0.839924
[33]	validation_0-auc:0.866076	validation_1-auc:0.839817
[34]	validation_0-auc:0.865947	validation_1-auc:0.839194
[35]	validation_0-auc:0.867983	validation_1-auc:0.840204
[36]	validation_0-auc:0.869748	validation_1-auc:0.840745
[37]	validation_0-auc:0.869883	validation_1-auc:0.840638
[38]	validation_0-auc:0.869663	validation_1-auc:0.840153
[39]	validation_0-auc:0.869709	validation_1-auc:0.839812
[40]	validation_0-auc:0.869899	validation_1-auc:0.839148
[41]	validation_0-auc:0.869865	validation_1-auc:0.838532
[42]	validation_0-auc:0.871134	validation_1-auc:0.83956
[43]	validation_0-auc:0.871525	validation_1-auc:0.838586
[44]	validation_0-auc:0.872633	validation_1-auc:0.839367
[45]	validation_0-auc:0.872607	validation_1-auc:0.83907
[46]	validation_0-auc:0.873555	validation_1-auc:0.840426
[47]	validation_0-auc:0.874364	validation_1-auc:0.841433
[48]	validation_0-auc:0.87481	validation_1-auc:0.84099
[49]	validation_0-auc:0.875255	validation_1-auc:0.841202
[50]	validation_0-auc:0.875289	validation_1-auc:0.840693
[51]	validation_0-auc:0.876177	validation_1-auc:0.84147
[52]	validation_0-auc:0.877079	validation_1-auc:0.842299
[53]	validation_0-auc:0.877411	validation_1-auc:0.842925
[54]	validation_0-auc:0.877794	validation_1-auc:0.843446
[55]	validation_0-auc:0.878315	validation_1-auc:0.843288
[56]	validation_0-auc:0.878896	validation_1-auc:0.843329
[57]	validation_0-auc:0.879217	validation_1-auc:0.843467
[58]	validation_0-auc:0.879426	validation_1-auc:0.843377
[59]	validation_0-auc:0.879535	validation_1-auc:0.843356
[60]	validation_0-auc:0.879919	validation_1-auc:0.843744
[61]	validation_0-auc:0.880542	validation_1-auc:0.84383
[62]	validation_0-auc:0.880691	validation_1-auc:0.843983
[63]	validation_0-auc:0.881043	validation_1-auc:0.844164
[64]	validation_0-auc:0.881447	validation_1-auc:0.844251
[65]	validation_0-auc:0.881769	validation_1-auc:0.844174
[66]	validation_0-auc:0.881769	validation_1-auc:0.84413
[67]	validation_0-auc:0.882378	validation_1-auc:0.844388
[68]	validation_0-auc:0.882596	validation_1-auc:0.844166
[69]	validation_0-auc:0.882767	validation_1-auc:0.844062
[70]	validation_0-auc:0.883069	validation_1-auc:0.843938
[71]	validation_0-auc:0.883358	validation_1-auc:0.843956
[72]	validation_0-auc:0.883439	validation_1-auc:0.84391
[73]	validation_0-auc:0.883573	validation_1-auc:0.843779
[74]	validation_0-auc:0.883684	validation_1-auc:0.843789
[75]	validation_0-auc:0.883875	validation_1-auc:0.84386
[76]	validation_0-auc:0.883955	validation_1-auc:0.843917
[77]	validation_0-auc:0.884004	validation_1-auc:0.84391
[78]	validation_0-auc:0.884095	validation_1-auc:0.843805
[79]	validation_0-auc:0.884591	validation_1-auc:0.84377
[80]	validation_0-auc:0.884643	validation_1-auc:0.843996
[81]	validation_0-auc:0.884764	validation_1-auc:0.844039
[82]	validation_0-auc:0.884899	validation_1-auc:0.843964
[83]	validation_0-auc:0.885393	validation_1-auc:0.843945
[84]	validation_0-auc:0.885571	validation_1-auc:0.84384
[85]	validation_0-auc:0.88575	validation_1-auc:0.844052
[86]	validation_0-auc:0.886069	validation_1-auc:0.843944
[87]	validation_0-auc:0.886268	validation_1-auc:0.84395
[88]	validation_0-auc:0.886397	validation_1-auc:0.843973
[89]	validation_0-auc:0.886551	validation_1-auc:0.843937
[90]	validation_0-auc:0.886923	validation_1-auc:0.84418
[91]	validation_0-auc:0.887299	validation_1-auc:0.844073
[92]	validation_0-auc:0.887342	validation_1-auc:0.84384
[93]	validation_0-auc:0.88739	validation_1-auc:0.843615
[94]	validation_0-auc:0.888063	validation_1-auc:0.843304
[95]	validation_0-auc:0.888336	validation_1-auc:0.843171
[96]	validation_0-auc:0.888572	validation_1-auc:0.842938
[97]	validation_0-auc:0.888686	validation_1-auc:0.842989
Stopping. Best iteration:
[67]	validation_0-auc:0.882378	validation_1-auc:0.844388

[0]	validation_0-auc:0.806564	validation_1-auc:0.807879
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.813222	validation_1-auc:0.814069
[2]	validation_0-auc:0.824346	validation_1-auc:0.81954
[3]	validation_0-auc:0.823013	validation_1-auc:0.817793
[4]	validation_0-auc:0.82501	validation_1-auc:0.82085
[5]	validation_0-auc:0.827317	validation_1-auc:0.824114
[6]	validation_0-auc:0.828588	validation_1-auc:0.824279
[7]	validation_0-auc:0.83003	validation_1-auc:0.824394
[8]	validation_0-auc:0.830141	validation_1-auc:0.823415
[9]	validation_0-auc:0.831465	validation_1-auc:0.824209
[10]	validation_0-auc:0.832546	validation_1-auc:0.825038
[11]	validation_0-auc:0.832902	validation_1-auc:0.825393
[12]	validation_0-auc:0.833475	validation_1-auc:0.826162
[13]	validation_0-auc:0.835758	validation_1-auc:0.829582
[14]	validation_0-auc:0.837817	validation_1-auc:0.83159
[15]	validation_0-auc:0.838934	validation_1-auc:0.832087
[16]	validation_0-auc:0.839416	validation_1-auc:0.832332
[17]	validation_0-auc:0.841441	validation_1-auc:0.83307
[18]	validation_0-auc:0.842216	validation_1-auc:0.833377
[19]	validation_0-auc:0.84278	validation_1-auc:0.834278
[20]	validation_0-auc:0.84389	validation_1-auc:0.834801
[21]	validation_0-auc:0.845325	validation_1-auc:0.835655
[22]	validation_0-auc:0.846399	validation_1-auc:0.836368
[23]	validation_0-auc:0.847027	validation_1-auc:0.837646
[24]	validation_0-auc:0.847633	validation_1-auc:0.838251
[25]	validation_0-auc:0.848642	validation_1-auc:0.838586
[26]	validation_0-auc:0.849295	validation_1-auc:0.838726
[27]	validation_0-auc:0.849597	validation_1-auc:0.83845
[28]	validation_0-auc:0.849952	validation_1-auc:0.838506
[29]	validation_0-auc:0.851597	validation_1-auc:0.839393
[30]	validation_0-auc:0.851855	validation_1-auc:0.839927
[31]	validation_0-auc:0.852138	validation_1-auc:0.839714
[32]	validation_0-auc:0.852584	validation_1-auc:0.839271
[33]	validation_0-auc:0.853644	validation_1-auc:0.839517
[34]	validation_0-auc:0.853963	validation_1-auc:0.839671
[35]	validation_0-auc:0.854588	validation_1-auc:0.839505
[36]	validation_0-auc:0.855359	validation_1-auc:0.840182
[37]	validation_0-auc:0.856176	validation_1-auc:0.839653
[38]	validation_0-auc:0.856703	validation_1-auc:0.839686
[39]	validation_0-auc:0.857616	validation_1-auc:0.839464
[40]	validation_0-auc:0.858535	validation_1-auc:0.839425
[41]	validation_0-auc:0.859251	validation_1-auc:0.839742
[42]	validation_0-auc:0.859972	validation_1-auc:0.83978
[43]	validation_0-auc:0.860379	validation_1-auc:0.839618
[44]	validation_0-auc:0.860928	validation_1-auc:0.839061
[45]	validation_0-auc:0.861233	validation_1-auc:0.8396
[46]	validation_0-auc:0.861442	validation_1-auc:0.839624
[47]	validation_0-auc:0.862128	validation_1-auc:0.839775
[48]	validation_0-auc:0.862479	validation_1-auc:0.83958
[49]	validation_0-auc:0.863011	validation_1-auc:0.83986
[50]	validation_0-auc:0.863528	validation_1-auc:0.839928
[51]	validation_0-auc:0.864111	validation_1-auc:0.839644
[52]	validation_0-auc:0.864682	validation_1-auc:0.839431
[53]	validation_0-auc:0.864894	validation_1-auc:0.839099
[54]	validation_0-auc:0.865045	validation_1-auc:0.838965
[55]	validation_0-auc:0.865485	validation_1-auc:0.83877
[56]	validation_0-auc:0.865659	validation_1-auc:0.838593
[57]	validation_0-auc:0.866042	validation_1-auc:0.838384
[58]	validation_0-auc:0.86632	validation_1-auc:0.838455
[59]	validation_0-auc:0.866464	validation_1-auc:0.83839
[60]	validation_0-auc:0.866755	validation_1-auc:0.838102
[61]	validation_0-auc:0.866971	validation_1-auc:0.837771
[62]	validation_0-auc:0.867338	validation_1-auc:0.83819
[63]	validation_0-auc:0.86787	validation_1-auc:0.838298
[64]	validation_0-auc:0.868086	validation_1-auc:0.838147
[65]	validation_0-auc:0.868253	validation_1-auc:0.838083
[66]	validation_0-auc:0.86836	validation_1-auc:0.838071
Stopping. Best iteration:
[36]	validation_0-auc:0.855359	validation_1-auc:0.840182

[0]	validation_0-auc:0.808752	validation_1-auc:0.799538
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.814155	validation_1-auc:0.808631
[2]	validation_0-auc:0.822097	validation_1-auc:0.818521
[3]	validation_0-auc:0.824996	validation_1-auc:0.821016
[4]	validation_0-auc:0.825963	validation_1-auc:0.822125
[5]	validation_0-auc:0.828318	validation_1-auc:0.821873
[6]	validation_0-auc:0.830602	validation_1-auc:0.821908
[7]	validation_0-auc:0.831274	validation_1-auc:0.822271
[8]	validation_0-auc:0.832728	validation_1-auc:0.823545
[9]	validation_0-auc:0.834143	validation_1-auc:0.824065
[10]	validation_0-auc:0.835163	validation_1-auc:0.825245
[11]	validation_0-auc:0.836769	validation_1-auc:0.827402
[12]	validation_0-auc:0.837065	validation_1-auc:0.827606
[13]	validation_0-auc:0.83888	validation_1-auc:0.830196
[14]	validation_0-auc:0.840119	validation_1-auc:0.830559
[15]	validation_0-auc:0.8417	validation_1-auc:0.83201
[16]	validation_0-auc:0.842707	validation_1-auc:0.833228
[17]	validation_0-auc:0.843561	validation_1-auc:0.833106
[18]	validation_0-auc:0.844328	validation_1-auc:0.833713
[19]	validation_0-auc:0.84444	validation_1-auc:0.835928
[20]	validation_0-auc:0.845386	validation_1-auc:0.836078
[21]	validation_0-auc:0.845614	validation_1-auc:0.836585
[22]	validation_0-auc:0.846259	validation_1-auc:0.836932
[23]	validation_0-auc:0.846899	validation_1-auc:0.837537
[24]	validation_0-auc:0.847484	validation_1-auc:0.838143
[25]	validation_0-auc:0.849205	validation_1-auc:0.839042
[26]	validation_0-auc:0.850251	validation_1-auc:0.839023
[27]	validation_0-auc:0.85072	validation_1-auc:0.838899
[28]	validation_0-auc:0.851629	validation_1-auc:0.838847
[29]	validation_0-auc:0.85232	validation_1-auc:0.839387
[30]	validation_0-auc:0.852642	validation_1-auc:0.839619
[31]	validation_0-auc:0.853291	validation_1-auc:0.840024
[32]	validation_0-auc:0.854124	validation_1-auc:0.839951
[33]	validation_0-auc:0.854975	validation_1-auc:0.839917
[34]	validation_0-auc:0.856135	validation_1-auc:0.839838
[35]	validation_0-auc:0.857046	validation_1-auc:0.840381
[36]	validation_0-auc:0.85741	validation_1-auc:0.8408
[37]	validation_0-auc:0.857789	validation_1-auc:0.840806
[38]	validation_0-auc:0.858155	validation_1-auc:0.84107
[39]	validation_0-auc:0.858824	validation_1-auc:0.84101
[40]	validation_0-auc:0.85941	validation_1-auc:0.841506
[41]	validation_0-auc:0.860048	validation_1-auc:0.842101
[42]	validation_0-auc:0.86025	validation_1-auc:0.841948
[43]	validation_0-auc:0.860522	validation_1-auc:0.842036
[44]	validation_0-auc:0.860965	validation_1-auc:0.842301
[45]	validation_0-auc:0.861519	validation_1-auc:0.842456
[46]	validation_0-auc:0.861937	validation_1-auc:0.842474
[47]	validation_0-auc:0.862269	validation_1-auc:0.842714
[48]	validation_0-auc:0.862559	validation_1-auc:0.842486
[49]	validation_0-auc:0.863025	validation_1-auc:0.842423
[50]	validation_0-auc:0.863496	validation_1-auc:0.842829
[51]	validation_0-auc:0.863768	validation_1-auc:0.842668
[52]	validation_0-auc:0.864262	validation_1-auc:0.84272
[53]	validation_0-auc:0.864703	validation_1-auc:0.842731
[54]	validation_0-auc:0.865066	validation_1-auc:0.842704
[55]	validation_0-auc:0.865315	validation_1-auc:0.84278
[56]	validation_0-auc:0.865798	validation_1-auc:0.842555
[57]	validation_0-auc:0.866099	validation_1-auc:0.842333
[58]	validation_0-auc:0.86654	validation_1-auc:0.842647
[59]	validation_0-auc:0.866767	validation_1-auc:0.842891
[60]	validation_0-auc:0.86699	validation_1-auc:0.842946
[61]	validation_0-auc:0.867269	validation_1-auc:0.842803
[62]	validation_0-auc:0.86764	validation_1-auc:0.842946
[63]	validation_0-auc:0.868146	validation_1-auc:0.843071
[64]	validation_0-auc:0.868343	validation_1-auc:0.843233
[65]	validation_0-auc:0.86889	validation_1-auc:0.843189
[66]	validation_0-auc:0.869018	validation_1-auc:0.843203
[67]	validation_0-auc:0.869377	validation_1-auc:0.843102
[68]	validation_0-auc:0.869576	validation_1-auc:0.843008
[69]	validation_0-auc:0.869828	validation_1-auc:0.843215
[70]	validation_0-auc:0.870308	validation_1-auc:0.843049
[71]	validation_0-auc:0.870489	validation_1-auc:0.843076
[72]	validation_0-auc:0.870708	validation_1-auc:0.84323
[73]	validation_0-auc:0.871236	validation_1-auc:0.843089
[74]	validation_0-auc:0.871385	validation_1-auc:0.843148
[75]	validation_0-auc:0.871473	validation_1-auc:0.843113
[76]	validation_0-auc:0.871525	validation_1-auc:0.843165
[77]	validation_0-auc:0.871743	validation_1-auc:0.843175
[78]	validation_0-auc:0.871916	validation_1-auc:0.843163
[79]	validation_0-auc:0.872081	validation_1-auc:0.843127
[80]	validation_0-auc:0.87219	validation_1-auc:0.843217
[81]	validation_0-auc:0.872328	validation_1-auc:0.843213
[82]	validation_0-auc:0.872486	validation_1-auc:0.843369
[83]	validation_0-auc:0.872642	validation_1-auc:0.843282
[84]	validation_0-auc:0.872915	validation_1-auc:0.843394
[85]	validation_0-auc:0.873189	validation_1-auc:0.843339
[86]	validation_0-auc:0.873237	validation_1-auc:0.843311
[87]	validation_0-auc:0.873467	validation_1-auc:0.843177
[88]	validation_0-auc:0.873657	validation_1-auc:0.843256
[89]	validation_0-auc:0.87373	validation_1-auc:0.843077
[90]	validation_0-auc:0.873834	validation_1-auc:0.843074
[91]	validation_0-auc:0.874131	validation_1-auc:0.843086
[92]	validation_0-auc:0.874386	validation_1-auc:0.843006
[93]	validation_0-auc:0.874399	validation_1-auc:0.843005
[94]	validation_0-auc:0.874607	validation_1-auc:0.842878
[95]	validation_0-auc:0.874682	validation_1-auc:0.842823
[96]	validation_0-auc:0.874758	validation_1-auc:0.842755
[97]	validation_0-auc:0.874886	validation_1-auc:0.84263
[98]	validation_0-auc:0.875095	validation_1-auc:0.842895
[99]	validation_0-auc:0.875355	validation_1-auc:0.842756
[0]	validation_0-auc:0.82027	validation_1-auc:0.818497
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.824986	validation_1-auc:0.821693
[2]	validation_0-auc:0.827619	validation_1-auc:0.823498
[3]	validation_0-auc:0.829496	validation_1-auc:0.826497
[4]	validation_0-auc:0.830271	validation_1-auc:0.827178
[5]	validation_0-auc:0.830396	validation_1-auc:0.828033
[6]	validation_0-auc:0.831138	validation_1-auc:0.828416
[7]	validation_0-auc:0.832001	validation_1-auc:0.830374
[8]	validation_0-auc:0.832173	validation_1-auc:0.830509
[9]	validation_0-auc:0.833419	validation_1-auc:0.830492
[10]	validation_0-auc:0.834465	validation_1-auc:0.830492
[11]	validation_0-auc:0.835371	validation_1-auc:0.830762
[12]	validation_0-auc:0.838084	validation_1-auc:0.830698
[13]	validation_0-auc:0.840093	validation_1-auc:0.832864
[14]	validation_0-auc:0.842599	validation_1-auc:0.834286
[15]	validation_0-auc:0.843211	validation_1-auc:0.83628
[16]	validation_0-auc:0.844505	validation_1-auc:0.835855
[17]	validation_0-auc:0.844967	validation_1-auc:0.836287
[18]	validation_0-auc:0.845404	validation_1-auc:0.836205
[19]	validation_0-auc:0.845615	validation_1-auc:0.837452
[20]	validation_0-auc:0.846562	validation_1-auc:0.837294
[21]	validation_0-auc:0.846792	validation_1-auc:0.837458
[22]	validation_0-auc:0.84784	validation_1-auc:0.837779
[23]	validation_0-auc:0.848614	validation_1-auc:0.838374
[24]	validation_0-auc:0.849427	validation_1-auc:0.838081
[25]	validation_0-auc:0.850346	validation_1-auc:0.838927
[26]	validation_0-auc:0.850765	validation_1-auc:0.838686
[27]	validation_0-auc:0.850949	validation_1-auc:0.837965
[28]	validation_0-auc:0.851565	validation_1-auc:0.837888
[29]	validation_0-auc:0.852139	validation_1-auc:0.837892
[30]	validation_0-auc:0.852699	validation_1-auc:0.838732
[31]	validation_0-auc:0.853586	validation_1-auc:0.838899
[32]	validation_0-auc:0.854007	validation_1-auc:0.838936
[33]	validation_0-auc:0.854567	validation_1-auc:0.839724
[34]	validation_0-auc:0.85543	validation_1-auc:0.839813
[35]	validation_0-auc:0.856007	validation_1-auc:0.840675
[36]	validation_0-auc:0.856613	validation_1-auc:0.84121
[37]	validation_0-auc:0.85704	validation_1-auc:0.841175
[38]	validation_0-auc:0.858183	validation_1-auc:0.84155
[39]	validation_0-auc:0.858743	validation_1-auc:0.842177
[40]	validation_0-auc:0.859182	validation_1-auc:0.842212
[41]	validation_0-auc:0.859824	validation_1-auc:0.842693
[42]	validation_0-auc:0.860398	validation_1-auc:0.842827
[43]	validation_0-auc:0.860834	validation_1-auc:0.84255
[44]	validation_0-auc:0.861246	validation_1-auc:0.842194
[45]	validation_0-auc:0.861927	validation_1-auc:0.842308
[46]	validation_0-auc:0.862073	validation_1-auc:0.842397
[47]	validation_0-auc:0.862353	validation_1-auc:0.842439
[48]	validation_0-auc:0.862928	validation_1-auc:0.842538
[49]	validation_0-auc:0.863425	validation_1-auc:0.8424
[50]	validation_0-auc:0.864056	validation_1-auc:0.842676
[51]	validation_0-auc:0.864357	validation_1-auc:0.842524
[52]	validation_0-auc:0.864609	validation_1-auc:0.842659
[53]	validation_0-auc:0.864991	validation_1-auc:0.842811
[54]	validation_0-auc:0.86535	validation_1-auc:0.84274
[55]	validation_0-auc:0.865704	validation_1-auc:0.842865
[56]	validation_0-auc:0.866199	validation_1-auc:0.8428
[57]	validation_0-auc:0.866456	validation_1-auc:0.842721
[58]	validation_0-auc:0.866748	validation_1-auc:0.842644
[59]	validation_0-auc:0.866902	validation_1-auc:0.842741
[60]	validation_0-auc:0.867342	validation_1-auc:0.842627
[61]	validation_0-auc:0.867935	validation_1-auc:0.842722
[62]	validation_0-auc:0.868081	validation_1-auc:0.843042
[63]	validation_0-auc:0.868699	validation_1-auc:0.842935
[64]	validation_0-auc:0.868765	validation_1-auc:0.842849
[65]	validation_0-auc:0.869084	validation_1-auc:0.84274
[66]	validation_0-auc:0.869518	validation_1-auc:0.842769
[67]	validation_0-auc:0.869615	validation_1-auc:0.842594
[68]	validation_0-auc:0.870098	validation_1-auc:0.842384
[69]	validation_0-auc:0.870381	validation_1-auc:0.842551
[70]	validation_0-auc:0.870476	validation_1-auc:0.842501
[71]	validation_0-auc:0.870684	validation_1-auc:0.842585
[72]	validation_0-auc:0.870693	validation_1-auc:0.842682
[73]	validation_0-auc:0.870835	validation_1-auc:0.842578
[74]	validation_0-auc:0.870958	validation_1-auc:0.842491
[75]	validation_0-auc:0.871117	validation_1-auc:0.842452
[76]	validation_0-auc:0.871219	validation_1-auc:0.842313
[77]	validation_0-auc:0.87162	validation_1-auc:0.842318
[78]	validation_0-auc:0.871935	validation_1-auc:0.842509
[79]	validation_0-auc:0.872048	validation_1-auc:0.842292
[80]	validation_0-auc:0.872288	validation_1-auc:0.842413
[81]	validation_0-auc:0.872504	validation_1-auc:0.842464
[82]	validation_0-auc:0.872597	validation_1-auc:0.84242
[83]	validation_0-auc:0.872699	validation_1-auc:0.842408
[84]	validation_0-auc:0.872864	validation_1-auc:0.842397
[85]	validation_0-auc:0.872929	validation_1-auc:0.842332
[86]	validation_0-auc:0.873292	validation_1-auc:0.842409
[87]	validation_0-auc:0.873366	validation_1-auc:0.842429
[88]	validation_0-auc:0.873532	validation_1-auc:0.842624
[89]	validation_0-auc:0.873666	validation_1-auc:0.842719
[90]	validation_0-auc:0.873764	validation_1-auc:0.842678
[91]	validation_0-auc:0.873889	validation_1-auc:0.842733
[92]	validation_0-auc:0.874199	validation_1-auc:0.842561
Stopping. Best iteration:
[62]	validation_0-auc:0.868081	validation_1-auc:0.843042

[0]	validation_0-auc:0.807908	validation_1-auc:0.80725
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.812758	validation_1-auc:0.813847
[2]	validation_0-auc:0.823388	validation_1-auc:0.820791
[3]	validation_0-auc:0.825595	validation_1-auc:0.824416
[4]	validation_0-auc:0.826144	validation_1-auc:0.824778
[5]	validation_0-auc:0.827835	validation_1-auc:0.824887
[6]	validation_0-auc:0.82898	validation_1-auc:0.825512
[7]	validation_0-auc:0.829906	validation_1-auc:0.825893
[8]	validation_0-auc:0.830393	validation_1-auc:0.82604
[9]	validation_0-auc:0.831299	validation_1-auc:0.826866
[10]	validation_0-auc:0.832295	validation_1-auc:0.826932
[11]	validation_0-auc:0.83449	validation_1-auc:0.828599
[12]	validation_0-auc:0.834629	validation_1-auc:0.828345
[13]	validation_0-auc:0.837725	validation_1-auc:0.8319
[14]	validation_0-auc:0.838097	validation_1-auc:0.831941
[15]	validation_0-auc:0.838483	validation_1-auc:0.832163
[16]	validation_0-auc:0.838958	validation_1-auc:0.833213
[17]	validation_0-auc:0.840148	validation_1-auc:0.833612
[18]	validation_0-auc:0.840839	validation_1-auc:0.834048
[19]	validation_0-auc:0.841189	validation_1-auc:0.835096
[20]	validation_0-auc:0.842023	validation_1-auc:0.835207
[21]	validation_0-auc:0.84295	validation_1-auc:0.83597
[22]	validation_0-auc:0.844016	validation_1-auc:0.836358
[23]	validation_0-auc:0.845004	validation_1-auc:0.837758
[24]	validation_0-auc:0.845611	validation_1-auc:0.838423
[25]	validation_0-auc:0.846499	validation_1-auc:0.838329
[26]	validation_0-auc:0.847527	validation_1-auc:0.838481
[27]	validation_0-auc:0.847883	validation_1-auc:0.83804
[28]	validation_0-auc:0.848434	validation_1-auc:0.837716
[29]	validation_0-auc:0.849795	validation_1-auc:0.838867
[30]	validation_0-auc:0.850083	validation_1-auc:0.83939
[31]	validation_0-auc:0.850408	validation_1-auc:0.839228
[32]	validation_0-auc:0.850671	validation_1-auc:0.839367
[33]	validation_0-auc:0.851997	validation_1-auc:0.840061
[34]	validation_0-auc:0.852362	validation_1-auc:0.840047
[35]	validation_0-auc:0.852976	validation_1-auc:0.839782
[36]	validation_0-auc:0.853408	validation_1-auc:0.840085
[37]	validation_0-auc:0.853868	validation_1-auc:0.84012
[38]	validation_0-auc:0.854366	validation_1-auc:0.840126
[39]	validation_0-auc:0.85502	validation_1-auc:0.840102
[40]	validation_0-auc:0.855724	validation_1-auc:0.839915
[41]	validation_0-auc:0.856268	validation_1-auc:0.839776
[42]	validation_0-auc:0.856811	validation_1-auc:0.839747
[43]	validation_0-auc:0.857157	validation_1-auc:0.839641
[44]	validation_0-auc:0.857796	validation_1-auc:0.83979
[45]	validation_0-auc:0.858043	validation_1-auc:0.839904
[46]	validation_0-auc:0.858417	validation_1-auc:0.83977
[47]	validation_0-auc:0.858923	validation_1-auc:0.839623
[48]	validation_0-auc:0.859424	validation_1-auc:0.840192
[49]	validation_0-auc:0.85974	validation_1-auc:0.840213
[50]	validation_0-auc:0.860014	validation_1-auc:0.840235
[51]	validation_0-auc:0.860312	validation_1-auc:0.840315
[52]	validation_0-auc:0.860766	validation_1-auc:0.840366
[53]	validation_0-auc:0.860837	validation_1-auc:0.840242
[54]	validation_0-auc:0.861172	validation_1-auc:0.839955
[55]	validation_0-auc:0.861529	validation_1-auc:0.839975
[56]	validation_0-auc:0.861805	validation_1-auc:0.839891
[57]	validation_0-auc:0.86197	validation_1-auc:0.839837
[58]	validation_0-auc:0.862228	validation_1-auc:0.839871
[59]	validation_0-auc:0.862386	validation_1-auc:0.840154
[60]	validation_0-auc:0.862711	validation_1-auc:0.83997
[61]	validation_0-auc:0.862873	validation_1-auc:0.839812
[62]	validation_0-auc:0.863025	validation_1-auc:0.839451
[63]	validation_0-auc:0.863447	validation_1-auc:0.839641
[64]	validation_0-auc:0.863978	validation_1-auc:0.83975
[65]	validation_0-auc:0.86407	validation_1-auc:0.839713
[66]	validation_0-auc:0.864183	validation_1-auc:0.839757
[67]	validation_0-auc:0.864704	validation_1-auc:0.839917
[68]	validation_0-auc:0.864921	validation_1-auc:0.839709
[69]	validation_0-auc:0.865072	validation_1-auc:0.839627
[70]	validation_0-auc:0.86519	validation_1-auc:0.839672
[71]	validation_0-auc:0.865199	validation_1-auc:0.839553
[72]	validation_0-auc:0.865704	validation_1-auc:0.839194
[73]	validation_0-auc:0.865735	validation_1-auc:0.839161
[74]	validation_0-auc:0.865878	validation_1-auc:0.83915
[75]	validation_0-auc:0.866261	validation_1-auc:0.839047
[76]	validation_0-auc:0.866367	validation_1-auc:0.839125
[77]	validation_0-auc:0.866604	validation_1-auc:0.839164
[78]	validation_0-auc:0.866713	validation_1-auc:0.839139
[79]	validation_0-auc:0.867016	validation_1-auc:0.839237
[80]	validation_0-auc:0.867187	validation_1-auc:0.839175
[81]	validation_0-auc:0.867233	validation_1-auc:0.839236
[82]	validation_0-auc:0.867339	validation_1-auc:0.839096
Stopping. Best iteration:
[52]	validation_0-auc:0.860766	validation_1-auc:0.840366

[0]	validation_0-auc:0.811089	validation_1-auc:0.810522
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.816145	validation_1-auc:0.80958
[2]	validation_0-auc:0.823006	validation_1-auc:0.815162
[3]	validation_0-auc:0.826799	validation_1-auc:0.820588
[4]	validation_0-auc:0.828171	validation_1-auc:0.822185
[5]	validation_0-auc:0.830317	validation_1-auc:0.821841
[6]	validation_0-auc:0.831526	validation_1-auc:0.82214
[7]	validation_0-auc:0.833025	validation_1-auc:0.823823
[8]	validation_0-auc:0.833822	validation_1-auc:0.824124
[9]	validation_0-auc:0.835479	validation_1-auc:0.825068
[10]	validation_0-auc:0.83707	validation_1-auc:0.82645
[11]	validation_0-auc:0.838077	validation_1-auc:0.828302
[12]	validation_0-auc:0.838651	validation_1-auc:0.828116
[13]	validation_0-auc:0.83997	validation_1-auc:0.831353
[14]	validation_0-auc:0.840415	validation_1-auc:0.831227
[15]	validation_0-auc:0.841469	validation_1-auc:0.831305
[16]	validation_0-auc:0.842403	validation_1-auc:0.832497
[17]	validation_0-auc:0.843516	validation_1-auc:0.833213
[18]	validation_0-auc:0.844416	validation_1-auc:0.833861
[19]	validation_0-auc:0.844859	validation_1-auc:0.836085
[20]	validation_0-auc:0.846057	validation_1-auc:0.836833
[21]	validation_0-auc:0.846358	validation_1-auc:0.837439
[22]	validation_0-auc:0.84674	validation_1-auc:0.83818
[23]	validation_0-auc:0.847113	validation_1-auc:0.837877
[24]	validation_0-auc:0.84726	validation_1-auc:0.83782
[25]	validation_0-auc:0.848939	validation_1-auc:0.838624
[26]	validation_0-auc:0.849491	validation_1-auc:0.838651
[27]	validation_0-auc:0.850175	validation_1-auc:0.838495
[28]	validation_0-auc:0.850442	validation_1-auc:0.838131
[29]	validation_0-auc:0.851316	validation_1-auc:0.838669
[30]	validation_0-auc:0.851752	validation_1-auc:0.839129
[31]	validation_0-auc:0.852408	validation_1-auc:0.839758
[32]	validation_0-auc:0.85315	validation_1-auc:0.839857
[33]	validation_0-auc:0.853983	validation_1-auc:0.840091
[34]	validation_0-auc:0.854749	validation_1-auc:0.839868
[35]	validation_0-auc:0.85557	validation_1-auc:0.840281
[36]	validation_0-auc:0.856073	validation_1-auc:0.840607
[37]	validation_0-auc:0.856646	validation_1-auc:0.841093
[38]	validation_0-auc:0.857054	validation_1-auc:0.841084
[39]	validation_0-auc:0.857443	validation_1-auc:0.841203
[40]	validation_0-auc:0.857674	validation_1-auc:0.841241
[41]	validation_0-auc:0.858053	validation_1-auc:0.841674
[42]	validation_0-auc:0.858352	validation_1-auc:0.841783
[43]	validation_0-auc:0.858576	validation_1-auc:0.842103
[44]	validation_0-auc:0.858816	validation_1-auc:0.842234
[45]	validation_0-auc:0.859317	validation_1-auc:0.842502
[46]	validation_0-auc:0.859584	validation_1-auc:0.842582
[47]	validation_0-auc:0.859838	validation_1-auc:0.842729
[48]	validation_0-auc:0.860033	validation_1-auc:0.842699
[49]	validation_0-auc:0.860375	validation_1-auc:0.842745
[50]	validation_0-auc:0.860997	validation_1-auc:0.842902
[51]	validation_0-auc:0.861331	validation_1-auc:0.84273
[52]	validation_0-auc:0.861771	validation_1-auc:0.842472
[53]	validation_0-auc:0.862063	validation_1-auc:0.842629
[54]	validation_0-auc:0.862322	validation_1-auc:0.842702
[55]	validation_0-auc:0.862479	validation_1-auc:0.842386
[56]	validation_0-auc:0.862881	validation_1-auc:0.842618
[57]	validation_0-auc:0.863174	validation_1-auc:0.842666
[58]	validation_0-auc:0.863317	validation_1-auc:0.842772
[59]	validation_0-auc:0.863486	validation_1-auc:0.843046
[60]	validation_0-auc:0.863699	validation_1-auc:0.842779
[61]	validation_0-auc:0.864151	validation_1-auc:0.842815
[62]	validation_0-auc:0.864468	validation_1-auc:0.842803
[63]	validation_0-auc:0.865093	validation_1-auc:0.843089
[64]	validation_0-auc:0.865251	validation_1-auc:0.843064
[65]	validation_0-auc:0.865418	validation_1-auc:0.843077
[66]	validation_0-auc:0.865786	validation_1-auc:0.842983
[67]	validation_0-auc:0.866248	validation_1-auc:0.842978
[68]	validation_0-auc:0.866372	validation_1-auc:0.843042
[69]	validation_0-auc:0.866502	validation_1-auc:0.842799
[70]	validation_0-auc:0.866804	validation_1-auc:0.842782
[71]	validation_0-auc:0.867006	validation_1-auc:0.842718
[72]	validation_0-auc:0.867142	validation_1-auc:0.842663
[73]	validation_0-auc:0.867457	validation_1-auc:0.84279
[74]	validation_0-auc:0.867566	validation_1-auc:0.842808
[75]	validation_0-auc:0.867665	validation_1-auc:0.84273
[76]	validation_0-auc:0.868033	validation_1-auc:0.842775
[77]	validation_0-auc:0.868147	validation_1-auc:0.842837
[78]	validation_0-auc:0.868194	validation_1-auc:0.842876
[79]	validation_0-auc:0.868387	validation_1-auc:0.842731
[80]	validation_0-auc:0.868439	validation_1-auc:0.842819
[81]	validation_0-auc:0.868845	validation_1-auc:0.842942
[82]	validation_0-auc:0.869125	validation_1-auc:0.843046
[83]	validation_0-auc:0.86915	validation_1-auc:0.843017
[84]	validation_0-auc:0.869204	validation_1-auc:0.843058
[85]	validation_0-auc:0.869251	validation_1-auc:0.843014
[86]	validation_0-auc:0.869408	validation_1-auc:0.843048
[87]	validation_0-auc:0.86946	validation_1-auc:0.843005
[88]	validation_0-auc:0.869501	validation_1-auc:0.842965
[89]	validation_0-auc:0.869696	validation_1-auc:0.843011
[90]	validation_0-auc:0.869772	validation_1-auc:0.84293
[91]	validation_0-auc:0.869936	validation_1-auc:0.842859
[92]	validation_0-auc:0.870158	validation_1-auc:0.842843
[93]	validation_0-auc:0.87023	validation_1-auc:0.842749
Stopping. Best iteration:
[63]	validation_0-auc:0.865093	validation_1-auc:0.843089

[0]	validation_0-auc:0.81869	validation_1-auc:0.817015
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.824094	validation_1-auc:0.820241
[2]	validation_0-auc:0.826595	validation_1-auc:0.821724
[3]	validation_0-auc:0.829527	validation_1-auc:0.824427
[4]	validation_0-auc:0.829693	validation_1-auc:0.824851
[5]	validation_0-auc:0.831818	validation_1-auc:0.827222
[6]	validation_0-auc:0.834517	validation_1-auc:0.828952
[7]	validation_0-auc:0.8341	validation_1-auc:0.82885
[8]	validation_0-auc:0.834768	validation_1-auc:0.82877
[9]	validation_0-auc:0.836994	validation_1-auc:0.830518
[10]	validation_0-auc:0.837355	validation_1-auc:0.830412
[11]	validation_0-auc:0.838603	validation_1-auc:0.83147
[12]	validation_0-auc:0.839379	validation_1-auc:0.83173
[13]	validation_0-auc:0.84163	validation_1-auc:0.833085
[14]	validation_0-auc:0.842877	validation_1-auc:0.833829
[15]	validation_0-auc:0.843706	validation_1-auc:0.836046
[16]	validation_0-auc:0.844358	validation_1-auc:0.836017
[17]	validation_0-auc:0.8446	validation_1-auc:0.835917
[18]	validation_0-auc:0.845302	validation_1-auc:0.836488
[19]	validation_0-auc:0.845666	validation_1-auc:0.83753
[20]	validation_0-auc:0.846522	validation_1-auc:0.837403
[21]	validation_0-auc:0.846739	validation_1-auc:0.837018
[22]	validation_0-auc:0.847956	validation_1-auc:0.837951
[23]	validation_0-auc:0.848816	validation_1-auc:0.838302
[24]	validation_0-auc:0.849125	validation_1-auc:0.837936
[25]	validation_0-auc:0.849609	validation_1-auc:0.838377
[26]	validation_0-auc:0.849905	validation_1-auc:0.838257
[27]	validation_0-auc:0.85005	validation_1-auc:0.837932
[28]	validation_0-auc:0.85066	validation_1-auc:0.837909
[29]	validation_0-auc:0.851119	validation_1-auc:0.838139
[30]	validation_0-auc:0.851464	validation_1-auc:0.838446
[31]	validation_0-auc:0.852287	validation_1-auc:0.838654
[32]	validation_0-auc:0.852782	validation_1-auc:0.838132
[33]	validation_0-auc:0.853383	validation_1-auc:0.838304
[34]	validation_0-auc:0.854081	validation_1-auc:0.838729
[35]	validation_0-auc:0.854369	validation_1-auc:0.838728
[36]	validation_0-auc:0.854797	validation_1-auc:0.838986
[37]	validation_0-auc:0.85538	validation_1-auc:0.839478
[38]	validation_0-auc:0.855892	validation_1-auc:0.839637
[39]	validation_0-auc:0.856271	validation_1-auc:0.840056
[40]	validation_0-auc:0.856777	validation_1-auc:0.84027
[41]	validation_0-auc:0.857437	validation_1-auc:0.840334
[42]	validation_0-auc:0.857798	validation_1-auc:0.840608
[43]	validation_0-auc:0.858114	validation_1-auc:0.840437
[44]	validation_0-auc:0.8587	validation_1-auc:0.840393
[45]	validation_0-auc:0.859208	validation_1-auc:0.840227
[46]	validation_0-auc:0.859787	validation_1-auc:0.840334
[47]	validation_0-auc:0.860175	validation_1-auc:0.840495
[48]	validation_0-auc:0.860447	validation_1-auc:0.840604
[49]	validation_0-auc:0.860756	validation_1-auc:0.840793
[50]	validation_0-auc:0.8614	validation_1-auc:0.841389
[51]	validation_0-auc:0.861773	validation_1-auc:0.841365
[52]	validation_0-auc:0.861854	validation_1-auc:0.84153
[53]	validation_0-auc:0.862384	validation_1-auc:0.84149
[54]	validation_0-auc:0.862676	validation_1-auc:0.841725
[55]	validation_0-auc:0.862848	validation_1-auc:0.841829
[56]	validation_0-auc:0.863278	validation_1-auc:0.841768
[57]	validation_0-auc:0.86356	validation_1-auc:0.841612
[58]	validation_0-auc:0.86411	validation_1-auc:0.841675
[59]	validation_0-auc:0.864374	validation_1-auc:0.841882
[60]	validation_0-auc:0.864549	validation_1-auc:0.841817
[61]	validation_0-auc:0.864978	validation_1-auc:0.841943
[62]	validation_0-auc:0.865302	validation_1-auc:0.842116
[63]	validation_0-auc:0.865989	validation_1-auc:0.841957
[64]	validation_0-auc:0.86619	validation_1-auc:0.841802
[65]	validation_0-auc:0.866528	validation_1-auc:0.841468
[66]	validation_0-auc:0.866972	validation_1-auc:0.841354
[67]	validation_0-auc:0.867143	validation_1-auc:0.84139
[68]	validation_0-auc:0.867294	validation_1-auc:0.841368
[69]	validation_0-auc:0.867551	validation_1-auc:0.841426
[70]	validation_0-auc:0.867593	validation_1-auc:0.841593
[71]	validation_0-auc:0.867768	validation_1-auc:0.841558
[72]	validation_0-auc:0.86789	validation_1-auc:0.841586
[73]	validation_0-auc:0.868178	validation_1-auc:0.841583
[74]	validation_0-auc:0.86835	validation_1-auc:0.841736
[75]	validation_0-auc:0.868702	validation_1-auc:0.841783
[76]	validation_0-auc:0.869042	validation_1-auc:0.841905
[77]	validation_0-auc:0.86916	validation_1-auc:0.841884
[78]	validation_0-auc:0.869239	validation_1-auc:0.841976
[79]	validation_0-auc:0.86951	validation_1-auc:0.842162
[80]	validation_0-auc:0.869571	validation_1-auc:0.842136
[81]	validation_0-auc:0.869892	validation_1-auc:0.842127
[82]	validation_0-auc:0.870043	validation_1-auc:0.842152
[83]	validation_0-auc:0.87021	validation_1-auc:0.842025
[84]	validation_0-auc:0.870359	validation_1-auc:0.841843
[85]	validation_0-auc:0.870543	validation_1-auc:0.841961
[86]	validation_0-auc:0.87072	validation_1-auc:0.842042
[87]	validation_0-auc:0.870842	validation_1-auc:0.842
[88]	validation_0-auc:0.870963	validation_1-auc:0.842045
[89]	validation_0-auc:0.871054	validation_1-auc:0.841959
[90]	validation_0-auc:0.871463	validation_1-auc:0.841846
[91]	validation_0-auc:0.871591	validation_1-auc:0.841917
[92]	validation_0-auc:0.871796	validation_1-auc:0.841924
[93]	validation_0-auc:0.871845	validation_1-auc:0.841742
[94]	validation_0-auc:0.871905	validation_1-auc:0.841622
[95]	validation_0-auc:0.872065	validation_1-auc:0.841509
[96]	validation_0-auc:0.872127	validation_1-auc:0.841496
[97]	validation_0-auc:0.872232	validation_1-auc:0.841384
[98]	validation_0-auc:0.872411	validation_1-auc:0.841296
[99]	validation_0-auc:0.872639	validation_1-auc:0.841329
[0]	validation_0-auc:0.817712	validation_1-auc:0.810588
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.822973	validation_1-auc:0.818937
[2]	validation_0-auc:0.828214	validation_1-auc:0.822305
[3]	validation_0-auc:0.833547	validation_1-auc:0.828183
[4]	validation_0-auc:0.834204	validation_1-auc:0.827296
[5]	validation_0-auc:0.835055	validation_1-auc:0.828722
[6]	validation_0-auc:0.835412	validation_1-auc:0.827872
[7]	validation_0-auc:0.836887	validation_1-auc:0.828207
[8]	validation_0-auc:0.837771	validation_1-auc:0.82853
[9]	validation_0-auc:0.838834	validation_1-auc:0.829628
[10]	validation_0-auc:0.84078	validation_1-auc:0.830579
[11]	validation_0-auc:0.844138	validation_1-auc:0.832853
[12]	validation_0-auc:0.844703	validation_1-auc:0.83303
[13]	validation_0-auc:0.847231	validation_1-auc:0.834739
[14]	validation_0-auc:0.848425	validation_1-auc:0.834679
[15]	validation_0-auc:0.849434	validation_1-auc:0.834703
[16]	validation_0-auc:0.85006	validation_1-auc:0.83438
[17]	validation_0-auc:0.850893	validation_1-auc:0.834673
[18]	validation_0-auc:0.852682	validation_1-auc:0.835043
[19]	validation_0-auc:0.854094	validation_1-auc:0.836142
[20]	validation_0-auc:0.856011	validation_1-auc:0.837191
[21]	validation_0-auc:0.858132	validation_1-auc:0.837837
[22]	validation_0-auc:0.859643	validation_1-auc:0.837566
[23]	validation_0-auc:0.860642	validation_1-auc:0.83865
[24]	validation_0-auc:0.861403	validation_1-auc:0.839061
[25]	validation_0-auc:0.862313	validation_1-auc:0.838807
[26]	validation_0-auc:0.863779	validation_1-auc:0.839251
[27]	validation_0-auc:0.864868	validation_1-auc:0.839135
[28]	validation_0-auc:0.865905	validation_1-auc:0.839405
[29]	validation_0-auc:0.866786	validation_1-auc:0.83992
[30]	validation_0-auc:0.867471	validation_1-auc:0.839805
[31]	validation_0-auc:0.868309	validation_1-auc:0.839616
[32]	validation_0-auc:0.869498	validation_1-auc:0.839128
[33]	validation_0-auc:0.870833	validation_1-auc:0.83913
[34]	validation_0-auc:0.871762	validation_1-auc:0.838721
[35]	validation_0-auc:0.872567	validation_1-auc:0.838716
[36]	validation_0-auc:0.873911	validation_1-auc:0.838798
[37]	validation_0-auc:0.874911	validation_1-auc:0.839111
[38]	validation_0-auc:0.875604	validation_1-auc:0.83898
[39]	validation_0-auc:0.876694	validation_1-auc:0.838628
[40]	validation_0-auc:0.87779	validation_1-auc:0.839335
[41]	validation_0-auc:0.878488	validation_1-auc:0.839115
[42]	validation_0-auc:0.879337	validation_1-auc:0.838316
[43]	validation_0-auc:0.879813	validation_1-auc:0.838334
[44]	validation_0-auc:0.880703	validation_1-auc:0.837978
[45]	validation_0-auc:0.881071	validation_1-auc:0.837888
[46]	validation_0-auc:0.881764	validation_1-auc:0.837642
[47]	validation_0-auc:0.882077	validation_1-auc:0.837553
[48]	validation_0-auc:0.882411	validation_1-auc:0.837472
[49]	validation_0-auc:0.882964	validation_1-auc:0.837571
[50]	validation_0-auc:0.88336	validation_1-auc:0.837138
[51]	validation_0-auc:0.883865	validation_1-auc:0.837079
[52]	validation_0-auc:0.884422	validation_1-auc:0.836657
[53]	validation_0-auc:0.884543	validation_1-auc:0.836482
[54]	validation_0-auc:0.885125	validation_1-auc:0.836386
[55]	validation_0-auc:0.885407	validation_1-auc:0.836221
[56]	validation_0-auc:0.885601	validation_1-auc:0.836126
[57]	validation_0-auc:0.885798	validation_1-auc:0.836257
[58]	validation_0-auc:0.886399	validation_1-auc:0.83663
[59]	validation_0-auc:0.886428	validation_1-auc:0.836665
Stopping. Best iteration:
[29]	validation_0-auc:0.866786	validation_1-auc:0.83992

[0]	validation_0-auc:0.814009	validation_1-auc:0.804403
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.821275	validation_1-auc:0.813196
[2]	validation_0-auc:0.827903	validation_1-auc:0.820319
[3]	validation_0-auc:0.833379	validation_1-auc:0.827093
[4]	validation_0-auc:0.835471	validation_1-auc:0.826684
[5]	validation_0-auc:0.840934	validation_1-auc:0.831989
[6]	validation_0-auc:0.843255	validation_1-auc:0.831061
[7]	validation_0-auc:0.844488	validation_1-auc:0.82998
[8]	validation_0-auc:0.846061	validation_1-auc:0.830685
[9]	validation_0-auc:0.847914	validation_1-auc:0.830873
[10]	validation_0-auc:0.848697	validation_1-auc:0.831409
[11]	validation_0-auc:0.850248	validation_1-auc:0.832439
[12]	validation_0-auc:0.851094	validation_1-auc:0.832206
[13]	validation_0-auc:0.853428	validation_1-auc:0.834819
[14]	validation_0-auc:0.855078	validation_1-auc:0.835657
[15]	validation_0-auc:0.856117	validation_1-auc:0.835951
[16]	validation_0-auc:0.857395	validation_1-auc:0.83612
[17]	validation_0-auc:0.858986	validation_1-auc:0.835976
[18]	validation_0-auc:0.860126	validation_1-auc:0.836104
[19]	validation_0-auc:0.860182	validation_1-auc:0.837544
[20]	validation_0-auc:0.862347	validation_1-auc:0.837611
[21]	validation_0-auc:0.862989	validation_1-auc:0.838228
[22]	validation_0-auc:0.86408	validation_1-auc:0.838505
[23]	validation_0-auc:0.864833	validation_1-auc:0.839339
[24]	validation_0-auc:0.865275	validation_1-auc:0.839364
[25]	validation_0-auc:0.867226	validation_1-auc:0.839368
[26]	validation_0-auc:0.868347	validation_1-auc:0.839987
[27]	validation_0-auc:0.868822	validation_1-auc:0.839718
[28]	validation_0-auc:0.869747	validation_1-auc:0.839315
[29]	validation_0-auc:0.870582	validation_1-auc:0.839963
[30]	validation_0-auc:0.871132	validation_1-auc:0.840295
[31]	validation_0-auc:0.871655	validation_1-auc:0.8402
[32]	validation_0-auc:0.872709	validation_1-auc:0.840605
[33]	validation_0-auc:0.873597	validation_1-auc:0.840609
[34]	validation_0-auc:0.874485	validation_1-auc:0.841155
[35]	validation_0-auc:0.875671	validation_1-auc:0.840878
[36]	validation_0-auc:0.876407	validation_1-auc:0.840645
[37]	validation_0-auc:0.877606	validation_1-auc:0.840808
[38]	validation_0-auc:0.878592	validation_1-auc:0.840772
[39]	validation_0-auc:0.879625	validation_1-auc:0.840525
[40]	validation_0-auc:0.880381	validation_1-auc:0.840175
[41]	validation_0-auc:0.880983	validation_1-auc:0.840642
[42]	validation_0-auc:0.881759	validation_1-auc:0.840167
[43]	validation_0-auc:0.882392	validation_1-auc:0.840169
[44]	validation_0-auc:0.882762	validation_1-auc:0.840488
[45]	validation_0-auc:0.883719	validation_1-auc:0.840141
[46]	validation_0-auc:0.88399	validation_1-auc:0.84025
[47]	validation_0-auc:0.88429	validation_1-auc:0.839998
[48]	validation_0-auc:0.884709	validation_1-auc:0.840178
[49]	validation_0-auc:0.885064	validation_1-auc:0.840582
[50]	validation_0-auc:0.885692	validation_1-auc:0.84081
[51]	validation_0-auc:0.886023	validation_1-auc:0.840559
[52]	validation_0-auc:0.886386	validation_1-auc:0.84034
[53]	validation_0-auc:0.886683	validation_1-auc:0.840198
[54]	validation_0-auc:0.887167	validation_1-auc:0.840429
[55]	validation_0-auc:0.887491	validation_1-auc:0.840178
[56]	validation_0-auc:0.88767	validation_1-auc:0.840103
[57]	validation_0-auc:0.887937	validation_1-auc:0.840415
[58]	validation_0-auc:0.888188	validation_1-auc:0.840231
[59]	validation_0-auc:0.888409	validation_1-auc:0.840121
[60]	validation_0-auc:0.888768	validation_1-auc:0.84005
[61]	validation_0-auc:0.889063	validation_1-auc:0.839877
[62]	validation_0-auc:0.88928	validation_1-auc:0.840047
[63]	validation_0-auc:0.890143	validation_1-auc:0.840195
[64]	validation_0-auc:0.890278	validation_1-auc:0.840287
Stopping. Best iteration:
[34]	validation_0-auc:0.874485	validation_1-auc:0.841155

[0]	validation_0-auc:0.824399	validation_1-auc:0.819077
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.830053	validation_1-auc:0.823195
[2]	validation_0-auc:0.832221	validation_1-auc:0.823543
[3]	validation_0-auc:0.835389	validation_1-auc:0.825845
[4]	validation_0-auc:0.836288	validation_1-auc:0.827867
[5]	validation_0-auc:0.83797	validation_1-auc:0.829851
[6]	validation_0-auc:0.840351	validation_1-auc:0.831956
[7]	validation_0-auc:0.841858	validation_1-auc:0.832311
[8]	validation_0-auc:0.843381	validation_1-auc:0.832004
[9]	validation_0-auc:0.846369	validation_1-auc:0.834322
[10]	validation_0-auc:0.846852	validation_1-auc:0.833733
[11]	validation_0-auc:0.848206	validation_1-auc:0.834291
[12]	validation_0-auc:0.849979	validation_1-auc:0.835236
[13]	validation_0-auc:0.852205	validation_1-auc:0.835673
[14]	validation_0-auc:0.853384	validation_1-auc:0.835366
[15]	validation_0-auc:0.855281	validation_1-auc:0.836382
[16]	validation_0-auc:0.857246	validation_1-auc:0.836391
[17]	validation_0-auc:0.858562	validation_1-auc:0.83651
[18]	validation_0-auc:0.859638	validation_1-auc:0.836914
[19]	validation_0-auc:0.861104	validation_1-auc:0.837749
[20]	validation_0-auc:0.861834	validation_1-auc:0.837509
[21]	validation_0-auc:0.862292	validation_1-auc:0.837102
[22]	validation_0-auc:0.863761	validation_1-auc:0.836877
[23]	validation_0-auc:0.864326	validation_1-auc:0.837137
[24]	validation_0-auc:0.864883	validation_1-auc:0.83752
[25]	validation_0-auc:0.866094	validation_1-auc:0.838447
[26]	validation_0-auc:0.867084	validation_1-auc:0.838358
[27]	validation_0-auc:0.868579	validation_1-auc:0.838233
[28]	validation_0-auc:0.870065	validation_1-auc:0.83825
[29]	validation_0-auc:0.870945	validation_1-auc:0.83857
[30]	validation_0-auc:0.871822	validation_1-auc:0.838855
[31]	validation_0-auc:0.873046	validation_1-auc:0.8385
[32]	validation_0-auc:0.874107	validation_1-auc:0.838419
[33]	validation_0-auc:0.874597	validation_1-auc:0.838913
[34]	validation_0-auc:0.876263	validation_1-auc:0.839398
[35]	validation_0-auc:0.877463	validation_1-auc:0.839509
[36]	validation_0-auc:0.878457	validation_1-auc:0.839528
[37]	validation_0-auc:0.879484	validation_1-auc:0.839985
[38]	validation_0-auc:0.879877	validation_1-auc:0.840308
[39]	validation_0-auc:0.881285	validation_1-auc:0.840924
[40]	validation_0-auc:0.882054	validation_1-auc:0.840511
[41]	validation_0-auc:0.883073	validation_1-auc:0.840492
[42]	validation_0-auc:0.883678	validation_1-auc:0.840568
[43]	validation_0-auc:0.884181	validation_1-auc:0.840323
[44]	validation_0-auc:0.884589	validation_1-auc:0.840466
[45]	validation_0-auc:0.884991	validation_1-auc:0.840599
[46]	validation_0-auc:0.885288	validation_1-auc:0.840688
[47]	validation_0-auc:0.885791	validation_1-auc:0.840406
[48]	validation_0-auc:0.885959	validation_1-auc:0.840183
[49]	validation_0-auc:0.886592	validation_1-auc:0.840218
[50]	validation_0-auc:0.887037	validation_1-auc:0.839912
[51]	validation_0-auc:0.88741	validation_1-auc:0.83952
[52]	validation_0-auc:0.887704	validation_1-auc:0.839099
[53]	validation_0-auc:0.887982	validation_1-auc:0.839364
[54]	validation_0-auc:0.888408	validation_1-auc:0.839712
[55]	validation_0-auc:0.888556	validation_1-auc:0.839627
[56]	validation_0-auc:0.888838	validation_1-auc:0.839482
[57]	validation_0-auc:0.889265	validation_1-auc:0.839542
[58]	validation_0-auc:0.889539	validation_1-auc:0.839367
[59]	validation_0-auc:0.889707	validation_1-auc:0.839808
[60]	validation_0-auc:0.890174	validation_1-auc:0.83986
[61]	validation_0-auc:0.890404	validation_1-auc:0.839782
[62]	validation_0-auc:0.890645	validation_1-auc:0.839831
[63]	validation_0-auc:0.891371	validation_1-auc:0.839484
[64]	validation_0-auc:0.891946	validation_1-auc:0.839601
[65]	validation_0-auc:0.892215	validation_1-auc:0.839587
[66]	validation_0-auc:0.892649	validation_1-auc:0.839283
[67]	validation_0-auc:0.893291	validation_1-auc:0.839414
[68]	validation_0-auc:0.893393	validation_1-auc:0.839689
[69]	validation_0-auc:0.893628	validation_1-auc:0.839685
Stopping. Best iteration:
[39]	validation_0-auc:0.881285	validation_1-auc:0.840924

[0]	validation_0-auc:0.816109	validation_1-auc:0.811
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.819873	validation_1-auc:0.817792
[2]	validation_0-auc:0.824847	validation_1-auc:0.8229
[3]	validation_0-auc:0.829837	validation_1-auc:0.828079
[4]	validation_0-auc:0.832812	validation_1-auc:0.827416
[5]	validation_0-auc:0.834047	validation_1-auc:0.827319
[6]	validation_0-auc:0.834612	validation_1-auc:0.827051
[7]	validation_0-auc:0.837483	validation_1-auc:0.829683
[8]	validation_0-auc:0.838366	validation_1-auc:0.830073
[9]	validation_0-auc:0.838851	validation_1-auc:0.829976
[10]	validation_0-auc:0.84009	validation_1-auc:0.831103
[11]	validation_0-auc:0.841982	validation_1-auc:0.832844
[12]	validation_0-auc:0.843727	validation_1-auc:0.832937
[13]	validation_0-auc:0.846325	validation_1-auc:0.834449
[14]	validation_0-auc:0.846808	validation_1-auc:0.83439
[15]	validation_0-auc:0.84837	validation_1-auc:0.835177
[16]	validation_0-auc:0.849643	validation_1-auc:0.83589
[17]	validation_0-auc:0.850856	validation_1-auc:0.836092
[18]	validation_0-auc:0.852271	validation_1-auc:0.837136
[19]	validation_0-auc:0.85394	validation_1-auc:0.836967
[20]	validation_0-auc:0.855496	validation_1-auc:0.837424
[21]	validation_0-auc:0.857223	validation_1-auc:0.83705
[22]	validation_0-auc:0.858364	validation_1-auc:0.836773
[23]	validation_0-auc:0.859264	validation_1-auc:0.838275
[24]	validation_0-auc:0.859998	validation_1-auc:0.839437
[25]	validation_0-auc:0.860713	validation_1-auc:0.839528
[26]	validation_0-auc:0.861887	validation_1-auc:0.839759
[27]	validation_0-auc:0.862421	validation_1-auc:0.839912
[28]	validation_0-auc:0.863272	validation_1-auc:0.839747
[29]	validation_0-auc:0.864173	validation_1-auc:0.839941
[30]	validation_0-auc:0.864581	validation_1-auc:0.840214
[31]	validation_0-auc:0.865165	validation_1-auc:0.840243
[32]	validation_0-auc:0.865982	validation_1-auc:0.839967
[33]	validation_0-auc:0.867174	validation_1-auc:0.840393
[34]	validation_0-auc:0.867784	validation_1-auc:0.84028
[35]	validation_0-auc:0.868347	validation_1-auc:0.839926
[36]	validation_0-auc:0.869124	validation_1-auc:0.839652
[37]	validation_0-auc:0.86975	validation_1-auc:0.83957
[38]	validation_0-auc:0.870294	validation_1-auc:0.839306
[39]	validation_0-auc:0.870689	validation_1-auc:0.83959
[40]	validation_0-auc:0.871538	validation_1-auc:0.839411
[41]	validation_0-auc:0.87222	validation_1-auc:0.838991
[42]	validation_0-auc:0.87275	validation_1-auc:0.838814
[43]	validation_0-auc:0.873267	validation_1-auc:0.838467
[44]	validation_0-auc:0.874133	validation_1-auc:0.838313
[45]	validation_0-auc:0.874573	validation_1-auc:0.83842
[46]	validation_0-auc:0.875069	validation_1-auc:0.838436
[47]	validation_0-auc:0.875501	validation_1-auc:0.838306
[48]	validation_0-auc:0.876318	validation_1-auc:0.838231
[49]	validation_0-auc:0.876795	validation_1-auc:0.838381
[50]	validation_0-auc:0.877102	validation_1-auc:0.8384
[51]	validation_0-auc:0.877486	validation_1-auc:0.838447
[52]	validation_0-auc:0.877869	validation_1-auc:0.838069
[53]	validation_0-auc:0.877924	validation_1-auc:0.837986
[54]	validation_0-auc:0.878057	validation_1-auc:0.837856
[55]	validation_0-auc:0.878238	validation_1-auc:0.837813
[56]	validation_0-auc:0.878262	validation_1-auc:0.837831
[57]	validation_0-auc:0.878544	validation_1-auc:0.838054
[58]	validation_0-auc:0.879031	validation_1-auc:0.838101
[59]	validation_0-auc:0.879236	validation_1-auc:0.838254
[60]	validation_0-auc:0.879381	validation_1-auc:0.838044
[61]	validation_0-auc:0.879473	validation_1-auc:0.838073
[62]	validation_0-auc:0.879787	validation_1-auc:0.838048
[63]	validation_0-auc:0.879972	validation_1-auc:0.837792
Stopping. Best iteration:
[33]	validation_0-auc:0.867174	validation_1-auc:0.840393

[0]	validation_0-auc:0.814792	validation_1-auc:0.81277
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.820829	validation_1-auc:0.813089
[2]	validation_0-auc:0.828402	validation_1-auc:0.820049
[3]	validation_0-auc:0.834025	validation_1-auc:0.825544
[4]	validation_0-auc:0.836659	validation_1-auc:0.825712
[5]	validation_0-auc:0.837075	validation_1-auc:0.825205
[6]	validation_0-auc:0.838245	validation_1-auc:0.826528
[7]	validation_0-auc:0.839739	validation_1-auc:0.826771
[8]	validation_0-auc:0.841775	validation_1-auc:0.82822
[9]	validation_0-auc:0.845978	validation_1-auc:0.832237
[10]	validation_0-auc:0.84671	validation_1-auc:0.83202
[11]	validation_0-auc:0.848607	validation_1-auc:0.833908
[12]	validation_0-auc:0.849656	validation_1-auc:0.833841
[13]	validation_0-auc:0.852368	validation_1-auc:0.8363
[14]	validation_0-auc:0.853898	validation_1-auc:0.836181
[15]	validation_0-auc:0.855682	validation_1-auc:0.836348
[16]	validation_0-auc:0.857283	validation_1-auc:0.836661
[17]	validation_0-auc:0.858399	validation_1-auc:0.836068
[18]	validation_0-auc:0.859339	validation_1-auc:0.83598
[19]	validation_0-auc:0.859801	validation_1-auc:0.83813
[20]	validation_0-auc:0.860913	validation_1-auc:0.838516
[21]	validation_0-auc:0.861303	validation_1-auc:0.838247
[22]	validation_0-auc:0.861911	validation_1-auc:0.838743
[23]	validation_0-auc:0.862415	validation_1-auc:0.838619
[24]	validation_0-auc:0.862871	validation_1-auc:0.838881
[25]	validation_0-auc:0.864666	validation_1-auc:0.839139
[26]	validation_0-auc:0.866071	validation_1-auc:0.83909
[27]	validation_0-auc:0.866785	validation_1-auc:0.839038
[28]	validation_0-auc:0.867411	validation_1-auc:0.839043
[29]	validation_0-auc:0.867904	validation_1-auc:0.8398
[30]	validation_0-auc:0.868309	validation_1-auc:0.839796
[31]	validation_0-auc:0.868524	validation_1-auc:0.839662
[32]	validation_0-auc:0.869482	validation_1-auc:0.839879
[33]	validation_0-auc:0.870017	validation_1-auc:0.840794
[34]	validation_0-auc:0.870733	validation_1-auc:0.840824
[35]	validation_0-auc:0.871643	validation_1-auc:0.840995
[36]	validation_0-auc:0.872293	validation_1-auc:0.841035
[37]	validation_0-auc:0.873394	validation_1-auc:0.84168
[38]	validation_0-auc:0.873892	validation_1-auc:0.841768
[39]	validation_0-auc:0.874636	validation_1-auc:0.841555
[40]	validation_0-auc:0.875227	validation_1-auc:0.841199
[41]	validation_0-auc:0.875829	validation_1-auc:0.841302
[42]	validation_0-auc:0.876312	validation_1-auc:0.841156
[43]	validation_0-auc:0.876725	validation_1-auc:0.841359
[44]	validation_0-auc:0.87701	validation_1-auc:0.841513
[45]	validation_0-auc:0.877479	validation_1-auc:0.841239
[46]	validation_0-auc:0.877825	validation_1-auc:0.841163
[47]	validation_0-auc:0.877996	validation_1-auc:0.841038
[48]	validation_0-auc:0.878171	validation_1-auc:0.840726
[49]	validation_0-auc:0.878389	validation_1-auc:0.840901
[50]	validation_0-auc:0.879043	validation_1-auc:0.841293
[51]	validation_0-auc:0.879296	validation_1-auc:0.84155
[52]	validation_0-auc:0.879673	validation_1-auc:0.841463
[53]	validation_0-auc:0.879901	validation_1-auc:0.841496
[54]	validation_0-auc:0.8801	validation_1-auc:0.841431
[55]	validation_0-auc:0.880273	validation_1-auc:0.841207
[56]	validation_0-auc:0.880526	validation_1-auc:0.841256
[57]	validation_0-auc:0.880449	validation_1-auc:0.841305
[58]	validation_0-auc:0.880585	validation_1-auc:0.841416
[59]	validation_0-auc:0.88067	validation_1-auc:0.841416
[60]	validation_0-auc:0.880894	validation_1-auc:0.84149
[61]	validation_0-auc:0.881124	validation_1-auc:0.841412
[62]	validation_0-auc:0.881378	validation_1-auc:0.841394
[63]	validation_0-auc:0.881668	validation_1-auc:0.841709
[64]	validation_0-auc:0.88173	validation_1-auc:0.841652
[65]	validation_0-auc:0.882135	validation_1-auc:0.841732
[66]	validation_0-auc:0.882459	validation_1-auc:0.841871
[67]	validation_0-auc:0.882853	validation_1-auc:0.841903
[68]	validation_0-auc:0.883392	validation_1-auc:0.841701
[69]	validation_0-auc:0.883459	validation_1-auc:0.841788
[70]	validation_0-auc:0.883518	validation_1-auc:0.841729
[71]	validation_0-auc:0.883579	validation_1-auc:0.841537
[72]	validation_0-auc:0.883791	validation_1-auc:0.841664
[73]	validation_0-auc:0.88418	validation_1-auc:0.841649
[74]	validation_0-auc:0.884291	validation_1-auc:0.841491
[75]	validation_0-auc:0.884816	validation_1-auc:0.841402
[76]	validation_0-auc:0.884963	validation_1-auc:0.841273
[77]	validation_0-auc:0.885139	validation_1-auc:0.841172
[78]	validation_0-auc:0.885216	validation_1-auc:0.84114
[79]	validation_0-auc:0.885202	validation_1-auc:0.841049
[80]	validation_0-auc:0.885419	validation_1-auc:0.840951
[81]	validation_0-auc:0.885472	validation_1-auc:0.841018
[82]	validation_0-auc:0.885447	validation_1-auc:0.841039
[83]	validation_0-auc:0.8859	validation_1-auc:0.841014
[84]	validation_0-auc:0.886376	validation_1-auc:0.84082
[85]	validation_0-auc:0.886446	validation_1-auc:0.840746
[86]	validation_0-auc:0.886466	validation_1-auc:0.840723
[87]	validation_0-auc:0.886506	validation_1-auc:0.840773
[88]	validation_0-auc:0.886624	validation_1-auc:0.840701
[89]	validation_0-auc:0.886644	validation_1-auc:0.840606
[90]	validation_0-auc:0.886728	validation_1-auc:0.840549
[91]	validation_0-auc:0.886841	validation_1-auc:0.840455
[92]	validation_0-auc:0.886925	validation_1-auc:0.840435
[93]	validation_0-auc:0.886961	validation_1-auc:0.840425
[94]	validation_0-auc:0.886955	validation_1-auc:0.840434
[95]	validation_0-auc:0.887038	validation_1-auc:0.840503
[96]	validation_0-auc:0.887257	validation_1-auc:0.840601
[97]	validation_0-auc:0.88742	validation_1-auc:0.840526
Stopping. Best iteration:
[67]	validation_0-auc:0.882853	validation_1-auc:0.841903

[0]	validation_0-auc:0.822547	validation_1-auc:0.816922
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.828707	validation_1-auc:0.82115
[2]	validation_0-auc:0.831882	validation_1-auc:0.821944
[3]	validation_0-auc:0.834654	validation_1-auc:0.82446
[4]	validation_0-auc:0.835563	validation_1-auc:0.825211
[5]	validation_0-auc:0.839081	validation_1-auc:0.827422
[6]	validation_0-auc:0.840656	validation_1-auc:0.828467
[7]	validation_0-auc:0.840958	validation_1-auc:0.829569
[8]	validation_0-auc:0.842797	validation_1-auc:0.830023
[9]	validation_0-auc:0.844924	validation_1-auc:0.832305
[10]	validation_0-auc:0.845608	validation_1-auc:0.831611
[11]	validation_0-auc:0.84683	validation_1-auc:0.832827
[12]	validation_0-auc:0.848602	validation_1-auc:0.832492
[13]	validation_0-auc:0.851937	validation_1-auc:0.834248
[14]	validation_0-auc:0.852856	validation_1-auc:0.834055
[15]	validation_0-auc:0.853512	validation_1-auc:0.834954
[16]	validation_0-auc:0.855482	validation_1-auc:0.835445
[17]	validation_0-auc:0.856714	validation_1-auc:0.835547
[18]	validation_0-auc:0.858299	validation_1-auc:0.83585
[19]	validation_0-auc:0.858865	validation_1-auc:0.836897
[20]	validation_0-auc:0.860335	validation_1-auc:0.836987
[21]	validation_0-auc:0.860911	validation_1-auc:0.838002
[22]	validation_0-auc:0.862184	validation_1-auc:0.837764
[23]	validation_0-auc:0.862837	validation_1-auc:0.838392
[24]	validation_0-auc:0.863766	validation_1-auc:0.838581
[25]	validation_0-auc:0.86453	validation_1-auc:0.838405
[26]	validation_0-auc:0.865585	validation_1-auc:0.837857
[27]	validation_0-auc:0.866775	validation_1-auc:0.838619
[28]	validation_0-auc:0.867699	validation_1-auc:0.838616
[29]	validation_0-auc:0.868236	validation_1-auc:0.838961
[30]	validation_0-auc:0.868628	validation_1-auc:0.839717
[31]	validation_0-auc:0.869431	validation_1-auc:0.839438
[32]	validation_0-auc:0.869965	validation_1-auc:0.839587
[33]	validation_0-auc:0.870234	validation_1-auc:0.839529
[34]	validation_0-auc:0.87177	validation_1-auc:0.839573
[35]	validation_0-auc:0.872435	validation_1-auc:0.839246
[36]	validation_0-auc:0.873224	validation_1-auc:0.838972
[37]	validation_0-auc:0.873875	validation_1-auc:0.838648
[38]	validation_0-auc:0.874594	validation_1-auc:0.83885
[39]	validation_0-auc:0.875576	validation_1-auc:0.839485
[40]	validation_0-auc:0.875978	validation_1-auc:0.839683
[41]	validation_0-auc:0.876199	validation_1-auc:0.840086
[42]	validation_0-auc:0.876623	validation_1-auc:0.840356
[43]	validation_0-auc:0.877034	validation_1-auc:0.840075
[44]	validation_0-auc:0.877679	validation_1-auc:0.840102
[45]	validation_0-auc:0.877955	validation_1-auc:0.839991
[46]	validation_0-auc:0.878373	validation_1-auc:0.839821
[47]	validation_0-auc:0.879023	validation_1-auc:0.839953
[48]	validation_0-auc:0.879568	validation_1-auc:0.84044
[49]	validation_0-auc:0.880217	validation_1-auc:0.840727
[50]	validation_0-auc:0.880484	validation_1-auc:0.840697
[51]	validation_0-auc:0.881016	validation_1-auc:0.840838
[52]	validation_0-auc:0.881335	validation_1-auc:0.840874
[53]	validation_0-auc:0.881494	validation_1-auc:0.840699
[54]	validation_0-auc:0.881929	validation_1-auc:0.840715
[55]	validation_0-auc:0.882208	validation_1-auc:0.840622
[56]	validation_0-auc:0.882443	validation_1-auc:0.840653
[57]	validation_0-auc:0.882766	validation_1-auc:0.840728
[58]	validation_0-auc:0.882961	validation_1-auc:0.840774
[59]	validation_0-auc:0.883276	validation_1-auc:0.840964
[60]	validation_0-auc:0.883422	validation_1-auc:0.840901
[61]	validation_0-auc:0.883677	validation_1-auc:0.840899
[62]	validation_0-auc:0.884009	validation_1-auc:0.840771
[63]	validation_0-auc:0.88424	validation_1-auc:0.840893
[64]	validation_0-auc:0.884581	validation_1-auc:0.840899
[65]	validation_0-auc:0.885045	validation_1-auc:0.840572
[66]	validation_0-auc:0.885111	validation_1-auc:0.840598
[67]	validation_0-auc:0.885362	validation_1-auc:0.840534
[68]	validation_0-auc:0.885538	validation_1-auc:0.840539
[69]	validation_0-auc:0.885673	validation_1-auc:0.840587
[70]	validation_0-auc:0.885969	validation_1-auc:0.840465
[71]	validation_0-auc:0.88605	validation_1-auc:0.840332
[72]	validation_0-auc:0.886268	validation_1-auc:0.840306
[73]	validation_0-auc:0.886402	validation_1-auc:0.840238
[74]	validation_0-auc:0.886488	validation_1-auc:0.840123
[75]	validation_0-auc:0.886754	validation_1-auc:0.840057
[76]	validation_0-auc:0.886749	validation_1-auc:0.840125
[77]	validation_0-auc:0.886791	validation_1-auc:0.840095
[78]	validation_0-auc:0.887125	validation_1-auc:0.840039
[79]	validation_0-auc:0.887168	validation_1-auc:0.839946
[80]	validation_0-auc:0.887226	validation_1-auc:0.840027
[81]	validation_0-auc:0.887591	validation_1-auc:0.840144
[82]	validation_0-auc:0.887657	validation_1-auc:0.840054
[83]	validation_0-auc:0.887845	validation_1-auc:0.839975
[84]	validation_0-auc:0.888198	validation_1-auc:0.839896
[85]	validation_0-auc:0.888308	validation_1-auc:0.839856
[86]	validation_0-auc:0.8885	validation_1-auc:0.839681
[87]	validation_0-auc:0.888697	validation_1-auc:0.839488
[88]	validation_0-auc:0.888846	validation_1-auc:0.839488
[89]	validation_0-auc:0.88898	validation_1-auc:0.839533
Stopping. Best iteration:
[59]	validation_0-auc:0.883276	validation_1-auc:0.840964

[0]	validation_0-auc:0.823829	validation_1-auc:0.815397
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 30 rounds.
[1]	validation_0-auc:0.829238	validation_1-auc:0.820017
[2]	validation_0-auc:0.830854	validation_1-auc:0.819159
[3]	validation_0-auc:0.835135	validation_1-auc:0.824044
[4]	validation_0-auc:0.839961	validation_1-auc:0.828412
[5]	validation_0-auc:0.844842	validation_1-auc:0.832053
[6]	validation_0-auc:0.846443	validation_1-auc:0.830766
[7]	validation_0-auc:0.848003	validation_1-auc:0.830247
[8]	validation_0-auc:0.849619	validation_1-auc:0.830916
[9]	validation_0-auc:0.853097	validation_1-auc:0.833645
[10]	validation_0-auc:0.854619	validation_1-auc:0.834562
[11]	validation_0-auc:0.855719	validation_1-auc:0.834803
[12]	validation_0-auc:0.857047	validation_1-auc:0.834742
[13]	validation_0-auc:0.859465	validation_1-auc:0.83614
[14]	validation_0-auc:0.861314	validation_1-auc:0.836874
[15]	validation_0-auc:0.86297	validation_1-auc:0.838768
[16]	validation_0-auc:0.863776	validation_1-auc:0.838826
[17]	validation_0-auc:0.865301	validation_1-auc:0.838031
[18]	validation_0-auc:0.866435	validation_1-auc:0.838049
[19]	validation_0-auc:0.867971	validation_1-auc:0.839529
[20]	validation_0-auc:0.869586	validation_1-auc:0.840168
[21]	validation_0-auc:0.870444	validation_1-auc:0.840019
[22]	validation_0-auc:0.871285	validation_1-auc:0.84105
[23]	validation_0-auc:0.872073	validation_1-auc:0.841952
[24]	validation_0-auc:0.873107	validation_1-auc:0.842438
[25]	validation_0-auc:0.874992	validation_1-auc:0.842839
[26]	validation_0-auc:0.875982	validation_1-auc:0.842408
[27]	validation_0-auc:0.876847	validation_1-auc:0.842562
[28]	validation_0-auc:0.878017	validation_1-auc:0.842203
[29]	validation_0-auc:0.878863	validation_1-auc:0.842869
[30]	validation_0-auc:0.879746	validation_1-auc:0.843469
[31]	validation_0-auc:0.880674	validation_1-auc:0.843657
[32]	validation_0-auc:0.882098	validation_1-auc:0.844165
[33]	validation_0-auc:0.883132	validation_1-auc:0.84438
[34]	validation_0-auc:0.884776	validation_1-auc:0.844213
[35]	validation_0-auc:0.88613	validation_1-auc:0.844089
[36]	validation_0-auc:0.887126	validation_1-auc:0.844184
[37]	validation_0-auc:0.888606	validation_1-auc:0.844513
[38]	validation_0-auc:0.889606	validation_1-auc:0.844808
[39]	validation_0-auc:0.890489	validation_1-auc:0.844347
[40]	validation_0-auc:0.891523	validation_1-auc:0.844258
[41]	validation_0-auc:0.892368	validation_1-auc:0.843562
[42]	validation_0-auc:0.893567	validation_1-auc:0.843372
[43]	validation_0-auc:0.894295	validation_1-auc:0.843271
[44]	validation_0-auc:0.895484	validation_1-auc:0.843022
[45]	validation_0-auc:0.896278	validation_1-auc:0.843259
[46]	validation_0-auc:0.896803	validation_1-auc:0.843171
[47]	validation_0-auc:0.897354	validation_1-auc:0.8433
[48]	validation_0-auc:0.898112	validation_1-auc:0.843112
[49]	validation_0-auc:0.898969	validation_1-auc:0.843815
[50]	validation_0-auc:0.899723	validation_1-auc:0.843616
[51]	validation_0-auc:0.900311	validation_1-auc:0.843351
[52]	validation_0-auc:0.90076	validation_1-auc:0.843224
[53]	validation_0-auc:0.901219	validation_1-auc:0.843209
[54]	validation_0-auc:0.901953	validation_1-auc:0.84299
[55]	validation_0-auc:0.902292	validation_1-auc:0.843091
[56]	validation_0-auc:0.902796	validation_1-auc:0.842737
[57]	validation_0-auc:0.903409	validation_1-auc:0.842502
[58]	validation_0-auc:0.904051	validation_1-auc:0.842664
[59]	validation_0-auc:0.904293	validation_1-auc:0.842771
[60]	validation_0-auc:0.904655	validation_1-auc:0.842501
[61]	validation_0-auc:0.904978	validation_1-auc:0.842503
[62]	validation_0-auc:0.905361	validation_1-auc:0.8426
[63]	validation_0-auc:0.906145	validation_1-auc:0.842635
[64]	validation_0-auc:0.90649	validation_1-auc:0.842556
[65]	validation_0-auc:0.907426	validation_1-auc:0.84252
[66]	validation_0-auc:0.907983	validation_1-auc:0.84242
[67]	validation_0-auc:0.908941	validation_1-auc:0.842218
[68]	validation_0-auc:0.909123	validation_1-auc:0.842217
Stopping. Best iteration:
[38]	validation_0-auc:0.889606	validation_1-auc:0.844808

GridSearchCV 최적 파라미터: {'colsample_bytree': 0.75, 'max_depth': 7, 'min_child_weight': 1}
ROC AUC: 0.8448

 

# Bump n_estimators to 1000, lower learning_rate to 0.02, and add reg_alpha=0.03
# (L1 regularization) on top of the hyperparameters found by GridSearchCV above.
xgb_clf = XGBClassifier(n_estimators=1000, random_state=156, learning_rate=0.02, max_depth=5,\
                        min_child_weight=1, colsample_bytree=0.75, reg_alpha=0.03)

# Train with AUC as the evaluation metric and stop early if the last eval_set
# entry shows no improvement for 200 rounds.
# NOTE(review): early stopping monitors (X_test, y_test), so test information
# leaks into model selection — a held-out validation split would be cleaner.
xgb_clf.fit(X_train, y_train, early_stopping_rounds=200, 
            eval_metric="auc",eval_set=[(X_train, y_train), (X_test, y_test)])

# ROC AUC from positive-class probabilities (column 1 of predict_proba).
# NOTE(review): average='macro' presumably has no effect for a binary target —
# confirm y_test is binary.
xgb_roc_score = roc_auc_score(y_test, xgb_clf.predict_proba(X_test)[:,1],average='macro')
print('ROC AUC: {0:.4f}'.format(xgb_roc_score))


[0]	validation_0-auc:0.817284	validation_1-auc:0.811534
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 200 rounds.
[1]	validation_0-auc:0.820302	validation_1-auc:0.812178
[2]	validation_0-auc:0.823363	validation_1-auc:0.816027
[3]	validation_0-auc:0.825007	validation_1-auc:0.81705
[4]	validation_0-auc:0.825243	validation_1-auc:0.817264
[5]	validation_0-auc:0.827563	validation_1-auc:0.819988
[6]	validation_0-auc:0.827812	validation_1-auc:0.819542
[7]	validation_0-auc:0.827855	validation_1-auc:0.819216
[8]	validation_0-auc:0.829221	validation_1-auc:0.819799
[9]	validation_0-auc:0.829078	validation_1-auc:0.818898
[10]	validation_0-auc:0.830731	validation_1-auc:0.820967
[11]	validation_0-auc:0.828293	validation_1-auc:0.819437
[12]	validation_0-auc:0.829608	validation_1-auc:0.821204
[13]	validation_0-auc:0.830487	validation_1-auc:0.821121
[14]	validation_0-auc:0.834894	validation_1-auc:0.827068
[15]	validation_0-auc:0.833589	validation_1-auc:0.825258
[16]	validation_0-auc:0.83663	validation_1-auc:0.828619
[17]	validation_0-auc:0.837844	validation_1-auc:0.829526
[18]	validation_0-auc:0.838153	validation_1-auc:0.829361
[19]	validation_0-auc:0.838453	validation_1-auc:0.829982
[20]	validation_0-auc:0.838553	validation_1-auc:0.830264
[21]	validation_0-auc:0.838553	validation_1-auc:0.830909
[22]	validation_0-auc:0.838518	validation_1-auc:0.830954
[23]	validation_0-auc:0.838431	validation_1-auc:0.830402
[24]	validation_0-auc:0.839075	validation_1-auc:0.83115
[25]	validation_0-auc:0.839368	validation_1-auc:0.83172
[26]	validation_0-auc:0.839445	validation_1-auc:0.831542
[27]	validation_0-auc:0.839412	validation_1-auc:0.832062
[28]	validation_0-auc:0.839326	validation_1-auc:0.832
[29]	validation_0-auc:0.839224	validation_1-auc:0.832283
[30]	validation_0-auc:0.839342	validation_1-auc:0.832267
[31]	validation_0-auc:0.839517	validation_1-auc:0.832136
[32]	validation_0-auc:0.839447	validation_1-auc:0.832037
[33]	validation_0-auc:0.839555	validation_1-auc:0.831743
[34]	validation_0-auc:0.83974	validation_1-auc:0.831614
[35]	validation_0-auc:0.839938	validation_1-auc:0.832094
[36]	validation_0-auc:0.840603	validation_1-auc:0.832049
[37]	validation_0-auc:0.840503	validation_1-auc:0.832103
[38]	validation_0-auc:0.84055	validation_1-auc:0.832401
[39]	validation_0-auc:0.840481	validation_1-auc:0.832148
[40]	validation_0-auc:0.840618	validation_1-auc:0.832186
[41]	validation_0-auc:0.840705	validation_1-auc:0.832251
[42]	validation_0-auc:0.840325	validation_1-auc:0.832067
[43]	validation_0-auc:0.840532	validation_1-auc:0.832117
[44]	validation_0-auc:0.84082	validation_1-auc:0.832339
[45]	validation_0-auc:0.841101	validation_1-auc:0.832281
[46]	validation_0-auc:0.84129	validation_1-auc:0.832504
[47]	validation_0-auc:0.841432	validation_1-auc:0.832708
[48]	validation_0-auc:0.842026	validation_1-auc:0.833033
[49]	validation_0-auc:0.8421	validation_1-auc:0.832942
[50]	validation_0-auc:0.842289	validation_1-auc:0.832659
[51]	validation_0-auc:0.842334	validation_1-auc:0.832783
[52]	validation_0-auc:0.842451	validation_1-auc:0.833009
[53]	validation_0-auc:0.842499	validation_1-auc:0.832966
[54]	validation_0-auc:0.842375	validation_1-auc:0.832536
[55]	validation_0-auc:0.842581	validation_1-auc:0.833277
[56]	validation_0-auc:0.842547	validation_1-auc:0.833274
[57]	validation_0-auc:0.842649	validation_1-auc:0.833316
[58]	validation_0-auc:0.842702	validation_1-auc:0.833425
[59]	validation_0-auc:0.843062	validation_1-auc:0.83363
[60]	validation_0-auc:0.843219	validation_1-auc:0.833802
[61]	validation_0-auc:0.843529	validation_1-auc:0.834007
[62]	validation_0-auc:0.843617	validation_1-auc:0.834113
[63]	validation_0-auc:0.843761	validation_1-auc:0.834133
[64]	validation_0-auc:0.84423	validation_1-auc:0.834416
[65]	validation_0-auc:0.844444	validation_1-auc:0.834735
[66]	validation_0-auc:0.844804	validation_1-auc:0.834918
[67]	validation_0-auc:0.844996	validation_1-auc:0.834841
[68]	validation_0-auc:0.845094	validation_1-auc:0.83516
[69]	validation_0-auc:0.845004	validation_1-auc:0.835189
[70]	validation_0-auc:0.845326	validation_1-auc:0.835275
[71]	validation_0-auc:0.845408	validation_1-auc:0.835433
[72]	validation_0-auc:0.84552	validation_1-auc:0.835614
[73]	validation_0-auc:0.845466	validation_1-auc:0.835342
[74]	validation_0-auc:0.845843	validation_1-auc:0.835665
[75]	validation_0-auc:0.845952	validation_1-auc:0.835721
[76]	validation_0-auc:0.845998	validation_1-auc:0.835604
[77]	validation_0-auc:0.846098	validation_1-auc:0.835362
[78]	validation_0-auc:0.846424	validation_1-auc:0.835534
[79]	validation_0-auc:0.846489	validation_1-auc:0.835394
[80]	validation_0-auc:0.846743	validation_1-auc:0.835392
[81]	validation_0-auc:0.846743	validation_1-auc:0.835606
[82]	validation_0-auc:0.846905	validation_1-auc:0.835359
[83]	validation_0-auc:0.846974	validation_1-auc:0.835934
[84]	validation_0-auc:0.847064	validation_1-auc:0.835823
[85]	validation_0-auc:0.847209	validation_1-auc:0.836103
[86]	validation_0-auc:0.84731	validation_1-auc:0.836052
[87]	validation_0-auc:0.847663	validation_1-auc:0.836099
[88]	validation_0-auc:0.847861	validation_1-auc:0.836171
[89]	validation_0-auc:0.848177	validation_1-auc:0.836065
[90]	validation_0-auc:0.848257	validation_1-auc:0.836448
[91]	validation_0-auc:0.848625	validation_1-auc:0.836898
[92]	validation_0-auc:0.848724	validation_1-auc:0.837275
[93]	validation_0-auc:0.848903	validation_1-auc:0.837274
[94]	validation_0-auc:0.849011	validation_1-auc:0.837741
[95]	validation_0-auc:0.849094	validation_1-auc:0.837762
[96]	validation_0-auc:0.849288	validation_1-auc:0.837709
[97]	validation_0-auc:0.84967	validation_1-auc:0.837955
[98]	validation_0-auc:0.849985	validation_1-auc:0.83814
[99]	validation_0-auc:0.850237	validation_1-auc:0.838104
[100]	validation_0-auc:0.850503	validation_1-auc:0.838319
[101]	validation_0-auc:0.850546	validation_1-auc:0.838799
[102]	validation_0-auc:0.850583	validation_1-auc:0.838842
[103]	validation_0-auc:0.850769	validation_1-auc:0.838954
[104]	validation_0-auc:0.850747	validation_1-auc:0.83913
[105]	validation_0-auc:0.850927	validation_1-auc:0.839186
[106]	validation_0-auc:0.851232	validation_1-auc:0.839245
[107]	validation_0-auc:0.85128	validation_1-auc:0.839241
[108]	validation_0-auc:0.85143	validation_1-auc:0.839267
[109]	validation_0-auc:0.851666	validation_1-auc:0.839255
[110]	validation_0-auc:0.851756	validation_1-auc:0.839365
[111]	validation_0-auc:0.851916	validation_1-auc:0.839449
[112]	validation_0-auc:0.851786	validation_1-auc:0.839645
[113]	validation_0-auc:0.851974	validation_1-auc:0.839691
[114]	validation_0-auc:0.852162	validation_1-auc:0.839735
[115]	validation_0-auc:0.852284	validation_1-auc:0.839772
[116]	validation_0-auc:0.852267	validation_1-auc:0.839879
[117]	validation_0-auc:0.852391	validation_1-auc:0.839776
[118]	validation_0-auc:0.852571	validation_1-auc:0.839681
[119]	validation_0-auc:0.852637	validation_1-auc:0.839594
[120]	validation_0-auc:0.852851	validation_1-auc:0.839699
[121]	validation_0-auc:0.852996	validation_1-auc:0.839858
[122]	validation_0-auc:0.853103	validation_1-auc:0.840002
[123]	validation_0-auc:0.853238	validation_1-auc:0.839579
[124]	validation_0-auc:0.853356	validation_1-auc:0.83937
[125]	validation_0-auc:0.853545	validation_1-auc:0.839204
[126]	validation_0-auc:0.853823	validation_1-auc:0.83968
[127]	validation_0-auc:0.853982	validation_1-auc:0.839563
[128]	validation_0-auc:0.854187	validation_1-auc:0.839545
[129]	validation_0-auc:0.854313	validation_1-auc:0.839425
[130]	validation_0-auc:0.85461	validation_1-auc:0.839629
[131]	validation_0-auc:0.854815	validation_1-auc:0.839563
[132]	validation_0-auc:0.854845	validation_1-auc:0.839523
[133]	validation_0-auc:0.854739	validation_1-auc:0.839668
[134]	validation_0-auc:0.855035	validation_1-auc:0.839752
[135]	validation_0-auc:0.854966	validation_1-auc:0.839689
[136]	validation_0-auc:0.855118	validation_1-auc:0.839766
[137]	validation_0-auc:0.855336	validation_1-auc:0.839894
[138]	validation_0-auc:0.855679	validation_1-auc:0.84004
[139]	validation_0-auc:0.8558	validation_1-auc:0.840295
[140]	validation_0-auc:0.856034	validation_1-auc:0.840582
[141]	validation_0-auc:0.85609	validation_1-auc:0.840527
[142]	validation_0-auc:0.856058	validation_1-auc:0.840824
[143]	validation_0-auc:0.856179	validation_1-auc:0.840714
[144]	validation_0-auc:0.856461	validation_1-auc:0.840902
[145]	validation_0-auc:0.856596	validation_1-auc:0.840922
[146]	validation_0-auc:0.85682	validation_1-auc:0.840999
[147]	validation_0-auc:0.856856	validation_1-auc:0.841113
[148]	validation_0-auc:0.856846	validation_1-auc:0.84122
[149]	validation_0-auc:0.856846	validation_1-auc:0.841151
[150]	validation_0-auc:0.856784	validation_1-auc:0.841142
[151]	validation_0-auc:0.856753	validation_1-auc:0.841307
[152]	validation_0-auc:0.856646	validation_1-auc:0.841443
[153]	validation_0-auc:0.856637	validation_1-auc:0.841588
[154]	validation_0-auc:0.856957	validation_1-auc:0.841629
[155]	validation_0-auc:0.857008	validation_1-auc:0.841672
[156]	validation_0-auc:0.857301	validation_1-auc:0.841631
[157]	validation_0-auc:0.857543	validation_1-auc:0.841666
[158]	validation_0-auc:0.857764	validation_1-auc:0.841749
[159]	validation_0-auc:0.857927	validation_1-auc:0.841761
[160]	validation_0-auc:0.858148	validation_1-auc:0.841758
[161]	validation_0-auc:0.858244	validation_1-auc:0.841884
[162]	validation_0-auc:0.858475	validation_1-auc:0.841785
[163]	validation_0-auc:0.85856	validation_1-auc:0.841826
[164]	validation_0-auc:0.858617	validation_1-auc:0.841824
[165]	validation_0-auc:0.858847	validation_1-auc:0.84189
[166]	validation_0-auc:0.859044	validation_1-auc:0.841925
[167]	validation_0-auc:0.859199	validation_1-auc:0.841916
[168]	validation_0-auc:0.859372	validation_1-auc:0.841903
[169]	validation_0-auc:0.859365	validation_1-auc:0.841887
[170]	validation_0-auc:0.859544	validation_1-auc:0.842024
[171]	validation_0-auc:0.859786	validation_1-auc:0.842119
[172]	validation_0-auc:0.85995	validation_1-auc:0.842194
[173]	validation_0-auc:0.86012	validation_1-auc:0.842381
[174]	validation_0-auc:0.860386	validation_1-auc:0.842369
[175]	validation_0-auc:0.860625	validation_1-auc:0.842441
[176]	validation_0-auc:0.860766	validation_1-auc:0.842464
[177]	validation_0-auc:0.860945	validation_1-auc:0.842644
[178]	validation_0-auc:0.861003	validation_1-auc:0.842805
[179]	validation_0-auc:0.861291	validation_1-auc:0.842788
[180]	validation_0-auc:0.861357	validation_1-auc:0.842727
[181]	validation_0-auc:0.861469	validation_1-auc:0.842929
[182]	validation_0-auc:0.861745	validation_1-auc:0.842969
[183]	validation_0-auc:0.861901	validation_1-auc:0.842965
[184]	validation_0-auc:0.862012	validation_1-auc:0.842979
[185]	validation_0-auc:0.86208	validation_1-auc:0.842969
[186]	validation_0-auc:0.862209	validation_1-auc:0.843109
[187]	validation_0-auc:0.86234	validation_1-auc:0.843185
[188]	validation_0-auc:0.862396	validation_1-auc:0.843389
[189]	validation_0-auc:0.862532	validation_1-auc:0.843394
[190]	validation_0-auc:0.862709	validation_1-auc:0.843271
[191]	validation_0-auc:0.862806	validation_1-auc:0.843397
[192]	validation_0-auc:0.862916	validation_1-auc:0.843408
[193]	validation_0-auc:0.862984	validation_1-auc:0.843418
[194]	validation_0-auc:0.86307	validation_1-auc:0.843425
[195]	validation_0-auc:0.863185	validation_1-auc:0.843518
[196]	validation_0-auc:0.863362	validation_1-auc:0.843597
[197]	validation_0-auc:0.863577	validation_1-auc:0.843831
[198]	validation_0-auc:0.863708	validation_1-auc:0.843939
[199]	validation_0-auc:0.863849	validation_1-auc:0.843746
[200]	validation_0-auc:0.863899	validation_1-auc:0.84371
[201]	validation_0-auc:0.864029	validation_1-auc:0.843819
[202]	validation_0-auc:0.864062	validation_1-auc:0.84374
[203]	validation_0-auc:0.864247	validation_1-auc:0.843763
[204]	validation_0-auc:0.864325	validation_1-auc:0.843856
[205]	validation_0-auc:0.864463	validation_1-auc:0.843886
[206]	validation_0-auc:0.864571	validation_1-auc:0.843809
[207]	validation_0-auc:0.864744	validation_1-auc:0.843864
[208]	validation_0-auc:0.864956	validation_1-auc:0.844002
[209]	validation_0-auc:0.865076	validation_1-auc:0.843981
[210]	validation_0-auc:0.865202	validation_1-auc:0.843969
[211]	validation_0-auc:0.865449	validation_1-auc:0.844111
[212]	validation_0-auc:0.865558	validation_1-auc:0.844096
[213]	validation_0-auc:0.86573	validation_1-auc:0.84415
[214]	validation_0-auc:0.865874	validation_1-auc:0.844148
[215]	validation_0-auc:0.866027	validation_1-auc:0.844143
[216]	validation_0-auc:0.866154	validation_1-auc:0.84414
[217]	validation_0-auc:0.866298	validation_1-auc:0.844085
[218]	validation_0-auc:0.866379	validation_1-auc:0.844085
[219]	validation_0-auc:0.866416	validation_1-auc:0.844085
[220]	validation_0-auc:0.866476	validation_1-auc:0.844075
[221]	validation_0-auc:0.866573	validation_1-auc:0.844022
[222]	validation_0-auc:0.866703	validation_1-auc:0.844025
[223]	validation_0-auc:0.866825	validation_1-auc:0.843936
[224]	validation_0-auc:0.866988	validation_1-auc:0.843933
[225]	validation_0-auc:0.867094	validation_1-auc:0.843896
[226]	validation_0-auc:0.867215	validation_1-auc:0.843936
[227]	validation_0-auc:0.867322	validation_1-auc:0.843953
[228]	validation_0-auc:0.86744	validation_1-auc:0.843836
[229]	validation_0-auc:0.86758	validation_1-auc:0.843765
[230]	validation_0-auc:0.867675	validation_1-auc:0.843743
[231]	validation_0-auc:0.867733	validation_1-auc:0.843813
[232]	validation_0-auc:0.867856	validation_1-auc:0.843802
[233]	validation_0-auc:0.867955	validation_1-auc:0.84375
[234]	validation_0-auc:0.868004	validation_1-auc:0.84381
[235]	validation_0-auc:0.8681	validation_1-auc:0.843761
[236]	validation_0-auc:0.86824	validation_1-auc:0.843648
[237]	validation_0-auc:0.868312	validation_1-auc:0.843682
[238]	validation_0-auc:0.868414	validation_1-auc:0.843635
[239]	validation_0-auc:0.868512	validation_1-auc:0.843715
[240]	validation_0-auc:0.868589	validation_1-auc:0.843717
[241]	validation_0-auc:0.868774	validation_1-auc:0.843658
[242]	validation_0-auc:0.868862	validation_1-auc:0.84362
[243]	validation_0-auc:0.86894	validation_1-auc:0.843603
[244]	validation_0-auc:0.86902	validation_1-auc:0.843705
[245]	validation_0-auc:0.869121	validation_1-auc:0.84355
[246]	validation_0-auc:0.869209	validation_1-auc:0.843535
[247]	validation_0-auc:0.869315	validation_1-auc:0.843524
[248]	validation_0-auc:0.869464	validation_1-auc:0.843452
[249]	validation_0-auc:0.869574	validation_1-auc:0.843459
[250]	validation_0-auc:0.869647	validation_1-auc:0.843408
[251]	validation_0-auc:0.869713	validation_1-auc:0.843383
[252]	validation_0-auc:0.869788	validation_1-auc:0.843255
[253]	validation_0-auc:0.86987	validation_1-auc:0.843181
[254]	validation_0-auc:0.869933	validation_1-auc:0.843175
[255]	validation_0-auc:0.87001	validation_1-auc:0.843212
[256]	validation_0-auc:0.870066	validation_1-auc:0.843206
[257]	validation_0-auc:0.870173	validation_1-auc:0.843252
[258]	validation_0-auc:0.870245	validation_1-auc:0.843094
[259]	validation_0-auc:0.870368	validation_1-auc:0.843105
[260]	validation_0-auc:0.870489	validation_1-auc:0.843106
[261]	validation_0-auc:0.870645	validation_1-auc:0.843074
[262]	validation_0-auc:0.870797	validation_1-auc:0.843108
[263]	validation_0-auc:0.870923	validation_1-auc:0.843127
[264]	validation_0-auc:0.871012	validation_1-auc:0.843105
[265]	validation_0-auc:0.871127	validation_1-auc:0.843089
[266]	validation_0-auc:0.87119	validation_1-auc:0.843067
[267]	validation_0-auc:0.871241	validation_1-auc:0.843082
[268]	validation_0-auc:0.87142	validation_1-auc:0.843153
[269]	validation_0-auc:0.871486	validation_1-auc:0.843052
[270]	validation_0-auc:0.871564	validation_1-auc:0.842988
[271]	validation_0-auc:0.871635	validation_1-auc:0.843033
[272]	validation_0-auc:0.871712	validation_1-auc:0.843044
[273]	validation_0-auc:0.871793	validation_1-auc:0.842981
[274]	validation_0-auc:0.871856	validation_1-auc:0.842912
[275]	validation_0-auc:0.871898	validation_1-auc:0.842888
[276]	validation_0-auc:0.872014	validation_1-auc:0.842917
[277]	validation_0-auc:0.872178	validation_1-auc:0.842981
[278]	validation_0-auc:0.872322	validation_1-auc:0.843005
[279]	validation_0-auc:0.872461	validation_1-auc:0.842963
[280]	validation_0-auc:0.872533	validation_1-auc:0.843
[281]	validation_0-auc:0.87261	validation_1-auc:0.843036
[282]	validation_0-auc:0.872733	validation_1-auc:0.843076
[283]	validation_0-auc:0.872776	validation_1-auc:0.843025
[284]	validation_0-auc:0.872829	validation_1-auc:0.843029
[285]	validation_0-auc:0.872917	validation_1-auc:0.842988
[286]	validation_0-auc:0.87303	validation_1-auc:0.842969
[287]	validation_0-auc:0.873159	validation_1-auc:0.842969
[288]	validation_0-auc:0.873233	validation_1-auc:0.843004
[289]	validation_0-auc:0.873346	validation_1-auc:0.843044
[290]	validation_0-auc:0.87339	validation_1-auc:0.843025
[291]	validation_0-auc:0.873534	validation_1-auc:0.843058
[292]	validation_0-auc:0.873577	validation_1-auc:0.843123
[293]	validation_0-auc:0.87364	validation_1-auc:0.843134
[294]	validation_0-auc:0.873695	validation_1-auc:0.843111
[295]	validation_0-auc:0.873769	validation_1-auc:0.843094
[296]	validation_0-auc:0.873849	validation_1-auc:0.843137
[297]	validation_0-auc:0.873987	validation_1-auc:0.843165
[298]	validation_0-auc:0.874085	validation_1-auc:0.843203
[299]	validation_0-auc:0.874167	validation_1-auc:0.843232
[300]	validation_0-auc:0.874209	validation_1-auc:0.843231
[301]	validation_0-auc:0.874313	validation_1-auc:0.843235
[302]	validation_0-auc:0.874439	validation_1-auc:0.843241
[303]	validation_0-auc:0.87457	validation_1-auc:0.843259
[304]	validation_0-auc:0.874681	validation_1-auc:0.843275
[305]	validation_0-auc:0.874774	validation_1-auc:0.843234
[306]	validation_0-auc:0.874893	validation_1-auc:0.843154
[307]	validation_0-auc:0.87498	validation_1-auc:0.843171
[308]	validation_0-auc:0.875049	validation_1-auc:0.843172
[309]	validation_0-auc:0.875123	validation_1-auc:0.843178
[310]	validation_0-auc:0.875172	validation_1-auc:0.843141
[311]	validation_0-auc:0.875193	validation_1-auc:0.843154
[312]	validation_0-auc:0.875318	validation_1-auc:0.843132
[313]	validation_0-auc:0.875378	validation_1-auc:0.843126
[314]	validation_0-auc:0.875416	validation_1-auc:0.84312
[315]	validation_0-auc:0.875543	validation_1-auc:0.843131
[316]	validation_0-auc:0.87558	validation_1-auc:0.84316
[317]	validation_0-auc:0.875673	validation_1-auc:0.843087
[318]	validation_0-auc:0.875706	validation_1-auc:0.843097
[319]	validation_0-auc:0.875812	validation_1-auc:0.843123
[320]	validation_0-auc:0.875916	validation_1-auc:0.843122
[321]	validation_0-auc:0.875938	validation_1-auc:0.843184
[322]	validation_0-auc:0.876033	validation_1-auc:0.843244
[323]	validation_0-auc:0.876129	validation_1-auc:0.843211
[324]	validation_0-auc:0.876151	validation_1-auc:0.843247
[325]	validation_0-auc:0.876227	validation_1-auc:0.843217
[326]	validation_0-auc:0.876311	validation_1-auc:0.843188
[327]	validation_0-auc:0.87641	validation_1-auc:0.843241
[328]	validation_0-auc:0.876439	validation_1-auc:0.843198
[329]	validation_0-auc:0.876501	validation_1-auc:0.843205
[330]	validation_0-auc:0.87656	validation_1-auc:0.843209
[331]	validation_0-auc:0.87663	validation_1-auc:0.843203
[332]	validation_0-auc:0.876661	validation_1-auc:0.843209
[333]	validation_0-auc:0.876731	validation_1-auc:0.843231
[334]	validation_0-auc:0.876786	validation_1-auc:0.843202
[335]	validation_0-auc:0.876843	validation_1-auc:0.843159
[336]	validation_0-auc:0.876883	validation_1-auc:0.84319
[337]	validation_0-auc:0.876955	validation_1-auc:0.843217
[338]	validation_0-auc:0.876998	validation_1-auc:0.843207
[339]	validation_0-auc:0.877034	validation_1-auc:0.843236
[340]	validation_0-auc:0.877152	validation_1-auc:0.843225
[341]	validation_0-auc:0.877217	validation_1-auc:0.84316
[342]	validation_0-auc:0.87729	validation_1-auc:0.843066
[343]	validation_0-auc:0.877355	validation_1-auc:0.843052
[344]	validation_0-auc:0.877418	validation_1-auc:0.843005
[345]	validation_0-auc:0.877454	validation_1-auc:0.842971
[346]	validation_0-auc:0.877541	validation_1-auc:0.842945
[347]	validation_0-auc:0.877636	validation_1-auc:0.842951
[348]	validation_0-auc:0.877687	validation_1-auc:0.842944
[349]	validation_0-auc:0.877797	validation_1-auc:0.842915
[350]	validation_0-auc:0.877824	validation_1-auc:0.842915
[351]	validation_0-auc:0.877899	validation_1-auc:0.842932
[352]	validation_0-auc:0.877933	validation_1-auc:0.84288
[353]	validation_0-auc:0.878024	validation_1-auc:0.842856
[354]	validation_0-auc:0.878042	validation_1-auc:0.842866
[355]	validation_0-auc:0.878148	validation_1-auc:0.842813
[356]	validation_0-auc:0.878218	validation_1-auc:0.842828
[357]	validation_0-auc:0.878279	validation_1-auc:0.842829
[358]	validation_0-auc:0.878364	validation_1-auc:0.842806
[359]	validation_0-auc:0.878432	validation_1-auc:0.842772
[360]	validation_0-auc:0.878495	validation_1-auc:0.842751
[361]	validation_0-auc:0.878545	validation_1-auc:0.842711
[362]	validation_0-auc:0.878608	validation_1-auc:0.842721
[363]	validation_0-auc:0.878657	validation_1-auc:0.84269
[364]	validation_0-auc:0.878746	validation_1-auc:0.842685
[365]	validation_0-auc:0.878816	validation_1-auc:0.842671
[366]	validation_0-auc:0.878883	validation_1-auc:0.842671
[367]	validation_0-auc:0.878917	validation_1-auc:0.842655
[368]	validation_0-auc:0.879014	validation_1-auc:0.842658
[369]	validation_0-auc:0.879161	validation_1-auc:0.842643
[370]	validation_0-auc:0.879214	validation_1-auc:0.84265
[371]	validation_0-auc:0.87931	validation_1-auc:0.842652
[372]	validation_0-auc:0.879407	validation_1-auc:0.842645
[373]	validation_0-auc:0.879461	validation_1-auc:0.842669
[374]	validation_0-auc:0.879583	validation_1-auc:0.84268
[375]	validation_0-auc:0.87964	validation_1-auc:0.842675
[376]	validation_0-auc:0.879756	validation_1-auc:0.842722
[377]	validation_0-auc:0.879805	validation_1-auc:0.842715
[378]	validation_0-auc:0.87997	validation_1-auc:0.842733
[379]	validation_0-auc:0.879996	validation_1-auc:0.842723
[380]	validation_0-auc:0.88003	validation_1-auc:0.84275
[381]	validation_0-auc:0.880162	validation_1-auc:0.842757
[382]	validation_0-auc:0.880263	validation_1-auc:0.842748
[383]	validation_0-auc:0.880311	validation_1-auc:0.842697
[384]	validation_0-auc:0.880397	validation_1-auc:0.8427
[385]	validation_0-auc:0.880414	validation_1-auc:0.842697
[386]	validation_0-auc:0.880461	validation_1-auc:0.842707
[387]	validation_0-auc:0.880529	validation_1-auc:0.842724
[388]	validation_0-auc:0.880545	validation_1-auc:0.842733
[389]	validation_0-auc:0.880555	validation_1-auc:0.842735
[390]	validation_0-auc:0.88059	validation_1-auc:0.842733
[391]	validation_0-auc:0.880663	validation_1-auc:0.842727
[392]	validation_0-auc:0.880709	validation_1-auc:0.842731
[393]	validation_0-auc:0.880721	validation_1-auc:0.84272
[394]	validation_0-auc:0.880783	validation_1-auc:0.842718
[395]	validation_0-auc:0.880856	validation_1-auc:0.842709
[396]	validation_0-auc:0.880872	validation_1-auc:0.842692
[397]	validation_0-auc:0.880916	validation_1-auc:0.842706
[398]	validation_0-auc:0.881063	validation_1-auc:0.842713
[399]	validation_0-auc:0.881139	validation_1-auc:0.842676
[400]	validation_0-auc:0.881198	validation_1-auc:0.842673
[401]	validation_0-auc:0.881212	validation_1-auc:0.84267
[402]	validation_0-auc:0.881278	validation_1-auc:0.842669
[403]	validation_0-auc:0.881344	validation_1-auc:0.842684
[404]	validation_0-auc:0.881366	validation_1-auc:0.842674
[405]	validation_0-auc:0.881395	validation_1-auc:0.842686
[406]	validation_0-auc:0.881423	validation_1-auc:0.842684
[407]	validation_0-auc:0.881581	validation_1-auc:0.842694
[408]	validation_0-auc:0.881662	validation_1-auc:0.842675
[409]	validation_0-auc:0.881706	validation_1-auc:0.842659
[410]	validation_0-auc:0.881724	validation_1-auc:0.842659
[411]	validation_0-auc:0.881738	validation_1-auc:0.842654
[412]	validation_0-auc:0.881806	validation_1-auc:0.842637
[413]	validation_0-auc:0.881832	validation_1-auc:0.842651
Stopping. Best iteration:
[213]	validation_0-auc:0.86573	validation_1-auc:0.84415

ROC AUC: 0.8442

 

# Raise n_estimators to 1000, lower learning_rate to 0.02, and add L1
# regularization via reg_alpha=0.03 (tuned variant of the earlier model).
xgb_clf = XGBClassifier(
    n_estimators=1000,
    random_state=156,
    learning_rate=0.02,
    max_depth=7,
    min_child_weight=1,
    colsample_bytree=0.75,
    reg_alpha=0.03,
)

# Fit with AUC as the eval metric; early stopping fires after 200 rounds
# without improvement on the last eval_set entry (the test split).
eval_sets = [(X_train, y_train), (X_test, y_test)]
xgb_clf.fit(
    X_train,
    y_train,
    early_stopping_rounds=200,
    eval_metric="auc",
    eval_set=eval_sets,
)

# Score positive-class probabilities on the held-out split.
test_proba = xgb_clf.predict_proba(X_test)[:, 1]
xgb_roc_score = roc_auc_score(y_test, test_proba, average='macro')
print('ROC AUC: {0:.4f}'.format(xgb_roc_score))

[0]	validation_0-auc:0.82311	validation_1-auc:0.815226
Multiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.

Will train until validation_1-auc hasn't improved in 200 rounds.
[1]	validation_0-auc:0.827094	validation_1-auc:0.816566
[2]	validation_0-auc:0.832027	validation_1-auc:0.820393
[3]	validation_0-auc:0.835873	validation_1-auc:0.825019
[4]	validation_0-auc:0.838197	validation_1-auc:0.826078
[5]	validation_0-auc:0.837948	validation_1-auc:0.827282
[6]	validation_0-auc:0.8381	validation_1-auc:0.826888
[7]	validation_0-auc:0.838475	validation_1-auc:0.827131
[8]	validation_0-auc:0.839775	validation_1-auc:0.828198
[9]	validation_0-auc:0.83981	validation_1-auc:0.827834
[10]	validation_0-auc:0.841664	validation_1-auc:0.828996
[11]	validation_0-auc:0.841069	validation_1-auc:0.82802
[12]	validation_0-auc:0.841167	validation_1-auc:0.828517
[13]	validation_0-auc:0.841455	validation_1-auc:0.828317
[14]	validation_0-auc:0.845308	validation_1-auc:0.834099
[15]	validation_0-auc:0.844529	validation_1-auc:0.832385
[16]	validation_0-auc:0.845102	validation_1-auc:0.833248
[17]	validation_0-auc:0.847159	validation_1-auc:0.835711
[18]	validation_0-auc:0.847755	validation_1-auc:0.835706
[19]	validation_0-auc:0.848236	validation_1-auc:0.835945
[20]	validation_0-auc:0.848284	validation_1-auc:0.835709
[21]	validation_0-auc:0.848832	validation_1-auc:0.835969
[22]	validation_0-auc:0.84907	validation_1-auc:0.83569
[23]	validation_0-auc:0.849	validation_1-auc:0.835783
[24]	validation_0-auc:0.849496	validation_1-auc:0.835706
[25]	validation_0-auc:0.849995	validation_1-auc:0.835679
[26]	validation_0-auc:0.85008	validation_1-auc:0.835805
[27]	validation_0-auc:0.850219	validation_1-auc:0.836293
[28]	validation_0-auc:0.849965	validation_1-auc:0.836067
[29]	validation_0-auc:0.85013	validation_1-auc:0.835898
[30]	validation_0-auc:0.850531	validation_1-auc:0.83601
[31]	validation_0-auc:0.850575	validation_1-auc:0.835882
[32]	validation_0-auc:0.850492	validation_1-auc:0.835804
[33]	validation_0-auc:0.850882	validation_1-auc:0.835793
[34]	validation_0-auc:0.850957	validation_1-auc:0.836164
[35]	validation_0-auc:0.851248	validation_1-auc:0.836261
[36]	validation_0-auc:0.851996	validation_1-auc:0.836146
[37]	validation_0-auc:0.852284	validation_1-auc:0.836043
[38]	validation_0-auc:0.852521	validation_1-auc:0.836253
[39]	validation_0-auc:0.852865	validation_1-auc:0.836523
[40]	validation_0-auc:0.852924	validation_1-auc:0.836412
[41]	validation_0-auc:0.853175	validation_1-auc:0.836824
[42]	validation_0-auc:0.853299	validation_1-auc:0.836834
[43]	validation_0-auc:0.853536	validation_1-auc:0.836901
[44]	validation_0-auc:0.854069	validation_1-auc:0.836961
[45]	validation_0-auc:0.854553	validation_1-auc:0.836882
[46]	validation_0-auc:0.854768	validation_1-auc:0.836896
[47]	validation_0-auc:0.855037	validation_1-auc:0.83701
[48]	validation_0-auc:0.855592	validation_1-auc:0.837395
[49]	validation_0-auc:0.855888	validation_1-auc:0.837527
[50]	validation_0-auc:0.856432	validation_1-auc:0.837669
[51]	validation_0-auc:0.856693	validation_1-auc:0.837593
[52]	validation_0-auc:0.857042	validation_1-auc:0.837942
[53]	validation_0-auc:0.857489	validation_1-auc:0.838094
[54]	validation_0-auc:0.857878	validation_1-auc:0.83802
[55]	validation_0-auc:0.858173	validation_1-auc:0.837285
[56]	validation_0-auc:0.85787	validation_1-auc:0.836979
[57]	validation_0-auc:0.858151	validation_1-auc:0.837119
[58]	validation_0-auc:0.858401	validation_1-auc:0.837383
[59]	validation_0-auc:0.859439	validation_1-auc:0.837705
[60]	validation_0-auc:0.858764	validation_1-auc:0.83719
[61]	validation_0-auc:0.85906	validation_1-auc:0.837564
[62]	validation_0-auc:0.859967	validation_1-auc:0.837755
[63]	validation_0-auc:0.859661	validation_1-auc:0.83726
[64]	validation_0-auc:0.859828	validation_1-auc:0.837766
[65]	validation_0-auc:0.860574	validation_1-auc:0.837933
[66]	validation_0-auc:0.861013	validation_1-auc:0.83858
[67]	validation_0-auc:0.861262	validation_1-auc:0.838677
[68]	validation_0-auc:0.861375	validation_1-auc:0.838985
[69]	validation_0-auc:0.861231	validation_1-auc:0.838326
[70]	validation_0-auc:0.861627	validation_1-auc:0.838806
[71]	validation_0-auc:0.8618	validation_1-auc:0.83889
[72]	validation_0-auc:0.862174	validation_1-auc:0.83902
[73]	validation_0-auc:0.862118	validation_1-auc:0.838712
[74]	validation_0-auc:0.862427	validation_1-auc:0.838917
[75]	validation_0-auc:0.862756	validation_1-auc:0.839005
[76]	validation_0-auc:0.862892	validation_1-auc:0.839058
[77]	validation_0-auc:0.863271	validation_1-auc:0.839331
[78]	validation_0-auc:0.863498	validation_1-auc:0.839328
[79]	validation_0-auc:0.863669	validation_1-auc:0.839331
[80]	validation_0-auc:0.863919	validation_1-auc:0.839395
[81]	validation_0-auc:0.864168	validation_1-auc:0.839407
[82]	validation_0-auc:0.864551	validation_1-auc:0.839303
[83]	validation_0-auc:0.864439	validation_1-auc:0.839163
[84]	validation_0-auc:0.864689	validation_1-auc:0.839268
[85]	validation_0-auc:0.864978	validation_1-auc:0.839189
[86]	validation_0-auc:0.865107	validation_1-auc:0.839324
[87]	validation_0-auc:0.86551	validation_1-auc:0.839359
[88]	validation_0-auc:0.865853	validation_1-auc:0.839478
[89]	validation_0-auc:0.86614	validation_1-auc:0.839816
[90]	validation_0-auc:0.866329	validation_1-auc:0.839923
[91]	validation_0-auc:0.866734	validation_1-auc:0.840442
[92]	validation_0-auc:0.866811	validation_1-auc:0.840675
[93]	validation_0-auc:0.867217	validation_1-auc:0.840554
[94]	validation_0-auc:0.867368	validation_1-auc:0.840805
[95]	validation_0-auc:0.867548	validation_1-auc:0.840829
[96]	validation_0-auc:0.867747	validation_1-auc:0.840942
[97]	validation_0-auc:0.868011	validation_1-auc:0.841084
[98]	validation_0-auc:0.868313	validation_1-auc:0.84144
[99]	validation_0-auc:0.868491	validation_1-auc:0.841541
[100]	validation_0-auc:0.868728	validation_1-auc:0.841669
[101]	validation_0-auc:0.868926	validation_1-auc:0.841648
[102]	validation_0-auc:0.869116	validation_1-auc:0.841551
[103]	validation_0-auc:0.869399	validation_1-auc:0.841713
[104]	validation_0-auc:0.869471	validation_1-auc:0.842001
[105]	validation_0-auc:0.869659	validation_1-auc:0.841964
[106]	validation_0-auc:0.869868	validation_1-auc:0.842032
[107]	validation_0-auc:0.86996	validation_1-auc:0.84223
[108]	validation_0-auc:0.870176	validation_1-auc:0.842264
[109]	validation_0-auc:0.870518	validation_1-auc:0.842334
[110]	validation_0-auc:0.870744	validation_1-auc:0.842421
[111]	validation_0-auc:0.871218	validation_1-auc:0.842608
[112]	validation_0-auc:0.871296	validation_1-auc:0.842531
[113]	validation_0-auc:0.87165	validation_1-auc:0.842752
[114]	validation_0-auc:0.87211	validation_1-auc:0.842839
[115]	validation_0-auc:0.872618	validation_1-auc:0.842774
[116]	validation_0-auc:0.872624	validation_1-auc:0.842819
[117]	validation_0-auc:0.87299	validation_1-auc:0.842901
[118]	validation_0-auc:0.873319	validation_1-auc:0.842954
[119]	validation_0-auc:0.873501	validation_1-auc:0.842878
[120]	validation_0-auc:0.873649	validation_1-auc:0.842856
[121]	validation_0-auc:0.873867	validation_1-auc:0.84301
[122]	validation_0-auc:0.874181	validation_1-auc:0.842898
[123]	validation_0-auc:0.874522	validation_1-auc:0.843008
[124]	validation_0-auc:0.874819	validation_1-auc:0.842984
[125]	validation_0-auc:0.875148	validation_1-auc:0.842878
[126]	validation_0-auc:0.875423	validation_1-auc:0.84291
[127]	validation_0-auc:0.875561	validation_1-auc:0.842942
[128]	validation_0-auc:0.875866	validation_1-auc:0.842748
[129]	validation_0-auc:0.876054	validation_1-auc:0.842673
[130]	validation_0-auc:0.876327	validation_1-auc:0.842652
[131]	validation_0-auc:0.876557	validation_1-auc:0.84263
[132]	validation_0-auc:0.876719	validation_1-auc:0.842681
[133]	validation_0-auc:0.876843	validation_1-auc:0.842812
[134]	validation_0-auc:0.87701	validation_1-auc:0.842694
[135]	validation_0-auc:0.877162	validation_1-auc:0.842685
[136]	validation_0-auc:0.877358	validation_1-auc:0.842898
[137]	validation_0-auc:0.877665	validation_1-auc:0.842915
[138]	validation_0-auc:0.877901	validation_1-auc:0.843127
[139]	validation_0-auc:0.878061	validation_1-auc:0.843425
[140]	validation_0-auc:0.87835	validation_1-auc:0.843359
[141]	validation_0-auc:0.87846	validation_1-auc:0.843301
[142]	validation_0-auc:0.878656	validation_1-auc:0.843428
[143]	validation_0-auc:0.878713	validation_1-auc:0.843385
[144]	validation_0-auc:0.878905	validation_1-auc:0.843463
[145]	validation_0-auc:0.879131	validation_1-auc:0.843475
[146]	validation_0-auc:0.87937	validation_1-auc:0.843636
[147]	validation_0-auc:0.879459	validation_1-auc:0.843606
[148]	validation_0-auc:0.879425	validation_1-auc:0.843573
[149]	validation_0-auc:0.879568	validation_1-auc:0.843621
[150]	validation_0-auc:0.879684	validation_1-auc:0.843523
[151]	validation_0-auc:0.879746	validation_1-auc:0.843485
[152]	validation_0-auc:0.879772	validation_1-auc:0.84348
[153]	validation_0-auc:0.87969	validation_1-auc:0.843363
[154]	validation_0-auc:0.880169	validation_1-auc:0.843571
[155]	validation_0-auc:0.880258	validation_1-auc:0.84347
[156]	validation_0-auc:0.88067	validation_1-auc:0.84353
[157]	validation_0-auc:0.880969	validation_1-auc:0.843364
[158]	validation_0-auc:0.881349	validation_1-auc:0.843599
[159]	validation_0-auc:0.881438	validation_1-auc:0.843607
[160]	validation_0-auc:0.881704	validation_1-auc:0.843635
[161]	validation_0-auc:0.881873	validation_1-auc:0.843673
[162]	validation_0-auc:0.882203	validation_1-auc:0.843698
[163]	validation_0-auc:0.882267	validation_1-auc:0.843531
[164]	validation_0-auc:0.882399	validation_1-auc:0.843682
[165]	validation_0-auc:0.882724	validation_1-auc:0.843793
[166]	validation_0-auc:0.883061	validation_1-auc:0.843854
[167]	validation_0-auc:0.883356	validation_1-auc:0.843902
[168]	validation_0-auc:0.88347	validation_1-auc:0.843823
[169]	validation_0-auc:0.883536	validation_1-auc:0.843789
[170]	validation_0-auc:0.883794	validation_1-auc:0.843963
[171]	validation_0-auc:0.884101	validation_1-auc:0.844054
[172]	validation_0-auc:0.884351	validation_1-auc:0.844265
[173]	validation_0-auc:0.884537	validation_1-auc:0.844307
[174]	validation_0-auc:0.884778	validation_1-auc:0.844361
[175]	validation_0-auc:0.885077	validation_1-auc:0.844315
[176]	validation_0-auc:0.885368	validation_1-auc:0.844352
[177]	validation_0-auc:0.885621	validation_1-auc:0.84445
[178]	validation_0-auc:0.885789	validation_1-auc:0.844386
[179]	validation_0-auc:0.88606	validation_1-auc:0.844359
[180]	validation_0-auc:0.886118	validation_1-auc:0.844377
[181]	validation_0-auc:0.886429	validation_1-auc:0.844343
[182]	validation_0-auc:0.886794	validation_1-auc:0.844324
[183]	validation_0-auc:0.887064	validation_1-auc:0.844385
[184]	validation_0-auc:0.887403	validation_1-auc:0.844514
[185]	validation_0-auc:0.887572	validation_1-auc:0.844636
[186]	validation_0-auc:0.887908	validation_1-auc:0.844741
[187]	validation_0-auc:0.888215	validation_1-auc:0.844832
[188]	validation_0-auc:0.888469	validation_1-auc:0.84475
[189]	validation_0-auc:0.888691	validation_1-auc:0.844907
[190]	validation_0-auc:0.888977	validation_1-auc:0.844979
[191]	validation_0-auc:0.889277	validation_1-auc:0.844983
[192]	validation_0-auc:0.889465	validation_1-auc:0.844993
[193]	validation_0-auc:0.889655	validation_1-auc:0.845095
[194]	validation_0-auc:0.889784	validation_1-auc:0.845206
[195]	validation_0-auc:0.889961	validation_1-auc:0.845308
[196]	validation_0-auc:0.890173	validation_1-auc:0.8454
[197]	validation_0-auc:0.890497	validation_1-auc:0.845438
[198]	validation_0-auc:0.890667	validation_1-auc:0.845451
[199]	validation_0-auc:0.890945	validation_1-auc:0.845567
[200]	validation_0-auc:0.891031	validation_1-auc:0.845522
[201]	validation_0-auc:0.891207	validation_1-auc:0.845543
[202]	validation_0-auc:0.891298	validation_1-auc:0.845482
[203]	validation_0-auc:0.891458	validation_1-auc:0.845436
[204]	validation_0-auc:0.891578	validation_1-auc:0.84551
[205]	validation_0-auc:0.891798	validation_1-auc:0.84558
[206]	validation_0-auc:0.892002	validation_1-auc:0.8455
[207]	validation_0-auc:0.892367	validation_1-auc:0.84548
[208]	validation_0-auc:0.892675	validation_1-auc:0.845427
[209]	validation_0-auc:0.892895	validation_1-auc:0.845473
[210]	validation_0-auc:0.893053	validation_1-auc:0.845371
[211]	validation_0-auc:0.893252	validation_1-auc:0.845357
[212]	validation_0-auc:0.893462	validation_1-auc:0.845474
[213]	validation_0-auc:0.893646	validation_1-auc:0.845514
[214]	validation_0-auc:0.89391	validation_1-auc:0.845557
[215]	validation_0-auc:0.894032	validation_1-auc:0.845484
[216]	validation_0-auc:0.894256	validation_1-auc:0.84547
[217]	validation_0-auc:0.89447	validation_1-auc:0.845551
[218]	validation_0-auc:0.89464	validation_1-auc:0.845464
[219]	validation_0-auc:0.89485	validation_1-auc:0.845332
[220]	validation_0-auc:0.894975	validation_1-auc:0.845316
[221]	validation_0-auc:0.895187	validation_1-auc:0.845304
[222]	validation_0-auc:0.895482	validation_1-auc:0.84535
[223]	validation_0-auc:0.89567	validation_1-auc:0.845403
[224]	validation_0-auc:0.895849	validation_1-auc:0.845423
[225]	validation_0-auc:0.896047	validation_1-auc:0.845438
[226]	validation_0-auc:0.89621	validation_1-auc:0.845418
[227]	validation_0-auc:0.896408	validation_1-auc:0.845339
[228]	validation_0-auc:0.896615	validation_1-auc:0.845362
[229]	validation_0-auc:0.896762	validation_1-auc:0.845287
[230]	validation_0-auc:0.89703	validation_1-auc:0.845325
[231]	validation_0-auc:0.897147	validation_1-auc:0.845228
[232]	validation_0-auc:0.897355	validation_1-auc:0.845152
[233]	validation_0-auc:0.89752	validation_1-auc:0.845191
[234]	validation_0-auc:0.897664	validation_1-auc:0.845113
[235]	validation_0-auc:0.897808	validation_1-auc:0.845115
[236]	validation_0-auc:0.897992	validation_1-auc:0.845138
[237]	validation_0-auc:0.898097	validation_1-auc:0.845168
[238]	validation_0-auc:0.898278	validation_1-auc:0.845204
[239]	validation_0-auc:0.898428	validation_1-auc:0.84522
[240]	validation_0-auc:0.898574	validation_1-auc:0.845141
[241]	validation_0-auc:0.898743	validation_1-auc:0.845217
[242]	validation_0-auc:0.898944	validation_1-auc:0.845171
[243]	validation_0-auc:0.899045	validation_1-auc:0.845165
[244]	validation_0-auc:0.899229	validation_1-auc:0.845132
[245]	validation_0-auc:0.899378	validation_1-auc:0.845064
[246]	validation_0-auc:0.899495	validation_1-auc:0.844957
[247]	validation_0-auc:0.899596	validation_1-auc:0.844991
[248]	validation_0-auc:0.899758	validation_1-auc:0.844981
[249]	validation_0-auc:0.899908	validation_1-auc:0.84498
[250]	validation_0-auc:0.899989	validation_1-auc:0.844982
[251]	validation_0-auc:0.900135	validation_1-auc:0.844962
[252]	validation_0-auc:0.900248	validation_1-auc:0.844899
[253]	validation_0-auc:0.900318	validation_1-auc:0.844834
[254]	validation_0-auc:0.900422	validation_1-auc:0.844881
[255]	validation_0-auc:0.900577	validation_1-auc:0.844872
[256]	validation_0-auc:0.900667	validation_1-auc:0.844891
[257]	validation_0-auc:0.900862	validation_1-auc:0.844854
[258]	validation_0-auc:0.900933	validation_1-auc:0.844822
[259]	validation_0-auc:0.9011	validation_1-auc:0.844748
[260]	validation_0-auc:0.901237	validation_1-auc:0.844746
[261]	validation_0-auc:0.901455	validation_1-auc:0.84458
[262]	validation_0-auc:0.90163	validation_1-auc:0.844553
[263]	validation_0-auc:0.901757	validation_1-auc:0.844573
[264]	validation_0-auc:0.90187	validation_1-auc:0.844499
[265]	validation_0-auc:0.901929	validation_1-auc:0.844553
[266]	validation_0-auc:0.901996	validation_1-auc:0.844574
[267]	validation_0-auc:0.902034	validation_1-auc:0.844591
[268]	validation_0-auc:0.902162	validation_1-auc:0.844535
[269]	validation_0-auc:0.902272	validation_1-auc:0.844592
[270]	validation_0-auc:0.902341	validation_1-auc:0.844507
[271]	validation_0-auc:0.902427	validation_1-auc:0.844539
[272]	validation_0-auc:0.90255	validation_1-auc:0.844586
[273]	validation_0-auc:0.90265	validation_1-auc:0.844558
[274]	validation_0-auc:0.902755	validation_1-auc:0.844644
[275]	validation_0-auc:0.902865	validation_1-auc:0.84456
[276]	validation_0-auc:0.902978	validation_1-auc:0.844478
[277]	validation_0-auc:0.90313	validation_1-auc:0.844424
[278]	validation_0-auc:0.903302	validation_1-auc:0.844364
[279]	validation_0-auc:0.903486	validation_1-auc:0.844382
[280]	validation_0-auc:0.903525	validation_1-auc:0.844377
[281]	validation_0-auc:0.903597	validation_1-auc:0.844434
[282]	validation_0-auc:0.903762	validation_1-auc:0.844502
[283]	validation_0-auc:0.903835	validation_1-auc:0.84453
[284]	validation_0-auc:0.903908	validation_1-auc:0.844534
[285]	validation_0-auc:0.903983	validation_1-auc:0.844493
[286]	validation_0-auc:0.904128	validation_1-auc:0.844469
[287]	validation_0-auc:0.904224	validation_1-auc:0.844478
[288]	validation_0-auc:0.904337	validation_1-auc:0.844413
[289]	validation_0-auc:0.904409	validation_1-auc:0.844408
[290]	validation_0-auc:0.904532	validation_1-auc:0.844505
[291]	validation_0-auc:0.904571	validation_1-auc:0.84446
[292]	validation_0-auc:0.904651	validation_1-auc:0.844474
[293]	validation_0-auc:0.904717	validation_1-auc:0.84443
[294]	validation_0-auc:0.904796	validation_1-auc:0.844373
[295]	validation_0-auc:0.9049	validation_1-auc:0.844322
[296]	validation_0-auc:0.904981	validation_1-auc:0.844269
[297]	validation_0-auc:0.9051	validation_1-auc:0.844265
[298]	validation_0-auc:0.905163	validation_1-auc:0.844253
[299]	validation_0-auc:0.905254	validation_1-auc:0.844185
[300]	validation_0-auc:0.905318	validation_1-auc:0.84414
[301]	validation_0-auc:0.90541	validation_1-auc:0.8441
[302]	validation_0-auc:0.905482	validation_1-auc:0.844045
[303]	validation_0-auc:0.90562	validation_1-auc:0.844054
[304]	validation_0-auc:0.905638	validation_1-auc:0.844054
[305]	validation_0-auc:0.905808	validation_1-auc:0.844089
[306]	validation_0-auc:0.90594	validation_1-auc:0.844069
[307]	validation_0-auc:0.906004	validation_1-auc:0.844084
[308]	validation_0-auc:0.906105	validation_1-auc:0.844063
[309]	validation_0-auc:0.906136	validation_1-auc:0.844092
[310]	validation_0-auc:0.906196	validation_1-auc:0.844025
[311]	validation_0-auc:0.906218	validation_1-auc:0.84402
[312]	validation_0-auc:0.906352	validation_1-auc:0.844062
[313]	validation_0-auc:0.906449	validation_1-auc:0.843984
[314]	validation_0-auc:0.906479	validation_1-auc:0.84397
[315]	validation_0-auc:0.906534	validation_1-auc:0.843916
[316]	validation_0-auc:0.906597	validation_1-auc:0.843917
[317]	validation_0-auc:0.906699	validation_1-auc:0.843867
[318]	validation_0-auc:0.906752	validation_1-auc:0.843875
[319]	validation_0-auc:0.906771	validation_1-auc:0.843927
[320]	validation_0-auc:0.906917	validation_1-auc:0.843874
[321]	validation_0-auc:0.906945	validation_1-auc:0.843845
[322]	validation_0-auc:0.907017	validation_1-auc:0.843853
[323]	validation_0-auc:0.907131	validation_1-auc:0.843839
[324]	validation_0-auc:0.907161	validation_1-auc:0.843841
[325]	validation_0-auc:0.907276	validation_1-auc:0.843901
[326]	validation_0-auc:0.907346	validation_1-auc:0.843844
[327]	validation_0-auc:0.907471	validation_1-auc:0.843837
[328]	validation_0-auc:0.907519	validation_1-auc:0.843829
[329]	validation_0-auc:0.907562	validation_1-auc:0.843809
[330]	validation_0-auc:0.907619	validation_1-auc:0.843835
[331]	validation_0-auc:0.907733	validation_1-auc:0.843879
[332]	validation_0-auc:0.907751	validation_1-auc:0.843925
[333]	validation_0-auc:0.907785	validation_1-auc:0.843935
[334]	validation_0-auc:0.907834	validation_1-auc:0.843887
[335]	validation_0-auc:0.90793	validation_1-auc:0.843889
[336]	validation_0-auc:0.908015	validation_1-auc:0.843936
[337]	validation_0-auc:0.908047	validation_1-auc:0.843956
[338]	validation_0-auc:0.90811	validation_1-auc:0.843945
[339]	validation_0-auc:0.908172	validation_1-auc:0.843877
[340]	validation_0-auc:0.908299	validation_1-auc:0.843933
[341]	validation_0-auc:0.908365	validation_1-auc:0.843909
[342]	validation_0-auc:0.908402	validation_1-auc:0.843911
[343]	validation_0-auc:0.90843	validation_1-auc:0.843926
[344]	validation_0-auc:0.908498	validation_1-auc:0.843904
[345]	validation_0-auc:0.908592	validation_1-auc:0.843888
[346]	validation_0-auc:0.908643	validation_1-auc:0.843869
[347]	validation_0-auc:0.908682	validation_1-auc:0.843887
[348]	validation_0-auc:0.908847	validation_1-auc:0.843939
[349]	validation_0-auc:0.908932	validation_1-auc:0.843942
[350]	validation_0-auc:0.909068	validation_1-auc:0.843938
[351]	validation_0-auc:0.90916	validation_1-auc:0.843952
[352]	validation_0-auc:0.909199	validation_1-auc:0.843995
[353]	validation_0-auc:0.909223	validation_1-auc:0.843997
[354]	validation_0-auc:0.909251	validation_1-auc:0.844005
[355]	validation_0-auc:0.909347	validation_1-auc:0.844004
[356]	validation_0-auc:0.909402	validation_1-auc:0.843948
[357]	validation_0-auc:0.909506	validation_1-auc:0.843952
[358]	validation_0-auc:0.9096	validation_1-auc:0.843939
[359]	validation_0-auc:0.909709	validation_1-auc:0.84387
[360]	validation_0-auc:0.909764	validation_1-auc:0.843795
[361]	validation_0-auc:0.909851	validation_1-auc:0.843817
[362]	validation_0-auc:0.909881	validation_1-auc:0.843844
[363]	validation_0-auc:0.909925	validation_1-auc:0.843832
[364]	validation_0-auc:0.909989	validation_1-auc:0.843858
[365]	validation_0-auc:0.910067	validation_1-auc:0.843841
[366]	validation_0-auc:0.910117	validation_1-auc:0.843801
[367]	validation_0-auc:0.910138	validation_1-auc:0.84384
[368]	validation_0-auc:0.910241	validation_1-auc:0.843781
[369]	validation_0-auc:0.910315	validation_1-auc:0.843819
[370]	validation_0-auc:0.910332	validation_1-auc:0.843831
[371]	validation_0-auc:0.910433	validation_1-auc:0.84385
[372]	validation_0-auc:0.910483	validation_1-auc:0.84382
[373]	validation_0-auc:0.910544	validation_1-auc:0.843829
[374]	validation_0-auc:0.910583	validation_1-auc:0.843808
[375]	validation_0-auc:0.910673	validation_1-auc:0.843824
[376]	validation_0-auc:0.910682	validation_1-auc:0.843822
[377]	validation_0-auc:0.910775	validation_1-auc:0.843856
[378]	validation_0-auc:0.910831	validation_1-auc:0.843807
[379]	validation_0-auc:0.910856	validation_1-auc:0.843767
[380]	validation_0-auc:0.910872	validation_1-auc:0.843787
[381]	validation_0-auc:0.911075	validation_1-auc:0.843814
[382]	validation_0-auc:0.911095	validation_1-auc:0.843799
[383]	validation_0-auc:0.911174	validation_1-auc:0.843861
[384]	validation_0-auc:0.911277	validation_1-auc:0.843857
[385]	validation_0-auc:0.911381	validation_1-auc:0.843835
[386]	validation_0-auc:0.911433	validation_1-auc:0.843816
[387]	validation_0-auc:0.911502	validation_1-auc:0.843805
[388]	validation_0-auc:0.911583	validation_1-auc:0.843763
[389]	validation_0-auc:0.911676	validation_1-auc:0.843773
[390]	validation_0-auc:0.911732	validation_1-auc:0.843754
[391]	validation_0-auc:0.911744	validation_1-auc:0.843771
[392]	validation_0-auc:0.911848	validation_1-auc:0.843811
[393]	validation_0-auc:0.911876	validation_1-auc:0.843748
[394]	validation_0-auc:0.911902	validation_1-auc:0.843742
[395]	validation_0-auc:0.911934	validation_1-auc:0.843761
[396]	validation_0-auc:0.911946	validation_1-auc:0.84376
[397]	validation_0-auc:0.911956	validation_1-auc:0.84375
[398]	validation_0-auc:0.911994	validation_1-auc:0.843734
[399]	validation_0-auc:0.912041	validation_1-auc:0.843721
[400]	validation_0-auc:0.912077	validation_1-auc:0.843694
[401]	validation_0-auc:0.912108	validation_1-auc:0.843633
[402]	validation_0-auc:0.912134	validation_1-auc:0.843652
[403]	validation_0-auc:0.912181	validation_1-auc:0.843676
[404]	validation_0-auc:0.912192	validation_1-auc:0.843661
[405]	validation_0-auc:0.912239	validation_1-auc:0.84367
Stopping. Best iteration:
[205]	validation_0-auc:0.891798	validation_1-auc:0.84558

ROC AUC: 0.8456

 

from xgboost import plot_importance
import matplotlib.pyplot as plt
%matplotlib inline

fig, ax = plt.subplots(1,1,figsize=(10,8))
plot_importance(xgb_clf, ax=ax , max_num_features=20,height=0.4)

 

LightGBM 모델 학습과 하이퍼 파라미터 튜닝

# Train a LightGBM classifier with early stopping on the hold-out set,
# then report ROC AUC on the positive-class probabilities.
from lightgbm import LGBMClassifier

lgbm_clf = LGBMClassifier(n_estimators=500)

# Early stopping watches this evaluation set; training halts once the AUC
# fails to improve for 100 consecutive boosting rounds.
evals = [(X_test, y_test)]
lgbm_clf.fit(
    X_train,
    y_train,
    early_stopping_rounds=100,
    eval_metric="auc",
    eval_set=evals,
    verbose=True,
)

# Score with the probability of the positive class (column 1).
positive_proba = lgbm_clf.predict_proba(X_test)[:, 1]
lgbm_roc_score = roc_auc_score(y_test, positive_proba, average='macro')
print('ROC AUC: {0:.4f}'.format(lgbm_roc_score))

[1]	valid_0's auc: 0.817384	valid_0's binary_logloss: 0.165046
Training until validation scores don't improve for 100 rounds.
[2]	valid_0's auc: 0.81863	valid_0's binary_logloss: 0.16
[3]	valid_0's auc: 0.827411	valid_0's binary_logloss: 0.156287
[4]	valid_0's auc: 0.832175	valid_0's binary_logloss: 0.153416
[5]	valid_0's auc: 0.83481	valid_0's binary_logloss: 0.151206
[6]	valid_0's auc: 0.834721	valid_0's binary_logloss: 0.149303
[7]	valid_0's auc: 0.83659	valid_0's binary_logloss: 0.147804
[8]	valid_0's auc: 0.837602	valid_0's binary_logloss: 0.146466
[9]	valid_0's auc: 0.838114	valid_0's binary_logloss: 0.145476
[10]	valid_0's auc: 0.838472	valid_0's binary_logloss: 0.144681
[11]	valid_0's auc: 0.83808	valid_0's binary_logloss: 0.143978
[12]	valid_0's auc: 0.837771	valid_0's binary_logloss: 0.14341
[13]	valid_0's auc: 0.838063	valid_0's binary_logloss: 0.142835
[14]	valid_0's auc: 0.838485	valid_0's binary_logloss: 0.142296
[15]	valid_0's auc: 0.838542	valid_0's binary_logloss: 0.141903
[16]	valid_0's auc: 0.838275	valid_0's binary_logloss: 0.14162
[17]	valid_0's auc: 0.838817	valid_0's binary_logloss: 0.141252
[18]	valid_0's auc: 0.838769	valid_0's binary_logloss: 0.14103
[19]	valid_0's auc: 0.83909	valid_0's binary_logloss: 0.140798
[20]	valid_0's auc: 0.838839	valid_0's binary_logloss: 0.140676
[21]	valid_0's auc: 0.838823	valid_0's binary_logloss: 0.140506
[22]	valid_0's auc: 0.838934	valid_0's binary_logloss: 0.140275
[23]	valid_0's auc: 0.838957	valid_0's binary_logloss: 0.140142
[24]	valid_0's auc: 0.838764	valid_0's binary_logloss: 0.140054
[25]	valid_0's auc: 0.838176	valid_0's binary_logloss: 0.140042
[26]	valid_0's auc: 0.838068	valid_0's binary_logloss: 0.140012
[27]	valid_0's auc: 0.83824	valid_0's binary_logloss: 0.139937
[28]	valid_0's auc: 0.838487	valid_0's binary_logloss: 0.139844
[29]	valid_0's auc: 0.839041	valid_0's binary_logloss: 0.139726
[30]	valid_0's auc: 0.83871	valid_0's binary_logloss: 0.139708
[31]	valid_0's auc: 0.838192	valid_0's binary_logloss: 0.139745
[32]	valid_0's auc: 0.838453	valid_0's binary_logloss: 0.139679
[33]	valid_0's auc: 0.838265	valid_0's binary_logloss: 0.139702
[34]	valid_0's auc: 0.83818	valid_0's binary_logloss: 0.139697
[35]	valid_0's auc: 0.838186	valid_0's binary_logloss: 0.139697
[36]	valid_0's auc: 0.837934	valid_0's binary_logloss: 0.139716
[37]	valid_0's auc: 0.838364	valid_0's binary_logloss: 0.139622
[38]	valid_0's auc: 0.838565	valid_0's binary_logloss: 0.139564
[39]	valid_0's auc: 0.838829	valid_0's binary_logloss: 0.139543
[40]	valid_0's auc: 0.83898	valid_0's binary_logloss: 0.139483
[41]	valid_0's auc: 0.839369	valid_0's binary_logloss: 0.139434
[42]	valid_0's auc: 0.839599	valid_0's binary_logloss: 0.139408
[43]	valid_0's auc: 0.839508	valid_0's binary_logloss: 0.139411
[44]	valid_0's auc: 0.839287	valid_0's binary_logloss: 0.139448
[45]	valid_0's auc: 0.839477	valid_0's binary_logloss: 0.139432
[46]	valid_0's auc: 0.839166	valid_0's binary_logloss: 0.139495
[47]	valid_0's auc: 0.838945	valid_0's binary_logloss: 0.139508
[48]	valid_0's auc: 0.838841	valid_0's binary_logloss: 0.139522
[49]	valid_0's auc: 0.838891	valid_0's binary_logloss: 0.139484
[50]	valid_0's auc: 0.838737	valid_0's binary_logloss: 0.139537
[51]	valid_0's auc: 0.8389	valid_0's binary_logloss: 0.139507
[52]	valid_0's auc: 0.838932	valid_0's binary_logloss: 0.139495
[53]	valid_0's auc: 0.838757	valid_0's binary_logloss: 0.139547
[54]	valid_0's auc: 0.838692	valid_0's binary_logloss: 0.139549
[55]	valid_0's auc: 0.838783	valid_0's binary_logloss: 0.139518
[56]	valid_0's auc: 0.838739	valid_0's binary_logloss: 0.139531
[57]	valid_0's auc: 0.838757	valid_0's binary_logloss: 0.139576
[58]	valid_0's auc: 0.838701	valid_0's binary_logloss: 0.139589
[59]	valid_0's auc: 0.838641	valid_0's binary_logloss: 0.139628
[60]	valid_0's auc: 0.838437	valid_0's binary_logloss: 0.139663
[61]	valid_0's auc: 0.838503	valid_0's binary_logloss: 0.139665
[62]	valid_0's auc: 0.838393	valid_0's binary_logloss: 0.139673
[63]	valid_0's auc: 0.838211	valid_0's binary_logloss: 0.139711
[64]	valid_0's auc: 0.838301	valid_0's binary_logloss: 0.139683
[65]	valid_0's auc: 0.837919	valid_0's binary_logloss: 0.139712
[66]	valid_0's auc: 0.837657	valid_0's binary_logloss: 0.139758
[67]	valid_0's auc: 0.837749	valid_0's binary_logloss: 0.139737
[68]	valid_0's auc: 0.837664	valid_0's binary_logloss: 0.13976
[69]	valid_0's auc: 0.837731	valid_0's binary_logloss: 0.139731
[70]	valid_0's auc: 0.837545	valid_0's binary_logloss: 0.139778
[71]	valid_0's auc: 0.837491	valid_0's binary_logloss: 0.139776
[72]	valid_0's auc: 0.83758	valid_0's binary_logloss: 0.139762
[73]	valid_0's auc: 0.837516	valid_0's binary_logloss: 0.13978
[74]	valid_0's auc: 0.837582	valid_0's binary_logloss: 0.13978
[75]	valid_0's auc: 0.83758	valid_0's binary_logloss: 0.13977
[76]	valid_0's auc: 0.837467	valid_0's binary_logloss: 0.139815
[77]	valid_0's auc: 0.837473	valid_0's binary_logloss: 0.139805
[78]	valid_0's auc: 0.837617	valid_0's binary_logloss: 0.139812
[79]	valid_0's auc: 0.837659	valid_0's binary_logloss: 0.139839
[80]	valid_0's auc: 0.837613	valid_0's binary_logloss: 0.139849
[81]	valid_0's auc: 0.837706	valid_0's binary_logloss: 0.139835
[82]	valid_0's auc: 0.837368	valid_0's binary_logloss: 0.139886
[83]	valid_0's auc: 0.837477	valid_0's binary_logloss: 0.13986
[84]	valid_0's auc: 0.837504	valid_0's binary_logloss: 0.13986
[85]	valid_0's auc: 0.837287	valid_0's binary_logloss: 0.13993
[86]	valid_0's auc: 0.837323	valid_0's binary_logloss: 0.139942
[87]	valid_0's auc: 0.837161	valid_0's binary_logloss: 0.139964
[88]	valid_0's auc: 0.836989	valid_0's binary_logloss: 0.140011
[89]	valid_0's auc: 0.837035	valid_0's binary_logloss: 0.140014
[90]	valid_0's auc: 0.837007	valid_0's binary_logloss: 0.140049
[91]	valid_0's auc: 0.836832	valid_0's binary_logloss: 0.140078
[92]	valid_0's auc: 0.836979	valid_0's binary_logloss: 0.14007
[93]	valid_0's auc: 0.836875	valid_0's binary_logloss: 0.140135
[94]	valid_0's auc: 0.836843	valid_0's binary_logloss: 0.140139
[95]	valid_0's auc: 0.836938	valid_0's binary_logloss: 0.140121
[96]	valid_0's auc: 0.837312	valid_0's binary_logloss: 0.14004
[97]	valid_0's auc: 0.837229	valid_0's binary_logloss: 0.140082
[98]	valid_0's auc: 0.837361	valid_0's binary_logloss: 0.140053
[99]	valid_0's auc: 0.837365	valid_0's binary_logloss: 0.140073
[100]	valid_0's auc: 0.837229	valid_0's binary_logloss: 0.140095
[101]	valid_0's auc: 0.837124	valid_0's binary_logloss: 0.14014
[102]	valid_0's auc: 0.837385	valid_0's binary_logloss: 0.140065
[103]	valid_0's auc: 0.837954	valid_0's binary_logloss: 0.139975
[104]	valid_0's auc: 0.83767	valid_0's binary_logloss: 0.140027
[105]	valid_0's auc: 0.837743	valid_0's binary_logloss: 0.140044
[106]	valid_0's auc: 0.837839	valid_0's binary_logloss: 0.140052
[107]	valid_0's auc: 0.8377	valid_0's binary_logloss: 0.140105
[108]	valid_0's auc: 0.837582	valid_0's binary_logloss: 0.140153
[109]	valid_0's auc: 0.837439	valid_0's binary_logloss: 0.140184
[110]	valid_0's auc: 0.83731	valid_0's binary_logloss: 0.140216
[111]	valid_0's auc: 0.837193	valid_0's binary_logloss: 0.140234
[112]	valid_0's auc: 0.836993	valid_0's binary_logloss: 0.140296
[113]	valid_0's auc: 0.836994	valid_0's binary_logloss: 0.140335
[114]	valid_0's auc: 0.836887	valid_0's binary_logloss: 0.140367
[115]	valid_0's auc: 0.836742	valid_0's binary_logloss: 0.140415
[116]	valid_0's auc: 0.836448	valid_0's binary_logloss: 0.140488
[117]	valid_0's auc: 0.836571	valid_0's binary_logloss: 0.140496
[118]	valid_0's auc: 0.836701	valid_0's binary_logloss: 0.140481
[119]	valid_0's auc: 0.836717	valid_0's binary_logloss: 0.140491
[120]	valid_0's auc: 0.836673	valid_0's binary_logloss: 0.140508
[121]	valid_0's auc: 0.836644	valid_0's binary_logloss: 0.14052
[122]	valid_0's auc: 0.836649	valid_0's binary_logloss: 0.140536
[123]	valid_0's auc: 0.836457	valid_0's binary_logloss: 0.140598
[124]	valid_0's auc: 0.836254	valid_0's binary_logloss: 0.140664
[125]	valid_0's auc: 0.836198	valid_0's binary_logloss: 0.140693
[126]	valid_0's auc: 0.836429	valid_0's binary_logloss: 0.140672
[127]	valid_0's auc: 0.836282	valid_0's binary_logloss: 0.14072
[128]	valid_0's auc: 0.836152	valid_0's binary_logloss: 0.140781
[129]	valid_0's auc: 0.836156	valid_0's binary_logloss: 0.140809
[130]	valid_0's auc: 0.83605	valid_0's binary_logloss: 0.140835
[131]	valid_0's auc: 0.836033	valid_0's binary_logloss: 0.140835
[132]	valid_0's auc: 0.836014	valid_0's binary_logloss: 0.140852
[133]	valid_0's auc: 0.835977	valid_0's binary_logloss: 0.1409
[134]	valid_0's auc: 0.835695	valid_0's binary_logloss: 0.140951
[135]	valid_0's auc: 0.835689	valid_0's binary_logloss: 0.140975
[136]	valid_0's auc: 0.83554	valid_0's binary_logloss: 0.141011
[137]	valid_0's auc: 0.835146	valid_0's binary_logloss: 0.141098
[138]	valid_0's auc: 0.83503	valid_0's binary_logloss: 0.141136
[139]	valid_0's auc: 0.834826	valid_0's binary_logloss: 0.141206
[140]	valid_0's auc: 0.834576	valid_0's binary_logloss: 0.141267
[141]	valid_0's auc: 0.834265	valid_0's binary_logloss: 0.141328
[142]	valid_0's auc: 0.8342	valid_0's binary_logloss: 0.141359
Early stopping, best iteration is:
[42]	valid_0's auc: 0.839599	valid_0's binary_logloss: 0.139408
ROC AUC: 0.8396

 

from sklearn.model_selection import GridSearchCV

# Reduce n_estimators to 200 (from 500) so the hyper-parameter search runs faster.
LGBM_clf = LGBMClassifier(n_estimators=200)

params = {'num_leaves': [32, 64 ],
          'max_depth':[128, 160],
          'min_child_samples':[60, 100],
          'subsample':[0.8, 1]}


# cv is left at its default to keep the search fast.
# Bug fix: search over the reduced 200-estimator LGBM_clf defined above.
# The original passed the earlier 500-estimator lgbm_clf, leaving LGBM_clf unused.
gridcv = GridSearchCV(LGBM_clf, param_grid=params)
gridcv.fit(X_train, y_train, early_stopping_rounds=30, eval_metric="auc",
           eval_set=[(X_train, y_train), (X_test, y_test)])

print('GridSearchCV 최적 파라미터:', gridcv.best_params_)
# Evaluate the refit best estimator on the positive-class probabilities.
lgbm_roc_score = roc_auc_score(y_test, gridcv.predict_proba(X_test)[:,1], average='macro')
print('ROC AUC: {0:.4f}'.format(lgbm_roc_score))


[1]	valid_0's auc: 0.820235	valid_0's binary_logloss: 0.156085	valid_1's auc: 0.81613	valid_1's binary_logloss: 0.164998
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.825775	valid_0's binary_logloss: 0.150951	valid_1's auc: 0.821831	valid_1's binary_logloss: 0.15988
[3]	valid_0's auc: 0.832192	valid_0's binary_logloss: 0.147167	valid_1's auc: 0.827302	valid_1's binary_logloss: 0.156397
[4]	valid_0's auc: 0.837518	valid_0's binary_logloss: 0.144131	valid_1's auc: 0.8334	valid_1's binary_logloss: 0.153325
[5]	valid_0's auc: 0.842289	valid_0's binary_logloss: 0.141651	valid_1's auc: 0.836018	valid_1's binary_logloss: 0.150959
[6]	valid_0's auc: 0.844974	valid_0's binary_logloss: 0.139661	valid_1's auc: 0.838022	valid_1's binary_logloss: 0.149046
[7]	valid_0's auc: 0.846623	valid_0's binary_logloss: 0.138001	valid_1's auc: 0.837777	valid_1's binary_logloss: 0.147509
[8]	valid_0's auc: 0.848529	valid_0's binary_logloss: 0.136578	valid_1's auc: 0.839519	valid_1's binary_logloss: 0.146015
[9]	valid_0's auc: 0.850276	valid_0's binary_logloss: 0.135274	valid_1's auc: 0.840042	valid_1's binary_logloss: 0.145043
[10]	valid_0's auc: 0.852371	valid_0's binary_logloss: 0.134185	valid_1's auc: 0.839808	valid_1's binary_logloss: 0.144182
[11]	valid_0's auc: 0.853705	valid_0's binary_logloss: 0.133238	valid_1's auc: 0.83943	valid_1's binary_logloss: 0.14345
[12]	valid_0's auc: 0.855304	valid_0's binary_logloss: 0.132409	valid_1's auc: 0.838786	valid_1's binary_logloss: 0.142878
[13]	valid_0's auc: 0.856638	valid_0's binary_logloss: 0.131658	valid_1's auc: 0.838632	valid_1's binary_logloss: 0.142368
[14]	valid_0's auc: 0.85784	valid_0's binary_logloss: 0.130967	valid_1's auc: 0.838182	valid_1's binary_logloss: 0.14198
[15]	valid_0's auc: 0.859432	valid_0's binary_logloss: 0.130373	valid_1's auc: 0.838236	valid_1's binary_logloss: 0.141582
[16]	valid_0's auc: 0.860428	valid_0's binary_logloss: 0.129814	valid_1's auc: 0.837425	valid_1's binary_logloss: 0.141389
[17]	valid_0's auc: 0.861409	valid_0's binary_logloss: 0.12923	valid_1's auc: 0.837358	valid_1's binary_logloss: 0.141106
[18]	valid_0's auc: 0.86332	valid_0's binary_logloss: 0.128681	valid_1's auc: 0.836771	valid_1's binary_logloss: 0.140932
[19]	valid_0's auc: 0.864365	valid_0's binary_logloss: 0.128233	valid_1's auc: 0.836564	valid_1's binary_logloss: 0.140796
[20]	valid_0's auc: 0.865268	valid_0's binary_logloss: 0.127815	valid_1's auc: 0.835613	valid_1's binary_logloss: 0.140715
[21]	valid_0's auc: 0.865869	valid_0's binary_logloss: 0.127427	valid_1's auc: 0.835085	valid_1's binary_logloss: 0.140641
[22]	valid_0's auc: 0.867223	valid_0's binary_logloss: 0.126993	valid_1's auc: 0.835383	valid_1's binary_logloss: 0.140519
[23]	valid_0's auc: 0.867898	valid_0's binary_logloss: 0.126644	valid_1's auc: 0.835018	valid_1's binary_logloss: 0.140432
[24]	valid_0's auc: 0.869077	valid_0's binary_logloss: 0.126246	valid_1's auc: 0.835199	valid_1's binary_logloss: 0.140328
[25]	valid_0's auc: 0.869684	valid_0's binary_logloss: 0.125917	valid_1's auc: 0.83457	valid_1's binary_logloss: 0.140301
[26]	valid_0's auc: 0.870271	valid_0's binary_logloss: 0.12561	valid_1's auc: 0.834087	valid_1's binary_logloss: 0.140349
[27]	valid_0's auc: 0.87126	valid_0's binary_logloss: 0.125302	valid_1's auc: 0.833822	valid_1's binary_logloss: 0.140277
[28]	valid_0's auc: 0.872741	valid_0's binary_logloss: 0.124882	valid_1's auc: 0.833886	valid_1's binary_logloss: 0.140255
[29]	valid_0's auc: 0.873424	valid_0's binary_logloss: 0.124594	valid_1's auc: 0.833937	valid_1's binary_logloss: 0.140189
[30]	valid_0's auc: 0.874669	valid_0's binary_logloss: 0.124295	valid_1's auc: 0.834461	valid_1's binary_logloss: 0.140113
[31]	valid_0's auc: 0.875234	valid_0's binary_logloss: 0.124066	valid_1's auc: 0.83444	valid_1's binary_logloss: 0.140064
[32]	valid_0's auc: 0.875809	valid_0's binary_logloss: 0.123813	valid_1's auc: 0.834196	valid_1's binary_logloss: 0.140095
[33]	valid_0's auc: 0.876619	valid_0's binary_logloss: 0.123531	valid_1's auc: 0.834143	valid_1's binary_logloss: 0.140029
[34]	valid_0's auc: 0.877233	valid_0's binary_logloss: 0.123254	valid_1's auc: 0.833865	valid_1's binary_logloss: 0.140055
[35]	valid_0's auc: 0.877763	valid_0's binary_logloss: 0.123009	valid_1's auc: 0.833699	valid_1's binary_logloss: 0.140082
[36]	valid_0's auc: 0.878322	valid_0's binary_logloss: 0.122755	valid_1's auc: 0.833221	valid_1's binary_logloss: 0.140158
[37]	valid_0's auc: 0.878948	valid_0's binary_logloss: 0.122495	valid_1's auc: 0.832792	valid_1's binary_logloss: 0.14018
[38]	valid_0's auc: 0.879452	valid_0's binary_logloss: 0.122301	valid_1's auc: 0.832977	valid_1's binary_logloss: 0.140154
[39]	valid_0's auc: 0.880156	valid_0's binary_logloss: 0.122063	valid_1's auc: 0.832913	valid_1's binary_logloss: 0.140188
Early stopping, best iteration is:
[9]	valid_0's auc: 0.850276	valid_0's binary_logloss: 0.135274	valid_1's auc: 0.840042	valid_1's binary_logloss: 0.145043
[1]	valid_0's auc: 0.814371	valid_0's binary_logloss: 0.156455	valid_1's auc: 0.813175	valid_1's binary_logloss: 0.16542
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.827201	valid_0's binary_logloss: 0.151083	valid_1's auc: 0.820013	valid_1's binary_logloss: 0.160085
[3]	valid_0's auc: 0.837003	valid_0's binary_logloss: 0.147235	valid_1's auc: 0.828713	valid_1's binary_logloss: 0.156463
[4]	valid_0's auc: 0.840971	valid_0's binary_logloss: 0.144235	valid_1's auc: 0.831369	valid_1's binary_logloss: 0.153575
[5]	valid_0's auc: 0.842902	valid_0's binary_logloss: 0.141803	valid_1's auc: 0.833329	valid_1's binary_logloss: 0.151283
[6]	valid_0's auc: 0.847067	valid_0's binary_logloss: 0.139776	valid_1's auc: 0.836625	valid_1's binary_logloss: 0.14937
[7]	valid_0's auc: 0.848894	valid_0's binary_logloss: 0.138056	valid_1's auc: 0.837174	valid_1's binary_logloss: 0.147778
[8]	valid_0's auc: 0.850546	valid_0's binary_logloss: 0.136579	valid_1's auc: 0.837405	valid_1's binary_logloss: 0.146516
[9]	valid_0's auc: 0.852419	valid_0's binary_logloss: 0.135296	valid_1's auc: 0.837736	valid_1's binary_logloss: 0.145412
[10]	valid_0's auc: 0.85454	valid_0's binary_logloss: 0.134224	valid_1's auc: 0.838661	valid_1's binary_logloss: 0.144501
[11]	valid_0's auc: 0.856414	valid_0's binary_logloss: 0.133277	valid_1's auc: 0.838921	valid_1's binary_logloss: 0.143737
[12]	valid_0's auc: 0.857283	valid_0's binary_logloss: 0.132431	valid_1's auc: 0.837263	valid_1's binary_logloss: 0.143197
[13]	valid_0's auc: 0.858075	valid_0's binary_logloss: 0.131688	valid_1's auc: 0.837254	valid_1's binary_logloss: 0.142633
[14]	valid_0's auc: 0.858945	valid_0's binary_logloss: 0.130983	valid_1's auc: 0.837669	valid_1's binary_logloss: 0.142158
[15]	valid_0's auc: 0.859875	valid_0's binary_logloss: 0.130335	valid_1's auc: 0.837434	valid_1's binary_logloss: 0.141848
[16]	valid_0's auc: 0.860979	valid_0's binary_logloss: 0.129731	valid_1's auc: 0.837355	valid_1's binary_logloss: 0.141492
[17]	valid_0's auc: 0.861681	valid_0's binary_logloss: 0.129123	valid_1's auc: 0.837851	valid_1's binary_logloss: 0.14114
[18]	valid_0's auc: 0.863324	valid_0's binary_logloss: 0.128568	valid_1's auc: 0.838024	valid_1's binary_logloss: 0.140911
[19]	valid_0's auc: 0.864682	valid_0's binary_logloss: 0.128016	valid_1's auc: 0.838411	valid_1's binary_logloss: 0.140669
[20]	valid_0's auc: 0.865346	valid_0's binary_logloss: 0.127585	valid_1's auc: 0.838201	valid_1's binary_logloss: 0.140498
[21]	valid_0's auc: 0.8665	valid_0's binary_logloss: 0.127103	valid_1's auc: 0.83851	valid_1's binary_logloss: 0.140294
[22]	valid_0's auc: 0.867889	valid_0's binary_logloss: 0.126657	valid_1's auc: 0.839074	valid_1's binary_logloss: 0.140136
[23]	valid_0's auc: 0.868624	valid_0's binary_logloss: 0.126281	valid_1's auc: 0.838987	valid_1's binary_logloss: 0.140028
[24]	valid_0's auc: 0.869855	valid_0's binary_logloss: 0.125882	valid_1's auc: 0.838802	valid_1's binary_logloss: 0.139975
[25]	valid_0's auc: 0.870426	valid_0's binary_logloss: 0.125541	valid_1's auc: 0.83912	valid_1's binary_logloss: 0.139815
[26]	valid_0's auc: 0.871649	valid_0's binary_logloss: 0.125171	valid_1's auc: 0.838779	valid_1's binary_logloss: 0.139741
[27]	valid_0's auc: 0.872513	valid_0's binary_logloss: 0.124859	valid_1's auc: 0.839241	valid_1's binary_logloss: 0.139648
[28]	valid_0's auc: 0.873252	valid_0's binary_logloss: 0.124586	valid_1's auc: 0.839378	valid_1's binary_logloss: 0.139578
[29]	valid_0's auc: 0.87422	valid_0's binary_logloss: 0.124271	valid_1's auc: 0.839436	valid_1's binary_logloss: 0.139581
[30]	valid_0's auc: 0.874647	valid_0's binary_logloss: 0.124019	valid_1's auc: 0.83907	valid_1's binary_logloss: 0.13958
[31]	valid_0's auc: 0.875603	valid_0's binary_logloss: 0.123747	valid_1's auc: 0.839226	valid_1's binary_logloss: 0.139539
[32]	valid_0's auc: 0.87651	valid_0's binary_logloss: 0.123413	valid_1's auc: 0.838983	valid_1's binary_logloss: 0.139561
[33]	valid_0's auc: 0.877103	valid_0's binary_logloss: 0.123199	valid_1's auc: 0.839235	valid_1's binary_logloss: 0.139528
[34]	valid_0's auc: 0.878189	valid_0's binary_logloss: 0.122925	valid_1's auc: 0.839004	valid_1's binary_logloss: 0.139567
[35]	valid_0's auc: 0.878765	valid_0's binary_logloss: 0.122704	valid_1's auc: 0.838946	valid_1's binary_logloss: 0.139572
[36]	valid_0's auc: 0.879377	valid_0's binary_logloss: 0.122448	valid_1's auc: 0.83898	valid_1's binary_logloss: 0.139554
[37]	valid_0's auc: 0.880134	valid_0's binary_logloss: 0.122143	valid_1's auc: 0.838888	valid_1's binary_logloss: 0.139557
[38]	valid_0's auc: 0.880571	valid_0's binary_logloss: 0.121985	valid_1's auc: 0.839133	valid_1's binary_logloss: 0.13952
[39]	valid_0's auc: 0.88098	valid_0's binary_logloss: 0.12178	valid_1's auc: 0.838929	valid_1's binary_logloss: 0.139584
[40]	valid_0's auc: 0.881495	valid_0's binary_logloss: 0.121571	valid_1's auc: 0.838869	valid_1's binary_logloss: 0.139555
[41]	valid_0's auc: 0.881897	valid_0's binary_logloss: 0.121382	valid_1's auc: 0.838702	valid_1's binary_logloss: 0.139575
[42]	valid_0's auc: 0.882625	valid_0's binary_logloss: 0.121107	valid_1's auc: 0.83891	valid_1's binary_logloss: 0.139554
[43]	valid_0's auc: 0.882956	valid_0's binary_logloss: 0.120926	valid_1's auc: 0.83914	valid_1's binary_logloss: 0.139546
[44]	valid_0's auc: 0.883618	valid_0's binary_logloss: 0.120726	valid_1's auc: 0.838905	valid_1's binary_logloss: 0.139592
[45]	valid_0's auc: 0.88419	valid_0's binary_logloss: 0.120513	valid_1's auc: 0.838587	valid_1's binary_logloss: 0.139651
[46]	valid_0's auc: 0.884555	valid_0's binary_logloss: 0.120339	valid_1's auc: 0.838288	valid_1's binary_logloss: 0.139703
[47]	valid_0's auc: 0.884789	valid_0's binary_logloss: 0.120189	valid_1's auc: 0.838155	valid_1's binary_logloss: 0.139692
[48]	valid_0's auc: 0.884968	valid_0's binary_logloss: 0.120074	valid_1's auc: 0.8384	valid_1's binary_logloss: 0.139667
[49]	valid_0's auc: 0.885336	valid_0's binary_logloss: 0.119939	valid_1's auc: 0.83827	valid_1's binary_logloss: 0.13968
[50]	valid_0's auc: 0.885759	valid_0's binary_logloss: 0.119734	valid_1's auc: 0.838029	valid_1's binary_logloss: 0.139727
[51]	valid_0's auc: 0.886206	valid_0's binary_logloss: 0.119595	valid_1's auc: 0.838077	valid_1's binary_logloss: 0.13975
[52]	valid_0's auc: 0.886527	valid_0's binary_logloss: 0.119458	valid_1's auc: 0.838081	valid_1's binary_logloss: 0.139779
[53]	valid_0's auc: 0.886808	valid_0's binary_logloss: 0.119271	valid_1's auc: 0.838632	valid_1's binary_logloss: 0.139643
[54]	valid_0's auc: 0.887203	valid_0's binary_logloss: 0.119092	valid_1's auc: 0.838634	valid_1's binary_logloss: 0.139662
[55]	valid_0's auc: 0.88761	valid_0's binary_logloss: 0.118944	valid_1's auc: 0.838551	valid_1's binary_logloss: 0.139692
[56]	valid_0's auc: 0.887901	valid_0's binary_logloss: 0.118796	valid_1's auc: 0.838303	valid_1's binary_logloss: 0.139748
[57]	valid_0's auc: 0.888238	valid_0's binary_logloss: 0.118604	valid_1's auc: 0.838445	valid_1's binary_logloss: 0.139719
[58]	valid_0's auc: 0.888615	valid_0's binary_logloss: 0.118451	valid_1's auc: 0.838262	valid_1's binary_logloss: 0.139754
[59]	valid_0's auc: 0.889294	valid_0's binary_logloss: 0.11824	valid_1's auc: 0.837652	valid_1's binary_logloss: 0.139829
Early stopping, best iteration is:
[29]	valid_0's auc: 0.87422	valid_0's binary_logloss: 0.124271	valid_1's auc: 0.839436	valid_1's binary_logloss: 0.139581
[1]	valid_0's auc: 0.821645	valid_0's binary_logloss: 0.156526	valid_1's auc: 0.81857	valid_1's binary_logloss: 0.165099
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.827652	valid_0's binary_logloss: 0.151185	valid_1's auc: 0.82254	valid_1's binary_logloss: 0.160085
[3]	valid_0's auc: 0.836059	valid_0's binary_logloss: 0.14726	valid_1's auc: 0.828119	valid_1's binary_logloss: 0.156604
[4]	valid_0's auc: 0.840512	valid_0's binary_logloss: 0.144255	valid_1's auc: 0.831906	valid_1's binary_logloss: 0.153569
[5]	valid_0's auc: 0.841872	valid_0's binary_logloss: 0.141762	valid_1's auc: 0.834269	valid_1's binary_logloss: 0.151256
[6]	valid_0's auc: 0.844933	valid_0's binary_logloss: 0.139748	valid_1's auc: 0.835284	valid_1's binary_logloss: 0.149358
[7]	valid_0's auc: 0.845797	valid_0's binary_logloss: 0.138081	valid_1's auc: 0.834956	valid_1's binary_logloss: 0.147852
[8]	valid_0's auc: 0.847134	valid_0's binary_logloss: 0.136692	valid_1's auc: 0.836859	valid_1's binary_logloss: 0.146584
[9]	valid_0's auc: 0.849562	valid_0's binary_logloss: 0.135436	valid_1's auc: 0.837503	valid_1's binary_logloss: 0.145574
[10]	valid_0's auc: 0.850799	valid_0's binary_logloss: 0.134374	valid_1's auc: 0.838107	valid_1's binary_logloss: 0.144671
[11]	valid_0's auc: 0.852171	valid_0's binary_logloss: 0.133441	valid_1's auc: 0.838177	valid_1's binary_logloss: 0.143939
[12]	valid_0's auc: 0.854501	valid_0's binary_logloss: 0.132622	valid_1's auc: 0.838294	valid_1's binary_logloss: 0.143407
[13]	valid_0's auc: 0.856936	valid_0's binary_logloss: 0.131808	valid_1's auc: 0.838199	valid_1's binary_logloss: 0.142866
[14]	valid_0's auc: 0.857673	valid_0's binary_logloss: 0.131166	valid_1's auc: 0.837548	valid_1's binary_logloss: 0.142532
[15]	valid_0's auc: 0.859044	valid_0's binary_logloss: 0.130533	valid_1's auc: 0.837939	valid_1's binary_logloss: 0.142166
[16]	valid_0's auc: 0.859941	valid_0's binary_logloss: 0.129973	valid_1's auc: 0.837854	valid_1's binary_logloss: 0.141803
[17]	valid_0's auc: 0.861036	valid_0's binary_logloss: 0.129377	valid_1's auc: 0.838222	valid_1's binary_logloss: 0.141476
[18]	valid_0's auc: 0.862799	valid_0's binary_logloss: 0.128809	valid_1's auc: 0.838732	valid_1's binary_logloss: 0.141125
[19]	valid_0's auc: 0.864128	valid_0's binary_logloss: 0.128328	valid_1's auc: 0.839441	valid_1's binary_logloss: 0.140763
[20]	valid_0's auc: 0.864975	valid_0's binary_logloss: 0.127913	valid_1's auc: 0.839957	valid_1's binary_logloss: 0.140513
[21]	valid_0's auc: 0.866258	valid_0's binary_logloss: 0.127436	valid_1's auc: 0.83993	valid_1's binary_logloss: 0.140328
[22]	valid_0's auc: 0.867054	valid_0's binary_logloss: 0.127069	valid_1's auc: 0.840099	valid_1's binary_logloss: 0.14013
[23]	valid_0's auc: 0.867852	valid_0's binary_logloss: 0.126713	valid_1's auc: 0.839768	valid_1's binary_logloss: 0.140027
[24]	valid_0's auc: 0.868599	valid_0's binary_logloss: 0.126372	valid_1's auc: 0.840299	valid_1's binary_logloss: 0.139904
[25]	valid_0's auc: 0.869485	valid_0's binary_logloss: 0.126047	valid_1's auc: 0.840676	valid_1's binary_logloss: 0.139735
[26]	valid_0's auc: 0.870541	valid_0's binary_logloss: 0.125627	valid_1's auc: 0.840242	valid_1's binary_logloss: 0.139708
[27]	valid_0's auc: 0.871191	valid_0's binary_logloss: 0.125319	valid_1's auc: 0.839924	valid_1's binary_logloss: 0.139651
[28]	valid_0's auc: 0.871788	valid_0's binary_logloss: 0.125064	valid_1's auc: 0.839647	valid_1's binary_logloss: 0.139574
[29]	valid_0's auc: 0.872714	valid_0's binary_logloss: 0.124726	valid_1's auc: 0.840154	valid_1's binary_logloss: 0.139481
[30]	valid_0's auc: 0.873746	valid_0's binary_logloss: 0.124416	valid_1's auc: 0.839602	valid_1's binary_logloss: 0.139497
[31]	valid_0's auc: 0.874715	valid_0's binary_logloss: 0.124154	valid_1's auc: 0.839072	valid_1's binary_logloss: 0.139568
[32]	valid_0's auc: 0.875774	valid_0's binary_logloss: 0.123879	valid_1's auc: 0.838748	valid_1's binary_logloss: 0.139579
[33]	valid_0's auc: 0.876333	valid_0's binary_logloss: 0.123614	valid_1's auc: 0.83895	valid_1's binary_logloss: 0.139492
[34]	valid_0's auc: 0.876841	valid_0's binary_logloss: 0.123372	valid_1's auc: 0.839078	valid_1's binary_logloss: 0.139459
[35]	valid_0's auc: 0.877422	valid_0's binary_logloss: 0.123118	valid_1's auc: 0.839413	valid_1's binary_logloss: 0.139362
[36]	valid_0's auc: 0.878163	valid_0's binary_logloss: 0.122873	valid_1's auc: 0.839157	valid_1's binary_logloss: 0.139395
[37]	valid_0's auc: 0.87856	valid_0's binary_logloss: 0.122649	valid_1's auc: 0.839051	valid_1's binary_logloss: 0.139443
[38]	valid_0's auc: 0.879102	valid_0's binary_logloss: 0.122415	valid_1's auc: 0.839482	valid_1's binary_logloss: 0.139378
[39]	valid_0's auc: 0.879978	valid_0's binary_logloss: 0.122126	valid_1's auc: 0.83945	valid_1's binary_logloss: 0.139391
[40]	valid_0's auc: 0.880399	valid_0's binary_logloss: 0.121938	valid_1's auc: 0.840127	valid_1's binary_logloss: 0.139314
[41]	valid_0's auc: 0.880914	valid_0's binary_logloss: 0.121757	valid_1's auc: 0.839733	valid_1's binary_logloss: 0.13937
[42]	valid_0's auc: 0.881674	valid_0's binary_logloss: 0.121547	valid_1's auc: 0.839744	valid_1's binary_logloss: 0.139371
[43]	valid_0's auc: 0.882352	valid_0's binary_logloss: 0.121291	valid_1's auc: 0.839733	valid_1's binary_logloss: 0.139358
[44]	valid_0's auc: 0.882869	valid_0's binary_logloss: 0.121117	valid_1's auc: 0.839827	valid_1's binary_logloss: 0.13937
[45]	valid_0's auc: 0.883308	valid_0's binary_logloss: 0.120912	valid_1's auc: 0.839923	valid_1's binary_logloss: 0.139325
[46]	valid_0's auc: 0.883814	valid_0's binary_logloss: 0.120682	valid_1's auc: 0.83985	valid_1's binary_logloss: 0.139336
[47]	valid_0's auc: 0.884201	valid_0's binary_logloss: 0.120532	valid_1's auc: 0.839839	valid_1's binary_logloss: 0.139368
[48]	valid_0's auc: 0.884428	valid_0's binary_logloss: 0.120354	valid_1's auc: 0.839815	valid_1's binary_logloss: 0.139368
[49]	valid_0's auc: 0.884565	valid_0's binary_logloss: 0.120223	valid_1's auc: 0.83995	valid_1's binary_logloss: 0.139355
[50]	valid_0's auc: 0.885102	valid_0's binary_logloss: 0.120013	valid_1's auc: 0.839807	valid_1's binary_logloss: 0.13936
[51]	valid_0's auc: 0.885668	valid_0's binary_logloss: 0.119856	valid_1's auc: 0.839722	valid_1's binary_logloss: 0.139361
[52]	valid_0's auc: 0.886053	valid_0's binary_logloss: 0.119673	valid_1's auc: 0.839593	valid_1's binary_logloss: 0.139409
[53]	valid_0's auc: 0.886187	valid_0's binary_logloss: 0.119567	valid_1's auc: 0.839723	valid_1's binary_logloss: 0.139436
[54]	valid_0's auc: 0.886397	valid_0's binary_logloss: 0.11943	valid_1's auc: 0.839999	valid_1's binary_logloss: 0.139424
[55]	valid_0's auc: 0.886702	valid_0's binary_logloss: 0.119279	valid_1's auc: 0.839951	valid_1's binary_logloss: 0.139433
Early stopping, best iteration is:
[25]	valid_0's auc: 0.869485	valid_0's binary_logloss: 0.126047	valid_1's auc: 0.840676	valid_1's binary_logloss: 0.139735
[1]	valid_0's auc: 0.820235	valid_0's binary_logloss: 0.156085	valid_1's auc: 0.81613	valid_1's binary_logloss: 0.164998
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.825775	valid_0's binary_logloss: 0.150951	valid_1's auc: 0.821831	valid_1's binary_logloss: 0.15988
[3]	valid_0's auc: 0.832192	valid_0's binary_logloss: 0.147167	valid_1's auc: 0.827302	valid_1's binary_logloss: 0.156397
[4]	valid_0's auc: 0.837518	valid_0's binary_logloss: 0.144131	valid_1's auc: 0.8334	valid_1's binary_logloss: 0.153325
[5]	valid_0's auc: 0.842289	valid_0's binary_logloss: 0.141651	valid_1's auc: 0.836018	valid_1's binary_logloss: 0.150959
[6]	valid_0's auc: 0.844974	valid_0's binary_logloss: 0.139661	valid_1's auc: 0.838022	valid_1's binary_logloss: 0.149046
[7]	valid_0's auc: 0.846623	valid_0's binary_logloss: 0.138001	valid_1's auc: 0.837777	valid_1's binary_logloss: 0.147509
[8]	valid_0's auc: 0.848529	valid_0's binary_logloss: 0.136578	valid_1's auc: 0.839519	valid_1's binary_logloss: 0.146015
[9]	valid_0's auc: 0.850276	valid_0's binary_logloss: 0.135274	valid_1's auc: 0.840042	valid_1's binary_logloss: 0.145043
[10]	valid_0's auc: 0.852371	valid_0's binary_logloss: 0.134185	valid_1's auc: 0.839808	valid_1's binary_logloss: 0.144182
[11]	valid_0's auc: 0.853705	valid_0's binary_logloss: 0.133238	valid_1's auc: 0.83943	valid_1's binary_logloss: 0.14345
[12]	valid_0's auc: 0.855304	valid_0's binary_logloss: 0.132409	valid_1's auc: 0.838786	valid_1's binary_logloss: 0.142878
[13]	valid_0's auc: 0.856638	valid_0's binary_logloss: 0.131658	valid_1's auc: 0.838632	valid_1's binary_logloss: 0.142368
[14]	valid_0's auc: 0.85784	valid_0's binary_logloss: 0.130967	valid_1's auc: 0.838182	valid_1's binary_logloss: 0.14198
[15]	valid_0's auc: 0.859432	valid_0's binary_logloss: 0.130373	valid_1's auc: 0.838236	valid_1's binary_logloss: 0.141582
[16]	valid_0's auc: 0.860428	valid_0's binary_logloss: 0.129814	valid_1's auc: 0.837425	valid_1's binary_logloss: 0.141389
[17]	valid_0's auc: 0.861409	valid_0's binary_logloss: 0.12923	valid_1's auc: 0.837358	valid_1's binary_logloss: 0.141106
[18]	valid_0's auc: 0.86332	valid_0's binary_logloss: 0.128681	valid_1's auc: 0.836771	valid_1's binary_logloss: 0.140932
[19]	valid_0's auc: 0.864365	valid_0's binary_logloss: 0.128233	valid_1's auc: 0.836564	valid_1's binary_logloss: 0.140796
[20]	valid_0's auc: 0.865268	valid_0's binary_logloss: 0.127815	valid_1's auc: 0.835613	valid_1's binary_logloss: 0.140715
[21]	valid_0's auc: 0.865869	valid_0's binary_logloss: 0.127427	valid_1's auc: 0.835085	valid_1's binary_logloss: 0.140641
[22]	valid_0's auc: 0.867223	valid_0's binary_logloss: 0.126993	valid_1's auc: 0.835383	valid_1's binary_logloss: 0.140519
[23]	valid_0's auc: 0.867898	valid_0's binary_logloss: 0.126644	valid_1's auc: 0.835018	valid_1's binary_logloss: 0.140432
[24]	valid_0's auc: 0.869077	valid_0's binary_logloss: 0.126246	valid_1's auc: 0.835199	valid_1's binary_logloss: 0.140328
[25]	valid_0's auc: 0.869684	valid_0's binary_logloss: 0.125917	valid_1's auc: 0.83457	valid_1's binary_logloss: 0.140301
[26]	valid_0's auc: 0.870271	valid_0's binary_logloss: 0.12561	valid_1's auc: 0.834087	valid_1's binary_logloss: 0.140349
[27]	valid_0's auc: 0.87126	valid_0's binary_logloss: 0.125302	valid_1's auc: 0.833822	valid_1's binary_logloss: 0.140277
[28]	valid_0's auc: 0.872741	valid_0's binary_logloss: 0.124882	valid_1's auc: 0.833886	valid_1's binary_logloss: 0.140255
[29]	valid_0's auc: 0.873424	valid_0's binary_logloss: 0.124594	valid_1's auc: 0.833937	valid_1's binary_logloss: 0.140189
[30]	valid_0's auc: 0.874669	valid_0's binary_logloss: 0.124295	valid_1's auc: 0.834461	valid_1's binary_logloss: 0.140113
[31]	valid_0's auc: 0.875234	valid_0's binary_logloss: 0.124066	valid_1's auc: 0.83444	valid_1's binary_logloss: 0.140064
[32]	valid_0's auc: 0.875809	valid_0's binary_logloss: 0.123813	valid_1's auc: 0.834196	valid_1's binary_logloss: 0.140095
[33]	valid_0's auc: 0.876619	valid_0's binary_logloss: 0.123531	valid_1's auc: 0.834143	valid_1's binary_logloss: 0.140029
[34]	valid_0's auc: 0.877233	valid_0's binary_logloss: 0.123254	valid_1's auc: 0.833865	valid_1's binary_logloss: 0.140055
[35]	valid_0's auc: 0.877763	valid_0's binary_logloss: 0.123009	valid_1's auc: 0.833699	valid_1's binary_logloss: 0.140082
[36]	valid_0's auc: 0.878322	valid_0's binary_logloss: 0.122755	valid_1's auc: 0.833221	valid_1's binary_logloss: 0.140158
[37]	valid_0's auc: 0.878948	valid_0's binary_logloss: 0.122495	valid_1's auc: 0.832792	valid_1's binary_logloss: 0.14018
[38]	valid_0's auc: 0.879452	valid_0's binary_logloss: 0.122301	valid_1's auc: 0.832977	valid_1's binary_logloss: 0.140154
[39]	valid_0's auc: 0.880156	valid_0's binary_logloss: 0.122063	valid_1's auc: 0.832913	valid_1's binary_logloss: 0.140188
Early stopping, best iteration is:
[9]	valid_0's auc: 0.850276	valid_0's binary_logloss: 0.135274	valid_1's auc: 0.840042	valid_1's binary_logloss: 0.145043
[1]	valid_0's auc: 0.814371	valid_0's binary_logloss: 0.156455	valid_1's auc: 0.813175	valid_1's binary_logloss: 0.16542
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.827201	valid_0's binary_logloss: 0.151083	valid_1's auc: 0.820013	valid_1's binary_logloss: 0.160085
[3]	valid_0's auc: 0.837003	valid_0's binary_logloss: 0.147235	valid_1's auc: 0.828713	valid_1's binary_logloss: 0.156463
[4]	valid_0's auc: 0.840971	valid_0's binary_logloss: 0.144235	valid_1's auc: 0.831369	valid_1's binary_logloss: 0.153575
[5]	valid_0's auc: 0.842902	valid_0's binary_logloss: 0.141803	valid_1's auc: 0.833329	valid_1's binary_logloss: 0.151283
[6]	valid_0's auc: 0.847067	valid_0's binary_logloss: 0.139776	valid_1's auc: 0.836625	valid_1's binary_logloss: 0.14937
[7]	valid_0's auc: 0.848894	valid_0's binary_logloss: 0.138056	valid_1's auc: 0.837174	valid_1's binary_logloss: 0.147778
[8]	valid_0's auc: 0.850546	valid_0's binary_logloss: 0.136579	valid_1's auc: 0.837405	valid_1's binary_logloss: 0.146516
[9]	valid_0's auc: 0.852419	valid_0's binary_logloss: 0.135296	valid_1's auc: 0.837736	valid_1's binary_logloss: 0.145412
[10]	valid_0's auc: 0.85454	valid_0's binary_logloss: 0.134224	valid_1's auc: 0.838661	valid_1's binary_logloss: 0.144501
[11]	valid_0's auc: 0.856414	valid_0's binary_logloss: 0.133277	valid_1's auc: 0.838921	valid_1's binary_logloss: 0.143737
[12]	valid_0's auc: 0.857283	valid_0's binary_logloss: 0.132431	valid_1's auc: 0.837263	valid_1's binary_logloss: 0.143197
[13]	valid_0's auc: 0.858075	valid_0's binary_logloss: 0.131688	valid_1's auc: 0.837254	valid_1's binary_logloss: 0.142633
[14]	valid_0's auc: 0.858945	valid_0's binary_logloss: 0.130983	valid_1's auc: 0.837669	valid_1's binary_logloss: 0.142158
[15]	valid_0's auc: 0.859875	valid_0's binary_logloss: 0.130335	valid_1's auc: 0.837434	valid_1's binary_logloss: 0.141848
[16]	valid_0's auc: 0.860979	valid_0's binary_logloss: 0.129731	valid_1's auc: 0.837355	valid_1's binary_logloss: 0.141492
[17]	valid_0's auc: 0.861681	valid_0's binary_logloss: 0.129123	valid_1's auc: 0.837851	valid_1's binary_logloss: 0.14114
[18]	valid_0's auc: 0.863324	valid_0's binary_logloss: 0.128568	valid_1's auc: 0.838024	valid_1's binary_logloss: 0.140911
[19]	valid_0's auc: 0.864682	valid_0's binary_logloss: 0.128016	valid_1's auc: 0.838411	valid_1's binary_logloss: 0.140669
[20]	valid_0's auc: 0.865346	valid_0's binary_logloss: 0.127585	valid_1's auc: 0.838201	valid_1's binary_logloss: 0.140498
[21]	valid_0's auc: 0.8665	valid_0's binary_logloss: 0.127103	valid_1's auc: 0.83851	valid_1's binary_logloss: 0.140294
[22]	valid_0's auc: 0.867889	valid_0's binary_logloss: 0.126657	valid_1's auc: 0.839074	valid_1's binary_logloss: 0.140136
[23]	valid_0's auc: 0.868624	valid_0's binary_logloss: 0.126281	valid_1's auc: 0.838987	valid_1's binary_logloss: 0.140028
[24]	valid_0's auc: 0.869855	valid_0's binary_logloss: 0.125882	valid_1's auc: 0.838802	valid_1's binary_logloss: 0.139975
[25]	valid_0's auc: 0.870426	valid_0's binary_logloss: 0.125541	valid_1's auc: 0.83912	valid_1's binary_logloss: 0.139815
[26]	valid_0's auc: 0.871649	valid_0's binary_logloss: 0.125171	valid_1's auc: 0.838779	valid_1's binary_logloss: 0.139741
[27]	valid_0's auc: 0.872513	valid_0's binary_logloss: 0.124859	valid_1's auc: 0.839241	valid_1's binary_logloss: 0.139648
[28]	valid_0's auc: 0.873252	valid_0's binary_logloss: 0.124586	valid_1's auc: 0.839378	valid_1's binary_logloss: 0.139578
[29]	valid_0's auc: 0.87422	valid_0's binary_logloss: 0.124271	valid_1's auc: 0.839436	valid_1's binary_logloss: 0.139581
[30]	valid_0's auc: 0.874647	valid_0's binary_logloss: 0.124019	valid_1's auc: 0.83907	valid_1's binary_logloss: 0.13958
[31]	valid_0's auc: 0.875603	valid_0's binary_logloss: 0.123747	valid_1's auc: 0.839226	valid_1's binary_logloss: 0.139539
[32]	valid_0's auc: 0.87651	valid_0's binary_logloss: 0.123413	valid_1's auc: 0.838983	valid_1's binary_logloss: 0.139561
[33]	valid_0's auc: 0.877103	valid_0's binary_logloss: 0.123199	valid_1's auc: 0.839235	valid_1's binary_logloss: 0.139528
[34]	valid_0's auc: 0.878189	valid_0's binary_logloss: 0.122925	valid_1's auc: 0.839004	valid_1's binary_logloss: 0.139567
[35]	valid_0's auc: 0.878765	valid_0's binary_logloss: 0.122704	valid_1's auc: 0.838946	valid_1's binary_logloss: 0.139572
[36]	valid_0's auc: 0.879377	valid_0's binary_logloss: 0.122448	valid_1's auc: 0.83898	valid_1's binary_logloss: 0.139554
[37]	valid_0's auc: 0.880134	valid_0's binary_logloss: 0.122143	valid_1's auc: 0.838888	valid_1's binary_logloss: 0.139557
[38]	valid_0's auc: 0.880571	valid_0's binary_logloss: 0.121985	valid_1's auc: 0.839133	valid_1's binary_logloss: 0.13952
[39]	valid_0's auc: 0.88098	valid_0's binary_logloss: 0.12178	valid_1's auc: 0.838929	valid_1's binary_logloss: 0.139584
[40]	valid_0's auc: 0.881495	valid_0's binary_logloss: 0.121571	valid_1's auc: 0.838869	valid_1's binary_logloss: 0.139555
[41]	valid_0's auc: 0.881897	valid_0's binary_logloss: 0.121382	valid_1's auc: 0.838702	valid_1's binary_logloss: 0.139575
[42]	valid_0's auc: 0.882625	valid_0's binary_logloss: 0.121107	valid_1's auc: 0.83891	valid_1's binary_logloss: 0.139554
[43]	valid_0's auc: 0.882956	valid_0's binary_logloss: 0.120926	valid_1's auc: 0.83914	valid_1's binary_logloss: 0.139546
[44]	valid_0's auc: 0.883618	valid_0's binary_logloss: 0.120726	valid_1's auc: 0.838905	valid_1's binary_logloss: 0.139592
[45]	valid_0's auc: 0.88419	valid_0's binary_logloss: 0.120513	valid_1's auc: 0.838587	valid_1's binary_logloss: 0.139651
[46]	valid_0's auc: 0.884555	valid_0's binary_logloss: 0.120339	valid_1's auc: 0.838288	valid_1's binary_logloss: 0.139703
[47]	valid_0's auc: 0.884789	valid_0's binary_logloss: 0.120189	valid_1's auc: 0.838155	valid_1's binary_logloss: 0.139692
[48]	valid_0's auc: 0.884968	valid_0's binary_logloss: 0.120074	valid_1's auc: 0.8384	valid_1's binary_logloss: 0.139667
[49]	valid_0's auc: 0.885336	valid_0's binary_logloss: 0.119939	valid_1's auc: 0.83827	valid_1's binary_logloss: 0.13968
[50]	valid_0's auc: 0.885759	valid_0's binary_logloss: 0.119734	valid_1's auc: 0.838029	valid_1's binary_logloss: 0.139727
[51]	valid_0's auc: 0.886206	valid_0's binary_logloss: 0.119595	valid_1's auc: 0.838077	valid_1's binary_logloss: 0.13975
[52]	valid_0's auc: 0.886527	valid_0's binary_logloss: 0.119458	valid_1's auc: 0.838081	valid_1's binary_logloss: 0.139779
[53]	valid_0's auc: 0.886808	valid_0's binary_logloss: 0.119271	valid_1's auc: 0.838632	valid_1's binary_logloss: 0.139643
[54]	valid_0's auc: 0.887203	valid_0's binary_logloss: 0.119092	valid_1's auc: 0.838634	valid_1's binary_logloss: 0.139662
[55]	valid_0's auc: 0.88761	valid_0's binary_logloss: 0.118944	valid_1's auc: 0.838551	valid_1's binary_logloss: 0.139692
[56]	valid_0's auc: 0.887901	valid_0's binary_logloss: 0.118796	valid_1's auc: 0.838303	valid_1's binary_logloss: 0.139748
[57]	valid_0's auc: 0.888238	valid_0's binary_logloss: 0.118604	valid_1's auc: 0.838445	valid_1's binary_logloss: 0.139719
[58]	valid_0's auc: 0.888615	valid_0's binary_logloss: 0.118451	valid_1's auc: 0.838262	valid_1's binary_logloss: 0.139754
[59]	valid_0's auc: 0.889294	valid_0's binary_logloss: 0.11824	valid_1's auc: 0.837652	valid_1's binary_logloss: 0.139829
Early stopping, best iteration is:
[29]	valid_0's auc: 0.87422	valid_0's binary_logloss: 0.124271	valid_1's auc: 0.839436	valid_1's binary_logloss: 0.139581
[1]	valid_0's auc: 0.821645	valid_0's binary_logloss: 0.156526	valid_1's auc: 0.81857	valid_1's binary_logloss: 0.165099
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.827652	valid_0's binary_logloss: 0.151185	valid_1's auc: 0.82254	valid_1's binary_logloss: 0.160085
[3]	valid_0's auc: 0.836059	valid_0's binary_logloss: 0.14726	valid_1's auc: 0.828119	valid_1's binary_logloss: 0.156604
[4]	valid_0's auc: 0.840512	valid_0's binary_logloss: 0.144255	valid_1's auc: 0.831906	valid_1's binary_logloss: 0.153569
[5]	valid_0's auc: 0.841872	valid_0's binary_logloss: 0.141762	valid_1's auc: 0.834269	valid_1's binary_logloss: 0.151256
[6]	valid_0's auc: 0.844933	valid_0's binary_logloss: 0.139748	valid_1's auc: 0.835284	valid_1's binary_logloss: 0.149358
[7]	valid_0's auc: 0.845797	valid_0's binary_logloss: 0.138081	valid_1's auc: 0.834956	valid_1's binary_logloss: 0.147852
[8]	valid_0's auc: 0.847134	valid_0's binary_logloss: 0.136692	valid_1's auc: 0.836859	valid_1's binary_logloss: 0.146584
[9]	valid_0's auc: 0.849562	valid_0's binary_logloss: 0.135436	valid_1's auc: 0.837503	valid_1's binary_logloss: 0.145574
[10]	valid_0's auc: 0.850799	valid_0's binary_logloss: 0.134374	valid_1's auc: 0.838107	valid_1's binary_logloss: 0.144671
[11]	valid_0's auc: 0.852171	valid_0's binary_logloss: 0.133441	valid_1's auc: 0.838177	valid_1's binary_logloss: 0.143939
[12]	valid_0's auc: 0.854501	valid_0's binary_logloss: 0.132622	valid_1's auc: 0.838294	valid_1's binary_logloss: 0.143407
[13]	valid_0's auc: 0.856936	valid_0's binary_logloss: 0.131808	valid_1's auc: 0.838199	valid_1's binary_logloss: 0.142866
[14]	valid_0's auc: 0.857673	valid_0's binary_logloss: 0.131166	valid_1's auc: 0.837548	valid_1's binary_logloss: 0.142532
[15]	valid_0's auc: 0.859044	valid_0's binary_logloss: 0.130533	valid_1's auc: 0.837939	valid_1's binary_logloss: 0.142166
[16]	valid_0's auc: 0.859941	valid_0's binary_logloss: 0.129973	valid_1's auc: 0.837854	valid_1's binary_logloss: 0.141803
[17]	valid_0's auc: 0.861036	valid_0's binary_logloss: 0.129377	valid_1's auc: 0.838222	valid_1's binary_logloss: 0.141476
[18]	valid_0's auc: 0.862799	valid_0's binary_logloss: 0.128809	valid_1's auc: 0.838732	valid_1's binary_logloss: 0.141125
[19]	valid_0's auc: 0.864128	valid_0's binary_logloss: 0.128328	valid_1's auc: 0.839441	valid_1's binary_logloss: 0.140763
[20]	valid_0's auc: 0.864975	valid_0's binary_logloss: 0.127913	valid_1's auc: 0.839957	valid_1's binary_logloss: 0.140513
[21]	valid_0's auc: 0.866258	valid_0's binary_logloss: 0.127436	valid_1's auc: 0.83993	valid_1's binary_logloss: 0.140328
[22]	valid_0's auc: 0.867054	valid_0's binary_logloss: 0.127069	valid_1's auc: 0.840099	valid_1's binary_logloss: 0.14013
[23]	valid_0's auc: 0.867852	valid_0's binary_logloss: 0.126713	valid_1's auc: 0.839768	valid_1's binary_logloss: 0.140027
[24]	valid_0's auc: 0.868599	valid_0's binary_logloss: 0.126372	valid_1's auc: 0.840299	valid_1's binary_logloss: 0.139904
[25]	valid_0's auc: 0.869485	valid_0's binary_logloss: 0.126047	valid_1's auc: 0.840676	valid_1's binary_logloss: 0.139735
[26]	valid_0's auc: 0.870541	valid_0's binary_logloss: 0.125627	valid_1's auc: 0.840242	valid_1's binary_logloss: 0.139708
[27]	valid_0's auc: 0.871191	valid_0's binary_logloss: 0.125319	valid_1's auc: 0.839924	valid_1's binary_logloss: 0.139651
[28]	valid_0's auc: 0.871788	valid_0's binary_logloss: 0.125064	valid_1's auc: 0.839647	valid_1's binary_logloss: 0.139574
[29]	valid_0's auc: 0.872714	valid_0's binary_logloss: 0.124726	valid_1's auc: 0.840154	valid_1's binary_logloss: 0.139481
[30]	valid_0's auc: 0.873746	valid_0's binary_logloss: 0.124416	valid_1's auc: 0.839602	valid_1's binary_logloss: 0.139497
[31]	valid_0's auc: 0.874715	valid_0's binary_logloss: 0.124154	valid_1's auc: 0.839072	valid_1's binary_logloss: 0.139568
[32]	valid_0's auc: 0.875774	valid_0's binary_logloss: 0.123879	valid_1's auc: 0.838748	valid_1's binary_logloss: 0.139579
[33]	valid_0's auc: 0.876333	valid_0's binary_logloss: 0.123614	valid_1's auc: 0.83895	valid_1's binary_logloss: 0.139492
[34]	valid_0's auc: 0.876841	valid_0's binary_logloss: 0.123372	valid_1's auc: 0.839078	valid_1's binary_logloss: 0.139459
[35]	valid_0's auc: 0.877422	valid_0's binary_logloss: 0.123118	valid_1's auc: 0.839413	valid_1's binary_logloss: 0.139362
[36]	valid_0's auc: 0.878163	valid_0's binary_logloss: 0.122873	valid_1's auc: 0.839157	valid_1's binary_logloss: 0.139395
[37]	valid_0's auc: 0.87856	valid_0's binary_logloss: 0.122649	valid_1's auc: 0.839051	valid_1's binary_logloss: 0.139443
[38]	valid_0's auc: 0.879102	valid_0's binary_logloss: 0.122415	valid_1's auc: 0.839482	valid_1's binary_logloss: 0.139378
[39]	valid_0's auc: 0.879978	valid_0's binary_logloss: 0.122126	valid_1's auc: 0.83945	valid_1's binary_logloss: 0.139391
[40]	valid_0's auc: 0.880399	valid_0's binary_logloss: 0.121938	valid_1's auc: 0.840127	valid_1's binary_logloss: 0.139314
[41]	valid_0's auc: 0.880914	valid_0's binary_logloss: 0.121757	valid_1's auc: 0.839733	valid_1's binary_logloss: 0.13937
[42]	valid_0's auc: 0.881674	valid_0's binary_logloss: 0.121547	valid_1's auc: 0.839744	valid_1's binary_logloss: 0.139371
[43]	valid_0's auc: 0.882352	valid_0's binary_logloss: 0.121291	valid_1's auc: 0.839733	valid_1's binary_logloss: 0.139358
[44]	valid_0's auc: 0.882869	valid_0's binary_logloss: 0.121117	valid_1's auc: 0.839827	valid_1's binary_logloss: 0.13937
[45]	valid_0's auc: 0.883308	valid_0's binary_logloss: 0.120912	valid_1's auc: 0.839923	valid_1's binary_logloss: 0.139325
[46]	valid_0's auc: 0.883814	valid_0's binary_logloss: 0.120682	valid_1's auc: 0.83985	valid_1's binary_logloss: 0.139336
[47]	valid_0's auc: 0.884201	valid_0's binary_logloss: 0.120532	valid_1's auc: 0.839839	valid_1's binary_logloss: 0.139368
[48]	valid_0's auc: 0.884428	valid_0's binary_logloss: 0.120354	valid_1's auc: 0.839815	valid_1's binary_logloss: 0.139368
[49]	valid_0's auc: 0.884565	valid_0's binary_logloss: 0.120223	valid_1's auc: 0.83995	valid_1's binary_logloss: 0.139355
[50]	valid_0's auc: 0.885102	valid_0's binary_logloss: 0.120013	valid_1's auc: 0.839807	valid_1's binary_logloss: 0.13936
[51]	valid_0's auc: 0.885668	valid_0's binary_logloss: 0.119856	valid_1's auc: 0.839722	valid_1's binary_logloss: 0.139361
[52]	valid_0's auc: 0.886053	valid_0's binary_logloss: 0.119673	valid_1's auc: 0.839593	valid_1's binary_logloss: 0.139409
[53]	valid_0's auc: 0.886187	valid_0's binary_logloss: 0.119567	valid_1's auc: 0.839723	valid_1's binary_logloss: 0.139436
[54]	valid_0's auc: 0.886397	valid_0's binary_logloss: 0.11943	valid_1's auc: 0.839999	valid_1's binary_logloss: 0.139424
[55]	valid_0's auc: 0.886702	valid_0's binary_logloss: 0.119279	valid_1's auc: 0.839951	valid_1's binary_logloss: 0.139433
Early stopping, best iteration is:
[25]	valid_0's auc: 0.869485	valid_0's binary_logloss: 0.126047	valid_1's auc: 0.840676	valid_1's binary_logloss: 0.139735
[1]	valid_0's auc: 0.832891	valid_0's binary_logloss: 0.155301	valid_1's auc: 0.818851	valid_1's binary_logloss: 0.164831
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.845606	valid_0's binary_logloss: 0.149818	valid_1's auc: 0.826972	valid_1's binary_logloss: 0.159925
[3]	valid_0's auc: 0.850188	valid_0's binary_logloss: 0.145683	valid_1's auc: 0.828704	valid_1's binary_logloss: 0.156313
[4]	valid_0's auc: 0.85231	valid_0's binary_logloss: 0.142507	valid_1's auc: 0.829069	valid_1's binary_logloss: 0.153533
[5]	valid_0's auc: 0.854335	valid_0's binary_logloss: 0.139906	valid_1's auc: 0.832066	valid_1's binary_logloss: 0.151309
[6]	valid_0's auc: 0.857012	valid_0's binary_logloss: 0.137695	valid_1's auc: 0.834623	valid_1's binary_logloss: 0.14943
[7]	valid_0's auc: 0.858925	valid_0's binary_logloss: 0.135794	valid_1's auc: 0.834545	valid_1's binary_logloss: 0.147904
[8]	valid_0's auc: 0.861608	valid_0's binary_logloss: 0.134145	valid_1's auc: 0.833797	valid_1's binary_logloss: 0.146702
[9]	valid_0's auc: 0.863357	valid_0's binary_logloss: 0.132708	valid_1's auc: 0.834249	valid_1's binary_logloss: 0.145747
[10]	valid_0's auc: 0.866071	valid_0's binary_logloss: 0.131394	valid_1's auc: 0.834473	valid_1's binary_logloss: 0.144785
[11]	valid_0's auc: 0.867641	valid_0's binary_logloss: 0.130276	valid_1's auc: 0.834299	valid_1's binary_logloss: 0.144038
[12]	valid_0's auc: 0.869161	valid_0's binary_logloss: 0.129189	valid_1's auc: 0.834149	valid_1's binary_logloss: 0.143493
[13]	valid_0's auc: 0.870667	valid_0's binary_logloss: 0.128249	valid_1's auc: 0.833217	valid_1's binary_logloss: 0.14314
[14]	valid_0's auc: 0.872337	valid_0's binary_logloss: 0.127379	valid_1's auc: 0.833305	valid_1's binary_logloss: 0.14275
[15]	valid_0's auc: 0.873712	valid_0's binary_logloss: 0.126538	valid_1's auc: 0.832635	valid_1's binary_logloss: 0.14257
[16]	valid_0's auc: 0.874704	valid_0's binary_logloss: 0.12582	valid_1's auc: 0.832562	valid_1's binary_logloss: 0.142228
[17]	valid_0's auc: 0.875722	valid_0's binary_logloss: 0.125162	valid_1's auc: 0.832021	valid_1's binary_logloss: 0.142052
[18]	valid_0's auc: 0.877178	valid_0's binary_logloss: 0.124543	valid_1's auc: 0.831549	valid_1's binary_logloss: 0.141895
[19]	valid_0's auc: 0.877995	valid_0's binary_logloss: 0.123978	valid_1's auc: 0.831242	valid_1's binary_logloss: 0.141804
[20]	valid_0's auc: 0.87956	valid_0's binary_logloss: 0.123373	valid_1's auc: 0.83106	valid_1's binary_logloss: 0.141701
[21]	valid_0's auc: 0.880405	valid_0's binary_logloss: 0.122832	valid_1's auc: 0.830305	valid_1's binary_logloss: 0.14165
[22]	valid_0's auc: 0.881423	valid_0's binary_logloss: 0.122355	valid_1's auc: 0.830239	valid_1's binary_logloss: 0.141656
[23]	valid_0's auc: 0.882393	valid_0's binary_logloss: 0.121843	valid_1's auc: 0.830552	valid_1's binary_logloss: 0.141537
[24]	valid_0's auc: 0.884118	valid_0's binary_logloss: 0.121284	valid_1's auc: 0.830946	valid_1's binary_logloss: 0.141426
[25]	valid_0's auc: 0.885236	valid_0's binary_logloss: 0.120821	valid_1's auc: 0.829956	valid_1's binary_logloss: 0.141489
[26]	valid_0's auc: 0.886368	valid_0's binary_logloss: 0.120378	valid_1's auc: 0.829528	valid_1's binary_logloss: 0.141559
[27]	valid_0's auc: 0.88693	valid_0's binary_logloss: 0.119952	valid_1's auc: 0.829491	valid_1's binary_logloss: 0.141578
[28]	valid_0's auc: 0.887827	valid_0's binary_logloss: 0.1195	valid_1's auc: 0.829533	valid_1's binary_logloss: 0.141548
[29]	valid_0's auc: 0.888834	valid_0's binary_logloss: 0.119055	valid_1's auc: 0.829809	valid_1's binary_logloss: 0.141549
[30]	valid_0's auc: 0.889728	valid_0's binary_logloss: 0.118695	valid_1's auc: 0.829539	valid_1's binary_logloss: 0.14162
[31]	valid_0's auc: 0.890934	valid_0's binary_logloss: 0.118306	valid_1's auc: 0.829985	valid_1's binary_logloss: 0.141525
[32]	valid_0's auc: 0.891349	valid_0's binary_logloss: 0.11798	valid_1's auc: 0.829723	valid_1's binary_logloss: 0.141529
[33]	valid_0's auc: 0.891896	valid_0's binary_logloss: 0.117633	valid_1's auc: 0.829851	valid_1's binary_logloss: 0.141488
[34]	valid_0's auc: 0.892553	valid_0's binary_logloss: 0.117331	valid_1's auc: 0.82916	valid_1's binary_logloss: 0.141607
[35]	valid_0's auc: 0.893197	valid_0's binary_logloss: 0.116998	valid_1's auc: 0.828503	valid_1's binary_logloss: 0.141781
[36]	valid_0's auc: 0.894242	valid_0's binary_logloss: 0.116658	valid_1's auc: 0.828875	valid_1's binary_logloss: 0.141779
Early stopping, best iteration is:
[6]	valid_0's auc: 0.857012	valid_0's binary_logloss: 0.137695	valid_1's auc: 0.834623	valid_1's binary_logloss: 0.14943
[1]	valid_0's auc: 0.833054	valid_0's binary_logloss: 0.155723	valid_1's auc: 0.817048	valid_1's binary_logloss: 0.165042
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.840929	valid_0's binary_logloss: 0.149897	valid_1's auc: 0.820838	valid_1's binary_logloss: 0.159671
[3]	valid_0's auc: 0.85054	valid_0's binary_logloss: 0.14575	valid_1's auc: 0.827828	valid_1's binary_logloss: 0.156036
[4]	valid_0's auc: 0.855605	valid_0's binary_logloss: 0.14246	valid_1's auc: 0.831388	valid_1's binary_logloss: 0.153065
[5]	valid_0's auc: 0.858903	valid_0's binary_logloss: 0.139711	valid_1's auc: 0.834493	valid_1's binary_logloss: 0.150758
[6]	valid_0's auc: 0.860869	valid_0's binary_logloss: 0.137459	valid_1's auc: 0.833775	valid_1's binary_logloss: 0.149055
[7]	valid_0's auc: 0.862879	valid_0's binary_logloss: 0.135513	valid_1's auc: 0.836672	valid_1's binary_logloss: 0.147391
[8]	valid_0's auc: 0.863872	valid_0's binary_logloss: 0.133916	valid_1's auc: 0.835878	valid_1's binary_logloss: 0.146178
[9]	valid_0's auc: 0.865562	valid_0's binary_logloss: 0.132428	valid_1's auc: 0.836038	valid_1's binary_logloss: 0.145107
[10]	valid_0's auc: 0.867401	valid_0's binary_logloss: 0.131116	valid_1's auc: 0.836429	valid_1's binary_logloss: 0.144153
[11]	valid_0's auc: 0.868477	valid_0's binary_logloss: 0.129981	valid_1's auc: 0.836388	valid_1's binary_logloss: 0.143351
[12]	valid_0's auc: 0.87007	valid_0's binary_logloss: 0.128907	valid_1's auc: 0.836122	valid_1's binary_logloss: 0.142803
[13]	valid_0's auc: 0.8711	valid_0's binary_logloss: 0.127994	valid_1's auc: 0.836547	valid_1's binary_logloss: 0.142243
[14]	valid_0's auc: 0.872903	valid_0's binary_logloss: 0.127122	valid_1's auc: 0.837837	valid_1's binary_logloss: 0.141718
[15]	valid_0's auc: 0.874543	valid_0's binary_logloss: 0.126262	valid_1's auc: 0.837921	valid_1's binary_logloss: 0.141291
[16]	valid_0's auc: 0.876148	valid_0's binary_logloss: 0.1255	valid_1's auc: 0.83698	valid_1's binary_logloss: 0.14114
[17]	valid_0's auc: 0.878214	valid_0's binary_logloss: 0.124724	valid_1's auc: 0.836901	valid_1's binary_logloss: 0.140905
[18]	valid_0's auc: 0.879554	valid_0's binary_logloss: 0.124053	valid_1's auc: 0.836497	valid_1's binary_logloss: 0.140787
[19]	valid_0's auc: 0.880715	valid_0's binary_logloss: 0.123405	valid_1's auc: 0.837087	valid_1's binary_logloss: 0.140481
[20]	valid_0's auc: 0.881492	valid_0's binary_logloss: 0.122867	valid_1's auc: 0.836798	valid_1's binary_logloss: 0.140353
[21]	valid_0's auc: 0.882521	valid_0's binary_logloss: 0.122301	valid_1's auc: 0.836676	valid_1's binary_logloss: 0.140236
[22]	valid_0's auc: 0.883688	valid_0's binary_logloss: 0.121776	valid_1's auc: 0.836698	valid_1's binary_logloss: 0.140127
[23]	valid_0's auc: 0.88451	valid_0's binary_logloss: 0.121226	valid_1's auc: 0.8369	valid_1's binary_logloss: 0.140044
[24]	valid_0's auc: 0.88609	valid_0's binary_logloss: 0.120643	valid_1's auc: 0.836883	valid_1's binary_logloss: 0.139917
[25]	valid_0's auc: 0.887311	valid_0's binary_logloss: 0.120104	valid_1's auc: 0.837018	valid_1's binary_logloss: 0.139879
[26]	valid_0's auc: 0.888754	valid_0's binary_logloss: 0.11961	valid_1's auc: 0.836808	valid_1's binary_logloss: 0.139895
[27]	valid_0's auc: 0.889697	valid_0's binary_logloss: 0.119114	valid_1's auc: 0.836828	valid_1's binary_logloss: 0.139904
[28]	valid_0's auc: 0.891102	valid_0's binary_logloss: 0.118637	valid_1's auc: 0.83685	valid_1's binary_logloss: 0.139833
[29]	valid_0's auc: 0.891823	valid_0's binary_logloss: 0.118217	valid_1's auc: 0.836568	valid_1's binary_logloss: 0.139836
[30]	valid_0's auc: 0.893059	valid_0's binary_logloss: 0.117781	valid_1's auc: 0.836774	valid_1's binary_logloss: 0.139827
[31]	valid_0's auc: 0.893688	valid_0's binary_logloss: 0.117412	valid_1's auc: 0.83732	valid_1's binary_logloss: 0.139721
[32]	valid_0's auc: 0.894643	valid_0's binary_logloss: 0.117013	valid_1's auc: 0.8373	valid_1's binary_logloss: 0.1397
[33]	valid_0's auc: 0.89555	valid_0's binary_logloss: 0.11666	valid_1's auc: 0.837425	valid_1's binary_logloss: 0.139692
[34]	valid_0's auc: 0.896334	valid_0's binary_logloss: 0.116252	valid_1's auc: 0.836971	valid_1's binary_logloss: 0.139746
[35]	valid_0's auc: 0.897058	valid_0's binary_logloss: 0.115923	valid_1's auc: 0.837212	valid_1's binary_logloss: 0.139756
[36]	valid_0's auc: 0.897581	valid_0's binary_logloss: 0.115642	valid_1's auc: 0.837227	valid_1's binary_logloss: 0.139719
[37]	valid_0's auc: 0.898111	valid_0's binary_logloss: 0.115297	valid_1's auc: 0.83733	valid_1's binary_logloss: 0.139758
[38]	valid_0's auc: 0.898762	valid_0's binary_logloss: 0.11499	valid_1's auc: 0.837353	valid_1's binary_logloss: 0.13975
[39]	valid_0's auc: 0.899461	valid_0's binary_logloss: 0.114643	valid_1's auc: 0.837189	valid_1's binary_logloss: 0.139803
[40]	valid_0's auc: 0.900215	valid_0's binary_logloss: 0.114372	valid_1's auc: 0.837001	valid_1's binary_logloss: 0.139865
[41]	valid_0's auc: 0.900847	valid_0's binary_logloss: 0.114036	valid_1's auc: 0.837156	valid_1's binary_logloss: 0.139874
[42]	valid_0's auc: 0.901233	valid_0's binary_logloss: 0.113757	valid_1's auc: 0.837341	valid_1's binary_logloss: 0.139879
[43]	valid_0's auc: 0.901621	valid_0's binary_logloss: 0.113498	valid_1's auc: 0.837116	valid_1's binary_logloss: 0.139982
[44]	valid_0's auc: 0.902271	valid_0's binary_logloss: 0.113194	valid_1's auc: 0.836966	valid_1's binary_logloss: 0.140012
[45]	valid_0's auc: 0.902522	valid_0's binary_logloss: 0.112949	valid_1's auc: 0.836915	valid_1's binary_logloss: 0.140035
Early stopping, best iteration is:
[15]	valid_0's auc: 0.874543	valid_0's binary_logloss: 0.126262	valid_1's auc: 0.837921	valid_1's binary_logloss: 0.141291
[1]	valid_0's auc: 0.830649	valid_0's binary_logloss: 0.155755	valid_1's auc: 0.81673	valid_1's binary_logloss: 0.164976
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.839656	valid_0's binary_logloss: 0.150015	valid_1's auc: 0.822663	valid_1's binary_logloss: 0.159866
[3]	valid_0's auc: 0.847827	valid_0's binary_logloss: 0.145888	valid_1's auc: 0.829595	valid_1's binary_logloss: 0.156171
[4]	valid_0's auc: 0.851153	valid_0's binary_logloss: 0.142542	valid_1's auc: 0.831052	valid_1's binary_logloss: 0.153261
[5]	valid_0's auc: 0.854418	valid_0's binary_logloss: 0.139824	valid_1's auc: 0.832726	valid_1's binary_logloss: 0.150974
[6]	valid_0's auc: 0.85615	valid_0's binary_logloss: 0.137634	valid_1's auc: 0.835578	valid_1's binary_logloss: 0.148988
[7]	valid_0's auc: 0.857116	valid_0's binary_logloss: 0.135891	valid_1's auc: 0.834971	valid_1's binary_logloss: 0.147626
[8]	valid_0's auc: 0.859522	valid_0's binary_logloss: 0.134235	valid_1's auc: 0.836528	valid_1's binary_logloss: 0.146258
[9]	valid_0's auc: 0.861901	valid_0's binary_logloss: 0.132802	valid_1's auc: 0.8366	valid_1's binary_logloss: 0.145288
[10]	valid_0's auc: 0.863552	valid_0's binary_logloss: 0.131531	valid_1's auc: 0.835683	valid_1's binary_logloss: 0.144554
[11]	valid_0's auc: 0.865032	valid_0's binary_logloss: 0.13038	valid_1's auc: 0.835477	valid_1's binary_logloss: 0.143965
[12]	valid_0's auc: 0.867552	valid_0's binary_logloss: 0.12931	valid_1's auc: 0.837042	valid_1's binary_logloss: 0.14326
[13]	valid_0's auc: 0.870662	valid_0's binary_logloss: 0.128333	valid_1's auc: 0.837981	valid_1's binary_logloss: 0.142696
[14]	valid_0's auc: 0.872498	valid_0's binary_logloss: 0.127408	valid_1's auc: 0.837713	valid_1's binary_logloss: 0.142262
[15]	valid_0's auc: 0.873589	valid_0's binary_logloss: 0.126603	valid_1's auc: 0.837256	valid_1's binary_logloss: 0.141869
[16]	valid_0's auc: 0.875441	valid_0's binary_logloss: 0.125783	valid_1's auc: 0.837912	valid_1's binary_logloss: 0.141528
[17]	valid_0's auc: 0.877154	valid_0's binary_logloss: 0.125036	valid_1's auc: 0.836689	valid_1's binary_logloss: 0.141384
[18]	valid_0's auc: 0.878205	valid_0's binary_logloss: 0.124367	valid_1's auc: 0.835872	valid_1's binary_logloss: 0.141256
[19]	valid_0's auc: 0.879502	valid_0's binary_logloss: 0.12371	valid_1's auc: 0.835242	valid_1's binary_logloss: 0.141186
[20]	valid_0's auc: 0.880623	valid_0's binary_logloss: 0.123116	valid_1's auc: 0.835731	valid_1's binary_logloss: 0.140946
[21]	valid_0's auc: 0.881898	valid_0's binary_logloss: 0.122562	valid_1's auc: 0.834984	valid_1's binary_logloss: 0.140914
[22]	valid_0's auc: 0.882919	valid_0's binary_logloss: 0.122011	valid_1's auc: 0.83655	valid_1's binary_logloss: 0.140596
[23]	valid_0's auc: 0.88356	valid_0's binary_logloss: 0.121524	valid_1's auc: 0.836903	valid_1's binary_logloss: 0.140423
[24]	valid_0's auc: 0.884733	valid_0's binary_logloss: 0.120948	valid_1's auc: 0.837346	valid_1's binary_logloss: 0.140282
[25]	valid_0's auc: 0.885783	valid_0's binary_logloss: 0.120481	valid_1's auc: 0.837461	valid_1's binary_logloss: 0.140158
[26]	valid_0's auc: 0.887006	valid_0's binary_logloss: 0.119965	valid_1's auc: 0.837303	valid_1's binary_logloss: 0.140169
[27]	valid_0's auc: 0.887947	valid_0's binary_logloss: 0.119452	valid_1's auc: 0.837557	valid_1's binary_logloss: 0.140113
[28]	valid_0's auc: 0.888786	valid_0's binary_logloss: 0.11903	valid_1's auc: 0.837108	valid_1's binary_logloss: 0.140154
[29]	valid_0's auc: 0.890614	valid_0's binary_logloss: 0.118542	valid_1's auc: 0.837503	valid_1's binary_logloss: 0.14008
[30]	valid_0's auc: 0.891023	valid_0's binary_logloss: 0.118199	valid_1's auc: 0.83748	valid_1's binary_logloss: 0.140051
[31]	valid_0's auc: 0.891989	valid_0's binary_logloss: 0.117866	valid_1's auc: 0.837533	valid_1's binary_logloss: 0.140006
[32]	valid_0's auc: 0.892909	valid_0's binary_logloss: 0.117477	valid_1's auc: 0.83708	valid_1's binary_logloss: 0.140054
[33]	valid_0's auc: 0.893597	valid_0's binary_logloss: 0.117091	valid_1's auc: 0.836874	valid_1's binary_logloss: 0.140061
[34]	valid_0's auc: 0.894331	valid_0's binary_logloss: 0.116711	valid_1's auc: 0.836404	valid_1's binary_logloss: 0.140111
[35]	valid_0's auc: 0.895331	valid_0's binary_logloss: 0.116306	valid_1's auc: 0.837102	valid_1's binary_logloss: 0.139942
[36]	valid_0's auc: 0.895954	valid_0's binary_logloss: 0.115973	valid_1's auc: 0.837138	valid_1's binary_logloss: 0.139958
[37]	valid_0's auc: 0.896701	valid_0's binary_logloss: 0.115673	valid_1's auc: 0.837045	valid_1's binary_logloss: 0.139954
[38]	valid_0's auc: 0.897344	valid_0's binary_logloss: 0.115335	valid_1's auc: 0.836585	valid_1's binary_logloss: 0.140026
[39]	valid_0's auc: 0.897605	valid_0's binary_logloss: 0.115043	valid_1's auc: 0.836359	valid_1's binary_logloss: 0.140104
[40]	valid_0's auc: 0.898073	valid_0's binary_logloss: 0.114754	valid_1's auc: 0.836432	valid_1's binary_logloss: 0.140096
[41]	valid_0's auc: 0.898701	valid_0's binary_logloss: 0.114389	valid_1's auc: 0.836355	valid_1's binary_logloss: 0.140115
[42]	valid_0's auc: 0.898936	valid_0's binary_logloss: 0.114144	valid_1's auc: 0.836485	valid_1's binary_logloss: 0.14011
[43]	valid_0's auc: 0.899275	valid_0's binary_logloss: 0.11385	valid_1's auc: 0.836213	valid_1's binary_logloss: 0.140214
Early stopping, best iteration is:
[13]	valid_0's auc: 0.870662	valid_0's binary_logloss: 0.128333	valid_1's auc: 0.837981	valid_1's binary_logloss: 0.142696
[1]	valid_0's auc: 0.832891	valid_0's binary_logloss: 0.155301	valid_1's auc: 0.818851	valid_1's binary_logloss: 0.164831
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.845606	valid_0's binary_logloss: 0.149818	valid_1's auc: 0.826972	valid_1's binary_logloss: 0.159925
[3]	valid_0's auc: 0.850188	valid_0's binary_logloss: 0.145683	valid_1's auc: 0.828704	valid_1's binary_logloss: 0.156313
[4]	valid_0's auc: 0.85231	valid_0's binary_logloss: 0.142507	valid_1's auc: 0.829069	valid_1's binary_logloss: 0.153533
[5]	valid_0's auc: 0.854335	valid_0's binary_logloss: 0.139906	valid_1's auc: 0.832066	valid_1's binary_logloss: 0.151309
[6]	valid_0's auc: 0.857012	valid_0's binary_logloss: 0.137695	valid_1's auc: 0.834623	valid_1's binary_logloss: 0.14943
[7]	valid_0's auc: 0.858925	valid_0's binary_logloss: 0.135794	valid_1's auc: 0.834545	valid_1's binary_logloss: 0.147904
[8]	valid_0's auc: 0.861608	valid_0's binary_logloss: 0.134145	valid_1's auc: 0.833797	valid_1's binary_logloss: 0.146702
[9]	valid_0's auc: 0.863357	valid_0's binary_logloss: 0.132708	valid_1's auc: 0.834249	valid_1's binary_logloss: 0.145747
[10]	valid_0's auc: 0.866071	valid_0's binary_logloss: 0.131394	valid_1's auc: 0.834473	valid_1's binary_logloss: 0.144785
[11]	valid_0's auc: 0.867641	valid_0's binary_logloss: 0.130276	valid_1's auc: 0.834299	valid_1's binary_logloss: 0.144038
[12]	valid_0's auc: 0.869161	valid_0's binary_logloss: 0.129189	valid_1's auc: 0.834149	valid_1's binary_logloss: 0.143493
[13]	valid_0's auc: 0.870667	valid_0's binary_logloss: 0.128249	valid_1's auc: 0.833217	valid_1's binary_logloss: 0.14314
[14]	valid_0's auc: 0.872337	valid_0's binary_logloss: 0.127379	valid_1's auc: 0.833305	valid_1's binary_logloss: 0.14275
[15]	valid_0's auc: 0.873712	valid_0's binary_logloss: 0.126538	valid_1's auc: 0.832635	valid_1's binary_logloss: 0.14257
[16]	valid_0's auc: 0.874704	valid_0's binary_logloss: 0.12582	valid_1's auc: 0.832562	valid_1's binary_logloss: 0.142228
[17]	valid_0's auc: 0.875722	valid_0's binary_logloss: 0.125162	valid_1's auc: 0.832021	valid_1's binary_logloss: 0.142052
[18]	valid_0's auc: 0.877178	valid_0's binary_logloss: 0.124543	valid_1's auc: 0.831549	valid_1's binary_logloss: 0.141895
[19]	valid_0's auc: 0.877995	valid_0's binary_logloss: 0.123978	valid_1's auc: 0.831242	valid_1's binary_logloss: 0.141804
[20]	valid_0's auc: 0.87956	valid_0's binary_logloss: 0.123373	valid_1's auc: 0.83106	valid_1's binary_logloss: 0.141701
[21]	valid_0's auc: 0.880405	valid_0's binary_logloss: 0.122832	valid_1's auc: 0.830305	valid_1's binary_logloss: 0.14165
[22]	valid_0's auc: 0.881423	valid_0's binary_logloss: 0.122355	valid_1's auc: 0.830239	valid_1's binary_logloss: 0.141656
[23]	valid_0's auc: 0.882393	valid_0's binary_logloss: 0.121843	valid_1's auc: 0.830552	valid_1's binary_logloss: 0.141537
[24]	valid_0's auc: 0.884118	valid_0's binary_logloss: 0.121284	valid_1's auc: 0.830946	valid_1's binary_logloss: 0.141426
[25]	valid_0's auc: 0.885236	valid_0's binary_logloss: 0.120821	valid_1's auc: 0.829956	valid_1's binary_logloss: 0.141489
[26]	valid_0's auc: 0.886368	valid_0's binary_logloss: 0.120378	valid_1's auc: 0.829528	valid_1's binary_logloss: 0.141559
[27]	valid_0's auc: 0.88693	valid_0's binary_logloss: 0.119952	valid_1's auc: 0.829491	valid_1's binary_logloss: 0.141578
[28]	valid_0's auc: 0.887827	valid_0's binary_logloss: 0.1195	valid_1's auc: 0.829533	valid_1's binary_logloss: 0.141548
[29]	valid_0's auc: 0.888834	valid_0's binary_logloss: 0.119055	valid_1's auc: 0.829809	valid_1's binary_logloss: 0.141549
[30]	valid_0's auc: 0.889728	valid_0's binary_logloss: 0.118695	valid_1's auc: 0.829539	valid_1's binary_logloss: 0.14162
[31]	valid_0's auc: 0.890934	valid_0's binary_logloss: 0.118306	valid_1's auc: 0.829985	valid_1's binary_logloss: 0.141525
[32]	valid_0's auc: 0.891349	valid_0's binary_logloss: 0.11798	valid_1's auc: 0.829723	valid_1's binary_logloss: 0.141529
[33]	valid_0's auc: 0.891896	valid_0's binary_logloss: 0.117633	valid_1's auc: 0.829851	valid_1's binary_logloss: 0.141488
[34]	valid_0's auc: 0.892553	valid_0's binary_logloss: 0.117331	valid_1's auc: 0.82916	valid_1's binary_logloss: 0.141607
[35]	valid_0's auc: 0.893197	valid_0's binary_logloss: 0.116998	valid_1's auc: 0.828503	valid_1's binary_logloss: 0.141781
[36]	valid_0's auc: 0.894242	valid_0's binary_logloss: 0.116658	valid_1's auc: 0.828875	valid_1's binary_logloss: 0.141779
Early stopping, best iteration is:
[6]	valid_0's auc: 0.857012	valid_0's binary_logloss: 0.137695	valid_1's auc: 0.834623	valid_1's binary_logloss: 0.14943
[1]	valid_0's auc: 0.833054	valid_0's binary_logloss: 0.155723	valid_1's auc: 0.817048	valid_1's binary_logloss: 0.165042
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.840929	valid_0's binary_logloss: 0.149897	valid_1's auc: 0.820838	valid_1's binary_logloss: 0.159671
[3]	valid_0's auc: 0.85054	valid_0's binary_logloss: 0.14575	valid_1's auc: 0.827828	valid_1's binary_logloss: 0.156036
[4]	valid_0's auc: 0.855605	valid_0's binary_logloss: 0.14246	valid_1's auc: 0.831388	valid_1's binary_logloss: 0.153065
[5]	valid_0's auc: 0.858903	valid_0's binary_logloss: 0.139711	valid_1's auc: 0.834493	valid_1's binary_logloss: 0.150758
[6]	valid_0's auc: 0.860869	valid_0's binary_logloss: 0.137459	valid_1's auc: 0.833775	valid_1's binary_logloss: 0.149055
[7]	valid_0's auc: 0.862879	valid_0's binary_logloss: 0.135513	valid_1's auc: 0.836672	valid_1's binary_logloss: 0.147391
[8]	valid_0's auc: 0.863872	valid_0's binary_logloss: 0.133916	valid_1's auc: 0.835878	valid_1's binary_logloss: 0.146178
[9]	valid_0's auc: 0.865562	valid_0's binary_logloss: 0.132428	valid_1's auc: 0.836038	valid_1's binary_logloss: 0.145107
[10]	valid_0's auc: 0.867401	valid_0's binary_logloss: 0.131116	valid_1's auc: 0.836429	valid_1's binary_logloss: 0.144153
[11]	valid_0's auc: 0.868477	valid_0's binary_logloss: 0.129981	valid_1's auc: 0.836388	valid_1's binary_logloss: 0.143351
[12]	valid_0's auc: 0.87007	valid_0's binary_logloss: 0.128907	valid_1's auc: 0.836122	valid_1's binary_logloss: 0.142803
[13]	valid_0's auc: 0.8711	valid_0's binary_logloss: 0.127994	valid_1's auc: 0.836547	valid_1's binary_logloss: 0.142243
[14]	valid_0's auc: 0.872903	valid_0's binary_logloss: 0.127122	valid_1's auc: 0.837837	valid_1's binary_logloss: 0.141718
[15]	valid_0's auc: 0.874543	valid_0's binary_logloss: 0.126262	valid_1's auc: 0.837921	valid_1's binary_logloss: 0.141291
[16]	valid_0's auc: 0.876148	valid_0's binary_logloss: 0.1255	valid_1's auc: 0.83698	valid_1's binary_logloss: 0.14114
[17]	valid_0's auc: 0.878214	valid_0's binary_logloss: 0.124724	valid_1's auc: 0.836901	valid_1's binary_logloss: 0.140905
[18]	valid_0's auc: 0.879554	valid_0's binary_logloss: 0.124053	valid_1's auc: 0.836497	valid_1's binary_logloss: 0.140787
[19]	valid_0's auc: 0.880715	valid_0's binary_logloss: 0.123405	valid_1's auc: 0.837087	valid_1's binary_logloss: 0.140481
[20]	valid_0's auc: 0.881492	valid_0's binary_logloss: 0.122867	valid_1's auc: 0.836798	valid_1's binary_logloss: 0.140353
[21]	valid_0's auc: 0.882521	valid_0's binary_logloss: 0.122301	valid_1's auc: 0.836676	valid_1's binary_logloss: 0.140236
[22]	valid_0's auc: 0.883688	valid_0's binary_logloss: 0.121776	valid_1's auc: 0.836698	valid_1's binary_logloss: 0.140127
[23]	valid_0's auc: 0.88451	valid_0's binary_logloss: 0.121226	valid_1's auc: 0.8369	valid_1's binary_logloss: 0.140044
[24]	valid_0's auc: 0.88609	valid_0's binary_logloss: 0.120643	valid_1's auc: 0.836883	valid_1's binary_logloss: 0.139917
[25]	valid_0's auc: 0.887311	valid_0's binary_logloss: 0.120104	valid_1's auc: 0.837018	valid_1's binary_logloss: 0.139879
[26]	valid_0's auc: 0.888754	valid_0's binary_logloss: 0.11961	valid_1's auc: 0.836808	valid_1's binary_logloss: 0.139895
[27]	valid_0's auc: 0.889697	valid_0's binary_logloss: 0.119114	valid_1's auc: 0.836828	valid_1's binary_logloss: 0.139904
[28]	valid_0's auc: 0.891102	valid_0's binary_logloss: 0.118637	valid_1's auc: 0.83685	valid_1's binary_logloss: 0.139833
[29]	valid_0's auc: 0.891823	valid_0's binary_logloss: 0.118217	valid_1's auc: 0.836568	valid_1's binary_logloss: 0.139836
[30]	valid_0's auc: 0.893059	valid_0's binary_logloss: 0.117781	valid_1's auc: 0.836774	valid_1's binary_logloss: 0.139827
[31]	valid_0's auc: 0.893688	valid_0's binary_logloss: 0.117412	valid_1's auc: 0.83732	valid_1's binary_logloss: 0.139721
[32]	valid_0's auc: 0.894643	valid_0's binary_logloss: 0.117013	valid_1's auc: 0.8373	valid_1's binary_logloss: 0.1397
[33]	valid_0's auc: 0.89555	valid_0's binary_logloss: 0.11666	valid_1's auc: 0.837425	valid_1's binary_logloss: 0.139692
[34]	valid_0's auc: 0.896334	valid_0's binary_logloss: 0.116252	valid_1's auc: 0.836971	valid_1's binary_logloss: 0.139746
[35]	valid_0's auc: 0.897058	valid_0's binary_logloss: 0.115923	valid_1's auc: 0.837212	valid_1's binary_logloss: 0.139756
[36]	valid_0's auc: 0.897581	valid_0's binary_logloss: 0.115642	valid_1's auc: 0.837227	valid_1's binary_logloss: 0.139719
[37]	valid_0's auc: 0.898111	valid_0's binary_logloss: 0.115297	valid_1's auc: 0.83733	valid_1's binary_logloss: 0.139758
[38]	valid_0's auc: 0.898762	valid_0's binary_logloss: 0.11499	valid_1's auc: 0.837353	valid_1's binary_logloss: 0.13975
[39]	valid_0's auc: 0.899461	valid_0's binary_logloss: 0.114643	valid_1's auc: 0.837189	valid_1's binary_logloss: 0.139803
[40]	valid_0's auc: 0.900215	valid_0's binary_logloss: 0.114372	valid_1's auc: 0.837001	valid_1's binary_logloss: 0.139865
[41]	valid_0's auc: 0.900847	valid_0's binary_logloss: 0.114036	valid_1's auc: 0.837156	valid_1's binary_logloss: 0.139874
[42]	valid_0's auc: 0.901233	valid_0's binary_logloss: 0.113757	valid_1's auc: 0.837341	valid_1's binary_logloss: 0.139879
[43]	valid_0's auc: 0.901621	valid_0's binary_logloss: 0.113498	valid_1's auc: 0.837116	valid_1's binary_logloss: 0.139982
[44]	valid_0's auc: 0.902271	valid_0's binary_logloss: 0.113194	valid_1's auc: 0.836966	valid_1's binary_logloss: 0.140012
[45]	valid_0's auc: 0.902522	valid_0's binary_logloss: 0.112949	valid_1's auc: 0.836915	valid_1's binary_logloss: 0.140035
Early stopping, best iteration is:
[15]	valid_0's auc: 0.874543	valid_0's binary_logloss: 0.126262	valid_1's auc: 0.837921	valid_1's binary_logloss: 0.141291
[1]	valid_0's auc: 0.830649	valid_0's binary_logloss: 0.155755	valid_1's auc: 0.81673	valid_1's binary_logloss: 0.164976
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.839656	valid_0's binary_logloss: 0.150015	valid_1's auc: 0.822663	valid_1's binary_logloss: 0.159866
[3]	valid_0's auc: 0.847827	valid_0's binary_logloss: 0.145888	valid_1's auc: 0.829595	valid_1's binary_logloss: 0.156171
[4]	valid_0's auc: 0.851153	valid_0's binary_logloss: 0.142542	valid_1's auc: 0.831052	valid_1's binary_logloss: 0.153261
[5]	valid_0's auc: 0.854418	valid_0's binary_logloss: 0.139824	valid_1's auc: 0.832726	valid_1's binary_logloss: 0.150974
[6]	valid_0's auc: 0.85615	valid_0's binary_logloss: 0.137634	valid_1's auc: 0.835578	valid_1's binary_logloss: 0.148988
[7]	valid_0's auc: 0.857116	valid_0's binary_logloss: 0.135891	valid_1's auc: 0.834971	valid_1's binary_logloss: 0.147626
[8]	valid_0's auc: 0.859522	valid_0's binary_logloss: 0.134235	valid_1's auc: 0.836528	valid_1's binary_logloss: 0.146258
[9]	valid_0's auc: 0.861901	valid_0's binary_logloss: 0.132802	valid_1's auc: 0.8366	valid_1's binary_logloss: 0.145288
[10]	valid_0's auc: 0.863552	valid_0's binary_logloss: 0.131531	valid_1's auc: 0.835683	valid_1's binary_logloss: 0.144554
[11]	valid_0's auc: 0.865032	valid_0's binary_logloss: 0.13038	valid_1's auc: 0.835477	valid_1's binary_logloss: 0.143965
[12]	valid_0's auc: 0.867552	valid_0's binary_logloss: 0.12931	valid_1's auc: 0.837042	valid_1's binary_logloss: 0.14326
[13]	valid_0's auc: 0.870662	valid_0's binary_logloss: 0.128333	valid_1's auc: 0.837981	valid_1's binary_logloss: 0.142696
[14]	valid_0's auc: 0.872498	valid_0's binary_logloss: 0.127408	valid_1's auc: 0.837713	valid_1's binary_logloss: 0.142262
[15]	valid_0's auc: 0.873589	valid_0's binary_logloss: 0.126603	valid_1's auc: 0.837256	valid_1's binary_logloss: 0.141869
[16]	valid_0's auc: 0.875441	valid_0's binary_logloss: 0.125783	valid_1's auc: 0.837912	valid_1's binary_logloss: 0.141528
[17]	valid_0's auc: 0.877154	valid_0's binary_logloss: 0.125036	valid_1's auc: 0.836689	valid_1's binary_logloss: 0.141384
[18]	valid_0's auc: 0.878205	valid_0's binary_logloss: 0.124367	valid_1's auc: 0.835872	valid_1's binary_logloss: 0.141256
[19]	valid_0's auc: 0.879502	valid_0's binary_logloss: 0.12371	valid_1's auc: 0.835242	valid_1's binary_logloss: 0.141186
[20]	valid_0's auc: 0.880623	valid_0's binary_logloss: 0.123116	valid_1's auc: 0.835731	valid_1's binary_logloss: 0.140946
[21]	valid_0's auc: 0.881898	valid_0's binary_logloss: 0.122562	valid_1's auc: 0.834984	valid_1's binary_logloss: 0.140914
[22]	valid_0's auc: 0.882919	valid_0's binary_logloss: 0.122011	valid_1's auc: 0.83655	valid_1's binary_logloss: 0.140596
[23]	valid_0's auc: 0.88356	valid_0's binary_logloss: 0.121524	valid_1's auc: 0.836903	valid_1's binary_logloss: 0.140423
[24]	valid_0's auc: 0.884733	valid_0's binary_logloss: 0.120948	valid_1's auc: 0.837346	valid_1's binary_logloss: 0.140282
[25]	valid_0's auc: 0.885783	valid_0's binary_logloss: 0.120481	valid_1's auc: 0.837461	valid_1's binary_logloss: 0.140158
[26]	valid_0's auc: 0.887006	valid_0's binary_logloss: 0.119965	valid_1's auc: 0.837303	valid_1's binary_logloss: 0.140169
[27]	valid_0's auc: 0.887947	valid_0's binary_logloss: 0.119452	valid_1's auc: 0.837557	valid_1's binary_logloss: 0.140113
[28]	valid_0's auc: 0.888786	valid_0's binary_logloss: 0.11903	valid_1's auc: 0.837108	valid_1's binary_logloss: 0.140154
[29]	valid_0's auc: 0.890614	valid_0's binary_logloss: 0.118542	valid_1's auc: 0.837503	valid_1's binary_logloss: 0.14008
[30]	valid_0's auc: 0.891023	valid_0's binary_logloss: 0.118199	valid_1's auc: 0.83748	valid_1's binary_logloss: 0.140051
[31]	valid_0's auc: 0.891989	valid_0's binary_logloss: 0.117866	valid_1's auc: 0.837533	valid_1's binary_logloss: 0.140006
[32]	valid_0's auc: 0.892909	valid_0's binary_logloss: 0.117477	valid_1's auc: 0.83708	valid_1's binary_logloss: 0.140054
[33]	valid_0's auc: 0.893597	valid_0's binary_logloss: 0.117091	valid_1's auc: 0.836874	valid_1's binary_logloss: 0.140061
[34]	valid_0's auc: 0.894331	valid_0's binary_logloss: 0.116711	valid_1's auc: 0.836404	valid_1's binary_logloss: 0.140111
[35]	valid_0's auc: 0.895331	valid_0's binary_logloss: 0.116306	valid_1's auc: 0.837102	valid_1's binary_logloss: 0.139942
[36]	valid_0's auc: 0.895954	valid_0's binary_logloss: 0.115973	valid_1's auc: 0.837138	valid_1's binary_logloss: 0.139958
[37]	valid_0's auc: 0.896701	valid_0's binary_logloss: 0.115673	valid_1's auc: 0.837045	valid_1's binary_logloss: 0.139954
[38]	valid_0's auc: 0.897344	valid_0's binary_logloss: 0.115335	valid_1's auc: 0.836585	valid_1's binary_logloss: 0.140026
[39]	valid_0's auc: 0.897605	valid_0's binary_logloss: 0.115043	valid_1's auc: 0.836359	valid_1's binary_logloss: 0.140104
[40]	valid_0's auc: 0.898073	valid_0's binary_logloss: 0.114754	valid_1's auc: 0.836432	valid_1's binary_logloss: 0.140096
[41]	valid_0's auc: 0.898701	valid_0's binary_logloss: 0.114389	valid_1's auc: 0.836355	valid_1's binary_logloss: 0.140115
[42]	valid_0's auc: 0.898936	valid_0's binary_logloss: 0.114144	valid_1's auc: 0.836485	valid_1's binary_logloss: 0.14011
[43]	valid_0's auc: 0.899275	valid_0's binary_logloss: 0.11385	valid_1's auc: 0.836213	valid_1's binary_logloss: 0.140214
Early stopping, best iteration is:
[13]	valid_0's auc: 0.870662	valid_0's binary_logloss: 0.128333	valid_1's auc: 0.837981	valid_1's binary_logloss: 0.142696
[1]	valid_0's auc: 0.824873	valid_0's binary_logloss: 0.156222	valid_1's auc: 0.817791	valid_1's binary_logloss: 0.165077
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.829172	valid_0's binary_logloss: 0.151168	valid_1's auc: 0.823373	valid_1's binary_logloss: 0.160071
[3]	valid_0's auc: 0.836076	valid_0's binary_logloss: 0.147371	valid_1's auc: 0.829343	valid_1's binary_logloss: 0.156297
[4]	valid_0's auc: 0.839875	valid_0's binary_logloss: 0.14444	valid_1's auc: 0.833421	valid_1's binary_logloss: 0.153356
[5]	valid_0's auc: 0.84413	valid_0's binary_logloss: 0.142061	valid_1's auc: 0.835156	valid_1's binary_logloss: 0.151047
[6]	valid_0's auc: 0.846462	valid_0's binary_logloss: 0.140071	valid_1's auc: 0.835505	valid_1's binary_logloss: 0.14915
[7]	valid_0's auc: 0.847647	valid_0's binary_logloss: 0.138475	valid_1's auc: 0.835469	valid_1's binary_logloss: 0.147559
[8]	valid_0's auc: 0.848591	valid_0's binary_logloss: 0.13704	valid_1's auc: 0.835911	valid_1's binary_logloss: 0.146381
[9]	valid_0's auc: 0.849213	valid_0's binary_logloss: 0.135881	valid_1's auc: 0.838015	valid_1's binary_logloss: 0.1453
[10]	valid_0's auc: 0.850856	valid_0's binary_logloss: 0.134808	valid_1's auc: 0.837498	valid_1's binary_logloss: 0.144374
[11]	valid_0's auc: 0.852026	valid_0's binary_logloss: 0.133912	valid_1's auc: 0.836992	valid_1's binary_logloss: 0.143626
[12]	valid_0's auc: 0.853769	valid_0's binary_logloss: 0.133022	valid_1's auc: 0.836788	valid_1's binary_logloss: 0.143124
[13]	valid_0's auc: 0.85483	valid_0's binary_logloss: 0.132302	valid_1's auc: 0.836757	valid_1's binary_logloss: 0.142528
[14]	valid_0's auc: 0.855718	valid_0's binary_logloss: 0.131638	valid_1's auc: 0.835694	valid_1's binary_logloss: 0.142224
[15]	valid_0's auc: 0.856954	valid_0's binary_logloss: 0.131049	valid_1's auc: 0.835368	valid_1's binary_logloss: 0.141812
[16]	valid_0's auc: 0.857856	valid_0's binary_logloss: 0.130502	valid_1's auc: 0.834835	valid_1's binary_logloss: 0.141587
[17]	valid_0's auc: 0.85911	valid_0's binary_logloss: 0.129988	valid_1's auc: 0.835354	valid_1's binary_logloss: 0.141283
[18]	valid_0's auc: 0.860451	valid_0's binary_logloss: 0.129489	valid_1's auc: 0.835311	valid_1's binary_logloss: 0.141081
[19]	valid_0's auc: 0.861474	valid_0's binary_logloss: 0.129065	valid_1's auc: 0.834787	valid_1's binary_logloss: 0.140992
[20]	valid_0's auc: 0.862401	valid_0's binary_logloss: 0.128653	valid_1's auc: 0.834345	valid_1's binary_logloss: 0.140795
[21]	valid_0's auc: 0.86312	valid_0's binary_logloss: 0.128288	valid_1's auc: 0.833804	valid_1's binary_logloss: 0.140723
[22]	valid_0's auc: 0.86391	valid_0's binary_logloss: 0.127911	valid_1's auc: 0.833837	valid_1's binary_logloss: 0.140571
[23]	valid_0's auc: 0.8644	valid_0's binary_logloss: 0.127608	valid_1's auc: 0.833222	valid_1's binary_logloss: 0.140512
[24]	valid_0's auc: 0.865301	valid_0's binary_logloss: 0.127243	valid_1's auc: 0.832726	valid_1's binary_logloss: 0.140541
[25]	valid_0's auc: 0.866437	valid_0's binary_logloss: 0.126901	valid_1's auc: 0.832998	valid_1's binary_logloss: 0.140458
[26]	valid_0's auc: 0.867262	valid_0's binary_logloss: 0.126595	valid_1's auc: 0.833056	valid_1's binary_logloss: 0.140424
[27]	valid_0's auc: 0.86794	valid_0's binary_logloss: 0.126301	valid_1's auc: 0.832427	valid_1's binary_logloss: 0.140421
[28]	valid_0's auc: 0.869472	valid_0's binary_logloss: 0.125953	valid_1's auc: 0.833075	valid_1's binary_logloss: 0.14028
[29]	valid_0's auc: 0.870369	valid_0's binary_logloss: 0.125647	valid_1's auc: 0.833494	valid_1's binary_logloss: 0.140215
[30]	valid_0's auc: 0.871105	valid_0's binary_logloss: 0.12536	valid_1's auc: 0.83327	valid_1's binary_logloss: 0.140214
[31]	valid_0's auc: 0.871414	valid_0's binary_logloss: 0.125161	valid_1's auc: 0.833041	valid_1's binary_logloss: 0.140216
[32]	valid_0's auc: 0.872281	valid_0's binary_logloss: 0.12493	valid_1's auc: 0.833344	valid_1's binary_logloss: 0.140148
[33]	valid_0's auc: 0.873038	valid_0's binary_logloss: 0.124672	valid_1's auc: 0.833307	valid_1's binary_logloss: 0.140091
[34]	valid_0's auc: 0.873571	valid_0's binary_logloss: 0.124499	valid_1's auc: 0.833239	valid_1's binary_logloss: 0.140069
[35]	valid_0's auc: 0.874263	valid_0's binary_logloss: 0.124311	valid_1's auc: 0.833202	valid_1's binary_logloss: 0.140067
[36]	valid_0's auc: 0.87467	valid_0's binary_logloss: 0.124165	valid_1's auc: 0.833345	valid_1's binary_logloss: 0.140029
[37]	valid_0's auc: 0.875299	valid_0's binary_logloss: 0.123937	valid_1's auc: 0.833447	valid_1's binary_logloss: 0.140009
[38]	valid_0's auc: 0.876178	valid_0's binary_logloss: 0.123686	valid_1's auc: 0.833499	valid_1's binary_logloss: 0.139986
[39]	valid_0's auc: 0.876802	valid_0's binary_logloss: 0.123446	valid_1's auc: 0.833868	valid_1's binary_logloss: 0.139955
Early stopping, best iteration is:
[9]	valid_0's auc: 0.849213	valid_0's binary_logloss: 0.135881	valid_1's auc: 0.838015	valid_1's binary_logloss: 0.1453
[1]	valid_0's auc: 0.821831	valid_0's binary_logloss: 0.156469	valid_1's auc: 0.817525	valid_1's binary_logloss: 0.165188
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.830014	valid_0's binary_logloss: 0.151109	valid_1's auc: 0.823491	valid_1's binary_logloss: 0.159651
[3]	valid_0's auc: 0.839606	valid_0's binary_logloss: 0.147325	valid_1's auc: 0.832736	valid_1's binary_logloss: 0.156031
[4]	valid_0's auc: 0.842933	valid_0's binary_logloss: 0.144392	valid_1's auc: 0.836202	valid_1's binary_logloss: 0.15311
[5]	valid_0's auc: 0.845714	valid_0's binary_logloss: 0.141965	valid_1's auc: 0.838652	valid_1's binary_logloss: 0.150749
[6]	valid_0's auc: 0.848431	valid_0's binary_logloss: 0.13995	valid_1's auc: 0.840279	valid_1's binary_logloss: 0.148948
[7]	valid_0's auc: 0.849908	valid_0's binary_logloss: 0.138315	valid_1's auc: 0.840328	valid_1's binary_logloss: 0.14741
[8]	valid_0's auc: 0.852054	valid_0's binary_logloss: 0.136907	valid_1's auc: 0.83901	valid_1's binary_logloss: 0.146175
[9]	valid_0's auc: 0.853186	valid_0's binary_logloss: 0.135648	valid_1's auc: 0.83787	valid_1's binary_logloss: 0.145198
[10]	valid_0's auc: 0.85449	valid_0's binary_logloss: 0.134596	valid_1's auc: 0.837845	valid_1's binary_logloss: 0.144271
[11]	valid_0's auc: 0.855485	valid_0's binary_logloss: 0.133677	valid_1's auc: 0.838688	valid_1's binary_logloss: 0.14351
[12]	valid_0's auc: 0.856918	valid_0's binary_logloss: 0.132832	valid_1's auc: 0.838593	valid_1's binary_logloss: 0.142834
[13]	valid_0's auc: 0.857461	valid_0's binary_logloss: 0.132079	valid_1's auc: 0.838477	valid_1's binary_logloss: 0.142308
[14]	valid_0's auc: 0.858342	valid_0's binary_logloss: 0.131428	valid_1's auc: 0.838195	valid_1's binary_logloss: 0.141925
[15]	valid_0's auc: 0.858926	valid_0's binary_logloss: 0.130816	valid_1's auc: 0.838543	valid_1's binary_logloss: 0.141466
[16]	valid_0's auc: 0.859532	valid_0's binary_logloss: 0.130275	valid_1's auc: 0.838295	valid_1's binary_logloss: 0.141112
[17]	valid_0's auc: 0.860793	valid_0's binary_logloss: 0.129728	valid_1's auc: 0.837788	valid_1's binary_logloss: 0.140844
[18]	valid_0's auc: 0.861753	valid_0's binary_logloss: 0.12924	valid_1's auc: 0.837779	valid_1's binary_logloss: 0.140599
[19]	valid_0's auc: 0.86298	valid_0's binary_logloss: 0.12873	valid_1's auc: 0.837848	valid_1's binary_logloss: 0.14042
[20]	valid_0's auc: 0.863577	valid_0's binary_logloss: 0.128318	valid_1's auc: 0.837708	valid_1's binary_logloss: 0.140222
[21]	valid_0's auc: 0.864273	valid_0's binary_logloss: 0.127905	valid_1's auc: 0.838031	valid_1's binary_logloss: 0.140093
[22]	valid_0's auc: 0.865086	valid_0's binary_logloss: 0.127531	valid_1's auc: 0.837779	valid_1's binary_logloss: 0.139982
[23]	valid_0's auc: 0.865788	valid_0's binary_logloss: 0.127146	valid_1's auc: 0.837827	valid_1's binary_logloss: 0.139856
[24]	valid_0's auc: 0.866662	valid_0's binary_logloss: 0.126815	valid_1's auc: 0.837785	valid_1's binary_logloss: 0.139755
[25]	valid_0's auc: 0.867441	valid_0's binary_logloss: 0.126498	valid_1's auc: 0.838008	valid_1's binary_logloss: 0.139673
[26]	valid_0's auc: 0.86805	valid_0's binary_logloss: 0.126176	valid_1's auc: 0.838301	valid_1's binary_logloss: 0.139585
[27]	valid_0's auc: 0.868525	valid_0's binary_logloss: 0.12589	valid_1's auc: 0.838152	valid_1's binary_logloss: 0.139564
[28]	valid_0's auc: 0.869107	valid_0's binary_logloss: 0.125656	valid_1's auc: 0.838221	valid_1's binary_logloss: 0.139481
[29]	valid_0's auc: 0.869754	valid_0's binary_logloss: 0.12538	valid_1's auc: 0.838255	valid_1's binary_logloss: 0.139428
[30]	valid_0's auc: 0.87056	valid_0's binary_logloss: 0.125113	valid_1's auc: 0.838733	valid_1's binary_logloss: 0.139354
[31]	valid_0's auc: 0.871162	valid_0's binary_logloss: 0.124861	valid_1's auc: 0.838979	valid_1's binary_logloss: 0.139225
[32]	valid_0's auc: 0.871762	valid_0's binary_logloss: 0.124652	valid_1's auc: 0.838753	valid_1's binary_logloss: 0.139233
[33]	valid_0's auc: 0.872722	valid_0's binary_logloss: 0.124394	valid_1's auc: 0.839239	valid_1's binary_logloss: 0.1391
[34]	valid_0's auc: 0.87368	valid_0's binary_logloss: 0.124127	valid_1's auc: 0.839489	valid_1's binary_logloss: 0.139029
[35]	valid_0's auc: 0.874492	valid_0's binary_logloss: 0.12392	valid_1's auc: 0.839553	valid_1's binary_logloss: 0.139024
[36]	valid_0's auc: 0.875059	valid_0's binary_logloss: 0.123723	valid_1's auc: 0.839658	valid_1's binary_logloss: 0.138948
[37]	valid_0's auc: 0.875726	valid_0's binary_logloss: 0.123495	valid_1's auc: 0.839391	valid_1's binary_logloss: 0.139005
Early stopping, best iteration is:
[7]	valid_0's auc: 0.849908	valid_0's binary_logloss: 0.138315	valid_1's auc: 0.840328	valid_1's binary_logloss: 0.14741
[1]	valid_0's auc: 0.821427	valid_0's binary_logloss: 0.156591	valid_1's auc: 0.81711	valid_1's binary_logloss: 0.165271
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.827907	valid_0's binary_logloss: 0.151333	valid_1's auc: 0.821466	valid_1's binary_logloss: 0.160205
[3]	valid_0's auc: 0.837845	valid_0's binary_logloss: 0.147475	valid_1's auc: 0.828667	valid_1's binary_logloss: 0.156481
[4]	valid_0's auc: 0.840614	valid_0's binary_logloss: 0.144432	valid_1's auc: 0.831943	valid_1's binary_logloss: 0.153529
[5]	valid_0's auc: 0.843179	valid_0's binary_logloss: 0.142066	valid_1's auc: 0.834251	valid_1's binary_logloss: 0.151202
[6]	valid_0's auc: 0.843372	valid_0's binary_logloss: 0.140161	valid_1's auc: 0.834689	valid_1's binary_logloss: 0.14929
[7]	valid_0's auc: 0.844766	valid_0's binary_logloss: 0.138478	valid_1's auc: 0.835816	valid_1's binary_logloss: 0.147704
[8]	valid_0's auc: 0.847116	valid_0's binary_logloss: 0.137115	valid_1's auc: 0.836076	valid_1's binary_logloss: 0.146539
[9]	valid_0's auc: 0.850254	valid_0's binary_logloss: 0.135912	valid_1's auc: 0.836014	valid_1's binary_logloss: 0.145594
[10]	valid_0's auc: 0.851797	valid_0's binary_logloss: 0.13482	valid_1's auc: 0.836595	valid_1's binary_logloss: 0.144731
[11]	valid_0's auc: 0.852928	valid_0's binary_logloss: 0.133901	valid_1's auc: 0.835902	valid_1's binary_logloss: 0.144167
[12]	valid_0's auc: 0.854858	valid_0's binary_logloss: 0.133062	valid_1's auc: 0.836859	valid_1's binary_logloss: 0.143545
[13]	valid_0's auc: 0.856392	valid_0's binary_logloss: 0.13232	valid_1's auc: 0.837517	valid_1's binary_logloss: 0.14296
[14]	valid_0's auc: 0.857549	valid_0's binary_logloss: 0.131636	valid_1's auc: 0.837302	valid_1's binary_logloss: 0.142542
[15]	valid_0's auc: 0.858338	valid_0's binary_logloss: 0.131026	valid_1's auc: 0.837003	valid_1's binary_logloss: 0.142141
[16]	valid_0's auc: 0.859394	valid_0's binary_logloss: 0.130449	valid_1's auc: 0.836672	valid_1's binary_logloss: 0.141779
[17]	valid_0's auc: 0.860095	valid_0's binary_logloss: 0.129926	valid_1's auc: 0.83723	valid_1's binary_logloss: 0.141395
[18]	valid_0's auc: 0.861058	valid_0's binary_logloss: 0.129444	valid_1's auc: 0.837871	valid_1's binary_logloss: 0.141105
[19]	valid_0's auc: 0.862258	valid_0's binary_logloss: 0.128997	valid_1's auc: 0.83794	valid_1's binary_logloss: 0.140829
[20]	valid_0's auc: 0.863204	valid_0's binary_logloss: 0.128569	valid_1's auc: 0.838426	valid_1's binary_logloss: 0.140558
[21]	valid_0's auc: 0.864129	valid_0's binary_logloss: 0.128163	valid_1's auc: 0.838539	valid_1's binary_logloss: 0.140401
[22]	valid_0's auc: 0.865066	valid_0's binary_logloss: 0.127776	valid_1's auc: 0.838552	valid_1's binary_logloss: 0.140268
[23]	valid_0's auc: 0.866036	valid_0's binary_logloss: 0.127415	valid_1's auc: 0.838667	valid_1's binary_logloss: 0.140163
[24]	valid_0's auc: 0.866894	valid_0's binary_logloss: 0.127047	valid_1's auc: 0.838711	valid_1's binary_logloss: 0.139983
[25]	valid_0's auc: 0.867646	valid_0's binary_logloss: 0.126759	valid_1's auc: 0.83898	valid_1's binary_logloss: 0.13986
[26]	valid_0's auc: 0.868321	valid_0's binary_logloss: 0.126486	valid_1's auc: 0.83866	valid_1's binary_logloss: 0.139795
[27]	valid_0's auc: 0.869533	valid_0's binary_logloss: 0.126137	valid_1's auc: 0.838474	valid_1's binary_logloss: 0.13973
[28]	valid_0's auc: 0.87009	valid_0's binary_logloss: 0.125841	valid_1's auc: 0.838418	valid_1's binary_logloss: 0.139661
[29]	valid_0's auc: 0.870611	valid_0's binary_logloss: 0.125574	valid_1's auc: 0.838527	valid_1's binary_logloss: 0.139586
[30]	valid_0's auc: 0.871288	valid_0's binary_logloss: 0.125351	valid_1's auc: 0.838578	valid_1's binary_logloss: 0.139533
[31]	valid_0's auc: 0.871941	valid_0's binary_logloss: 0.125092	valid_1's auc: 0.839062	valid_1's binary_logloss: 0.139431
[32]	valid_0's auc: 0.872841	valid_0's binary_logloss: 0.124816	valid_1's auc: 0.839243	valid_1's binary_logloss: 0.139362
[33]	valid_0's auc: 0.873443	valid_0's binary_logloss: 0.124593	valid_1's auc: 0.839482	valid_1's binary_logloss: 0.139288
[34]	valid_0's auc: 0.874317	valid_0's binary_logloss: 0.124327	valid_1's auc: 0.839612	valid_1's binary_logloss: 0.139266
[35]	valid_0's auc: 0.875065	valid_0's binary_logloss: 0.124053	valid_1's auc: 0.839746	valid_1's binary_logloss: 0.139241
[36]	valid_0's auc: 0.875683	valid_0's binary_logloss: 0.123777	valid_1's auc: 0.840074	valid_1's binary_logloss: 0.139138
[37]	valid_0's auc: 0.876241	valid_0's binary_logloss: 0.123571	valid_1's auc: 0.840105	valid_1's binary_logloss: 0.139101
[38]	valid_0's auc: 0.876923	valid_0's binary_logloss: 0.123355	valid_1's auc: 0.839838	valid_1's binary_logloss: 0.139141
[39]	valid_0's auc: 0.877193	valid_0's binary_logloss: 0.123186	valid_1's auc: 0.84006	valid_1's binary_logloss: 0.139095
[40]	valid_0's auc: 0.877543	valid_0's binary_logloss: 0.123003	valid_1's auc: 0.839944	valid_1's binary_logloss: 0.139144
[41]	valid_0's auc: 0.877994	valid_0's binary_logloss: 0.122806	valid_1's auc: 0.839925	valid_1's binary_logloss: 0.139157
[42]	valid_0's auc: 0.878413	valid_0's binary_logloss: 0.122608	valid_1's auc: 0.839588	valid_1's binary_logloss: 0.139183
[43]	valid_0's auc: 0.87868	valid_0's binary_logloss: 0.122482	valid_1's auc: 0.839828	valid_1's binary_logloss: 0.139122
[44]	valid_0's auc: 0.878844	valid_0's binary_logloss: 0.122346	valid_1's auc: 0.839999	valid_1's binary_logloss: 0.139112
[45]	valid_0's auc: 0.879115	valid_0's binary_logloss: 0.12219	valid_1's auc: 0.839862	valid_1's binary_logloss: 0.139105
[46]	valid_0's auc: 0.879426	valid_0's binary_logloss: 0.122041	valid_1's auc: 0.839926	valid_1's binary_logloss: 0.139086
[47]	valid_0's auc: 0.879882	valid_0's binary_logloss: 0.121829	valid_1's auc: 0.840044	valid_1's binary_logloss: 0.139063
[48]	valid_0's auc: 0.880499	valid_0's binary_logloss: 0.121624	valid_1's auc: 0.840013	valid_1's binary_logloss: 0.139091
[49]	valid_0's auc: 0.880866	valid_0's binary_logloss: 0.121448	valid_1's auc: 0.839755	valid_1's binary_logloss: 0.139167
[50]	valid_0's auc: 0.881448	valid_0's binary_logloss: 0.121247	valid_1's auc: 0.839886	valid_1's binary_logloss: 0.139173
[51]	valid_0's auc: 0.881689	valid_0's binary_logloss: 0.121135	valid_1's auc: 0.840158	valid_1's binary_logloss: 0.139144
[52]	valid_0's auc: 0.882122	valid_0's binary_logloss: 0.12097	valid_1's auc: 0.839978	valid_1's binary_logloss: 0.139193
[53]	valid_0's auc: 0.882285	valid_0's binary_logloss: 0.120863	valid_1's auc: 0.83979	valid_1's binary_logloss: 0.139241
[54]	valid_0's auc: 0.882575	valid_0's binary_logloss: 0.120721	valid_1's auc: 0.839521	valid_1's binary_logloss: 0.139335
[55]	valid_0's auc: 0.88311	valid_0's binary_logloss: 0.120518	valid_1's auc: 0.839558	valid_1's binary_logloss: 0.139336
[56]	valid_0's auc: 0.883389	valid_0's binary_logloss: 0.120373	valid_1's auc: 0.83979	valid_1's binary_logloss: 0.139326
[57]	valid_0's auc: 0.88369	valid_0's binary_logloss: 0.120248	valid_1's auc: 0.839887	valid_1's binary_logloss: 0.139321
[58]	valid_0's auc: 0.884025	valid_0's binary_logloss: 0.120078	valid_1's auc: 0.839684	valid_1's binary_logloss: 0.139341
[59]	valid_0's auc: 0.884477	valid_0's binary_logloss: 0.119928	valid_1's auc: 0.839523	valid_1's binary_logloss: 0.139368
[60]	valid_0's auc: 0.884659	valid_0's binary_logloss: 0.119822	valid_1's auc: 0.839745	valid_1's binary_logloss: 0.139362
[61]	valid_0's auc: 0.885121	valid_0's binary_logloss: 0.119618	valid_1's auc: 0.839533	valid_1's binary_logloss: 0.139434
[62]	valid_0's auc: 0.885341	valid_0's binary_logloss: 0.119477	valid_1's auc: 0.839843	valid_1's binary_logloss: 0.139385
[63]	valid_0's auc: 0.885487	valid_0's binary_logloss: 0.119367	valid_1's auc: 0.839843	valid_1's binary_logloss: 0.139409
[64]	valid_0's auc: 0.885645	valid_0's binary_logloss: 0.119254	valid_1's auc: 0.839603	valid_1's binary_logloss: 0.139483
[65]	valid_0's auc: 0.886	valid_0's binary_logloss: 0.119067	valid_1's auc: 0.839806	valid_1's binary_logloss: 0.139511
[66]	valid_0's auc: 0.886267	valid_0's binary_logloss: 0.118949	valid_1's auc: 0.839758	valid_1's binary_logloss: 0.139539
[67]	valid_0's auc: 0.886435	valid_0's binary_logloss: 0.118836	valid_1's auc: 0.83953	valid_1's binary_logloss: 0.139595
[68]	valid_0's auc: 0.886593	valid_0's binary_logloss: 0.118728	valid_1's auc: 0.839422	valid_1's binary_logloss: 0.139608
[69]	valid_0's auc: 0.886791	valid_0's binary_logloss: 0.118595	valid_1's auc: 0.839492	valid_1's binary_logloss: 0.139615
[70]	valid_0's auc: 0.886904	valid_0's binary_logloss: 0.118527	valid_1's auc: 0.83981	valid_1's binary_logloss: 0.139598
[71]	valid_0's auc: 0.887299	valid_0's binary_logloss: 0.118387	valid_1's auc: 0.839869	valid_1's binary_logloss: 0.139594
[72]	valid_0's auc: 0.887483	valid_0's binary_logloss: 0.118281	valid_1's auc: 0.839788	valid_1's binary_logloss: 0.139654
[73]	valid_0's auc: 0.887557	valid_0's binary_logloss: 0.118196	valid_1's auc: 0.840147	valid_1's binary_logloss: 0.139624
[74]	valid_0's auc: 0.887851	valid_0's binary_logloss: 0.118068	valid_1's auc: 0.840137	valid_1's binary_logloss: 0.139665
[75]	valid_0's auc: 0.888042	valid_0's binary_logloss: 0.117962	valid_1's auc: 0.840289	valid_1's binary_logloss: 0.139644
[76]	valid_0's auc: 0.888422	valid_0's binary_logloss: 0.11776	valid_1's auc: 0.83992	valid_1's binary_logloss: 0.139749
[77]	valid_0's auc: 0.88853	valid_0's binary_logloss: 0.117667	valid_1's auc: 0.839998	valid_1's binary_logloss: 0.139765
Early stopping, best iteration is:
[47]	valid_0's auc: 0.879882	valid_0's binary_logloss: 0.121829	valid_1's auc: 0.840044	valid_1's binary_logloss: 0.139063
[1]	valid_0's auc: 0.824873	valid_0's binary_logloss: 0.156222	valid_1's auc: 0.817791	valid_1's binary_logloss: 0.165077
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.829172	valid_0's binary_logloss: 0.151168	valid_1's auc: 0.823373	valid_1's binary_logloss: 0.160071
[3]	valid_0's auc: 0.836076	valid_0's binary_logloss: 0.147371	valid_1's auc: 0.829343	valid_1's binary_logloss: 0.156297
[4]	valid_0's auc: 0.839875	valid_0's binary_logloss: 0.14444	valid_1's auc: 0.833421	valid_1's binary_logloss: 0.153356
[5]	valid_0's auc: 0.84413	valid_0's binary_logloss: 0.142061	valid_1's auc: 0.835156	valid_1's binary_logloss: 0.151047
[6]	valid_0's auc: 0.846462	valid_0's binary_logloss: 0.140071	valid_1's auc: 0.835505	valid_1's binary_logloss: 0.14915
[7]	valid_0's auc: 0.847647	valid_0's binary_logloss: 0.138475	valid_1's auc: 0.835469	valid_1's binary_logloss: 0.147559
[8]	valid_0's auc: 0.848591	valid_0's binary_logloss: 0.13704	valid_1's auc: 0.835911	valid_1's binary_logloss: 0.146381
[9]	valid_0's auc: 0.849213	valid_0's binary_logloss: 0.135881	valid_1's auc: 0.838015	valid_1's binary_logloss: 0.1453
[10]	valid_0's auc: 0.850856	valid_0's binary_logloss: 0.134808	valid_1's auc: 0.837498	valid_1's binary_logloss: 0.144374
[11]	valid_0's auc: 0.852026	valid_0's binary_logloss: 0.133912	valid_1's auc: 0.836992	valid_1's binary_logloss: 0.143626
[12]	valid_0's auc: 0.853769	valid_0's binary_logloss: 0.133022	valid_1's auc: 0.836788	valid_1's binary_logloss: 0.143124
[13]	valid_0's auc: 0.85483	valid_0's binary_logloss: 0.132302	valid_1's auc: 0.836757	valid_1's binary_logloss: 0.142528
[14]	valid_0's auc: 0.855718	valid_0's binary_logloss: 0.131638	valid_1's auc: 0.835694	valid_1's binary_logloss: 0.142224
[15]	valid_0's auc: 0.856954	valid_0's binary_logloss: 0.131049	valid_1's auc: 0.835368	valid_1's binary_logloss: 0.141812
[16]	valid_0's auc: 0.857856	valid_0's binary_logloss: 0.130502	valid_1's auc: 0.834835	valid_1's binary_logloss: 0.141587
[17]	valid_0's auc: 0.85911	valid_0's binary_logloss: 0.129988	valid_1's auc: 0.835354	valid_1's binary_logloss: 0.141283
[18]	valid_0's auc: 0.860451	valid_0's binary_logloss: 0.129489	valid_1's auc: 0.835311	valid_1's binary_logloss: 0.141081
[19]	valid_0's auc: 0.861474	valid_0's binary_logloss: 0.129065	valid_1's auc: 0.834787	valid_1's binary_logloss: 0.140992
[20]	valid_0's auc: 0.862401	valid_0's binary_logloss: 0.128653	valid_1's auc: 0.834345	valid_1's binary_logloss: 0.140795
[21]	valid_0's auc: 0.86312	valid_0's binary_logloss: 0.128288	valid_1's auc: 0.833804	valid_1's binary_logloss: 0.140723
[22]	valid_0's auc: 0.86391	valid_0's binary_logloss: 0.127911	valid_1's auc: 0.833837	valid_1's binary_logloss: 0.140571
[23]	valid_0's auc: 0.8644	valid_0's binary_logloss: 0.127608	valid_1's auc: 0.833222	valid_1's binary_logloss: 0.140512
[24]	valid_0's auc: 0.865301	valid_0's binary_logloss: 0.127243	valid_1's auc: 0.832726	valid_1's binary_logloss: 0.140541
[25]	valid_0's auc: 0.866437	valid_0's binary_logloss: 0.126901	valid_1's auc: 0.832998	valid_1's binary_logloss: 0.140458
[26]	valid_0's auc: 0.867262	valid_0's binary_logloss: 0.126595	valid_1's auc: 0.833056	valid_1's binary_logloss: 0.140424
[27]	valid_0's auc: 0.86794	valid_0's binary_logloss: 0.126301	valid_1's auc: 0.832427	valid_1's binary_logloss: 0.140421
[28]	valid_0's auc: 0.869472	valid_0's binary_logloss: 0.125953	valid_1's auc: 0.833075	valid_1's binary_logloss: 0.14028
[29]	valid_0's auc: 0.870369	valid_0's binary_logloss: 0.125647	valid_1's auc: 0.833494	valid_1's binary_logloss: 0.140215
[30]	valid_0's auc: 0.871105	valid_0's binary_logloss: 0.12536	valid_1's auc: 0.83327	valid_1's binary_logloss: 0.140214
[31]	valid_0's auc: 0.871414	valid_0's binary_logloss: 0.125161	valid_1's auc: 0.833041	valid_1's binary_logloss: 0.140216
[32]	valid_0's auc: 0.872281	valid_0's binary_logloss: 0.12493	valid_1's auc: 0.833344	valid_1's binary_logloss: 0.140148
[33]	valid_0's auc: 0.873038	valid_0's binary_logloss: 0.124672	valid_1's auc: 0.833307	valid_1's binary_logloss: 0.140091
[34]	valid_0's auc: 0.873571	valid_0's binary_logloss: 0.124499	valid_1's auc: 0.833239	valid_1's binary_logloss: 0.140069
[35]	valid_0's auc: 0.874263	valid_0's binary_logloss: 0.124311	valid_1's auc: 0.833202	valid_1's binary_logloss: 0.140067
[36]	valid_0's auc: 0.87467	valid_0's binary_logloss: 0.124165	valid_1's auc: 0.833345	valid_1's binary_logloss: 0.140029
[37]	valid_0's auc: 0.875299	valid_0's binary_logloss: 0.123937	valid_1's auc: 0.833447	valid_1's binary_logloss: 0.140009
[38]	valid_0's auc: 0.876178	valid_0's binary_logloss: 0.123686	valid_1's auc: 0.833499	valid_1's binary_logloss: 0.139986
[39]	valid_0's auc: 0.876802	valid_0's binary_logloss: 0.123446	valid_1's auc: 0.833868	valid_1's binary_logloss: 0.139955
Early stopping, best iteration is:
[9]	valid_0's auc: 0.849213	valid_0's binary_logloss: 0.135881	valid_1's auc: 0.838015	valid_1's binary_logloss: 0.1453
[1]	valid_0's auc: 0.821831	valid_0's binary_logloss: 0.156469	valid_1's auc: 0.817525	valid_1's binary_logloss: 0.165188
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.830014	valid_0's binary_logloss: 0.151109	valid_1's auc: 0.823491	valid_1's binary_logloss: 0.159651
[3]	valid_0's auc: 0.839606	valid_0's binary_logloss: 0.147325	valid_1's auc: 0.832736	valid_1's binary_logloss: 0.156031
[4]	valid_0's auc: 0.842933	valid_0's binary_logloss: 0.144392	valid_1's auc: 0.836202	valid_1's binary_logloss: 0.15311
[5]	valid_0's auc: 0.845714	valid_0's binary_logloss: 0.141965	valid_1's auc: 0.838652	valid_1's binary_logloss: 0.150749
[6]	valid_0's auc: 0.848431	valid_0's binary_logloss: 0.13995	valid_1's auc: 0.840279	valid_1's binary_logloss: 0.148948
[7]	valid_0's auc: 0.849908	valid_0's binary_logloss: 0.138315	valid_1's auc: 0.840328	valid_1's binary_logloss: 0.14741
[8]	valid_0's auc: 0.852054	valid_0's binary_logloss: 0.136907	valid_1's auc: 0.83901	valid_1's binary_logloss: 0.146175
[9]	valid_0's auc: 0.853186	valid_0's binary_logloss: 0.135648	valid_1's auc: 0.83787	valid_1's binary_logloss: 0.145198
[10]	valid_0's auc: 0.85449	valid_0's binary_logloss: 0.134596	valid_1's auc: 0.837845	valid_1's binary_logloss: 0.144271
[11]	valid_0's auc: 0.855485	valid_0's binary_logloss: 0.133677	valid_1's auc: 0.838688	valid_1's binary_logloss: 0.14351
[12]	valid_0's auc: 0.856918	valid_0's binary_logloss: 0.132832	valid_1's auc: 0.838593	valid_1's binary_logloss: 0.142834
[13]	valid_0's auc: 0.857461	valid_0's binary_logloss: 0.132079	valid_1's auc: 0.838477	valid_1's binary_logloss: 0.142308
[14]	valid_0's auc: 0.858342	valid_0's binary_logloss: 0.131428	valid_1's auc: 0.838195	valid_1's binary_logloss: 0.141925
[15]	valid_0's auc: 0.858926	valid_0's binary_logloss: 0.130816	valid_1's auc: 0.838543	valid_1's binary_logloss: 0.141466
[16]	valid_0's auc: 0.859532	valid_0's binary_logloss: 0.130275	valid_1's auc: 0.838295	valid_1's binary_logloss: 0.141112
[17]	valid_0's auc: 0.860793	valid_0's binary_logloss: 0.129728	valid_1's auc: 0.837788	valid_1's binary_logloss: 0.140844
[18]	valid_0's auc: 0.861753	valid_0's binary_logloss: 0.12924	valid_1's auc: 0.837779	valid_1's binary_logloss: 0.140599
[19]	valid_0's auc: 0.86298	valid_0's binary_logloss: 0.12873	valid_1's auc: 0.837848	valid_1's binary_logloss: 0.14042
[20]	valid_0's auc: 0.863577	valid_0's binary_logloss: 0.128318	valid_1's auc: 0.837708	valid_1's binary_logloss: 0.140222
[21]	valid_0's auc: 0.864273	valid_0's binary_logloss: 0.127905	valid_1's auc: 0.838031	valid_1's binary_logloss: 0.140093
[22]	valid_0's auc: 0.865086	valid_0's binary_logloss: 0.127531	valid_1's auc: 0.837779	valid_1's binary_logloss: 0.139982
[23]	valid_0's auc: 0.865788	valid_0's binary_logloss: 0.127146	valid_1's auc: 0.837827	valid_1's binary_logloss: 0.139856
[24]	valid_0's auc: 0.866662	valid_0's binary_logloss: 0.126815	valid_1's auc: 0.837785	valid_1's binary_logloss: 0.139755
[25]	valid_0's auc: 0.867441	valid_0's binary_logloss: 0.126498	valid_1's auc: 0.838008	valid_1's binary_logloss: 0.139673
[26]	valid_0's auc: 0.86805	valid_0's binary_logloss: 0.126176	valid_1's auc: 0.838301	valid_1's binary_logloss: 0.139585
[27]	valid_0's auc: 0.868525	valid_0's binary_logloss: 0.12589	valid_1's auc: 0.838152	valid_1's binary_logloss: 0.139564
[28]	valid_0's auc: 0.869107	valid_0's binary_logloss: 0.125656	valid_1's auc: 0.838221	valid_1's binary_logloss: 0.139481
[29]	valid_0's auc: 0.869754	valid_0's binary_logloss: 0.12538	valid_1's auc: 0.838255	valid_1's binary_logloss: 0.139428
[30]	valid_0's auc: 0.87056	valid_0's binary_logloss: 0.125113	valid_1's auc: 0.838733	valid_1's binary_logloss: 0.139354
[31]	valid_0's auc: 0.871162	valid_0's binary_logloss: 0.124861	valid_1's auc: 0.838979	valid_1's binary_logloss: 0.139225
[32]	valid_0's auc: 0.871762	valid_0's binary_logloss: 0.124652	valid_1's auc: 0.838753	valid_1's binary_logloss: 0.139233
[33]	valid_0's auc: 0.872722	valid_0's binary_logloss: 0.124394	valid_1's auc: 0.839239	valid_1's binary_logloss: 0.1391
[34]	valid_0's auc: 0.87368	valid_0's binary_logloss: 0.124127	valid_1's auc: 0.839489	valid_1's binary_logloss: 0.139029
[35]	valid_0's auc: 0.874492	valid_0's binary_logloss: 0.12392	valid_1's auc: 0.839553	valid_1's binary_logloss: 0.139024
[36]	valid_0's auc: 0.875059	valid_0's binary_logloss: 0.123723	valid_1's auc: 0.839658	valid_1's binary_logloss: 0.138948
[37]	valid_0's auc: 0.875726	valid_0's binary_logloss: 0.123495	valid_1's auc: 0.839391	valid_1's binary_logloss: 0.139005
Early stopping, best iteration is:
[7]	valid_0's auc: 0.849908	valid_0's binary_logloss: 0.138315	valid_1's auc: 0.840328	valid_1's binary_logloss: 0.14741
[1]	valid_0's auc: 0.821427	valid_0's binary_logloss: 0.156591	valid_1's auc: 0.81711	valid_1's binary_logloss: 0.165271
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.827907	valid_0's binary_logloss: 0.151333	valid_1's auc: 0.821466	valid_1's binary_logloss: 0.160205
[3]	valid_0's auc: 0.837845	valid_0's binary_logloss: 0.147475	valid_1's auc: 0.828667	valid_1's binary_logloss: 0.156481
[4]	valid_0's auc: 0.840614	valid_0's binary_logloss: 0.144432	valid_1's auc: 0.831943	valid_1's binary_logloss: 0.153529
[5]	valid_0's auc: 0.843179	valid_0's binary_logloss: 0.142066	valid_1's auc: 0.834251	valid_1's binary_logloss: 0.151202
[6]	valid_0's auc: 0.843372	valid_0's binary_logloss: 0.140161	valid_1's auc: 0.834689	valid_1's binary_logloss: 0.14929
[7]	valid_0's auc: 0.844766	valid_0's binary_logloss: 0.138478	valid_1's auc: 0.835816	valid_1's binary_logloss: 0.147704
[8]	valid_0's auc: 0.847116	valid_0's binary_logloss: 0.137115	valid_1's auc: 0.836076	valid_1's binary_logloss: 0.146539
[9]	valid_0's auc: 0.850254	valid_0's binary_logloss: 0.135912	valid_1's auc: 0.836014	valid_1's binary_logloss: 0.145594
[10]	valid_0's auc: 0.851797	valid_0's binary_logloss: 0.13482	valid_1's auc: 0.836595	valid_1's binary_logloss: 0.144731
[11]	valid_0's auc: 0.852928	valid_0's binary_logloss: 0.133901	valid_1's auc: 0.835902	valid_1's binary_logloss: 0.144167
[12]	valid_0's auc: 0.854858	valid_0's binary_logloss: 0.133062	valid_1's auc: 0.836859	valid_1's binary_logloss: 0.143545
[13]	valid_0's auc: 0.856392	valid_0's binary_logloss: 0.13232	valid_1's auc: 0.837517	valid_1's binary_logloss: 0.14296
[14]	valid_0's auc: 0.857549	valid_0's binary_logloss: 0.131636	valid_1's auc: 0.837302	valid_1's binary_logloss: 0.142542
[15]	valid_0's auc: 0.858338	valid_0's binary_logloss: 0.131026	valid_1's auc: 0.837003	valid_1's binary_logloss: 0.142141
[16]	valid_0's auc: 0.859394	valid_0's binary_logloss: 0.130449	valid_1's auc: 0.836672	valid_1's binary_logloss: 0.141779
[17]	valid_0's auc: 0.860095	valid_0's binary_logloss: 0.129926	valid_1's auc: 0.83723	valid_1's binary_logloss: 0.141395
[18]	valid_0's auc: 0.861058	valid_0's binary_logloss: 0.129444	valid_1's auc: 0.837871	valid_1's binary_logloss: 0.141105
[19]	valid_0's auc: 0.862258	valid_0's binary_logloss: 0.128997	valid_1's auc: 0.83794	valid_1's binary_logloss: 0.140829
[20]	valid_0's auc: 0.863204	valid_0's binary_logloss: 0.128569	valid_1's auc: 0.838426	valid_1's binary_logloss: 0.140558
[21]	valid_0's auc: 0.864129	valid_0's binary_logloss: 0.128163	valid_1's auc: 0.838539	valid_1's binary_logloss: 0.140401
[22]	valid_0's auc: 0.865066	valid_0's binary_logloss: 0.127776	valid_1's auc: 0.838552	valid_1's binary_logloss: 0.140268
[23]	valid_0's auc: 0.866036	valid_0's binary_logloss: 0.127415	valid_1's auc: 0.838667	valid_1's binary_logloss: 0.140163
[24]	valid_0's auc: 0.866894	valid_0's binary_logloss: 0.127047	valid_1's auc: 0.838711	valid_1's binary_logloss: 0.139983
[25]	valid_0's auc: 0.867646	valid_0's binary_logloss: 0.126759	valid_1's auc: 0.83898	valid_1's binary_logloss: 0.13986
[26]	valid_0's auc: 0.868321	valid_0's binary_logloss: 0.126486	valid_1's auc: 0.83866	valid_1's binary_logloss: 0.139795
[27]	valid_0's auc: 0.869533	valid_0's binary_logloss: 0.126137	valid_1's auc: 0.838474	valid_1's binary_logloss: 0.13973
[28]	valid_0's auc: 0.87009	valid_0's binary_logloss: 0.125841	valid_1's auc: 0.838418	valid_1's binary_logloss: 0.139661
[29]	valid_0's auc: 0.870611	valid_0's binary_logloss: 0.125574	valid_1's auc: 0.838527	valid_1's binary_logloss: 0.139586
[30]	valid_0's auc: 0.871288	valid_0's binary_logloss: 0.125351	valid_1's auc: 0.838578	valid_1's binary_logloss: 0.139533
[31]	valid_0's auc: 0.871941	valid_0's binary_logloss: 0.125092	valid_1's auc: 0.839062	valid_1's binary_logloss: 0.139431
[32]	valid_0's auc: 0.872841	valid_0's binary_logloss: 0.124816	valid_1's auc: 0.839243	valid_1's binary_logloss: 0.139362
[33]	valid_0's auc: 0.873443	valid_0's binary_logloss: 0.124593	valid_1's auc: 0.839482	valid_1's binary_logloss: 0.139288
[34]	valid_0's auc: 0.874317	valid_0's binary_logloss: 0.124327	valid_1's auc: 0.839612	valid_1's binary_logloss: 0.139266
[35]	valid_0's auc: 0.875065	valid_0's binary_logloss: 0.124053	valid_1's auc: 0.839746	valid_1's binary_logloss: 0.139241
[36]	valid_0's auc: 0.875683	valid_0's binary_logloss: 0.123777	valid_1's auc: 0.840074	valid_1's binary_logloss: 0.139138
[37]	valid_0's auc: 0.876241	valid_0's binary_logloss: 0.123571	valid_1's auc: 0.840105	valid_1's binary_logloss: 0.139101
[38]	valid_0's auc: 0.876923	valid_0's binary_logloss: 0.123355	valid_1's auc: 0.839838	valid_1's binary_logloss: 0.139141
[39]	valid_0's auc: 0.877193	valid_0's binary_logloss: 0.123186	valid_1's auc: 0.84006	valid_1's binary_logloss: 0.139095
[40]	valid_0's auc: 0.877543	valid_0's binary_logloss: 0.123003	valid_1's auc: 0.839944	valid_1's binary_logloss: 0.139144
[41]	valid_0's auc: 0.877994	valid_0's binary_logloss: 0.122806	valid_1's auc: 0.839925	valid_1's binary_logloss: 0.139157
[42]	valid_0's auc: 0.878413	valid_0's binary_logloss: 0.122608	valid_1's auc: 0.839588	valid_1's binary_logloss: 0.139183
[43]	valid_0's auc: 0.87868	valid_0's binary_logloss: 0.122482	valid_1's auc: 0.839828	valid_1's binary_logloss: 0.139122
[44]	valid_0's auc: 0.878844	valid_0's binary_logloss: 0.122346	valid_1's auc: 0.839999	valid_1's binary_logloss: 0.139112
[45]	valid_0's auc: 0.879115	valid_0's binary_logloss: 0.12219	valid_1's auc: 0.839862	valid_1's binary_logloss: 0.139105
[46]	valid_0's auc: 0.879426	valid_0's binary_logloss: 0.122041	valid_1's auc: 0.839926	valid_1's binary_logloss: 0.139086
[47]	valid_0's auc: 0.879882	valid_0's binary_logloss: 0.121829	valid_1's auc: 0.840044	valid_1's binary_logloss: 0.139063
[48]	valid_0's auc: 0.880499	valid_0's binary_logloss: 0.121624	valid_1's auc: 0.840013	valid_1's binary_logloss: 0.139091
[49]	valid_0's auc: 0.880866	valid_0's binary_logloss: 0.121448	valid_1's auc: 0.839755	valid_1's binary_logloss: 0.139167
[50]	valid_0's auc: 0.881448	valid_0's binary_logloss: 0.121247	valid_1's auc: 0.839886	valid_1's binary_logloss: 0.139173
[51]	valid_0's auc: 0.881689	valid_0's binary_logloss: 0.121135	valid_1's auc: 0.840158	valid_1's binary_logloss: 0.139144
[52]	valid_0's auc: 0.882122	valid_0's binary_logloss: 0.12097	valid_1's auc: 0.839978	valid_1's binary_logloss: 0.139193
[53]	valid_0's auc: 0.882285	valid_0's binary_logloss: 0.120863	valid_1's auc: 0.83979	valid_1's binary_logloss: 0.139241
[54]	valid_0's auc: 0.882575	valid_0's binary_logloss: 0.120721	valid_1's auc: 0.839521	valid_1's binary_logloss: 0.139335
[55]	valid_0's auc: 0.88311	valid_0's binary_logloss: 0.120518	valid_1's auc: 0.839558	valid_1's binary_logloss: 0.139336
[56]	valid_0's auc: 0.883389	valid_0's binary_logloss: 0.120373	valid_1's auc: 0.83979	valid_1's binary_logloss: 0.139326
[57]	valid_0's auc: 0.88369	valid_0's binary_logloss: 0.120248	valid_1's auc: 0.839887	valid_1's binary_logloss: 0.139321
[58]	valid_0's auc: 0.884025	valid_0's binary_logloss: 0.120078	valid_1's auc: 0.839684	valid_1's binary_logloss: 0.139341
[59]	valid_0's auc: 0.884477	valid_0's binary_logloss: 0.119928	valid_1's auc: 0.839523	valid_1's binary_logloss: 0.139368
[60]	valid_0's auc: 0.884659	valid_0's binary_logloss: 0.119822	valid_1's auc: 0.839745	valid_1's binary_logloss: 0.139362
[61]	valid_0's auc: 0.885121	valid_0's binary_logloss: 0.119618	valid_1's auc: 0.839533	valid_1's binary_logloss: 0.139434
[62]	valid_0's auc: 0.885341	valid_0's binary_logloss: 0.119477	valid_1's auc: 0.839843	valid_1's binary_logloss: 0.139385
[63]	valid_0's auc: 0.885487	valid_0's binary_logloss: 0.119367	valid_1's auc: 0.839843	valid_1's binary_logloss: 0.139409
[64]	valid_0's auc: 0.885645	valid_0's binary_logloss: 0.119254	valid_1's auc: 0.839603	valid_1's binary_logloss: 0.139483
[65]	valid_0's auc: 0.886	valid_0's binary_logloss: 0.119067	valid_1's auc: 0.839806	valid_1's binary_logloss: 0.139511
[66]	valid_0's auc: 0.886267	valid_0's binary_logloss: 0.118949	valid_1's auc: 0.839758	valid_1's binary_logloss: 0.139539
[67]	valid_0's auc: 0.886435	valid_0's binary_logloss: 0.118836	valid_1's auc: 0.83953	valid_1's binary_logloss: 0.139595
[68]	valid_0's auc: 0.886593	valid_0's binary_logloss: 0.118728	valid_1's auc: 0.839422	valid_1's binary_logloss: 0.139608
[69]	valid_0's auc: 0.886791	valid_0's binary_logloss: 0.118595	valid_1's auc: 0.839492	valid_1's binary_logloss: 0.139615
[70]	valid_0's auc: 0.886904	valid_0's binary_logloss: 0.118527	valid_1's auc: 0.83981	valid_1's binary_logloss: 0.139598
[71]	valid_0's auc: 0.887299	valid_0's binary_logloss: 0.118387	valid_1's auc: 0.839869	valid_1's binary_logloss: 0.139594
[72]	valid_0's auc: 0.887483	valid_0's binary_logloss: 0.118281	valid_1's auc: 0.839788	valid_1's binary_logloss: 0.139654
[73]	valid_0's auc: 0.887557	valid_0's binary_logloss: 0.118196	valid_1's auc: 0.840147	valid_1's binary_logloss: 0.139624
[74]	valid_0's auc: 0.887851	valid_0's binary_logloss: 0.118068	valid_1's auc: 0.840137	valid_1's binary_logloss: 0.139665
[75]	valid_0's auc: 0.888042	valid_0's binary_logloss: 0.117962	valid_1's auc: 0.840289	valid_1's binary_logloss: 0.139644
[76]	valid_0's auc: 0.888422	valid_0's binary_logloss: 0.11776	valid_1's auc: 0.83992	valid_1's binary_logloss: 0.139749
[77]	valid_0's auc: 0.88853	valid_0's binary_logloss: 0.117667	valid_1's auc: 0.839998	valid_1's binary_logloss: 0.139765
Early stopping, best iteration is:
[47]	valid_0's auc: 0.879882	valid_0's binary_logloss: 0.121829	valid_1's auc: 0.840044	valid_1's binary_logloss: 0.139063
[1]	valid_0's auc: 0.835412	valid_0's binary_logloss: 0.155721	valid_1's auc: 0.81973	valid_1's binary_logloss: 0.164849
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.841234	valid_0's binary_logloss: 0.150325	valid_1's auc: 0.823604	valid_1's binary_logloss: 0.159865
[3]	valid_0's auc: 0.846416	valid_0's binary_logloss: 0.146303	valid_1's auc: 0.824414	valid_1's binary_logloss: 0.156273
[4]	valid_0's auc: 0.85113	valid_0's binary_logloss: 0.143142	valid_1's auc: 0.83033	valid_1's binary_logloss: 0.153388
[5]	valid_0's auc: 0.854001	valid_0's binary_logloss: 0.140572	valid_1's auc: 0.831279	valid_1's binary_logloss: 0.151139
[6]	valid_0's auc: 0.856684	valid_0's binary_logloss: 0.138389	valid_1's auc: 0.83309	valid_1's binary_logloss: 0.149282
[7]	valid_0's auc: 0.858456	valid_0's binary_logloss: 0.136552	valid_1's auc: 0.833723	valid_1's binary_logloss: 0.147799
[8]	valid_0's auc: 0.8595	valid_0's binary_logloss: 0.135038	valid_1's auc: 0.834417	valid_1's binary_logloss: 0.146527
[9]	valid_0's auc: 0.861391	valid_0's binary_logloss: 0.133585	valid_1's auc: 0.834625	valid_1's binary_logloss: 0.145543
[10]	valid_0's auc: 0.863676	valid_0's binary_logloss: 0.132396	valid_1's auc: 0.834954	valid_1's binary_logloss: 0.144707
[11]	valid_0's auc: 0.865339	valid_0's binary_logloss: 0.13134	valid_1's auc: 0.835252	valid_1's binary_logloss: 0.144034
[12]	valid_0's auc: 0.866817	valid_0's binary_logloss: 0.130311	valid_1's auc: 0.835592	valid_1's binary_logloss: 0.143404
[13]	valid_0's auc: 0.868751	valid_0's binary_logloss: 0.129388	valid_1's auc: 0.835225	valid_1's binary_logloss: 0.142853
[14]	valid_0's auc: 0.870283	valid_0's binary_logloss: 0.128551	valid_1's auc: 0.835191	valid_1's binary_logloss: 0.142412
[15]	valid_0's auc: 0.871366	valid_0's binary_logloss: 0.127828	valid_1's auc: 0.834681	valid_1's binary_logloss: 0.142173
[16]	valid_0's auc: 0.87234	valid_0's binary_logloss: 0.127146	valid_1's auc: 0.834555	valid_1's binary_logloss: 0.141876
[17]	valid_0's auc: 0.872963	valid_0's binary_logloss: 0.126553	valid_1's auc: 0.833341	valid_1's binary_logloss: 0.141845
[18]	valid_0's auc: 0.874019	valid_0's binary_logloss: 0.125934	valid_1's auc: 0.833684	valid_1's binary_logloss: 0.141515
[19]	valid_0's auc: 0.875288	valid_0's binary_logloss: 0.125352	valid_1's auc: 0.832788	valid_1's binary_logloss: 0.141499
[20]	valid_0's auc: 0.876379	valid_0's binary_logloss: 0.124793	valid_1's auc: 0.832498	valid_1's binary_logloss: 0.141369
[21]	valid_0's auc: 0.877634	valid_0's binary_logloss: 0.124225	valid_1's auc: 0.832355	valid_1's binary_logloss: 0.141226
[22]	valid_0's auc: 0.878588	valid_0's binary_logloss: 0.123762	valid_1's auc: 0.832703	valid_1's binary_logloss: 0.141078
[23]	valid_0's auc: 0.879693	valid_0's binary_logloss: 0.123255	valid_1's auc: 0.832163	valid_1's binary_logloss: 0.141039
[24]	valid_0's auc: 0.880719	valid_0's binary_logloss: 0.122807	valid_1's auc: 0.832324	valid_1's binary_logloss: 0.140925
[25]	valid_0's auc: 0.881533	valid_0's binary_logloss: 0.122313	valid_1's auc: 0.832184	valid_1's binary_logloss: 0.140891
[26]	valid_0's auc: 0.882449	valid_0's binary_logloss: 0.121884	valid_1's auc: 0.832004	valid_1's binary_logloss: 0.140834
[27]	valid_0's auc: 0.883373	valid_0's binary_logloss: 0.121461	valid_1's auc: 0.83169	valid_1's binary_logloss: 0.140849
[28]	valid_0's auc: 0.884175	valid_0's binary_logloss: 0.121053	valid_1's auc: 0.831077	valid_1's binary_logloss: 0.140897
[29]	valid_0's auc: 0.885058	valid_0's binary_logloss: 0.120689	valid_1's auc: 0.831657	valid_1's binary_logloss: 0.140775
[30]	valid_0's auc: 0.885865	valid_0's binary_logloss: 0.120366	valid_1's auc: 0.83154	valid_1's binary_logloss: 0.140767
[31]	valid_0's auc: 0.887114	valid_0's binary_logloss: 0.11994	valid_1's auc: 0.831439	valid_1's binary_logloss: 0.140816
[32]	valid_0's auc: 0.887603	valid_0's binary_logloss: 0.119623	valid_1's auc: 0.831023	valid_1's binary_logloss: 0.140912
[33]	valid_0's auc: 0.888164	valid_0's binary_logloss: 0.11928	valid_1's auc: 0.830667	valid_1's binary_logloss: 0.140953
[34]	valid_0's auc: 0.888957	valid_0's binary_logloss: 0.118937	valid_1's auc: 0.830222	valid_1's binary_logloss: 0.141016
[35]	valid_0's auc: 0.889746	valid_0's binary_logloss: 0.118573	valid_1's auc: 0.830175	valid_1's binary_logloss: 0.141039
[36]	valid_0's auc: 0.890215	valid_0's binary_logloss: 0.118288	valid_1's auc: 0.830319	valid_1's binary_logloss: 0.140986
[37]	valid_0's auc: 0.890966	valid_0's binary_logloss: 0.117947	valid_1's auc: 0.830443	valid_1's binary_logloss: 0.140993
[38]	valid_0's auc: 0.891474	valid_0's binary_logloss: 0.117667	valid_1's auc: 0.830505	valid_1's binary_logloss: 0.141016
[39]	valid_0's auc: 0.892156	valid_0's binary_logloss: 0.117328	valid_1's auc: 0.830415	valid_1's binary_logloss: 0.140994
[40]	valid_0's auc: 0.892568	valid_0's binary_logloss: 0.117061	valid_1's auc: 0.830687	valid_1's binary_logloss: 0.140918
[41]	valid_0's auc: 0.893181	valid_0's binary_logloss: 0.11681	valid_1's auc: 0.830622	valid_1's binary_logloss: 0.140963
[42]	valid_0's auc: 0.893984	valid_0's binary_logloss: 0.116476	valid_1's auc: 0.830827	valid_1's binary_logloss: 0.140945
Early stopping, best iteration is:
[12]	valid_0's auc: 0.866817	valid_0's binary_logloss: 0.130311	valid_1's auc: 0.835592	valid_1's binary_logloss: 0.143404
[1]	valid_0's auc: 0.830452	valid_0's binary_logloss: 0.155919	valid_1's auc: 0.817465	valid_1's binary_logloss: 0.164895
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.843009	valid_0's binary_logloss: 0.150278	valid_1's auc: 0.828176	valid_1's binary_logloss: 0.159411
[3]	valid_0's auc: 0.851789	valid_0's binary_logloss: 0.146252	valid_1's auc: 0.832597	valid_1's binary_logloss: 0.155736
[4]	valid_0's auc: 0.854657	valid_0's binary_logloss: 0.143075	valid_1's auc: 0.83416	valid_1's binary_logloss: 0.152859
[5]	valid_0's auc: 0.85652	valid_0's binary_logloss: 0.140511	valid_1's auc: 0.836671	valid_1's binary_logloss: 0.150427
[6]	valid_0's auc: 0.85962	valid_0's binary_logloss: 0.138318	valid_1's auc: 0.836445	valid_1's binary_logloss: 0.148531
[7]	valid_0's auc: 0.861186	valid_0's binary_logloss: 0.136426	valid_1's auc: 0.837332	valid_1's binary_logloss: 0.146966
[8]	valid_0's auc: 0.863008	valid_0's binary_logloss: 0.134791	valid_1's auc: 0.837707	valid_1's binary_logloss: 0.145637
[9]	valid_0's auc: 0.864093	valid_0's binary_logloss: 0.133383	valid_1's auc: 0.838849	valid_1's binary_logloss: 0.144456
[10]	valid_0's auc: 0.865712	valid_0's binary_logloss: 0.132144	valid_1's auc: 0.839148	valid_1's binary_logloss: 0.143548
[11]	valid_0's auc: 0.866608	valid_0's binary_logloss: 0.131102	valid_1's auc: 0.83905	valid_1's binary_logloss: 0.142819
[12]	valid_0's auc: 0.868335	valid_0's binary_logloss: 0.130069	valid_1's auc: 0.838903	valid_1's binary_logloss: 0.142184
[13]	valid_0's auc: 0.869075	valid_0's binary_logloss: 0.12923	valid_1's auc: 0.838882	valid_1's binary_logloss: 0.141681
[14]	valid_0's auc: 0.870379	valid_0's binary_logloss: 0.128406	valid_1's auc: 0.838698	valid_1's binary_logloss: 0.141227
[15]	valid_0's auc: 0.871467	valid_0's binary_logloss: 0.127626	valid_1's auc: 0.838689	valid_1's binary_logloss: 0.140825
[16]	valid_0's auc: 0.872424	valid_0's binary_logloss: 0.126889	valid_1's auc: 0.838836	valid_1's binary_logloss: 0.140446
[17]	valid_0's auc: 0.873627	valid_0's binary_logloss: 0.126201	valid_1's auc: 0.839557	valid_1's binary_logloss: 0.14014
[18]	valid_0's auc: 0.87479	valid_0's binary_logloss: 0.125597	valid_1's auc: 0.839776	valid_1's binary_logloss: 0.139941
[19]	valid_0's auc: 0.875902	valid_0's binary_logloss: 0.124996	valid_1's auc: 0.839902	valid_1's binary_logloss: 0.139701
[20]	valid_0's auc: 0.877163	valid_0's binary_logloss: 0.124367	valid_1's auc: 0.839299	valid_1's binary_logloss: 0.139581
[21]	valid_0's auc: 0.878438	valid_0's binary_logloss: 0.123757	valid_1's auc: 0.839098	valid_1's binary_logloss: 0.139467
[22]	valid_0's auc: 0.879596	valid_0's binary_logloss: 0.123223	valid_1's auc: 0.838863	valid_1's binary_logloss: 0.139407
[23]	valid_0's auc: 0.880754	valid_0's binary_logloss: 0.122702	valid_1's auc: 0.839119	valid_1's binary_logloss: 0.139326
[24]	valid_0's auc: 0.881758	valid_0's binary_logloss: 0.122226	valid_1's auc: 0.839042	valid_1's binary_logloss: 0.139233
[25]	valid_0's auc: 0.883034	valid_0's binary_logloss: 0.121743	valid_1's auc: 0.838865	valid_1's binary_logloss: 0.139239
[26]	valid_0's auc: 0.884403	valid_0's binary_logloss: 0.121255	valid_1's auc: 0.838329	valid_1's binary_logloss: 0.139254
[27]	valid_0's auc: 0.885324	valid_0's binary_logloss: 0.120835	valid_1's auc: 0.837953	valid_1's binary_logloss: 0.139276
[28]	valid_0's auc: 0.886543	valid_0's binary_logloss: 0.120403	valid_1's auc: 0.838377	valid_1's binary_logloss: 0.139233
[29]	valid_0's auc: 0.887286	valid_0's binary_logloss: 0.120007	valid_1's auc: 0.838269	valid_1's binary_logloss: 0.139189
[30]	valid_0's auc: 0.888016	valid_0's binary_logloss: 0.119609	valid_1's auc: 0.838269	valid_1's binary_logloss: 0.139184
[31]	valid_0's auc: 0.888842	valid_0's binary_logloss: 0.11924	valid_1's auc: 0.838469	valid_1's binary_logloss: 0.139125
[32]	valid_0's auc: 0.889829	valid_0's binary_logloss: 0.118853	valid_1's auc: 0.838631	valid_1's binary_logloss: 0.139121
[33]	valid_0's auc: 0.890569	valid_0's binary_logloss: 0.118502	valid_1's auc: 0.838736	valid_1's binary_logloss: 0.139065
[34]	valid_0's auc: 0.891418	valid_0's binary_logloss: 0.118158	valid_1's auc: 0.838692	valid_1's binary_logloss: 0.139051
[35]	valid_0's auc: 0.892153	valid_0's binary_logloss: 0.117818	valid_1's auc: 0.838839	valid_1's binary_logloss: 0.139022
[36]	valid_0's auc: 0.892798	valid_0's binary_logloss: 0.117475	valid_1's auc: 0.838585	valid_1's binary_logloss: 0.139057
[37]	valid_0's auc: 0.893672	valid_0's binary_logloss: 0.117095	valid_1's auc: 0.838499	valid_1's binary_logloss: 0.139109
[38]	valid_0's auc: 0.894575	valid_0's binary_logloss: 0.116783	valid_1's auc: 0.83897	valid_1's binary_logloss: 0.139067
[39]	valid_0's auc: 0.895447	valid_0's binary_logloss: 0.116421	valid_1's auc: 0.838515	valid_1's binary_logloss: 0.139143
[40]	valid_0's auc: 0.896177	valid_0's binary_logloss: 0.116084	valid_1's auc: 0.838353	valid_1's binary_logloss: 0.13918
[41]	valid_0's auc: 0.896821	valid_0's binary_logloss: 0.115822	valid_1's auc: 0.837933	valid_1's binary_logloss: 0.139263
[42]	valid_0's auc: 0.897531	valid_0's binary_logloss: 0.115503	valid_1's auc: 0.838083	valid_1's binary_logloss: 0.139286
[43]	valid_0's auc: 0.898208	valid_0's binary_logloss: 0.115195	valid_1's auc: 0.838087	valid_1's binary_logloss: 0.139312
[44]	valid_0's auc: 0.899032	valid_0's binary_logloss: 0.114921	valid_1's auc: 0.837956	valid_1's binary_logloss: 0.139352
[45]	valid_0's auc: 0.899435	valid_0's binary_logloss: 0.114646	valid_1's auc: 0.837664	valid_1's binary_logloss: 0.139425
[46]	valid_0's auc: 0.899904	valid_0's binary_logloss: 0.114381	valid_1's auc: 0.837191	valid_1's binary_logloss: 0.139562
[47]	valid_0's auc: 0.900313	valid_0's binary_logloss: 0.114172	valid_1's auc: 0.837275	valid_1's binary_logloss: 0.139576
[48]	valid_0's auc: 0.900881	valid_0's binary_logloss: 0.113898	valid_1's auc: 0.837102	valid_1's binary_logloss: 0.139641
[49]	valid_0's auc: 0.901416	valid_0's binary_logloss: 0.11361	valid_1's auc: 0.837064	valid_1's binary_logloss: 0.139694
Early stopping, best iteration is:
[19]	valid_0's auc: 0.875902	valid_0's binary_logloss: 0.124996	valid_1's auc: 0.839902	valid_1's binary_logloss: 0.139701
[1]	valid_0's auc: 0.834758	valid_0's binary_logloss: 0.156067	valid_1's auc: 0.822971	valid_1's binary_logloss: 0.165101
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.842403	valid_0's binary_logloss: 0.150502	valid_1's auc: 0.828728	valid_1's binary_logloss: 0.159785
[3]	valid_0's auc: 0.847356	valid_0's binary_logloss: 0.146328	valid_1's auc: 0.830832	valid_1's binary_logloss: 0.156017
[4]	valid_0's auc: 0.84996	valid_0's binary_logloss: 0.143134	valid_1's auc: 0.83154	valid_1's binary_logloss: 0.15319
[5]	valid_0's auc: 0.853547	valid_0's binary_logloss: 0.140471	valid_1's auc: 0.833818	valid_1's binary_logloss: 0.150761
[6]	valid_0's auc: 0.855549	valid_0's binary_logloss: 0.138282	valid_1's auc: 0.834224	valid_1's binary_logloss: 0.148815
[7]	valid_0's auc: 0.857045	valid_0's binary_logloss: 0.136467	valid_1's auc: 0.835329	valid_1's binary_logloss: 0.147338
[8]	valid_0's auc: 0.858416	valid_0's binary_logloss: 0.134953	valid_1's auc: 0.836144	valid_1's binary_logloss: 0.146094
[9]	valid_0's auc: 0.860878	valid_0's binary_logloss: 0.1336	valid_1's auc: 0.835613	valid_1's binary_logloss: 0.145179
[10]	valid_0's auc: 0.862432	valid_0's binary_logloss: 0.132438	valid_1's auc: 0.836519	valid_1's binary_logloss: 0.144304
[11]	valid_0's auc: 0.864715	valid_0's binary_logloss: 0.131298	valid_1's auc: 0.836728	valid_1's binary_logloss: 0.143576
[12]	valid_0's auc: 0.86649	valid_0's binary_logloss: 0.13029	valid_1's auc: 0.837121	valid_1's binary_logloss: 0.142892
[13]	valid_0's auc: 0.869207	valid_0's binary_logloss: 0.129293	valid_1's auc: 0.837902	valid_1's binary_logloss: 0.142287
[14]	valid_0's auc: 0.870823	valid_0's binary_logloss: 0.128463	valid_1's auc: 0.838274	valid_1's binary_logloss: 0.141902
[15]	valid_0's auc: 0.871881	valid_0's binary_logloss: 0.127657	valid_1's auc: 0.83767	valid_1's binary_logloss: 0.141625
[16]	valid_0's auc: 0.873451	valid_0's binary_logloss: 0.126932	valid_1's auc: 0.837454	valid_1's binary_logloss: 0.141273
[17]	valid_0's auc: 0.874754	valid_0's binary_logloss: 0.126215	valid_1's auc: 0.837218	valid_1's binary_logloss: 0.14099
[18]	valid_0's auc: 0.876015	valid_0's binary_logloss: 0.125583	valid_1's auc: 0.837242	valid_1's binary_logloss: 0.140723
[19]	valid_0's auc: 0.876898	valid_0's binary_logloss: 0.125006	valid_1's auc: 0.836677	valid_1's binary_logloss: 0.140576
[20]	valid_0's auc: 0.878198	valid_0's binary_logloss: 0.124464	valid_1's auc: 0.836677	valid_1's binary_logloss: 0.140492
[21]	valid_0's auc: 0.87919	valid_0's binary_logloss: 0.123881	valid_1's auc: 0.837511	valid_1's binary_logloss: 0.140273
[22]	valid_0's auc: 0.880273	valid_0's binary_logloss: 0.123358	valid_1's auc: 0.837479	valid_1's binary_logloss: 0.14014
[23]	valid_0's auc: 0.881151	valid_0's binary_logloss: 0.122898	valid_1's auc: 0.837864	valid_1's binary_logloss: 0.139991
[24]	valid_0's auc: 0.882264	valid_0's binary_logloss: 0.122383	valid_1's auc: 0.8378	valid_1's binary_logloss: 0.139955
[25]	valid_0's auc: 0.883252	valid_0's binary_logloss: 0.121909	valid_1's auc: 0.838233	valid_1's binary_logloss: 0.139841
[26]	valid_0's auc: 0.884254	valid_0's binary_logloss: 0.121477	valid_1's auc: 0.837795	valid_1's binary_logloss: 0.139804
[27]	valid_0's auc: 0.885619	valid_0's binary_logloss: 0.120986	valid_1's auc: 0.838147	valid_1's binary_logloss: 0.139714
[28]	valid_0's auc: 0.886542	valid_0's binary_logloss: 0.120573	valid_1's auc: 0.837608	valid_1's binary_logloss: 0.139727
[29]	valid_0's auc: 0.887407	valid_0's binary_logloss: 0.120146	valid_1's auc: 0.837298	valid_1's binary_logloss: 0.139725
[30]	valid_0's auc: 0.88824	valid_0's binary_logloss: 0.119775	valid_1's auc: 0.837294	valid_1's binary_logloss: 0.139714
[31]	valid_0's auc: 0.889124	valid_0's binary_logloss: 0.119428	valid_1's auc: 0.837511	valid_1's binary_logloss: 0.139644
[32]	valid_0's auc: 0.889919	valid_0's binary_logloss: 0.119052	valid_1's auc: 0.838258	valid_1's binary_logloss: 0.139519
[33]	valid_0's auc: 0.890537	valid_0's binary_logloss: 0.118727	valid_1's auc: 0.837988	valid_1's binary_logloss: 0.139526
[34]	valid_0's auc: 0.891097	valid_0's binary_logloss: 0.118375	valid_1's auc: 0.837857	valid_1's binary_logloss: 0.1396
[35]	valid_0's auc: 0.891811	valid_0's binary_logloss: 0.118027	valid_1's auc: 0.83771	valid_1's binary_logloss: 0.139627
[36]	valid_0's auc: 0.892678	valid_0's binary_logloss: 0.117704	valid_1's auc: 0.837436	valid_1's binary_logloss: 0.139695
[37]	valid_0's auc: 0.893595	valid_0's binary_logloss: 0.117332	valid_1's auc: 0.837433	valid_1's binary_logloss: 0.13969
[38]	valid_0's auc: 0.894079	valid_0's binary_logloss: 0.117021	valid_1's auc: 0.837272	valid_1's binary_logloss: 0.139691
[39]	valid_0's auc: 0.894556	valid_0's binary_logloss: 0.11674	valid_1's auc: 0.837253	valid_1's binary_logloss: 0.139685
[40]	valid_0's auc: 0.89516	valid_0's binary_logloss: 0.116428	valid_1's auc: 0.837294	valid_1's binary_logloss: 0.139678
[41]	valid_0's auc: 0.895863	valid_0's binary_logloss: 0.116155	valid_1's auc: 0.837866	valid_1's binary_logloss: 0.139562
[42]	valid_0's auc: 0.896354	valid_0's binary_logloss: 0.115871	valid_1's auc: 0.837735	valid_1's binary_logloss: 0.139612
[43]	valid_0's auc: 0.896691	valid_0's binary_logloss: 0.115612	valid_1's auc: 0.837481	valid_1's binary_logloss: 0.139702
[44]	valid_0's auc: 0.897343	valid_0's binary_logloss: 0.115316	valid_1's auc: 0.837651	valid_1's binary_logloss: 0.139672
Early stopping, best iteration is:
[14]	valid_0's auc: 0.870823	valid_0's binary_logloss: 0.128463	valid_1's auc: 0.838274	valid_1's binary_logloss: 0.141902
[1]	valid_0's auc: 0.835412	valid_0's binary_logloss: 0.155721	valid_1's auc: 0.81973	valid_1's binary_logloss: 0.164849
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.841234	valid_0's binary_logloss: 0.150325	valid_1's auc: 0.823604	valid_1's binary_logloss: 0.159865
[3]	valid_0's auc: 0.846416	valid_0's binary_logloss: 0.146303	valid_1's auc: 0.824414	valid_1's binary_logloss: 0.156273
[4]	valid_0's auc: 0.85113	valid_0's binary_logloss: 0.143142	valid_1's auc: 0.83033	valid_1's binary_logloss: 0.153388
[5]	valid_0's auc: 0.854001	valid_0's binary_logloss: 0.140572	valid_1's auc: 0.831279	valid_1's binary_logloss: 0.151139
[6]	valid_0's auc: 0.856684	valid_0's binary_logloss: 0.138389	valid_1's auc: 0.83309	valid_1's binary_logloss: 0.149282
[7]	valid_0's auc: 0.858456	valid_0's binary_logloss: 0.136552	valid_1's auc: 0.833723	valid_1's binary_logloss: 0.147799
[8]	valid_0's auc: 0.8595	valid_0's binary_logloss: 0.135038	valid_1's auc: 0.834417	valid_1's binary_logloss: 0.146527
[9]	valid_0's auc: 0.861391	valid_0's binary_logloss: 0.133585	valid_1's auc: 0.834625	valid_1's binary_logloss: 0.145543
[10]	valid_0's auc: 0.863676	valid_0's binary_logloss: 0.132396	valid_1's auc: 0.834954	valid_1's binary_logloss: 0.144707
[11]	valid_0's auc: 0.865339	valid_0's binary_logloss: 0.13134	valid_1's auc: 0.835252	valid_1's binary_logloss: 0.144034
[12]	valid_0's auc: 0.866817	valid_0's binary_logloss: 0.130311	valid_1's auc: 0.835592	valid_1's binary_logloss: 0.143404
[13]	valid_0's auc: 0.868751	valid_0's binary_logloss: 0.129388	valid_1's auc: 0.835225	valid_1's binary_logloss: 0.142853
[14]	valid_0's auc: 0.870283	valid_0's binary_logloss: 0.128551	valid_1's auc: 0.835191	valid_1's binary_logloss: 0.142412
[15]	valid_0's auc: 0.871366	valid_0's binary_logloss: 0.127828	valid_1's auc: 0.834681	valid_1's binary_logloss: 0.142173
[16]	valid_0's auc: 0.87234	valid_0's binary_logloss: 0.127146	valid_1's auc: 0.834555	valid_1's binary_logloss: 0.141876
[17]	valid_0's auc: 0.872963	valid_0's binary_logloss: 0.126553	valid_1's auc: 0.833341	valid_1's binary_logloss: 0.141845
[18]	valid_0's auc: 0.874019	valid_0's binary_logloss: 0.125934	valid_1's auc: 0.833684	valid_1's binary_logloss: 0.141515
[19]	valid_0's auc: 0.875288	valid_0's binary_logloss: 0.125352	valid_1's auc: 0.832788	valid_1's binary_logloss: 0.141499
[20]	valid_0's auc: 0.876379	valid_0's binary_logloss: 0.124793	valid_1's auc: 0.832498	valid_1's binary_logloss: 0.141369
[21]	valid_0's auc: 0.877634	valid_0's binary_logloss: 0.124225	valid_1's auc: 0.832355	valid_1's binary_logloss: 0.141226
[22]	valid_0's auc: 0.878588	valid_0's binary_logloss: 0.123762	valid_1's auc: 0.832703	valid_1's binary_logloss: 0.141078
[23]	valid_0's auc: 0.879693	valid_0's binary_logloss: 0.123255	valid_1's auc: 0.832163	valid_1's binary_logloss: 0.141039
[24]	valid_0's auc: 0.880719	valid_0's binary_logloss: 0.122807	valid_1's auc: 0.832324	valid_1's binary_logloss: 0.140925
[25]	valid_0's auc: 0.881533	valid_0's binary_logloss: 0.122313	valid_1's auc: 0.832184	valid_1's binary_logloss: 0.140891
[26]	valid_0's auc: 0.882449	valid_0's binary_logloss: 0.121884	valid_1's auc: 0.832004	valid_1's binary_logloss: 0.140834
[27]	valid_0's auc: 0.883373	valid_0's binary_logloss: 0.121461	valid_1's auc: 0.83169	valid_1's binary_logloss: 0.140849
[28]	valid_0's auc: 0.884175	valid_0's binary_logloss: 0.121053	valid_1's auc: 0.831077	valid_1's binary_logloss: 0.140897
[29]	valid_0's auc: 0.885058	valid_0's binary_logloss: 0.120689	valid_1's auc: 0.831657	valid_1's binary_logloss: 0.140775
[30]	valid_0's auc: 0.885865	valid_0's binary_logloss: 0.120366	valid_1's auc: 0.83154	valid_1's binary_logloss: 0.140767
[31]	valid_0's auc: 0.887114	valid_0's binary_logloss: 0.11994	valid_1's auc: 0.831439	valid_1's binary_logloss: 0.140816
[32]	valid_0's auc: 0.887603	valid_0's binary_logloss: 0.119623	valid_1's auc: 0.831023	valid_1's binary_logloss: 0.140912
[33]	valid_0's auc: 0.888164	valid_0's binary_logloss: 0.11928	valid_1's auc: 0.830667	valid_1's binary_logloss: 0.140953
[34]	valid_0's auc: 0.888957	valid_0's binary_logloss: 0.118937	valid_1's auc: 0.830222	valid_1's binary_logloss: 0.141016
[35]	valid_0's auc: 0.889746	valid_0's binary_logloss: 0.118573	valid_1's auc: 0.830175	valid_1's binary_logloss: 0.141039
[36]	valid_0's auc: 0.890215	valid_0's binary_logloss: 0.118288	valid_1's auc: 0.830319	valid_1's binary_logloss: 0.140986
[37]	valid_0's auc: 0.890966	valid_0's binary_logloss: 0.117947	valid_1's auc: 0.830443	valid_1's binary_logloss: 0.140993
[38]	valid_0's auc: 0.891474	valid_0's binary_logloss: 0.117667	valid_1's auc: 0.830505	valid_1's binary_logloss: 0.141016
[39]	valid_0's auc: 0.892156	valid_0's binary_logloss: 0.117328	valid_1's auc: 0.830415	valid_1's binary_logloss: 0.140994
[40]	valid_0's auc: 0.892568	valid_0's binary_logloss: 0.117061	valid_1's auc: 0.830687	valid_1's binary_logloss: 0.140918
[41]	valid_0's auc: 0.893181	valid_0's binary_logloss: 0.11681	valid_1's auc: 0.830622	valid_1's binary_logloss: 0.140963
[42]	valid_0's auc: 0.893984	valid_0's binary_logloss: 0.116476	valid_1's auc: 0.830827	valid_1's binary_logloss: 0.140945
Early stopping, best iteration is:
[12]	valid_0's auc: 0.866817	valid_0's binary_logloss: 0.130311	valid_1's auc: 0.835592	valid_1's binary_logloss: 0.143404
[1]	valid_0's auc: 0.830452	valid_0's binary_logloss: 0.155919	valid_1's auc: 0.817465	valid_1's binary_logloss: 0.164895
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.843009	valid_0's binary_logloss: 0.150278	valid_1's auc: 0.828176	valid_1's binary_logloss: 0.159411
[3]	valid_0's auc: 0.851789	valid_0's binary_logloss: 0.146252	valid_1's auc: 0.832597	valid_1's binary_logloss: 0.155736
[4]	valid_0's auc: 0.854657	valid_0's binary_logloss: 0.143075	valid_1's auc: 0.83416	valid_1's binary_logloss: 0.152859
[5]	valid_0's auc: 0.85652	valid_0's binary_logloss: 0.140511	valid_1's auc: 0.836671	valid_1's binary_logloss: 0.150427
[6]	valid_0's auc: 0.85962	valid_0's binary_logloss: 0.138318	valid_1's auc: 0.836445	valid_1's binary_logloss: 0.148531
[7]	valid_0's auc: 0.861186	valid_0's binary_logloss: 0.136426	valid_1's auc: 0.837332	valid_1's binary_logloss: 0.146966
[8]	valid_0's auc: 0.863008	valid_0's binary_logloss: 0.134791	valid_1's auc: 0.837707	valid_1's binary_logloss: 0.145637
[9]	valid_0's auc: 0.864093	valid_0's binary_logloss: 0.133383	valid_1's auc: 0.838849	valid_1's binary_logloss: 0.144456
[10]	valid_0's auc: 0.865712	valid_0's binary_logloss: 0.132144	valid_1's auc: 0.839148	valid_1's binary_logloss: 0.143548
[11]	valid_0's auc: 0.866608	valid_0's binary_logloss: 0.131102	valid_1's auc: 0.83905	valid_1's binary_logloss: 0.142819
[12]	valid_0's auc: 0.868335	valid_0's binary_logloss: 0.130069	valid_1's auc: 0.838903	valid_1's binary_logloss: 0.142184
[13]	valid_0's auc: 0.869075	valid_0's binary_logloss: 0.12923	valid_1's auc: 0.838882	valid_1's binary_logloss: 0.141681
[14]	valid_0's auc: 0.870379	valid_0's binary_logloss: 0.128406	valid_1's auc: 0.838698	valid_1's binary_logloss: 0.141227
[15]	valid_0's auc: 0.871467	valid_0's binary_logloss: 0.127626	valid_1's auc: 0.838689	valid_1's binary_logloss: 0.140825
[16]	valid_0's auc: 0.872424	valid_0's binary_logloss: 0.126889	valid_1's auc: 0.838836	valid_1's binary_logloss: 0.140446
[17]	valid_0's auc: 0.873627	valid_0's binary_logloss: 0.126201	valid_1's auc: 0.839557	valid_1's binary_logloss: 0.14014
[18]	valid_0's auc: 0.87479	valid_0's binary_logloss: 0.125597	valid_1's auc: 0.839776	valid_1's binary_logloss: 0.139941
[19]	valid_0's auc: 0.875902	valid_0's binary_logloss: 0.124996	valid_1's auc: 0.839902	valid_1's binary_logloss: 0.139701
[20]	valid_0's auc: 0.877163	valid_0's binary_logloss: 0.124367	valid_1's auc: 0.839299	valid_1's binary_logloss: 0.139581
[21]	valid_0's auc: 0.878438	valid_0's binary_logloss: 0.123757	valid_1's auc: 0.839098	valid_1's binary_logloss: 0.139467
[22]	valid_0's auc: 0.879596	valid_0's binary_logloss: 0.123223	valid_1's auc: 0.838863	valid_1's binary_logloss: 0.139407
[23]	valid_0's auc: 0.880754	valid_0's binary_logloss: 0.122702	valid_1's auc: 0.839119	valid_1's binary_logloss: 0.139326
[24]	valid_0's auc: 0.881758	valid_0's binary_logloss: 0.122226	valid_1's auc: 0.839042	valid_1's binary_logloss: 0.139233
[25]	valid_0's auc: 0.883034	valid_0's binary_logloss: 0.121743	valid_1's auc: 0.838865	valid_1's binary_logloss: 0.139239
[26]	valid_0's auc: 0.884403	valid_0's binary_logloss: 0.121255	valid_1's auc: 0.838329	valid_1's binary_logloss: 0.139254
[27]	valid_0's auc: 0.885324	valid_0's binary_logloss: 0.120835	valid_1's auc: 0.837953	valid_1's binary_logloss: 0.139276
[28]	valid_0's auc: 0.886543	valid_0's binary_logloss: 0.120403	valid_1's auc: 0.838377	valid_1's binary_logloss: 0.139233
[29]	valid_0's auc: 0.887286	valid_0's binary_logloss: 0.120007	valid_1's auc: 0.838269	valid_1's binary_logloss: 0.139189
[30]	valid_0's auc: 0.888016	valid_0's binary_logloss: 0.119609	valid_1's auc: 0.838269	valid_1's binary_logloss: 0.139184
[31]	valid_0's auc: 0.888842	valid_0's binary_logloss: 0.11924	valid_1's auc: 0.838469	valid_1's binary_logloss: 0.139125
[32]	valid_0's auc: 0.889829	valid_0's binary_logloss: 0.118853	valid_1's auc: 0.838631	valid_1's binary_logloss: 0.139121
[33]	valid_0's auc: 0.890569	valid_0's binary_logloss: 0.118502	valid_1's auc: 0.838736	valid_1's binary_logloss: 0.139065
[34]	valid_0's auc: 0.891418	valid_0's binary_logloss: 0.118158	valid_1's auc: 0.838692	valid_1's binary_logloss: 0.139051
[35]	valid_0's auc: 0.892153	valid_0's binary_logloss: 0.117818	valid_1's auc: 0.838839	valid_1's binary_logloss: 0.139022
[36]	valid_0's auc: 0.892798	valid_0's binary_logloss: 0.117475	valid_1's auc: 0.838585	valid_1's binary_logloss: 0.139057
[37]	valid_0's auc: 0.893672	valid_0's binary_logloss: 0.117095	valid_1's auc: 0.838499	valid_1's binary_logloss: 0.139109
[38]	valid_0's auc: 0.894575	valid_0's binary_logloss: 0.116783	valid_1's auc: 0.83897	valid_1's binary_logloss: 0.139067
[39]	valid_0's auc: 0.895447	valid_0's binary_logloss: 0.116421	valid_1's auc: 0.838515	valid_1's binary_logloss: 0.139143
[40]	valid_0's auc: 0.896177	valid_0's binary_logloss: 0.116084	valid_1's auc: 0.838353	valid_1's binary_logloss: 0.13918
[41]	valid_0's auc: 0.896821	valid_0's binary_logloss: 0.115822	valid_1's auc: 0.837933	valid_1's binary_logloss: 0.139263
[42]	valid_0's auc: 0.897531	valid_0's binary_logloss: 0.115503	valid_1's auc: 0.838083	valid_1's binary_logloss: 0.139286
[43]	valid_0's auc: 0.898208	valid_0's binary_logloss: 0.115195	valid_1's auc: 0.838087	valid_1's binary_logloss: 0.139312
[44]	valid_0's auc: 0.899032	valid_0's binary_logloss: 0.114921	valid_1's auc: 0.837956	valid_1's binary_logloss: 0.139352
[45]	valid_0's auc: 0.899435	valid_0's binary_logloss: 0.114646	valid_1's auc: 0.837664	valid_1's binary_logloss: 0.139425
[46]	valid_0's auc: 0.899904	valid_0's binary_logloss: 0.114381	valid_1's auc: 0.837191	valid_1's binary_logloss: 0.139562
[47]	valid_0's auc: 0.900313	valid_0's binary_logloss: 0.114172	valid_1's auc: 0.837275	valid_1's binary_logloss: 0.139576
[48]	valid_0's auc: 0.900881	valid_0's binary_logloss: 0.113898	valid_1's auc: 0.837102	valid_1's binary_logloss: 0.139641
[49]	valid_0's auc: 0.901416	valid_0's binary_logloss: 0.11361	valid_1's auc: 0.837064	valid_1's binary_logloss: 0.139694
Early stopping, best iteration is:
[19]	valid_0's auc: 0.875902	valid_0's binary_logloss: 0.124996	valid_1's auc: 0.839902	valid_1's binary_logloss: 0.139701
[1]	valid_0's auc: 0.834758	valid_0's binary_logloss: 0.156067	valid_1's auc: 0.822971	valid_1's binary_logloss: 0.165101
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.842403	valid_0's binary_logloss: 0.150502	valid_1's auc: 0.828728	valid_1's binary_logloss: 0.159785
[3]	valid_0's auc: 0.847356	valid_0's binary_logloss: 0.146328	valid_1's auc: 0.830832	valid_1's binary_logloss: 0.156017
[4]	valid_0's auc: 0.84996	valid_0's binary_logloss: 0.143134	valid_1's auc: 0.83154	valid_1's binary_logloss: 0.15319
[5]	valid_0's auc: 0.853547	valid_0's binary_logloss: 0.140471	valid_1's auc: 0.833818	valid_1's binary_logloss: 0.150761
[6]	valid_0's auc: 0.855549	valid_0's binary_logloss: 0.138282	valid_1's auc: 0.834224	valid_1's binary_logloss: 0.148815
[7]	valid_0's auc: 0.857045	valid_0's binary_logloss: 0.136467	valid_1's auc: 0.835329	valid_1's binary_logloss: 0.147338
[8]	valid_0's auc: 0.858416	valid_0's binary_logloss: 0.134953	valid_1's auc: 0.836144	valid_1's binary_logloss: 0.146094
[9]	valid_0's auc: 0.860878	valid_0's binary_logloss: 0.1336	valid_1's auc: 0.835613	valid_1's binary_logloss: 0.145179
[10]	valid_0's auc: 0.862432	valid_0's binary_logloss: 0.132438	valid_1's auc: 0.836519	valid_1's binary_logloss: 0.144304
[11]	valid_0's auc: 0.864715	valid_0's binary_logloss: 0.131298	valid_1's auc: 0.836728	valid_1's binary_logloss: 0.143576
[12]	valid_0's auc: 0.86649	valid_0's binary_logloss: 0.13029	valid_1's auc: 0.837121	valid_1's binary_logloss: 0.142892
[13]	valid_0's auc: 0.869207	valid_0's binary_logloss: 0.129293	valid_1's auc: 0.837902	valid_1's binary_logloss: 0.142287
[14]	valid_0's auc: 0.870823	valid_0's binary_logloss: 0.128463	valid_1's auc: 0.838274	valid_1's binary_logloss: 0.141902
[15]	valid_0's auc: 0.871881	valid_0's binary_logloss: 0.127657	valid_1's auc: 0.83767	valid_1's binary_logloss: 0.141625
[16]	valid_0's auc: 0.873451	valid_0's binary_logloss: 0.126932	valid_1's auc: 0.837454	valid_1's binary_logloss: 0.141273
[17]	valid_0's auc: 0.874754	valid_0's binary_logloss: 0.126215	valid_1's auc: 0.837218	valid_1's binary_logloss: 0.14099
[18]	valid_0's auc: 0.876015	valid_0's binary_logloss: 0.125583	valid_1's auc: 0.837242	valid_1's binary_logloss: 0.140723
[19]	valid_0's auc: 0.876898	valid_0's binary_logloss: 0.125006	valid_1's auc: 0.836677	valid_1's binary_logloss: 0.140576
[20]	valid_0's auc: 0.878198	valid_0's binary_logloss: 0.124464	valid_1's auc: 0.836677	valid_1's binary_logloss: 0.140492
[21]	valid_0's auc: 0.87919	valid_0's binary_logloss: 0.123881	valid_1's auc: 0.837511	valid_1's binary_logloss: 0.140273
[22]	valid_0's auc: 0.880273	valid_0's binary_logloss: 0.123358	valid_1's auc: 0.837479	valid_1's binary_logloss: 0.14014
[23]	valid_0's auc: 0.881151	valid_0's binary_logloss: 0.122898	valid_1's auc: 0.837864	valid_1's binary_logloss: 0.139991
[24]	valid_0's auc: 0.882264	valid_0's binary_logloss: 0.122383	valid_1's auc: 0.8378	valid_1's binary_logloss: 0.139955
[25]	valid_0's auc: 0.883252	valid_0's binary_logloss: 0.121909	valid_1's auc: 0.838233	valid_1's binary_logloss: 0.139841
[26]	valid_0's auc: 0.884254	valid_0's binary_logloss: 0.121477	valid_1's auc: 0.837795	valid_1's binary_logloss: 0.139804
[27]	valid_0's auc: 0.885619	valid_0's binary_logloss: 0.120986	valid_1's auc: 0.838147	valid_1's binary_logloss: 0.139714
[28]	valid_0's auc: 0.886542	valid_0's binary_logloss: 0.120573	valid_1's auc: 0.837608	valid_1's binary_logloss: 0.139727
[29]	valid_0's auc: 0.887407	valid_0's binary_logloss: 0.120146	valid_1's auc: 0.837298	valid_1's binary_logloss: 0.139725
[30]	valid_0's auc: 0.88824	valid_0's binary_logloss: 0.119775	valid_1's auc: 0.837294	valid_1's binary_logloss: 0.139714
[31]	valid_0's auc: 0.889124	valid_0's binary_logloss: 0.119428	valid_1's auc: 0.837511	valid_1's binary_logloss: 0.139644
[32]	valid_0's auc: 0.889919	valid_0's binary_logloss: 0.119052	valid_1's auc: 0.838258	valid_1's binary_logloss: 0.139519
[33]	valid_0's auc: 0.890537	valid_0's binary_logloss: 0.118727	valid_1's auc: 0.837988	valid_1's binary_logloss: 0.139526
[34]	valid_0's auc: 0.891097	valid_0's binary_logloss: 0.118375	valid_1's auc: 0.837857	valid_1's binary_logloss: 0.1396
[35]	valid_0's auc: 0.891811	valid_0's binary_logloss: 0.118027	valid_1's auc: 0.83771	valid_1's binary_logloss: 0.139627
[36]	valid_0's auc: 0.892678	valid_0's binary_logloss: 0.117704	valid_1's auc: 0.837436	valid_1's binary_logloss: 0.139695
[37]	valid_0's auc: 0.893595	valid_0's binary_logloss: 0.117332	valid_1's auc: 0.837433	valid_1's binary_logloss: 0.13969
[38]	valid_0's auc: 0.894079	valid_0's binary_logloss: 0.117021	valid_1's auc: 0.837272	valid_1's binary_logloss: 0.139691
[39]	valid_0's auc: 0.894556	valid_0's binary_logloss: 0.11674	valid_1's auc: 0.837253	valid_1's binary_logloss: 0.139685
[40]	valid_0's auc: 0.89516	valid_0's binary_logloss: 0.116428	valid_1's auc: 0.837294	valid_1's binary_logloss: 0.139678
[41]	valid_0's auc: 0.895863	valid_0's binary_logloss: 0.116155	valid_1's auc: 0.837866	valid_1's binary_logloss: 0.139562
[42]	valid_0's auc: 0.896354	valid_0's binary_logloss: 0.115871	valid_1's auc: 0.837735	valid_1's binary_logloss: 0.139612
[43]	valid_0's auc: 0.896691	valid_0's binary_logloss: 0.115612	valid_1's auc: 0.837481	valid_1's binary_logloss: 0.139702
[44]	valid_0's auc: 0.897343	valid_0's binary_logloss: 0.115316	valid_1's auc: 0.837651	valid_1's binary_logloss: 0.139672
Early stopping, best iteration is:
[14]	valid_0's auc: 0.870823	valid_0's binary_logloss: 0.128463	valid_1's auc: 0.838274	valid_1's binary_logloss: 0.141902
[1]	valid_0's auc: 0.820235	valid_0's binary_logloss: 0.156085	valid_1's auc: 0.81613	valid_1's binary_logloss: 0.164998
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.825775	valid_0's binary_logloss: 0.150951	valid_1's auc: 0.821831	valid_1's binary_logloss: 0.15988
[3]	valid_0's auc: 0.832192	valid_0's binary_logloss: 0.147167	valid_1's auc: 0.827302	valid_1's binary_logloss: 0.156397
[4]	valid_0's auc: 0.837518	valid_0's binary_logloss: 0.144131	valid_1's auc: 0.8334	valid_1's binary_logloss: 0.153325
[5]	valid_0's auc: 0.842289	valid_0's binary_logloss: 0.141651	valid_1's auc: 0.836018	valid_1's binary_logloss: 0.150959
[6]	valid_0's auc: 0.844974	valid_0's binary_logloss: 0.139661	valid_1's auc: 0.838022	valid_1's binary_logloss: 0.149046
[7]	valid_0's auc: 0.846623	valid_0's binary_logloss: 0.138001	valid_1's auc: 0.837777	valid_1's binary_logloss: 0.147509
[8]	valid_0's auc: 0.848529	valid_0's binary_logloss: 0.136578	valid_1's auc: 0.839519	valid_1's binary_logloss: 0.146015
[9]	valid_0's auc: 0.850276	valid_0's binary_logloss: 0.135274	valid_1's auc: 0.840042	valid_1's binary_logloss: 0.145043
[10]	valid_0's auc: 0.852371	valid_0's binary_logloss: 0.134185	valid_1's auc: 0.839808	valid_1's binary_logloss: 0.144182
[11]	valid_0's auc: 0.853705	valid_0's binary_logloss: 0.133238	valid_1's auc: 0.83943	valid_1's binary_logloss: 0.14345
[12]	valid_0's auc: 0.855304	valid_0's binary_logloss: 0.132409	valid_1's auc: 0.838786	valid_1's binary_logloss: 0.142878
[13]	valid_0's auc: 0.856638	valid_0's binary_logloss: 0.131658	valid_1's auc: 0.838632	valid_1's binary_logloss: 0.142368
[14]	valid_0's auc: 0.85784	valid_0's binary_logloss: 0.130967	valid_1's auc: 0.838182	valid_1's binary_logloss: 0.14198
[15]	valid_0's auc: 0.859432	valid_0's binary_logloss: 0.130373	valid_1's auc: 0.838236	valid_1's binary_logloss: 0.141582
[16]	valid_0's auc: 0.860428	valid_0's binary_logloss: 0.129814	valid_1's auc: 0.837425	valid_1's binary_logloss: 0.141389
[17]	valid_0's auc: 0.861409	valid_0's binary_logloss: 0.12923	valid_1's auc: 0.837358	valid_1's binary_logloss: 0.141106
[18]	valid_0's auc: 0.86332	valid_0's binary_logloss: 0.128681	valid_1's auc: 0.836771	valid_1's binary_logloss: 0.140932
[19]	valid_0's auc: 0.864365	valid_0's binary_logloss: 0.128233	valid_1's auc: 0.836564	valid_1's binary_logloss: 0.140796
[20]	valid_0's auc: 0.865268	valid_0's binary_logloss: 0.127815	valid_1's auc: 0.835613	valid_1's binary_logloss: 0.140715
[21]	valid_0's auc: 0.865869	valid_0's binary_logloss: 0.127427	valid_1's auc: 0.835085	valid_1's binary_logloss: 0.140641
[22]	valid_0's auc: 0.867223	valid_0's binary_logloss: 0.126993	valid_1's auc: 0.835383	valid_1's binary_logloss: 0.140519
[23]	valid_0's auc: 0.867898	valid_0's binary_logloss: 0.126644	valid_1's auc: 0.835018	valid_1's binary_logloss: 0.140432
[24]	valid_0's auc: 0.869077	valid_0's binary_logloss: 0.126246	valid_1's auc: 0.835199	valid_1's binary_logloss: 0.140328
[25]	valid_0's auc: 0.869684	valid_0's binary_logloss: 0.125917	valid_1's auc: 0.83457	valid_1's binary_logloss: 0.140301
[26]	valid_0's auc: 0.870271	valid_0's binary_logloss: 0.12561	valid_1's auc: 0.834087	valid_1's binary_logloss: 0.140349
[27]	valid_0's auc: 0.87126	valid_0's binary_logloss: 0.125302	valid_1's auc: 0.833822	valid_1's binary_logloss: 0.140277
[28]	valid_0's auc: 0.872741	valid_0's binary_logloss: 0.124882	valid_1's auc: 0.833886	valid_1's binary_logloss: 0.140255
[29]	valid_0's auc: 0.873424	valid_0's binary_logloss: 0.124594	valid_1's auc: 0.833937	valid_1's binary_logloss: 0.140189
[30]	valid_0's auc: 0.874669	valid_0's binary_logloss: 0.124295	valid_1's auc: 0.834461	valid_1's binary_logloss: 0.140113
[31]	valid_0's auc: 0.875234	valid_0's binary_logloss: 0.124066	valid_1's auc: 0.83444	valid_1's binary_logloss: 0.140064
[32]	valid_0's auc: 0.875809	valid_0's binary_logloss: 0.123813	valid_1's auc: 0.834196	valid_1's binary_logloss: 0.140095
[33]	valid_0's auc: 0.876619	valid_0's binary_logloss: 0.123531	valid_1's auc: 0.834143	valid_1's binary_logloss: 0.140029
[34]	valid_0's auc: 0.877233	valid_0's binary_logloss: 0.123254	valid_1's auc: 0.833865	valid_1's binary_logloss: 0.140055
[35]	valid_0's auc: 0.877763	valid_0's binary_logloss: 0.123009	valid_1's auc: 0.833699	valid_1's binary_logloss: 0.140082
[36]	valid_0's auc: 0.878322	valid_0's binary_logloss: 0.122755	valid_1's auc: 0.833221	valid_1's binary_logloss: 0.140158
[37]	valid_0's auc: 0.878948	valid_0's binary_logloss: 0.122495	valid_1's auc: 0.832792	valid_1's binary_logloss: 0.14018
[38]	valid_0's auc: 0.879452	valid_0's binary_logloss: 0.122301	valid_1's auc: 0.832977	valid_1's binary_logloss: 0.140154
[39]	valid_0's auc: 0.880156	valid_0's binary_logloss: 0.122063	valid_1's auc: 0.832913	valid_1's binary_logloss: 0.140188
Early stopping, best iteration is:
[9]	valid_0's auc: 0.850276	valid_0's binary_logloss: 0.135274	valid_1's auc: 0.840042	valid_1's binary_logloss: 0.145043
[1]	valid_0's auc: 0.814371	valid_0's binary_logloss: 0.156455	valid_1's auc: 0.813175	valid_1's binary_logloss: 0.16542
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.827201	valid_0's binary_logloss: 0.151083	valid_1's auc: 0.820013	valid_1's binary_logloss: 0.160085
[3]	valid_0's auc: 0.837003	valid_0's binary_logloss: 0.147235	valid_1's auc: 0.828713	valid_1's binary_logloss: 0.156463
[4]	valid_0's auc: 0.840971	valid_0's binary_logloss: 0.144235	valid_1's auc: 0.831369	valid_1's binary_logloss: 0.153575
[5]	valid_0's auc: 0.842902	valid_0's binary_logloss: 0.141803	valid_1's auc: 0.833329	valid_1's binary_logloss: 0.151283
[6]	valid_0's auc: 0.847067	valid_0's binary_logloss: 0.139776	valid_1's auc: 0.836625	valid_1's binary_logloss: 0.14937
[7]	valid_0's auc: 0.848894	valid_0's binary_logloss: 0.138056	valid_1's auc: 0.837174	valid_1's binary_logloss: 0.147778
[8]	valid_0's auc: 0.850546	valid_0's binary_logloss: 0.136579	valid_1's auc: 0.837405	valid_1's binary_logloss: 0.146516
[9]	valid_0's auc: 0.852419	valid_0's binary_logloss: 0.135296	valid_1's auc: 0.837736	valid_1's binary_logloss: 0.145412
[10]	valid_0's auc: 0.85454	valid_0's binary_logloss: 0.134224	valid_1's auc: 0.838661	valid_1's binary_logloss: 0.144501
[11]	valid_0's auc: 0.856414	valid_0's binary_logloss: 0.133277	valid_1's auc: 0.838921	valid_1's binary_logloss: 0.143737
[12]	valid_0's auc: 0.857283	valid_0's binary_logloss: 0.132431	valid_1's auc: 0.837263	valid_1's binary_logloss: 0.143197
[13]	valid_0's auc: 0.858075	valid_0's binary_logloss: 0.131688	valid_1's auc: 0.837254	valid_1's binary_logloss: 0.142633
[14]	valid_0's auc: 0.858945	valid_0's binary_logloss: 0.130983	valid_1's auc: 0.837669	valid_1's binary_logloss: 0.142158
[15]	valid_0's auc: 0.859875	valid_0's binary_logloss: 0.130335	valid_1's auc: 0.837434	valid_1's binary_logloss: 0.141848
[16]	valid_0's auc: 0.860979	valid_0's binary_logloss: 0.129731	valid_1's auc: 0.837355	valid_1's binary_logloss: 0.141492
[17]	valid_0's auc: 0.861681	valid_0's binary_logloss: 0.129123	valid_1's auc: 0.837851	valid_1's binary_logloss: 0.14114
[18]	valid_0's auc: 0.863324	valid_0's binary_logloss: 0.128568	valid_1's auc: 0.838024	valid_1's binary_logloss: 0.140911
[19]	valid_0's auc: 0.864682	valid_0's binary_logloss: 0.128016	valid_1's auc: 0.838411	valid_1's binary_logloss: 0.140669
[20]	valid_0's auc: 0.865346	valid_0's binary_logloss: 0.127585	valid_1's auc: 0.838201	valid_1's binary_logloss: 0.140498
[21]	valid_0's auc: 0.8665	valid_0's binary_logloss: 0.127103	valid_1's auc: 0.83851	valid_1's binary_logloss: 0.140294
[22]	valid_0's auc: 0.867889	valid_0's binary_logloss: 0.126657	valid_1's auc: 0.839074	valid_1's binary_logloss: 0.140136
[23]	valid_0's auc: 0.868624	valid_0's binary_logloss: 0.126281	valid_1's auc: 0.838987	valid_1's binary_logloss: 0.140028
[24]	valid_0's auc: 0.869855	valid_0's binary_logloss: 0.125882	valid_1's auc: 0.838802	valid_1's binary_logloss: 0.139975
[25]	valid_0's auc: 0.870426	valid_0's binary_logloss: 0.125541	valid_1's auc: 0.83912	valid_1's binary_logloss: 0.139815
[26]	valid_0's auc: 0.871649	valid_0's binary_logloss: 0.125171	valid_1's auc: 0.838779	valid_1's binary_logloss: 0.139741
[27]	valid_0's auc: 0.872513	valid_0's binary_logloss: 0.124859	valid_1's auc: 0.839241	valid_1's binary_logloss: 0.139648
[28]	valid_0's auc: 0.873252	valid_0's binary_logloss: 0.124586	valid_1's auc: 0.839378	valid_1's binary_logloss: 0.139578
[29]	valid_0's auc: 0.87422	valid_0's binary_logloss: 0.124271	valid_1's auc: 0.839436	valid_1's binary_logloss: 0.139581
[30]	valid_0's auc: 0.874647	valid_0's binary_logloss: 0.124019	valid_1's auc: 0.83907	valid_1's binary_logloss: 0.13958
[31]	valid_0's auc: 0.875603	valid_0's binary_logloss: 0.123747	valid_1's auc: 0.839226	valid_1's binary_logloss: 0.139539
[32]	valid_0's auc: 0.87651	valid_0's binary_logloss: 0.123413	valid_1's auc: 0.838983	valid_1's binary_logloss: 0.139561
[33]	valid_0's auc: 0.877103	valid_0's binary_logloss: 0.123199	valid_1's auc: 0.839235	valid_1's binary_logloss: 0.139528
[34]	valid_0's auc: 0.878189	valid_0's binary_logloss: 0.122925	valid_1's auc: 0.839004	valid_1's binary_logloss: 0.139567
[35]	valid_0's auc: 0.878765	valid_0's binary_logloss: 0.122704	valid_1's auc: 0.838946	valid_1's binary_logloss: 0.139572
[36]	valid_0's auc: 0.879377	valid_0's binary_logloss: 0.122448	valid_1's auc: 0.83898	valid_1's binary_logloss: 0.139554
[37]	valid_0's auc: 0.880134	valid_0's binary_logloss: 0.122143	valid_1's auc: 0.838888	valid_1's binary_logloss: 0.139557
[38]	valid_0's auc: 0.880571	valid_0's binary_logloss: 0.121985	valid_1's auc: 0.839133	valid_1's binary_logloss: 0.13952
[39]	valid_0's auc: 0.88098	valid_0's binary_logloss: 0.12178	valid_1's auc: 0.838929	valid_1's binary_logloss: 0.139584
[40]	valid_0's auc: 0.881495	valid_0's binary_logloss: 0.121571	valid_1's auc: 0.838869	valid_1's binary_logloss: 0.139555
[41]	valid_0's auc: 0.881897	valid_0's binary_logloss: 0.121382	valid_1's auc: 0.838702	valid_1's binary_logloss: 0.139575
[42]	valid_0's auc: 0.882625	valid_0's binary_logloss: 0.121107	valid_1's auc: 0.83891	valid_1's binary_logloss: 0.139554
[43]	valid_0's auc: 0.882956	valid_0's binary_logloss: 0.120926	valid_1's auc: 0.83914	valid_1's binary_logloss: 0.139546
[44]	valid_0's auc: 0.883618	valid_0's binary_logloss: 0.120726	valid_1's auc: 0.838905	valid_1's binary_logloss: 0.139592
[45]	valid_0's auc: 0.88419	valid_0's binary_logloss: 0.120513	valid_1's auc: 0.838587	valid_1's binary_logloss: 0.139651
[46]	valid_0's auc: 0.884555	valid_0's binary_logloss: 0.120339	valid_1's auc: 0.838288	valid_1's binary_logloss: 0.139703
[47]	valid_0's auc: 0.884789	valid_0's binary_logloss: 0.120189	valid_1's auc: 0.838155	valid_1's binary_logloss: 0.139692
[48]	valid_0's auc: 0.884968	valid_0's binary_logloss: 0.120074	valid_1's auc: 0.8384	valid_1's binary_logloss: 0.139667
[49]	valid_0's auc: 0.885336	valid_0's binary_logloss: 0.119939	valid_1's auc: 0.83827	valid_1's binary_logloss: 0.13968
[50]	valid_0's auc: 0.885759	valid_0's binary_logloss: 0.119734	valid_1's auc: 0.838029	valid_1's binary_logloss: 0.139727
[51]	valid_0's auc: 0.886206	valid_0's binary_logloss: 0.119595	valid_1's auc: 0.838077	valid_1's binary_logloss: 0.13975
[52]	valid_0's auc: 0.886527	valid_0's binary_logloss: 0.119458	valid_1's auc: 0.838081	valid_1's binary_logloss: 0.139779
[53]	valid_0's auc: 0.886808	valid_0's binary_logloss: 0.119271	valid_1's auc: 0.838632	valid_1's binary_logloss: 0.139643
[54]	valid_0's auc: 0.887203	valid_0's binary_logloss: 0.119092	valid_1's auc: 0.838634	valid_1's binary_logloss: 0.139662
[55]	valid_0's auc: 0.88761	valid_0's binary_logloss: 0.118944	valid_1's auc: 0.838551	valid_1's binary_logloss: 0.139692
[56]	valid_0's auc: 0.887901	valid_0's binary_logloss: 0.118796	valid_1's auc: 0.838303	valid_1's binary_logloss: 0.139748
[57]	valid_0's auc: 0.888238	valid_0's binary_logloss: 0.118604	valid_1's auc: 0.838445	valid_1's binary_logloss: 0.139719
[58]	valid_0's auc: 0.888615	valid_0's binary_logloss: 0.118451	valid_1's auc: 0.838262	valid_1's binary_logloss: 0.139754
[59]	valid_0's auc: 0.889294	valid_0's binary_logloss: 0.11824	valid_1's auc: 0.837652	valid_1's binary_logloss: 0.139829
Early stopping, best iteration is:
[29]	valid_0's auc: 0.87422	valid_0's binary_logloss: 0.124271	valid_1's auc: 0.839436	valid_1's binary_logloss: 0.139581
[1]	valid_0's auc: 0.821645	valid_0's binary_logloss: 0.156526	valid_1's auc: 0.81857	valid_1's binary_logloss: 0.165099
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.827652	valid_0's binary_logloss: 0.151185	valid_1's auc: 0.82254	valid_1's binary_logloss: 0.160085
[3]	valid_0's auc: 0.836059	valid_0's binary_logloss: 0.14726	valid_1's auc: 0.828119	valid_1's binary_logloss: 0.156604
[4]	valid_0's auc: 0.840512	valid_0's binary_logloss: 0.144255	valid_1's auc: 0.831906	valid_1's binary_logloss: 0.153569
[5]	valid_0's auc: 0.841872	valid_0's binary_logloss: 0.141762	valid_1's auc: 0.834269	valid_1's binary_logloss: 0.151256
[6]	valid_0's auc: 0.844933	valid_0's binary_logloss: 0.139748	valid_1's auc: 0.835284	valid_1's binary_logloss: 0.149358
[7]	valid_0's auc: 0.845797	valid_0's binary_logloss: 0.138081	valid_1's auc: 0.834956	valid_1's binary_logloss: 0.147852
[8]	valid_0's auc: 0.847134	valid_0's binary_logloss: 0.136692	valid_1's auc: 0.836859	valid_1's binary_logloss: 0.146584
[9]	valid_0's auc: 0.849562	valid_0's binary_logloss: 0.135436	valid_1's auc: 0.837503	valid_1's binary_logloss: 0.145574
[10]	valid_0's auc: 0.850799	valid_0's binary_logloss: 0.134374	valid_1's auc: 0.838107	valid_1's binary_logloss: 0.144671
[11]	valid_0's auc: 0.852171	valid_0's binary_logloss: 0.133441	valid_1's auc: 0.838177	valid_1's binary_logloss: 0.143939
[12]	valid_0's auc: 0.854501	valid_0's binary_logloss: 0.132622	valid_1's auc: 0.838294	valid_1's binary_logloss: 0.143407
[13]	valid_0's auc: 0.856936	valid_0's binary_logloss: 0.131808	valid_1's auc: 0.838199	valid_1's binary_logloss: 0.142866
[14]	valid_0's auc: 0.857673	valid_0's binary_logloss: 0.131166	valid_1's auc: 0.837548	valid_1's binary_logloss: 0.142532
[15]	valid_0's auc: 0.859044	valid_0's binary_logloss: 0.130533	valid_1's auc: 0.837939	valid_1's binary_logloss: 0.142166
[16]	valid_0's auc: 0.859941	valid_0's binary_logloss: 0.129973	valid_1's auc: 0.837854	valid_1's binary_logloss: 0.141803
[17]	valid_0's auc: 0.861036	valid_0's binary_logloss: 0.129377	valid_1's auc: 0.838222	valid_1's binary_logloss: 0.141476
[18]	valid_0's auc: 0.862799	valid_0's binary_logloss: 0.128809	valid_1's auc: 0.838732	valid_1's binary_logloss: 0.141125
[19]	valid_0's auc: 0.864128	valid_0's binary_logloss: 0.128328	valid_1's auc: 0.839441	valid_1's binary_logloss: 0.140763
[20]	valid_0's auc: 0.864975	valid_0's binary_logloss: 0.127913	valid_1's auc: 0.839957	valid_1's binary_logloss: 0.140513
[21]	valid_0's auc: 0.866258	valid_0's binary_logloss: 0.127436	valid_1's auc: 0.83993	valid_1's binary_logloss: 0.140328
[22]	valid_0's auc: 0.867054	valid_0's binary_logloss: 0.127069	valid_1's auc: 0.840099	valid_1's binary_logloss: 0.14013
[23]	valid_0's auc: 0.867852	valid_0's binary_logloss: 0.126713	valid_1's auc: 0.839768	valid_1's binary_logloss: 0.140027
[24]	valid_0's auc: 0.868599	valid_0's binary_logloss: 0.126372	valid_1's auc: 0.840299	valid_1's binary_logloss: 0.139904
[25]	valid_0's auc: 0.869485	valid_0's binary_logloss: 0.126047	valid_1's auc: 0.840676	valid_1's binary_logloss: 0.139735
[26]	valid_0's auc: 0.870541	valid_0's binary_logloss: 0.125627	valid_1's auc: 0.840242	valid_1's binary_logloss: 0.139708
[27]	valid_0's auc: 0.871191	valid_0's binary_logloss: 0.125319	valid_1's auc: 0.839924	valid_1's binary_logloss: 0.139651
[28]	valid_0's auc: 0.871788	valid_0's binary_logloss: 0.125064	valid_1's auc: 0.839647	valid_1's binary_logloss: 0.139574
[29]	valid_0's auc: 0.872714	valid_0's binary_logloss: 0.124726	valid_1's auc: 0.840154	valid_1's binary_logloss: 0.139481
[30]	valid_0's auc: 0.873746	valid_0's binary_logloss: 0.124416	valid_1's auc: 0.839602	valid_1's binary_logloss: 0.139497
[31]	valid_0's auc: 0.874715	valid_0's binary_logloss: 0.124154	valid_1's auc: 0.839072	valid_1's binary_logloss: 0.139568
[32]	valid_0's auc: 0.875774	valid_0's binary_logloss: 0.123879	valid_1's auc: 0.838748	valid_1's binary_logloss: 0.139579
[33]	valid_0's auc: 0.876333	valid_0's binary_logloss: 0.123614	valid_1's auc: 0.83895	valid_1's binary_logloss: 0.139492
[34]	valid_0's auc: 0.876841	valid_0's binary_logloss: 0.123372	valid_1's auc: 0.839078	valid_1's binary_logloss: 0.139459
[35]	valid_0's auc: 0.877422	valid_0's binary_logloss: 0.123118	valid_1's auc: 0.839413	valid_1's binary_logloss: 0.139362
[36]	valid_0's auc: 0.878163	valid_0's binary_logloss: 0.122873	valid_1's auc: 0.839157	valid_1's binary_logloss: 0.139395
[37]	valid_0's auc: 0.87856	valid_0's binary_logloss: 0.122649	valid_1's auc: 0.839051	valid_1's binary_logloss: 0.139443
[38]	valid_0's auc: 0.879102	valid_0's binary_logloss: 0.122415	valid_1's auc: 0.839482	valid_1's binary_logloss: 0.139378
[39]	valid_0's auc: 0.879978	valid_0's binary_logloss: 0.122126	valid_1's auc: 0.83945	valid_1's binary_logloss: 0.139391
[40]	valid_0's auc: 0.880399	valid_0's binary_logloss: 0.121938	valid_1's auc: 0.840127	valid_1's binary_logloss: 0.139314
[41]	valid_0's auc: 0.880914	valid_0's binary_logloss: 0.121757	valid_1's auc: 0.839733	valid_1's binary_logloss: 0.13937
[42]	valid_0's auc: 0.881674	valid_0's binary_logloss: 0.121547	valid_1's auc: 0.839744	valid_1's binary_logloss: 0.139371
[43]	valid_0's auc: 0.882352	valid_0's binary_logloss: 0.121291	valid_1's auc: 0.839733	valid_1's binary_logloss: 0.139358
[44]	valid_0's auc: 0.882869	valid_0's binary_logloss: 0.121117	valid_1's auc: 0.839827	valid_1's binary_logloss: 0.13937
[45]	valid_0's auc: 0.883308	valid_0's binary_logloss: 0.120912	valid_1's auc: 0.839923	valid_1's binary_logloss: 0.139325
[46]	valid_0's auc: 0.883814	valid_0's binary_logloss: 0.120682	valid_1's auc: 0.83985	valid_1's binary_logloss: 0.139336
[47]	valid_0's auc: 0.884201	valid_0's binary_logloss: 0.120532	valid_1's auc: 0.839839	valid_1's binary_logloss: 0.139368
[48]	valid_0's auc: 0.884428	valid_0's binary_logloss: 0.120354	valid_1's auc: 0.839815	valid_1's binary_logloss: 0.139368
[49]	valid_0's auc: 0.884565	valid_0's binary_logloss: 0.120223	valid_1's auc: 0.83995	valid_1's binary_logloss: 0.139355
[50]	valid_0's auc: 0.885102	valid_0's binary_logloss: 0.120013	valid_1's auc: 0.839807	valid_1's binary_logloss: 0.13936
[51]	valid_0's auc: 0.885668	valid_0's binary_logloss: 0.119856	valid_1's auc: 0.839722	valid_1's binary_logloss: 0.139361
[52]	valid_0's auc: 0.886053	valid_0's binary_logloss: 0.119673	valid_1's auc: 0.839593	valid_1's binary_logloss: 0.139409
[53]	valid_0's auc: 0.886187	valid_0's binary_logloss: 0.119567	valid_1's auc: 0.839723	valid_1's binary_logloss: 0.139436
[54]	valid_0's auc: 0.886397	valid_0's binary_logloss: 0.11943	valid_1's auc: 0.839999	valid_1's binary_logloss: 0.139424
[55]	valid_0's auc: 0.886702	valid_0's binary_logloss: 0.119279	valid_1's auc: 0.839951	valid_1's binary_logloss: 0.139433
Early stopping, best iteration is:
[25]	valid_0's auc: 0.869485	valid_0's binary_logloss: 0.126047	valid_1's auc: 0.840676	valid_1's binary_logloss: 0.139735
[1]	valid_0's auc: 0.820235	valid_0's binary_logloss: 0.156085	valid_1's auc: 0.81613	valid_1's binary_logloss: 0.164998
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.825775	valid_0's binary_logloss: 0.150951	valid_1's auc: 0.821831	valid_1's binary_logloss: 0.15988
[3]	valid_0's auc: 0.832192	valid_0's binary_logloss: 0.147167	valid_1's auc: 0.827302	valid_1's binary_logloss: 0.156397
[4]	valid_0's auc: 0.837518	valid_0's binary_logloss: 0.144131	valid_1's auc: 0.8334	valid_1's binary_logloss: 0.153325
[5]	valid_0's auc: 0.842289	valid_0's binary_logloss: 0.141651	valid_1's auc: 0.836018	valid_1's binary_logloss: 0.150959
[6]	valid_0's auc: 0.844974	valid_0's binary_logloss: 0.139661	valid_1's auc: 0.838022	valid_1's binary_logloss: 0.149046
[7]	valid_0's auc: 0.846623	valid_0's binary_logloss: 0.138001	valid_1's auc: 0.837777	valid_1's binary_logloss: 0.147509
[8]	valid_0's auc: 0.848529	valid_0's binary_logloss: 0.136578	valid_1's auc: 0.839519	valid_1's binary_logloss: 0.146015
[9]	valid_0's auc: 0.850276	valid_0's binary_logloss: 0.135274	valid_1's auc: 0.840042	valid_1's binary_logloss: 0.145043
[10]	valid_0's auc: 0.852371	valid_0's binary_logloss: 0.134185	valid_1's auc: 0.839808	valid_1's binary_logloss: 0.144182
[11]	valid_0's auc: 0.853705	valid_0's binary_logloss: 0.133238	valid_1's auc: 0.83943	valid_1's binary_logloss: 0.14345
[12]	valid_0's auc: 0.855304	valid_0's binary_logloss: 0.132409	valid_1's auc: 0.838786	valid_1's binary_logloss: 0.142878
[13]	valid_0's auc: 0.856638	valid_0's binary_logloss: 0.131658	valid_1's auc: 0.838632	valid_1's binary_logloss: 0.142368
[14]	valid_0's auc: 0.85784	valid_0's binary_logloss: 0.130967	valid_1's auc: 0.838182	valid_1's binary_logloss: 0.14198
[15]	valid_0's auc: 0.859432	valid_0's binary_logloss: 0.130373	valid_1's auc: 0.838236	valid_1's binary_logloss: 0.141582
[16]	valid_0's auc: 0.860428	valid_0's binary_logloss: 0.129814	valid_1's auc: 0.837425	valid_1's binary_logloss: 0.141389
[17]	valid_0's auc: 0.861409	valid_0's binary_logloss: 0.12923	valid_1's auc: 0.837358	valid_1's binary_logloss: 0.141106
[18]	valid_0's auc: 0.86332	valid_0's binary_logloss: 0.128681	valid_1's auc: 0.836771	valid_1's binary_logloss: 0.140932
[19]	valid_0's auc: 0.864365	valid_0's binary_logloss: 0.128233	valid_1's auc: 0.836564	valid_1's binary_logloss: 0.140796
[20]	valid_0's auc: 0.865268	valid_0's binary_logloss: 0.127815	valid_1's auc: 0.835613	valid_1's binary_logloss: 0.140715
[21]	valid_0's auc: 0.865869	valid_0's binary_logloss: 0.127427	valid_1's auc: 0.835085	valid_1's binary_logloss: 0.140641
[22]	valid_0's auc: 0.867223	valid_0's binary_logloss: 0.126993	valid_1's auc: 0.835383	valid_1's binary_logloss: 0.140519
[23]	valid_0's auc: 0.867898	valid_0's binary_logloss: 0.126644	valid_1's auc: 0.835018	valid_1's binary_logloss: 0.140432
[24]	valid_0's auc: 0.869077	valid_0's binary_logloss: 0.126246	valid_1's auc: 0.835199	valid_1's binary_logloss: 0.140328
[25]	valid_0's auc: 0.869684	valid_0's binary_logloss: 0.125917	valid_1's auc: 0.83457	valid_1's binary_logloss: 0.140301
[26]	valid_0's auc: 0.870271	valid_0's binary_logloss: 0.12561	valid_1's auc: 0.834087	valid_1's binary_logloss: 0.140349
[27]	valid_0's auc: 0.87126	valid_0's binary_logloss: 0.125302	valid_1's auc: 0.833822	valid_1's binary_logloss: 0.140277
[28]	valid_0's auc: 0.872741	valid_0's binary_logloss: 0.124882	valid_1's auc: 0.833886	valid_1's binary_logloss: 0.140255
[29]	valid_0's auc: 0.873424	valid_0's binary_logloss: 0.124594	valid_1's auc: 0.833937	valid_1's binary_logloss: 0.140189
[30]	valid_0's auc: 0.874669	valid_0's binary_logloss: 0.124295	valid_1's auc: 0.834461	valid_1's binary_logloss: 0.140113
[31]	valid_0's auc: 0.875234	valid_0's binary_logloss: 0.124066	valid_1's auc: 0.83444	valid_1's binary_logloss: 0.140064
[32]	valid_0's auc: 0.875809	valid_0's binary_logloss: 0.123813	valid_1's auc: 0.834196	valid_1's binary_logloss: 0.140095
[33]	valid_0's auc: 0.876619	valid_0's binary_logloss: 0.123531	valid_1's auc: 0.834143	valid_1's binary_logloss: 0.140029
[34]	valid_0's auc: 0.877233	valid_0's binary_logloss: 0.123254	valid_1's auc: 0.833865	valid_1's binary_logloss: 0.140055
[35]	valid_0's auc: 0.877763	valid_0's binary_logloss: 0.123009	valid_1's auc: 0.833699	valid_1's binary_logloss: 0.140082
[36]	valid_0's auc: 0.878322	valid_0's binary_logloss: 0.122755	valid_1's auc: 0.833221	valid_1's binary_logloss: 0.140158
[37]	valid_0's auc: 0.878948	valid_0's binary_logloss: 0.122495	valid_1's auc: 0.832792	valid_1's binary_logloss: 0.14018
[38]	valid_0's auc: 0.879452	valid_0's binary_logloss: 0.122301	valid_1's auc: 0.832977	valid_1's binary_logloss: 0.140154
[39]	valid_0's auc: 0.880156	valid_0's binary_logloss: 0.122063	valid_1's auc: 0.832913	valid_1's binary_logloss: 0.140188
Early stopping, best iteration is:
[9]	valid_0's auc: 0.850276	valid_0's binary_logloss: 0.135274	valid_1's auc: 0.840042	valid_1's binary_logloss: 0.145043
[1]	valid_0's auc: 0.814371	valid_0's binary_logloss: 0.156455	valid_1's auc: 0.813175	valid_1's binary_logloss: 0.16542
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.827201	valid_0's binary_logloss: 0.151083	valid_1's auc: 0.820013	valid_1's binary_logloss: 0.160085
[3]	valid_0's auc: 0.837003	valid_0's binary_logloss: 0.147235	valid_1's auc: 0.828713	valid_1's binary_logloss: 0.156463
[4]	valid_0's auc: 0.840971	valid_0's binary_logloss: 0.144235	valid_1's auc: 0.831369	valid_1's binary_logloss: 0.153575
[5]	valid_0's auc: 0.842902	valid_0's binary_logloss: 0.141803	valid_1's auc: 0.833329	valid_1's binary_logloss: 0.151283
[6]	valid_0's auc: 0.847067	valid_0's binary_logloss: 0.139776	valid_1's auc: 0.836625	valid_1's binary_logloss: 0.14937
[7]	valid_0's auc: 0.848894	valid_0's binary_logloss: 0.138056	valid_1's auc: 0.837174	valid_1's binary_logloss: 0.147778
[8]	valid_0's auc: 0.850546	valid_0's binary_logloss: 0.136579	valid_1's auc: 0.837405	valid_1's binary_logloss: 0.146516
[9]	valid_0's auc: 0.852419	valid_0's binary_logloss: 0.135296	valid_1's auc: 0.837736	valid_1's binary_logloss: 0.145412
[10]	valid_0's auc: 0.85454	valid_0's binary_logloss: 0.134224	valid_1's auc: 0.838661	valid_1's binary_logloss: 0.144501
[11]	valid_0's auc: 0.856414	valid_0's binary_logloss: 0.133277	valid_1's auc: 0.838921	valid_1's binary_logloss: 0.143737
[12]	valid_0's auc: 0.857283	valid_0's binary_logloss: 0.132431	valid_1's auc: 0.837263	valid_1's binary_logloss: 0.143197
[13]	valid_0's auc: 0.858075	valid_0's binary_logloss: 0.131688	valid_1's auc: 0.837254	valid_1's binary_logloss: 0.142633
[14]	valid_0's auc: 0.858945	valid_0's binary_logloss: 0.130983	valid_1's auc: 0.837669	valid_1's binary_logloss: 0.142158
[15]	valid_0's auc: 0.859875	valid_0's binary_logloss: 0.130335	valid_1's auc: 0.837434	valid_1's binary_logloss: 0.141848
[16]	valid_0's auc: 0.860979	valid_0's binary_logloss: 0.129731	valid_1's auc: 0.837355	valid_1's binary_logloss: 0.141492
[17]	valid_0's auc: 0.861681	valid_0's binary_logloss: 0.129123	valid_1's auc: 0.837851	valid_1's binary_logloss: 0.14114
[18]	valid_0's auc: 0.863324	valid_0's binary_logloss: 0.128568	valid_1's auc: 0.838024	valid_1's binary_logloss: 0.140911
[19]	valid_0's auc: 0.864682	valid_0's binary_logloss: 0.128016	valid_1's auc: 0.838411	valid_1's binary_logloss: 0.140669
[20]	valid_0's auc: 0.865346	valid_0's binary_logloss: 0.127585	valid_1's auc: 0.838201	valid_1's binary_logloss: 0.140498
[21]	valid_0's auc: 0.8665	valid_0's binary_logloss: 0.127103	valid_1's auc: 0.83851	valid_1's binary_logloss: 0.140294
[22]	valid_0's auc: 0.867889	valid_0's binary_logloss: 0.126657	valid_1's auc: 0.839074	valid_1's binary_logloss: 0.140136
[23]	valid_0's auc: 0.868624	valid_0's binary_logloss: 0.126281	valid_1's auc: 0.838987	valid_1's binary_logloss: 0.140028
[24]	valid_0's auc: 0.869855	valid_0's binary_logloss: 0.125882	valid_1's auc: 0.838802	valid_1's binary_logloss: 0.139975
[25]	valid_0's auc: 0.870426	valid_0's binary_logloss: 0.125541	valid_1's auc: 0.83912	valid_1's binary_logloss: 0.139815
[26]	valid_0's auc: 0.871649	valid_0's binary_logloss: 0.125171	valid_1's auc: 0.838779	valid_1's binary_logloss: 0.139741
[27]	valid_0's auc: 0.872513	valid_0's binary_logloss: 0.124859	valid_1's auc: 0.839241	valid_1's binary_logloss: 0.139648
[28]	valid_0's auc: 0.873252	valid_0's binary_logloss: 0.124586	valid_1's auc: 0.839378	valid_1's binary_logloss: 0.139578
[29]	valid_0's auc: 0.87422	valid_0's binary_logloss: 0.124271	valid_1's auc: 0.839436	valid_1's binary_logloss: 0.139581
[30]	valid_0's auc: 0.874647	valid_0's binary_logloss: 0.124019	valid_1's auc: 0.83907	valid_1's binary_logloss: 0.13958
[31]	valid_0's auc: 0.875603	valid_0's binary_logloss: 0.123747	valid_1's auc: 0.839226	valid_1's binary_logloss: 0.139539
[32]	valid_0's auc: 0.87651	valid_0's binary_logloss: 0.123413	valid_1's auc: 0.838983	valid_1's binary_logloss: 0.139561
[33]	valid_0's auc: 0.877103	valid_0's binary_logloss: 0.123199	valid_1's auc: 0.839235	valid_1's binary_logloss: 0.139528
[34]	valid_0's auc: 0.878189	valid_0's binary_logloss: 0.122925	valid_1's auc: 0.839004	valid_1's binary_logloss: 0.139567
[35]	valid_0's auc: 0.878765	valid_0's binary_logloss: 0.122704	valid_1's auc: 0.838946	valid_1's binary_logloss: 0.139572
[36]	valid_0's auc: 0.879377	valid_0's binary_logloss: 0.122448	valid_1's auc: 0.83898	valid_1's binary_logloss: 0.139554
[37]	valid_0's auc: 0.880134	valid_0's binary_logloss: 0.122143	valid_1's auc: 0.838888	valid_1's binary_logloss: 0.139557
[38]	valid_0's auc: 0.880571	valid_0's binary_logloss: 0.121985	valid_1's auc: 0.839133	valid_1's binary_logloss: 0.13952
[39]	valid_0's auc: 0.88098	valid_0's binary_logloss: 0.12178	valid_1's auc: 0.838929	valid_1's binary_logloss: 0.139584
[40]	valid_0's auc: 0.881495	valid_0's binary_logloss: 0.121571	valid_1's auc: 0.838869	valid_1's binary_logloss: 0.139555
[41]	valid_0's auc: 0.881897	valid_0's binary_logloss: 0.121382	valid_1's auc: 0.838702	valid_1's binary_logloss: 0.139575
[42]	valid_0's auc: 0.882625	valid_0's binary_logloss: 0.121107	valid_1's auc: 0.83891	valid_1's binary_logloss: 0.139554
[43]	valid_0's auc: 0.882956	valid_0's binary_logloss: 0.120926	valid_1's auc: 0.83914	valid_1's binary_logloss: 0.139546
[44]	valid_0's auc: 0.883618	valid_0's binary_logloss: 0.120726	valid_1's auc: 0.838905	valid_1's binary_logloss: 0.139592
[45]	valid_0's auc: 0.88419	valid_0's binary_logloss: 0.120513	valid_1's auc: 0.838587	valid_1's binary_logloss: 0.139651
[46]	valid_0's auc: 0.884555	valid_0's binary_logloss: 0.120339	valid_1's auc: 0.838288	valid_1's binary_logloss: 0.139703
[47]	valid_0's auc: 0.884789	valid_0's binary_logloss: 0.120189	valid_1's auc: 0.838155	valid_1's binary_logloss: 0.139692
[48]	valid_0's auc: 0.884968	valid_0's binary_logloss: 0.120074	valid_1's auc: 0.8384	valid_1's binary_logloss: 0.139667
[49]	valid_0's auc: 0.885336	valid_0's binary_logloss: 0.119939	valid_1's auc: 0.83827	valid_1's binary_logloss: 0.13968
[50]	valid_0's auc: 0.885759	valid_0's binary_logloss: 0.119734	valid_1's auc: 0.838029	valid_1's binary_logloss: 0.139727
[51]	valid_0's auc: 0.886206	valid_0's binary_logloss: 0.119595	valid_1's auc: 0.838077	valid_1's binary_logloss: 0.13975
[52]	valid_0's auc: 0.886527	valid_0's binary_logloss: 0.119458	valid_1's auc: 0.838081	valid_1's binary_logloss: 0.139779
[53]	valid_0's auc: 0.886808	valid_0's binary_logloss: 0.119271	valid_1's auc: 0.838632	valid_1's binary_logloss: 0.139643
[54]	valid_0's auc: 0.887203	valid_0's binary_logloss: 0.119092	valid_1's auc: 0.838634	valid_1's binary_logloss: 0.139662
[55]	valid_0's auc: 0.88761	valid_0's binary_logloss: 0.118944	valid_1's auc: 0.838551	valid_1's binary_logloss: 0.139692
[56]	valid_0's auc: 0.887901	valid_0's binary_logloss: 0.118796	valid_1's auc: 0.838303	valid_1's binary_logloss: 0.139748
[57]	valid_0's auc: 0.888238	valid_0's binary_logloss: 0.118604	valid_1's auc: 0.838445	valid_1's binary_logloss: 0.139719
[58]	valid_0's auc: 0.888615	valid_0's binary_logloss: 0.118451	valid_1's auc: 0.838262	valid_1's binary_logloss: 0.139754
[59]	valid_0's auc: 0.889294	valid_0's binary_logloss: 0.11824	valid_1's auc: 0.837652	valid_1's binary_logloss: 0.139829
Early stopping, best iteration is:
[29]	valid_0's auc: 0.87422	valid_0's binary_logloss: 0.124271	valid_1's auc: 0.839436	valid_1's binary_logloss: 0.139581
[1]	valid_0's auc: 0.821645	valid_0's binary_logloss: 0.156526	valid_1's auc: 0.81857	valid_1's binary_logloss: 0.165099
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.827652	valid_0's binary_logloss: 0.151185	valid_1's auc: 0.82254	valid_1's binary_logloss: 0.160085
[3]	valid_0's auc: 0.836059	valid_0's binary_logloss: 0.14726	valid_1's auc: 0.828119	valid_1's binary_logloss: 0.156604
[4]	valid_0's auc: 0.840512	valid_0's binary_logloss: 0.144255	valid_1's auc: 0.831906	valid_1's binary_logloss: 0.153569
[5]	valid_0's auc: 0.841872	valid_0's binary_logloss: 0.141762	valid_1's auc: 0.834269	valid_1's binary_logloss: 0.151256
[6]	valid_0's auc: 0.844933	valid_0's binary_logloss: 0.139748	valid_1's auc: 0.835284	valid_1's binary_logloss: 0.149358
[7]	valid_0's auc: 0.845797	valid_0's binary_logloss: 0.138081	valid_1's auc: 0.834956	valid_1's binary_logloss: 0.147852
[8]	valid_0's auc: 0.847134	valid_0's binary_logloss: 0.136692	valid_1's auc: 0.836859	valid_1's binary_logloss: 0.146584
[9]	valid_0's auc: 0.849562	valid_0's binary_logloss: 0.135436	valid_1's auc: 0.837503	valid_1's binary_logloss: 0.145574
[10]	valid_0's auc: 0.850799	valid_0's binary_logloss: 0.134374	valid_1's auc: 0.838107	valid_1's binary_logloss: 0.144671
[11]	valid_0's auc: 0.852171	valid_0's binary_logloss: 0.133441	valid_1's auc: 0.838177	valid_1's binary_logloss: 0.143939
[12]	valid_0's auc: 0.854501	valid_0's binary_logloss: 0.132622	valid_1's auc: 0.838294	valid_1's binary_logloss: 0.143407
[13]	valid_0's auc: 0.856936	valid_0's binary_logloss: 0.131808	valid_1's auc: 0.838199	valid_1's binary_logloss: 0.142866
[14]	valid_0's auc: 0.857673	valid_0's binary_logloss: 0.131166	valid_1's auc: 0.837548	valid_1's binary_logloss: 0.142532
[15]	valid_0's auc: 0.859044	valid_0's binary_logloss: 0.130533	valid_1's auc: 0.837939	valid_1's binary_logloss: 0.142166
[16]	valid_0's auc: 0.859941	valid_0's binary_logloss: 0.129973	valid_1's auc: 0.837854	valid_1's binary_logloss: 0.141803
[17]	valid_0's auc: 0.861036	valid_0's binary_logloss: 0.129377	valid_1's auc: 0.838222	valid_1's binary_logloss: 0.141476
[18]	valid_0's auc: 0.862799	valid_0's binary_logloss: 0.128809	valid_1's auc: 0.838732	valid_1's binary_logloss: 0.141125
[19]	valid_0's auc: 0.864128	valid_0's binary_logloss: 0.128328	valid_1's auc: 0.839441	valid_1's binary_logloss: 0.140763
[20]	valid_0's auc: 0.864975	valid_0's binary_logloss: 0.127913	valid_1's auc: 0.839957	valid_1's binary_logloss: 0.140513
[21]	valid_0's auc: 0.866258	valid_0's binary_logloss: 0.127436	valid_1's auc: 0.83993	valid_1's binary_logloss: 0.140328
[22]	valid_0's auc: 0.867054	valid_0's binary_logloss: 0.127069	valid_1's auc: 0.840099	valid_1's binary_logloss: 0.14013
[23]	valid_0's auc: 0.867852	valid_0's binary_logloss: 0.126713	valid_1's auc: 0.839768	valid_1's binary_logloss: 0.140027
[24]	valid_0's auc: 0.868599	valid_0's binary_logloss: 0.126372	valid_1's auc: 0.840299	valid_1's binary_logloss: 0.139904
[25]	valid_0's auc: 0.869485	valid_0's binary_logloss: 0.126047	valid_1's auc: 0.840676	valid_1's binary_logloss: 0.139735
[26]	valid_0's auc: 0.870541	valid_0's binary_logloss: 0.125627	valid_1's auc: 0.840242	valid_1's binary_logloss: 0.139708
[27]	valid_0's auc: 0.871191	valid_0's binary_logloss: 0.125319	valid_1's auc: 0.839924	valid_1's binary_logloss: 0.139651
[28]	valid_0's auc: 0.871788	valid_0's binary_logloss: 0.125064	valid_1's auc: 0.839647	valid_1's binary_logloss: 0.139574
[29]	valid_0's auc: 0.872714	valid_0's binary_logloss: 0.124726	valid_1's auc: 0.840154	valid_1's binary_logloss: 0.139481
[30]	valid_0's auc: 0.873746	valid_0's binary_logloss: 0.124416	valid_1's auc: 0.839602	valid_1's binary_logloss: 0.139497
[31]	valid_0's auc: 0.874715	valid_0's binary_logloss: 0.124154	valid_1's auc: 0.839072	valid_1's binary_logloss: 0.139568
[32]	valid_0's auc: 0.875774	valid_0's binary_logloss: 0.123879	valid_1's auc: 0.838748	valid_1's binary_logloss: 0.139579
[33]	valid_0's auc: 0.876333	valid_0's binary_logloss: 0.123614	valid_1's auc: 0.83895	valid_1's binary_logloss: 0.139492
[34]	valid_0's auc: 0.876841	valid_0's binary_logloss: 0.123372	valid_1's auc: 0.839078	valid_1's binary_logloss: 0.139459
[35]	valid_0's auc: 0.877422	valid_0's binary_logloss: 0.123118	valid_1's auc: 0.839413	valid_1's binary_logloss: 0.139362
[36]	valid_0's auc: 0.878163	valid_0's binary_logloss: 0.122873	valid_1's auc: 0.839157	valid_1's binary_logloss: 0.139395
[37]	valid_0's auc: 0.87856	valid_0's binary_logloss: 0.122649	valid_1's auc: 0.839051	valid_1's binary_logloss: 0.139443
[38]	valid_0's auc: 0.879102	valid_0's binary_logloss: 0.122415	valid_1's auc: 0.839482	valid_1's binary_logloss: 0.139378
[39]	valid_0's auc: 0.879978	valid_0's binary_logloss: 0.122126	valid_1's auc: 0.83945	valid_1's binary_logloss: 0.139391
[40]	valid_0's auc: 0.880399	valid_0's binary_logloss: 0.121938	valid_1's auc: 0.840127	valid_1's binary_logloss: 0.139314
[41]	valid_0's auc: 0.880914	valid_0's binary_logloss: 0.121757	valid_1's auc: 0.839733	valid_1's binary_logloss: 0.13937
[42]	valid_0's auc: 0.881674	valid_0's binary_logloss: 0.121547	valid_1's auc: 0.839744	valid_1's binary_logloss: 0.139371
[43]	valid_0's auc: 0.882352	valid_0's binary_logloss: 0.121291	valid_1's auc: 0.839733	valid_1's binary_logloss: 0.139358
[44]	valid_0's auc: 0.882869	valid_0's binary_logloss: 0.121117	valid_1's auc: 0.839827	valid_1's binary_logloss: 0.13937
[45]	valid_0's auc: 0.883308	valid_0's binary_logloss: 0.120912	valid_1's auc: 0.839923	valid_1's binary_logloss: 0.139325
[46]	valid_0's auc: 0.883814	valid_0's binary_logloss: 0.120682	valid_1's auc: 0.83985	valid_1's binary_logloss: 0.139336
[47]	valid_0's auc: 0.884201	valid_0's binary_logloss: 0.120532	valid_1's auc: 0.839839	valid_1's binary_logloss: 0.139368
[48]	valid_0's auc: 0.884428	valid_0's binary_logloss: 0.120354	valid_1's auc: 0.839815	valid_1's binary_logloss: 0.139368
[49]	valid_0's auc: 0.884565	valid_0's binary_logloss: 0.120223	valid_1's auc: 0.83995	valid_1's binary_logloss: 0.139355
[50]	valid_0's auc: 0.885102	valid_0's binary_logloss: 0.120013	valid_1's auc: 0.839807	valid_1's binary_logloss: 0.13936
[51]	valid_0's auc: 0.885668	valid_0's binary_logloss: 0.119856	valid_1's auc: 0.839722	valid_1's binary_logloss: 0.139361
[52]	valid_0's auc: 0.886053	valid_0's binary_logloss: 0.119673	valid_1's auc: 0.839593	valid_1's binary_logloss: 0.139409
[53]	valid_0's auc: 0.886187	valid_0's binary_logloss: 0.119567	valid_1's auc: 0.839723	valid_1's binary_logloss: 0.139436
[54]	valid_0's auc: 0.886397	valid_0's binary_logloss: 0.11943	valid_1's auc: 0.839999	valid_1's binary_logloss: 0.139424
[55]	valid_0's auc: 0.886702	valid_0's binary_logloss: 0.119279	valid_1's auc: 0.839951	valid_1's binary_logloss: 0.139433
Early stopping, best iteration is:
[25]	valid_0's auc: 0.869485	valid_0's binary_logloss: 0.126047	valid_1's auc: 0.840676	valid_1's binary_logloss: 0.139735
[1]	valid_0's auc: 0.832891	valid_0's binary_logloss: 0.155301	valid_1's auc: 0.818851	valid_1's binary_logloss: 0.164831
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.845606	valid_0's binary_logloss: 0.149818	valid_1's auc: 0.826972	valid_1's binary_logloss: 0.159925
[3]	valid_0's auc: 0.850188	valid_0's binary_logloss: 0.145683	valid_1's auc: 0.828704	valid_1's binary_logloss: 0.156313
[4]	valid_0's auc: 0.85231	valid_0's binary_logloss: 0.142507	valid_1's auc: 0.829069	valid_1's binary_logloss: 0.153533
[5]	valid_0's auc: 0.854335	valid_0's binary_logloss: 0.139906	valid_1's auc: 0.832066	valid_1's binary_logloss: 0.151309
[6]	valid_0's auc: 0.857012	valid_0's binary_logloss: 0.137695	valid_1's auc: 0.834623	valid_1's binary_logloss: 0.14943
[7]	valid_0's auc: 0.858925	valid_0's binary_logloss: 0.135794	valid_1's auc: 0.834545	valid_1's binary_logloss: 0.147904
[8]	valid_0's auc: 0.861608	valid_0's binary_logloss: 0.134145	valid_1's auc: 0.833797	valid_1's binary_logloss: 0.146702
[9]	valid_0's auc: 0.863357	valid_0's binary_logloss: 0.132708	valid_1's auc: 0.834249	valid_1's binary_logloss: 0.145747
[10]	valid_0's auc: 0.866071	valid_0's binary_logloss: 0.131394	valid_1's auc: 0.834473	valid_1's binary_logloss: 0.144785
[11]	valid_0's auc: 0.867641	valid_0's binary_logloss: 0.130276	valid_1's auc: 0.834299	valid_1's binary_logloss: 0.144038
[12]	valid_0's auc: 0.869161	valid_0's binary_logloss: 0.129189	valid_1's auc: 0.834149	valid_1's binary_logloss: 0.143493
[13]	valid_0's auc: 0.870667	valid_0's binary_logloss: 0.128249	valid_1's auc: 0.833217	valid_1's binary_logloss: 0.14314
[14]	valid_0's auc: 0.872337	valid_0's binary_logloss: 0.127379	valid_1's auc: 0.833305	valid_1's binary_logloss: 0.14275
[15]	valid_0's auc: 0.873712	valid_0's binary_logloss: 0.126538	valid_1's auc: 0.832635	valid_1's binary_logloss: 0.14257
[16]	valid_0's auc: 0.874704	valid_0's binary_logloss: 0.12582	valid_1's auc: 0.832562	valid_1's binary_logloss: 0.142228
[17]	valid_0's auc: 0.875722	valid_0's binary_logloss: 0.125162	valid_1's auc: 0.832021	valid_1's binary_logloss: 0.142052
[18]	valid_0's auc: 0.877178	valid_0's binary_logloss: 0.124543	valid_1's auc: 0.831549	valid_1's binary_logloss: 0.141895
[19]	valid_0's auc: 0.877995	valid_0's binary_logloss: 0.123978	valid_1's auc: 0.831242	valid_1's binary_logloss: 0.141804
[20]	valid_0's auc: 0.87956	valid_0's binary_logloss: 0.123373	valid_1's auc: 0.83106	valid_1's binary_logloss: 0.141701
[21]	valid_0's auc: 0.880405	valid_0's binary_logloss: 0.122832	valid_1's auc: 0.830305	valid_1's binary_logloss: 0.14165
[22]	valid_0's auc: 0.881423	valid_0's binary_logloss: 0.122355	valid_1's auc: 0.830239	valid_1's binary_logloss: 0.141656
[23]	valid_0's auc: 0.882393	valid_0's binary_logloss: 0.121843	valid_1's auc: 0.830552	valid_1's binary_logloss: 0.141537
[24]	valid_0's auc: 0.884118	valid_0's binary_logloss: 0.121284	valid_1's auc: 0.830946	valid_1's binary_logloss: 0.141426
[25]	valid_0's auc: 0.885236	valid_0's binary_logloss: 0.120821	valid_1's auc: 0.829956	valid_1's binary_logloss: 0.141489
[26]	valid_0's auc: 0.886368	valid_0's binary_logloss: 0.120378	valid_1's auc: 0.829528	valid_1's binary_logloss: 0.141559
[27]	valid_0's auc: 0.88693	valid_0's binary_logloss: 0.119952	valid_1's auc: 0.829491	valid_1's binary_logloss: 0.141578
[28]	valid_0's auc: 0.887827	valid_0's binary_logloss: 0.1195	valid_1's auc: 0.829533	valid_1's binary_logloss: 0.141548
[29]	valid_0's auc: 0.888834	valid_0's binary_logloss: 0.119055	valid_1's auc: 0.829809	valid_1's binary_logloss: 0.141549
[30]	valid_0's auc: 0.889728	valid_0's binary_logloss: 0.118695	valid_1's auc: 0.829539	valid_1's binary_logloss: 0.14162
[31]	valid_0's auc: 0.890934	valid_0's binary_logloss: 0.118306	valid_1's auc: 0.829985	valid_1's binary_logloss: 0.141525
[32]	valid_0's auc: 0.891349	valid_0's binary_logloss: 0.11798	valid_1's auc: 0.829723	valid_1's binary_logloss: 0.141529
[33]	valid_0's auc: 0.891896	valid_0's binary_logloss: 0.117633	valid_1's auc: 0.829851	valid_1's binary_logloss: 0.141488
[34]	valid_0's auc: 0.892553	valid_0's binary_logloss: 0.117331	valid_1's auc: 0.82916	valid_1's binary_logloss: 0.141607
[35]	valid_0's auc: 0.893197	valid_0's binary_logloss: 0.116998	valid_1's auc: 0.828503	valid_1's binary_logloss: 0.141781
[36]	valid_0's auc: 0.894242	valid_0's binary_logloss: 0.116658	valid_1's auc: 0.828875	valid_1's binary_logloss: 0.141779
Early stopping, best iteration is:
[6]	valid_0's auc: 0.857012	valid_0's binary_logloss: 0.137695	valid_1's auc: 0.834623	valid_1's binary_logloss: 0.14943
[1]	valid_0's auc: 0.833054	valid_0's binary_logloss: 0.155723	valid_1's auc: 0.817048	valid_1's binary_logloss: 0.165042
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.840929	valid_0's binary_logloss: 0.149897	valid_1's auc: 0.820838	valid_1's binary_logloss: 0.159671
[3]	valid_0's auc: 0.85054	valid_0's binary_logloss: 0.14575	valid_1's auc: 0.827828	valid_1's binary_logloss: 0.156036
[4]	valid_0's auc: 0.855605	valid_0's binary_logloss: 0.14246	valid_1's auc: 0.831388	valid_1's binary_logloss: 0.153065
[5]	valid_0's auc: 0.858903	valid_0's binary_logloss: 0.139711	valid_1's auc: 0.834493	valid_1's binary_logloss: 0.150758
[6]	valid_0's auc: 0.860869	valid_0's binary_logloss: 0.137459	valid_1's auc: 0.833775	valid_1's binary_logloss: 0.149055
[7]	valid_0's auc: 0.862879	valid_0's binary_logloss: 0.135513	valid_1's auc: 0.836672	valid_1's binary_logloss: 0.147391
[8]	valid_0's auc: 0.863872	valid_0's binary_logloss: 0.133916	valid_1's auc: 0.835878	valid_1's binary_logloss: 0.146178
[9]	valid_0's auc: 0.865562	valid_0's binary_logloss: 0.132428	valid_1's auc: 0.836038	valid_1's binary_logloss: 0.145107
[10]	valid_0's auc: 0.867401	valid_0's binary_logloss: 0.131116	valid_1's auc: 0.836429	valid_1's binary_logloss: 0.144153
[11]	valid_0's auc: 0.868477	valid_0's binary_logloss: 0.129981	valid_1's auc: 0.836388	valid_1's binary_logloss: 0.143351
[12]	valid_0's auc: 0.87007	valid_0's binary_logloss: 0.128907	valid_1's auc: 0.836122	valid_1's binary_logloss: 0.142803
[13]	valid_0's auc: 0.8711	valid_0's binary_logloss: 0.127994	valid_1's auc: 0.836547	valid_1's binary_logloss: 0.142243
[14]	valid_0's auc: 0.872903	valid_0's binary_logloss: 0.127122	valid_1's auc: 0.837837	valid_1's binary_logloss: 0.141718
[15]	valid_0's auc: 0.874543	valid_0's binary_logloss: 0.126262	valid_1's auc: 0.837921	valid_1's binary_logloss: 0.141291
[16]	valid_0's auc: 0.876148	valid_0's binary_logloss: 0.1255	valid_1's auc: 0.83698	valid_1's binary_logloss: 0.14114
[17]	valid_0's auc: 0.878214	valid_0's binary_logloss: 0.124724	valid_1's auc: 0.836901	valid_1's binary_logloss: 0.140905
[18]	valid_0's auc: 0.879554	valid_0's binary_logloss: 0.124053	valid_1's auc: 0.836497	valid_1's binary_logloss: 0.140787
[19]	valid_0's auc: 0.880715	valid_0's binary_logloss: 0.123405	valid_1's auc: 0.837087	valid_1's binary_logloss: 0.140481
[20]	valid_0's auc: 0.881492	valid_0's binary_logloss: 0.122867	valid_1's auc: 0.836798	valid_1's binary_logloss: 0.140353
[21]	valid_0's auc: 0.882521	valid_0's binary_logloss: 0.122301	valid_1's auc: 0.836676	valid_1's binary_logloss: 0.140236
[22]	valid_0's auc: 0.883688	valid_0's binary_logloss: 0.121776	valid_1's auc: 0.836698	valid_1's binary_logloss: 0.140127
[23]	valid_0's auc: 0.88451	valid_0's binary_logloss: 0.121226	valid_1's auc: 0.8369	valid_1's binary_logloss: 0.140044
[24]	valid_0's auc: 0.88609	valid_0's binary_logloss: 0.120643	valid_1's auc: 0.836883	valid_1's binary_logloss: 0.139917
[25]	valid_0's auc: 0.887311	valid_0's binary_logloss: 0.120104	valid_1's auc: 0.837018	valid_1's binary_logloss: 0.139879
[26]	valid_0's auc: 0.888754	valid_0's binary_logloss: 0.11961	valid_1's auc: 0.836808	valid_1's binary_logloss: 0.139895
[27]	valid_0's auc: 0.889697	valid_0's binary_logloss: 0.119114	valid_1's auc: 0.836828	valid_1's binary_logloss: 0.139904
[28]	valid_0's auc: 0.891102	valid_0's binary_logloss: 0.118637	valid_1's auc: 0.83685	valid_1's binary_logloss: 0.139833
[29]	valid_0's auc: 0.891823	valid_0's binary_logloss: 0.118217	valid_1's auc: 0.836568	valid_1's binary_logloss: 0.139836
[30]	valid_0's auc: 0.893059	valid_0's binary_logloss: 0.117781	valid_1's auc: 0.836774	valid_1's binary_logloss: 0.139827
[31]	valid_0's auc: 0.893688	valid_0's binary_logloss: 0.117412	valid_1's auc: 0.83732	valid_1's binary_logloss: 0.139721
[32]	valid_0's auc: 0.894643	valid_0's binary_logloss: 0.117013	valid_1's auc: 0.8373	valid_1's binary_logloss: 0.1397
[33]	valid_0's auc: 0.89555	valid_0's binary_logloss: 0.11666	valid_1's auc: 0.837425	valid_1's binary_logloss: 0.139692
[34]	valid_0's auc: 0.896334	valid_0's binary_logloss: 0.116252	valid_1's auc: 0.836971	valid_1's binary_logloss: 0.139746
[35]	valid_0's auc: 0.897058	valid_0's binary_logloss: 0.115923	valid_1's auc: 0.837212	valid_1's binary_logloss: 0.139756
[36]	valid_0's auc: 0.897581	valid_0's binary_logloss: 0.115642	valid_1's auc: 0.837227	valid_1's binary_logloss: 0.139719
[37]	valid_0's auc: 0.898111	valid_0's binary_logloss: 0.115297	valid_1's auc: 0.83733	valid_1's binary_logloss: 0.139758
[38]	valid_0's auc: 0.898762	valid_0's binary_logloss: 0.11499	valid_1's auc: 0.837353	valid_1's binary_logloss: 0.13975
[39]	valid_0's auc: 0.899461	valid_0's binary_logloss: 0.114643	valid_1's auc: 0.837189	valid_1's binary_logloss: 0.139803
[40]	valid_0's auc: 0.900215	valid_0's binary_logloss: 0.114372	valid_1's auc: 0.837001	valid_1's binary_logloss: 0.139865
[41]	valid_0's auc: 0.900847	valid_0's binary_logloss: 0.114036	valid_1's auc: 0.837156	valid_1's binary_logloss: 0.139874
[42]	valid_0's auc: 0.901233	valid_0's binary_logloss: 0.113757	valid_1's auc: 0.837341	valid_1's binary_logloss: 0.139879
[43]	valid_0's auc: 0.901621	valid_0's binary_logloss: 0.113498	valid_1's auc: 0.837116	valid_1's binary_logloss: 0.139982
[44]	valid_0's auc: 0.902271	valid_0's binary_logloss: 0.113194	valid_1's auc: 0.836966	valid_1's binary_logloss: 0.140012
[45]	valid_0's auc: 0.902522	valid_0's binary_logloss: 0.112949	valid_1's auc: 0.836915	valid_1's binary_logloss: 0.140035
Early stopping, best iteration is:
[15]	valid_0's auc: 0.874543	valid_0's binary_logloss: 0.126262	valid_1's auc: 0.837921	valid_1's binary_logloss: 0.141291
[1]	valid_0's auc: 0.830649	valid_0's binary_logloss: 0.155755	valid_1's auc: 0.81673	valid_1's binary_logloss: 0.164976
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.839656	valid_0's binary_logloss: 0.150015	valid_1's auc: 0.822663	valid_1's binary_logloss: 0.159866
[3]	valid_0's auc: 0.847827	valid_0's binary_logloss: 0.145888	valid_1's auc: 0.829595	valid_1's binary_logloss: 0.156171
[4]	valid_0's auc: 0.851153	valid_0's binary_logloss: 0.142542	valid_1's auc: 0.831052	valid_1's binary_logloss: 0.153261
[5]	valid_0's auc: 0.854418	valid_0's binary_logloss: 0.139824	valid_1's auc: 0.832726	valid_1's binary_logloss: 0.150974
[6]	valid_0's auc: 0.85615	valid_0's binary_logloss: 0.137634	valid_1's auc: 0.835578	valid_1's binary_logloss: 0.148988
[7]	valid_0's auc: 0.857116	valid_0's binary_logloss: 0.135891	valid_1's auc: 0.834971	valid_1's binary_logloss: 0.147626
[8]	valid_0's auc: 0.859522	valid_0's binary_logloss: 0.134235	valid_1's auc: 0.836528	valid_1's binary_logloss: 0.146258
[9]	valid_0's auc: 0.861901	valid_0's binary_logloss: 0.132802	valid_1's auc: 0.8366	valid_1's binary_logloss: 0.145288
[10]	valid_0's auc: 0.863552	valid_0's binary_logloss: 0.131531	valid_1's auc: 0.835683	valid_1's binary_logloss: 0.144554
[11]	valid_0's auc: 0.865032	valid_0's binary_logloss: 0.13038	valid_1's auc: 0.835477	valid_1's binary_logloss: 0.143965
[12]	valid_0's auc: 0.867552	valid_0's binary_logloss: 0.12931	valid_1's auc: 0.837042	valid_1's binary_logloss: 0.14326
[13]	valid_0's auc: 0.870662	valid_0's binary_logloss: 0.128333	valid_1's auc: 0.837981	valid_1's binary_logloss: 0.142696
[14]	valid_0's auc: 0.872498	valid_0's binary_logloss: 0.127408	valid_1's auc: 0.837713	valid_1's binary_logloss: 0.142262
[15]	valid_0's auc: 0.873589	valid_0's binary_logloss: 0.126603	valid_1's auc: 0.837256	valid_1's binary_logloss: 0.141869
[16]	valid_0's auc: 0.875441	valid_0's binary_logloss: 0.125783	valid_1's auc: 0.837912	valid_1's binary_logloss: 0.141528
[17]	valid_0's auc: 0.877154	valid_0's binary_logloss: 0.125036	valid_1's auc: 0.836689	valid_1's binary_logloss: 0.141384
[18]	valid_0's auc: 0.878205	valid_0's binary_logloss: 0.124367	valid_1's auc: 0.835872	valid_1's binary_logloss: 0.141256
[19]	valid_0's auc: 0.879502	valid_0's binary_logloss: 0.12371	valid_1's auc: 0.835242	valid_1's binary_logloss: 0.141186
[20]	valid_0's auc: 0.880623	valid_0's binary_logloss: 0.123116	valid_1's auc: 0.835731	valid_1's binary_logloss: 0.140946
[21]	valid_0's auc: 0.881898	valid_0's binary_logloss: 0.122562	valid_1's auc: 0.834984	valid_1's binary_logloss: 0.140914
[22]	valid_0's auc: 0.882919	valid_0's binary_logloss: 0.122011	valid_1's auc: 0.83655	valid_1's binary_logloss: 0.140596
[23]	valid_0's auc: 0.88356	valid_0's binary_logloss: 0.121524	valid_1's auc: 0.836903	valid_1's binary_logloss: 0.140423
[24]	valid_0's auc: 0.884733	valid_0's binary_logloss: 0.120948	valid_1's auc: 0.837346	valid_1's binary_logloss: 0.140282
[25]	valid_0's auc: 0.885783	valid_0's binary_logloss: 0.120481	valid_1's auc: 0.837461	valid_1's binary_logloss: 0.140158
[26]	valid_0's auc: 0.887006	valid_0's binary_logloss: 0.119965	valid_1's auc: 0.837303	valid_1's binary_logloss: 0.140169
[27]	valid_0's auc: 0.887947	valid_0's binary_logloss: 0.119452	valid_1's auc: 0.837557	valid_1's binary_logloss: 0.140113
[28]	valid_0's auc: 0.888786	valid_0's binary_logloss: 0.11903	valid_1's auc: 0.837108	valid_1's binary_logloss: 0.140154
[29]	valid_0's auc: 0.890614	valid_0's binary_logloss: 0.118542	valid_1's auc: 0.837503	valid_1's binary_logloss: 0.14008
[30]	valid_0's auc: 0.891023	valid_0's binary_logloss: 0.118199	valid_1's auc: 0.83748	valid_1's binary_logloss: 0.140051
[31]	valid_0's auc: 0.891989	valid_0's binary_logloss: 0.117866	valid_1's auc: 0.837533	valid_1's binary_logloss: 0.140006
[32]	valid_0's auc: 0.892909	valid_0's binary_logloss: 0.117477	valid_1's auc: 0.83708	valid_1's binary_logloss: 0.140054
[33]	valid_0's auc: 0.893597	valid_0's binary_logloss: 0.117091	valid_1's auc: 0.836874	valid_1's binary_logloss: 0.140061
[34]	valid_0's auc: 0.894331	valid_0's binary_logloss: 0.116711	valid_1's auc: 0.836404	valid_1's binary_logloss: 0.140111
[35]	valid_0's auc: 0.895331	valid_0's binary_logloss: 0.116306	valid_1's auc: 0.837102	valid_1's binary_logloss: 0.139942
[36]	valid_0's auc: 0.895954	valid_0's binary_logloss: 0.115973	valid_1's auc: 0.837138	valid_1's binary_logloss: 0.139958
[37]	valid_0's auc: 0.896701	valid_0's binary_logloss: 0.115673	valid_1's auc: 0.837045	valid_1's binary_logloss: 0.139954
[38]	valid_0's auc: 0.897344	valid_0's binary_logloss: 0.115335	valid_1's auc: 0.836585	valid_1's binary_logloss: 0.140026
[39]	valid_0's auc: 0.897605	valid_0's binary_logloss: 0.115043	valid_1's auc: 0.836359	valid_1's binary_logloss: 0.140104
[40]	valid_0's auc: 0.898073	valid_0's binary_logloss: 0.114754	valid_1's auc: 0.836432	valid_1's binary_logloss: 0.140096
[41]	valid_0's auc: 0.898701	valid_0's binary_logloss: 0.114389	valid_1's auc: 0.836355	valid_1's binary_logloss: 0.140115
[42]	valid_0's auc: 0.898936	valid_0's binary_logloss: 0.114144	valid_1's auc: 0.836485	valid_1's binary_logloss: 0.14011
[43]	valid_0's auc: 0.899275	valid_0's binary_logloss: 0.11385	valid_1's auc: 0.836213	valid_1's binary_logloss: 0.140214
Early stopping, best iteration is:
[13]	valid_0's auc: 0.870662	valid_0's binary_logloss: 0.128333	valid_1's auc: 0.837981	valid_1's binary_logloss: 0.142696
[1]	valid_0's auc: 0.832891	valid_0's binary_logloss: 0.155301	valid_1's auc: 0.818851	valid_1's binary_logloss: 0.164831
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.845606	valid_0's binary_logloss: 0.149818	valid_1's auc: 0.826972	valid_1's binary_logloss: 0.159925
[3]	valid_0's auc: 0.850188	valid_0's binary_logloss: 0.145683	valid_1's auc: 0.828704	valid_1's binary_logloss: 0.156313
[4]	valid_0's auc: 0.85231	valid_0's binary_logloss: 0.142507	valid_1's auc: 0.829069	valid_1's binary_logloss: 0.153533
[5]	valid_0's auc: 0.854335	valid_0's binary_logloss: 0.139906	valid_1's auc: 0.832066	valid_1's binary_logloss: 0.151309
[6]	valid_0's auc: 0.857012	valid_0's binary_logloss: 0.137695	valid_1's auc: 0.834623	valid_1's binary_logloss: 0.14943
[7]	valid_0's auc: 0.858925	valid_0's binary_logloss: 0.135794	valid_1's auc: 0.834545	valid_1's binary_logloss: 0.147904
[8]	valid_0's auc: 0.861608	valid_0's binary_logloss: 0.134145	valid_1's auc: 0.833797	valid_1's binary_logloss: 0.146702
[9]	valid_0's auc: 0.863357	valid_0's binary_logloss: 0.132708	valid_1's auc: 0.834249	valid_1's binary_logloss: 0.145747
[10]	valid_0's auc: 0.866071	valid_0's binary_logloss: 0.131394	valid_1's auc: 0.834473	valid_1's binary_logloss: 0.144785
[11]	valid_0's auc: 0.867641	valid_0's binary_logloss: 0.130276	valid_1's auc: 0.834299	valid_1's binary_logloss: 0.144038
[12]	valid_0's auc: 0.869161	valid_0's binary_logloss: 0.129189	valid_1's auc: 0.834149	valid_1's binary_logloss: 0.143493
[13]	valid_0's auc: 0.870667	valid_0's binary_logloss: 0.128249	valid_1's auc: 0.833217	valid_1's binary_logloss: 0.14314
[14]	valid_0's auc: 0.872337	valid_0's binary_logloss: 0.127379	valid_1's auc: 0.833305	valid_1's binary_logloss: 0.14275
[15]	valid_0's auc: 0.873712	valid_0's binary_logloss: 0.126538	valid_1's auc: 0.832635	valid_1's binary_logloss: 0.14257
[16]	valid_0's auc: 0.874704	valid_0's binary_logloss: 0.12582	valid_1's auc: 0.832562	valid_1's binary_logloss: 0.142228
[17]	valid_0's auc: 0.875722	valid_0's binary_logloss: 0.125162	valid_1's auc: 0.832021	valid_1's binary_logloss: 0.142052
[18]	valid_0's auc: 0.877178	valid_0's binary_logloss: 0.124543	valid_1's auc: 0.831549	valid_1's binary_logloss: 0.141895
[19]	valid_0's auc: 0.877995	valid_0's binary_logloss: 0.123978	valid_1's auc: 0.831242	valid_1's binary_logloss: 0.141804
[20]	valid_0's auc: 0.87956	valid_0's binary_logloss: 0.123373	valid_1's auc: 0.83106	valid_1's binary_logloss: 0.141701
[21]	valid_0's auc: 0.880405	valid_0's binary_logloss: 0.122832	valid_1's auc: 0.830305	valid_1's binary_logloss: 0.14165
[22]	valid_0's auc: 0.881423	valid_0's binary_logloss: 0.122355	valid_1's auc: 0.830239	valid_1's binary_logloss: 0.141656
[23]	valid_0's auc: 0.882393	valid_0's binary_logloss: 0.121843	valid_1's auc: 0.830552	valid_1's binary_logloss: 0.141537
[24]	valid_0's auc: 0.884118	valid_0's binary_logloss: 0.121284	valid_1's auc: 0.830946	valid_1's binary_logloss: 0.141426
[25]	valid_0's auc: 0.885236	valid_0's binary_logloss: 0.120821	valid_1's auc: 0.829956	valid_1's binary_logloss: 0.141489
[26]	valid_0's auc: 0.886368	valid_0's binary_logloss: 0.120378	valid_1's auc: 0.829528	valid_1's binary_logloss: 0.141559
[27]	valid_0's auc: 0.88693	valid_0's binary_logloss: 0.119952	valid_1's auc: 0.829491	valid_1's binary_logloss: 0.141578
[28]	valid_0's auc: 0.887827	valid_0's binary_logloss: 0.1195	valid_1's auc: 0.829533	valid_1's binary_logloss: 0.141548
[29]	valid_0's auc: 0.888834	valid_0's binary_logloss: 0.119055	valid_1's auc: 0.829809	valid_1's binary_logloss: 0.141549
[30]	valid_0's auc: 0.889728	valid_0's binary_logloss: 0.118695	valid_1's auc: 0.829539	valid_1's binary_logloss: 0.14162
[31]	valid_0's auc: 0.890934	valid_0's binary_logloss: 0.118306	valid_1's auc: 0.829985	valid_1's binary_logloss: 0.141525
[32]	valid_0's auc: 0.891349	valid_0's binary_logloss: 0.11798	valid_1's auc: 0.829723	valid_1's binary_logloss: 0.141529
[33]	valid_0's auc: 0.891896	valid_0's binary_logloss: 0.117633	valid_1's auc: 0.829851	valid_1's binary_logloss: 0.141488
[34]	valid_0's auc: 0.892553	valid_0's binary_logloss: 0.117331	valid_1's auc: 0.82916	valid_1's binary_logloss: 0.141607
[35]	valid_0's auc: 0.893197	valid_0's binary_logloss: 0.116998	valid_1's auc: 0.828503	valid_1's binary_logloss: 0.141781
[36]	valid_0's auc: 0.894242	valid_0's binary_logloss: 0.116658	valid_1's auc: 0.828875	valid_1's binary_logloss: 0.141779
Early stopping, best iteration is:
[6]	valid_0's auc: 0.857012	valid_0's binary_logloss: 0.137695	valid_1's auc: 0.834623	valid_1's binary_logloss: 0.14943
[1]	valid_0's auc: 0.833054	valid_0's binary_logloss: 0.155723	valid_1's auc: 0.817048	valid_1's binary_logloss: 0.165042
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.840929	valid_0's binary_logloss: 0.149897	valid_1's auc: 0.820838	valid_1's binary_logloss: 0.159671
[3]	valid_0's auc: 0.85054	valid_0's binary_logloss: 0.14575	valid_1's auc: 0.827828	valid_1's binary_logloss: 0.156036
[4]	valid_0's auc: 0.855605	valid_0's binary_logloss: 0.14246	valid_1's auc: 0.831388	valid_1's binary_logloss: 0.153065
[5]	valid_0's auc: 0.858903	valid_0's binary_logloss: 0.139711	valid_1's auc: 0.834493	valid_1's binary_logloss: 0.150758
[6]	valid_0's auc: 0.860869	valid_0's binary_logloss: 0.137459	valid_1's auc: 0.833775	valid_1's binary_logloss: 0.149055
[7]	valid_0's auc: 0.862879	valid_0's binary_logloss: 0.135513	valid_1's auc: 0.836672	valid_1's binary_logloss: 0.147391
[8]	valid_0's auc: 0.863872	valid_0's binary_logloss: 0.133916	valid_1's auc: 0.835878	valid_1's binary_logloss: 0.146178
[9]	valid_0's auc: 0.865562	valid_0's binary_logloss: 0.132428	valid_1's auc: 0.836038	valid_1's binary_logloss: 0.145107
[10]	valid_0's auc: 0.867401	valid_0's binary_logloss: 0.131116	valid_1's auc: 0.836429	valid_1's binary_logloss: 0.144153
[11]	valid_0's auc: 0.868477	valid_0's binary_logloss: 0.129981	valid_1's auc: 0.836388	valid_1's binary_logloss: 0.143351
[12]	valid_0's auc: 0.87007	valid_0's binary_logloss: 0.128907	valid_1's auc: 0.836122	valid_1's binary_logloss: 0.142803
[13]	valid_0's auc: 0.8711	valid_0's binary_logloss: 0.127994	valid_1's auc: 0.836547	valid_1's binary_logloss: 0.142243
[14]	valid_0's auc: 0.872903	valid_0's binary_logloss: 0.127122	valid_1's auc: 0.837837	valid_1's binary_logloss: 0.141718
[15]	valid_0's auc: 0.874543	valid_0's binary_logloss: 0.126262	valid_1's auc: 0.837921	valid_1's binary_logloss: 0.141291
[16]	valid_0's auc: 0.876148	valid_0's binary_logloss: 0.1255	valid_1's auc: 0.83698	valid_1's binary_logloss: 0.14114
[17]	valid_0's auc: 0.878214	valid_0's binary_logloss: 0.124724	valid_1's auc: 0.836901	valid_1's binary_logloss: 0.140905
[18]	valid_0's auc: 0.879554	valid_0's binary_logloss: 0.124053	valid_1's auc: 0.836497	valid_1's binary_logloss: 0.140787
[19]	valid_0's auc: 0.880715	valid_0's binary_logloss: 0.123405	valid_1's auc: 0.837087	valid_1's binary_logloss: 0.140481
[20]	valid_0's auc: 0.881492	valid_0's binary_logloss: 0.122867	valid_1's auc: 0.836798	valid_1's binary_logloss: 0.140353
[21]	valid_0's auc: 0.882521	valid_0's binary_logloss: 0.122301	valid_1's auc: 0.836676	valid_1's binary_logloss: 0.140236
[22]	valid_0's auc: 0.883688	valid_0's binary_logloss: 0.121776	valid_1's auc: 0.836698	valid_1's binary_logloss: 0.140127
[23]	valid_0's auc: 0.88451	valid_0's binary_logloss: 0.121226	valid_1's auc: 0.8369	valid_1's binary_logloss: 0.140044
[24]	valid_0's auc: 0.88609	valid_0's binary_logloss: 0.120643	valid_1's auc: 0.836883	valid_1's binary_logloss: 0.139917
[25]	valid_0's auc: 0.887311	valid_0's binary_logloss: 0.120104	valid_1's auc: 0.837018	valid_1's binary_logloss: 0.139879
[26]	valid_0's auc: 0.888754	valid_0's binary_logloss: 0.11961	valid_1's auc: 0.836808	valid_1's binary_logloss: 0.139895
[27]	valid_0's auc: 0.889697	valid_0's binary_logloss: 0.119114	valid_1's auc: 0.836828	valid_1's binary_logloss: 0.139904
[28]	valid_0's auc: 0.891102	valid_0's binary_logloss: 0.118637	valid_1's auc: 0.83685	valid_1's binary_logloss: 0.139833
[29]	valid_0's auc: 0.891823	valid_0's binary_logloss: 0.118217	valid_1's auc: 0.836568	valid_1's binary_logloss: 0.139836
[30]	valid_0's auc: 0.893059	valid_0's binary_logloss: 0.117781	valid_1's auc: 0.836774	valid_1's binary_logloss: 0.139827
[31]	valid_0's auc: 0.893688	valid_0's binary_logloss: 0.117412	valid_1's auc: 0.83732	valid_1's binary_logloss: 0.139721
[32]	valid_0's auc: 0.894643	valid_0's binary_logloss: 0.117013	valid_1's auc: 0.8373	valid_1's binary_logloss: 0.1397
[33]	valid_0's auc: 0.89555	valid_0's binary_logloss: 0.11666	valid_1's auc: 0.837425	valid_1's binary_logloss: 0.139692
[34]	valid_0's auc: 0.896334	valid_0's binary_logloss: 0.116252	valid_1's auc: 0.836971	valid_1's binary_logloss: 0.139746
[35]	valid_0's auc: 0.897058	valid_0's binary_logloss: 0.115923	valid_1's auc: 0.837212	valid_1's binary_logloss: 0.139756
[36]	valid_0's auc: 0.897581	valid_0's binary_logloss: 0.115642	valid_1's auc: 0.837227	valid_1's binary_logloss: 0.139719
[37]	valid_0's auc: 0.898111	valid_0's binary_logloss: 0.115297	valid_1's auc: 0.83733	valid_1's binary_logloss: 0.139758
[38]	valid_0's auc: 0.898762	valid_0's binary_logloss: 0.11499	valid_1's auc: 0.837353	valid_1's binary_logloss: 0.13975
[39]	valid_0's auc: 0.899461	valid_0's binary_logloss: 0.114643	valid_1's auc: 0.837189	valid_1's binary_logloss: 0.139803
[40]	valid_0's auc: 0.900215	valid_0's binary_logloss: 0.114372	valid_1's auc: 0.837001	valid_1's binary_logloss: 0.139865
[41]	valid_0's auc: 0.900847	valid_0's binary_logloss: 0.114036	valid_1's auc: 0.837156	valid_1's binary_logloss: 0.139874
[42]	valid_0's auc: 0.901233	valid_0's binary_logloss: 0.113757	valid_1's auc: 0.837341	valid_1's binary_logloss: 0.139879
[43]	valid_0's auc: 0.901621	valid_0's binary_logloss: 0.113498	valid_1's auc: 0.837116	valid_1's binary_logloss: 0.139982
[44]	valid_0's auc: 0.902271	valid_0's binary_logloss: 0.113194	valid_1's auc: 0.836966	valid_1's binary_logloss: 0.140012
[45]	valid_0's auc: 0.902522	valid_0's binary_logloss: 0.112949	valid_1's auc: 0.836915	valid_1's binary_logloss: 0.140035
Early stopping, best iteration is:
[15]	valid_0's auc: 0.874543	valid_0's binary_logloss: 0.126262	valid_1's auc: 0.837921	valid_1's binary_logloss: 0.141291
[1]	valid_0's auc: 0.830649	valid_0's binary_logloss: 0.155755	valid_1's auc: 0.81673	valid_1's binary_logloss: 0.164976
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.839656	valid_0's binary_logloss: 0.150015	valid_1's auc: 0.822663	valid_1's binary_logloss: 0.159866
[3]	valid_0's auc: 0.847827	valid_0's binary_logloss: 0.145888	valid_1's auc: 0.829595	valid_1's binary_logloss: 0.156171
[4]	valid_0's auc: 0.851153	valid_0's binary_logloss: 0.142542	valid_1's auc: 0.831052	valid_1's binary_logloss: 0.153261
[5]	valid_0's auc: 0.854418	valid_0's binary_logloss: 0.139824	valid_1's auc: 0.832726	valid_1's binary_logloss: 0.150974
[6]	valid_0's auc: 0.85615	valid_0's binary_logloss: 0.137634	valid_1's auc: 0.835578	valid_1's binary_logloss: 0.148988
[7]	valid_0's auc: 0.857116	valid_0's binary_logloss: 0.135891	valid_1's auc: 0.834971	valid_1's binary_logloss: 0.147626
[8]	valid_0's auc: 0.859522	valid_0's binary_logloss: 0.134235	valid_1's auc: 0.836528	valid_1's binary_logloss: 0.146258
[9]	valid_0's auc: 0.861901	valid_0's binary_logloss: 0.132802	valid_1's auc: 0.8366	valid_1's binary_logloss: 0.145288
[10]	valid_0's auc: 0.863552	valid_0's binary_logloss: 0.131531	valid_1's auc: 0.835683	valid_1's binary_logloss: 0.144554
[11]	valid_0's auc: 0.865032	valid_0's binary_logloss: 0.13038	valid_1's auc: 0.835477	valid_1's binary_logloss: 0.143965
[12]	valid_0's auc: 0.867552	valid_0's binary_logloss: 0.12931	valid_1's auc: 0.837042	valid_1's binary_logloss: 0.14326
[13]	valid_0's auc: 0.870662	valid_0's binary_logloss: 0.128333	valid_1's auc: 0.837981	valid_1's binary_logloss: 0.142696
[14]	valid_0's auc: 0.872498	valid_0's binary_logloss: 0.127408	valid_1's auc: 0.837713	valid_1's binary_logloss: 0.142262
[15]	valid_0's auc: 0.873589	valid_0's binary_logloss: 0.126603	valid_1's auc: 0.837256	valid_1's binary_logloss: 0.141869
[16]	valid_0's auc: 0.875441	valid_0's binary_logloss: 0.125783	valid_1's auc: 0.837912	valid_1's binary_logloss: 0.141528
[17]	valid_0's auc: 0.877154	valid_0's binary_logloss: 0.125036	valid_1's auc: 0.836689	valid_1's binary_logloss: 0.141384
[18]	valid_0's auc: 0.878205	valid_0's binary_logloss: 0.124367	valid_1's auc: 0.835872	valid_1's binary_logloss: 0.141256
[19]	valid_0's auc: 0.879502	valid_0's binary_logloss: 0.12371	valid_1's auc: 0.835242	valid_1's binary_logloss: 0.141186
[20]	valid_0's auc: 0.880623	valid_0's binary_logloss: 0.123116	valid_1's auc: 0.835731	valid_1's binary_logloss: 0.140946
[21]	valid_0's auc: 0.881898	valid_0's binary_logloss: 0.122562	valid_1's auc: 0.834984	valid_1's binary_logloss: 0.140914
[22]	valid_0's auc: 0.882919	valid_0's binary_logloss: 0.122011	valid_1's auc: 0.83655	valid_1's binary_logloss: 0.140596
[23]	valid_0's auc: 0.88356	valid_0's binary_logloss: 0.121524	valid_1's auc: 0.836903	valid_1's binary_logloss: 0.140423
[24]	valid_0's auc: 0.884733	valid_0's binary_logloss: 0.120948	valid_1's auc: 0.837346	valid_1's binary_logloss: 0.140282
[25]	valid_0's auc: 0.885783	valid_0's binary_logloss: 0.120481	valid_1's auc: 0.837461	valid_1's binary_logloss: 0.140158
[26]	valid_0's auc: 0.887006	valid_0's binary_logloss: 0.119965	valid_1's auc: 0.837303	valid_1's binary_logloss: 0.140169
[27]	valid_0's auc: 0.887947	valid_0's binary_logloss: 0.119452	valid_1's auc: 0.837557	valid_1's binary_logloss: 0.140113
[28]	valid_0's auc: 0.888786	valid_0's binary_logloss: 0.11903	valid_1's auc: 0.837108	valid_1's binary_logloss: 0.140154
[29]	valid_0's auc: 0.890614	valid_0's binary_logloss: 0.118542	valid_1's auc: 0.837503	valid_1's binary_logloss: 0.14008
[30]	valid_0's auc: 0.891023	valid_0's binary_logloss: 0.118199	valid_1's auc: 0.83748	valid_1's binary_logloss: 0.140051
[31]	valid_0's auc: 0.891989	valid_0's binary_logloss: 0.117866	valid_1's auc: 0.837533	valid_1's binary_logloss: 0.140006
[32]	valid_0's auc: 0.892909	valid_0's binary_logloss: 0.117477	valid_1's auc: 0.83708	valid_1's binary_logloss: 0.140054
[33]	valid_0's auc: 0.893597	valid_0's binary_logloss: 0.117091	valid_1's auc: 0.836874	valid_1's binary_logloss: 0.140061
[34]	valid_0's auc: 0.894331	valid_0's binary_logloss: 0.116711	valid_1's auc: 0.836404	valid_1's binary_logloss: 0.140111
[35]	valid_0's auc: 0.895331	valid_0's binary_logloss: 0.116306	valid_1's auc: 0.837102	valid_1's binary_logloss: 0.139942
[36]	valid_0's auc: 0.895954	valid_0's binary_logloss: 0.115973	valid_1's auc: 0.837138	valid_1's binary_logloss: 0.139958
[37]	valid_0's auc: 0.896701	valid_0's binary_logloss: 0.115673	valid_1's auc: 0.837045	valid_1's binary_logloss: 0.139954
[38]	valid_0's auc: 0.897344	valid_0's binary_logloss: 0.115335	valid_1's auc: 0.836585	valid_1's binary_logloss: 0.140026
[39]	valid_0's auc: 0.897605	valid_0's binary_logloss: 0.115043	valid_1's auc: 0.836359	valid_1's binary_logloss: 0.140104
[40]	valid_0's auc: 0.898073	valid_0's binary_logloss: 0.114754	valid_1's auc: 0.836432	valid_1's binary_logloss: 0.140096
[41]	valid_0's auc: 0.898701	valid_0's binary_logloss: 0.114389	valid_1's auc: 0.836355	valid_1's binary_logloss: 0.140115
[42]	valid_0's auc: 0.898936	valid_0's binary_logloss: 0.114144	valid_1's auc: 0.836485	valid_1's binary_logloss: 0.14011
[43]	valid_0's auc: 0.899275	valid_0's binary_logloss: 0.11385	valid_1's auc: 0.836213	valid_1's binary_logloss: 0.140214
Early stopping, best iteration is:
[13]	valid_0's auc: 0.870662	valid_0's binary_logloss: 0.128333	valid_1's auc: 0.837981	valid_1's binary_logloss: 0.142696
[1]	valid_0's auc: 0.824873	valid_0's binary_logloss: 0.156222	valid_1's auc: 0.817791	valid_1's binary_logloss: 0.165077
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.829172	valid_0's binary_logloss: 0.151168	valid_1's auc: 0.823373	valid_1's binary_logloss: 0.160071
[3]	valid_0's auc: 0.836076	valid_0's binary_logloss: 0.147371	valid_1's auc: 0.829343	valid_1's binary_logloss: 0.156297
[4]	valid_0's auc: 0.839875	valid_0's binary_logloss: 0.14444	valid_1's auc: 0.833421	valid_1's binary_logloss: 0.153356
[5]	valid_0's auc: 0.84413	valid_0's binary_logloss: 0.142061	valid_1's auc: 0.835156	valid_1's binary_logloss: 0.151047
[6]	valid_0's auc: 0.846462	valid_0's binary_logloss: 0.140071	valid_1's auc: 0.835505	valid_1's binary_logloss: 0.14915
[7]	valid_0's auc: 0.847647	valid_0's binary_logloss: 0.138475	valid_1's auc: 0.835469	valid_1's binary_logloss: 0.147559
[8]	valid_0's auc: 0.848591	valid_0's binary_logloss: 0.13704	valid_1's auc: 0.835911	valid_1's binary_logloss: 0.146381
[9]	valid_0's auc: 0.849213	valid_0's binary_logloss: 0.135881	valid_1's auc: 0.838015	valid_1's binary_logloss: 0.1453
[10]	valid_0's auc: 0.850856	valid_0's binary_logloss: 0.134808	valid_1's auc: 0.837498	valid_1's binary_logloss: 0.144374
[11]	valid_0's auc: 0.852026	valid_0's binary_logloss: 0.133912	valid_1's auc: 0.836992	valid_1's binary_logloss: 0.143626
[12]	valid_0's auc: 0.853769	valid_0's binary_logloss: 0.133022	valid_1's auc: 0.836788	valid_1's binary_logloss: 0.143124
[13]	valid_0's auc: 0.85483	valid_0's binary_logloss: 0.132302	valid_1's auc: 0.836757	valid_1's binary_logloss: 0.142528
[14]	valid_0's auc: 0.855718	valid_0's binary_logloss: 0.131638	valid_1's auc: 0.835694	valid_1's binary_logloss: 0.142224
[15]	valid_0's auc: 0.856954	valid_0's binary_logloss: 0.131049	valid_1's auc: 0.835368	valid_1's binary_logloss: 0.141812
[16]	valid_0's auc: 0.857856	valid_0's binary_logloss: 0.130502	valid_1's auc: 0.834835	valid_1's binary_logloss: 0.141587
[17]	valid_0's auc: 0.85911	valid_0's binary_logloss: 0.129988	valid_1's auc: 0.835354	valid_1's binary_logloss: 0.141283
[18]	valid_0's auc: 0.860451	valid_0's binary_logloss: 0.129489	valid_1's auc: 0.835311	valid_1's binary_logloss: 0.141081
[19]	valid_0's auc: 0.861474	valid_0's binary_logloss: 0.129065	valid_1's auc: 0.834787	valid_1's binary_logloss: 0.140992
[20]	valid_0's auc: 0.862401	valid_0's binary_logloss: 0.128653	valid_1's auc: 0.834345	valid_1's binary_logloss: 0.140795
[21]	valid_0's auc: 0.86312	valid_0's binary_logloss: 0.128288	valid_1's auc: 0.833804	valid_1's binary_logloss: 0.140723
[22]	valid_0's auc: 0.86391	valid_0's binary_logloss: 0.127911	valid_1's auc: 0.833837	valid_1's binary_logloss: 0.140571
[23]	valid_0's auc: 0.8644	valid_0's binary_logloss: 0.127608	valid_1's auc: 0.833222	valid_1's binary_logloss: 0.140512
[24]	valid_0's auc: 0.865301	valid_0's binary_logloss: 0.127243	valid_1's auc: 0.832726	valid_1's binary_logloss: 0.140541
[25]	valid_0's auc: 0.866437	valid_0's binary_logloss: 0.126901	valid_1's auc: 0.832998	valid_1's binary_logloss: 0.140458
[26]	valid_0's auc: 0.867262	valid_0's binary_logloss: 0.126595	valid_1's auc: 0.833056	valid_1's binary_logloss: 0.140424
[27]	valid_0's auc: 0.86794	valid_0's binary_logloss: 0.126301	valid_1's auc: 0.832427	valid_1's binary_logloss: 0.140421
[28]	valid_0's auc: 0.869472	valid_0's binary_logloss: 0.125953	valid_1's auc: 0.833075	valid_1's binary_logloss: 0.14028
[29]	valid_0's auc: 0.870369	valid_0's binary_logloss: 0.125647	valid_1's auc: 0.833494	valid_1's binary_logloss: 0.140215
[30]	valid_0's auc: 0.871105	valid_0's binary_logloss: 0.12536	valid_1's auc: 0.83327	valid_1's binary_logloss: 0.140214
[31]	valid_0's auc: 0.871414	valid_0's binary_logloss: 0.125161	valid_1's auc: 0.833041	valid_1's binary_logloss: 0.140216
[32]	valid_0's auc: 0.872281	valid_0's binary_logloss: 0.12493	valid_1's auc: 0.833344	valid_1's binary_logloss: 0.140148
[33]	valid_0's auc: 0.873038	valid_0's binary_logloss: 0.124672	valid_1's auc: 0.833307	valid_1's binary_logloss: 0.140091
[34]	valid_0's auc: 0.873571	valid_0's binary_logloss: 0.124499	valid_1's auc: 0.833239	valid_1's binary_logloss: 0.140069
[35]	valid_0's auc: 0.874263	valid_0's binary_logloss: 0.124311	valid_1's auc: 0.833202	valid_1's binary_logloss: 0.140067
[36]	valid_0's auc: 0.87467	valid_0's binary_logloss: 0.124165	valid_1's auc: 0.833345	valid_1's binary_logloss: 0.140029
[37]	valid_0's auc: 0.875299	valid_0's binary_logloss: 0.123937	valid_1's auc: 0.833447	valid_1's binary_logloss: 0.140009
[38]	valid_0's auc: 0.876178	valid_0's binary_logloss: 0.123686	valid_1's auc: 0.833499	valid_1's binary_logloss: 0.139986
[39]	valid_0's auc: 0.876802	valid_0's binary_logloss: 0.123446	valid_1's auc: 0.833868	valid_1's binary_logloss: 0.139955
Early stopping, best iteration is:
[9]	valid_0's auc: 0.849213	valid_0's binary_logloss: 0.135881	valid_1's auc: 0.838015	valid_1's binary_logloss: 0.1453
[1]	valid_0's auc: 0.821831	valid_0's binary_logloss: 0.156469	valid_1's auc: 0.817525	valid_1's binary_logloss: 0.165188
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.830014	valid_0's binary_logloss: 0.151109	valid_1's auc: 0.823491	valid_1's binary_logloss: 0.159651
[3]	valid_0's auc: 0.839606	valid_0's binary_logloss: 0.147325	valid_1's auc: 0.832736	valid_1's binary_logloss: 0.156031
[4]	valid_0's auc: 0.842933	valid_0's binary_logloss: 0.144392	valid_1's auc: 0.836202	valid_1's binary_logloss: 0.15311
[5]	valid_0's auc: 0.845714	valid_0's binary_logloss: 0.141965	valid_1's auc: 0.838652	valid_1's binary_logloss: 0.150749
[6]	valid_0's auc: 0.848431	valid_0's binary_logloss: 0.13995	valid_1's auc: 0.840279	valid_1's binary_logloss: 0.148948
[7]	valid_0's auc: 0.849908	valid_0's binary_logloss: 0.138315	valid_1's auc: 0.840328	valid_1's binary_logloss: 0.14741
[8]	valid_0's auc: 0.852054	valid_0's binary_logloss: 0.136907	valid_1's auc: 0.83901	valid_1's binary_logloss: 0.146175
[9]	valid_0's auc: 0.853186	valid_0's binary_logloss: 0.135648	valid_1's auc: 0.83787	valid_1's binary_logloss: 0.145198
[10]	valid_0's auc: 0.85449	valid_0's binary_logloss: 0.134596	valid_1's auc: 0.837845	valid_1's binary_logloss: 0.144271
[11]	valid_0's auc: 0.855485	valid_0's binary_logloss: 0.133677	valid_1's auc: 0.838688	valid_1's binary_logloss: 0.14351
[12]	valid_0's auc: 0.856918	valid_0's binary_logloss: 0.132832	valid_1's auc: 0.838593	valid_1's binary_logloss: 0.142834
[13]	valid_0's auc: 0.857461	valid_0's binary_logloss: 0.132079	valid_1's auc: 0.838477	valid_1's binary_logloss: 0.142308
[14]	valid_0's auc: 0.858342	valid_0's binary_logloss: 0.131428	valid_1's auc: 0.838195	valid_1's binary_logloss: 0.141925
[15]	valid_0's auc: 0.858926	valid_0's binary_logloss: 0.130816	valid_1's auc: 0.838543	valid_1's binary_logloss: 0.141466
[16]	valid_0's auc: 0.859532	valid_0's binary_logloss: 0.130275	valid_1's auc: 0.838295	valid_1's binary_logloss: 0.141112
[17]	valid_0's auc: 0.860793	valid_0's binary_logloss: 0.129728	valid_1's auc: 0.837788	valid_1's binary_logloss: 0.140844
[18]	valid_0's auc: 0.861753	valid_0's binary_logloss: 0.12924	valid_1's auc: 0.837779	valid_1's binary_logloss: 0.140599
[19]	valid_0's auc: 0.86298	valid_0's binary_logloss: 0.12873	valid_1's auc: 0.837848	valid_1's binary_logloss: 0.14042
[20]	valid_0's auc: 0.863577	valid_0's binary_logloss: 0.128318	valid_1's auc: 0.837708	valid_1's binary_logloss: 0.140222
[21]	valid_0's auc: 0.864273	valid_0's binary_logloss: 0.127905	valid_1's auc: 0.838031	valid_1's binary_logloss: 0.140093
[22]	valid_0's auc: 0.865086	valid_0's binary_logloss: 0.127531	valid_1's auc: 0.837779	valid_1's binary_logloss: 0.139982
[23]	valid_0's auc: 0.865788	valid_0's binary_logloss: 0.127146	valid_1's auc: 0.837827	valid_1's binary_logloss: 0.139856
[24]	valid_0's auc: 0.866662	valid_0's binary_logloss: 0.126815	valid_1's auc: 0.837785	valid_1's binary_logloss: 0.139755
[25]	valid_0's auc: 0.867441	valid_0's binary_logloss: 0.126498	valid_1's auc: 0.838008	valid_1's binary_logloss: 0.139673
[26]	valid_0's auc: 0.86805	valid_0's binary_logloss: 0.126176	valid_1's auc: 0.838301	valid_1's binary_logloss: 0.139585
[27]	valid_0's auc: 0.868525	valid_0's binary_logloss: 0.12589	valid_1's auc: 0.838152	valid_1's binary_logloss: 0.139564
[28]	valid_0's auc: 0.869107	valid_0's binary_logloss: 0.125656	valid_1's auc: 0.838221	valid_1's binary_logloss: 0.139481
[29]	valid_0's auc: 0.869754	valid_0's binary_logloss: 0.12538	valid_1's auc: 0.838255	valid_1's binary_logloss: 0.139428
[30]	valid_0's auc: 0.87056	valid_0's binary_logloss: 0.125113	valid_1's auc: 0.838733	valid_1's binary_logloss: 0.139354
[31]	valid_0's auc: 0.871162	valid_0's binary_logloss: 0.124861	valid_1's auc: 0.838979	valid_1's binary_logloss: 0.139225
[32]	valid_0's auc: 0.871762	valid_0's binary_logloss: 0.124652	valid_1's auc: 0.838753	valid_1's binary_logloss: 0.139233
[33]	valid_0's auc: 0.872722	valid_0's binary_logloss: 0.124394	valid_1's auc: 0.839239	valid_1's binary_logloss: 0.1391
[34]	valid_0's auc: 0.87368	valid_0's binary_logloss: 0.124127	valid_1's auc: 0.839489	valid_1's binary_logloss: 0.139029
[35]	valid_0's auc: 0.874492	valid_0's binary_logloss: 0.12392	valid_1's auc: 0.839553	valid_1's binary_logloss: 0.139024
[36]	valid_0's auc: 0.875059	valid_0's binary_logloss: 0.123723	valid_1's auc: 0.839658	valid_1's binary_logloss: 0.138948
[37]	valid_0's auc: 0.875726	valid_0's binary_logloss: 0.123495	valid_1's auc: 0.839391	valid_1's binary_logloss: 0.139005
Early stopping, best iteration is:
[7]	valid_0's auc: 0.849908	valid_0's binary_logloss: 0.138315	valid_1's auc: 0.840328	valid_1's binary_logloss: 0.14741
[1]	valid_0's auc: 0.821427	valid_0's binary_logloss: 0.156591	valid_1's auc: 0.81711	valid_1's binary_logloss: 0.165271
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.827907	valid_0's binary_logloss: 0.151333	valid_1's auc: 0.821466	valid_1's binary_logloss: 0.160205
[3]	valid_0's auc: 0.837845	valid_0's binary_logloss: 0.147475	valid_1's auc: 0.828667	valid_1's binary_logloss: 0.156481
[4]	valid_0's auc: 0.840614	valid_0's binary_logloss: 0.144432	valid_1's auc: 0.831943	valid_1's binary_logloss: 0.153529
[5]	valid_0's auc: 0.843179	valid_0's binary_logloss: 0.142066	valid_1's auc: 0.834251	valid_1's binary_logloss: 0.151202
[6]	valid_0's auc: 0.843372	valid_0's binary_logloss: 0.140161	valid_1's auc: 0.834689	valid_1's binary_logloss: 0.14929
[7]	valid_0's auc: 0.844766	valid_0's binary_logloss: 0.138478	valid_1's auc: 0.835816	valid_1's binary_logloss: 0.147704
[8]	valid_0's auc: 0.847116	valid_0's binary_logloss: 0.137115	valid_1's auc: 0.836076	valid_1's binary_logloss: 0.146539
[9]	valid_0's auc: 0.850254	valid_0's binary_logloss: 0.135912	valid_1's auc: 0.836014	valid_1's binary_logloss: 0.145594
[10]	valid_0's auc: 0.851797	valid_0's binary_logloss: 0.13482	valid_1's auc: 0.836595	valid_1's binary_logloss: 0.144731
[11]	valid_0's auc: 0.852928	valid_0's binary_logloss: 0.133901	valid_1's auc: 0.835902	valid_1's binary_logloss: 0.144167
[12]	valid_0's auc: 0.854858	valid_0's binary_logloss: 0.133062	valid_1's auc: 0.836859	valid_1's binary_logloss: 0.143545
[13]	valid_0's auc: 0.856392	valid_0's binary_logloss: 0.13232	valid_1's auc: 0.837517	valid_1's binary_logloss: 0.14296
[14]	valid_0's auc: 0.857549	valid_0's binary_logloss: 0.131636	valid_1's auc: 0.837302	valid_1's binary_logloss: 0.142542
[15]	valid_0's auc: 0.858338	valid_0's binary_logloss: 0.131026	valid_1's auc: 0.837003	valid_1's binary_logloss: 0.142141
[16]	valid_0's auc: 0.859394	valid_0's binary_logloss: 0.130449	valid_1's auc: 0.836672	valid_1's binary_logloss: 0.141779
[17]	valid_0's auc: 0.860095	valid_0's binary_logloss: 0.129926	valid_1's auc: 0.83723	valid_1's binary_logloss: 0.141395
[18]	valid_0's auc: 0.861058	valid_0's binary_logloss: 0.129444	valid_1's auc: 0.837871	valid_1's binary_logloss: 0.141105
[19]	valid_0's auc: 0.862258	valid_0's binary_logloss: 0.128997	valid_1's auc: 0.83794	valid_1's binary_logloss: 0.140829
[20]	valid_0's auc: 0.863204	valid_0's binary_logloss: 0.128569	valid_1's auc: 0.838426	valid_1's binary_logloss: 0.140558
[21]	valid_0's auc: 0.864129	valid_0's binary_logloss: 0.128163	valid_1's auc: 0.838539	valid_1's binary_logloss: 0.140401
[22]	valid_0's auc: 0.865066	valid_0's binary_logloss: 0.127776	valid_1's auc: 0.838552	valid_1's binary_logloss: 0.140268
[23]	valid_0's auc: 0.866036	valid_0's binary_logloss: 0.127415	valid_1's auc: 0.838667	valid_1's binary_logloss: 0.140163
[24]	valid_0's auc: 0.866894	valid_0's binary_logloss: 0.127047	valid_1's auc: 0.838711	valid_1's binary_logloss: 0.139983
[25]	valid_0's auc: 0.867646	valid_0's binary_logloss: 0.126759	valid_1's auc: 0.83898	valid_1's binary_logloss: 0.13986
[26]	valid_0's auc: 0.868321	valid_0's binary_logloss: 0.126486	valid_1's auc: 0.83866	valid_1's binary_logloss: 0.139795
[27]	valid_0's auc: 0.869533	valid_0's binary_logloss: 0.126137	valid_1's auc: 0.838474	valid_1's binary_logloss: 0.13973
[28]	valid_0's auc: 0.87009	valid_0's binary_logloss: 0.125841	valid_1's auc: 0.838418	valid_1's binary_logloss: 0.139661
[29]	valid_0's auc: 0.870611	valid_0's binary_logloss: 0.125574	valid_1's auc: 0.838527	valid_1's binary_logloss: 0.139586
[30]	valid_0's auc: 0.871288	valid_0's binary_logloss: 0.125351	valid_1's auc: 0.838578	valid_1's binary_logloss: 0.139533
[31]	valid_0's auc: 0.871941	valid_0's binary_logloss: 0.125092	valid_1's auc: 0.839062	valid_1's binary_logloss: 0.139431
[32]	valid_0's auc: 0.872841	valid_0's binary_logloss: 0.124816	valid_1's auc: 0.839243	valid_1's binary_logloss: 0.139362
[33]	valid_0's auc: 0.873443	valid_0's binary_logloss: 0.124593	valid_1's auc: 0.839482	valid_1's binary_logloss: 0.139288
[34]	valid_0's auc: 0.874317	valid_0's binary_logloss: 0.124327	valid_1's auc: 0.839612	valid_1's binary_logloss: 0.139266
[35]	valid_0's auc: 0.875065	valid_0's binary_logloss: 0.124053	valid_1's auc: 0.839746	valid_1's binary_logloss: 0.139241
[36]	valid_0's auc: 0.875683	valid_0's binary_logloss: 0.123777	valid_1's auc: 0.840074	valid_1's binary_logloss: 0.139138
[37]	valid_0's auc: 0.876241	valid_0's binary_logloss: 0.123571	valid_1's auc: 0.840105	valid_1's binary_logloss: 0.139101
[38]	valid_0's auc: 0.876923	valid_0's binary_logloss: 0.123355	valid_1's auc: 0.839838	valid_1's binary_logloss: 0.139141
[39]	valid_0's auc: 0.877193	valid_0's binary_logloss: 0.123186	valid_1's auc: 0.84006	valid_1's binary_logloss: 0.139095
[40]	valid_0's auc: 0.877543	valid_0's binary_logloss: 0.123003	valid_1's auc: 0.839944	valid_1's binary_logloss: 0.139144
[41]	valid_0's auc: 0.877994	valid_0's binary_logloss: 0.122806	valid_1's auc: 0.839925	valid_1's binary_logloss: 0.139157
[42]	valid_0's auc: 0.878413	valid_0's binary_logloss: 0.122608	valid_1's auc: 0.839588	valid_1's binary_logloss: 0.139183
[43]	valid_0's auc: 0.87868	valid_0's binary_logloss: 0.122482	valid_1's auc: 0.839828	valid_1's binary_logloss: 0.139122
[44]	valid_0's auc: 0.878844	valid_0's binary_logloss: 0.122346	valid_1's auc: 0.839999	valid_1's binary_logloss: 0.139112
[45]	valid_0's auc: 0.879115	valid_0's binary_logloss: 0.12219	valid_1's auc: 0.839862	valid_1's binary_logloss: 0.139105
[46]	valid_0's auc: 0.879426	valid_0's binary_logloss: 0.122041	valid_1's auc: 0.839926	valid_1's binary_logloss: 0.139086
[47]	valid_0's auc: 0.879882	valid_0's binary_logloss: 0.121829	valid_1's auc: 0.840044	valid_1's binary_logloss: 0.139063
[48]	valid_0's auc: 0.880499	valid_0's binary_logloss: 0.121624	valid_1's auc: 0.840013	valid_1's binary_logloss: 0.139091
[49]	valid_0's auc: 0.880866	valid_0's binary_logloss: 0.121448	valid_1's auc: 0.839755	valid_1's binary_logloss: 0.139167
[50]	valid_0's auc: 0.881448	valid_0's binary_logloss: 0.121247	valid_1's auc: 0.839886	valid_1's binary_logloss: 0.139173
[51]	valid_0's auc: 0.881689	valid_0's binary_logloss: 0.121135	valid_1's auc: 0.840158	valid_1's binary_logloss: 0.139144
[52]	valid_0's auc: 0.882122	valid_0's binary_logloss: 0.12097	valid_1's auc: 0.839978	valid_1's binary_logloss: 0.139193
[53]	valid_0's auc: 0.882285	valid_0's binary_logloss: 0.120863	valid_1's auc: 0.83979	valid_1's binary_logloss: 0.139241
[54]	valid_0's auc: 0.882575	valid_0's binary_logloss: 0.120721	valid_1's auc: 0.839521	valid_1's binary_logloss: 0.139335
[55]	valid_0's auc: 0.88311	valid_0's binary_logloss: 0.120518	valid_1's auc: 0.839558	valid_1's binary_logloss: 0.139336
[56]	valid_0's auc: 0.883389	valid_0's binary_logloss: 0.120373	valid_1's auc: 0.83979	valid_1's binary_logloss: 0.139326
[57]	valid_0's auc: 0.88369	valid_0's binary_logloss: 0.120248	valid_1's auc: 0.839887	valid_1's binary_logloss: 0.139321
[58]	valid_0's auc: 0.884025	valid_0's binary_logloss: 0.120078	valid_1's auc: 0.839684	valid_1's binary_logloss: 0.139341
[59]	valid_0's auc: 0.884477	valid_0's binary_logloss: 0.119928	valid_1's auc: 0.839523	valid_1's binary_logloss: 0.139368
[60]	valid_0's auc: 0.884659	valid_0's binary_logloss: 0.119822	valid_1's auc: 0.839745	valid_1's binary_logloss: 0.139362
[61]	valid_0's auc: 0.885121	valid_0's binary_logloss: 0.119618	valid_1's auc: 0.839533	valid_1's binary_logloss: 0.139434
[62]	valid_0's auc: 0.885341	valid_0's binary_logloss: 0.119477	valid_1's auc: 0.839843	valid_1's binary_logloss: 0.139385
[63]	valid_0's auc: 0.885487	valid_0's binary_logloss: 0.119367	valid_1's auc: 0.839843	valid_1's binary_logloss: 0.139409
[64]	valid_0's auc: 0.885645	valid_0's binary_logloss: 0.119254	valid_1's auc: 0.839603	valid_1's binary_logloss: 0.139483
[65]	valid_0's auc: 0.886	valid_0's binary_logloss: 0.119067	valid_1's auc: 0.839806	valid_1's binary_logloss: 0.139511
[66]	valid_0's auc: 0.886267	valid_0's binary_logloss: 0.118949	valid_1's auc: 0.839758	valid_1's binary_logloss: 0.139539
[67]	valid_0's auc: 0.886435	valid_0's binary_logloss: 0.118836	valid_1's auc: 0.83953	valid_1's binary_logloss: 0.139595
[68]	valid_0's auc: 0.886593	valid_0's binary_logloss: 0.118728	valid_1's auc: 0.839422	valid_1's binary_logloss: 0.139608
[69]	valid_0's auc: 0.886791	valid_0's binary_logloss: 0.118595	valid_1's auc: 0.839492	valid_1's binary_logloss: 0.139615
[70]	valid_0's auc: 0.886904	valid_0's binary_logloss: 0.118527	valid_1's auc: 0.83981	valid_1's binary_logloss: 0.139598
[71]	valid_0's auc: 0.887299	valid_0's binary_logloss: 0.118387	valid_1's auc: 0.839869	valid_1's binary_logloss: 0.139594
[72]	valid_0's auc: 0.887483	valid_0's binary_logloss: 0.118281	valid_1's auc: 0.839788	valid_1's binary_logloss: 0.139654
[73]	valid_0's auc: 0.887557	valid_0's binary_logloss: 0.118196	valid_1's auc: 0.840147	valid_1's binary_logloss: 0.139624
[74]	valid_0's auc: 0.887851	valid_0's binary_logloss: 0.118068	valid_1's auc: 0.840137	valid_1's binary_logloss: 0.139665
[75]	valid_0's auc: 0.888042	valid_0's binary_logloss: 0.117962	valid_1's auc: 0.840289	valid_1's binary_logloss: 0.139644
[76]	valid_0's auc: 0.888422	valid_0's binary_logloss: 0.11776	valid_1's auc: 0.83992	valid_1's binary_logloss: 0.139749
[77]	valid_0's auc: 0.88853	valid_0's binary_logloss: 0.117667	valid_1's auc: 0.839998	valid_1's binary_logloss: 0.139765
Early stopping, best iteration is:
[47]	valid_0's auc: 0.879882	valid_0's binary_logloss: 0.121829	valid_1's auc: 0.840044	valid_1's binary_logloss: 0.139063
[1]	valid_0's auc: 0.824873	valid_0's binary_logloss: 0.156222	valid_1's auc: 0.817791	valid_1's binary_logloss: 0.165077
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.829172	valid_0's binary_logloss: 0.151168	valid_1's auc: 0.823373	valid_1's binary_logloss: 0.160071
[3]	valid_0's auc: 0.836076	valid_0's binary_logloss: 0.147371	valid_1's auc: 0.829343	valid_1's binary_logloss: 0.156297
[4]	valid_0's auc: 0.839875	valid_0's binary_logloss: 0.14444	valid_1's auc: 0.833421	valid_1's binary_logloss: 0.153356
[5]	valid_0's auc: 0.84413	valid_0's binary_logloss: 0.142061	valid_1's auc: 0.835156	valid_1's binary_logloss: 0.151047
[6]	valid_0's auc: 0.846462	valid_0's binary_logloss: 0.140071	valid_1's auc: 0.835505	valid_1's binary_logloss: 0.14915
[7]	valid_0's auc: 0.847647	valid_0's binary_logloss: 0.138475	valid_1's auc: 0.835469	valid_1's binary_logloss: 0.147559
[8]	valid_0's auc: 0.848591	valid_0's binary_logloss: 0.13704	valid_1's auc: 0.835911	valid_1's binary_logloss: 0.146381
[9]	valid_0's auc: 0.849213	valid_0's binary_logloss: 0.135881	valid_1's auc: 0.838015	valid_1's binary_logloss: 0.1453
[10]	valid_0's auc: 0.850856	valid_0's binary_logloss: 0.134808	valid_1's auc: 0.837498	valid_1's binary_logloss: 0.144374
[11]	valid_0's auc: 0.852026	valid_0's binary_logloss: 0.133912	valid_1's auc: 0.836992	valid_1's binary_logloss: 0.143626
[12]	valid_0's auc: 0.853769	valid_0's binary_logloss: 0.133022	valid_1's auc: 0.836788	valid_1's binary_logloss: 0.143124
[13]	valid_0's auc: 0.85483	valid_0's binary_logloss: 0.132302	valid_1's auc: 0.836757	valid_1's binary_logloss: 0.142528
[14]	valid_0's auc: 0.855718	valid_0's binary_logloss: 0.131638	valid_1's auc: 0.835694	valid_1's binary_logloss: 0.142224
[15]	valid_0's auc: 0.856954	valid_0's binary_logloss: 0.131049	valid_1's auc: 0.835368	valid_1's binary_logloss: 0.141812
[16]	valid_0's auc: 0.857856	valid_0's binary_logloss: 0.130502	valid_1's auc: 0.834835	valid_1's binary_logloss: 0.141587
[17]	valid_0's auc: 0.85911	valid_0's binary_logloss: 0.129988	valid_1's auc: 0.835354	valid_1's binary_logloss: 0.141283
[18]	valid_0's auc: 0.860451	valid_0's binary_logloss: 0.129489	valid_1's auc: 0.835311	valid_1's binary_logloss: 0.141081
[19]	valid_0's auc: 0.861474	valid_0's binary_logloss: 0.129065	valid_1's auc: 0.834787	valid_1's binary_logloss: 0.140992
[20]	valid_0's auc: 0.862401	valid_0's binary_logloss: 0.128653	valid_1's auc: 0.834345	valid_1's binary_logloss: 0.140795
[21]	valid_0's auc: 0.86312	valid_0's binary_logloss: 0.128288	valid_1's auc: 0.833804	valid_1's binary_logloss: 0.140723
[22]	valid_0's auc: 0.86391	valid_0's binary_logloss: 0.127911	valid_1's auc: 0.833837	valid_1's binary_logloss: 0.140571
[23]	valid_0's auc: 0.8644	valid_0's binary_logloss: 0.127608	valid_1's auc: 0.833222	valid_1's binary_logloss: 0.140512
[24]	valid_0's auc: 0.865301	valid_0's binary_logloss: 0.127243	valid_1's auc: 0.832726	valid_1's binary_logloss: 0.140541
[25]	valid_0's auc: 0.866437	valid_0's binary_logloss: 0.126901	valid_1's auc: 0.832998	valid_1's binary_logloss: 0.140458
[26]	valid_0's auc: 0.867262	valid_0's binary_logloss: 0.126595	valid_1's auc: 0.833056	valid_1's binary_logloss: 0.140424
[27]	valid_0's auc: 0.86794	valid_0's binary_logloss: 0.126301	valid_1's auc: 0.832427	valid_1's binary_logloss: 0.140421
[28]	valid_0's auc: 0.869472	valid_0's binary_logloss: 0.125953	valid_1's auc: 0.833075	valid_1's binary_logloss: 0.14028
[29]	valid_0's auc: 0.870369	valid_0's binary_logloss: 0.125647	valid_1's auc: 0.833494	valid_1's binary_logloss: 0.140215
[30]	valid_0's auc: 0.871105	valid_0's binary_logloss: 0.12536	valid_1's auc: 0.83327	valid_1's binary_logloss: 0.140214
[31]	valid_0's auc: 0.871414	valid_0's binary_logloss: 0.125161	valid_1's auc: 0.833041	valid_1's binary_logloss: 0.140216
[32]	valid_0's auc: 0.872281	valid_0's binary_logloss: 0.12493	valid_1's auc: 0.833344	valid_1's binary_logloss: 0.140148
[33]	valid_0's auc: 0.873038	valid_0's binary_logloss: 0.124672	valid_1's auc: 0.833307	valid_1's binary_logloss: 0.140091
[34]	valid_0's auc: 0.873571	valid_0's binary_logloss: 0.124499	valid_1's auc: 0.833239	valid_1's binary_logloss: 0.140069
[35]	valid_0's auc: 0.874263	valid_0's binary_logloss: 0.124311	valid_1's auc: 0.833202	valid_1's binary_logloss: 0.140067
[36]	valid_0's auc: 0.87467	valid_0's binary_logloss: 0.124165	valid_1's auc: 0.833345	valid_1's binary_logloss: 0.140029
[37]	valid_0's auc: 0.875299	valid_0's binary_logloss: 0.123937	valid_1's auc: 0.833447	valid_1's binary_logloss: 0.140009
[38]	valid_0's auc: 0.876178	valid_0's binary_logloss: 0.123686	valid_1's auc: 0.833499	valid_1's binary_logloss: 0.139986
[39]	valid_0's auc: 0.876802	valid_0's binary_logloss: 0.123446	valid_1's auc: 0.833868	valid_1's binary_logloss: 0.139955
Early stopping, best iteration is:
[9]	valid_0's auc: 0.849213	valid_0's binary_logloss: 0.135881	valid_1's auc: 0.838015	valid_1's binary_logloss: 0.1453
[1]	valid_0's auc: 0.821831	valid_0's binary_logloss: 0.156469	valid_1's auc: 0.817525	valid_1's binary_logloss: 0.165188
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.830014	valid_0's binary_logloss: 0.151109	valid_1's auc: 0.823491	valid_1's binary_logloss: 0.159651
[3]	valid_0's auc: 0.839606	valid_0's binary_logloss: 0.147325	valid_1's auc: 0.832736	valid_1's binary_logloss: 0.156031
[4]	valid_0's auc: 0.842933	valid_0's binary_logloss: 0.144392	valid_1's auc: 0.836202	valid_1's binary_logloss: 0.15311
[5]	valid_0's auc: 0.845714	valid_0's binary_logloss: 0.141965	valid_1's auc: 0.838652	valid_1's binary_logloss: 0.150749
[6]	valid_0's auc: 0.848431	valid_0's binary_logloss: 0.13995	valid_1's auc: 0.840279	valid_1's binary_logloss: 0.148948
[7]	valid_0's auc: 0.849908	valid_0's binary_logloss: 0.138315	valid_1's auc: 0.840328	valid_1's binary_logloss: 0.14741
[8]	valid_0's auc: 0.852054	valid_0's binary_logloss: 0.136907	valid_1's auc: 0.83901	valid_1's binary_logloss: 0.146175
[9]	valid_0's auc: 0.853186	valid_0's binary_logloss: 0.135648	valid_1's auc: 0.83787	valid_1's binary_logloss: 0.145198
[10]	valid_0's auc: 0.85449	valid_0's binary_logloss: 0.134596	valid_1's auc: 0.837845	valid_1's binary_logloss: 0.144271
[11]	valid_0's auc: 0.855485	valid_0's binary_logloss: 0.133677	valid_1's auc: 0.838688	valid_1's binary_logloss: 0.14351
[12]	valid_0's auc: 0.856918	valid_0's binary_logloss: 0.132832	valid_1's auc: 0.838593	valid_1's binary_logloss: 0.142834
[13]	valid_0's auc: 0.857461	valid_0's binary_logloss: 0.132079	valid_1's auc: 0.838477	valid_1's binary_logloss: 0.142308
[14]	valid_0's auc: 0.858342	valid_0's binary_logloss: 0.131428	valid_1's auc: 0.838195	valid_1's binary_logloss: 0.141925
[15]	valid_0's auc: 0.858926	valid_0's binary_logloss: 0.130816	valid_1's auc: 0.838543	valid_1's binary_logloss: 0.141466
[16]	valid_0's auc: 0.859532	valid_0's binary_logloss: 0.130275	valid_1's auc: 0.838295	valid_1's binary_logloss: 0.141112
[17]	valid_0's auc: 0.860793	valid_0's binary_logloss: 0.129728	valid_1's auc: 0.837788	valid_1's binary_logloss: 0.140844
[18]	valid_0's auc: 0.861753	valid_0's binary_logloss: 0.12924	valid_1's auc: 0.837779	valid_1's binary_logloss: 0.140599
[19]	valid_0's auc: 0.86298	valid_0's binary_logloss: 0.12873	valid_1's auc: 0.837848	valid_1's binary_logloss: 0.14042
[20]	valid_0's auc: 0.863577	valid_0's binary_logloss: 0.128318	valid_1's auc: 0.837708	valid_1's binary_logloss: 0.140222
[21]	valid_0's auc: 0.864273	valid_0's binary_logloss: 0.127905	valid_1's auc: 0.838031	valid_1's binary_logloss: 0.140093
[22]	valid_0's auc: 0.865086	valid_0's binary_logloss: 0.127531	valid_1's auc: 0.837779	valid_1's binary_logloss: 0.139982
[23]	valid_0's auc: 0.865788	valid_0's binary_logloss: 0.127146	valid_1's auc: 0.837827	valid_1's binary_logloss: 0.139856
[24]	valid_0's auc: 0.866662	valid_0's binary_logloss: 0.126815	valid_1's auc: 0.837785	valid_1's binary_logloss: 0.139755
[25]	valid_0's auc: 0.867441	valid_0's binary_logloss: 0.126498	valid_1's auc: 0.838008	valid_1's binary_logloss: 0.139673
[26]	valid_0's auc: 0.86805	valid_0's binary_logloss: 0.126176	valid_1's auc: 0.838301	valid_1's binary_logloss: 0.139585
[27]	valid_0's auc: 0.868525	valid_0's binary_logloss: 0.12589	valid_1's auc: 0.838152	valid_1's binary_logloss: 0.139564
[28]	valid_0's auc: 0.869107	valid_0's binary_logloss: 0.125656	valid_1's auc: 0.838221	valid_1's binary_logloss: 0.139481
[29]	valid_0's auc: 0.869754	valid_0's binary_logloss: 0.12538	valid_1's auc: 0.838255	valid_1's binary_logloss: 0.139428
[30]	valid_0's auc: 0.87056	valid_0's binary_logloss: 0.125113	valid_1's auc: 0.838733	valid_1's binary_logloss: 0.139354
[31]	valid_0's auc: 0.871162	valid_0's binary_logloss: 0.124861	valid_1's auc: 0.838979	valid_1's binary_logloss: 0.139225
[32]	valid_0's auc: 0.871762	valid_0's binary_logloss: 0.124652	valid_1's auc: 0.838753	valid_1's binary_logloss: 0.139233
[33]	valid_0's auc: 0.872722	valid_0's binary_logloss: 0.124394	valid_1's auc: 0.839239	valid_1's binary_logloss: 0.1391
[34]	valid_0's auc: 0.87368	valid_0's binary_logloss: 0.124127	valid_1's auc: 0.839489	valid_1's binary_logloss: 0.139029
[35]	valid_0's auc: 0.874492	valid_0's binary_logloss: 0.12392	valid_1's auc: 0.839553	valid_1's binary_logloss: 0.139024
[36]	valid_0's auc: 0.875059	valid_0's binary_logloss: 0.123723	valid_1's auc: 0.839658	valid_1's binary_logloss: 0.138948
[37]	valid_0's auc: 0.875726	valid_0's binary_logloss: 0.123495	valid_1's auc: 0.839391	valid_1's binary_logloss: 0.139005
Early stopping, best iteration is:
[7]	valid_0's auc: 0.849908	valid_0's binary_logloss: 0.138315	valid_1's auc: 0.840328	valid_1's binary_logloss: 0.14741
[1]	valid_0's auc: 0.821427	valid_0's binary_logloss: 0.156591	valid_1's auc: 0.81711	valid_1's binary_logloss: 0.165271
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.827907	valid_0's binary_logloss: 0.151333	valid_1's auc: 0.821466	valid_1's binary_logloss: 0.160205
[3]	valid_0's auc: 0.837845	valid_0's binary_logloss: 0.147475	valid_1's auc: 0.828667	valid_1's binary_logloss: 0.156481
[4]	valid_0's auc: 0.840614	valid_0's binary_logloss: 0.144432	valid_1's auc: 0.831943	valid_1's binary_logloss: 0.153529
[5]	valid_0's auc: 0.843179	valid_0's binary_logloss: 0.142066	valid_1's auc: 0.834251	valid_1's binary_logloss: 0.151202
[6]	valid_0's auc: 0.843372	valid_0's binary_logloss: 0.140161	valid_1's auc: 0.834689	valid_1's binary_logloss: 0.14929
[7]	valid_0's auc: 0.844766	valid_0's binary_logloss: 0.138478	valid_1's auc: 0.835816	valid_1's binary_logloss: 0.147704
[8]	valid_0's auc: 0.847116	valid_0's binary_logloss: 0.137115	valid_1's auc: 0.836076	valid_1's binary_logloss: 0.146539
[9]	valid_0's auc: 0.850254	valid_0's binary_logloss: 0.135912	valid_1's auc: 0.836014	valid_1's binary_logloss: 0.145594
[10]	valid_0's auc: 0.851797	valid_0's binary_logloss: 0.13482	valid_1's auc: 0.836595	valid_1's binary_logloss: 0.144731
[11]	valid_0's auc: 0.852928	valid_0's binary_logloss: 0.133901	valid_1's auc: 0.835902	valid_1's binary_logloss: 0.144167
[12]	valid_0's auc: 0.854858	valid_0's binary_logloss: 0.133062	valid_1's auc: 0.836859	valid_1's binary_logloss: 0.143545
[13]	valid_0's auc: 0.856392	valid_0's binary_logloss: 0.13232	valid_1's auc: 0.837517	valid_1's binary_logloss: 0.14296
[14]	valid_0's auc: 0.857549	valid_0's binary_logloss: 0.131636	valid_1's auc: 0.837302	valid_1's binary_logloss: 0.142542
[15]	valid_0's auc: 0.858338	valid_0's binary_logloss: 0.131026	valid_1's auc: 0.837003	valid_1's binary_logloss: 0.142141
[16]	valid_0's auc: 0.859394	valid_0's binary_logloss: 0.130449	valid_1's auc: 0.836672	valid_1's binary_logloss: 0.141779
[17]	valid_0's auc: 0.860095	valid_0's binary_logloss: 0.129926	valid_1's auc: 0.83723	valid_1's binary_logloss: 0.141395
[18]	valid_0's auc: 0.861058	valid_0's binary_logloss: 0.129444	valid_1's auc: 0.837871	valid_1's binary_logloss: 0.141105
[19]	valid_0's auc: 0.862258	valid_0's binary_logloss: 0.128997	valid_1's auc: 0.83794	valid_1's binary_logloss: 0.140829
[20]	valid_0's auc: 0.863204	valid_0's binary_logloss: 0.128569	valid_1's auc: 0.838426	valid_1's binary_logloss: 0.140558
[21]	valid_0's auc: 0.864129	valid_0's binary_logloss: 0.128163	valid_1's auc: 0.838539	valid_1's binary_logloss: 0.140401
[22]	valid_0's auc: 0.865066	valid_0's binary_logloss: 0.127776	valid_1's auc: 0.838552	valid_1's binary_logloss: 0.140268
[23]	valid_0's auc: 0.866036	valid_0's binary_logloss: 0.127415	valid_1's auc: 0.838667	valid_1's binary_logloss: 0.140163
[24]	valid_0's auc: 0.866894	valid_0's binary_logloss: 0.127047	valid_1's auc: 0.838711	valid_1's binary_logloss: 0.139983
[25]	valid_0's auc: 0.867646	valid_0's binary_logloss: 0.126759	valid_1's auc: 0.83898	valid_1's binary_logloss: 0.13986
[26]	valid_0's auc: 0.868321	valid_0's binary_logloss: 0.126486	valid_1's auc: 0.83866	valid_1's binary_logloss: 0.139795
[27]	valid_0's auc: 0.869533	valid_0's binary_logloss: 0.126137	valid_1's auc: 0.838474	valid_1's binary_logloss: 0.13973
[28]	valid_0's auc: 0.87009	valid_0's binary_logloss: 0.125841	valid_1's auc: 0.838418	valid_1's binary_logloss: 0.139661
[29]	valid_0's auc: 0.870611	valid_0's binary_logloss: 0.125574	valid_1's auc: 0.838527	valid_1's binary_logloss: 0.139586
[30]	valid_0's auc: 0.871288	valid_0's binary_logloss: 0.125351	valid_1's auc: 0.838578	valid_1's binary_logloss: 0.139533
[31]	valid_0's auc: 0.871941	valid_0's binary_logloss: 0.125092	valid_1's auc: 0.839062	valid_1's binary_logloss: 0.139431
[32]	valid_0's auc: 0.872841	valid_0's binary_logloss: 0.124816	valid_1's auc: 0.839243	valid_1's binary_logloss: 0.139362
[33]	valid_0's auc: 0.873443	valid_0's binary_logloss: 0.124593	valid_1's auc: 0.839482	valid_1's binary_logloss: 0.139288
[34]	valid_0's auc: 0.874317	valid_0's binary_logloss: 0.124327	valid_1's auc: 0.839612	valid_1's binary_logloss: 0.139266
[35]	valid_0's auc: 0.875065	valid_0's binary_logloss: 0.124053	valid_1's auc: 0.839746	valid_1's binary_logloss: 0.139241
[36]	valid_0's auc: 0.875683	valid_0's binary_logloss: 0.123777	valid_1's auc: 0.840074	valid_1's binary_logloss: 0.139138
[37]	valid_0's auc: 0.876241	valid_0's binary_logloss: 0.123571	valid_1's auc: 0.840105	valid_1's binary_logloss: 0.139101
[38]	valid_0's auc: 0.876923	valid_0's binary_logloss: 0.123355	valid_1's auc: 0.839838	valid_1's binary_logloss: 0.139141
[39]	valid_0's auc: 0.877193	valid_0's binary_logloss: 0.123186	valid_1's auc: 0.84006	valid_1's binary_logloss: 0.139095
[40]	valid_0's auc: 0.877543	valid_0's binary_logloss: 0.123003	valid_1's auc: 0.839944	valid_1's binary_logloss: 0.139144
[41]	valid_0's auc: 0.877994	valid_0's binary_logloss: 0.122806	valid_1's auc: 0.839925	valid_1's binary_logloss: 0.139157
[42]	valid_0's auc: 0.878413	valid_0's binary_logloss: 0.122608	valid_1's auc: 0.839588	valid_1's binary_logloss: 0.139183
[43]	valid_0's auc: 0.87868	valid_0's binary_logloss: 0.122482	valid_1's auc: 0.839828	valid_1's binary_logloss: 0.139122
[44]	valid_0's auc: 0.878844	valid_0's binary_logloss: 0.122346	valid_1's auc: 0.839999	valid_1's binary_logloss: 0.139112
[45]	valid_0's auc: 0.879115	valid_0's binary_logloss: 0.12219	valid_1's auc: 0.839862	valid_1's binary_logloss: 0.139105
[46]	valid_0's auc: 0.879426	valid_0's binary_logloss: 0.122041	valid_1's auc: 0.839926	valid_1's binary_logloss: 0.139086
[47]	valid_0's auc: 0.879882	valid_0's binary_logloss: 0.121829	valid_1's auc: 0.840044	valid_1's binary_logloss: 0.139063
[48]	valid_0's auc: 0.880499	valid_0's binary_logloss: 0.121624	valid_1's auc: 0.840013	valid_1's binary_logloss: 0.139091
[49]	valid_0's auc: 0.880866	valid_0's binary_logloss: 0.121448	valid_1's auc: 0.839755	valid_1's binary_logloss: 0.139167
[50]	valid_0's auc: 0.881448	valid_0's binary_logloss: 0.121247	valid_1's auc: 0.839886	valid_1's binary_logloss: 0.139173
[51]	valid_0's auc: 0.881689	valid_0's binary_logloss: 0.121135	valid_1's auc: 0.840158	valid_1's binary_logloss: 0.139144
[52]	valid_0's auc: 0.882122	valid_0's binary_logloss: 0.12097	valid_1's auc: 0.839978	valid_1's binary_logloss: 0.139193
[53]	valid_0's auc: 0.882285	valid_0's binary_logloss: 0.120863	valid_1's auc: 0.83979	valid_1's binary_logloss: 0.139241
[54]	valid_0's auc: 0.882575	valid_0's binary_logloss: 0.120721	valid_1's auc: 0.839521	valid_1's binary_logloss: 0.139335
[55]	valid_0's auc: 0.88311	valid_0's binary_logloss: 0.120518	valid_1's auc: 0.839558	valid_1's binary_logloss: 0.139336
[56]	valid_0's auc: 0.883389	valid_0's binary_logloss: 0.120373	valid_1's auc: 0.83979	valid_1's binary_logloss: 0.139326
[57]	valid_0's auc: 0.88369	valid_0's binary_logloss: 0.120248	valid_1's auc: 0.839887	valid_1's binary_logloss: 0.139321
[58]	valid_0's auc: 0.884025	valid_0's binary_logloss: 0.120078	valid_1's auc: 0.839684	valid_1's binary_logloss: 0.139341
[59]	valid_0's auc: 0.884477	valid_0's binary_logloss: 0.119928	valid_1's auc: 0.839523	valid_1's binary_logloss: 0.139368
[60]	valid_0's auc: 0.884659	valid_0's binary_logloss: 0.119822	valid_1's auc: 0.839745	valid_1's binary_logloss: 0.139362
[61]	valid_0's auc: 0.885121	valid_0's binary_logloss: 0.119618	valid_1's auc: 0.839533	valid_1's binary_logloss: 0.139434
[62]	valid_0's auc: 0.885341	valid_0's binary_logloss: 0.119477	valid_1's auc: 0.839843	valid_1's binary_logloss: 0.139385
[63]	valid_0's auc: 0.885487	valid_0's binary_logloss: 0.119367	valid_1's auc: 0.839843	valid_1's binary_logloss: 0.139409
[64]	valid_0's auc: 0.885645	valid_0's binary_logloss: 0.119254	valid_1's auc: 0.839603	valid_1's binary_logloss: 0.139483
[65]	valid_0's auc: 0.886	valid_0's binary_logloss: 0.119067	valid_1's auc: 0.839806	valid_1's binary_logloss: 0.139511
[66]	valid_0's auc: 0.886267	valid_0's binary_logloss: 0.118949	valid_1's auc: 0.839758	valid_1's binary_logloss: 0.139539
[67]	valid_0's auc: 0.886435	valid_0's binary_logloss: 0.118836	valid_1's auc: 0.83953	valid_1's binary_logloss: 0.139595
[68]	valid_0's auc: 0.886593	valid_0's binary_logloss: 0.118728	valid_1's auc: 0.839422	valid_1's binary_logloss: 0.139608
[69]	valid_0's auc: 0.886791	valid_0's binary_logloss: 0.118595	valid_1's auc: 0.839492	valid_1's binary_logloss: 0.139615
[70]	valid_0's auc: 0.886904	valid_0's binary_logloss: 0.118527	valid_1's auc: 0.83981	valid_1's binary_logloss: 0.139598
[71]	valid_0's auc: 0.887299	valid_0's binary_logloss: 0.118387	valid_1's auc: 0.839869	valid_1's binary_logloss: 0.139594
[72]	valid_0's auc: 0.887483	valid_0's binary_logloss: 0.118281	valid_1's auc: 0.839788	valid_1's binary_logloss: 0.139654
[73]	valid_0's auc: 0.887557	valid_0's binary_logloss: 0.118196	valid_1's auc: 0.840147	valid_1's binary_logloss: 0.139624
[74]	valid_0's auc: 0.887851	valid_0's binary_logloss: 0.118068	valid_1's auc: 0.840137	valid_1's binary_logloss: 0.139665
[75]	valid_0's auc: 0.888042	valid_0's binary_logloss: 0.117962	valid_1's auc: 0.840289	valid_1's binary_logloss: 0.139644
[76]	valid_0's auc: 0.888422	valid_0's binary_logloss: 0.11776	valid_1's auc: 0.83992	valid_1's binary_logloss: 0.139749
[77]	valid_0's auc: 0.88853	valid_0's binary_logloss: 0.117667	valid_1's auc: 0.839998	valid_1's binary_logloss: 0.139765
Early stopping, best iteration is:
[47]	valid_0's auc: 0.879882	valid_0's binary_logloss: 0.121829	valid_1's auc: 0.840044	valid_1's binary_logloss: 0.139063
[1]	valid_0's auc: 0.835412	valid_0's binary_logloss: 0.155721	valid_1's auc: 0.81973	valid_1's binary_logloss: 0.164849
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.841234	valid_0's binary_logloss: 0.150325	valid_1's auc: 0.823604	valid_1's binary_logloss: 0.159865
[3]	valid_0's auc: 0.846416	valid_0's binary_logloss: 0.146303	valid_1's auc: 0.824414	valid_1's binary_logloss: 0.156273
[4]	valid_0's auc: 0.85113	valid_0's binary_logloss: 0.143142	valid_1's auc: 0.83033	valid_1's binary_logloss: 0.153388
[5]	valid_0's auc: 0.854001	valid_0's binary_logloss: 0.140572	valid_1's auc: 0.831279	valid_1's binary_logloss: 0.151139
[6]	valid_0's auc: 0.856684	valid_0's binary_logloss: 0.138389	valid_1's auc: 0.83309	valid_1's binary_logloss: 0.149282
[7]	valid_0's auc: 0.858456	valid_0's binary_logloss: 0.136552	valid_1's auc: 0.833723	valid_1's binary_logloss: 0.147799
[8]	valid_0's auc: 0.8595	valid_0's binary_logloss: 0.135038	valid_1's auc: 0.834417	valid_1's binary_logloss: 0.146527
[9]	valid_0's auc: 0.861391	valid_0's binary_logloss: 0.133585	valid_1's auc: 0.834625	valid_1's binary_logloss: 0.145543
[10]	valid_0's auc: 0.863676	valid_0's binary_logloss: 0.132396	valid_1's auc: 0.834954	valid_1's binary_logloss: 0.144707
[11]	valid_0's auc: 0.865339	valid_0's binary_logloss: 0.13134	valid_1's auc: 0.835252	valid_1's binary_logloss: 0.144034
[12]	valid_0's auc: 0.866817	valid_0's binary_logloss: 0.130311	valid_1's auc: 0.835592	valid_1's binary_logloss: 0.143404
[13]	valid_0's auc: 0.868751	valid_0's binary_logloss: 0.129388	valid_1's auc: 0.835225	valid_1's binary_logloss: 0.142853
[14]	valid_0's auc: 0.870283	valid_0's binary_logloss: 0.128551	valid_1's auc: 0.835191	valid_1's binary_logloss: 0.142412
[15]	valid_0's auc: 0.871366	valid_0's binary_logloss: 0.127828	valid_1's auc: 0.834681	valid_1's binary_logloss: 0.142173
[16]	valid_0's auc: 0.87234	valid_0's binary_logloss: 0.127146	valid_1's auc: 0.834555	valid_1's binary_logloss: 0.141876
[17]	valid_0's auc: 0.872963	valid_0's binary_logloss: 0.126553	valid_1's auc: 0.833341	valid_1's binary_logloss: 0.141845
[18]	valid_0's auc: 0.874019	valid_0's binary_logloss: 0.125934	valid_1's auc: 0.833684	valid_1's binary_logloss: 0.141515
[19]	valid_0's auc: 0.875288	valid_0's binary_logloss: 0.125352	valid_1's auc: 0.832788	valid_1's binary_logloss: 0.141499
[20]	valid_0's auc: 0.876379	valid_0's binary_logloss: 0.124793	valid_1's auc: 0.832498	valid_1's binary_logloss: 0.141369
[21]	valid_0's auc: 0.877634	valid_0's binary_logloss: 0.124225	valid_1's auc: 0.832355	valid_1's binary_logloss: 0.141226
[22]	valid_0's auc: 0.878588	valid_0's binary_logloss: 0.123762	valid_1's auc: 0.832703	valid_1's binary_logloss: 0.141078
[23]	valid_0's auc: 0.879693	valid_0's binary_logloss: 0.123255	valid_1's auc: 0.832163	valid_1's binary_logloss: 0.141039
[24]	valid_0's auc: 0.880719	valid_0's binary_logloss: 0.122807	valid_1's auc: 0.832324	valid_1's binary_logloss: 0.140925
[25]	valid_0's auc: 0.881533	valid_0's binary_logloss: 0.122313	valid_1's auc: 0.832184	valid_1's binary_logloss: 0.140891
[26]	valid_0's auc: 0.882449	valid_0's binary_logloss: 0.121884	valid_1's auc: 0.832004	valid_1's binary_logloss: 0.140834
[27]	valid_0's auc: 0.883373	valid_0's binary_logloss: 0.121461	valid_1's auc: 0.83169	valid_1's binary_logloss: 0.140849
[28]	valid_0's auc: 0.884175	valid_0's binary_logloss: 0.121053	valid_1's auc: 0.831077	valid_1's binary_logloss: 0.140897
[29]	valid_0's auc: 0.885058	valid_0's binary_logloss: 0.120689	valid_1's auc: 0.831657	valid_1's binary_logloss: 0.140775
[30]	valid_0's auc: 0.885865	valid_0's binary_logloss: 0.120366	valid_1's auc: 0.83154	valid_1's binary_logloss: 0.140767
[31]	valid_0's auc: 0.887114	valid_0's binary_logloss: 0.11994	valid_1's auc: 0.831439	valid_1's binary_logloss: 0.140816
[32]	valid_0's auc: 0.887603	valid_0's binary_logloss: 0.119623	valid_1's auc: 0.831023	valid_1's binary_logloss: 0.140912
[33]	valid_0's auc: 0.888164	valid_0's binary_logloss: 0.11928	valid_1's auc: 0.830667	valid_1's binary_logloss: 0.140953
[34]	valid_0's auc: 0.888957	valid_0's binary_logloss: 0.118937	valid_1's auc: 0.830222	valid_1's binary_logloss: 0.141016
[35]	valid_0's auc: 0.889746	valid_0's binary_logloss: 0.118573	valid_1's auc: 0.830175	valid_1's binary_logloss: 0.141039
[36]	valid_0's auc: 0.890215	valid_0's binary_logloss: 0.118288	valid_1's auc: 0.830319	valid_1's binary_logloss: 0.140986
[37]	valid_0's auc: 0.890966	valid_0's binary_logloss: 0.117947	valid_1's auc: 0.830443	valid_1's binary_logloss: 0.140993
[38]	valid_0's auc: 0.891474	valid_0's binary_logloss: 0.117667	valid_1's auc: 0.830505	valid_1's binary_logloss: 0.141016
[39]	valid_0's auc: 0.892156	valid_0's binary_logloss: 0.117328	valid_1's auc: 0.830415	valid_1's binary_logloss: 0.140994
[40]	valid_0's auc: 0.892568	valid_0's binary_logloss: 0.117061	valid_1's auc: 0.830687	valid_1's binary_logloss: 0.140918
[41]	valid_0's auc: 0.893181	valid_0's binary_logloss: 0.11681	valid_1's auc: 0.830622	valid_1's binary_logloss: 0.140963
[42]	valid_0's auc: 0.893984	valid_0's binary_logloss: 0.116476	valid_1's auc: 0.830827	valid_1's binary_logloss: 0.140945
Early stopping, best iteration is:
[12]	valid_0's auc: 0.866817	valid_0's binary_logloss: 0.130311	valid_1's auc: 0.835592	valid_1's binary_logloss: 0.143404
[1]	valid_0's auc: 0.830452	valid_0's binary_logloss: 0.155919	valid_1's auc: 0.817465	valid_1's binary_logloss: 0.164895
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.843009	valid_0's binary_logloss: 0.150278	valid_1's auc: 0.828176	valid_1's binary_logloss: 0.159411
[3]	valid_0's auc: 0.851789	valid_0's binary_logloss: 0.146252	valid_1's auc: 0.832597	valid_1's binary_logloss: 0.155736
[4]	valid_0's auc: 0.854657	valid_0's binary_logloss: 0.143075	valid_1's auc: 0.83416	valid_1's binary_logloss: 0.152859
[5]	valid_0's auc: 0.85652	valid_0's binary_logloss: 0.140511	valid_1's auc: 0.836671	valid_1's binary_logloss: 0.150427
[6]	valid_0's auc: 0.85962	valid_0's binary_logloss: 0.138318	valid_1's auc: 0.836445	valid_1's binary_logloss: 0.148531
[7]	valid_0's auc: 0.861186	valid_0's binary_logloss: 0.136426	valid_1's auc: 0.837332	valid_1's binary_logloss: 0.146966
[8]	valid_0's auc: 0.863008	valid_0's binary_logloss: 0.134791	valid_1's auc: 0.837707	valid_1's binary_logloss: 0.145637
[9]	valid_0's auc: 0.864093	valid_0's binary_logloss: 0.133383	valid_1's auc: 0.838849	valid_1's binary_logloss: 0.144456
[10]	valid_0's auc: 0.865712	valid_0's binary_logloss: 0.132144	valid_1's auc: 0.839148	valid_1's binary_logloss: 0.143548
[11]	valid_0's auc: 0.866608	valid_0's binary_logloss: 0.131102	valid_1's auc: 0.83905	valid_1's binary_logloss: 0.142819
[12]	valid_0's auc: 0.868335	valid_0's binary_logloss: 0.130069	valid_1's auc: 0.838903	valid_1's binary_logloss: 0.142184
[13]	valid_0's auc: 0.869075	valid_0's binary_logloss: 0.12923	valid_1's auc: 0.838882	valid_1's binary_logloss: 0.141681
[14]	valid_0's auc: 0.870379	valid_0's binary_logloss: 0.128406	valid_1's auc: 0.838698	valid_1's binary_logloss: 0.141227
[15]	valid_0's auc: 0.871467	valid_0's binary_logloss: 0.127626	valid_1's auc: 0.838689	valid_1's binary_logloss: 0.140825
[16]	valid_0's auc: 0.872424	valid_0's binary_logloss: 0.126889	valid_1's auc: 0.838836	valid_1's binary_logloss: 0.140446
[17]	valid_0's auc: 0.873627	valid_0's binary_logloss: 0.126201	valid_1's auc: 0.839557	valid_1's binary_logloss: 0.14014
[18]	valid_0's auc: 0.87479	valid_0's binary_logloss: 0.125597	valid_1's auc: 0.839776	valid_1's binary_logloss: 0.139941
[19]	valid_0's auc: 0.875902	valid_0's binary_logloss: 0.124996	valid_1's auc: 0.839902	valid_1's binary_logloss: 0.139701
[20]	valid_0's auc: 0.877163	valid_0's binary_logloss: 0.124367	valid_1's auc: 0.839299	valid_1's binary_logloss: 0.139581
[21]	valid_0's auc: 0.878438	valid_0's binary_logloss: 0.123757	valid_1's auc: 0.839098	valid_1's binary_logloss: 0.139467
[22]	valid_0's auc: 0.879596	valid_0's binary_logloss: 0.123223	valid_1's auc: 0.838863	valid_1's binary_logloss: 0.139407
[23]	valid_0's auc: 0.880754	valid_0's binary_logloss: 0.122702	valid_1's auc: 0.839119	valid_1's binary_logloss: 0.139326
[24]	valid_0's auc: 0.881758	valid_0's binary_logloss: 0.122226	valid_1's auc: 0.839042	valid_1's binary_logloss: 0.139233
[25]	valid_0's auc: 0.883034	valid_0's binary_logloss: 0.121743	valid_1's auc: 0.838865	valid_1's binary_logloss: 0.139239
[26]	valid_0's auc: 0.884403	valid_0's binary_logloss: 0.121255	valid_1's auc: 0.838329	valid_1's binary_logloss: 0.139254
[27]	valid_0's auc: 0.885324	valid_0's binary_logloss: 0.120835	valid_1's auc: 0.837953	valid_1's binary_logloss: 0.139276
[28]	valid_0's auc: 0.886543	valid_0's binary_logloss: 0.120403	valid_1's auc: 0.838377	valid_1's binary_logloss: 0.139233
[29]	valid_0's auc: 0.887286	valid_0's binary_logloss: 0.120007	valid_1's auc: 0.838269	valid_1's binary_logloss: 0.139189
[30]	valid_0's auc: 0.888016	valid_0's binary_logloss: 0.119609	valid_1's auc: 0.838269	valid_1's binary_logloss: 0.139184
[31]	valid_0's auc: 0.888842	valid_0's binary_logloss: 0.11924	valid_1's auc: 0.838469	valid_1's binary_logloss: 0.139125
[32]	valid_0's auc: 0.889829	valid_0's binary_logloss: 0.118853	valid_1's auc: 0.838631	valid_1's binary_logloss: 0.139121
[33]	valid_0's auc: 0.890569	valid_0's binary_logloss: 0.118502	valid_1's auc: 0.838736	valid_1's binary_logloss: 0.139065
[34]	valid_0's auc: 0.891418	valid_0's binary_logloss: 0.118158	valid_1's auc: 0.838692	valid_1's binary_logloss: 0.139051
[35]	valid_0's auc: 0.892153	valid_0's binary_logloss: 0.117818	valid_1's auc: 0.838839	valid_1's binary_logloss: 0.139022
[36]	valid_0's auc: 0.892798	valid_0's binary_logloss: 0.117475	valid_1's auc: 0.838585	valid_1's binary_logloss: 0.139057
[37]	valid_0's auc: 0.893672	valid_0's binary_logloss: 0.117095	valid_1's auc: 0.838499	valid_1's binary_logloss: 0.139109
[38]	valid_0's auc: 0.894575	valid_0's binary_logloss: 0.116783	valid_1's auc: 0.83897	valid_1's binary_logloss: 0.139067
[39]	valid_0's auc: 0.895447	valid_0's binary_logloss: 0.116421	valid_1's auc: 0.838515	valid_1's binary_logloss: 0.139143
[40]	valid_0's auc: 0.896177	valid_0's binary_logloss: 0.116084	valid_1's auc: 0.838353	valid_1's binary_logloss: 0.13918
[41]	valid_0's auc: 0.896821	valid_0's binary_logloss: 0.115822	valid_1's auc: 0.837933	valid_1's binary_logloss: 0.139263
[42]	valid_0's auc: 0.897531	valid_0's binary_logloss: 0.115503	valid_1's auc: 0.838083	valid_1's binary_logloss: 0.139286
[43]	valid_0's auc: 0.898208	valid_0's binary_logloss: 0.115195	valid_1's auc: 0.838087	valid_1's binary_logloss: 0.139312
[44]	valid_0's auc: 0.899032	valid_0's binary_logloss: 0.114921	valid_1's auc: 0.837956	valid_1's binary_logloss: 0.139352
[45]	valid_0's auc: 0.899435	valid_0's binary_logloss: 0.114646	valid_1's auc: 0.837664	valid_1's binary_logloss: 0.139425
[46]	valid_0's auc: 0.899904	valid_0's binary_logloss: 0.114381	valid_1's auc: 0.837191	valid_1's binary_logloss: 0.139562
[47]	valid_0's auc: 0.900313	valid_0's binary_logloss: 0.114172	valid_1's auc: 0.837275	valid_1's binary_logloss: 0.139576
[48]	valid_0's auc: 0.900881	valid_0's binary_logloss: 0.113898	valid_1's auc: 0.837102	valid_1's binary_logloss: 0.139641
[49]	valid_0's auc: 0.901416	valid_0's binary_logloss: 0.11361	valid_1's auc: 0.837064	valid_1's binary_logloss: 0.139694
Early stopping, best iteration is:
[19]	valid_0's auc: 0.875902	valid_0's binary_logloss: 0.124996	valid_1's auc: 0.839902	valid_1's binary_logloss: 0.139701
[1]	valid_0's auc: 0.834758	valid_0's binary_logloss: 0.156067	valid_1's auc: 0.822971	valid_1's binary_logloss: 0.165101
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.842403	valid_0's binary_logloss: 0.150502	valid_1's auc: 0.828728	valid_1's binary_logloss: 0.159785
[3]	valid_0's auc: 0.847356	valid_0's binary_logloss: 0.146328	valid_1's auc: 0.830832	valid_1's binary_logloss: 0.156017
[4]	valid_0's auc: 0.84996	valid_0's binary_logloss: 0.143134	valid_1's auc: 0.83154	valid_1's binary_logloss: 0.15319
[5]	valid_0's auc: 0.853547	valid_0's binary_logloss: 0.140471	valid_1's auc: 0.833818	valid_1's binary_logloss: 0.150761
[6]	valid_0's auc: 0.855549	valid_0's binary_logloss: 0.138282	valid_1's auc: 0.834224	valid_1's binary_logloss: 0.148815
[7]	valid_0's auc: 0.857045	valid_0's binary_logloss: 0.136467	valid_1's auc: 0.835329	valid_1's binary_logloss: 0.147338
[8]	valid_0's auc: 0.858416	valid_0's binary_logloss: 0.134953	valid_1's auc: 0.836144	valid_1's binary_logloss: 0.146094
[9]	valid_0's auc: 0.860878	valid_0's binary_logloss: 0.1336	valid_1's auc: 0.835613	valid_1's binary_logloss: 0.145179
[10]	valid_0's auc: 0.862432	valid_0's binary_logloss: 0.132438	valid_1's auc: 0.836519	valid_1's binary_logloss: 0.144304
[11]	valid_0's auc: 0.864715	valid_0's binary_logloss: 0.131298	valid_1's auc: 0.836728	valid_1's binary_logloss: 0.143576
[12]	valid_0's auc: 0.86649	valid_0's binary_logloss: 0.13029	valid_1's auc: 0.837121	valid_1's binary_logloss: 0.142892
[13]	valid_0's auc: 0.869207	valid_0's binary_logloss: 0.129293	valid_1's auc: 0.837902	valid_1's binary_logloss: 0.142287
[14]	valid_0's auc: 0.870823	valid_0's binary_logloss: 0.128463	valid_1's auc: 0.838274	valid_1's binary_logloss: 0.141902
[15]	valid_0's auc: 0.871881	valid_0's binary_logloss: 0.127657	valid_1's auc: 0.83767	valid_1's binary_logloss: 0.141625
[16]	valid_0's auc: 0.873451	valid_0's binary_logloss: 0.126932	valid_1's auc: 0.837454	valid_1's binary_logloss: 0.141273
[17]	valid_0's auc: 0.874754	valid_0's binary_logloss: 0.126215	valid_1's auc: 0.837218	valid_1's binary_logloss: 0.14099
[18]	valid_0's auc: 0.876015	valid_0's binary_logloss: 0.125583	valid_1's auc: 0.837242	valid_1's binary_logloss: 0.140723
[19]	valid_0's auc: 0.876898	valid_0's binary_logloss: 0.125006	valid_1's auc: 0.836677	valid_1's binary_logloss: 0.140576
[20]	valid_0's auc: 0.878198	valid_0's binary_logloss: 0.124464	valid_1's auc: 0.836677	valid_1's binary_logloss: 0.140492
[21]	valid_0's auc: 0.87919	valid_0's binary_logloss: 0.123881	valid_1's auc: 0.837511	valid_1's binary_logloss: 0.140273
[22]	valid_0's auc: 0.880273	valid_0's binary_logloss: 0.123358	valid_1's auc: 0.837479	valid_1's binary_logloss: 0.14014
[23]	valid_0's auc: 0.881151	valid_0's binary_logloss: 0.122898	valid_1's auc: 0.837864	valid_1's binary_logloss: 0.139991
[24]	valid_0's auc: 0.882264	valid_0's binary_logloss: 0.122383	valid_1's auc: 0.8378	valid_1's binary_logloss: 0.139955
[25]	valid_0's auc: 0.883252	valid_0's binary_logloss: 0.121909	valid_1's auc: 0.838233	valid_1's binary_logloss: 0.139841
[26]	valid_0's auc: 0.884254	valid_0's binary_logloss: 0.121477	valid_1's auc: 0.837795	valid_1's binary_logloss: 0.139804
[27]	valid_0's auc: 0.885619	valid_0's binary_logloss: 0.120986	valid_1's auc: 0.838147	valid_1's binary_logloss: 0.139714
[28]	valid_0's auc: 0.886542	valid_0's binary_logloss: 0.120573	valid_1's auc: 0.837608	valid_1's binary_logloss: 0.139727
[29]	valid_0's auc: 0.887407	valid_0's binary_logloss: 0.120146	valid_1's auc: 0.837298	valid_1's binary_logloss: 0.139725
[30]	valid_0's auc: 0.88824	valid_0's binary_logloss: 0.119775	valid_1's auc: 0.837294	valid_1's binary_logloss: 0.139714
[31]	valid_0's auc: 0.889124	valid_0's binary_logloss: 0.119428	valid_1's auc: 0.837511	valid_1's binary_logloss: 0.139644
[32]	valid_0's auc: 0.889919	valid_0's binary_logloss: 0.119052	valid_1's auc: 0.838258	valid_1's binary_logloss: 0.139519
[33]	valid_0's auc: 0.890537	valid_0's binary_logloss: 0.118727	valid_1's auc: 0.837988	valid_1's binary_logloss: 0.139526
[34]	valid_0's auc: 0.891097	valid_0's binary_logloss: 0.118375	valid_1's auc: 0.837857	valid_1's binary_logloss: 0.1396
[35]	valid_0's auc: 0.891811	valid_0's binary_logloss: 0.118027	valid_1's auc: 0.83771	valid_1's binary_logloss: 0.139627
[36]	valid_0's auc: 0.892678	valid_0's binary_logloss: 0.117704	valid_1's auc: 0.837436	valid_1's binary_logloss: 0.139695
[37]	valid_0's auc: 0.893595	valid_0's binary_logloss: 0.117332	valid_1's auc: 0.837433	valid_1's binary_logloss: 0.13969
[38]	valid_0's auc: 0.894079	valid_0's binary_logloss: 0.117021	valid_1's auc: 0.837272	valid_1's binary_logloss: 0.139691
[39]	valid_0's auc: 0.894556	valid_0's binary_logloss: 0.11674	valid_1's auc: 0.837253	valid_1's binary_logloss: 0.139685
[40]	valid_0's auc: 0.89516	valid_0's binary_logloss: 0.116428	valid_1's auc: 0.837294	valid_1's binary_logloss: 0.139678
[41]	valid_0's auc: 0.895863	valid_0's binary_logloss: 0.116155	valid_1's auc: 0.837866	valid_1's binary_logloss: 0.139562
[42]	valid_0's auc: 0.896354	valid_0's binary_logloss: 0.115871	valid_1's auc: 0.837735	valid_1's binary_logloss: 0.139612
[43]	valid_0's auc: 0.896691	valid_0's binary_logloss: 0.115612	valid_1's auc: 0.837481	valid_1's binary_logloss: 0.139702
[44]	valid_0's auc: 0.897343	valid_0's binary_logloss: 0.115316	valid_1's auc: 0.837651	valid_1's binary_logloss: 0.139672
Early stopping, best iteration is:
[14]	valid_0's auc: 0.870823	valid_0's binary_logloss: 0.128463	valid_1's auc: 0.838274	valid_1's binary_logloss: 0.141902
[1]	valid_0's auc: 0.835412	valid_0's binary_logloss: 0.155721	valid_1's auc: 0.81973	valid_1's binary_logloss: 0.164849
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.841234	valid_0's binary_logloss: 0.150325	valid_1's auc: 0.823604	valid_1's binary_logloss: 0.159865
[3]	valid_0's auc: 0.846416	valid_0's binary_logloss: 0.146303	valid_1's auc: 0.824414	valid_1's binary_logloss: 0.156273
[4]	valid_0's auc: 0.85113	valid_0's binary_logloss: 0.143142	valid_1's auc: 0.83033	valid_1's binary_logloss: 0.153388
[5]	valid_0's auc: 0.854001	valid_0's binary_logloss: 0.140572	valid_1's auc: 0.831279	valid_1's binary_logloss: 0.151139
[6]	valid_0's auc: 0.856684	valid_0's binary_logloss: 0.138389	valid_1's auc: 0.83309	valid_1's binary_logloss: 0.149282
[7]	valid_0's auc: 0.858456	valid_0's binary_logloss: 0.136552	valid_1's auc: 0.833723	valid_1's binary_logloss: 0.147799
[8]	valid_0's auc: 0.8595	valid_0's binary_logloss: 0.135038	valid_1's auc: 0.834417	valid_1's binary_logloss: 0.146527
[9]	valid_0's auc: 0.861391	valid_0's binary_logloss: 0.133585	valid_1's auc: 0.834625	valid_1's binary_logloss: 0.145543
[10]	valid_0's auc: 0.863676	valid_0's binary_logloss: 0.132396	valid_1's auc: 0.834954	valid_1's binary_logloss: 0.144707
[11]	valid_0's auc: 0.865339	valid_0's binary_logloss: 0.13134	valid_1's auc: 0.835252	valid_1's binary_logloss: 0.144034
[12]	valid_0's auc: 0.866817	valid_0's binary_logloss: 0.130311	valid_1's auc: 0.835592	valid_1's binary_logloss: 0.143404
[13]	valid_0's auc: 0.868751	valid_0's binary_logloss: 0.129388	valid_1's auc: 0.835225	valid_1's binary_logloss: 0.142853
[14]	valid_0's auc: 0.870283	valid_0's binary_logloss: 0.128551	valid_1's auc: 0.835191	valid_1's binary_logloss: 0.142412
[15]	valid_0's auc: 0.871366	valid_0's binary_logloss: 0.127828	valid_1's auc: 0.834681	valid_1's binary_logloss: 0.142173
[16]	valid_0's auc: 0.87234	valid_0's binary_logloss: 0.127146	valid_1's auc: 0.834555	valid_1's binary_logloss: 0.141876
[17]	valid_0's auc: 0.872963	valid_0's binary_logloss: 0.126553	valid_1's auc: 0.833341	valid_1's binary_logloss: 0.141845
[18]	valid_0's auc: 0.874019	valid_0's binary_logloss: 0.125934	valid_1's auc: 0.833684	valid_1's binary_logloss: 0.141515
[19]	valid_0's auc: 0.875288	valid_0's binary_logloss: 0.125352	valid_1's auc: 0.832788	valid_1's binary_logloss: 0.141499
[20]	valid_0's auc: 0.876379	valid_0's binary_logloss: 0.124793	valid_1's auc: 0.832498	valid_1's binary_logloss: 0.141369
[21]	valid_0's auc: 0.877634	valid_0's binary_logloss: 0.124225	valid_1's auc: 0.832355	valid_1's binary_logloss: 0.141226
[22]	valid_0's auc: 0.878588	valid_0's binary_logloss: 0.123762	valid_1's auc: 0.832703	valid_1's binary_logloss: 0.141078
[23]	valid_0's auc: 0.879693	valid_0's binary_logloss: 0.123255	valid_1's auc: 0.832163	valid_1's binary_logloss: 0.141039
[24]	valid_0's auc: 0.880719	valid_0's binary_logloss: 0.122807	valid_1's auc: 0.832324	valid_1's binary_logloss: 0.140925
[25]	valid_0's auc: 0.881533	valid_0's binary_logloss: 0.122313	valid_1's auc: 0.832184	valid_1's binary_logloss: 0.140891
[26]	valid_0's auc: 0.882449	valid_0's binary_logloss: 0.121884	valid_1's auc: 0.832004	valid_1's binary_logloss: 0.140834
[27]	valid_0's auc: 0.883373	valid_0's binary_logloss: 0.121461	valid_1's auc: 0.83169	valid_1's binary_logloss: 0.140849
[28]	valid_0's auc: 0.884175	valid_0's binary_logloss: 0.121053	valid_1's auc: 0.831077	valid_1's binary_logloss: 0.140897
[29]	valid_0's auc: 0.885058	valid_0's binary_logloss: 0.120689	valid_1's auc: 0.831657	valid_1's binary_logloss: 0.140775
[30]	valid_0's auc: 0.885865	valid_0's binary_logloss: 0.120366	valid_1's auc: 0.83154	valid_1's binary_logloss: 0.140767
[31]	valid_0's auc: 0.887114	valid_0's binary_logloss: 0.11994	valid_1's auc: 0.831439	valid_1's binary_logloss: 0.140816
[32]	valid_0's auc: 0.887603	valid_0's binary_logloss: 0.119623	valid_1's auc: 0.831023	valid_1's binary_logloss: 0.140912
[33]	valid_0's auc: 0.888164	valid_0's binary_logloss: 0.11928	valid_1's auc: 0.830667	valid_1's binary_logloss: 0.140953
[34]	valid_0's auc: 0.888957	valid_0's binary_logloss: 0.118937	valid_1's auc: 0.830222	valid_1's binary_logloss: 0.141016
[35]	valid_0's auc: 0.889746	valid_0's binary_logloss: 0.118573	valid_1's auc: 0.830175	valid_1's binary_logloss: 0.141039
[36]	valid_0's auc: 0.890215	valid_0's binary_logloss: 0.118288	valid_1's auc: 0.830319	valid_1's binary_logloss: 0.140986
[37]	valid_0's auc: 0.890966	valid_0's binary_logloss: 0.117947	valid_1's auc: 0.830443	valid_1's binary_logloss: 0.140993
[38]	valid_0's auc: 0.891474	valid_0's binary_logloss: 0.117667	valid_1's auc: 0.830505	valid_1's binary_logloss: 0.141016
[39]	valid_0's auc: 0.892156	valid_0's binary_logloss: 0.117328	valid_1's auc: 0.830415	valid_1's binary_logloss: 0.140994
[40]	valid_0's auc: 0.892568	valid_0's binary_logloss: 0.117061	valid_1's auc: 0.830687	valid_1's binary_logloss: 0.140918
[41]	valid_0's auc: 0.893181	valid_0's binary_logloss: 0.11681	valid_1's auc: 0.830622	valid_1's binary_logloss: 0.140963
[42]	valid_0's auc: 0.893984	valid_0's binary_logloss: 0.116476	valid_1's auc: 0.830827	valid_1's binary_logloss: 0.140945
Early stopping, best iteration is:
[12]	valid_0's auc: 0.866817	valid_0's binary_logloss: 0.130311	valid_1's auc: 0.835592	valid_1's binary_logloss: 0.143404
[1]	valid_0's auc: 0.830452	valid_0's binary_logloss: 0.155919	valid_1's auc: 0.817465	valid_1's binary_logloss: 0.164895
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.843009	valid_0's binary_logloss: 0.150278	valid_1's auc: 0.828176	valid_1's binary_logloss: 0.159411
[3]	valid_0's auc: 0.851789	valid_0's binary_logloss: 0.146252	valid_1's auc: 0.832597	valid_1's binary_logloss: 0.155736
[4]	valid_0's auc: 0.854657	valid_0's binary_logloss: 0.143075	valid_1's auc: 0.83416	valid_1's binary_logloss: 0.152859
[5]	valid_0's auc: 0.85652	valid_0's binary_logloss: 0.140511	valid_1's auc: 0.836671	valid_1's binary_logloss: 0.150427
[6]	valid_0's auc: 0.85962	valid_0's binary_logloss: 0.138318	valid_1's auc: 0.836445	valid_1's binary_logloss: 0.148531
[7]	valid_0's auc: 0.861186	valid_0's binary_logloss: 0.136426	valid_1's auc: 0.837332	valid_1's binary_logloss: 0.146966
[8]	valid_0's auc: 0.863008	valid_0's binary_logloss: 0.134791	valid_1's auc: 0.837707	valid_1's binary_logloss: 0.145637
[9]	valid_0's auc: 0.864093	valid_0's binary_logloss: 0.133383	valid_1's auc: 0.838849	valid_1's binary_logloss: 0.144456
[10]	valid_0's auc: 0.865712	valid_0's binary_logloss: 0.132144	valid_1's auc: 0.839148	valid_1's binary_logloss: 0.143548
[11]	valid_0's auc: 0.866608	valid_0's binary_logloss: 0.131102	valid_1's auc: 0.83905	valid_1's binary_logloss: 0.142819
[12]	valid_0's auc: 0.868335	valid_0's binary_logloss: 0.130069	valid_1's auc: 0.838903	valid_1's binary_logloss: 0.142184
[13]	valid_0's auc: 0.869075	valid_0's binary_logloss: 0.12923	valid_1's auc: 0.838882	valid_1's binary_logloss: 0.141681
[14]	valid_0's auc: 0.870379	valid_0's binary_logloss: 0.128406	valid_1's auc: 0.838698	valid_1's binary_logloss: 0.141227
[15]	valid_0's auc: 0.871467	valid_0's binary_logloss: 0.127626	valid_1's auc: 0.838689	valid_1's binary_logloss: 0.140825
[16]	valid_0's auc: 0.872424	valid_0's binary_logloss: 0.126889	valid_1's auc: 0.838836	valid_1's binary_logloss: 0.140446
[17]	valid_0's auc: 0.873627	valid_0's binary_logloss: 0.126201	valid_1's auc: 0.839557	valid_1's binary_logloss: 0.14014
[18]	valid_0's auc: 0.87479	valid_0's binary_logloss: 0.125597	valid_1's auc: 0.839776	valid_1's binary_logloss: 0.139941
[19]	valid_0's auc: 0.875902	valid_0's binary_logloss: 0.124996	valid_1's auc: 0.839902	valid_1's binary_logloss: 0.139701
[20]	valid_0's auc: 0.877163	valid_0's binary_logloss: 0.124367	valid_1's auc: 0.839299	valid_1's binary_logloss: 0.139581
[21]	valid_0's auc: 0.878438	valid_0's binary_logloss: 0.123757	valid_1's auc: 0.839098	valid_1's binary_logloss: 0.139467
[22]	valid_0's auc: 0.879596	valid_0's binary_logloss: 0.123223	valid_1's auc: 0.838863	valid_1's binary_logloss: 0.139407
[23]	valid_0's auc: 0.880754	valid_0's binary_logloss: 0.122702	valid_1's auc: 0.839119	valid_1's binary_logloss: 0.139326
[24]	valid_0's auc: 0.881758	valid_0's binary_logloss: 0.122226	valid_1's auc: 0.839042	valid_1's binary_logloss: 0.139233
[25]	valid_0's auc: 0.883034	valid_0's binary_logloss: 0.121743	valid_1's auc: 0.838865	valid_1's binary_logloss: 0.139239
[26]	valid_0's auc: 0.884403	valid_0's binary_logloss: 0.121255	valid_1's auc: 0.838329	valid_1's binary_logloss: 0.139254
[27]	valid_0's auc: 0.885324	valid_0's binary_logloss: 0.120835	valid_1's auc: 0.837953	valid_1's binary_logloss: 0.139276
[28]	valid_0's auc: 0.886543	valid_0's binary_logloss: 0.120403	valid_1's auc: 0.838377	valid_1's binary_logloss: 0.139233
[29]	valid_0's auc: 0.887286	valid_0's binary_logloss: 0.120007	valid_1's auc: 0.838269	valid_1's binary_logloss: 0.139189
[30]	valid_0's auc: 0.888016	valid_0's binary_logloss: 0.119609	valid_1's auc: 0.838269	valid_1's binary_logloss: 0.139184
[31]	valid_0's auc: 0.888842	valid_0's binary_logloss: 0.11924	valid_1's auc: 0.838469	valid_1's binary_logloss: 0.139125
[32]	valid_0's auc: 0.889829	valid_0's binary_logloss: 0.118853	valid_1's auc: 0.838631	valid_1's binary_logloss: 0.139121
[33]	valid_0's auc: 0.890569	valid_0's binary_logloss: 0.118502	valid_1's auc: 0.838736	valid_1's binary_logloss: 0.139065
[34]	valid_0's auc: 0.891418	valid_0's binary_logloss: 0.118158	valid_1's auc: 0.838692	valid_1's binary_logloss: 0.139051
[35]	valid_0's auc: 0.892153	valid_0's binary_logloss: 0.117818	valid_1's auc: 0.838839	valid_1's binary_logloss: 0.139022
[36]	valid_0's auc: 0.892798	valid_0's binary_logloss: 0.117475	valid_1's auc: 0.838585	valid_1's binary_logloss: 0.139057
[37]	valid_0's auc: 0.893672	valid_0's binary_logloss: 0.117095	valid_1's auc: 0.838499	valid_1's binary_logloss: 0.139109
[38]	valid_0's auc: 0.894575	valid_0's binary_logloss: 0.116783	valid_1's auc: 0.83897	valid_1's binary_logloss: 0.139067
[39]	valid_0's auc: 0.895447	valid_0's binary_logloss: 0.116421	valid_1's auc: 0.838515	valid_1's binary_logloss: 0.139143
[40]	valid_0's auc: 0.896177	valid_0's binary_logloss: 0.116084	valid_1's auc: 0.838353	valid_1's binary_logloss: 0.13918
[41]	valid_0's auc: 0.896821	valid_0's binary_logloss: 0.115822	valid_1's auc: 0.837933	valid_1's binary_logloss: 0.139263
[42]	valid_0's auc: 0.897531	valid_0's binary_logloss: 0.115503	valid_1's auc: 0.838083	valid_1's binary_logloss: 0.139286
[43]	valid_0's auc: 0.898208	valid_0's binary_logloss: 0.115195	valid_1's auc: 0.838087	valid_1's binary_logloss: 0.139312
[44]	valid_0's auc: 0.899032	valid_0's binary_logloss: 0.114921	valid_1's auc: 0.837956	valid_1's binary_logloss: 0.139352
[45]	valid_0's auc: 0.899435	valid_0's binary_logloss: 0.114646	valid_1's auc: 0.837664	valid_1's binary_logloss: 0.139425
[46]	valid_0's auc: 0.899904	valid_0's binary_logloss: 0.114381	valid_1's auc: 0.837191	valid_1's binary_logloss: 0.139562
[47]	valid_0's auc: 0.900313	valid_0's binary_logloss: 0.114172	valid_1's auc: 0.837275	valid_1's binary_logloss: 0.139576
[48]	valid_0's auc: 0.900881	valid_0's binary_logloss: 0.113898	valid_1's auc: 0.837102	valid_1's binary_logloss: 0.139641
[49]	valid_0's auc: 0.901416	valid_0's binary_logloss: 0.11361	valid_1's auc: 0.837064	valid_1's binary_logloss: 0.139694
Early stopping, best iteration is:
[19]	valid_0's auc: 0.875902	valid_0's binary_logloss: 0.124996	valid_1's auc: 0.839902	valid_1's binary_logloss: 0.139701
[1]	valid_0's auc: 0.834758	valid_0's binary_logloss: 0.156067	valid_1's auc: 0.822971	valid_1's binary_logloss: 0.165101
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.842403	valid_0's binary_logloss: 0.150502	valid_1's auc: 0.828728	valid_1's binary_logloss: 0.159785
[3]	valid_0's auc: 0.847356	valid_0's binary_logloss: 0.146328	valid_1's auc: 0.830832	valid_1's binary_logloss: 0.156017
[4]	valid_0's auc: 0.84996	valid_0's binary_logloss: 0.143134	valid_1's auc: 0.83154	valid_1's binary_logloss: 0.15319
[5]	valid_0's auc: 0.853547	valid_0's binary_logloss: 0.140471	valid_1's auc: 0.833818	valid_1's binary_logloss: 0.150761
[6]	valid_0's auc: 0.855549	valid_0's binary_logloss: 0.138282	valid_1's auc: 0.834224	valid_1's binary_logloss: 0.148815
[7]	valid_0's auc: 0.857045	valid_0's binary_logloss: 0.136467	valid_1's auc: 0.835329	valid_1's binary_logloss: 0.147338
[8]	valid_0's auc: 0.858416	valid_0's binary_logloss: 0.134953	valid_1's auc: 0.836144	valid_1's binary_logloss: 0.146094
[9]	valid_0's auc: 0.860878	valid_0's binary_logloss: 0.1336	valid_1's auc: 0.835613	valid_1's binary_logloss: 0.145179
[10]	valid_0's auc: 0.862432	valid_0's binary_logloss: 0.132438	valid_1's auc: 0.836519	valid_1's binary_logloss: 0.144304
[11]	valid_0's auc: 0.864715	valid_0's binary_logloss: 0.131298	valid_1's auc: 0.836728	valid_1's binary_logloss: 0.143576
[12]	valid_0's auc: 0.86649	valid_0's binary_logloss: 0.13029	valid_1's auc: 0.837121	valid_1's binary_logloss: 0.142892
[13]	valid_0's auc: 0.869207	valid_0's binary_logloss: 0.129293	valid_1's auc: 0.837902	valid_1's binary_logloss: 0.142287
[14]	valid_0's auc: 0.870823	valid_0's binary_logloss: 0.128463	valid_1's auc: 0.838274	valid_1's binary_logloss: 0.141902
[15]	valid_0's auc: 0.871881	valid_0's binary_logloss: 0.127657	valid_1's auc: 0.83767	valid_1's binary_logloss: 0.141625
[16]	valid_0's auc: 0.873451	valid_0's binary_logloss: 0.126932	valid_1's auc: 0.837454	valid_1's binary_logloss: 0.141273
[17]	valid_0's auc: 0.874754	valid_0's binary_logloss: 0.126215	valid_1's auc: 0.837218	valid_1's binary_logloss: 0.14099
[18]	valid_0's auc: 0.876015	valid_0's binary_logloss: 0.125583	valid_1's auc: 0.837242	valid_1's binary_logloss: 0.140723
[19]	valid_0's auc: 0.876898	valid_0's binary_logloss: 0.125006	valid_1's auc: 0.836677	valid_1's binary_logloss: 0.140576
[20]	valid_0's auc: 0.878198	valid_0's binary_logloss: 0.124464	valid_1's auc: 0.836677	valid_1's binary_logloss: 0.140492
[21]	valid_0's auc: 0.87919	valid_0's binary_logloss: 0.123881	valid_1's auc: 0.837511	valid_1's binary_logloss: 0.140273
[22]	valid_0's auc: 0.880273	valid_0's binary_logloss: 0.123358	valid_1's auc: 0.837479	valid_1's binary_logloss: 0.14014
[23]	valid_0's auc: 0.881151	valid_0's binary_logloss: 0.122898	valid_1's auc: 0.837864	valid_1's binary_logloss: 0.139991
[24]	valid_0's auc: 0.882264	valid_0's binary_logloss: 0.122383	valid_1's auc: 0.8378	valid_1's binary_logloss: 0.139955
[25]	valid_0's auc: 0.883252	valid_0's binary_logloss: 0.121909	valid_1's auc: 0.838233	valid_1's binary_logloss: 0.139841
[26]	valid_0's auc: 0.884254	valid_0's binary_logloss: 0.121477	valid_1's auc: 0.837795	valid_1's binary_logloss: 0.139804
[27]	valid_0's auc: 0.885619	valid_0's binary_logloss: 0.120986	valid_1's auc: 0.838147	valid_1's binary_logloss: 0.139714
[28]	valid_0's auc: 0.886542	valid_0's binary_logloss: 0.120573	valid_1's auc: 0.837608	valid_1's binary_logloss: 0.139727
[29]	valid_0's auc: 0.887407	valid_0's binary_logloss: 0.120146	valid_1's auc: 0.837298	valid_1's binary_logloss: 0.139725
[30]	valid_0's auc: 0.88824	valid_0's binary_logloss: 0.119775	valid_1's auc: 0.837294	valid_1's binary_logloss: 0.139714
[31]	valid_0's auc: 0.889124	valid_0's binary_logloss: 0.119428	valid_1's auc: 0.837511	valid_1's binary_logloss: 0.139644
[32]	valid_0's auc: 0.889919	valid_0's binary_logloss: 0.119052	valid_1's auc: 0.838258	valid_1's binary_logloss: 0.139519
[33]	valid_0's auc: 0.890537	valid_0's binary_logloss: 0.118727	valid_1's auc: 0.837988	valid_1's binary_logloss: 0.139526
[34]	valid_0's auc: 0.891097	valid_0's binary_logloss: 0.118375	valid_1's auc: 0.837857	valid_1's binary_logloss: 0.1396
[35]	valid_0's auc: 0.891811	valid_0's binary_logloss: 0.118027	valid_1's auc: 0.83771	valid_1's binary_logloss: 0.139627
[36]	valid_0's auc: 0.892678	valid_0's binary_logloss: 0.117704	valid_1's auc: 0.837436	valid_1's binary_logloss: 0.139695
[37]	valid_0's auc: 0.893595	valid_0's binary_logloss: 0.117332	valid_1's auc: 0.837433	valid_1's binary_logloss: 0.13969
[38]	valid_0's auc: 0.894079	valid_0's binary_logloss: 0.117021	valid_1's auc: 0.837272	valid_1's binary_logloss: 0.139691
[39]	valid_0's auc: 0.894556	valid_0's binary_logloss: 0.11674	valid_1's auc: 0.837253	valid_1's binary_logloss: 0.139685
[40]	valid_0's auc: 0.89516	valid_0's binary_logloss: 0.116428	valid_1's auc: 0.837294	valid_1's binary_logloss: 0.139678
[41]	valid_0's auc: 0.895863	valid_0's binary_logloss: 0.116155	valid_1's auc: 0.837866	valid_1's binary_logloss: 0.139562
[42]	valid_0's auc: 0.896354	valid_0's binary_logloss: 0.115871	valid_1's auc: 0.837735	valid_1's binary_logloss: 0.139612
[43]	valid_0's auc: 0.896691	valid_0's binary_logloss: 0.115612	valid_1's auc: 0.837481	valid_1's binary_logloss: 0.139702
[44]	valid_0's auc: 0.897343	valid_0's binary_logloss: 0.115316	valid_1's auc: 0.837651	valid_1's binary_logloss: 0.139672
Early stopping, best iteration is:
[14]	valid_0's auc: 0.870823	valid_0's binary_logloss: 0.128463	valid_1's auc: 0.838274	valid_1's binary_logloss: 0.141902
[1]	valid_0's auc: 0.824305	valid_0's binary_logloss: 0.156217	valid_1's auc: 0.819488	valid_1's binary_logloss: 0.165016
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.828884	valid_0's binary_logloss: 0.150957	valid_1's auc: 0.822387	valid_1's binary_logloss: 0.159711
[3]	valid_0's auc: 0.838845	valid_0's binary_logloss: 0.147117	valid_1's auc: 0.829542	valid_1's binary_logloss: 0.156068
[4]	valid_0's auc: 0.843406	valid_0's binary_logloss: 0.144114	valid_1's auc: 0.834917	valid_1's binary_logloss: 0.153141
[5]	valid_0's auc: 0.846391	valid_0's binary_logloss: 0.141629	valid_1's auc: 0.837871	valid_1's binary_logloss: 0.150804
[6]	valid_0's auc: 0.848894	valid_0's binary_logloss: 0.13957	valid_1's auc: 0.839548	valid_1's binary_logloss: 0.148876
[7]	valid_0's auc: 0.851133	valid_0's binary_logloss: 0.137847	valid_1's auc: 0.841812	valid_1's binary_logloss: 0.147225
[8]	valid_0's auc: 0.852859	valid_0's binary_logloss: 0.136394	valid_1's auc: 0.842382	valid_1's binary_logloss: 0.14594
[9]	valid_0's auc: 0.854683	valid_0's binary_logloss: 0.135137	valid_1's auc: 0.842811	valid_1's binary_logloss: 0.144866
[10]	valid_0's auc: 0.855596	valid_0's binary_logloss: 0.134048	valid_1's auc: 0.843186	valid_1's binary_logloss: 0.143872
[11]	valid_0's auc: 0.856352	valid_0's binary_logloss: 0.133075	valid_1's auc: 0.843465	valid_1's binary_logloss: 0.14305
[12]	valid_0's auc: 0.857769	valid_0's binary_logloss: 0.132232	valid_1's auc: 0.843222	valid_1's binary_logloss: 0.142376
[13]	valid_0's auc: 0.859429	valid_0's binary_logloss: 0.131427	valid_1's auc: 0.843564	valid_1's binary_logloss: 0.141783
[14]	valid_0's auc: 0.86094	valid_0's binary_logloss: 0.130658	valid_1's auc: 0.843389	valid_1's binary_logloss: 0.141419
[15]	valid_0's auc: 0.862567	valid_0's binary_logloss: 0.129955	valid_1's auc: 0.84365	valid_1's binary_logloss: 0.141
[16]	valid_0's auc: 0.864351	valid_0's binary_logloss: 0.129293	valid_1's auc: 0.84379	valid_1's binary_logloss: 0.140661
[17]	valid_0's auc: 0.865942	valid_0's binary_logloss: 0.128724	valid_1's auc: 0.843552	valid_1's binary_logloss: 0.140416
[18]	valid_0's auc: 0.867233	valid_0's binary_logloss: 0.128187	valid_1's auc: 0.843853	valid_1's binary_logloss: 0.140054
[19]	valid_0's auc: 0.868694	valid_0's binary_logloss: 0.127637	valid_1's auc: 0.844012	valid_1's binary_logloss: 0.139782
[20]	valid_0's auc: 0.86955	valid_0's binary_logloss: 0.127154	valid_1's auc: 0.843654	valid_1's binary_logloss: 0.139614
[21]	valid_0's auc: 0.870481	valid_0's binary_logloss: 0.126741	valid_1's auc: 0.843765	valid_1's binary_logloss: 0.139406
[22]	valid_0's auc: 0.871544	valid_0's binary_logloss: 0.126309	valid_1's auc: 0.844171	valid_1's binary_logloss: 0.139253
[23]	valid_0's auc: 0.872657	valid_0's binary_logloss: 0.125895	valid_1's auc: 0.843901	valid_1's binary_logloss: 0.139093
[24]	valid_0's auc: 0.873871	valid_0's binary_logloss: 0.125508	valid_1's auc: 0.843814	valid_1's binary_logloss: 0.139015
[25]	valid_0's auc: 0.875385	valid_0's binary_logloss: 0.125076	valid_1's auc: 0.843672	valid_1's binary_logloss: 0.138915
[26]	valid_0's auc: 0.876436	valid_0's binary_logloss: 0.12471	valid_1's auc: 0.843894	valid_1's binary_logloss: 0.138828
[27]	valid_0's auc: 0.877113	valid_0's binary_logloss: 0.124381	valid_1's auc: 0.843845	valid_1's binary_logloss: 0.138723
[28]	valid_0's auc: 0.878144	valid_0's binary_logloss: 0.12406	valid_1's auc: 0.843817	valid_1's binary_logloss: 0.138681
[29]	valid_0's auc: 0.879271	valid_0's binary_logloss: 0.123721	valid_1's auc: 0.844032	valid_1's binary_logloss: 0.13859
[30]	valid_0's auc: 0.87995	valid_0's binary_logloss: 0.123447	valid_1's auc: 0.844089	valid_1's binary_logloss: 0.138538
[31]	valid_0's auc: 0.88069	valid_0's binary_logloss: 0.123148	valid_1's auc: 0.844137	valid_1's binary_logloss: 0.138484
[32]	valid_0's auc: 0.881743	valid_0's binary_logloss: 0.122823	valid_1's auc: 0.843246	valid_1's binary_logloss: 0.138591
[33]	valid_0's auc: 0.882558	valid_0's binary_logloss: 0.122542	valid_1's auc: 0.842676	valid_1's binary_logloss: 0.138654
[34]	valid_0's auc: 0.883424	valid_0's binary_logloss: 0.12228	valid_1's auc: 0.842829	valid_1's binary_logloss: 0.138612
[35]	valid_0's auc: 0.88419	valid_0's binary_logloss: 0.122011	valid_1's auc: 0.842651	valid_1's binary_logloss: 0.138595
[36]	valid_0's auc: 0.885075	valid_0's binary_logloss: 0.121697	valid_1's auc: 0.842554	valid_1's binary_logloss: 0.138593
[37]	valid_0's auc: 0.885953	valid_0's binary_logloss: 0.121404	valid_1's auc: 0.842654	valid_1's binary_logloss: 0.138558
[38]	valid_0's auc: 0.887245	valid_0's binary_logloss: 0.121133	valid_1's auc: 0.842656	valid_1's binary_logloss: 0.138548
[39]	valid_0's auc: 0.887995	valid_0's binary_logloss: 0.120851	valid_1's auc: 0.842756	valid_1's binary_logloss: 0.138472
[40]	valid_0's auc: 0.888693	valid_0's binary_logloss: 0.120601	valid_1's auc: 0.842976	valid_1's binary_logloss: 0.138411
[41]	valid_0's auc: 0.889439	valid_0's binary_logloss: 0.120343	valid_1's auc: 0.843062	valid_1's binary_logloss: 0.138364
[42]	valid_0's auc: 0.890032	valid_0's binary_logloss: 0.120105	valid_1's auc: 0.842992	valid_1's binary_logloss: 0.138358
[43]	valid_0's auc: 0.890657	valid_0's binary_logloss: 0.11989	valid_1's auc: 0.843118	valid_1's binary_logloss: 0.138331
[44]	valid_0's auc: 0.891757	valid_0's binary_logloss: 0.119626	valid_1's auc: 0.843283	valid_1's binary_logloss: 0.1383
[45]	valid_0's auc: 0.892343	valid_0's binary_logloss: 0.119426	valid_1's auc: 0.84332	valid_1's binary_logloss: 0.138306
[46]	valid_0's auc: 0.892986	valid_0's binary_logloss: 0.119211	valid_1's auc: 0.843346	valid_1's binary_logloss: 0.138288
[47]	valid_0's auc: 0.89341	valid_0's binary_logloss: 0.119028	valid_1's auc: 0.843385	valid_1's binary_logloss: 0.138241
[48]	valid_0's auc: 0.894062	valid_0's binary_logloss: 0.118789	valid_1's auc: 0.843166	valid_1's binary_logloss: 0.138298
[49]	valid_0's auc: 0.894734	valid_0's binary_logloss: 0.118543	valid_1's auc: 0.843193	valid_1's binary_logloss: 0.138271
[50]	valid_0's auc: 0.895288	valid_0's binary_logloss: 0.118352	valid_1's auc: 0.842965	valid_1's binary_logloss: 0.138309
[51]	valid_0's auc: 0.895902	valid_0's binary_logloss: 0.118145	valid_1's auc: 0.842827	valid_1's binary_logloss: 0.138343
[52]	valid_0's auc: 0.896512	valid_0's binary_logloss: 0.11792	valid_1's auc: 0.842926	valid_1's binary_logloss: 0.138311
Early stopping, best iteration is:
[22]	valid_0's auc: 0.871544	valid_0's binary_logloss: 0.126309	valid_1's auc: 0.844171	valid_1's binary_logloss: 0.139253
GridSearchCV 최적 파라미터: {'max_depth': 128, 'min_child_samples': 100, 'num_leaves': 32, 'subsample': 0.8}
ROC AUC: 0.8442

 

# Re-fit LightGBM with the best parameters found by GridSearchCV
# ({'max_depth': 128, 'min_child_samples': 100, 'num_leaves': 32, 'subsample': 0.8}).
# BUGFIX: the keyword was misspelled `sumbsample`, which the sklearn wrapper
# silently passes through as an unknown kwarg — so subsample=0.8 was never
# actually applied. Corrected to `subsample`.
lgbm_clf = LGBMClassifier(n_estimators=1000, num_leaves=32, subsample=0.8, min_child_samples=100,
                          max_depth=128)

# Use the test split as the early-stopping evaluation set; stop if the AUC on
# it does not improve for 100 consecutive boosting rounds.
evals = [(X_test, y_test)]
lgbm_clf.fit(X_train, y_train, early_stopping_rounds=100, eval_metric="auc", eval_set=evals,
                verbose=True)
# NOTE: hyperparameter tuning gives only a modest lift here; tree ensembles in
# particular have many parameters, making tuning labor-intensive.
# ROC AUC needs the positive-class probability, hence predict_proba(...)[:, 1].
lgbm_roc_score = roc_auc_score(y_test, lgbm_clf.predict_proba(X_test)[:,1],average='macro')
print('ROC AUC: {0:.4f}'.format(lgbm_roc_score))


[1]	valid_0's auc: 0.819488	valid_0's binary_logloss: 0.165016
Training until validation scores don't improve for 100 rounds.
[2]	valid_0's auc: 0.822387	valid_0's binary_logloss: 0.159711
[3]	valid_0's auc: 0.829542	valid_0's binary_logloss: 0.156068
[4]	valid_0's auc: 0.834917	valid_0's binary_logloss: 0.153141
[5]	valid_0's auc: 0.837871	valid_0's binary_logloss: 0.150804
[6]	valid_0's auc: 0.839548	valid_0's binary_logloss: 0.148876
[7]	valid_0's auc: 0.841812	valid_0's binary_logloss: 0.147225
[8]	valid_0's auc: 0.842382	valid_0's binary_logloss: 0.14594
[9]	valid_0's auc: 0.842811	valid_0's binary_logloss: 0.144866
[10]	valid_0's auc: 0.843186	valid_0's binary_logloss: 0.143872
[11]	valid_0's auc: 0.843465	valid_0's binary_logloss: 0.14305
[12]	valid_0's auc: 0.843222	valid_0's binary_logloss: 0.142376
[13]	valid_0's auc: 0.843564	valid_0's binary_logloss: 0.141783
[14]	valid_0's auc: 0.843389	valid_0's binary_logloss: 0.141419
[15]	valid_0's auc: 0.84365	valid_0's binary_logloss: 0.141
[16]	valid_0's auc: 0.84379	valid_0's binary_logloss: 0.140661
[17]	valid_0's auc: 0.843552	valid_0's binary_logloss: 0.140416
[18]	valid_0's auc: 0.843853	valid_0's binary_logloss: 0.140054
[19]	valid_0's auc: 0.844012	valid_0's binary_logloss: 0.139782
[20]	valid_0's auc: 0.843654	valid_0's binary_logloss: 0.139614
[21]	valid_0's auc: 0.843765	valid_0's binary_logloss: 0.139406
[22]	valid_0's auc: 0.844171	valid_0's binary_logloss: 0.139253
[23]	valid_0's auc: 0.843901	valid_0's binary_logloss: 0.139093
[24]	valid_0's auc: 0.843814	valid_0's binary_logloss: 0.139015
[25]	valid_0's auc: 0.843672	valid_0's binary_logloss: 0.138915
[26]	valid_0's auc: 0.843894	valid_0's binary_logloss: 0.138828
[27]	valid_0's auc: 0.843845	valid_0's binary_logloss: 0.138723
[28]	valid_0's auc: 0.843817	valid_0's binary_logloss: 0.138681
[29]	valid_0's auc: 0.844032	valid_0's binary_logloss: 0.13859
[30]	valid_0's auc: 0.844089	valid_0's binary_logloss: 0.138538
[31]	valid_0's auc: 0.844137	valid_0's binary_logloss: 0.138484
[32]	valid_0's auc: 0.843246	valid_0's binary_logloss: 0.138591
[33]	valid_0's auc: 0.842676	valid_0's binary_logloss: 0.138654
[34]	valid_0's auc: 0.842829	valid_0's binary_logloss: 0.138612
[35]	valid_0's auc: 0.842651	valid_0's binary_logloss: 0.138595
[36]	valid_0's auc: 0.842554	valid_0's binary_logloss: 0.138593
[37]	valid_0's auc: 0.842654	valid_0's binary_logloss: 0.138558
[38]	valid_0's auc: 0.842656	valid_0's binary_logloss: 0.138548
[39]	valid_0's auc: 0.842756	valid_0's binary_logloss: 0.138472
[40]	valid_0's auc: 0.842976	valid_0's binary_logloss: 0.138411
[41]	valid_0's auc: 0.843062	valid_0's binary_logloss: 0.138364
[42]	valid_0's auc: 0.842992	valid_0's binary_logloss: 0.138358
[43]	valid_0's auc: 0.843118	valid_0's binary_logloss: 0.138331
[44]	valid_0's auc: 0.843283	valid_0's binary_logloss: 0.1383
[45]	valid_0's auc: 0.84332	valid_0's binary_logloss: 0.138306
[46]	valid_0's auc: 0.843346	valid_0's binary_logloss: 0.138288
[47]	valid_0's auc: 0.843385	valid_0's binary_logloss: 0.138241
[48]	valid_0's auc: 0.843166	valid_0's binary_logloss: 0.138298
[49]	valid_0's auc: 0.843193	valid_0's binary_logloss: 0.138271
[50]	valid_0's auc: 0.842965	valid_0's binary_logloss: 0.138309
[51]	valid_0's auc: 0.842827	valid_0's binary_logloss: 0.138343
[52]	valid_0's auc: 0.842926	valid_0's binary_logloss: 0.138311
[53]	valid_0's auc: 0.842742	valid_0's binary_logloss: 0.138328
[54]	valid_0's auc: 0.842496	valid_0's binary_logloss: 0.138369
[55]	valid_0's auc: 0.84257	valid_0's binary_logloss: 0.138337
[56]	valid_0's auc: 0.8428	valid_0's binary_logloss: 0.138253
[57]	valid_0's auc: 0.842529	valid_0's binary_logloss: 0.138319
[58]	valid_0's auc: 0.8424	valid_0's binary_logloss: 0.138333
[59]	valid_0's auc: 0.842268	valid_0's binary_logloss: 0.138374
[60]	valid_0's auc: 0.84235	valid_0's binary_logloss: 0.13834
[61]	valid_0's auc: 0.84188	valid_0's binary_logloss: 0.138426
[62]	valid_0's auc: 0.841824	valid_0's binary_logloss: 0.138441
[63]	valid_0's auc: 0.841519	valid_0's binary_logloss: 0.138494
[64]	valid_0's auc: 0.841793	valid_0's binary_logloss: 0.138494
[65]	valid_0's auc: 0.841631	valid_0's binary_logloss: 0.138541
[66]	valid_0's auc: 0.841514	valid_0's binary_logloss: 0.138558
[67]	valid_0's auc: 0.841441	valid_0's binary_logloss: 0.138553
[68]	valid_0's auc: 0.841335	valid_0's binary_logloss: 0.138581
[69]	valid_0's auc: 0.841157	valid_0's binary_logloss: 0.13866
[70]	valid_0's auc: 0.841363	valid_0's binary_logloss: 0.138604
[71]	valid_0's auc: 0.841247	valid_0's binary_logloss: 0.138608
[72]	valid_0's auc: 0.841129	valid_0's binary_logloss: 0.138616
[73]	valid_0's auc: 0.841231	valid_0's binary_logloss: 0.13859
[74]	valid_0's auc: 0.841063	valid_0's binary_logloss: 0.138651
[75]	valid_0's auc: 0.841226	valid_0's binary_logloss: 0.138603
[76]	valid_0's auc: 0.841163	valid_0's binary_logloss: 0.13862
[77]	valid_0's auc: 0.841357	valid_0's binary_logloss: 0.138597
[78]	valid_0's auc: 0.840873	valid_0's binary_logloss: 0.138687
[79]	valid_0's auc: 0.840753	valid_0's binary_logloss: 0.138734
[80]	valid_0's auc: 0.840892	valid_0's binary_logloss: 0.138741
[81]	valid_0's auc: 0.841138	valid_0's binary_logloss: 0.138702
[82]	valid_0's auc: 0.841058	valid_0's binary_logloss: 0.138712
[83]	valid_0's auc: 0.84078	valid_0's binary_logloss: 0.138768
[84]	valid_0's auc: 0.84061	valid_0's binary_logloss: 0.138815
[85]	valid_0's auc: 0.840361	valid_0's binary_logloss: 0.138849
[86]	valid_0's auc: 0.840272	valid_0's binary_logloss: 0.138871
[87]	valid_0's auc: 0.840075	valid_0's binary_logloss: 0.138909
[88]	valid_0's auc: 0.840357	valid_0's binary_logloss: 0.138874
[89]	valid_0's auc: 0.840169	valid_0's binary_logloss: 0.138905
[90]	valid_0's auc: 0.840125	valid_0's binary_logloss: 0.1389
[91]	valid_0's auc: 0.839679	valid_0's binary_logloss: 0.139015
[92]	valid_0's auc: 0.83983	valid_0's binary_logloss: 0.138999
[93]	valid_0's auc: 0.839799	valid_0's binary_logloss: 0.139006
[94]	valid_0's auc: 0.839851	valid_0's binary_logloss: 0.13898
[95]	valid_0's auc: 0.840149	valid_0's binary_logloss: 0.13892
[96]	valid_0's auc: 0.840139	valid_0's binary_logloss: 0.138954
[97]	valid_0's auc: 0.840006	valid_0's binary_logloss: 0.138986
[98]	valid_0's auc: 0.839846	valid_0's binary_logloss: 0.139033
[99]	valid_0's auc: 0.839834	valid_0's binary_logloss: 0.139075
[100]	valid_0's auc: 0.839472	valid_0's binary_logloss: 0.139137
[101]	valid_0's auc: 0.8394	valid_0's binary_logloss: 0.139155
[102]	valid_0's auc: 0.839448	valid_0's binary_logloss: 0.139154
[103]	valid_0's auc: 0.839538	valid_0's binary_logloss: 0.139134
[104]	valid_0's auc: 0.839496	valid_0's binary_logloss: 0.139161
[105]	valid_0's auc: 0.839596	valid_0's binary_logloss: 0.139125
[106]	valid_0's auc: 0.839639	valid_0's binary_logloss: 0.139115
[107]	valid_0's auc: 0.839791	valid_0's binary_logloss: 0.139097
[108]	valid_0's auc: 0.839814	valid_0's binary_logloss: 0.139097
[109]	valid_0's auc: 0.839695	valid_0's binary_logloss: 0.139135
[110]	valid_0's auc: 0.839293	valid_0's binary_logloss: 0.139206
[111]	valid_0's auc: 0.839318	valid_0's binary_logloss: 0.139196
[112]	valid_0's auc: 0.839257	valid_0's binary_logloss: 0.139234
[113]	valid_0's auc: 0.839251	valid_0's binary_logloss: 0.139234
[114]	valid_0's auc: 0.839284	valid_0's binary_logloss: 0.139242
[115]	valid_0's auc: 0.839367	valid_0's binary_logloss: 0.139253
[116]	valid_0's auc: 0.83942	valid_0's binary_logloss: 0.139255
[117]	valid_0's auc: 0.839346	valid_0's binary_logloss: 0.139294
[118]	valid_0's auc: 0.839207	valid_0's binary_logloss: 0.139357
[119]	valid_0's auc: 0.839105	valid_0's binary_logloss: 0.139392
[120]	valid_0's auc: 0.839302	valid_0's binary_logloss: 0.139375
[121]	valid_0's auc: 0.839406	valid_0's binary_logloss: 0.139363
[122]	valid_0's auc: 0.83949	valid_0's binary_logloss: 0.139379
Early stopping, best iteration is:
[22]	valid_0's auc: 0.844171	valid_0's binary_logloss: 0.139253
ROC AUC: 0.8442
​
반응형

+ Recent posts