import numpy as np
import pandas as pd
import seaborn as sns
import lightgbm as lgb

from zipfile import ZipFile
from matplotlib import pyplot as plt

from sklearn.metrics import roc_auc_score
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
%matplotlib inline
plt.rcParams['figure.figsize'] = (12, 12)

Before running the cell below, upload your Kaggle API token (kaggle.json) so the download commands don't fail.

!mkdir -p ~/.kaggle
!cp kaggle.json ~/.kaggle/
!chmod 600 ~/.kaggle/kaggle.json
!kaggle competitions download -c tabular-playground-series-may-2022
tabular-playground-series-may-2022.zip: Skipping, found more recently modified local copy (use --force to force download)
with ZipFile('/content/tabular-playground-series-may-2022.zip', 'r') as zf:
    zf.extractall('./')

Loading the data

train = pd.read_csv('train.csv', index_col='id')
train.head()
f_00 f_01 f_02 f_03 f_04 f_05 f_06 f_07 f_08 f_09 ... f_22 f_23 f_24 f_25 f_26 f_27 f_28 f_29 f_30 target
id
0 -1.373246 0.238887 -0.243376 0.567405 -0.647715 0.839326 0.113133 1 5 1 ... -2.540739 0.766952 -2.730628 -0.208177 1.363402 ABABDADBAB 67.609153 0 0 0
1 1.697021 -1.710322 -2.230332 -0.545661 1.113173 -1.552175 0.447825 1 3 4 ... 2.278315 -0.633658 -1.217077 -3.782194 -0.058316 ACACCADCEB 377.096415 0 0 1
2 1.681726 0.616746 -1.027689 0.810492 -0.609086 0.113965 -0.708660 1 0 2 ... -1.385775 -0.520558 -0.009121 2.788536 -3.703488 AAAEABCKAD -195.599702 0 2 1
3 -0.118172 -0.587835 -0.804638 2.086822 0.371005 -0.128831 -0.282575 3 2 1 ... 0.572594 -1.653213 1.686035 -2.533098 -0.608601 BDBBAACBCB 210.826205 0 0 1
4 1.148481 -0.176567 -0.664871 -1.101343 0.467875 0.500117 0.407515 3 3 0 ... -3.912929 -1.430366 2.127649 -3.306784 4.371371 BDBCBBCHFE -217.211798 0 1 1

5 rows × 32 columns

train.info()
train.describe()
<class 'pandas.core.frame.DataFrame'>
Int64Index: 900000 entries, 0 to 899999
Data columns (total 32 columns):
 #   Column  Non-Null Count   Dtype  
---  ------  --------------   -----  
 0   f_00    900000 non-null  float64
 1   f_01    900000 non-null  float64
 2   f_02    900000 non-null  float64
 3   f_03    900000 non-null  float64
 4   f_04    900000 non-null  float64
 5   f_05    900000 non-null  float64
 6   f_06    900000 non-null  float64
 7   f_07    900000 non-null  int64  
 8   f_08    900000 non-null  int64  
 9   f_09    900000 non-null  int64  
 10  f_10    900000 non-null  int64  
 11  f_11    900000 non-null  int64  
 12  f_12    900000 non-null  int64  
 13  f_13    900000 non-null  int64  
 14  f_14    900000 non-null  int64  
 15  f_15    900000 non-null  int64  
 16  f_16    900000 non-null  int64  
 17  f_17    900000 non-null  int64  
 18  f_18    900000 non-null  int64  
 19  f_19    900000 non-null  float64
 20  f_20    900000 non-null  float64
 21  f_21    900000 non-null  float64
 22  f_22    900000 non-null  float64
 23  f_23    900000 non-null  float64
 24  f_24    900000 non-null  float64
 25  f_25    900000 non-null  float64
 26  f_26    900000 non-null  float64
 27  f_27    900000 non-null  object 
 28  f_28    900000 non-null  float64
 29  f_29    900000 non-null  int64  
 30  f_30    900000 non-null  int64  
 31  target  900000 non-null  int64  
dtypes: float64(16), int64(15), object(1)
memory usage: 226.6+ MB
f_00 f_01 f_02 f_03 f_04 f_05 f_06 f_07 f_08 f_09 ... f_21 f_22 f_23 f_24 f_25 f_26 f_28 f_29 f_30 target
count 900000.000000 900000.000000 900000.000000 900000.000000 900000.000000 900000.000000 900000.000000 900000.000000 900000.000000 900000.000000 ... 900000.000000 900000.000000 900000.000000 900000.000000 900000.000000 900000.000000 900000.000000 900000.000000 900000.000000 900000.000000
mean -0.000286 0.001165 0.001174 -0.001368 -0.000571 0.000284 -0.000709 2.031460 2.057998 2.362431 ... -0.156307 -0.009273 -0.369459 -0.342738 0.176549 0.357591 -0.380876 0.345661 1.002654 0.486488
std 0.998888 0.999193 1.000514 1.000175 1.000167 0.999875 0.999942 1.656172 1.590955 1.637706 ... 2.484706 2.450797 2.453405 2.386941 2.416959 2.476020 238.773054 0.475584 0.818989 0.499818
min -4.599856 -4.682199 -4.642676 -4.658816 -4.748501 -4.750214 -4.842919 0.000000 0.000000 0.000000 ... -13.310146 -11.853530 -12.301097 -11.416189 -11.918306 -14.300577 -1229.753052 0.000000 0.000000 0.000000
25% -0.675490 -0.675162 -0.674369 -0.676114 -0.675909 -0.673437 -0.674876 1.000000 1.000000 1.000000 ... -1.820063 -1.645585 -2.019739 -1.955956 -1.440424 -1.261598 -159.427418 0.000000 0.000000 0.000000
50% 0.001144 0.002014 0.002218 -0.002227 -0.001662 -0.000438 -0.001492 2.000000 2.000000 2.000000 ... -0.152668 0.030850 -0.390966 -0.340746 0.160912 0.404212 -0.519808 0.000000 1.000000 0.000000
75% 0.674337 0.675021 0.677505 0.672544 0.673789 0.675028 0.674749 3.000000 3.000000 3.000000 ... 1.507071 1.661676 1.255408 1.266673 1.795928 2.028219 158.987357 1.000000 2.000000 1.000000
max 4.749301 4.815699 4.961982 4.454920 4.948983 4.971881 4.822668 15.000000 16.000000 14.000000 ... 14.455426 11.344080 12.247100 12.389844 12.529179 12.913041 1229.562577 1.000000 2.000000 1.000000

8 rows × 31 columns

sns.heatmap(train.corr(), annot=True, vmin=-1, vmax=1, cmap='RdYlGn')
<matplotlib.axes._subplots.AxesSubplot at 0x7f9f8e19f450>
test = pd.read_csv('test.csv', index_col='id')
test.head()
f_00 f_01 f_02 f_03 f_04 f_05 f_06 f_07 f_08 f_09 ... f_21 f_22 f_23 f_24 f_25 f_26 f_27 f_28 f_29 f_30
id
900000 0.442517 0.174380 -0.999816 0.762741 0.186778 -1.074775 0.501888 6 6 0 ... -1.006400 -1.193879 -2.435736 -2.427430 -1.966887 5.734205 BAAABADLAC 99.478419 0 0
900001 -0.605598 -0.305715 0.627667 -0.578898 -1.750931 1.355550 -0.190911 1 3 4 ... 2.382405 0.149442 1.883322 -2.848714 -0.725155 3.194219 AFABBAEGCB -65.993825 1 0
900002 0.303990 2.445110 0.246515 0.818248 0.359731 -1.331845 1.358622 3 3 4 ... -7.026098 1.312277 -5.157192 1.714005 0.585032 0.066898 BBACABBKEE -87.405622 0 1
900003 0.154053 0.260126 -1.367092 -0.093175 -1.111034 -0.948481 1.119220 0 0 4 ... -0.594532 -3.939475 1.754570 -2.364007 -1.003320 3.893099 AEBEAACQCC -281.293460 0 0
900004 -1.651904 -0.424266 -0.667356 -0.322124 -0.089462 0.181705 1.784983 2 2 2 ... 0.084906 -0.985736 -0.130467 -3.557893 1.210687 1.861884 AEBBBBDABF 25.629415 0 2

5 rows × 31 columns

There are no missing values in the data, as the check below confirms.

if train.isna().any().any():
    print(train.isna().sum()/train.shape[0])
else:
    print("No Missing values")
No Missing values

Preparation

# Some of the feature engineering functions are from here
# https://www.kaggle.com/code/slythe/super-lightgbm-w-feature-engineering#%F0%9F%8C%9F-Feature-Engineering-%F0%9F%8C%9F

def text_engineering(df, col):
    # Encode each of the 10 characters as its alphabetical offset from 'A',
    # and add the string's unique characters and their count.

    for i in range(10):
        df[f"{col}_{i}"] = df[col].apply(lambda x: ord(x[i]) - ord('A'))

    # These columns don't depend on i, so compute them once, outside the loop.
    df["unique_chars"] = df[col].apply(lambda x: "".join(set(x)))
    df["unique_chars"] = df["unique_chars"].astype("category")
    df["unique_length"] = df["unique_chars"].apply(lambda x: len(x))

    return df
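
A quick sanity check of what this produces; the toy frame and values below are illustrative, not from the original run:

demo = pd.DataFrame({"f_27": ["ABABDADBAB"]})
demo = text_engineering(demo, "f_27")
# f_27_0 is 'A' -> 0, f_27_4 is 'D' -> 3; "ABABDADBAB" has 3 unique characters
print(demo[["f_27_0", "f_27_4", "unique_length"]])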

def group_central_stats(df, cols, suffix):
    # Row-wise summary statistics over a group of float columns.
    df[f"sum_{suffix}"] = df[cols].sum(axis=1)
    df[f"mean_{suffix}"] = df[cols].mean(axis=1)
    df[f"std_{suffix}"] = df[cols].std(axis=1)
    df[f"min_{suffix}"] = df[cols].min(axis=1)
    df[f"max_{suffix}"] = df[cols].max(axis=1)
    df[f"median_{suffix}"] = df[cols].median(axis=1)
    # .mad() was deprecated in pandas 1.5 and removed in 2.0; the pandas 1.x
    # used in this notebook still has it.
    df[f"mad_{suffix}"] = df[cols].mad(axis=1)

    df[f"range_{suffix}"] = df[f"max_{suffix}"] - df[f"min_{suffix}"]
    for q in (1, 5, 25, 50, 75, 95, 99):
        df[f"q{q:02d}_{suffix}"] = df[cols].quantile(q=q / 100, axis=1)
    df[f"kurt_{suffix}"] = df[cols].kurt(axis=1)
    df[f"skew_{suffix}"] = df[cols].skew(axis=1)

    return df
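
For intuition, a tiny row-wise example (illustrative only, not from the original run):

demo = pd.DataFrame({"a": [1.0, 4.0], "b": [3.0, 0.0], "c": [5.0, 2.0]})
demo = group_central_stats(demo, ["a", "b", "c"], "demo")
# Row 0 holds (1, 3, 5): mean 3.0, range 4.0, median 3.0
print(demo[["mean_demo", "range_demo", "median_demo"]])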

def feature_engineering(df):
    col = 'f_27'
    group_floats1 = [f"f_0{i}" for i in range(7)]
    group_floats2 = [f"f_{i}" for i in range(19, 27)]

    df = text_engineering(df, col)
    df = group_central_stats(df, group_floats1, "g1")
    df = group_central_stats(df, group_floats2, "g2")

    return df

def reduce_memory_dataframe(df, verbose=True):
    # Reduce the DataFrame's memory footprint by downcasting each numeric
    # column to the smallest dtype that can hold its observed value range.

    num_types = ['int8', 'int16', 'int32', 'int64', 'float16', 'float32', 'float64']
    start_memory = df.memory_usage().sum()

    for col in df.columns:
        if df[col].dtypes in num_types:
            c_min = df[col].min()
            c_max = df[col].max()

            # find() returns -1 when 'int' is absent, so != -1 means "is an int dtype"
            if str(df[col].dtype).find('int') != -1:
                if c_min > np.iinfo(np.int8).min and c_max < np.iinfo(np.int8).max:
                    df[col] = df[col].astype(np.int8)
                elif c_min > np.iinfo(np.int16).min and c_max < np.iinfo(np.int16).max:
                    df[col] = df[col].astype(np.int16)
                elif c_min > np.iinfo(np.int32).min and c_max < np.iinfo(np.int32).max:
                    df[col] = df[col].astype(np.int32)
                elif c_min > np.iinfo(np.int64).min and c_max < np.iinfo(np.int64).max:
                    df[col] = df[col].astype(np.int64)
            else:
                # float16 keeps only ~3 decimal digits, so this downcast is lossy.
                if c_min > np.finfo(np.float16).min and c_max < np.finfo(np.float16).max:
                    df[col] = df[col].astype(np.float16)
                elif c_min > np.finfo(np.float32).min and c_max < np.finfo(np.float32).max:
                    df[col] = df[col].astype(np.float32)
                elif c_min > np.finfo(np.float64).min and c_max < np.finfo(np.float64).max:
                    df[col] = df[col].astype(np.float64)

    end_memory = df.memory_usage().sum()

    if verbose:
        print(f"Memory reduced by {100 * (start_memory - end_memory) / start_memory:.1f}% "
              f"(from {start_memory} to {end_memory} bytes)")

    return df
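
A small illustration of the downcasting on assumed toy data (not from the original run):

demo = pd.DataFrame({"small_int": [0, 1, 2], "wide_float": [0.5, 1e30, -1e30]})
demo = reduce_memory_dataframe(demo, verbose=False)
# small_int fits in int8; wide_float exceeds float16's range, so it becomes float32
print(demo.dtypes)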
train = feature_engineering(train)
test = feature_engineering(test)
train = reduce_memory_dataframe(train)
test = reduce_memory_dataframe(test)
Memory reduced by 84.8% (from 563566432 to 85666432 bytes)
Memory reduced by 84.8% (from 432764624 to 65964624 bytes)
X = train.drop(['target'], axis=1)
y = train['target']
X.drop(['f_27', 'unique_chars'], axis=1, inplace=True)
test.drop(['f_27', 'unique_chars'], axis=1, inplace=True)
X_train, X_valid, y_train, y_valid = train_test_split(X, y, train_size=0.8, random_state=88)

Modelling

Approach-1

Fit a plain random forest ensemble to the data as a baseline.

model = RandomForestClassifier(n_estimators=300, verbose=2, n_jobs=-1)
model.fit(X_train, y_train)
[Parallel(n_jobs=-1)]: Using backend ThreadingBackend with 2 concurrent workers.
building tree 1 of 300
building tree 2 of 300
building tree 3 of 300
...
[Parallel(n_jobs=-1)]: Done  37 tasks      | elapsed:  2.3min
...
[Parallel(n_jobs=-1)]: Done 158 tasks      | elapsed:  8.9min
...
building tree 300 of 300
[Parallel(n_jobs=-1)]: Done 300 out of 300 | elapsed: 16.6min finished
RandomForestClassifier(n_estimators=300, n_jobs=-1, verbose=2)
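
The run above never actually scores Approach-1. A minimal sketch for checking it on the held-out split, assuming model is still the fitted random forest at this point:

# ROC AUC expects probabilities or scores for the positive class, not hard labels
rf_valid_probs = model.predict_proba(X_valid)[:, 1]
print("RF Valid AUC:", roc_auc_score(y_valid, rf_valid_probs))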

Approach-2

Fit a tuned LightGBM gradient-boosting classifier, with early stopping on the held-out validation set.

model = lgb.LGBMClassifier(
    objective='binary',
    metric='auc',
    num_iterations=5000,
    num_threads=-1,
    learning_rate=0.18319492258552644,
    boosting='gbdt',
    lambda_l1=0.00028648667113792726,
    lambda_l2=0.00026863027834978876,
    num_leaves=229,
    max_depth=0,  # <= 0 means no depth limit in LightGBM
    min_child_samples=80,
    max_bins=511,
    random_state=42
)
model.fit(X_train, y_train, eval_set=[(X_valid, y_valid)],
          eval_metric="auc", callbacks=[lgb.early_stopping(30)])
/usr/local/lib/python3.7/dist-packages/lightgbm/engine.py:118: UserWarning: Found `num_iterations` in params. Will use it instead of argument
  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
[1]	valid_0's auc: 0.8203
Training until validation scores don't improve for 30 rounds.
[2]	valid_0's auc: 0.882481
[3]	valid_0's auc: 0.894322
[4]	valid_0's auc: 0.901741
...
[100]	valid_0's auc: 0.976552
...
[547]	valid_0's auc: 0.979211
...
[577]	valid_0's auc: 0.979181
Early stopping, best iteration is:
[547]	valid_0's auc: 0.979211
LGBMClassifier(boosting='gbdt', lambda_l1=0.00028648667113792726,
               lambda_l2=0.00026863027834978876,
               learning_rate=0.18319492258552644, max_bins=511, max_depth=0,
               metric='auc', min_child_samples=80, num_iterations=5000,
               num_leaves=229, num_threads=-1, objective='binary',
               random_state=42)
print("Train Score", roc_auc_score(model.predict(X_train), y_train))
print("Valid Score", roc_auc_score(model.predict(X_valid), y_valid))
Train Score 0.9826676373940219
Valid Score 0.9266526138270555
test_preds = model.predict_proba(test)[:, 1]
submission = pd.read_csv('/content/sample_submission.csv')
submission['target'] = test_preds
submission.to_csv('output.csv', index=False)
!kaggle competitions submit -c tabular-playground-series-may-2022 -f output.csv -m "LGB heavy feature engineered"
100% 18.5M/18.5M [00:04<00:00, 3.94MB/s]
Successfully submitted to Tabular Playground Series - May 2022

The submission scores an ROC AUC of 0.97941.