Skip to content

Commit 2740053

Browse files
committed
added some formatting changes
1 parent 4a64f4f commit 2740053

2 files changed

Lines changed: 14 additions & 15 deletions

File tree

network_security/components/data_transformation.py

Lines changed: 14 additions & 12 deletions
Original file line number · Diff line number · Diff line change
@@ -1,4 +1,3 @@
1-
import os
21
import sys
32

43
import numpy as np
@@ -37,13 +36,13 @@ def __init__(
3736
raise NetworkSecurityException(e, sys)
3837

3938
@staticmethod
40-
def read_data(file_path) -> pd.DataFrame:
39+
def read_data(file_path: str) -> pd.DataFrame:
4140
try:
4241
return pd.read_csv(file_path)
4342
except Exception as e:
4443
raise NetworkSecurityException(e, sys)
4544

46-
def get_data_transformer_object(cls) -> Pipeline:
45+
def get_data_transformer_object(self) -> Pipeline:
4746
"""
4847
It initialises a KNNImputer object with the parameters specified in the training_pipeline.py file
4948
and returns a Pipeline object with the KNNImputer object as the first step.
@@ -53,14 +52,15 @@ def get_data_transformer_object(cls) -> Pipeline:
5352
5453
Returns:
5554
A Pipeline object
55+
5656
"""
5757
logging.info(
58-
"Entered get_data_trnasformer_object method of Trnasformation class"
58+
"Entered get_data_trnasformer_object method of Trnasformation class",
5959
)
6060
try:
6161
imputer: KNNImputer = KNNImputer(**DATA_TRANSFORMATION_IMPUTER_PARAMS)
6262
logging.info(
63-
f"Initialise KNNImputer with {DATA_TRANSFORMATION_IMPUTER_PARAMS}"
63+
f"Initialise KNNImputer with {DATA_TRANSFORMATION_IMPUTER_PARAMS}",
6464
)
6565
processor: Pipeline = Pipeline([("imputer", imputer)])
6666
return processor
@@ -69,15 +69,15 @@ def get_data_transformer_object(cls) -> Pipeline:
6969

7070
def initiate_data_transformation(self) -> DataTransformationArtifact:
7171
logging.info(
72-
"Entered initiate_data_transformation method of DataTransformation class"
72+
"Entered initiate_data_transformation method of DataTransformation class",
7373
)
7474
try:
7575
logging.info("Starting data transformation")
7676
train_df = DataTransformation.read_data(
77-
self.data_validation_artifact.valid_train_file_path
77+
self.data_validation_artifact.valid_train_file_path,
7878
)
7979
test_df = DataTransformation.read_data(
80-
self.data_validation_artifact.valid_test_file_path
80+
self.data_validation_artifact.valid_test_file_path,
8181
)
8282

8383
## training dataframe
@@ -94,17 +94,19 @@ def initiate_data_transformation(self) -> DataTransformationArtifact:
9494

9595
preprocessor_object = preprocessor.fit(input_feature_train_df)
9696
transformed_input_train_feature = preprocessor_object.transform(
97-
input_feature_train_df
97+
input_feature_train_df,
9898
)
9999
transformed_input_test_feature = preprocessor_object.transform(
100-
input_feature_test_df
100+
input_feature_test_df,
101101
)
102102

103103
train_arr = np.c_[
104-
transformed_input_train_feature, np.array(target_feature_train_df)
104+
transformed_input_train_feature,
105+
np.array(target_feature_train_df),
105106
]
106107
test_arr = np.c_[
107-
transformed_input_test_feature, np.array(target_feature_test_df)
108+
transformed_input_test_feature,
109+
np.array(target_feature_test_df),
108110
]
109111

110112
# save numpy array data

network_security/constant/training_pipeline/__init__.py

Lines changed: 0 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -1,9 +1,6 @@
1-
import os
2-
import sys
31
from pathlib import Path
42

53
import numpy as np
6-
import pandas as pd
74

85
"""
96
defining common constant variable for training pipeline

0 commit comments

Comments (0)