-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy pathutil_binary_logistic_regression.py
More file actions
46 lines (38 loc) · 1.17 KB
/
util_binary_logistic_regression.py
File metadata and controls
46 lines (38 loc) · 1.17 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
from numpy import clip
from numpy import exp

from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
def toy_data(M=10000, train_size=0.8, n_features=2, random_state=12345):
    """Generate a synthetic binary-classification dataset and split it.

    Parameters
    ----------
    M : int
        Total number of samples to generate (passed to ``n_samples``).
    train_size : float
        Fraction of samples used for training; the rest become the test set.
    n_features : int
        Number of informative features (excluding the bias term).
        Defaults to 2, matching the original hard-coded value.
    random_state : int
        Seed used for both data generation and the train/test split,
        so repeated calls return the same split.

    Returns
    -------
    X_train, Y_train, X_test, Y_test : ndarray
        Feature matrices and 0/1 label vectors for the two splits.
    """
    X, Y = make_classification(
        n_samples=M,
        n_features=n_features,
        n_informative=n_features,  # every feature carries signal
        n_redundant=0,
        n_classes=2,
        n_clusters_per_class=1,
        class_sep=1,
        flip_y=0.01,  # small label noise so classes are not perfectly separable
        random_state=random_state,
    )
    X_train, X_test, Y_train, Y_test = train_test_split(
        X, Y, train_size=train_size, random_state=random_state
    )
    return X_train, Y_train, X_test, Y_test
def init_weights():
    """Return fixed initial parameters ``(w1, w2, b)`` for the single
    sigmoid unit of the binary logistic-regression model.

    The values are deliberately simple, human-readable numbers so hand
    calculations are easy to follow; real training code would draw the
    initial weights from an appropriate random distribution instead.
    """
    weight_one, weight_two, bias = 0.5, 0.25, 0.125
    return weight_one, weight_two, bias
def my_sigmoid(z):
return 1.0 / (1.0 + exp(-z))
def predict_class(y):
    """Map sigmoid output(s) to a hard 0/1 class label.

    Values of 0.5 and above are assigned class 1, everything below class
    0.  Accepts a scalar or a NumPy array; multiplying the boolean
    comparison by 1 converts it to an integer (or integer array).
    """
    is_positive_class = y >= 0.5
    return 1 * is_positive_class