@@ -4,7 +4,7 @@ jupytext:
44 extension : .md
55 format_name : myst
66 format_version : 0.13
7- jupytext_version : 1.16.4
7+ jupytext_version : 1.17.1
88kernelspec :
99 display_name : Python 3 (ipykernel)
1010 language : python
@@ -753,20 +753,20 @@ def protocol_1(π_minus_1, T, N=1000):
753753 """
754754
755755 sequences = np.empty((N, T))
756- true_models = np.empty(N, dtype=int )
756+ true_models_F = np.empty(N, dtype=bool )
757757
758758 for i in range(N):
759759 # Nature flips coin
760760 if np.random.rand() < π_minus_1:
761761 # Generate entire sequence from f
762762 sequences[i, :] = np.random.beta(F_a, F_b, T)
763- true_models [i] = 1
763+ true_models_F [i] = True
764764 else:
765765 # Generate entire sequence from g
766766 sequences[i, :] = np.random.beta(G_a, G_b, T)
767- true_models [i] = 0
767+ true_models_F [i] = False
768768
769- return sequences, true_models
769+ return sequences, true_models_F
770770```
771771
772772**Protocol 2.** At each time $t \geq 0$, nature flips a coin and with probability $\pi_{-1}$ draws $w_t$ from $f$ and with probability $1-\pi_{-1}$ draws $w_t$ from $g$.
@@ -779,19 +779,19 @@ def protocol_2(π_minus_1, T, N=1000):
779779 """
780780
781781 sequences = np.empty((N, T))
782- true_models = np.empty((N, T), dtype=int )
782+ true_models_F = np.empty((N, T), dtype=bool )
783783
784784 for i in range(N):
785785 for t in range(T):
786786 # Nature flips coin at each time step t
787787 if np.random.rand() < π_minus_1:
788788 sequences[i, t] = np.random.beta(F_a, F_b)
789- true_models [i, t] = 1
789+ true_models_F [i, t] = True
790790 else:
791791 sequences[i, t] = np.random.beta(G_a, G_b)
792- true_models [i, t] = 0
792+ true_models_F [i, t] = False
793793
794- return sequences, true_models
794+ return sequences, true_models_F
795795```
796796
797797**Remark:** Under protocol 2, the $\{w_t\}_{t=1}^T$ is a sequence of IID draws from $h(w)$. Under protocol 1, the $\{w_t\}_{t=1}^T$ is
@@ -888,12 +888,12 @@ for t in T_range:
888888
889889 # Type I error: reject H_0 when it's true
890890 # (model f generates data)
891- f_sequences = L_cumulative_p1[true_models_p1 == 1 , t_idx]
891+ f_sequences = L_cumulative_p1[true_models_p1, t_idx]
892892 α_T[t_idx] = np.mean(f_sequences < 1)
893893
894894 # Type II error: accept H_0 when it's false
895895 # (model g generates data)
896- g_sequences = L_cumulative_p1[true_models_p1 == 0 , t_idx]
896+ g_sequences = L_cumulative_p1[~ true_models_p1, t_idx]
897897 β_T[t_idx] = np.mean(g_sequences >= 1)
898898
899899 # Bayesian error probability
@@ -986,9 +986,9 @@ g_values = [g(w) for w in w_range]
986986ratio_values = [f(w)/g(w) for w in w_range]
987987
988988ax.plot(w_range, f_values, 'b-',
989- label='$f(w) \sim Beta(1,1)$', linewidth=2)
989+ label=r'$f(w) \sim Beta(1,1)$', linewidth=2)
990990ax.plot(w_range, g_values, 'r-',
991- label='$g(w) \sim Beta(3,1.2)$', linewidth=2)
991+ label=r'$g(w) \sim Beta(3,1.2)$', linewidth=2)
992992
993993ax.axvline(root, color='green', linestyle='--', alpha=0.7,
994994 label=f'decision boundary: $w=${root:.3f}')
@@ -1012,7 +1012,8 @@ for t in range(T_max):
10121012 correct_classifications[t] = np.mean(predictions == actual)
10131013
10141014plt.figure(figsize=(10, 6))
1015- plt.plot(range(1, T_max + 1), correct_classifications, 'b-', linewidth=2, label='empirical accuracy')
1015+ plt.plot(range(1, T_max + 1), correct_classifications,
1016+ 'b-', linewidth=2, label='empirical accuracy')
10161017plt.axhline(1 - theory_error, color='r', linestyle='--',
10171018 label=f'theoretical accuracy = {1 - theory_error:.4f}')
10181019plt.xlabel('time step')
0 commit comments