MAKE_BOOK_FIGURES = True

import numpy as np
import scipy.stats as st
import matplotlib as mpl
import matplotlib.pyplot as plt
%matplotlib inline
import matplotlib_inline
matplotlib_inline.backend_inline.set_matplotlib_formats('svg')
import seaborn as sns
sns.set_context("paper")
sns.set_style("ticks")

def set_book_style():
    plt.style.use('seaborn-v0_8-white')
    sns.set_style("ticks")
    sns.set_palette("deep")
    mpl.rcParams.update({
        # Font settings
        'font.family': 'serif',  # For academic publishing
        'font.size': 8,          # 8pt base font for print
        'axes.labelsize': 8,
        'axes.titlesize': 8,
        'xtick.labelsize': 7,    # Slightly smaller for better readability
        'ytick.labelsize': 7,
        'legend.fontsize': 7,
        # Line and marker settings for consistency
        'axes.linewidth': 0.5,
        'grid.linewidth': 0.5,
        'lines.linewidth': 1.0,
        'lines.markersize': 4,
        # Layout to prevent clipped labels
        'figure.constrained_layout.use': True,
        # Default DPI (will override when saving)
        'figure.dpi': 600,
        'savefig.dpi': 600,
        # Despine - remove top and right spines
        'axes.spines.top': False,
        'axes.spines.right': False,
        # Remove legend frame
        'legend.frameon': False,
        # Additional trim settings
        'figure.autolayout': True,  # Alternative to constrained_layout
        'savefig.bbox': 'tight',    # Trim when saving
        'savefig.pad_inches': 0.1   # Small padding so nothing gets cut off
    })

def set_notebook_style():
    plt.style.use('seaborn-v0_8-white')
    sns.set_style("ticks")
    sns.set_palette("deep")
    mpl.rcParams.update({
        # Font settings - using default sizes
        'font.family': 'serif',
        'axes.labelsize': 10,
        'axes.titlesize': 10,
        'xtick.labelsize': 9,
        'ytick.labelsize': 9,
        'legend.fontsize': 9,
        # Line and marker settings
        'axes.linewidth': 0.5,
        'grid.linewidth': 0.5,
        'lines.linewidth': 1.0,
        'lines.markersize': 4,
        # Layout settings
        'figure.constrained_layout.use': True,
        # Remove only top and right spines
        'axes.spines.top': False,
        'axes.spines.right': False,
        # Remove legend frame
        'legend.frameon': False,
        # Additional settings
        'figure.autolayout': True,
        'savefig.bbox': 'tight',
        'savefig.pad_inches': 0.1
    })

def save_for_book(fig, filename, is_vector=True, **kwargs):
    """
    Save a figure with book-optimized settings.

    Parameters:
    -----------
    fig : matplotlib figure
        The figure to save
    filename : str
        Filename without extension
    is_vector : bool
        If True, saves as vector at 1000 dpi.
        If False, saves as raster at 600 dpi.
    **kwargs : dict
        Additional kwargs to pass to savefig
    """
    # Set appropriate DPI and format based on figure type
    if is_vector:
        dpi = 1000
        ext = '.pdf'
    else:
        dpi = 600
        ext = '.tif'

    # Save the figure with book settings
    fig.savefig(f"{filename}{ext}", dpi=dpi, **kwargs)

def make_full_width_fig():
    return plt.subplots(figsize=(4.7, 2.9), constrained_layout=True)

def make_half_width_fig():
    return plt.subplots(figsize=(2.35, 1.45), constrained_layout=True)

if MAKE_BOOK_FIGURES:
    set_book_style()
else:
    set_notebook_style()

make_full_width_fig = make_full_width_fig if MAKE_BOOK_FIGURES else lambda: plt.subplots()
make_half_width_fig = make_half_width_fig if MAKE_BOOK_FIGURES else lambda: plt.subplots()
Example: Inferring the probability of a coin toss from data
We toss a coin with an unknown probability of heads \(\theta\), \(N\) times independently, and we observe the results:

\[
x_{1:N} = (x_1, \dots, x_N).
\]
Assume that we have coded the result so that heads correspond to a “1” and tails to a “0.” We aim to estimate the probability of heads \(\theta\) from this dataset.
Assuming that we know nothing about the coin, we set a uniform prior:

\[
\theta \sim \text{U}([0,1]).
\]

In terms of probability densities, this is:

\[
p(\theta) =
\begin{cases}
1, & 0 \le \theta \le 1,\\
0, & \text{otherwise}.
\end{cases}
\]
Now, let’s write down the likelihood of the data. Because of the independence assumption, we have:

\[
p(x_{1:N}|\theta) = \prod_{n=1}^N p(x_n|\theta).
\]
Then, each measurement is a Bernoulli with probability of success \(\theta\), i.e.,

\[
x_n | \theta \sim \text{Bernoulli}(\theta).
\]
In terms of probability densities, we have the likelihood:

\[
p(x_n|\theta) =
\begin{cases}
\theta, & x_n = 1,\\
1 - \theta, & x_n = 0.
\end{cases}
\]
Using a common mathematical trick, we can rewrite this as:

\[
p(x_n|\theta) = \theta^{x_n}(1-\theta)^{1-x_n}.
\]
Work out the cases \(x_n=0\) and \(x_n=1\) to convince yourself.
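If you would also like a numerical sanity check, here is a quick sketch (the value \(\theta = 0.3\) is an arbitrary choice):

theta = 0.3
for x in [0, 1]:
    # Direct case-by-case definition of the Bernoulli pmf
    direct = theta if x == 1 else 1.0 - theta
    # The single-formula version using the trick above
    trick = theta ** x * (1.0 - theta) ** (1 - x)
    print(f"x = {x}: direct = {direct:.2f}, trick = {trick:.2f}")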
Now, we can find the expression for the likelihood of the entire dataset. It is:

\[
p(x_{1:N}|\theta) = \prod_{n=1}^N \theta^{x_n}(1-\theta)^{1-x_n}
= \theta^{\sum_{n=1}^N x_n}(1-\theta)^{N - \sum_{n=1}^N x_n}.
\]
Intuitively, this is the probability of getting \(\sum_{n=1}^N x_n\) heads and the remaining \(N-\sum_{n=1}^N x_n\) tails.
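If you prefer to see this likelihood as code, here is a minimal sketch (the helper name likelihood is our choice and is not used later in this notebook):

import numpy as np

def likelihood(theta, data):
    """Evaluate p(x_{1:N} | theta) for a 0/1 data array.

    This is just theta**(# heads) * (1 - theta)**(# tails).
    """
    heads = np.sum(data)
    tails = len(data) - heads
    return theta ** heads * (1.0 - theta) ** tails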
We can now find the posterior. By Bayes’ rule, it is:

\[
p(\theta|x_{1:N}) \propto p(x_{1:N}|\theta)\,p(\theta).
\]
In our problem:

\[
p(\theta|x_{1:N}) \propto \theta^{\sum_{n=1}^N x_n}(1-\theta)^{N - \sum_{n=1}^N x_n},
\]

for \(\theta\) in \([0,1]\) (and zero otherwise).
And this is just the density corresponding to a Beta distribution:

\[
\theta | x_{1:N} \sim \text{Beta}\left(1 + \sum_{n=1}^N x_n,\ 1 + N - \sum_{n=1}^N x_n\right).
\]
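As a quick check of what this posterior implies, recall that the mean of a \(\text{Beta}(\alpha, \beta)\) is \(\alpha / (\alpha + \beta)\), so here:

\[
\mathbb{E}[\theta | x_{1:N}] = \frac{1 + \sum_{n=1}^N x_n}{2 + N},
\]

i.e., the observed frequency of heads pulled slightly toward \(1/2\) by the prior’s pseudo-counts of one head and one tail.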
Let’s try this out with some synthetic data. Take a coin that is slightly biased:
import scipy.stats as st
theta_true = 0.8
X = st.bernoulli(theta_true)
Sample from it \(N\) times to generate our data \(x_{1:N} = (x_1, \dots, x_N)\):
N = 5
data = X.rvs(size=N)
data
array([0, 1, 1, 1, 1])
Now we are ready to compute the posterior, which is the Beta distribution we derived above:
alpha = 1.0 + data.sum()
beta = 1.0 + N - data.sum()
Theta_post = st.beta(alpha, beta)
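Since Theta_post is just a frozen scipy.stats distribution, we can also summarize it numerically. A small sketch (the 95% level is an arbitrary choice):

# Posterior mean and a 95% credible interval
print(f"posterior mean: {Theta_post.mean():.3f}")
low, high = Theta_post.interval(0.95)
print(f"95% credible interval: ({low:.3f}, {high:.3f})")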
And we can plot it:
fig, ax = plt.subplots()
thetas = np.linspace(0, 1, 100)
ax.plot(
[theta_true],
[0.0],
'o',
markeredgewidth=2,
markersize=10,
label='True value'
)
ax.plot(
thetas,
Theta_post.pdf(thetas),
label=r'$p(\theta|x_{1:N})$'
)
ax.set_xlabel(r'$\theta$')
ax.set_ylabel('Probability density')
ax.set_title(f'$N={N}$')
plt.legend(loc='best', frameon=False)
sns.despine(trim=True);
Questions
Try \(N=0,5,10,100\) and see what happens.
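Here is one way to set up that experiment, as a sketch that reuses X and theta_true from above (your curves will vary with the random draws):

# Redo the analysis for several N and watch the posterior
# concentrate around the true value.
fig, ax = plt.subplots()
thetas = np.linspace(0, 1, 100)
for N in [0, 5, 10, 100]:
    data = X.rvs(size=N)
    post = st.beta(1.0 + data.sum(), 1.0 + N - data.sum())
    ax.plot(thetas, post.pdf(thetas), label=f'$N={N}$')
ax.axvline(theta_true, color='k', linestyle='--', label='True value')
ax.set_xlabel(r'$\theta$')
ax.set_ylabel('Probability density')
plt.legend(loc='best', frameon=False)
sns.despine(trim=True);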