
Commit 2c4f370
Author: Bob Jansen

    Minor code whitespace and comment fixes

1 parent: be157cd

File tree: 1 file changed, 17 additions and 19 deletions

Chapter2_MorePyMC/MorePyMC.ipynb
@@ -424,7 +424,7 @@
 "cell_type": "code",
 "collapsed": false,
 "input": [
-"#we're using some fake data here\n",
+"# We're using some fake data here\n",
 "data = np.array([10, 25, 15, 20, 35])\n",
 "obs = mc.Poisson(\"obs\", lambda_, value=data, observed=True)\n",
 "print obs.value"
@@ -695,7 +695,7 @@
 "input": [
 "import pymc as mc\n",
 "\n",
-"# the parameters are the bounds of the Uniform.\n",
+"# The parameters are the bounds of the Uniform.\n",
 "p = mc.Uniform('p', lower=0, upper=1)"
 ],
 "language": "python",
@@ -916,7 +916,6 @@
 "collapsed": false,
 "input": [
 "# Set up the pymc model. Again assume Uniform priors for p_A and p_B.\n",
-"\n",
 "p_A = mc.Uniform(\"p_A\", 0, 1)\n",
 "p_B = mc.Uniform(\"p_B\", 0, 1)\n",
 "\n",
@@ -980,6 +979,7 @@
 "collapsed": false,
 "input": [
 "figsize(12.5, 10)\n",
+"\n",
 "#histogram of posteriors\n",
 "\n",
 "ax = plt.subplot(311)\n",
@@ -1037,7 +1037,7 @@
 "cell_type": "code",
 "collapsed": false,
 "input": [
-"#count the number of samples less than 0, i.e. the area under the curve\n",
+"# Count the number of samples less than 0, i.e. the area under the curve\n",
 "# before 0, represent the probability that site A is worse than site B.\n",
 "print \"Probability site A is WORSE than site B: %.3f\" % \\\n",
 "    (delta_samples < 0).mean()\n",
@@ -1337,7 +1337,7 @@
 "model = mc.Model([p, true_answers, first_coin_flips,\n",
 "                  second_coin_flips, observed_proportion, observations])\n",
 "\n",
-"### To be explained in Chapter 3!\n",
+"# To be explained in Chapter 3!\n",
 "mcmc = mc.MCMC(model)\n",
 "mcmc.sample(120000, 80000, 4)"
 ],
@@ -1368,9 +1368,8 @@
 "input": [
 "figsize(12.5, 3)\n",
 "p_trace = mcmc.trace(\"freq_cheating\")[:]\n",
-"plt.hist(p_trace, histtype=\"stepfilled\", normed=True,\n",
-"         alpha=0.85, bins=30, label=\"posterior distribution\",\n",
-"         color=\"#348ABD\")\n",
+"plt.hist(p_trace, histtype=\"stepfilled\", normed=True, alpha=0.85, bins=30, \n",
+"         label=\"posterior distribution\", color=\"#348ABD\")\n",
 "plt.vlines([.05, .35], [0, 0], [5, 5], alpha=0.3)\n",
 "plt.xlim(0, 1)\n",
 "plt.legend()"
@@ -1473,7 +1472,7 @@
 "input": [
 "model = mc.Model([yes_responses, p_skewed, p])\n",
 "\n",
-"### To Be Explained in Chapter 3!\n",
+"# To Be Explained in Chapter 3!\n",
 "mcmc = mc.MCMC(model)\n",
 "mcmc.sample(12500, 2500)"
 ],
@@ -1504,9 +1503,8 @@
 "input": [
 "figsize(12.5, 3)\n",
 "p_trace = mcmc.trace(\"freq_cheating\")[:]\n",
-"plt.hist(p_trace, histtype=\"stepfilled\", normed=True,\n",
-"         alpha=0.85, bins=30, label=\"posterior distribution\",\n",
-"         color=\"#348ABD\")\n",
+"plt.hist(p_trace, histtype=\"stepfilled\", normed=True, alpha=0.85, bins=30, \n",
+"         label=\"posterior distribution\", color=\"#348ABD\")\n",
 "plt.vlines([.05, .35], [0, 0], [5, 5], alpha=0.2)\n",
 "plt.xlim(0, 1)\n",
 "plt.legend()"
@@ -1538,9 +1536,9 @@
 "\n",
 "Sometimes writing a deterministic function using the `@mc.deterministic` decorator can seem like a chore, especially for a small function. I have already mentioned that elementary math operations *can* produce deterministic variables implicitly, but what about operations like indexing or slicing? Built-in `Lambda` functions can handle this with the elegance and simplicity required. For example, \n",
 "\n",
-"    beta = mc.Normal( \"coefficients\", 0, size=(N,1) )\n",
-"    x = np.random.randn( (N,1) )\n",
-"    linear_combination = mc.Lambda( lambda x=x, beta = beta: np.dot( x.T, beta ) )\n",
+"    beta = mc.Normal(\"coefficients\", 0, size=(N, 1))\n",
+"    x = np.random.randn((N, 1))\n",
+"    linear_combination = mc.Lambda(lambda x=x, beta=beta: np.dot(x.T, beta))\n",
 "\n",
 "\n",
 "#### Protip: Arrays of PyMC variables\n",
@@ -1556,7 +1554,7 @@
 "N = 10\n",
 "x = np.empty(N, dtype=object)\n",
 "for i in range(0, N):\n",
-"    x[i] = mc.Exponential('x_%i' % i, (i+1)**2)"
+"    x[i] = mc.Exponential('x_%i' % i, (i+1)**2)"
 ],
 "language": "python",
 "metadata": {},
@@ -2118,11 +2116,11 @@
 "\n",
 "Previously in this Chapter, we simulated artificial dataset for the SMS example. To do this, we sampled values from the priors. We saw how varied the resulting datasets looked like, and rarely did they mimic our observed dataset. In the current example, we should sample from the *posterior* distributions to create *very plausible datasets*. Luckily, our Bayesian framework makes this very easy. We only need to create a new `Stochastic` variable, that is exactly the same as our variable that stored the observations, but minus the observations themselves. If you recall, our `Stochastic` variable that stored our observed data was:\n",
 "\n",
-"    observed = mc.Bernoulli( \"bernoulli_obs\", p, value = D, observed=True)\n",
+"    observed = mc.Bernoulli( \"bernoulli_obs\", p, value=D, observed=True)\n",
 "\n",
 "Hence we create:\n",
 " \n",
-"    simulated_data = mc.Bernoulli(\"simulation_data\", p )\n",
+"    simulated_data = mc.Bernoulli(\"simulation_data\", p)\n",
 "\n",
 "Let's simulate 10 000:"
 ]
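The markdown in the hunk above describes posterior-predictive simulation: an unobserved copy of the likelihood node is re-sampled at every MCMC step, producing fresh plausible datasets. A hedged end-to-end sketch of the pattern; only the `observed` and `simulated_data` lines come from the notebook, the data `D` is a stand-in, and `size=len(D)` is added because `p` is scalar here rather than the notebook's per-observation array:

    import numpy as np
    import pymc as mc  # PyMC 2.x

    D = np.array([0, 1, 1, 0, 1])  # stand-in for the observed binary data
    p = mc.Uniform("p", 0, 1)

    # The observed node fixes its value to the data...
    observed = mc.Bernoulli("bernoulli_obs", p, value=D, observed=True)
    # ...while its unobserved twin is drawn afresh at every MCMC step,
    # yielding posterior-predictive datasets.
    simulated_data = mc.Bernoulli("simulation_data", p, size=len(D))

    mcmc = mc.MCMC([p, observed, simulated_data])
    mcmc.sample(10000)
    simulations = mcmc.trace("simulation_data")[:]  # one dataset per sample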
@@ -2431,4 +2429,4 @@
 "metadata": {}
 }
 ]
-}
+}

0 commit comments
