%mgb-ch03
%Chapter III - MGB Solutions
\begin{enumerate}
%3-1
\item[1.] \begin{enumerate}
\item[(f)] No, the variance of a negative binomial random variable cannot be smaller than its mean.
\item[(h)] Rectangular, normal, logistic, and beta with $a=b$. Note that the binomial for $p=1/2$ and $n$ even does not work.
\item[(n)] No.
\item[(o)] Yes, if the distribution of $X$ is symmetric about zero.
\end{enumerate}
%3-2
\item[2.] \begin{enumerate}
\item[(b)] If $r\le 1$, the mode is zero. If $r>1$, the mode is $(r-1)/\lambda$ (see the sketch following this list).
\end{enumerate}
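A quick derivation of the mode in (b), assuming the density in question is the gamma, $f(x) \propto x^{r-1}e^{-\lambda x}$ for $x>0$: set the derivative of $\log f(x)$ equal to zero,
\begin{eqnarray*}
\dfrac{d}{dx}\left[(r-1)\log x - \lambda x\right] &=& \dfrac{r-1}{x} - \lambda \;=\; 0 \quad\Longrightarrow\quad x = \dfrac{r-1}{\lambda},
\end{eqnarray*}
which lies in $(0,\infty)$ only when $r>1$; for $r\le 1$ the density is nonincreasing on $(0,\infty)$, so the mode is at zero.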
%3-4
\item[4.] \begin{enumerate}
\item[(b)] $2\Phi(-2)$
\item[(c)] $P[X\le 0] = \Phi(-\mu/\sqrt{h(\mu)}) = \Phi(-1/\sqrt{a})$ for $h(\mu)= a\mu^2,\ \mu >0$.
\end{enumerate}
\item[6.] Let $X$ be a random variable denoting the low bid of the competition. $X$ is uniformly distributed over the interval $((3/4)C,2C)$. Let $P$ denote profit and $B$ the amount the contractor should bid. Now $P=(B-C)I_{(B,2C)}(X)$ and
\begin{eqnarray*}
E[P] &=& \int (B-C)I_{(B,2C)}(x)f_X(x)\ dx \;=\; (B-C)\int_{(3/4)C}^{2C} I_{(B,2C)}(x)\left(2C-\dfrac{3}{4}C\right)^{-1}\ dx \\
&=& \dfrac{(B-C)}{\left(\dfrac{5}{4}\right)C}(2C-B).
\end{eqnarray*}
Now maximize with respect to $B$ and obtain $B=\dfrac{3C}{2}$.
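Explicitly, setting the derivative with respect to $B$ equal to zero gives the stated bid:
\begin{eqnarray*}
\dfrac{d}{dB}\,E[P] &=& \dfrac{(2C-B)-(B-C)}{\left(\dfrac{5}{4}\right)C} \;=\; \dfrac{3C-2B}{\left(\dfrac{5}{4}\right)C} \;=\; 0 \quad\Longrightarrow\quad B = \dfrac{3C}{2},
\end{eqnarray*}
and the second derivative is negative, so this is a maximum.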
\item[7.] \begin{enumerate}
\item[(a)] Let $k =$ number he should stock and $X$ the number he can sell in 25 days. \\
Want the minimal $k$ such that $P[X\le k] \ge .95$ where $X$ has a Poisson distribution with parameter $100$; that is, solve for $k$ in $\dsp \sum_{i=0}^{k} \dfrac{e^{-100}(100)^i}{i!} \ge .95$. \\
From a table of the Poisson distribution, $k=117$ is obtained. The normal approximation, $\dsp \Phi\left(\dfrac{k-100}{10}\right) = .95$, gives the same value (a numerical check for both parts follows this list).
\item[(b)] Let $Z =$ number of days out of 25 that he sells no items. \\
Under appropriate assumptions (what are they?) $Z$ has a binomial distribution with $n=25$ and $p=e^{-4}$. Hence, $E[Z] = 25e^{-4}$.
\end{enumerate}
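As a numerical check on both parts (using $z_{.95}\approx 1.645$ and $e^{-4}\approx .0183$):
\begin{eqnarray*}
k &\approx& 100 + (1.645)(10) \;\approx\; 116.5, \quad\text{so}\ k = 117; \\
E[Z] &=& 25e^{-4} \;\approx\; 25(.0183) \;\approx\; .46.
\end{eqnarray*}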
\item[8.] \begin{enumerate}
\item[(a)] $Y$ has a binomial distribution with parameters $n$ and $q$.
\item[(b)] $X$ has a binomial distribution with parameters $n$ and $15/36$.
\item[(c)] $(X+n)/2$ has a binomial distribution with parameters $n$ and $p$. \\
Hence $E[X] = n(2p-1)$.
\item[(d)] Show that $\dsp \sum_{j=0}^{k} {n\choose j}\left(p_1^jq_1^{n-j} - p_2^jq_2^{n-j}\right) = \sum_{j=0}^{k}d_j\ (\text{say}) \ge 0$. \\
Note that $\dsp \sum_{j=0}^{n} d_j =0$; hence it suffices to show that the first few $d_j$'s are positive and the remaining ones negative, for then the partial sums increase and then decrease back to zero, so each is nonnegative. But $d_j \ge 0$ if and only if \\ $j\le n\log(q_2/q_1)/\log(p_1q_2/p_2q_1)$ (the algebra is spelled out after this list). \\
(Use the result of Problem 28 for an alternate proof.)
\end{enumerate}
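To spell out the algebra behind the threshold in (d):
\begin{eqnarray*}
d_j \ge 0 &\Longleftrightarrow& \left(\dfrac{p_1}{p_2}\right)^{j}\left(\dfrac{q_1}{q_2}\right)^{n-j} \ge 1 \;\Longleftrightarrow\; j\log\left(\dfrac{p_1q_2}{p_2q_1}\right) \ge n\log\left(\dfrac{q_2}{q_1}\right),
\end{eqnarray*}
and dividing by $\log(p_1q_2/p_2q_1)$, which is negative when $p_1<p_2$ (the case in which the first few $d_j$ are positive), reverses the inequality and yields the stated bound on $j$.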
\item[9.] $\dsp \sum_{j=60}^{100} \dfrac{\dsp {2500 \choose j}{2500 \choose {100-j}}}{\dsp {5000\choose 100}}$. The hypergeometric can be approximated by the binomial, and the binomial in turn by the normal, which gives a numerical answer of approximately $1- \Phi(2) = .0228$.
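In more detail, the approximating binomial has $n=100$ and $p=2500/5000=1/2$, so that $np=50$ and $\sqrt{npq}=5$; the required probability is then approximately
\begin{eqnarray*}
1 - \Phi\left(\dfrac{60-50}{5}\right) &=& 1-\Phi(2) \;=\; .0228.
\end{eqnarray*}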
\item[11.] Let $X$ denote the number of defectives in the sample. Assume that $X$ has a binomial distribution. \begin{enumerate}
\item[(a)] $P[X\ge 1] = 1 - P[X=0] = 1 - (.99)^{10}$.
\item[(b)] Want $P[X\ge 1]\approx .95$; that is, $P[X=0] \approx .05$; \\
i.e., $(.9)^n \approx .05$, or $n\approx 29$ (solved explicitly after this list).
\end{enumerate}
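Solving $(.9)^n \approx .05$ in (b) explicitly:
\begin{eqnarray*}
n &\approx& \dfrac{\log(.05)}{\log(.9)} \;\approx\; 28.4,
\end{eqnarray*}
so $n=29$ is the smallest sample size that works.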
\item[15.] $\mu + c\left[\Phi\left(\dfrac{a-\mu}{\sigma}\right) - \Phi\left(\dfrac{b-\mu}{\sigma}\right)\right]/\left[\Phi\left(\dfrac{b-\mu}{\sigma}\right) - \Phi\left(\dfrac{a-\mu}{\sigma}\right)\right]$
\item[17.] There is a misprint in this problem; the mean was intended to be 200 rather than 20. Want \\
$P[X \ge 150] \ge .90$, or $\Phi\left(\dfrac{50}{\sigma}\right) \ge .90$, which implies $\sigma \le 50/1.282 \approx 39$.
\item[19.] \begin{enumerate}
\item[(a)] \begin{eqnarray*}
E[X] &=& \int_{0}^{\infty} \beta^{-2}x^2\exp[-(1/2)(x/\beta)^2]\ dx \\
&=& (1/2)\sqrt{2\pi}\beta^{-1}\int_{-\infty}^{\infty}x^2(1/\beta\sqrt{2\pi})\exp[-(1/2)(x/\beta)^2]\ dx \\
&=& \beta\sqrt{2\pi}/2,
\end{eqnarray*}
by recognizing that the last integral is the variance of a normal distribution with mean $0$ and variance $\beta^2$, which shows how a little knowledge of probability can be an aid to integration. Also,
\begin{eqnarray*}
var[X] &=& \beta^2(4-\pi)/2,
\end{eqnarray*}
as sketched following part (b).
\item[(b)] No.
\end{enumerate}
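A minimal sketch of the variance computation in (a), using the substitution $u=(1/2)(x/\beta)^2$ (so that $x\ dx = \beta^2\ du$ and $x^2 = 2\beta^2 u$):
\begin{eqnarray*}
E[X^2] &=& \int_{0}^{\infty} \beta^{-2}x^3\exp[-(1/2)(x/\beta)^2]\ dx \;=\; 2\beta^2\int_{0}^{\infty} ue^{-u}\ du \;=\; 2\beta^2, \\
var[X] &=& E[X^2] - (E[X])^2 \;=\; 2\beta^2 - \left(\beta\sqrt{2\pi}/2\right)^2 \;=\; \beta^2(4-\pi)/2.
\end{eqnarray*}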
\item[25.] $\begin{array}{c|c|c|c|c|c|c|c|c}
1 & 2 & 3 & 4 & 5 & 6 & 7 & 8 & 9 \\ \hline
\dfrac{9}{81} & \dfrac{12}{81} & \dfrac{16}{81}& \dfrac{12}{81}& \dfrac{12}{81} & \dfrac{10}{81}& \dfrac{6}{81}& \dfrac{3}{81}& \dfrac{1}{81}
\end{array}$
\item[28.] Assume the result is true and differentiate both sides with respect to $p$ to obtain the equality:
\begin{eqnarray*}
\sum_{j=k}^{n} j{n\choose j}p^{j-1}q^{n-j} - \sum_{j=k}^{n}(n-j){n\choose j}p^jq^{n-j-1} = k{n\choose k}p^{k-1}q^{n-k}.
\end{eqnarray*}
The equality is verified by noting that the $(j+1)$st term of the first sum cancels the $j$th term of the second sum. Work backwards.
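The term-by-term cancellation rests on the identity $(j+1){n\choose j+1} = (n-j){n\choose j}$, which gives
\begin{eqnarray*}
(j+1){n\choose j+1}p^{j}q^{n-j-1} &=& (n-j){n\choose j}p^{j}q^{n-j-1}, \qquad j=k,\ldots,n-1,
\end{eqnarray*}
so only the $j=k$ term of the first sum survives (the $j=n$ term of the second sum is zero), and it equals the right-hand side.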
\item[29.] Let $X$ = \# of successes in the first $n$ Bernoulli trials \\
and $Y$ = \# of failures prior to the $r$th success. \\
Note that $[X\le r-1]\cong[Y>n-r]$, since fewer than $r$ successes in the first $n$ trials occur if and only if more than $n-r$ failures precede the $r$th success; hence $F_X(r-1) = P[X\le r-1] = P[Y> n-r] = 1 - F_Y(n-r)$.
\item[30.]${}$\vspace{-7.5ex}\begin{eqnarray*}
E[Z_\lambda] &=& (E[U^\lambda]-E[(1-U)^\lambda])/\lambda = 0\ \text{for}\ \lambda >-1. \\
E[Z_\lambda^2] &=& (E[U^{2\lambda}] -2E[U^\lambda(1-U)^\lambda] + E[(1-U)^{2\lambda}])/\lambda^2 \\
&=& (2/\lambda^2)([1/(2\lambda+1)] - B(\lambda+1,\lambda+1))\ \text{for}\ \lambda > -1/2. \\
E[Z_\lambda^3] &=& 0\ \text{for}\ \lambda > -1/3. \\
E[Z_\lambda^4] &=&(2/\lambda^4)([1/(4\lambda+1) - 4B(3\lambda+1,\lambda+1) + 3B(2\lambda+1,2\lambda+1)])\ \text{for}\ \lambda > -1/4.
\end{eqnarray*}
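All of these moments reduce to beta integrals; assuming, as the moments above indicate, that $U$ is uniform on $(0,1)$, then for exponents $a,b>-1$
\begin{eqnarray*}
E\left[U^{a}(1-U)^{b}\right] &=& \int_0^1 u^{a}(1-u)^{b}\ du \;=\; B(a+1,b+1),
\end{eqnarray*}
so, for example, $E[U^{2\lambda}] = B(2\lambda+1,1) = 1/(2\lambda+1)$ and $E[U^{\lambda}(1-U)^{\lambda}] = B(\lambda+1,\lambda+1)$.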
The last part is misstated. The intent was to get two different $\lambda$'s, say $\lambda_1$ and $\lambda_2$, such that $Z_{\lambda_1}$ and $Z_{\lambda_2}$ have the same skewness and kurtosis. If $\lambda_1$ and $\lambda_2$ are sought so that $Z_{\lambda_1}$ and $Z_{\lambda_2}$ have kurtosis equal to zero, then $\lambda_1\approx .135$ and $\lambda_2\approx 5.20$ will work.
\end{enumerate}