%------------------------------------------------------------------------------
%\documentclass[reqno]{amsart}
\documentclass[12pt]{amsart}
%\setcounter{page}{6}
\usepackage[top=1in, bottom=1in, left=1in, right=1in]{geometry}
\usepackage[colorlinks=true, urlcolor=blue]{hyperref}
\usepackage{amssymb}
\usepackage{amsmath,mathrsfs}
%\oddsidemargin -0in \evensidemargin .5in
%\topmargin=1.25in
%\headheight 10pt \headsep 10pt \footheight 10pt \footskip 24pt
%\textheight 10in \textwidth 6.5in \columnsep 10pt \columnseprule 0pt
%\font\namefont=cmr10 scaled\magstep2
\font\namefont=cmr8 scaled\magstep2
\newcommand{\myindent}{\leftskip=.4in}
%\voffset=-.75in
\parskip=11pt %extra vertical distance for new paragraph
\parindent=0in
%\newtheorem{theorem}{Theorem}[section]
%\newtheorem{lemma}[theorem]{Lemma}
%\newtheorem{lemma}{Lemma}[section]
%\newtheorem{theorem}{Theorem}[section]
%\newtheorem{lemma}[theorem]{Lemma}
%\newtheorem{prop}[theorem]{Proposition}
%\newtheorem{cor}[theorem]{Corollary}
%\newtheorem{conj}{Conjecture}
\usepackage{graphicx}
\usepackage{color}
\usepackage{subfigure}
\usepackage{amssymb}
\usepackage{amsmath}
\usepackage{colonequals}
\usepackage{hyperref}
%\usepackage{showlabels}
%\usepackage[all]{xypic}
%\entrymodifiers={+!!<0pt,\fontdimen22\textfont2>}
\theoremstyle{definition}
\newtheorem{exercise}{Exercise}
\newtheorem{remark}{Remark}
\renewcommand{\setminus}{\smallsetminus}
\addtolength{\footskip}{17pt}
%\numberwithin{table}{section}
\renewcommand{\subset}{\subseteq}
\renewcommand{\supset}{\supseteq}
\renewcommand{\epsilon}{\varepsilon}
\newcommand{\abs}[1]{\left|#1\right|} % Absolute value notation
\newcommand{\absf}[1]{|#1|} % small absolute value signs
\newcommand{\vnorm}[1]{\left|\left|#1\right|\right|} % norm notation
\newcommand{\vnormf}[1]{||#1||} % norm notation, forced to be small
\newcommand{\im}[1]{\mbox{im}#1} % Pieces of English for math mode
\newcommand{\tr}[1]{\mbox{tr}#1}
\newcommand{\Proj}[1]{\mbox{Proj}#1}
\newcommand{\Vol}[1]{\mbox{Vol}#1}
\newcommand{\Z}{\mathbf{Z}} % Blackboard notation
\newcommand{\N}{\mathbf{N}}
\newcommand{\E}{\mathbf{E}}
\renewcommand{\P}{\mathbf{P}}
\newcommand{\R}{\mathbf{R}}
\newcommand{\C}{\mathbf{C}}
\newcommand{\Q}{\mathbf{Q}}
\newcommand{\figoneawidth}{.5\textwidth} % Image formatting parameters
\newcommand{\lbreak}{\\} % Linebreak
\newcommand{\italicize}[1]{\textit {#1}} % formatting commands for bibliography
%\newcommand{\embolden}[1]{\textbf {#1}}
\newcommand{\embolden}[1]{{#1}}
\newcommand{\undline}[1]{\underline {#1}}
\newcommand{\e}{\varepsilon}
\renewcommand{\epsilon}{\varepsilon}
\newcommand{\Samplespace}{\mathcal{C}} %consider changing this in next revision
\newcommand{\samplept}{c}
%\renewcommand{\colonequals}{=}
\thispagestyle{empty}
\begin{document}
\bigskip
Graduate Applied Probability II\hfill Steven Heilman\\
\noindent\rule{6.5in}{0.4pt}
%\vspace{.2cm}

Please provide complete and well-written solutions to the following exercises. Due March 26, 12PM (noon) PST, to be uploaded as a single PDF document to blackboard (under the Assignments tab).

\vspace{.5cm}
\begin{center}
{\Large Homework 5}
\end{center}
\vspace{.5cm}

\begin{exercise}\label{exercise9}
Using the Optional Stopping Theorem, prove Wald's equations: Let $X_{1},X_{2},\ldots\colon\Samplespace\to\R$ be i.i.d. Let $N$ be a stopping time. Let $S_{0},S_{1},\ldots$ be the corresponding random walk with $S_{0}\colonequals0$.
\begin{itemize}
\item If $\E N<\infty$, and $\E\abs{X_{1}}<\infty$, then $\E S_{N}=\E X_{1}\E N$.
\item If $\E X_{1}=0,\E X_{1}^{2}<\infty$ and $\E N<\infty$, then $\E S_{N}^{2}=\E X_{1}^{2}\E N$.
\end{itemize}
\end{exercise}

% NOTE(review): the statement of the next exercise was garbled in the source
% (the text between ``$1/2$'' and ``$>0\}$'' was lost, most likely a dropped
% span between ``<'' characters); reconstructed from context --- please verify.
\begin{exercise}
Let $1/2<p<1$. Let $X_{1},X_{2},\ldots\colon\Samplespace\to\R$ be i.i.d. with $\P(X_{i}=1)=p$ and $\P(X_{i}=-1)=1-p$ for all $i\geq1$. Let $S_{0},S_{1},\ldots$ be the corresponding random walk with $S_{0}\colonequals0$. Let $N\colonequals\min\{n\geq1\colon S_{n}>0\}$. Using Wald's equation for $\min(N,n)$ and then letting $n\to\infty$, show that $\E N=1/\E X_{1}=1/(2p-1)$.
\end{exercise}

\begin{exercise}\label{exercise7.5}
Let $X_{0}=0$. Let $X_{1},X_{2},\ldots$ be i.i.d. such that $\P(X_{i}=1)=\P(X_{i}=-1)=1/2$ for all $i\geq1$. For any $n\geq0$, let $Y_{n}=X_{0}+\cdots+X_{n}$. So, $(Y_{0},Y_{1},\ldots)$ is a symmetric simple random walk on $\Z$. Show that $Y_{n}^{2}-n$ is a martingale (with respect to $(X_{0},X_{1},\ldots)$).
\end{exercise}

% NOTE(review): the statement of the next exercise was garbled in the source
% (the text between ``$1/2$'' and ``$>0$'' was lost); reconstructed from
% context --- please verify.
\begin{exercise}
Let $1/2<p<1$. Let $(Y_{0},Y_{1},\ldots)$ be the biased simple random walk on $\Z$ started at $Y_{0}=0$, with i.i.d. steps satisfying $\P(X_{i}=1)=p$ and $\P(X_{i}=-1)=1-p$ for all $i\geq1$. For any $x\in\Z$, let $T_{x}\colonequals\min\{n\geq1\colon Y_{n}=x\}$. Show that $\P_{1}(T_{0}=\infty)>0$. Then, deduce that $\P_{0}(T_{0}=\infty)>0$. That is, there is a positive probability that the biased random walk never returns to $0$, even though it started at $0$.
\end{exercise}
%
%
%\begin{exercise}
%Let $X_{1},\ldots$ be independent identically distributed random variables with $\P(X_{i}=1)=\P(X_{i}=-1)=1/2$ for every $i\geq1$. For any $n\geq1$, let $M_{n}\colonequals X_{1}+\cdots+X_{n}$. Let $M_{0}=0$. For any $n\geq1$, define
%$$W_{n}\colonequals M_{0}+\sum_{m=1}^{n}H_{m}(M_{m}-M_{m-1}).$$
%
%Show that if you have an infinite amount of money, then you \textit{can} make money by using the double-your-bet strategy in the game of coinflips (where if you bet $\$d$, then you win $\$d$ with probability $1/2$, and you lose $\$d$ with probability $1/2$). For example, show that if you start by betting $\$1$, and if you keep doubling your bet until you win (which should define some betting strategy $H_{1},H_{2},\ldots$ and a stopping time $T$), then $\E W_{T}=1$, for a suitable stopping time $T$.
%\end{exercise}%
%
%\begin{exercise}
%Prove the following variant of the Optional Stopping Theorem. Assume that $(M_{0},M_{1},\ldots)$ is a submartingale, and let $T$ be a stopping time such that $\P(T<\infty)=1$. Let $c\in\R$. Assume that $\abs{M_{n\wedge T}}\leq c$ for all $n\geq0$. Then $\E M_{T}\geq\E M_{0}$. That is, you can make money by stopping a submartingale.
%\end{exercise}

\begin{exercise}[\embolden{Ballot Theorem}]
Let $a,b$ be positive integers. Suppose there are $c$ votes cast by $c$ people in an election. Candidate $1$ gets $a$ votes and candidate $2$ gets $b$ votes. (So $c=a+b$.) Assume $a>b$. The votes are counted one by one. The votes are counted in a uniformly random ordering, and we would like to keep a running tally of who is currently winning. (News agencies seem to enjoy reporting about this number.) Suppose the first candidate eventually wins the election.
We ask: with what probability will candidate $1$ always be ahead in the running tally of who is currently winning the election? As we will see, the answer is $\frac{a-b}{a+b}$. To prove this, for any positive integer $k$, let $S_{k}$ be the number of votes for candidate $1$, minus the number of votes for candidate $2$, after $k$ votes have been counted. Then, define $X_{k}\colonequals S_{c-k}/(c-k)$. Show that $X_{0},X_{1},\ldots$ is a martingale with respect to $S_{c},S_{c-1},S_{c-2},\ldots$. Then, let $T$ be such that $T=\min\{0\leq k\leq c\colon X_{k}=0\}$, or $T=c-1$ if no such $k$ exists. Apply the Optional Stopping theorem to $X_{T}$ to deduce the result.
\end{exercise}
%
%\begin{exercise}
%Let $(X_{0},X_{1},\ldots)$ be the simple random walk on $\Z$. For any $n\geq0$, define $M_{n}= X_{n}^{3}-3nX_{n}$. Show that $(M_{0},M_{1},\ldots)$ is a martingale with respect to $(X_{0},X_{1},\ldots)$
%
%Now, fix $m>0$ and let $T$ be the first time that the walk hits either $0$ or $m$. Show that, for any $0< k\leq m$,
%$$\E_{k}(T\,|\, X_{T}=m)=\frac{m^{2}-k^{2}}{3}.$$
%\end{exercise}

\begin{exercise}
Let $X_{1},X_{2},\ldots$ be i.i.d. random variables with $\E X_{i}=0$ for every $i\geq1$. Suppose there exists $\sigma>0$ such that $\mathrm{Var}(X_{i})=\sigma^{2}$ for all $i\geq1$. For any $n\geq1$, let $S_{n}=X_{1}+\cdots+X_{n}$. Show that $S_{n}^{2}-n\sigma^{2}$ is a martingale with respect to $X_{1},X_{2},\ldots$. (We let $X_{0}=0$.) Let $a>0$. Let $T=\min\{n\geq1\colon \abs{S_{n}}\geq a\}$. Using the Optional Stopping Theorem, show that $\E T\geq a^{2}/\sigma^{2}$. Observe that a simple random walk on $\Z$ has $\sigma^{2}=1$ and $\E T=a^{2}$ when $a\in\Z$.
\end{exercise}
%
%\begin{exercise}\label{exercise13}
%Show that $\cosh(x)\leq e^{x^{2}/2}$, $\forall$ $x\in\R$.
%\end{exercise}
%
%
% NOTE(review): the statement of the following commented-out exercise was
% garbled in the source (the text between ``$0$'' and ``$c>0$'' was lost);
% reconstructed from context --- please verify before un-commenting.
%\begin{exercise}[\embolden{Chernoff Inequality}]\label{exercise28}
%Let $0<p<1$ and let $G=G(n,p)$ be the random graph on $n$ vertices, where each edge appears independently with probability $p$. Let $d$ denote the expected degree of a vertex of $G$.
%\begin{itemize}
%\item Show that there exists $c>0$ such that the following holds. Assume $p\geq\frac{c\log n}{n}$. Then with probability larger than $.9$, all vertices of $G$ have degrees in the range $(.9d,1.1d)$. (Hint: first consider a single vertex, then use the union bound over all vertices.)
%\end{itemize}
%\end{exercise}
%
\begin{exercise}[\embolden{Azuma's Inequality}]
In this exercise, we prove a generalization of the Hoeffding inequality to martingales. Let $c_{1},c_{2},\ldots>0$. Let $(X_{0},X_{1},\ldots)$ be a martingale. Assume that $\abs{X_{n}-X_{n-1}}\leq c_{n}$ for all $n\geq1$. Then for any $t>0$,
$$\P(\abs{X_{n}-X_{0}}>t)\leq 2 e^{-\frac{t^{2}}{2\sum_{i=1}^{n}c_{i}^{2}}}.$$
Prove this inequality using the following steps.
\begin{itemize}
\item Let $\alpha>0$. Show that $\E e^{\alpha(X_{n}-X_{0})}=\E [e^{\alpha(X_{n-1}-X_{0})}\E(e^{\alpha(X_{n}-X_{n-1})}|\mathcal{F}_{n-1})]$. (When $Y$ is a random variable, we denote $\E (Y|\mathcal{F}_{n})\colonequals g(X_{0},\ldots,X_{n})$ where $g(x_{0},\ldots,x_{n})\colonequals\E(Y|X_{0}=x_{0},\ldots,X_{n}=x_{n})$ for any $x_{0},\ldots,x_{n}\in\R$.)
\item For any $y\in[-1,1]$, show that $e^{\alpha c_{n}y}\leq\frac{1+y}{2}e^{\alpha c_{n}}+\frac{1-y}{2}e^{-\alpha c_{n}}$.
\item Take the conditional expectation of this inequality when $y= (X_{n}-X_{n-1})/c_{n}$.
\item Now argue as in Hoeffding's inequality.
\end{itemize}
Using Azuma's inequality, deduce \textbf{McDiarmid's Inequality}. Let $X_{1},\ldots,X_{n}$ be independent real-valued random variables. Let $c_{1},c_{2},\ldots>0$. Let $f\colon\R^{n}\to\R$ be a measurable function such that, for any $1\leq m\leq n$,
$$\sup_{x_{1},\ldots,x_{m-1},x_{m},x_{m}',x_{m+1},\ldots,x_{n}\in\R}\abs{f(x_{1},\ldots,x_{n})-f(x_{1},\ldots,x_{m-1},x_{m}',x_{m+1},\ldots,x_{n})}\leq c_{m}.$$
Then, for any $t>0$,
$$\P(\abs{f(X_{1},\ldots,X_{n})-\E f(X_{1},\ldots,X_{n})}>t)\leq 2 e^{-\frac{t^{2}}{2\sum_{i=1}^{n}c_{i}^{2}}}.$$
% NOTE(review): \ref{HoeffdingIneq} is not defined in this file; it
% presumably refers to the accompanying lecture notes --- confirm.
(Note that a linear function $f$ recovers Hoeffding's inequality, Theorem \ref{HoeffdingIneq}.)
\end{exercise} \end{document}