%------------------------------------------------------------------------------
%\documentclass[reqno]{amsart}
\documentclass[12pt]{amsart}
%\setcounter{page}{6}
\usepackage[top=1in, bottom=1in, left=1in, right=1in]{geometry}
\usepackage[colorlinks=true, urlcolor=blue]{hyperref}
\usepackage{amssymb}
\usepackage{amsmath,mathrsfs}
%\oddsidemargin -0in \evensidemargin .5in
%\topmargin=1.25in
%\headheight 10pt \headsep 10pt \footheight 10pt \footskip 24pt
%\textheight 10in \textwidth 6.5in \columnsep 10pt \columnseprule 0pt
%\font\namefont=cmr10 scaled\magstep2
\font\namefont=cmr8 scaled\magstep2
\newcommand{\myindent}{\leftskip=.4in}
%\voffset=-.75in
\parskip=11pt %extra vertical distance for new paragraph
\parindent=0in
%\newtheorem{theorem}{Theorem}[section]
%\newtheorem{lemma}[theorem]{Lemma}
%\newtheorem{lemma}{Lemma}[section]
%\newtheorem{theorem}{Theorem}[section]
%\newtheorem{lemma}[theorem]{Lemma}
%\newtheorem{prop}[theorem]{Proposition}
%\newtheorem{cor}[theorem]{Corollary}
%\newtheorem{conj}{Conjecture}
\usepackage{graphicx}
\usepackage{color}
\usepackage{subfigure}
\usepackage{amssymb}
\usepackage{amsmath}
\usepackage{colonequals}
\usepackage{hyperref}
%\usepackage{showlabels}
%\usepackage[all]{xypic}
%\entrymodifiers={+!!<0pt,\fontdimen22\textfont2>}
\theoremstyle{definition}
\newtheorem{exercise}{Exercise}
\newtheorem{remark}{Remark}
\renewcommand{\setminus}{\smallsetminus}
\addtolength{\footskip}{17pt}
%\numberwithin{table}{section}
\renewcommand{\subset}{\subseteq}
\renewcommand{\supset}{\supseteq}
\renewcommand{\epsilon}{\varepsilon}
\newcommand{\abs}[1]{\left|#1\right|} % Absolute value notation
\newcommand{\absf}[1]{|#1|} % small absolute value signs
\newcommand{\vnorm}[1]{\left|\left|#1\right|\right|} % norm notation
\newcommand{\vnormf}[1]{||#1||} % norm notation, forced to be small
\newcommand{\im}[1]{\mbox{im}#1} % Pieces of English for math mode
\newcommand{\tr}[1]{\mbox{tr}#1}
\newcommand{\Proj}[1]{\mbox{Proj}#1}
\newcommand{\Vol}[1]{\mbox{Vol}#1}
\newcommand{\Z}{\mathbf{Z}} % Blackboard notation
\newcommand{\N}{\mathbf{N}}
\newcommand{\E}{\mathbf{E}}
\newcommand{\F}{\mathbb{F}}
\renewcommand{\P}{\mathbf{P}}
\newcommand{\R}{\mathbf{R}}
\newcommand{\C}{\mathbf{C}}
\newcommand{\Q}{\mathbf{Q}}
\newcommand{\figoneawidth}{.5\textwidth} % Image formatting parameters
\newcommand{\lbreak}{\\} % Linebreak
\newcommand{\italicize}[1]{\textit {#1}} % formatting commands for bibliography
%\newcommand{\embolden}[1]{\textbf {#1}}
\newcommand{\embolden}[1]{{#1}}
\newcommand{\undline}[1]{\underline {#1}}
\newcommand{\e}{\varepsilon}
\renewcommand{\epsilon}{\varepsilon}
%\renewcommand{\colonequals}{=}
\thispagestyle{empty}
\begin{document}
\bigskip

Graduate Probability \hfill Steven Heilman\\
\noindent\rule{6.5in}{0.4pt}
%\vspace{.2cm}

Please provide complete and well-written solutions to the following
exercises.  Due March 22, at the beginning of class.

\vspace{.5cm}

\begin{center}
{\Large Homework 7}
\end{center}

\vspace{.5cm}

\begin{exercise}\label{exercise13}
Show that $\cosh(x)\leq e^{x^{2}/2}$, $\forall$ $x\in\R$.
\end{exercise}

\begin{exercise}[\embolden{Chernoff Inequality}]\label{exercise28}
Let $0<p<1$.
% NOTE(review): the source text is truncated here -- everything between
% ``Let $0'' and the fragment ``0$'' opening the next visible line was lost
% (apparently an extraction artifact that dropped text between < and >).
% The statement of the Chernoff inequality, and the random-graph setup for
% the item completed on the next line, must be restored from the original
% document.
% NOTE(review): the beginning of this fragment is missing from the source;
% it is the tail of an item about vertex degrees in a random graph $G$ on
% $n$ vertices with edge probability $p$ and expected degree $d$.  The
% lead-in below is a minimal reconstruction -- verify against the original.
\begin{itemize}
\item Show that there exists a constant $c>0$ such that the following
holds.  Assume $p\geq\frac{c\log n}{n}$.  Then with probability larger
than $.9$, all vertices of $G$ have degrees in the range $(.9d,1.1d)$.
(Hint: first consider a single vertex, then use the union bound over all
vertices.)
\end{itemize}
\end{exercise}

%\snote{Give Vershynin application?}
%
%\snote{Can we do Khintchine inequality here?}

\begin{exercise}[\embolden{Khintchine Inequality}]
Let $0<p<\infty$.
% NOTE(review): the remainder of the Khintchine inequality statement is
% truncated in the source; restore it from the original document.
% NOTE(review): this line is a heavily truncated remnant of the Khintchine
% inequality exercise; only the fragments below survive, completed
% minimally.  Restore the full statement from the original document.
for some constants $A_{p},B_{p}>0$.  For the $B_{p}$ inequality with $0<p<2$,
% NOTE(review): the source is truncated again here; the closing of this
% exercise was lost, so it is restored below.
\end{exercise}
\begin{exercise}
% NOTE(review): the beginning of this exercise was lost in the source; the
% setup below is reconstructed from the hint and the conclusion (the
% classical failure of the strong law when $\E\abs{X_{1}}=\infty$) --
% verify against the original document.
Let $X_{1},X_{2},\ldots$ be i.i.d. random variables with
$\E\abs{X_{1}}=\infty$, and for any $n\geq1$ let
$S_{n}\colonequals X_{1}+\cdots+X_{n}$.  Show that
$\P(\abs{X_{n}}>n\,\,\mathrm{for}\,\,\mathrm{infinitely}\,\,\mathrm{many}\,\,n\geq1)=1$.
And $\P(\lim_{n\to\infty}\frac{X_{1}+\cdots+X_{n}}{n}\in(-\infty,\infty))=0$.
(Hint: show $\sum_{n=1}^{\infty}\P(\abs{X_{n}}>n)=\infty$, then apply the
second Borel-Cantelli Lemma.  Write
$\frac{S_{n}}{n}-\frac{S_{n+1}}{n+1}=\frac{S_{n}}{n(n+1)}-\frac{X_{n+1}}{n+1}$,
and consider what happens to both sides on the set where
$\lim_{n\to\infty}\frac{S_{n}}{n}\in\R$.)
\end{exercise}

Also, unfortunately the strong law cannot hold for triangular arrays.

\begin{exercise}
Let $X$ be a random variable taking values in the natural numbers with
$\P(X=n)= \frac{1}{\zeta(3)} \frac{1}{n^3}$, where
$\zeta(3) \colonequals \sum_{m=1}^\infty \frac{1}{m^3}$.
\begin{itemize}
\item Show that $X$ is absolutely integrable.
\item For any $n\geq1$, let $X_{n,1},\ldots,X_{n,n}\colon\Omega\to\R$ be
independent copies of $X$.  Show that the random variables
$\frac{X_{n,1}+\dots+X_{n,n}}{n}$ are almost surely unbounded.
(Hint: for any constant $c$, show that $\frac{X_{n,1}+\dots+X_{n,n}}{n} > c$
occurs with probability at least $\epsilon/n$ for some $\epsilon > 0$
depending on $c$.  Then use the second Borel-Cantelli lemma.)
\end{itemize}
\end{exercise}

\begin{exercise}[\embolden{Second Borel-Cantelli Lemma}]
Let $A_{1},A_{2},\ldots$ be independent events with
$\sum_{n=1}^{\infty}\P(A_{n})=\infty$.  Then
$\P(A_{n}\,\,\mathrm{occurs}\,\,\mathrm{for}\,\,\mathrm{infinitely}\,\,\mathrm{many}\,\,n\geq1)=1$.
(Hint: using $1-x\leq e^{-x}$ for any $x\in\R$, show
$\P(\cap_{n=s}^{t}A_{n}^{c})\leq \exp(-\sum_{n=s}^{t}\P(A_{n}))$, let
$t\to\infty$ to conclude $\P(\cup_{n=s}^{\infty}A_{n})=1$ for all $s\geq1$,
then let $s\to\infty$.)
\end{exercise}

\begin{exercise}\label{exercise23}
Let $X,X_{1},X_{2},\ldots$ and let $Y,Y_{1},Y_{2},\ldots$ be random
variables with values in $\R$.
\begin{itemize}
\item[(i)] Assume that $X$ is constant almost surely.
Show that $X_{1},X_{2},\ldots$ converges to $X$ in distribution if and only if $X_{1},X_{2},\ldots$ converges to $X$ in probability. \item[(ii)] Prove this Lemma from the notes: Let $\mu_{1},\mu_{2},\ldots$ be a sequence of probability measures on $\R$. Then any subsequential limit of the sequence (with respect to vague convergence) is a probability measure if and only if $\mu_{1},\mu_{2},\ldots$ is \textbf{tight}: $\forall$ $\epsilon>0$, $\exists$ $m=m(\epsilon)>0$ such that $$\limsup_{n\to\infty}(1-\mu_{n}([-m,m]))\leq\epsilon.$$ \item[(iii)] Suppose that $X_{1},X_{2},\ldots$ converges in distribution to $X$. Show there exist random variables $Z,Z_{1},Z_{2},\ldots\colon\Omega\to\R$ such that $\mu_{Z}=\mu_{X}$, $\mu_{Z_{n}}=\mu_{X_{n}}$ for any $n\geq1$, and such that $Z_{1},Z_{2},\ldots$ converges almost surely to $Z$. (Hint: use the sample space $\Omega=[0,1]$ and using an exercise from a previous homework, represent each random variable on $\Omega$ as the ``inverse'' of its cumulative distribution function.) \item[(iv)] (Slutsky's Theorem) Suppose $X_{1},X_{2},\ldots$ converges in distribution to $X$ and $Y_{1},Y_{2},\ldots$ converges in probability to $Y$. Assume $Y$ is constant almost surely. Show that $X_{1}+Y_{1},X_{2}+Y_{2},\ldots$ converges in distribution to $X+Y$. Show also that $X_{1}Y_{1},X_{2}Y_{2},\ldots$ converges in distribution to $XY$. (Hint: either use (iii) or use (ii) to control error terms.) What happens if $Y$ is not constant almost surely? \item[(v)] (Fatou's lemma) If $g\colon\R\to[0,\infty)$ is continuous, and if $X_{1},X_{2},\ldots$ converges in distribution to $X$, show that $\liminf_{n\to\infty}\E g(X_{n})\geq \E g(X)$. \item[(vi)] (Bounded convergence) If $g\colon\R\to\C$ is continuous and bounded, and if $X_{1},X_{2},\ldots$ converges in distribution to $X$, show that $\lim_{n\to\infty}\E g(X_{n})= \E g(X)$. 
\item[(vii)](Dominated convergence) If $X_{1},X_{2},\ldots\colon\Omega\to\R$
converges in distribution to $X$, and if there exists a random variable
$Y\colon\Omega\to[0,\infty)$ with $\abs{X_{n}}\leq Y$ for all $n\geq1$ and
$\E Y<\infty$, show that $\lim_{n\to\infty}\E X_{n}=\E X$.
\end{itemize}
\end{exercise}

\begin{exercise}[\embolden{Portmanteau Theorem}]
Let $X,X_{1},X_{2},\ldots$ be random variables with values in $\R$.
Show that the condition ($X_{1},X_{2},\ldots$ converges in distribution to
$X$) is equivalent to the following three statements:
\begin{itemize}
\item For any closed $K\subset\R$,
$\limsup_{n \to \infty} \P(X_{n}\in K) \leq\P(X\in K)$.
% NOTE(review): the inequality below read \leq in the source; for open
% sets the Portmanteau theorem asserts \geq (it is the complement of the
% closed-set bound above), so it has been corrected.
\item For any open $U\subset\R$,
$\liminf_{n \to \infty} \P(X_{n}\in U) \geq\P(X\in U)$.
\item For any Borel set $E\subset\R$ whose topological boundary
$\partial E$ satisfies $\P(X\in\partial E)=0$,
$\lim_{n \to \infty}\P(X_{n}\in E)=\P(X\in E)$.
\end{itemize}
(Hint: Urysohn's Lemma might be helpful.)
\end{exercise}

\begin{exercise}
Let $f,g,h\colon\R\to\R$ be measurable functions.  Assume that
$\int_{\R}\abs{f(x)}dx,$ $\int_{\R}\abs{g(x)}dx<\infty$ and
$\int_{\R}\abs{h(x)}dx<\infty$.  Show that
$\int_{-\infty}^{\infty}\abs{(g*h)(t)}dt<\infty$.  Consequently,
$(g*h)(t)\in\R$ almost surely for $t\in\R$ (with respect to Lebesgue
measure on $\R$).  Then, show that convolution is associative and
commutative.  That is, $g*h=h*g$ and $f*(g*h)=(f*g)*h$ almost surely.
\end{exercise}

\begin{exercise}\label{exercise25}
% NOTE(review): added the independence assumption (necessary for the
% convolution argument and for the variance to be $a^{2}+b^{2}$) and
% quantified $a,b$, which were otherwise undefined.
Using convolution, show that if $X,Y$ are independent standard Gaussian
random variables and $a,b\in\R$, then $aX+bY$ is a Gaussian random
variable with mean $0$ and variance $a^{2}+b^{2}$.
\end{exercise}

\begin{exercise}\label{exercise20}
Let $X,Y,Z$ be independent and uniformly distributed on $[0,1]$.
Note that $f_{X}$ is not a continuous function.  Using convolution,
compute $f_{X+Y}$.  Draw $f_{X+Y}$.  Note that $f_{X+Y}$ is a continuous
function, but it is not differentiable at some points.  Using convolution,
compute $f_{X+Y+Z}$.
Draw $f_{X+Y+Z}$.  Note that $f_{X+Y+Z}$ is a differentiable function, but
it does not have a second derivative at some points.  Make a conjecture
about how many derivatives $f_{X_{1}+\cdots+X_{n}}$ has, where
$X_{1},\ldots,X_{n}$ are independent and uniformly distributed on $[0,1]$.
You do not have to prove this conjecture.  The idea of this exercise is
that convolution is a kind of average of functions.  And the more
averaging you do, the more derivatives $f_{X_{1}+\cdots+X_{n}}$ has.
Lastly, $f_{X_{1}+\cdots+X_{n}}$ should resemble a Gaussian density when
$n$ becomes large.  So, we should be able to guess at a formulation of the
Central Limit Theorem, at least for i.i.d. random variables with density.
\end{exercise}

\begin{exercise}
Construct two random variables $X,Y$ such that $X$ and $Y$ are each
uniformly distributed on $[0,1]$, and such that $\P(X+Y=1)=1$.
% Y=1-X
Then construct two random variables $W,Z$ such that $W$ and $Z$ are each
uniformly distributed on $[0,1]$, and such that $W+Z$ is uniformly
distributed on $[0,2]$.
%X=Y
(Hint: there is a way to do each of the above problems with about one line
of work.  That is, there is a way to solve each problem without working
very hard.)
\end{exercise}

\end{document}