\documentclass[11pt]{article}
\usepackage{amsmath}
\usepackage{amsfonts}
\usepackage{hyperref}
\usepackage{bbm}
\newtheorem{define}{Definition}
%\newcommand{\Z}{{\bf Z}}
%\usepackage{psfig}
\oddsidemargin=0.15in
\evensidemargin=0.15in
\topmargin=-.5in
\textheight=9in
\textwidth=6.25in
\input{defs.tex}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Definitions: If you want to add definitions commands and macros to the
%% scribe, please do it in this space
\def\eps{\varepsilon}
\def\NAE{{\rm NAE}}
\def\ind{\mathbbm{1}}
\def\pmset{\{\pm 1\}}
\def\binset{\{0,1\}}
\def\one{\mathbf{\ind}}
\def\til{_{\symbol{126}}}
%\def\through{x_1,\dots,x_n}
\newcommand{\thr}[2][n]{#1_1,\dots,#1_{#2}}
%\newcommand{\through[1][2]}{#1_1,\dots,#1_#2}
\newcommand\likeip[1]{\ll {#1} \gg}
\newcommand{\boldpar}[1]{\bigskip\par\noindent\textbf{#1}}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{document}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Heading: In the parentheses below, write the sequential number of the
%% talk, the talk date, the name of the speaker, and the name or names of
%% whoever wrote the scribe (seperated by commas).
\lecture{14}{June 4, 2008}{Guy Kindler}{Barak Zackay}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Body: from here on it's your territory - write your scribe here.
\section{In Previous Lectures}
In the previous talk we proved Friedgut's theorem:
\begin{theorem} [F'95]
\label{F'95} If $I(f) \leq k$ then $f$ is an $(\eps,\frac{C^{k/\eps}}{\eps^3})$-junta.
\end{theorem}
We also saw that this result is not so robust: if we look at an altered function
(a function whose values were flipped with some probability $\alpha$),
then the influence jumps to values for which the theorem becomes meaningless.
\begin{claim}
If $I(f)\leq k$ and $f'(x) = \begin{cases} f(x) & \text{with probability } 1-\alpha \\ -f(x) & \text{with probability } \alpha \end{cases}$ then $I(f') \geq \Theta(\alpha n).$
\end{claim}
There are robust analogues of Theorem~\ref{F'95}; for example, in a paper titled ``Tails of the Fourier spectrum of Boolean functions'' Bourgain proves such an analogue, which is a generalization of FKN to the $k$-th degree.
We also proved the KKL theorem,
\begin{theorem}[KKL 88']
\label{KKL}
If $\forall i \;\; I_i(f) \leq \delta$ then $I(f) \geq \frac{1-\hat{f}(\emptyset)^2}{20} \log(\frac{1}{\delta})$.
\end{theorem}
We also defined the analogous influence for the cube,
\begin{definition}
Let $f:\{\pm1\}^n\to\{\pm1\}$.
$I_i^\prime(f) = \Pr_{x\setminus \{i\}}{\left(\text{$f(x)$ is not constant in $x_i$} \right)}$
\begin{equation}
I^\prime(f) = \sum_i{I_i^\prime(f)}
\end{equation}
\end{definition}
After we have a definition of influence on the cube,
we also introduced a version of KKL to the cube
\begin{theorem}[BKKKL]
\label{BKKKL}
Let $f:[0,1]^n \rightarrow \pmset$ and let $t:=E(f)$. Then $ \exists i $ s.t.\ $I'_i(f) \geq C(1-t^2)\frac{\log(cn)}{n}$
\end{theorem}
Which allows us to have a version of KKL for every $\mu_p^{(n)}$.
\textbf{Reminder:}
$\mu^{(n)}_p(x) = p^{\sharp\{ i : x_i=-1 \}}(1-p)^{n- \sharp\{ i : x_i=-1 \}}$ and
$\mu^{(n)}_p=\prod_{i=1}^n \mu^{(1)}_p.$
Let's make this definition more generic:
\begin{definition}
$\mu_{\overline{p} = ( p_1,p_2,\dots ,p_n ) }^{(n)} = \prod_{i=1}^n\mu_{p_i}^{(1)}$
$\mu _{\overline{p}}^{(n)}(x) = \prod_{i=1}^{n} p_i^{\ind \left\{ x_i=-1 \right\} }(1-p_i)^{ \ind \left\{ x_i=1 \right\}}$
\end{definition}
\begin {definition}
Let $f$ be a boolean function and define $I_i^{\overline{p}}(f):=\Pr_{x \til \mu_{\overline{p}}^{(n)}}(f(x_{-i},\cdot)\ \text{is not constant})$, and define
$ I_i^p(f) = I_i^{p,\dots,p}(f) $ and also
$ I^{\overline{p}}(f) = \sum_i I_i^{\overline{p}}(f) $.
\end{definition}
\section{Graph properties and boolean functions}
In this lecture we will try to apply our theory of boolean functions to graph properties; we will
define graph properties in the language of boolean functions and then we will immediately get some interesting results
on graph properties.
Let's begin with a useful definition,
\begin{definition}
$f$ is monotone if $(x\leq y) \Rightarrow (f(x) \leq f(y))$
\end{definition}
Let us define graph properties in the language of boolean functions:
\begin{definition}
$f$ is a graph property if :
\begin {enumerate}
\item $\exists$ 1-1 association between coordinates and edges of a complete graph.
\item $f$ is invariant under vertex permutations (so $f$ is transitive, but isn't necessarily symmetric).
\end {enumerate}
\end{definition}
%\textsl{Assume:} $f:\pmset \rightarrow \pmset$ is a monotone graph property.
Now let's define some useful functions, $\varphi$ and $\psi$
\begin{definition}
$$ \varphi_f(\overline{p}) = \Pr_{x \til \mu_{\overline{p}}^{(n)}}{(f(x)=-1)} $$
$$ \psi_f(p) = \varphi_f(p,\dots,p) $$
\end{definition}
\subsection{Derivatives of $\varphi$ and $\psi$}
We now compute the derivatives of $\varphi$ and $\psi$.
\begin{align*}
\varphi_f(\overline{p}) &= \sum_{x\in\pmset^n} {\mu_{\overline{p}}(x)\one_{\{f(x)=-1\}}} \\
&= \sum_{\thr[x]{n-1}}{\mu^{(n-1)}_{(\thr[p]{n-1})}(\thr[x]{n-1})\left[ p_n \one _{ \{ f(\thr[x]{n-1},-1)=-1\} }+(1-p_n)\one_{ \{ f(\thr[x]{n-1},1)=-1 \}}\right] }
\end{align*}
So we have:
$$\frac{\partial\varphi_f}{\partial p_n}(\overline p) = \sum_{\thr[x]{n-1}}{\mu_{(\thr[p]{n-1})}^{(n-1)}(\thr[x]{n-1})\one_{ \{ f(\thr[x]{n-1},\cdot)\ \text{is not constant} \} } } = I_n^{\overline{p}}(f)$$
and similarly:
$$\forall i \quad \frac{\partial\varphi_f}{\partial p_i}(\overline{p}) = I_i^{\overline{p}}(f).$$
We have (denoting $\lambda(p) = (p,\dots,p)$)
$$(\psi_f(p))' = (\varphi_f\circ\lambda)'(p) = \left< \nabla\varphi_f(\lambda(p)),\lambda'(p)\right> =$$
$$= \left< (I_1^{(p,\dots,p)}(f),\dots,I_n^{(p,\dots,p)}(f)),(1,1,\dots,1) \right> = \sum_i{I_i^p(f)} = I^p(f)$$
\subsection{Phase transition in graph properties}
Now, we will prove a result about phase changes in graph properties.
Let's assume we have a graph property $P$, such as ``the graph $G$ has a triangle''.
If we consider this graph property over graphs of size 100 with a 0.012 probability of having an edge, then the probability for having a triangle is about 0.25.
But if we look at the same property $P$ over graphs of the same
size but with a probability of 0.02 for having each edge, then the probability of having a triangle is about 0.75.
In fact this phenomenon occurs for all graph properties: there is a very swift change, over a short interval of
edge probability, between the phase where the property almost never holds and the phase where it almost always holds.
\begin{theorem}[FK]
\label{FK}
Let $f$ be a monotone graph property, and let $\psi_f$ be as defined above. Then
$\psi_f(r)\geq\eps$ implies $\psi_f(r+\frac{\log(\frac{1}{2\eps})}{c\log(n)}) \geq 1-\eps $.
\end{theorem}
\begin{proof}
From [BKKKL] we have :
$$\forall p \; \exists i \; I_i^p(f) \geq C'(1-(\E_{\mu_p}(f))^2)\frac{\log(n)}{n}.$$
But $f$ is transitive, so $I^p(f) \geq C'(1-(\E_{\mu_p}(f))^2)\log(n) $.
Now, let's express the expectation in terms of $\psi_f(p)$,
$$\E_{\mu_p}(f) = \Pr_{\mu_p}(f = 1) - \Pr_{\mu_p}(f=-1) = 1-\psi_f(p)-\psi_f(p) = 1-2\psi_f(p).$$
This implies
$$ (\psi_f(p))' = I^p(f) \geq C'4\psi_f(p)(1-\psi_f(p))\log(n) = C''\psi_f(p)(1-\psi_f(p))\log(n)$$
(where $C''$ is a constant).
Suppose $\psi_f(p) \leq \frac{1}{2}$.
Then from the inequality above we get: $(\psi_f(p))'\geq \frac{C''}{2}\psi_f(p)\log(n)$.
$$(\log(\psi_f(p)))' = \frac{(\psi_f(p))'}{\psi_f(p)} \geq C\log(n)$$
We know that the derivative of $\log(\psi_f(p))$ is higher than $C\log(n)$, so
$$\log(\psi_f(r+\frac{\log(\frac{1}{2\eps})}{c\log(n)})) \geq \log(\psi_f(r))+\log(\frac{1}{2})-\log(\eps) \geq \log(\frac{1}{2}) $$
From the monotonicity of the $\log$ function we know that
if $ a \geq b $ then $ \log(a) \geq \log(b)$,
and therefore
$$\psi_f(r+\frac{\log(\frac{1}{2\eps})}{c\log(n)}) \geq \frac{1}{2}.$$
If we repeat the same argument on the negative property $\tilde{f}$ ($\tilde{f}(G) = -f(G)$)
we get that $\psi_{\tilde{f}}(r) \leq \eps$ implies $\psi_{\tilde{f}}(r+\frac{\log(\frac{1}{2\eps})}{c\log(n)}) \geq \frac{1}{2}$.
We can translate it to: $\psi_{f}(r) \geq 1-\eps$ implies $\psi_{f}(r+\frac{\log(\frac{1}{2\eps})}{c\log(n)}) \leq \frac{1}{2}$.
So by combining the two arguments we get $\psi_f(r+\frac{2\log(\frac{1}{2\eps})}{C\log(n)}) \geq 1-\eps$ .
\end{proof}
If $f$ is a graph property (not necessarily symmetric) it is possible to reach a bound of $\frac{1}{\log^2(n)}$
(instead of $\frac{1}{\log(n)}$), and it turns out that this bound is tight (an example for such a graph property is the property: "there exists a triangle in the Graph").
With similar arguments it is possible to prove that every monotone function (not necessarily transitive)
is close to a junta on most values of p.
END OF COURSE. NOW IT'S TIME FOR VACATION!!!
\end{document}