Commit 815170b9 authored by Michael Keller

Almost make int good proof

parent 91340630
@@ -129,201 +129,88 @@ statement with a condition on $R$ might be helpful:
met to ensure that swapping in either direction
even among neighboring pixels is never detrimental.
Let us consider the change in the score function $\Delta S$
when swapping $c_\alpha, c_\beta \in \Cps$ between
two neighboring pixels $u$ and $v$. Let $F$ denote the field
before the swap and $F'$ after. Further let
$p_c$ denote the amount of crop $c$ within pixel $p$
in the field before the swap; $p'_c$ is defined analogously
for the field after the swap. Lastly, let $N(p)$ denote
the set containing all of pixel $p$'s neighbors.
\begin{align*}
\Delta S
&= S(F', R) - S(F, R)\\
&= \sum_{x \in F}
\sum_{y \in N(x)}
\sum_{c_i \in \Cps}
\sum_{c_j \in \Cps}
R(c_i, c_j) \cdot x'_{c_i} \cdot y'_{c_j}\\
&- \sum_{x \in F}
\sum_{y \in N(x)}
\sum_{c_i \in \Cps}
\sum_{c_j \in \Cps}
R(c_i, c_j) \cdot x_{c_i} \cdot y_{c_j}\\
&= \sum_{x \in F}
\sum_{y \in N(x)}
\sum_{c_i \in \Cps}
\sum_{c_j \in \Cps}
R(c_i, c_j) \cdot (x'_{c_i} \cdot y'_{c_j} - x_{c_i} \cdot y_{c_j})
\end{align*}
Because we are only swapping between two pixels, every term
in which neither $x$ nor $y$ is one of $u$ and $v$ satisfies
$x'_{c_i} \cdot y'_{c_j} = x_{c_i} \cdot y_{c_j}$ and therefore vanishes.
We only need to consider the terms involving at least one of $u$ and $v$.
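Before simplifying further, the algebra above can be checked numerically.
The following is a minimal Python sketch, not part of the thesis code:
the field layout (a dict mapping each pixel to its per-crop amounts),
$R$ as a dict keyed by crop pairs, and the neighbor map are assumptions
made purely for illustration.
\begin{verbatim}
def score(field, R, neighbors):
    # Brute-force S(F, R): sum R(c_i, c_j) * x_{c_i} * y_{c_j} over all
    # pixels x, all neighbors y of x, and all crop pairs (c_i, c_j).
    # Assumes every pixel dict carries an entry for every crop.
    total = 0.0
    for x, crops_x in field.items():
        for y in neighbors[x]:
            for c_i, x_ci in crops_x.items():
                for c_j, y_cj in field[y].items():
                    total += R[c_i, c_j] * x_ci * y_cj
    return total

def delta_s(field, R, neighbors, u, v, c_a, c_b, lam):
    # Move lam of crop c_a from u to v and lam of c_b from v to u,
    # then compare scores: this is Delta S for the swap.
    swapped = {p: dict(crops) for p, crops in field.items()}
    swapped[u][c_a] -= lam; swapped[u][c_b] += lam
    swapped[v][c_a] += lam; swapped[v][c_b] -= lam
    return score(swapped, R, neighbors) - score(field, R, neighbors)
\end{verbatim}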
Because the score change generated with the neighboring set
$N(u) \cup N(v) \setminus \{u, v\}$
(the gray pixels in the sketch below) is linear in the swapped
amount $\lambda$, as these neighboring pixels stay constant,
we will ignore those terms for now.
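To see this, note that any such gray pixel $y$ keeps its crops,
$y' = y$, so each term it contributes reduces to a difference in a
single factor, for example
\begin{align*}
u'_{c_\alpha} \cdot y'_{c_j} - u_{c_\alpha} \cdot y_{c_j}
= (u'_{c_\alpha} - u_{c_\alpha}) \cdot y_{c_j}
= -\lambda \cdot y_{c_j},
\end{align*}
with the substitutions for $u'$ and $v'$ made explicit below.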
\FloatBarrier
\begin{figure}[h]
\centering
\begin{tikzpicture}[->,>=stealth',auto,node distance=10mm,thick,main node/.style={rectangle,draw,font=\sffamily\Large}]
\node[main node] (1) {U};
\node[main node] (2) [right of=1] {V};
\node[main node, fill=black!20,] (3) [below of=1] {N};
\node[main node, fill=black!20,] (4) [right of=3] {N};
\node[main node, fill=black!20,] (5) [right of=4] {N};
\node[main node, fill=black!20,] (6) [above of=5] {N};
\node[main node, fill=black!20,] (7) [above of=6] {N};
\node[main node, fill=black!20,] (8) [left of=7] {N};
\node[main node, fill=black!20,] (9) [left of=8] {N};
\node[main node, fill=black!20,] (10) [left of=9] {N};
\node[main node, fill=black!20,] (11) [below of=10] {N};
\node[main node, fill=black!20,] (12) [below of=11] {N};
\path[every node/.style={font=\sffamily\small}]
(1) edge[bend left] node [above] {$c_\alpha$} (2)
(2) edge[bend left] node [below] {$c_\beta$} (1);
\end{tikzpicture}
\end{figure}
\FloatBarrier
The interesting score change is the following quadratic one
between the pixels $u$ and $v$ (the mirrored terms with the
roles of $u$ and $v$ exchanged behave analogously and are
omitted here):
\begin{align*}
\Delta S
&= \sum_{c_i \in \Cps}
\sum_{c_j \in \Cps}
R(c_i, c_j) \cdot (u'_{c_i} \cdot v'_{c_j} - u_{c_i} \cdot v_{c_j})\\
&= \sum_{c_i \in \{c_\alpha, c_\beta\}}
\sum_{c_j \in \{c_\alpha, c_\beta\}}
R(c_i, c_j) \cdot (u'_{c_i} \cdot v'_{c_j} - u_{c_i} \cdot v_{c_j})\\
&+ \sum_{c_i \in \{c_\alpha, c_\beta\}}
\sum_{c_j \in \Cps \setminus \{c_\alpha, c_\beta\}}
R(c_i, c_j) \cdot (u'_{c_i} \cdot v'_{c_j} - u_{c_i} \cdot v_{c_j})\\
&+ \sum_{c_i \in \Cps \setminus \{c_\alpha, c_\beta\}}
\sum_{c_j \in \{c_\alpha, c_\beta\}}
R(c_i, c_j) \cdot (u'_{c_i} \cdot v'_{c_j} - u_{c_i} \cdot v_{c_j})\\
&+ \sum_{c_i \in \Cps \setminus \{c_\alpha, c_\beta\}}
\sum_{c_j \in \Cps \setminus \{c_\alpha, c_\beta\}}
R(c_i, c_j) \cdot (u'_{c_i} \cdot v'_{c_j} - u_{c_i} \cdot v_{c_j})
\end{align*}
We can now use that the swap changes the crop amounts as follows:
\begin{align*}
u'_{c_\alpha} &= u_{c_\alpha} - \lambda & v'_{c_\alpha} &= v_{c_\alpha} + \lambda\\
u'_{c_\beta} &= u_{c_\beta} + \lambda & v'_{c_\beta} &= v_{c_\beta} - \lambda
\end{align*}
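In words: the swap moves an amount $\lambda$ of $c_\alpha$ from $u$
to $v$ and the same amount of $c_\beta$ from $v$ to $u$, so the total
amount of each crop is conserved:
\begin{align*}
u'_{c_\alpha} + v'_{c_\alpha} &= u_{c_\alpha} + v_{c_\alpha} &
u'_{c_\beta} + v'_{c_\beta} &= u_{c_\beta} + v_{c_\beta}
\end{align*}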
Substituting these relations into the four sums above gives:
\begin{align*}
\Delta S
&= \sum_{c_i \in \{c_\alpha, c_\beta\}}
\sum_{c_j \in \{c_\alpha, c_\beta\}}
R(c_i, c_j) \cdot (u'_{c_i} \cdot v'_{c_j} - u_{c_i} \cdot v_{c_j})\\
&+ \sum_{c_i \in \{c_\alpha, c_\beta\}}
\sum_{c_j \in \Cps \setminus \{c_\alpha, c_\beta\}}
R(c_i, c_j) \cdot (u'_{c_i} \cdot v_{c_j} - u_{c_i} \cdot v_{c_j})\\
&+ \sum_{c_i \in \Cps \setminus \{c_\alpha, c_\beta\}}
\sum_{c_j \in \{c_\alpha, c_\beta\}}
R(c_i, c_j) \cdot (u_{c_i} \cdot v'_{c_j} - u_{c_i} \cdot v_{c_j})\\
&+ \sum_{c_i \in \Cps \setminus \{c_\alpha, c_\beta\}}
\sum_{c_j \in \Cps \setminus \{c_\alpha, c_\beta\}}
R(c_i, c_j) \cdot (u_{c_i} \cdot v_{c_j} - u_{c_i} \cdot v_{c_j})
\end{align*}
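Substituting the crop amounts shows, for example for $c_i = c_\alpha$
and $c_j \in \Cps \setminus \{c_\alpha, c_\beta\}$, that the mixed
terms are linear in $\lambda$:
\begin{align*}
u'_{c_\alpha} \cdot v_{c_j} - u_{c_\alpha} \cdot v_{c_j}
= (u_{c_\alpha} - \lambda) \cdot v_{c_j} - u_{c_\alpha} \cdot v_{c_j}
= -\lambda \cdot v_{c_j}
\end{align*}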
The last term is simply $0$, and the second and third terms
are linear functions in $\lambda$.
We again focus only on the quadratic term:
\begin{align*}
\Delta S
&= \sum_{c_i \in \{c_\alpha, c_\beta\}}
\sum_{c_j \in \{c_\alpha, c_\beta\}}
R(c_i, c_j) \cdot (u'_{c_i} \cdot v'_{c_j} - u_{c_i} \cdot v_{c_j})\\
&= R(c_\alpha, c_\alpha) \cdot (u'_{c_\alpha} \cdot v'_{c_\alpha} - u_{c_\alpha} \cdot v_{c_\alpha})\\
&+ R(c_\alpha, c_\beta) \cdot (u'_{c_\alpha} \cdot v'_{c_\beta} - u_{c_\alpha} \cdot v_{c_\beta})\\
&+ R(c_\beta, c_\alpha) \cdot (u'_{c_\beta} \cdot v'_{c_\alpha} - u_{c_\beta} \cdot v_{c_\alpha})\\
&+ R(c_\beta, c_\beta) \cdot (u'_{c_\beta} \cdot v'_{c_\beta} - u_{c_\beta} \cdot v_{c_\beta})\\
&= R(c_\alpha, c_\alpha) \cdot ((u_{c_\alpha} - \lambda) \cdot (v_{c_\alpha} + \lambda) - u_{c_\alpha} \cdot v_{c_\alpha})\\
&+ R(c_\alpha, c_\beta) \cdot ((u_{c_\alpha} - \lambda) \cdot (v_{c_\beta} - \lambda) - u_{c_\alpha} \cdot v_{c_\beta})\\
&+ R(c_\beta, c_\alpha) \cdot ((u_{c_\beta} + \lambda) \cdot (v_{c_\alpha} + \lambda) - u_{c_\beta} \cdot v_{c_\alpha})\\
&+ R(c_\beta, c_\beta) \cdot ((u_{c_\beta} + \lambda) \cdot (v_{c_\beta} - \lambda) - u_{c_\beta} \cdot v_{c_\beta})
\end{align*}
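Expanding the products and collecting powers of $\lambda$ (a concluding
step the draft stops just short of; the following is one way it might
continue) yields
\begin{align*}
\Delta S
&= \lambda \cdot \big(R(c_\alpha, c_\alpha) \cdot (u_{c_\alpha} - v_{c_\alpha})
- R(c_\alpha, c_\beta) \cdot (u_{c_\alpha} + v_{c_\beta})\\
&\qquad + R(c_\beta, c_\alpha) \cdot (u_{c_\beta} + v_{c_\alpha})
+ R(c_\beta, c_\beta) \cdot (v_{c_\beta} - u_{c_\beta})\big)\\
&+ \lambda^2 \cdot \big(R(c_\alpha, c_\beta) + R(c_\beta, c_\alpha)
- R(c_\alpha, c_\alpha) - R(c_\beta, c_\beta)\big)
\end{align*}
so the sign of the quadratic part is governed by
$R(c_\alpha, c_\beta) + R(c_\beta, c_\alpha) - R(c_\alpha, c_\alpha) - R(c_\beta, c_\beta)$,
exactly the kind of condition on $R$ alluded to at the start of this
argument.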
\end{proof}