Compare commits
49 Commits
cc5a8d25d3 ... main
| Author | SHA1 | Date |
|---|---|---|
| | 7b5accb6c5 | |
| | 8f983b890f | |
| | 6cd2c7fbef | |
| | 62c424cd54 | |
| | bd9171f68e | |
| | efdc33035b | |
| | f2c8fe241d | |
| | ece887860b | |
| | c3830db913 | |
| | 3d21171a40 | |
| | 5aca00ad67 | |
| | 374420727b | |
| | 8697c07c0f | |
| | 5287f2c557 | |
| | b7faf6e1b6 | |
| | 0354ad37e1 | |
| | 32ab4e6a11 | |
| | 055d403dfb | |
| | 28b6eba094 | |
| | 436a25df11 | |
| | 5d0610a875 | |
| | 545b65d3d5 | |
| | 8db244901e | |
| | 72afe9ebdc | |
| | 81c1e5b7af | |
| | 6040f5f144 | |
| | d5f5a09d6f | |
| | a6f5ecaba2 | |
| | 1f3e607e8d | |
| | 3bf457f2cf | |
| | 3eb7e662b0 | |
| | 2411f8b1a7 | |
| | fe45de00ca | |
| | 1e71600102 | |
| | d93f1a52a9 | |
| | e34a374adc | |
| | f36477ed9b | |
| | 52dabf0f89 | |
| | e00d1a33e3 | |
| | c270783225 | |
| | cfb77dccab | |
| | 4c8df5cae0 | |
| | f93bbaeec1 | |
| | 9ec73c5992 | |
| | 8e7c210872 | |
| | a20a4a0832 | |
| | 8f36bd2e07 | |
| | 936d2ecb6e | |
| | 95867bde7a | |
1052  thesis/Main.bbl
BIN   thesis/Main.pdf
1313  thesis/Main.tex
@@ -24,15 +24,12 @@
not used other than the declared sources/resources, and that I have
explicitly indicated all material which has been quoted either
literally or by content from the sources used.
\ifthenelse{\equal{\ThesisTitle}{master's thesis} \or
    \equal{\ThesisTitle}{diploma thesis} \or
    \equal{\ThesisTitle}{doctoral thesis}}
{The text document uploaded to TUGRAZonline is identical to the present \ThesisTitle.}{\reminder{TODO: fix \textbackslash ThesisTitle}}
The text document uploaded to TUGRAZonline is identical to the present \ThesisTitle.


\par\vspace*{4cm}
\centerline{
\begin{tabular}{m{1.5cm}cm{1.5cm}m{3cm}m{1.5cm}cm{1.5cm}}
\cline{1-3} \cline{5-7}
& date & & & & (signature) &\\
\end{tabular}}
\begin{tabular}{m{1.5cm}cm{1.5cm}m{3cm}m{1.5cm}cm{1.5cm}}
\cline{1-3} \cline{5-7}
& date & & & & (signature) & \\
\end{tabular}}

@@ -55,7 +55,7 @@
\makeatother

% header and footer texts
\clearscrheadfoot % clear everything
\clearpairofpagestyles % clear everything
\KOMAoptions{headlines=1} % header needs two lines here
% [plain]{actual (scrheadings)}
\ihead[]{}%
@@ -141,46 +141,46 @@
\ifthenelse{\equal{\DocumentLanguage}{en}}{\renewcaptionname{USenglish}{\figurename}{Figure}}{}%
\ifthenelse{\equal{\DocumentLanguage}{de}}{\renewcaptionname{ngerman}{\figurename}{Abbildung}}{}%
\captionsetup{%
format=hang,% hanging captions
labelformat=simple,% just name and number
labelsep=colon,% colon and space
justification=justified,%
singlelinecheck=true,% center single line captions
font={footnotesize,it},% font style of label and text
margin=0.025\textwidth,% margin left/right of the caption (to textwidth)
indention=0pt,% no further indention (just hanging)
hangindent=0pt,% no further indention (just hanging)}
aboveskip=8pt,% same spacing above and...
belowskip=8pt}% ...below the float (this way tables shouln't be a problem, either)
format=hang,% hanging captions
labelformat=simple,% just name and number
labelsep=colon,% colon and space
justification=justified,%
singlelinecheck=true,% center single line captions
font={footnotesize,it},% font style of label and text
margin=0.025\textwidth,% margin left/right of the caption (to textwidth)
indention=0pt,% no further indention (just hanging)
hangindent=0pt,% no further indention (just hanging)}
aboveskip=8pt,% same spacing above and...
belowskip=8pt}% ...below the float (this way tables shouln't be a problem, either)

% code listings
\lstloadlanguages{VHDL,Matlab,[ANSI]C,Java,[LaTeX]TeX}
\lstset{%
% general
breaklines=true,% automatically break long lines
breakatwhitespace=true,% break only at white spaces
breakindent=1cm,% additional indentation for broken lines
% positioning
linewidth=\linewidth,% set width of whole thing to \linewidth
xleftmargin=0.1\linewidth,%
% frame and caption
frame=tlrb,% frame the entire thing
framexleftmargin=1cm,% to include linenumbering into frame
captionpos=b,% caption at bottom
% format parameters
basicstyle=\ttfamily\tiny,% small true type font
keywordstyle=\color{black},%
identifierstyle=\color{black},%
commentstyle=\color[rgb]{0.45,0.45,0.45},% gray
stringstyle=\color{black},%
showstringspaces=false,%
showtabs=false,%
tabsize=2,%
% linenumbers
numberstyle=\tiny,%
numbers=left,%
numbersep=3mm,%
firstnumber=1,%
stepnumber=1,% number every line (0: off)
numberblanklines=true%
% general
breaklines=true,% automatically break long lines
breakatwhitespace=true,% break only at white spaces
breakindent=1cm,% additional indentation for broken lines
% positioning
linewidth=\linewidth,% set width of whole thing to \linewidth
xleftmargin=0.1\linewidth,%
% frame and caption
frame=tlrb,% frame the entire thing
framexleftmargin=1cm,% to include linenumbering into frame
captionpos=b,% caption at bottom
% format parameters
basicstyle=\ttfamily\tiny,% small true type font
keywordstyle=\color{black},%
identifierstyle=\color{black},%
commentstyle=\color[rgb]{0.45,0.45,0.45},% gray
stringstyle=\color{black},%
showstringspaces=false,%
showtabs=false,%
tabsize=2,%
% linenumbers
numberstyle=\tiny,%
numbers=left,%
numbersep=3mm,%
firstnumber=1,%
stepnumber=1,% number every line (0: off)
numberblanklines=true%
}

@@ -147,22 +147,22 @@
% standard
\newcommand{\fig}[3]{\begin{figure}\centering\includegraphics[width=\textwidth]{#2}\caption{#3}\label{fig:#1}\end{figure}}%
% with controllable parameters
\newcommand{\figc}[4]{\begin{figure}\centering\includegraphics[#1]{#2}\caption{#3}\label{fig:#4}\end{figure}}%
\newcommand{\figc}[4]{\begin{figure}\centering\includegraphics[#4]{#2}\caption{#3}\label{fig:#1}\end{figure}}%
% two subfigures
\newcommand{\twofig}[6]{\begin{figure}\centering%
\subfigure[#2]{\includegraphics[width=0.495\textwidth]{#1}}%
\subfigure[#4]{\includegraphics[width=0.495\textwidth]{#3}}%
\caption{#5}\label{fig:#6}\end{figure}}%
\subfigure[#2]{\includegraphics[width=0.495\textwidth]{#1}}%
\subfigure[#4]{\includegraphics[width=0.495\textwidth]{#3}}%
\caption{#5}\label{fig:#6}\end{figure}}%
% two subfigures with labels for each subplot
\newcommand{\twofigs}[8]{\begin{figure}\centering%
\subfigure[#2]{\includegraphics[width=0.495\textwidth]{#1}\label{fig:#8#3}}%
\subfigure[#5]{\includegraphics[width=0.495\textwidth]{#4}\label{fig:#8#6}}%
\caption{#7}\label{fig:#8}\end{figure}}%
\subfigure[#2]{\includegraphics[width=0.495\textwidth]{#1}\label{fig:#8#3}}%
\subfigure[#5]{\includegraphics[width=0.495\textwidth]{#4}\label{fig:#8#6}}%
\caption{#7}\label{fig:#8}\end{figure}}%
% two subfigures and controllable parameters
\newcommand{\twofigc}[8]{\begin{figure}\centering%
\subfigure[#3]{\includegraphics[#1]{#2}}%
\subfigure[#6]{\includegraphics[#4]{#5}}%
\caption{#7}\label{fig:#8}\end{figure}}%
\subfigure[#3]{\includegraphics[#1]{#2}}%
\subfigure[#6]{\includegraphics[#4]{#5}}%
\caption{#7}\label{fig:#8}\end{figure}}%

% framed figures
% standard
@@ -171,19 +171,19 @@
\newcommand{\figcf}[4]{\begin{figure}\centering\fbox{\includegraphics[#1]{#2}}\caption{#3}\label{fig:#4}\end{figure}}%
% two subfigures
\newcommand{\twofigf}[6]{\begin{figure}\centering%
\fbox{\subfigure[#2]{\includegraphics[width=0.495\textwidth]{#1}}}%
\fbox{\subfigure[#4]{\includegraphics[width=0.495\textwidth]{#3}}}%
\caption{#5}\label{fig:#6}\end{figure}}%
\fbox{\subfigure[#2]{\includegraphics[width=0.495\textwidth]{#1}}}%
\fbox{\subfigure[#4]{\includegraphics[width=0.495\textwidth]{#3}}}%
\caption{#5}\label{fig:#6}\end{figure}}%
% two subfigures with labels for each subplot
\newcommand{\twofigsf}[8]{\begin{figure}\centering%
\fbox{\subfigure[#2]{\includegraphics[width=0.495\textwidth]{#1}\label{fig:#8#3}}}%
\fbox{\subfigure[#5]{\includegraphics[width=0.495\textwidth]{#4}\label{fig:#8#6}}}%
\caption{#7}\label{fig:#8}\end{figure}}%
\fbox{\subfigure[#2]{\includegraphics[width=0.495\textwidth]{#1}\label{fig:#8#3}}}%
\fbox{\subfigure[#5]{\includegraphics[width=0.495\textwidth]{#4}\label{fig:#8#6}}}%
\caption{#7}\label{fig:#8}\end{figure}}%
% two subfigures and controllable parameters
\newcommand{\twofigcf}[8]{\begin{figure}\centering%
\fbox{\subfigure[#3]{\includegraphics[#1]{#2}}}%
\fbox{\subfigure[#6]{\includegraphics[#4]{#5}}}%
\caption{#7}\label{fig:#8}\end{figure}}%
\fbox{\subfigure[#3]{\includegraphics[#1]{#2}}}%
\fbox{\subfigure[#6]{\includegraphics[#4]{#5}}}%
\caption{#7}\label{fig:#8}\end{figure}}%

% listings
\newcommand{\filelisting}[5][]{\lstinputlisting[style=#2,caption={#4},label={lst:#5},#1]{#3}}

@@ -47,33 +47,33 @@
\usepackage{fixltx2e}% LaTeX 2e bugfixes
\usepackage{ifthen}% for optional parts
\ifthenelse{\equal{\PaperSize}{a4paper}}{
\usepackage[paper=\PaperSize,twoside=\Twosided,%
textheight=246mm,%
textwidth=160mm,%
heightrounded=true,% round textheight to multiple of lines (avoids overfull vboxes)
ignoreall=true,% do not include header, footer, and margins in calculations
marginparsep=5pt,% marginpar only used for signs (centered), thus only small sep. needed
marginparwidth=10mm,% prevent margin notes to be out of page
hmarginratio=2:1,% set margin ration (inner:outer for twoside) - (2:3 is default)
]{geometry}}{}%
\usepackage[paper=\PaperSize,twoside=\Twosided,%
textheight=246mm,%
textwidth=160mm,%
heightrounded=true,% round textheight to multiple of lines (avoids overfull vboxes)
ignoreall=true,% do not include header, footer, and margins in calculations
marginparsep=5pt,% marginpar only used for signs (centered), thus only small sep. needed
marginparwidth=10mm,% prevent margin notes to be out of page
hmarginratio=2:1,% set margin ration (inner:outer for twoside) - (2:3 is default)
]{geometry}}{}%
\ifthenelse{\equal{\PaperSize}{letterpaper}}{
\usepackage[paper=\PaperSize,twoside=\Twosided,%
textheight=9in,%
textwidth=6.5in,%
heightrounded=true,% round textheight to multiple of lines (avoids overfull vboxes)
ignoreheadfoot=false,% do not include header and footer in calculations
marginparsep=5pt,% marginpar only used for signs (centered), thus only small sep. needed
marginparwidth=10mm,% prevent margin notes to be out of page
hmarginratio=3:2,% set margin ration (inner:outer for twoside) - (2:3 is default)
]{geometry}}{}%
\usepackage[paper=\PaperSize,twoside=\Twosided,%
textheight=9in,%
textwidth=6.5in,%
heightrounded=true,% round textheight to multiple of lines (avoids overfull vboxes)
ignoreheadfoot=false,% do not include header and footer in calculations
marginparsep=5pt,% marginpar only used for signs (centered), thus only small sep. needed
marginparwidth=10mm,% prevent margin notes to be out of page
hmarginratio=3:2,% set margin ration (inner:outer for twoside) - (2:3 is default)
]{geometry}}{}%
\ifthenelse{\equal{\DocumentLanguage}{en}}{\usepackage[T1]{fontenc}\usepackage[utf8]{inputenc}\usepackage[USenglish]{babel}}{}%
\ifthenelse{\equal{\DocumentLanguage}{de}}{\usepackage[T1]{fontenc}\usepackage[utf8]{inputenc}\usepackage[ngerman]{babel}}{}%
\usepackage[%
headtopline,plainheadtopline,% activate all lines (header and footer)
headsepline,plainheadsepline,%
footsepline,plainfootsepline,%
footbotline,plainfootbotline,%
automark% auto update \..mark
headtopline,plainheadtopline,% activate all lines (header and footer)
headsepline,plainheadsepline,%
footsepline,plainfootsepline,%
footbotline,plainfootbotline,%
automark% auto update \..mark
]{scrlayer-scrpage}% (KOMA)
\usepackage{imakeidx}
\usepackage[]{caption}% customize captions
@@ -91,7 +91,7 @@ automark% auto update \..mark
\usepackage[normalem]{ulem}% cross-out, strike-out, underlines (normalem: keep \emph italic)
%\usepackage[safe]{textcomp}% loading in safe mode to avoid problems (see LaTeX companion)
%\usepackage[geometry,misc]{ifsym}% technical symbols
\usepackage{remreset}%\@removefromreset commands (e.g., for continuous footnote numbering)
%\usepackage{remreset}%\@removefromreset commands (e.g., for continuous footnote numbering)
\usepackage{paralist}% extended list environments
% \usepackage[Sonny]{fncychap}
\usepackage[avantgarde]{quotchap}
@@ -140,35 +140,35 @@ automark% auto update \..mark
\usepackage{mdwlist} %list extensions
\ifthenelse{\equal{\DocumentLanguage}{de}}
{
\usepackage[german]{fancyref} %Better cross-references
\usepackage[locale=DE]{siunitx} %Numbers and SI units => enable binary units...
\usepackage[autostyle=true, %Quotation marks and translation of bibliography references
german=quotes]{csquotes} %Quotation marks and translation of bibliography references
\usepackage[german]{fancyref} %Better cross-references
\usepackage[locale=DE]{siunitx} %Numbers and SI units => enable binary units...
\usepackage[autostyle=true, %Quotation marks and translation of bibliography references
german=quotes]{csquotes} %Quotation marks and translation of bibliography references
}
{
\usepackage[english]{fancyref} %Better cross-references
\usepackage[locale=US]{siunitx} %Numbers and SI units => enable binary units...
\usepackage[autostyle=true] %Quotation marks and translation of bibliography references
{csquotes}
\usepackage[english]{fancyref} %Better cross-references
\usepackage[locale=US]{siunitx} %Numbers and SI units => enable binary units...
\usepackage[autostyle=true] %Quotation marks and translation of bibliography references
{csquotes}
}
\sisetup{detect-weight=true, detect-family=true} %format like surrounding environment
%extending fancyref for listings in both languages:
\newcommand*{\fancyreflstlabelprefix}{lst}
\fancyrefaddcaptions{english}{%
\providecommand*{\freflstname}{listing}%
\providecommand*{\Freflstname}{Listing}%
\providecommand*{\freflstname}{listing}%
\providecommand*{\Freflstname}{Listing}%
}
\fancyrefaddcaptions{german}{%
\providecommand*{\freflstname}{Listing}%
\providecommand*{\Freflstname}{Listing}%
\providecommand*{\freflstname}{Listing}%
\providecommand*{\Freflstname}{Listing}%
}
\frefformat{plain}{\fancyreflstlabelprefix}{\freflstname\fancyrefdefaultspacing#1}
\Frefformat{plain}{\fancyreflstlabelprefix}{\Freflstname\fancyrefdefaultspacing#1}
\frefformat{vario}{\fancyreflstlabelprefix}{%
\freflstname\fancyrefdefaultspacing#1#3%
\freflstname\fancyrefdefaultspacing#1#3%
}
\Frefformat{vario}{\fancyreflstlabelprefix}{%
\Freflstname\fancyrefdefaultspacing#1#3%
\Freflstname\fancyrefdefaultspacing#1#3%
}

\sisetup{separate-uncertainty} %enable uncertainty for siunitx
@@ -176,30 +176,30 @@ automark% auto update \..mark
\DeclareSIUnit\permille{\text{\textperthousand}} %add \permille to siunitx
\usepackage{xfrac} %Nicer fractions for SI units
\sisetup{per-mode=fraction, %Enable fraction bars for SI units
fraction-function=\sfrac} %Use xfrac as the fraction function
fraction-function=\sfrac} %Use xfrac as the fraction function
\usepackage[scaled=0.78]{inconsolata}%Typewriter font for source code

\usepackage[backend=biber, %Select the bibliography backend
bibencoding=utf8, %.bib file is utf8-encoded...
maxbibnames=99, %Always list all authors in the bibliography...
style=ieee
bibencoding=utf8, %.bib file is utf8-encoded...
maxbibnames=99, %Always list all authors in the bibliography...
style=ieee
]{biblatex}
\bibliography{bib/bibliography} %bib/bibliography.bib is loaded and used as the bibliography file

\ifthenelse{\equal{\FramedLinks}{true}}
{
\usepackage[%
breaklinks=true,% allow line break in links
colorlinks=false,% if false: framed link
linkcolor=black,anchorcolor=black,citecolor=black,filecolor=black,%
menucolor=black,urlcolor=black,bookmarksnumbered=true]{hyperref}% hyperlinks for references
\usepackage[%
breaklinks=true,% allow line break in links
colorlinks=false,% if false: framed link
linkcolor=black,anchorcolor=black,citecolor=black,filecolor=black,%
menucolor=black,urlcolor=black,bookmarksnumbered=true]{hyperref}% hyperlinks for references
}
{
\usepackage[%
breaklinks=true,% allow line break in links
colorlinks=true,% if false: framed link
linkcolor=black,anchorcolor=black,citecolor=black,filecolor=black,%
menucolor=black,urlcolor=black,bookmarksnumbered=true]{hyperref}% hyperlinks for references
\usepackage[%
breaklinks=true,% allow line break in links
colorlinks=true,% if false: framed link
linkcolor=black,anchorcolor=black,citecolor=black,filecolor=black,%
menucolor=black,urlcolor=black,bookmarksnumbered=true]{hyperref}% hyperlinks for references
}

\setcounter{biburlnumpenalty}{100}%Make URLs in the bibliography breakable across lines
@@ -213,8 +213,8 @@ style=ieee

\ifthenelse{\equal{\DocumentLanguage}{de}}
{
\deftranslation[to=ngerman] %Teach the babel package the German chapter name
{Acronyms}{Abkürzungsverzeichnis} %for the list of acronyms
\deftranslation[to=ngerman] %Teach the babel package the German chapter name
{Acronyms}{Abkürzungsverzeichnis} %for the list of acronyms
}{}

% misc

@@ -41,7 +41,7 @@
  numpages = {58},
  keywords = {outlier detection, Anomaly detection},
},
@dataset{alexander_kyuroson_2023_7913307,
dataset{alexander_kyuroson_2023_7913307,
  author = {Alexander Kyuroson and Niklas Dahlquist and Nikolaos Stathoulopoulos
            and Vignesh Kottayam Viswanathan and Anton Koval and George
            Nikolakopoulos},
@@ -85,37 +85,6 @@
  pages = {716–721},
}
,
@inproceedings{deepsvdd,
  title = {Deep One-Class Classification},
  author = {Ruff, Lukas and Vandermeulen, Robert and Goernitz, Nico and Deecke,
            Lucas and Siddiqui, Shoaib Ahmed and Binder, Alexander and M{\"u}ller
            , Emmanuel and Kloft, Marius},
  booktitle = {Proceedings of the 35th International Conference on Machine
               Learning},
  pages = {4393--4402},
  year = {2018},
  editor = {Dy, Jennifer and Krause, Andreas},
  volume = {80},
  series = {Proceedings of Machine Learning Research},
  month = {10--15 Jul},
  publisher = {PMLR},
  pdf = {http://proceedings.mlr.press/v80/ruff18a/ruff18a.pdf},
  url = {https://proceedings.mlr.press/v80/ruff18a.html},
  abstract = {Despite the great advances made by deep learning in many machine
              learning problems, there is a relative dearth of deep learning
              approaches for anomaly detection. Those approaches which do exist
              involve networks trained to perform a task other than anomaly
              detection, namely generative models or compression, which are in
              turn adapted for use in anomaly detection; they are not trained on
              an anomaly detection based objective. In this paper we introduce a
              new anomaly detection method—Deep Support Vector Data Description—,
              which is trained on an anomaly detection based objective. The
              adaptation to the deep regime necessitates that our neural network
              and training procedure satisfy certain properties, which we
              demonstrate theoretically. We show the effectiveness of our method
              on MNIST and CIFAR-10 image benchmark datasets as well as on the
              detection of adversarial examples of GTSRB stop signs.},
},
@inproceedings{deep_svdd,
  title = {Deep One-Class Classification},
  author = {Ruff, Lukas and Vandermeulen, Robert and Goernitz, Nico and Deecke,
@@ -235,7 +204,7 @@
            performance;Current measurement},
  doi = {10.1109/IROS51168.2021.9636694},
},
@article{deep_learning_overview,
article{deep_learning_overview,
  title = {Deep learning in neural networks: An overview},
  journal = {Neural Networks},
  volume = {61},
@@ -289,7 +258,7 @@
            autoencoder algorithm are summarized, and prospected for its future
            development directions are addressed.},
},
@article{semi_overview,
article{semi_overview,
  author = {Yang, Xiangli and Song, Zixing and King, Irwin and Xu, Zenglin},
  journal = {IEEE Transactions on Knowledge and Data Engineering},
  title = {A Survey on Deep Semi-Supervised Learning},
@@ -302,7 +271,7 @@
            learning;semi-supervised learning;deep learning},
  doi = {10.1109/TKDE.2022.3220219},
},
@book{ai_fundamentals_book,
book{ai_fundamentals_book,
  title = {Fundamentals of Artificial Intelligence},
  url = {http://dx.doi.org/10.1007/978-81-322-3972-7},
  DOI = {10.1007/978-81-322-3972-7},
@@ -312,7 +281,7 @@
  language = {en},
},

@article{machine_learning_overview,
article{machine_learning_overview,
  title = {Machine Learning from Theory to Algorithms: An Overview},
  volume = {1142},
  ISSN = {1742-6596},
@@ -550,7 +519,7 @@
  year = {1998},
  pages = {2278–2324},
},
@article{ef_concept_source,
article{ef_concept_source,
  title = {Multi-Year ENSO Forecasts Using Parallel Convolutional Neural
           Networks With Heterogeneous Architecture},
  volume = {8},
@@ -563,8 +532,226 @@
            and Tian, Hao and Song, Dehai and Wei, Zhiqiang},
  year = {2021},
  month = aug,
},
@article{ml_supervised_unsupervised_figure_source,
  title = {Virtual reality in biology: could we become virtual naturalists?},
  volume = {14},
  ISSN = {1936-6434},
  url = {http://dx.doi.org/10.1186/s12052-021-00147-x},
  DOI = {10.1186/s12052-021-00147-x},
  number = {1},
  journal = {Evolution: Education and Outreach},
  publisher = {Springer Science and Business Media LLC},
  author = {Morimoto, Juliano and Ponton, Fleur},
  year = {2021},
  month = may,
},
@article{ml_autoencoder_figure_source,
  title = "From Autoencoder to Beta-VAE",
  author = "Weng, Lilian",
  journal = "lilianweng.github.io",
  year = "2018",
  url = "https://lilianweng.github.io/posts/2018-08-12-vae/",
},

@conference{bg_lidar_figure_source,
  title = "1D MEMS Micro-Scanning LiDAR",
  author = "Norbert Druml and Ievgeniia Maksymova and Thomas Thurner and Lierop,
            {D. van} and Hennecke, {Marcus E.} and Andreas Foroutan",
  year = "2018",
  month = sep,
  day = "16",
  language = "English",
},
@book{deep_learning_book,
  title = {Deep Learning},
  author = {Ian Goodfellow and Yoshua Bengio and Aaron Courville},
  publisher = {MIT Press},
  note = {\url{http://www.deeplearningbook.org}},
  year = {2016},
},
@misc{mobilenet,
  doi = {10.48550/ARXIV.1704.04861},
  url = {https://arxiv.org/abs/1704.04861},
  author = {Howard, Andrew G. and Zhu, Menglong and Chen, Bo and Kalenichenko,
            Dmitry and Wang, Weijun and Weyand, Tobias and Andreetto, Marco and
            Adam, Hartwig},
  keywords = {Computer Vision and Pattern Recognition (cs.CV), FOS: Computer and
              information sciences, FOS: Computer and information sciences},
  title = {MobileNets: Efficient Convolutional Neural Networks for Mobile Vision
           Applications},
  publisher = {arXiv},
  year = {2017},
  copyright = {arXiv.org perpetual, non-exclusive license},
},
@inproceedings{shufflenet,
  title = {ShuffleNet: An Extremely Efficient Convolutional Neural Network for
           Mobile Devices},
  url = {http://dx.doi.org/10.1109/CVPR.2018.00716},
  DOI = {10.1109/cvpr.2018.00716},
  booktitle = {2018 IEEE/CVF Conference on Computer Vision and Pattern
               Recognition},
  publisher = {IEEE},
  author = {Zhang, Xiangyu and Zhou, Xinyu and Lin, Mengxiao and Sun, Jian},
  year = {2018},
  month = jun,
},
@article{bg_svm,
  title = {Support-vector networks},
  author = {Cortes, Corinna and Vapnik, Vladimir},
  journal = {Machine learning},
  volume = {20},
  number = {3},
  pages = {273--297},
  year = {1995},
  publisher = {Springer},
},

@article{bg_kmeans,
  author = {Lloyd, S.},
  journal = {IEEE Transactions on Information Theory},
  title = {Least squares quantization in PCM},
  year = {1982},
  volume = {28},
  number = {2},
  pages = {129-137},
  keywords = {Noise;Quantization (signal);Voltage;Receivers;Pulse
              modulation;Sufficient conditions;Stochastic processes;Probabilistic
              logic;Urban areas;Q measurement},
  doi = {10.1109/TIT.1982.1056489},
},

@inproceedings{bg_dbscan,
  added-at = {2023-12-13T07:32:13.000+0100},
  author = {Ester, Martin and Kriegel, Hans-Peter and Sander, Jörg and Xu,
            Xiaowei},
  biburl = {
    https://www.bibsonomy.org/bibtex/279a9f3560daefa3775bd35543b4482e1/admin
  },
  booktitle = {KDD},
  crossref = {conf/kdd/1996},
  editor = {Simoudis, Evangelos and Han, Jiawei and Fayyad, Usama M.},
  ee = {http://www.aaai.org/Library/KDD/1996/kdd96-037.php},
  interhash = {ba33e4d6b4e5b26bd9f543f26b7d250a},
  intrahash = {79a9f3560daefa3775bd35543b4482e1},
  isbn = {1-57735-004-9},
  keywords = {},
  pages = {226-231},
  publisher = {AAAI Press},
  timestamp = {2023-12-13T07:32:13.000+0100},
  title = {A Density-Based Algorithm for Discovering Clusters in Large Spatial
           Databases with Noise.},
  url = {http://dblp.uni-trier.de/db/conf/kdd/kdd96.html#EsterKSX96},
  year = 1996,
},
@article{bg_pca,
  author = { Karl Pearson F.R.S. },
  title = {LIII. On lines and planes of closest fit to systems of points in
           space},
  journal = {The London, Edinburgh, and Dublin Philosophical Magazine and
             Journal of Science},
  volume = {2},
  number = {11},
  pages = {559-572},
  year = {1901},
  publisher = {Taylor & Francis},
  doi = {10.1080/14786440109462720},
},
@article{bg_infomax,
  author = {Linsker, R.},
  journal = {Computer},
  title = {Self-organization in a perceptual network},
  year = {1988},
  volume = {21},
  number = {3},
  pages = {105-117},
  keywords = {Intelligent networks;Biological information
              theory;Circuits;Biology computing;Animal
              structures;Neuroscience;Genetics;System testing;Neural
              networks;Constraint theory},
  doi = {10.1109/2.36},
},
@article{bg_slam,
  title = {On the Representation and Estimation of Spatial Uncertainty},
  volume = {5},
  ISSN = {1741-3176},
  url = {http://dx.doi.org/10.1177/027836498600500404},
  DOI = {10.1177/027836498600500404},
  number = {4},
  journal = {The International Journal of Robotics Research},
  publisher = {SAGE Publications},
  author = {Smith, Randall C. and Cheeseman, Peter},
  year = {1986},
  month = dec,
  pages = {56–68},
},
@article{roc_vs_prc2,
  title = {Context discovery for anomaly detection},
  volume = {19},
  ISSN = {2364-4168},
  url = {http://dx.doi.org/10.1007/s41060-024-00586-x},
  DOI = {10.1007/s41060-024-00586-x},
  number = {1},
  journal = {International Journal of Data Science and Analytics},
  publisher = {Springer Science and Business Media LLC},
  author = {Calikus, Ece and Nowaczyk, Slawomir and Dikmen, Onur},
  year = {2024},
  month = jun,
  pages = {99–113},
},
@article{roc_vs_prc,
  title = {On the evaluation of unsupervised outlier detection: measures,
           datasets, and an empirical study},
  volume = {30},
  ISSN = {1573-756X},
  url = {http://dx.doi.org/10.1007/s10618-015-0444-8},
  DOI = {10.1007/s10618-015-0444-8},
  number = {4},
  journal = {Data Mining and Knowledge Discovery},
  publisher = {Springer Science and Business Media LLC},
  author = {Campos, Guilherme O. and Zimek, Arthur and Sander, J\"{o}rg and
            Campello, Ricardo J. G. B. and Micenková, Barbora and Schubert, Erich
            and Assent, Ira and Houle, Michael E.},
  year = {2016},
  month = jan,
  pages = {891–927},
},
@inproceedings{roc,
  title = {Basic principles of ROC analysis},
  author = {Metz, Charles E},
  booktitle = {Seminars in nuclear medicine},
  volume = {8},
  number = {4},
  pages = {283--298},
  year = {1978},
  organization = {Elsevier},
},
@article{prc,
  title = {A critical investigation of recall and precision as measures of
           retrieval system performance},
  volume = {7},
  ISSN = {1558-2868},
  url = {http://dx.doi.org/10.1145/65943.65945},
  DOI = {10.1145/65943.65945},
  number = {3},
  journal = {ACM Transactions on Information Systems},
  publisher = {Association for Computing Machinery (ACM)},
  author = {Raghavan, Vijay and Bollmann, Peter and Jung, Gwang S.},
  year = {1989},
  month = jul,
  pages = {205–229},
},
@article{zscore,
  title = {Advanced engineering mathematics},
  author = {Kreyszig, Erwin and Stroud, K and Stephenson, G},
  journal = {Integration},
  volume = {9},
  number = {4},
  pages = {1014},
  year = {2008},
  publisher = {John Wiley \& Sons, Inc. 9 th edition, 2006 Page 2 of 6 Teaching
               methods~…},
}

@@ -1,5 +1,6 @@
\documentclass[tikz,border=10pt]{standalone}
\usepackage{tikz}
\usepackage{amsfonts}
\usetikzlibrary{positioning, shapes.geometric, fit, arrows, arrows.meta, backgrounds}

% Define box styles
@@ -7,9 +8,9 @@
databox/.style={rectangle, align=center, draw=black, fill=blue!50, thick, rounded corners},%, inner sep=4},
procbox/.style={rectangle, align=center, draw=black, fill=orange!30, thick, rounded corners},
hyperbox/.style={rectangle, align=center, draw=black, fill=green!30, thick, rounded corners},
stepsbox/.style={rectangle, align=left, draw=black,fill=white, rounded corners, minimum width=6cm, minimum height=1.5cm, font=\small},
outputbox/.style={rectangle, align=center, draw=red!80, fill=red!20, rounded corners, minimum width=6cm, minimum height=1.5cm, font=\small},
hlabelbox/.style={rectangle, align=center, draw=black,fill=white, rounded corners, minimum width=6cm, minimum height=1.5cm, font=\small},
stepsbox/.style={rectangle, align=left, draw=black,fill=white, rounded corners, minimum width=5.2cm, minimum height=1.5cm, font=\small},
outputbox/.style={rectangle, align=center, draw=red!80, fill=red!20, rounded corners, minimum width=5.2cm, minimum height=1.5cm, font=\small},
hlabelbox/.style={rectangle, align=center, draw=black,fill=white, rounded corners, minimum width=5.2cm, minimum height=1.5cm, font=\small},
vlabelbox/.style={rectangle, align=center, draw=black,fill=white, rounded corners, minimum width=3cm, minimum height=1.8cm, font=\small},
arrow/.style={-{Latex[length=3mm]}},
arrowlabel/.style={fill=white,inner sep=2pt,midway}
@@ -25,11 +26,11 @@
\begin{tikzpicture}[node distance=1cm and 2cm]

\node (data) {Data};
\node[right=7 of data] (process) {Procedure};
\node[right=7 of process] (hyper) {Hyperparameters};
\node[right=4.9 of data] (process) {Procedure};
\node[right=4.1 of process] (hyper) {Hyperparameters};

\begin{pgfonlayer}{foreground}
\node[hlabelbox, below=of data] (unlabeled) {\boxtitle{Unlabeled Data} More normal than \\ anomalous samples required};
\node[hlabelbox, below=1.29 of data] (unlabeled) {\boxtitle{Unlabeled Data} Significantly more normal than \\ anomalous samples required};
\node[hlabelbox, below=.1 of unlabeled] (labeled) {\boxtitle{Labeled Data} No requirement regarding ratio \\ +1 = normal, -1 = anomalous};
\end{pgfonlayer}
\begin{pgfonlayer}{background}
@@ -39,16 +40,16 @@
%\draw[arrow] (latent.east) -- node{} (autoenc.west);

\begin{pgfonlayer}{foreground}
\node[stepsbox, below=of process] (pretrainproc) {Train Autoencoder for $E_A$ Epochs \\ with $L_A$ Learning Rate \\ No Labels Used};
\node[outputbox, below=.1 of pretrainproc] (pretrainout) {\boxtitle{Outputs} Encoder Network \\ $\mathbf{w}$: Network Weights};
\node[stepsbox, below=of process] (pretrainproc) {Train Autoencoder $\mathcal{\phi}_{AE}$ \\ optimize Autoencoding Objective \\ for $E_A$ Epochs \\ with $L_A$ Learning Rate \\ No Labels Used / Required};
\node[outputbox, below=.1 of pretrainproc] (pretrainout) {\boxtitle{Outputs} $\mathcal{\phi}$: Encoder / DeepSAD Network \\ $\mathcal{W}_E$: Encoder Network Weights};
\end{pgfonlayer}
\begin{pgfonlayer}{background}
\node[procbox, fit=(pretrainproc) (pretrainout), label={[label distance = 1, name=pretrainlab]above:{\textbf{Pre-Training of Autoencoder}}}] (pretrain) {};
\end{pgfonlayer}

\begin{pgfonlayer}{foreground}
\node[hlabelbox, below=of hyper] (autoencarch) {\boxtitle{Autoencoder Architecture} Choose based on data type \\ Latent Space Size (based on complexity)};
\node[hlabelbox, below=.1 of autoencarch] (pretrainhyper) {\boxtitle{Hyperparameters} $E_A$: Number of Epochs \\ $L_A$: Learning Rate};
\node[hlabelbox, below=1.26 of hyper] (autoencarch) {\boxtitle{Autoencoder Architecture} $\mathcal{\phi}_{AE}$: Autoencoder Network \\ $\mathbb{R}^d$: Latent Space Size };
\node[hlabelbox, below=.1 of autoencarch] (pretrainhyper) {\boxtitle{Hyperparameters} $E_A$: Number of Epochs \\ $L_A$: Learning Rate AE};
\end{pgfonlayer}
\begin{pgfonlayer}{background}
\node[hyperbox, fit=(autoencarch) (pretrainhyper), label={[label distance = 1, name=autoenclabel]above:{\textbf{Pre-Training Hyperparameters}}}] (pretrainhyp) {};
@@ -61,7 +62,7 @@
% \draw[arrow] (node cs:name=autoenc,angle=196) |- (node cs:name=pretrain,angle=5);

\begin{pgfonlayer}{foreground}
\node[stepsbox, below=1.4 of pretrain] (calccproc) {1. Init Encoder with $\mathbf{w}$ \\ 2. Forward Pass on all data \\ 3. $\mathbf{c}$ = Mean Latent Representation};
\node[stepsbox, below=1.4 of pretrain] (calccproc) {Init Network $\mathcal{\phi}$ with $\mathcal{W}_E$ \\ Forward Pass on all data \\ Hypersphere center $\mathbf{c}$ is mean \\ of all Latent Representation};
\node[outputbox, below=.1 of calccproc] (calccout) {\boxtitle{Outputs} $\mathbf{c}$: Hypersphere Center};
\end{pgfonlayer}
\begin{pgfonlayer}{background}
@@ -76,21 +77,21 @@
%\draw[arrow] (node cs:name=traindata,angle=-45) |- node[arrowlabel]{all training data, labels removed} (node cs:name=calcc,angle=200);

\begin{pgfonlayer}{foreground}
\node[stepsbox, below=1.4 of calcc] (maintrainproc) {Train Network for $E_M$ Epochs \\ with $L_M$ Learning Rate \\ Considers Labels with $\eta$ strength};
\node[outputbox, below=.1 of maintrainproc] (maintrainout) {\boxtitle{Outputs} Encoder Network \\ $\mathbf{w}$: Network Weights \\ $\mathbf{c}$: Hypersphere Center};
\node[stepsbox, below=1.4 of calcc] (maintrainproc) {Init Network $\mathcal{\phi}$ with $\mathcal{W}_E$ \\ Train Network $\mathcal{\phi}$ \\ optimize DeepSAD Objective\\ for $E_M$ Epochs \\ with $L_M$ Learning Rate \\ Considers Labels with $\eta$ strength};
\node[outputbox, below=.1 of maintrainproc] (maintrainout) {\boxtitle{Outputs} $\mathcal{\phi}$: DeepSAD Network \\ $\mathcal{W}$: DeepSAD Network Weights \\ $\mathbf{c}$: Hypersphere Center};
\end{pgfonlayer}
\begin{pgfonlayer}{background}
\node[procbox, fit=(maintrainproc) (maintrainout), label={[label distance = 1, name=maintrainlab]above:{\textbf{Main Training}}}] (maintrain) {};
\end{pgfonlayer}

\begin{pgfonlayer}{foreground}
\node[hlabelbox, below=11.25 of hyper] (maintrainhyper) {$E_M$: Number of Epochs \\ $L_M$: Learning Rate \\ $\eta$: Strength Labeled/Unlabeled};
\node[hlabelbox, below=12.48 of hyper] (maintrainhyper) {$E_M$: Number of Epochs \\ $L_M$: Learning Rate \\ $\eta$: Weight Labeled/Unlabeled};
\end{pgfonlayer}
\begin{pgfonlayer}{background}
\node[hyperbox, fit=(maintrainhyper), label={[label distance = 1, name=autoenclabel]above:{\textbf{Main-Training Hyperparameters}}}] (maintrainhyp) {};
\end{pgfonlayer}

\draw[arrow] (node cs:name=pretrain,angle=-20) -- +(1, 0) |- (node cs:name=maintrain,angle=20);
\draw[arrow] (node cs:name=pretrain,angle=-50) |- +(1.5, -0.55) -- +(1.5,-5.4) -| (node cs:name=maintrain,angle=50);


%\draw[arrow] (pretrainoutput.south) -- (node cs:name=maintrain,angle=22);
@@ -101,7 +102,7 @@


\begin{pgfonlayer}{foreground}
\node[stepsbox, below=1.4 of maintrain] (inferenceproc) {Forward Pass through Network = $\mathbf{p}$ \\ Calculate Geometric Distance $\mathbf{p} \rightarrow \mathbf{c}$ \\ Anomaly Score = Geometric Distance};
\node[stepsbox, below=1.4 of maintrain] (inferenceproc) {Init Network $\mathcal{\phi}$ with $\mathcal{W}$ \\Forward Pass on sample = $\mathbf{p}$ \\ Calculate Distance $\mathbf{p} \rightarrow \mathbf{c}$ \\ Distance = Anomaly Score};
\node[outputbox, below=.1 of inferenceproc] (inferenceout) {\boxtitle{Outputs} Anomaly Score (Analog Value) \\ Higher for Anomalies};
\end{pgfonlayer}
\begin{pgfonlayer}{background}
@@ -109,7 +110,7 @@
\end{pgfonlayer}

\begin{pgfonlayer}{foreground}
\node[hlabelbox, below=11.8 of traindata] (newdatasample) {\boxtitle{New Data Sample} Same data type as training data};
\node[hlabelbox, below=13.32 of traindata] (newdatasample) {\boxtitle{New Data Sample} Same data type as training data};
\end{pgfonlayer}
\begin{pgfonlayer}{background}
\node[databox, fit=(newdatasample), label={[label distance = 1] above:{\textbf{Unseen Data}}}] (newdata) {};
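In DeepSAD's notation, the inference box above amounts to scoring a sample by its latent distance to the hypersphere center; a sketch of that rule (the original paper uses the squared norm):

\[ s(\mathbf{x}) = \lVert \phi(\mathbf{x}; \mathcal{W}) - \mathbf{c} \rVert^{2} \]

Samples mapped far from $\mathbf{c}$ receive high anomaly scores.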
BIN  thesis/figures/ae_elbow_test_loss_anomaly.png  (new file)  After: 85 KiB
BIN  thesis/figures/ae_elbow_test_loss_overall.png  (new file)  After: 88 KiB
BIN  thesis/figures/autoencoder_principle.png  (new file)  After: 134 KiB
     (changed image)  Before: 211 KiB
BIN  thesis/figures/bg_lidar_principle.png  (new file)  After: 15 KiB
     (changed image)  Before: 1.4 MiB  After: 1.4 MiB
     (changed image)  Before: 220 KiB  After: 211 KiB
     (changed image)  Before: 31 KiB  After: 26 KiB
     (changed image)  Before: 45 KiB  After: 37 KiB
BIN  thesis/figures/ml_learning_schema_concept.png  (new file)  After: 199 KiB
     (changed image)  Before: 42 KiB  After: 36 KiB
BIN  thesis/figures/results_ap_over_latent.png  (new file)  After: 133 KiB
BIN  thesis/figures/results_inference_normal_vs_degraded.png  (new file)  After: 718 KiB
BIN  thesis/figures/results_prc.png  (new file)  After: 691 KiB
BIN  thesis/figures/results_prc_over_semi.png  (new file)  After: 365 KiB
11  thesis/filters/drop-images.lua  (new file)
@@ -0,0 +1,11 @@
-- drop-images.lua
-- Replaces all images (figures, graphics) with a short placeholder.
function Image(el) return pandoc.Str("[image omitted]") end

-- For LaTeX figures that are still raw
function RawBlock(el)
  if el.format == "tex" and el.text:match("\\begin%s*{%s*figure%s*}") then
    return pandoc.Plain({pandoc.Str("[figure omitted]")})
  end
end
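A quick way to try this filter on its own could look like the following; the flags are standard pandoc options, and the input/output file names are just placeholders:

    pandoc -f latex -t plain --wrap=none \
      --lua-filter=filters/drop-images.lua \
      Main.tex -o main_noimages.txt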
11  thesis/filters/drop-tables.lua  (new file)
@@ -0,0 +1,11 @@
-- drop-tables.lua
-- Removes LaTeX tabular and tabularx environments (and their contents).
function RawBlock(el)
  if el.format == "tex" then
    -- Check for tabular or tabularx environment
    if el.text:match("\\begin%s*{%s*tabularx?%s*}") then
      return pandoc.Plain({pandoc.Str("[table omitted]")})
    end
  end
end
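Both drop filters can be chained in a single call; pandoc applies --lua-filter arguments in the order given (file names again placeholders):

    pandoc -f latex -t plain --wrap=none \
      --lua-filter=filters/drop-images.lua \
      --lua-filter=filters/drop-tables.lua \
      Main.tex -o main_stripped.txt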
43  thesis/filters/keep-citations.lua  (new file)
@@ -0,0 +1,43 @@
-- keep-citations.lua
-- Replace citations with a placeholder and eat any preceding space.
local PH = "[citation]"

-- Pandoc-native citations (if the reader produced Cite nodes)
function Cite(el) return pandoc.Str(PH) end

-- Raw LaTeX \cite-like macros (when not parsed as Cite)
function RawInline(el)
  if el.format and el.format:match("tex") and el.text:match("\\%a-*cite%*?") then
    return pandoc.Str(PH)
  end
end

-- Remove a single leading Space before our placeholder
local function squash_spaces(inlines)
  local out = {}
  local i = 1
  while i <= #inlines do
    local cur = inlines[i]
    local nxt = inlines[i + 1]
    if cur and cur.t == "Space" and nxt and nxt.t == "Str" and nxt.text == PH then
      table.insert(out, nxt)
      i = i + 2
    else
      table.insert(out, cur)
      i = i + 1
    end
  end
  return out
end

function Para(el)
  el.content = squash_spaces(el.content)
  return el
end

function Plain(el)
  el.content = squash_spaces(el.content)
  return el
end
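A minimal smoke test, assuming it is run from the thesis/ directory, could be:

    printf '%s' 'See \cite{deep_svdd} for details.' \
      | pandoc -f latex -t plain --lua-filter=filters/keep-citations.lua

which should print the [citation] placeholder in place of the reference, with the preceding space swallowed by squash_spaces.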
48  thesis/filters/math-omit.lua  (new file)
@@ -0,0 +1,48 @@
-- math-omit.lua
-- Replace any math with a placeholder and ensure a space before it when appropriate.
local PH = "[math omitted]"

function Math(el)
  -- Emit the placeholder as a Str; spacing is fixed in Para/Plain below.
  return pandoc.Str(PH)
end

local function ensure_space_before_ph(inlines)
  local out = {}
  for i = 1, #inlines do
    local cur = inlines[i]
    if cur.t == "Str" and cur.text == PH then
      local prev = out[#out]
      local need_space = true

      -- No space if it's the first token in the block
      if not prev then
        need_space = false
      elseif prev.t == "Space" then
        need_space = false
      elseif prev.t == "Str" then
        -- If previous char is an opening bracket/paren/slash/hyphen or whitespace, skip
        local last = prev.text:sub(-1)
        if last:match("[%(%[%{%/%-]") or last:match("%s") then
          need_space = false
        end
      end

      if need_space then table.insert(out, pandoc.Space()) end
      table.insert(out, cur)
    else
      table.insert(out, cur)
    end
  end
  return out
end

function Para(el)
  el.content = ensure_space_before_ph(el.content)
  return el
end

function Plain(el)
  el.content = ensure_space_before_ph(el.content)
  return el
end
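The same kind of smoke test works here; inline math should come out as the placeholder:

    printf '%s' 'Energy is $E=mc^2$.' \
      | pandoc -f latex -t plain --lua-filter=filters/math-omit.lua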
@@ -15,6 +15,8 @@
let
pkgs = import nixpkgs { inherit system; };

aspellWithDicts = pkgs.aspellWithDicts (d: [ d.en ]);

latex-packages = with pkgs; [
texlive.combined.scheme-full
which
@@ -26,16 +28,42 @@
zathura
wmctrl
python312
pandoc
pandoc-lua-filters
];
filtersPath = "${pkgs.pandoc-lua-filters}/share/pandoc/filters";
in
{
devShell = pkgs.mkShell {
buildInputs = [
latex-packages
dev-packages
aspellWithDicts
];
};

shellHook = ''
set -eu
# local folder in your repo to reference in commands
link_target="pandoc-filters"
# refresh symlink each time you enter the shell
ln -sfn ${filtersPath} "$link_target"
echo "Linked $link_target -> ${filtersPath}"

# (optional) write a defaults file that uses the relative symlink
if [ ! -f pandoc.defaults.yaml ]; then
cat > pandoc.defaults.yaml <<'YAML'
from: latex
to: plain
wrap: none
lua-filter:
- pandoc-filters/latex-hyphen.lua
- pandoc-filters/pandoc-quotes.lua
YAML
echo "Wrote pandoc.defaults.yaml"
fi
'';

}
);
}
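With flakes enabled, entering the shell is the usual invocation; the hook then creates the pandoc-filters symlink and, if missing, pandoc.defaults.yaml. A sketch, assuming this flake's devShell is picked up by default:

    nix develop
    pandoc --defaults pandoc.defaults.yaml Main.tex -o Main.txt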
61  thesis/tex2plaintext.sh  (new executable file)
@@ -0,0 +1,61 @@
#!/usr/bin/env bash
set -euo pipefail

# Usage:
#   ./tex2plaintext.sh [INPUT_TEX] [OUT_BASENAME]
#
# Defaults:
#   INPUT_TEX    = Main.tex (your original file name)
#   OUT_BASENAME = thesis (produces thesis.txt, thesis_part1.txt, thesis_part2.txt)

INPUT_TEX="${1:-Main.tex}"
OUT_BASE="${2:-thesis}"

FLAT_TEX="flat.tex"
NO_TABLES_TEX="flat_notables.tex"
PLAIN_TXT="${OUT_BASE}.txt"
PART1_TXT="${OUT_BASE}_part1.txt"
PART2_TXT="${OUT_BASE}_part2.txt"
MARKER="Data and Preprocessing"

echo "[1/5] Flattening with latexpand -> ${FLAT_TEX}"
latexpand "${INPUT_TEX}" > "${FLAT_TEX}"

echo "[2/5] Removing tabular/tabularx environments -> ${NO_TABLES_TEX}"
# Replace entire tabular / tabularx environments with a placeholder
perl -0777 -pe 's/\\begin\{(tabularx?)\}.*?\\end\{\1\}/[table omitted]/gs' \
  "${FLAT_TEX}" > "${NO_TABLES_TEX}"

echo "[3/5] Converting to plain text with pandoc -> ${PLAIN_TXT}"
pandoc -f latex -t plain --wrap=none \
  --lua-filter=filters/keep-citations.lua \
  --lua-filter=filters/math-omit.lua \
  "${NO_TABLES_TEX}" -o "${PLAIN_TXT}"

echo "[4/5] Replacing [] placeholders with [figure]"
sed -i 's/\[\]/[figure]/g' "${PLAIN_TXT}"

echo "[5/5] Splitting ${PLAIN_TXT} before the marker line: \"${MARKER}\""

# Ensure the marker exists exactly on its own line
if ! grep -xq "${MARKER}" "${PLAIN_TXT}"; then
  echo "ERROR: Marker line not found exactly as \"${MARKER}\" in ${PLAIN_TXT}."
  echo "       (It must be the only content on that line.)"
  exit 1
fi

# Clean previous outputs if present
rm -f -- "${PART1_TXT}" "${PART2_TXT}"

# Split so the marker line becomes the FIRST line of part 2
awk -v marker="${MARKER}" -v out1="${PART1_TXT}" -v out2="${PART2_TXT}" '
  BEGIN { current = out1 }
  $0 == marker { current = out2; print $0 > current; next }
  { print $0 > current }
' "${PLAIN_TXT}"

echo "Done."
echo " - ${PLAIN_TXT}"
echo " - ${PART1_TXT}"
echo " - ${PART2_TXT}"
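Invoked as documented in its own header, the script runs the whole pipeline end to end:

    ./tex2plaintext.sh Main.tex thesis
    # -> thesis.txt, thesis_part1.txt, thesis_part2.txt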
@@ -1,3 +1,9 @@
\addcontentsline{toc}{chapter}{Abstract (English)}
\begin{center}\Large\bfseries Abstract (English)\end{center}\vspace*{1cm}\noindent
Write some fancy abstract here!
\addcontentsline{toc}{chapter}{Abstract}
\begin{center}\Large\bfseries Abstract\end{center}\vspace*{1cm}\noindent
Autonomous robots are increasingly used in search and rescue (SAR) missions. In these missions, LiDAR sensors are often the most important source of environmental data. However, LiDAR data can degrade under hazardous conditions, especially when airborne particles such as smoke or dust are present. This degradation can lead to errors in mapping and navigation and may endanger both the robot and humans. Therefore, robots need a way to estimate the reliability of their LiDAR data, so that they can make better-informed decisions.
\bigskip

This thesis investigates whether anomaly detection methods can be used to quantify LiDAR data degradation caused by airborne particles such as smoke and dust. We apply a semi-supervised deep learning approach called DeepSAD, which produces an anomaly score for each LiDAR scan, serving as a measure of data reliability.
\bigskip

We evaluate this method against baseline methods on a subterranean dataset that includes LiDAR scans degraded by artificial smoke. Our results show that DeepSAD consistently outperforms the baselines and can clearly distinguish degraded from normal scans. At the same time, we find that the limited availability of labeled data and the lack of robust ground truth remain major challenges. Despite these limitations, our work demonstrates that anomaly detection methods are a promising tool for LiDAR degradation quantification in SAR scenarios.
@@ -1,3 +1,3 @@
\addcontentsline{toc}{chapter}{Acknowledgements}
\begin{center}\Large\bfseries Acknowledgements\end{center}\vspace*{1cm}\noindent
Here you can tell us, how thankful you are for this amazing template ;)
\addcontentsline{toc}{chapter}{Artificial Intelligence Usage Disclaimer}
\begin{center}\Large\bfseries Artificial Intelligence Usage Disclaimer\end{center}\vspace*{1cm}\noindent
During the creation of this thesis, an LLM-based Artificial Intelligence tool was used for stylistic and grammatical revision of the author's own work.
BIN  thesis/third_party/PlotNeuralNet/deepsad/arch_ef_decoder.pdf  (vendored, new file)
@@ -30,7 +30,8 @@ arch = [
        height=H8 * 1.6,
        depth=D1,
        width=W1,
        caption=f"Latent Space",
        caption="Latent Space",
        captionshift=0,
    ),
    # to_connection("fc1", "latent"),
    # --------------------------- DECODER ---------------------------
@@ -39,19 +40,20 @@ arch = [
        "fc3",
        n_filer="{{8×128×8}}",
        zlabeloffset=0.5,
        offset="(2,0,0)",
        offset="(2,-.5,0)",
        to="(latent-east)",
        height=H1,
        depth=D512,
        width=W1,
        caption=f"FC",
        captionshift=20,
    ),
    to_Conv(
        "unsqueeze",
        s_filer="{{128×8}}",
        zlabeloffset=0.4,
        n_filer=32,
        offset="(2,0,0)",
        offset="(1.4,0,0)",
        to="(fc3-east)",
        height=H8,
        depth=D128,
@@ -62,7 +64,7 @@ arch = [
    # Reshape to 4×8×512
    to_UnPool(
        "up1",
        offset="(2,0,0)",
        offset="(1.2,0,0)",
        n_filer=32,
        to="(unsqueeze-east)",
        height=H16,
@@ -101,7 +103,8 @@ arch = [
        height=H16,
        depth=D1024,
        width=W32,
        caption="",
        caption="Deconv2",
        captionshift=20,
    ),
    to_Conv(
        "dwdeconv3",
@@ -112,7 +115,7 @@
        height=H16,
        depth=D1024,
        width=W1,
        caption="Deconv2",
        caption="",
    ),
    to_Conv(
        "dwdeconv4",
@@ -134,7 +137,8 @@
        height=H32,
        depth=D2048,
        width=W16,
        caption="",
        caption="Deconv3",
        captionshift=10,
    ),
    to_Conv(
        "dwdeconv5",
@@ -145,7 +149,7 @@
        height=H32,
        depth=D2048,
        width=W1,
        caption="Deconv3",
        caption="",
    ),
    to_Conv(
        "dwdeconv6",
@@ -164,7 +168,7 @@
        s_filer="{{2048×32}}",
        zlabeloffset=0.15,
        n_filer=1,
        offset="(2,0,0)",
        offset="(1.5,0,0)",
        to="(dwdeconv6-east)",
        height=H32,
        depth=D2048,
@@ -178,12 +182,13 @@
        s_filer="{{2048×32}}",
        zlabeloffset=0.15,
        n_filer=1,
        offset="(2,0,0)",
        offset="(1.5,0,0)",
        to="(outconv-east)",
        height=H32,
        depth=D2048,
        width=W1,
        caption="Output",
        captionshift=5,
    ),
    # to_connection("deconv2", "out"),
    to_end(),

@@ -28,6 +28,7 @@
{Box={
    name=latent,
    caption=Latent Space,
    captionshift=0,
    xlabel={{, }},
    zlabeloffset=0.3,
    zlabel=latent dim,
@@ -39,10 +40,11 @@
};


\pic[shift={(2,0,0)}] at (latent-east)
\pic[shift={(2,-.5,0)}] at (latent-east)
{Box={
    name=fc3,
    caption=FC,
    captionshift=20,
    xlabel={{" ","dummy"}},
    zlabeloffset=0.5,
    zlabel={{8×128×8}},
@@ -55,10 +57,11 @@
};


\pic[shift={(2,0,0)}] at (fc3-east)
\pic[shift={(1.4,0,0)}] at (fc3-east)
{Box={
    name=unsqueeze,
    caption=Unsqueeze,
    captionshift=0,
    xlabel={{32, }},
    zlabeloffset=0.4,
    zlabel={{128×8}},
@@ -70,10 +73,11 @@
};


\pic[shift={ (2,0,0) }] at (unsqueeze-east)
\pic[shift={ (1.2,0,0) }] at (unsqueeze-east)
{Box={
    name=up1,
    caption=,
    captionshift=0,
    fill=\UnpoolColor,
    opacity=0.5,
    xlabel={{32, }},
@@ -88,6 +92,7 @@
{Box={
    name=dwdeconv1,
    caption=Deconv1,
    captionshift=0,
    xlabel={{1, }},
    zlabeloffset=0.3,
    zlabel=,
@@ -103,6 +108,7 @@
{Box={
    name=dwdeconv2,
    caption=,
    captionshift=0,
    xlabel={{32, }},
    zlabeloffset=0.4,
    zlabel={{256×16}},
@@ -117,7 +123,8 @@
\pic[shift={ (2,0,0) }] at (dwdeconv2-east)
{Box={
    name=up2,
    caption=,
    caption=Deconv2,
    captionshift=20,
    fill=\UnpoolColor,
    opacity=0.5,
    xlabel={{32, }},
@@ -131,7 +138,8 @@
\pic[shift={(0,0,0)}] at (up2-east)
{Box={
    name=dwdeconv3,
    caption=Deconv2,
    caption=,
    captionshift=0,
    xlabel={{1, }},
    zlabeloffset=0.3,
    zlabel=,
@@ -147,6 +155,7 @@
{Box={
    name=dwdeconv4,
    caption=,
    captionshift=0,
    xlabel={{16, }},
    zlabeloffset=0.17,
    zlabel={{1024×16}},
@@ -161,7 +170,8 @@
\pic[shift={ (2,0,0) }] at (dwdeconv4-east)
{Box={
    name=up3,
    caption=,
    caption=Deconv3,
    captionshift=10,
    fill=\UnpoolColor,
    opacity=0.5,
    xlabel={{16, }},
@@ -175,7 +185,8 @@
\pic[shift={(0,0,0)}] at (up3-east)
{Box={
    name=dwdeconv5,
    caption=Deconv3,
    caption=,
    captionshift=0,
    xlabel={{1, }},
    zlabeloffset=0.3,
    zlabel=,
@@ -191,6 +202,7 @@
{Box={
    name=dwdeconv6,
    caption=,
    captionshift=0,
    xlabel={{8, }},
    zlabeloffset=0.15,
    zlabel={{2048×32}},
@@ -202,10 +214,11 @@
};


\pic[shift={(2,0,0)}] at (dwdeconv6-east)
\pic[shift={(1.5,0,0)}] at (dwdeconv6-east)
{Box={
    name=outconv,
    caption=Deconv4,
    captionshift=0,
    xlabel={{1, }},
    zlabeloffset=0.15,
    zlabel={{2048×32}},
@@ -217,10 +230,11 @@
};


\pic[shift={(2,0,0)}] at (outconv-east)
\pic[shift={(1.5,0,0)}] at (outconv-east)
{Box={
    name=out,
    caption=Output,
    captionshift=5,
    xlabel={{1, }},
    zlabeloffset=0.15,
    zlabel={{2048×32}},
BIN thesis/third_party/PlotNeuralNet/deepsad/arch_ef_encoder.pdf (vendored, new file)
@@ -125,7 +125,7 @@ arch = [
            n_filer=8,
            zlabeloffset=0.45,
            s_filer="{{128×8}}",
-           offset="(2,0,0)",
+           offset="(1,0,0)",
            to="(pool3-east)",
            height=H8,
            depth=D128,
@@ -137,12 +137,13 @@ arch = [
            "fc1",
            n_filer="{{8×128×8}}",
            zlabeloffset=0.5,
-           offset="(2,0,0)",
+           offset="(2,-.5,0)",
            to="(squeeze-east)",
            height=H1,
            depth=D512,
            width=W1,
-           caption=f"FC",
+           caption="FC",
+           captionshift=0,
        ),
        # to_connection("pool2", "fc1"),
        # --------------------------- LATENT ---------------------------
@@ -150,7 +151,7 @@ arch = [
            "latent",
            n_filer="",
            s_filer="latent dim",
-           offset="(2,0,0)",
+           offset="(1.3,0.5,0)",
            to="(fc1-east)",
            height=H8 * 1.6,
            depth=D1,
@@ -28,6 +28,7 @@
    {Box={
        name=input,
        caption=Input,
+       captionshift=0,
        xlabel={{1, }},
        zlabeloffset=0.2,
        zlabel={{2048×32}},
@@ -43,6 +44,7 @@
    {Box={
        name=dwconv1,
        caption=,
+       captionshift=0,
        xlabel={{1, }},
        zlabeloffset=0.3,
        zlabel=,
@@ -58,6 +60,7 @@
    {Box={
        name=dwconv2,
        caption=Conv1,
+       captionshift=0,
        xlabel={{16, }},
        zlabeloffset=0.15,
        zlabel={{2048×32}},
@@ -76,6 +79,7 @@
        zlabeloffset=0.3,
        zlabel={{512×32}},
        caption=,
+       captionshift=0,
        fill=\PoolColor,
        opacity=0.5,
        height=26,
@@ -89,6 +93,7 @@
    {Box={
        name=dwconv3,
        caption=,
+       captionshift=0,
        xlabel={{1, }},
        zlabeloffset=0.3,
        zlabel=,
@@ -104,6 +109,7 @@
    {Box={
        name=dwconv4,
        caption=Conv2,
+       captionshift=0,
        xlabel={{32, }},
        zlabeloffset=0.3,
        zlabel={{512×32}},
@@ -122,6 +128,7 @@
        zlabeloffset=0.45,
        zlabel={{256×16}},
        caption=,
+       captionshift=0,
        fill=\PoolColor,
        opacity=0.5,
        height=18,
@@ -138,6 +145,7 @@
        zlabeloffset=0.45,
        zlabel={{128×8}},
        caption=,
+       captionshift=0,
        fill=\PoolColor,
        opacity=0.5,
        height=12,
@@ -147,10 +155,11 @@
    };

-\pic[shift={(2,0,0)}] at (pool3-east)
+\pic[shift={(1,0,0)}] at (pool3-east)
    {Box={
        name=squeeze,
        caption=Squeeze,
+       captionshift=0,
        xlabel={{8, }},
        zlabeloffset=0.45,
        zlabel={{128×8}},
@@ -162,10 +171,11 @@
    };

-\pic[shift={(2,0,0)}] at (squeeze-east)
+\pic[shift={(2,-.5,0)}] at (squeeze-east)
    {Box={
        name=fc1,
        caption=FC,
+       captionshift=0,
        xlabel={{" ","dummy"}},
        zlabeloffset=0.5,
        zlabel={{8×128×8}},
@@ -178,10 +188,11 @@
    };

-\pic[shift={(2,0,0)}] at (fc1-east)
+\pic[shift={(1.3,0.5,0)}] at (fc1-east)
    {Box={
        name=latent,
        caption=Latent Space,
+       captionshift=0,
        xlabel={{, }},
        zlabeloffset=0.3,
        zlabel=latent dim,
BIN thesis/third_party/PlotNeuralNet/deepsad/arch_lenet_decoder.pdf (vendored, new file)
@@ -39,19 +39,20 @@ arch = [
            "fc3",
            n_filer="{{4×512×8}}",
            zlabeloffset=0.35,
-           offset="(2,0,0)",
+           offset="(2,-.5,0)",
            to="(latent-east)",
            height=1.3,
            depth=D512,
            width=W1,
            caption=f"FC",
+           captionshift=20,
        ),
        # to_connection("latent", "fc3"),
        # Reshape to 4×8×512
        to_UnPool(
            "up1",
            n_filer=4,
-           offset="(2,0,0)",
+           offset="(2.5,0,0)",
            to="(fc3-east)",
            height=H16,
            depth=D1024,
@@ -82,7 +83,8 @@ arch = [
            height=H32,
            depth=D2048,
            width=W8,
-           caption="",
+           caption="Deconv2",
+           captionshift=10,
        ),
        # to_connection("deconv1", "up2"),
        # DeConv2 (5×5, same): 8->1, 32×2048
@@ -96,7 +98,7 @@ arch = [
            height=H32,
            depth=D2048,
            width=W1,
-           caption="Deconv2",
+           caption="",
        ),
        # to_connection("up2", "deconv2"),
        # Output
@@ -111,6 +113,7 @@ arch = [
            depth=D2048,
            width=1.0,
            caption="Output",
+           captionshift=5,
        ),
        # to_connection("deconv2", "out"),
        to_end(),
@@ -28,6 +28,7 @@
    {Box={
        name=latent,
        caption=Latent Space,
+       captionshift=0,
        xlabel={{, }},
        zlabeloffset=0.3,
        zlabel=latent dim,
@@ -39,10 +40,11 @@
    };

-\pic[shift={(2,0,0)}] at (latent-east)
+\pic[shift={(2,-.5,0)}] at (latent-east)
    {Box={
        name=fc3,
        caption=FC,
+       captionshift=20,
        xlabel={{" ","dummy"}},
        zlabeloffset=0.35,
        zlabel={{4×512×8}},
@@ -55,10 +57,11 @@
    };

-\pic[shift={ (2,0,0) }] at (fc3-east)
+\pic[shift={ (2.5,0,0) }] at (fc3-east)
    {Box={
        name=up1,
        caption=,
+       captionshift=0,
        fill=\UnpoolColor,
        opacity=0.5,
        xlabel={{4, }},
@@ -73,6 +76,7 @@
    {Box={
        name=deconv1,
        caption=Deconv1,
+       captionshift=0,
        xlabel={{8, }},
        zlabeloffset=0.2,
        zlabel={{1024×16}},
@@ -87,7 +91,8 @@
\pic[shift={ (2,0,0) }] at (deconv1-east)
    {Box={
        name=up2,
-       caption=,
+       caption=Deconv2,
+       captionshift=10,
        fill=\UnpoolColor,
        opacity=0.5,
        xlabel={{8, }},
@@ -101,7 +106,8 @@
\pic[shift={(0,0,0)}] at (up2-east)
    {Box={
        name=deconv2,
-       caption=Deconv2,
+       caption=,
+       captionshift=0,
        xlabel={{1, }},
        zlabeloffset=0.15,
        zlabel={{2048×32}},
@@ -117,6 +123,7 @@
    {Box={
        name=out,
        caption=Output,
+       captionshift=5,
        xlabel={{1, }},
        zlabeloffset=0.15,
        zlabel={{2048×32}},
BIN thesis/third_party/PlotNeuralNet/deepsad/arch_lenet_encoder.pdf (vendored, new file)
@@ -91,13 +91,14 @@ arch = [
        to_fc(
            "fc1",
            n_filer="{{4×512×8}}",
-           offset="(2,0,0)",
+           offset="(2,-.5,0)",
            zlabeloffset=0.5,
            to="(pool2-east)",
            height=1.3,
            depth=D512,
            width=W1,
            caption=f"FC",
+           captionshift=20,
        ),
        # to_connection("pool2", "fc1"),
        # --------------------------- LATENT ---------------------------
@@ -28,6 +28,7 @@
    {Box={
        name=input,
        caption=Input,
+       captionshift=0,
        xlabel={{1, }},
        zlabeloffset=0.15,
        zlabel={{2048×32}},
@@ -43,6 +44,7 @@
    {Box={
        name=conv1,
        caption=Conv1,
+       captionshift=0,
        xlabel={{8, }},
        zlabeloffset=0.15,
        zlabel={{2048×32}},
@@ -61,6 +63,7 @@
        zlabeloffset=0.3,
        zlabel={{1024×16}},
        caption=,
+       captionshift=0,
        fill=\PoolColor,
        opacity=0.5,
        height=18,
@@ -74,6 +77,7 @@
    {Box={
        name=conv2,
        caption=Conv2,
+       captionshift=0,
        xlabel={{4, }},
        zlabeloffset=0.4,
        zlabel={{1024×16\hspace{2.5em}512×8}},
@@ -92,6 +96,7 @@
        zlabeloffset=0.3,
        zlabel={{}},
        caption=,
+       captionshift=0,
        fill=\PoolColor,
        opacity=0.5,
        height=12,
@@ -101,10 +106,11 @@
    };

-\pic[shift={(2,0,0)}] at (pool2-east)
+\pic[shift={(2,-.5,0)}] at (pool2-east)
    {Box={
        name=fc1,
        caption=FC,
+       captionshift=20,
        xlabel={{" ","dummy"}},
        zlabeloffset=0.5,
        zlabel={{4×512×8}},
@@ -121,6 +127,7 @@
    {Box={
        name=latent,
        caption=Latent Space,
+       captionshift=0,
        xlabel={{, }},
        zlabeloffset=0.3,
        zlabel=latent dim,
thesis/third_party/PlotNeuralNet/layers/Box.sty (vendored, 10 changed lines)
@@ -57,8 +57,12 @@
 \path (b1) edge ["\ylabel",midway] (a1); %height label

 \tikzstyle{captionlabel}=[text width=15*\LastEastx/\scale,text centered]
-\path (\LastEastx/2,-\y/2,+\z/2) + (0,-25pt) coordinate (cap)
+% \tikzstyle{captionlabel}=[text width=15*\LastEastx/\scale,text centered,xshift=\captionshift pt]
+% \path (\LastEastx/2,-\y/2,+\z/2) + (0,-25pt) coordinate (cap)
+% edge ["\textcolor{black}{ \bf \caption}"',captionlabel](cap) ; %Block caption/pic object label
+
+% Place caption: shift the coordinate by captionshift (NEW)
+\path (\LastEastx/2,-\y/2,+\z/2) + (\captionshift pt,-25pt) coordinate (cap)
 edge ["\textcolor{black}{ \bf \caption}"',captionlabel](cap) ; %Block caption/pic object label

 %Define nodes to be used outside on the pic object
@@ -103,6 +107,7 @@ ylabel/.store in=\ylabel,
 zlabel/.store in=\zlabel,
 zlabeloffset/.store in=\zlabeloffset,
 caption/.store in=\caption,
+captionshift/.store in=\captionshift,
 name/.store in=\name,
 fill/.store in=\fill,
 opacity/.store in=\opacity,
@@ -117,5 +122,6 @@ ylabel=,
 zlabel=,
 zlabeloffset=0.3,
 caption=,
+captionshift=0,
 name=,
 }
@@ -75,6 +75,7 @@ def to_Conv(
    height=40,
    depth=40,
    caption=" ",
+   captionshift=0,
):
    return (
        r"""
@@ -90,6 +91,9 @@ def to_Conv(
        caption="""
        + caption
        + r""",
+       captionshift="""
+       + str(captionshift)
+       + """,
        xlabel={{"""
        + str(n_filer)
        + """, }},
@@ -182,6 +186,7 @@ def to_Pool(
    depth=32,
    opacity=0.5,
    caption=" ",
+   captionshift=0,
):
    return (
        r"""
@@ -206,6 +211,9 @@ def to_Pool(
        caption="""
        + caption
        + r""",
+       captionshift="""
+       + str(captionshift)
+       + """,
        fill=\PoolColor,
        opacity="""
        + str(opacity)
@@ -236,6 +244,7 @@ def to_UnPool(
    depth=32,
    opacity=0.5,
    caption=" ",
+   captionshift=0,
):
    return (
        r"""
@@ -251,6 +260,9 @@ def to_UnPool(
        caption="""
        + caption
        + r""",
+       captionshift="""
+       + str(captionshift)
+       + r""",
        fill=\UnpoolColor,
        opacity="""
        + str(opacity)
@@ -335,6 +347,7 @@ def to_ConvSoftMax(
    height=40,
    depth=40,
    caption=" ",
+   captionshift=0,
):
    return (
        r"""
@@ -350,6 +363,9 @@ def to_ConvSoftMax(
        caption="""
        + caption
        + """,
+       captionshift="""
+       + str(captionshift)
+       + """,
        zlabel="""
        + str(s_filer)
        + """,
@@ -380,6 +396,7 @@ def to_SoftMax(
    depth=25,
    opacity=0.8,
    caption=" ",
+   captionshift=0,
    z_label_offset=0,
):
    return (
@@ -396,6 +413,9 @@ def to_SoftMax(
        caption="""
        + caption
        + """,
+       captionshift="""
+       + str(captionshift)
+       + """,
        xlabel={{" ","dummy"}},
        zlabel="""
        + str(s_filer)
@@ -455,6 +475,7 @@ def to_fc(
    height=2,
    depth=10,
    caption=" ",
+   captionshift=0,
    # titlepos=0,
):
    return (
@@ -471,6 +492,9 @@ def to_fc(
        caption="""
        + caption
        + """,
+       captionshift="""
+       + str(captionshift)
+       + """,
        xlabel={{" ","dummy"}},
        zlabeloffset="""
        + str(zlabeloffset)
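Note: taken together, the Box.sty and tikzeng changes above thread a single new captionshift knob (in pt) from the Python layer helpers down to the TikZ caption coordinate. A minimal sketch of how the patched helpers would be driven (assuming PlotNeuralNet's usual pycore.tikzeng scaffolding; the captionshift argument exists only in this patched fork, not upstream):

# sketch_captionshift.py -- illustration only, assumes the patched fork above
from pycore.tikzeng import to_head, to_cor, to_begin, to_Conv, to_end

arch = [
    to_head(".."),
    to_cor(),
    to_begin(),
    # captionshift=10 nudges this box's caption 10pt to the right so that
    # captions of adjacent boxes do not collide
    to_Conv("conv1", s_filer=32, n_filer=8, offset="(0,0,0)", to="(0,0,0)",
            height=32, depth=32, width=2, caption="Conv1", captionshift=10),
    to_end(),
]

# write the generated TikZ document; compile it like any other PlotNeuralNet arch
with open("arch_demo.tex", "w") as f:
    f.write("".join(arch))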
@@ -1,8 +1,7 @@
 from pathlib import Path

 import polars as pl

-from plot_scripts.load_results import (
+from load_results import (
     load_pretraining_results_dataframe,
     load_results_dataframe,
 )
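The import rewrite above (repeated in the scripts further down) drops the plot_scripts package prefix, i.e. the plotting scripts are now meant to be run directly from inside tools/plot_scripts/. If both invocation styles had to keep working, a guarded import would be one option (a sketch, not part of this commit):

try:
    # package-style: repository root on sys.path, plot_scripts imported as a package
    from plot_scripts.load_results import (
        load_pretraining_results_dataframe,
        load_results_dataframe,
    )
except ImportError:
    # script-style: executed from within tools/plot_scripts/
    from load_results import (
        load_pretraining_results_dataframe,
        load_results_dataframe,
    )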
@@ -1,6 +1,6 @@
 { pkgs, ... }:
 let
-  native_dependencies = with pkgs.python312Packages; [
+  native_dependencies = with pkgs.python311Packages; [
     torch-bin
     torchvision-bin
     aggdraw # for visualtorch
@@ -16,7 +16,7 @@ in
   packages = native_dependencies ++ tools;
   languages.python = {
     enable = true;
-    package = pkgs.python312;
+    package = pkgs.python311;
     uv = {
       enable = true;
       sync.enable = true;
@@ -12,7 +12,7 @@ import numpy as np
import polars as pl

# CHANGE THIS IMPORT IF YOUR LOADER MODULE IS NAMED DIFFERENTLY
-from plot_scripts.load_results import load_pretraining_results_dataframe
+from load_results import load_pretraining_results_dataframe

# ----------------------------
# Config
@@ -78,8 +78,8 @@ def build_arch_curves_from_df(
        "overall": (dims, means, stds),
    } }
    """
-   if "split" not in df.columns:
-       raise ValueError("Expected 'split' column in AE dataframe.")
+   # if "split" not in df.columns:
+   #     raise ValueError("Expected 'split' column in AE dataframe.")
    if "scores" not in df.columns:
        raise ValueError("Expected 'scores' column in AE dataframe.")
    if "network" not in df.columns or "latent_dim" not in df.columns:
@@ -88,7 +88,7 @@ def build_arch_curves_from_df(
        raise ValueError(f"Expected '{label_field}' column in AE dataframe.")

    # Keep only test split
-   df = df.filter(pl.col("split") == "test")
+   # df = df.filter(pl.col("split") == "test")

    groups: dict[tuple[str, int], dict[str, list[float]]] = {}

@@ -201,7 +201,7 @@ def plot_multi_loss_curve(arch_results, title, output_path, colors=None):

    plt.xlabel("Latent Dimensionality")
    plt.ylabel("Test Loss")
-   plt.title(title)
+   # plt.title(title)
    plt.legend()
    plt.grid(True, alpha=0.3)
    plt.xticks(all_dims)
@@ -171,28 +171,28 @@ def plot_combined_timeline(
        range(num_bins), near_sensor_binned, color=color, linestyle="--", alpha=0.6
    )

-   # Add vertical lines for manually labeled frames if available
-   if all_paths[i].with_suffix(".npy").name in manually_labeled_anomaly_frames:
-       begin_frame, end_frame = manually_labeled_anomaly_frames[
-           all_paths[i].with_suffix(".npy").name
-       ]
-       # Convert frame numbers to normalized timeline positions
-       begin_pos = (begin_frame / exp_len) * (num_bins - 1)
-       end_pos = (end_frame / exp_len) * (num_bins - 1)
+   # # Add vertical lines for manually labeled frames if available
+   # if all_paths[i].with_suffix(".npy").name in manually_labeled_anomaly_frames:
+   #     begin_frame, end_frame = manually_labeled_anomaly_frames[
+   #         all_paths[i].with_suffix(".npy").name
+   #     ]
+   #     # Convert frame numbers to normalized timeline positions
+   #     begin_pos = (begin_frame / exp_len) * (num_bins - 1)
+   #     end_pos = (end_frame / exp_len) * (num_bins - 1)

-       # Add vertical lines with matching color and loose dotting
-       ax1.axvline(
-           x=begin_pos,
-           color=color,
-           linestyle=":",
-           alpha=0.6,
-       )
-       ax1.axvline(
-           x=end_pos,
-           color=color,
-           linestyle=":",
-           alpha=0.6,
-       )
+   #     # Add vertical lines with matching color and loose dotting
+   #     ax1.axvline(
+   #         x=begin_pos,
+   #         color=color,
+   #         linestyle=":",
+   #         alpha=0.6,
+   #     )
+   #     ax1.axvline(
+   #         x=end_pos,
+   #         color=color,
+   #         linestyle=":",
+   #         alpha=0.6,
+   #     )

    # Customize axes
    ax1.set_xlabel("Normalized Timeline")
@@ -202,7 +202,7 @@ def plot_combined_timeline(
    ax1.set_ylabel("Missing Points (%)")
    ax2.set_ylabel("Points with <0.5m Range (%)")

-   plt.title(title)
+   # plt.title(title)

    # Create legends without fixed positions
    # First get all lines and labels for experiments
@@ -221,7 +221,8 @@ def plot_combined_timeline(
    )

    # Create single legend in top right corner with consistent margins
-   fig.legend(all_handles, all_labels, loc="upper right", borderaxespad=4.8)
+   # fig.legend(all_handles, all_labels, loc="upper right", borderaxespad=2.8)
+   fig.legend(all_handles, all_labels, bbox_to_anchor=(0.95, 0.99))

    plt.grid(True, alpha=0.3)
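The legend change above replaces corner placement plus borderaxespad with an explicit bbox_to_anchor, which pins the legend anchor in figure coordinates regardless of axes padding. A self-contained sketch of the two styles (toy data, names are illustrative):

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
(line,) = ax.plot([0, 1], [0, 1], label="experiment 1")

# old style: legend in the upper-right corner, padded inwards in font-size units
# fig.legend([line], ["experiment 1"], loc="upper right", borderaxespad=4.8)

# new style: anchor point fixed at (0.95, 0.99) in figure coordinates
fig.legend([line], ["experiment 1"], bbox_to_anchor=(0.95, 0.99))
fig.savefig("legend_demo.png", dpi=150)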
@@ -122,8 +122,8 @@ def plot_data_points_pie(normal_experiment_frames, anomaly_experiment_frames):

    # prepare data for pie chart
    labels = [
-       "Normal Lidar Frames\nNon-Degraded Pointclouds",
-       "Anomalous Lidar Frames\nDegraded Pointclouds",
+       "Normal Lidar Frames\nNon-Degraded Point Clouds",
+       "Anomalous Lidar Frames\nDegraded Point Clouds",
    ]
    sizes = [total_normal_frames, total_anomaly_frames]
    explode = (0.1, 0)  # explode the normal slice
@@ -150,9 +150,9 @@ def plot_data_points_pie(normal_experiment_frames, anomaly_experiment_frames):
        va="center",
        color="black",
    )
-   plt.title(
-       "Distribution of Normal and Anomalous\nPointclouds in all Experiments (Lidar Frames)"
-   )
+   # plt.title(
+   #     "Distribution of Normal and Anomalous\nPointclouds in all Experiments (Lidar Frames)"
+   # )
    plt.tight_layout()

    # save the plot
@@ -5,7 +5,6 @@ from pathlib import Path

import matplotlib.pyplot as plt
import numpy as np
from pointcloudset import Dataset

# define data path containing the bag files
all_data_path = Path("/home/fedex/mt/data/subter")
@@ -82,7 +81,7 @@ def plot_data_points(normal_experiment_paths, anomaly_experiment_paths, title):
    plt.figure(figsize=(10, 5))
    plt.hist(missing_points_normal, bins=100, alpha=0.5, label="Normal Experiments")
    plt.hist(missing_points_anomaly, bins=100, alpha=0.5, label="Anomaly Experiments")
-   plt.title(title)
+   # plt.title(title)
    plt.xlabel("Number of Missing Points")
    plt.ylabel("Number of Pointclouds")
    plt.legend()
@@ -109,7 +108,7 @@ def plot_data_points(normal_experiment_paths, anomaly_experiment_paths, title):
        label="Anomaly Experiments",
        orientation="horizontal",
    )
-   plt.title(title)
+   # plt.title(title)
    plt.xlabel("Number of Pointclouds")
    plt.ylabel("Number of Missing Points")
    plt.legend()
@@ -142,7 +141,7 @@ def plot_data_points(normal_experiment_paths, anomaly_experiment_paths, title):
        label="Anomaly Experiments",
        density=True,
    )
-   plt.title(title)
+   # plt.title(title)
    plt.xlabel("Number of Missing Points")
    plt.ylabel("Density")
    plt.legend()
@@ -169,7 +168,7 @@ def plot_data_points(normal_experiment_paths, anomaly_experiment_paths, title):
        label="Anomaly Experiments (With Artifical Smoke)",
        density=True,
    )
-   plt.title(title)
+   # plt.title(title)
    plt.xlabel("Percentage of Missing Lidar Measurements")
    plt.ylabel("Density")
    # display the x axis as percentages
@@ -210,7 +209,7 @@ def plot_data_points(normal_experiment_paths, anomaly_experiment_paths, title):
        alpha=0.5,
        label="Anomaly Experiments",
    )
-   plt.title(title)
+   # plt.title(title)
    plt.xlabel("Number of Missing Points")
    plt.ylabel("Normalized Density")
    plt.legend()
@@ -5,7 +5,6 @@ from pathlib import Path

import matplotlib.pyplot as plt
import numpy as np
from pointcloudset import Dataset

# define data path containing the bag files
all_data_path = Path("/home/fedex/mt/data/subter")
@@ -164,7 +163,7 @@ def plot_data_points(normal_experiment_paths, anomaly_experiment_paths, title):
    plt.gca().set_yticklabels(
        ["{:.0f}%".format(y * 100) for y in plt.gca().get_yticks()]
    )
-   plt.title("Particles Closer than 0.5m to the Sensor")
+   # plt.title("Particles Closer than 0.5m to the Sensor")
    plt.ylabel("Percentage of measurements closer than 0.5m")
    plt.tight_layout()
    plt.savefig(output_datetime_path / f"particles_near_sensor_boxplot_{rt}.png")
@@ -186,7 +185,7 @@ def plot_data_points(normal_experiment_paths, anomaly_experiment_paths, title):
    plt.gca().set_yticklabels(
        ["{:.0f}%".format(y * 100) for y in plt.gca().get_yticks()]
    )
-   plt.title("Particles Closer than 0.5m to the Sensor")
+   # plt.title("Particles Closer than 0.5m to the Sensor")
    plt.ylabel("Percentage of measurements closer than 0.5m")
    plt.ylim(0, 0.05)
    plt.tight_layout()
@@ -112,18 +112,27 @@ cmap = get_colormap_with_special_missing_color(
    args.colormap, args.missing_data_color, args.reverse_colormap
)

-# --- Create a figure with 2 vertical subplots ---
+# --- Create a figure with 2 vertical subplots and move titles to the left ---
fig, (ax1, ax2) = plt.subplots(nrows=2, ncols=1, figsize=(10, 5))
-for ax, frame, title in zip(
+# leave extra left margin for the left-side labels
+fig.subplots_adjust(left=0.14, hspace=0.05)
+
+for ax, frame, label in zip(
    (ax1, ax2),
    (frame1, frame2),
-   (
-       "Projection of Lidar Frame without Degradation",
-       "Projection of Lidar Frame with Degradation (Artifical Smoke)",
-   ),
+   ("(a)", "(b)"),
):
    im = ax.imshow(frame, cmap=cmap, aspect="auto", vmin=global_vmin, vmax=global_vmax)
-   ax.set_title(title)
+   # place the "title" to the left, vertically centered relative to the axes
+   ax.text(
+       -0.02,  # negative x places text left of the axes (in axes coordinates)
+       0.5,
+       label,
+       transform=ax.transAxes,
+       va="center",
+       ha="right",
+       fontsize=12,
+   )
    ax.axis("off")

# Adjust layout to fit margins for a paper
@@ -26,7 +26,8 @@ SCHEMA_STATIC = {
    "eval": pl.Utf8,  # "exp_based" | "manual_based"
    "fold": pl.Int32,
    # metrics
-   "auc": pl.Float64,
+   "roc_auc": pl.Float64,  # <-- renamed from 'auc'
+   "prc_auc": pl.Float64,  # <-- new
    "ap": pl.Float64,
    # per-sample scores: list of (idx, label, score)
    "scores": pl.List(
@@ -75,7 +76,6 @@ PRETRAIN_SCHEMA = {
    "semi_anomalous": pl.Int32,
    "model": pl.Utf8,  # always "ae"
    "fold": pl.Int32,
-   "split": pl.Utf8,  # "train" | "test"
    # timings and optimization
    "train_time": pl.Float64,
    "test_time": pl.Float64,
@@ -115,6 +115,43 @@ SCHEMA_INFERENCE = {
# ------------------------------------------------------------
# Helpers: curve/scores normalizers (tuples/ndarrays -> dict/list)
# ------------------------------------------------------------

+def compute_prc_auc_from_curve(prc_curve: dict | None) -> float | None:
+    """
+    Compute AUC of the Precision-Recall curve via trapezoidal rule.
+    Expects prc_curve = {"precision": [...], "recall": [...], "thr": [...] (optional)}.
+    Robust to NaNs, unsorted recall, and missing endpoints; returns np.nan if empty.
+    """
+    if not prc_curve:
+        return np.nan
+    precision = np.asarray(prc_curve.get("precision", []), dtype=float)
+    recall = np.asarray(prc_curve.get("recall", []), dtype=float)
+    if precision.size == 0 or recall.size == 0:
+        return np.nan
+
+    mask = ~(np.isnan(precision) | np.isnan(recall))
+    precision, recall = precision[mask], recall[mask]
+    if recall.size == 0:
+        return np.nan
+
+    # Sort by recall, clip to [0,1]
+    order = np.argsort(recall)
+    recall = np.clip(recall[order], 0.0, 1.0)
+    precision = np.clip(precision[order], 0.0, 1.0)
+
+    # Ensure curve spans [0,1] in recall (hold precision constant at ends)
+    if recall[0] > 0.0:
+        recall = np.insert(recall, 0, 0.0)
+        precision = np.insert(precision, 0, precision[0])
+    if recall[-1] < 1.0:
+        recall = np.append(recall, 1.0)
+        precision = np.append(precision, precision[-1])
+
+    # Trapezoidal AUC
+    return float(np.trapezoid(precision, recall))
+
+
def _tolist(x):
    if x is None:
        return None
@@ -358,23 +395,28 @@ def rows_from_ocsvm_default(data: dict, evals: List[str]) -> Dict[str, dict]:
# Build the Polars DataFrame
# ------------------------------------------------------------
def load_results_dataframe(root: Path, allow_cache: bool = True) -> pl.DataFrame:
    """
    Walks experiment subdirs under `root`. For each (model, fold) it adds rows:
    Columns (SCHEMA_STATIC):
        network, latent_dim, semi_normals, semi_anomalous,
        model, eval, fold,
        auc, ap, scores{sample_idx,orig_label,score},
        roc_curve{fpr,tpr,thr}, prc_curve{precision,recall,thr},
        sample_indices, sample_labels, valid_mask,
        train_time, test_time,
        folder, k_fold_num
    """
    if allow_cache:
        cache = root / "results_cache.parquet"
        if cache.exists():
            try:
                df = pl.read_parquet(cache)
                print(f"[info] loaded cached results frame from {cache}")
+               # Backward-compat: old caches may have 'auc' but no 'roc_auc'/'prc_auc'
+               if "roc_auc" not in df.columns and "auc" in df.columns:
+                   df = df.rename({"auc": "roc_auc"})
+               if "prc_auc" not in df.columns and "prc_curve" in df.columns:
+                   df = df.with_columns(
+                       pl.struct(
+                           pl.col("prc_curve").struct.field("precision"),
+                           pl.col("prc_curve").struct.field("recall"),
+                       )
+                       .map_elements(
+                           lambda s: compute_prc_auc_from_curve(
+                               {"precision": s[0], "recall": s[1]}
+                           )
+                       )
+                       .alias("prc_auc")
+                   )
                return df
            except Exception as e:
                print(f"[warn] failed to load cache {cache}: {e}")
@@ -409,15 +451,17 @@ def load_results_dataframe(root: Path, allow_cache: bool = True) -> pl.DataFrame
            continue

        if model == "deepsad":
-           per_eval = rows_from_deepsad(data, EVALS)  # eval -> dict
+           per_eval = rows_from_deepsad(data, EVALS)
        elif model == "isoforest":
-           per_eval = rows_from_isoforest(data, EVALS)  # eval -> dict
+           per_eval = rows_from_isoforest(data, EVALS)
        elif model == "ocsvm":
-           per_eval = rows_from_ocsvm_default(data, EVALS)  # eval -> dict
+           per_eval = rows_from_ocsvm_default(data, EVALS)
        else:
            per_eval = {}

        for ev, vals in per_eval.items():
+           # compute prc_auc now (fast), rename auc->roc_auc
+           prc_auc_val = compute_prc_auc_from_curve(vals.get("prc"))
            rows.append(
                {
                    "network": network,
@@ -427,7 +471,8 @@ def load_results_dataframe(root: Path, allow_cache: bool = True) -> pl.DataFrame
                    "model": model,
                    "eval": ev,
                    "fold": fold,
-                   "auc": vals["auc"],
+                   "roc_auc": vals["auc"],  # renamed
+                   "prc_auc": prc_auc_val,  # new
                    "ap": vals["ap"],
                    "scores": vals["scores"],
                    "roc_curve": vals["roc"],
@@ -443,20 +488,19 @@ def load_results_dataframe(root: Path, allow_cache: bool = True) -> pl.DataFrame
                }
            )

-   # If empty, return a typed empty frame
    if not rows:
+       # Return a typed empty frame (new schema)
        return pl.DataFrame(schema=SCHEMA_STATIC)

    df = pl.DataFrame(rows, schema=SCHEMA_STATIC)

-   # Cast to efficient dtypes (categoricals etc.) – no extra sanitation
+   # Cast to efficient dtypes (categoricals etc.)
    df = df.with_columns(
        pl.col("network", "model", "eval").cast(pl.Categorical),
        pl.col(
            "latent_dim", "semi_normals", "semi_anomalous", "fold", "k_fold_num"
        ).cast(pl.Int32),
-       pl.col("auc", "ap", "train_time", "test_time").cast(pl.Float64),
+       # NOTE: no cast on 'scores' here; it's already List(Struct) per schema.
+       pl.col("roc_auc", "prc_auc", "ap", "train_time", "test_time").cast(pl.Float64),
    )

    if allow_cache:
@@ -577,7 +621,7 @@ def load_pretraining_results_dataframe(

    # Cast/optimize a bit (categoricals, ints, floats)
    df = df.with_columns(
-       pl.col("network", "model", "split").cast(pl.Categorical),
+       pl.col("network", "model").cast(pl.Categorical),
        pl.col(
            "latent_dim", "semi_normals", "semi_anomalous", "fold", "k_fold_num"
        ).cast(pl.Int32),
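As a sanity check on the new compute_prc_auc_from_curve above: because the recall endpoints are padded with constant precision, a constant-precision curve integrates to exactly that precision. A condensed, self-contained re-derivation of the helper with two toy curves (toy numbers, not project results; np.trapezoid requires NumPy >= 2.0, matching the loader):

import numpy as np

def prc_auc(prc_curve):
    # same steps as the loader's helper: mask NaNs, sort by recall,
    # clip to [0, 1], pad the recall endpoints, integrate trapezoidally
    precision = np.asarray(prc_curve.get("precision", []), dtype=float)
    recall = np.asarray(prc_curve.get("recall", []), dtype=float)
    mask = ~(np.isnan(precision) | np.isnan(recall))
    precision, recall = precision[mask], recall[mask]
    order = np.argsort(recall)
    recall = np.clip(recall[order], 0.0, 1.0)
    precision = np.clip(precision[order], 0.0, 1.0)
    if recall[0] > 0.0:
        recall = np.insert(recall, 0, 0.0)
        precision = np.insert(precision, 0, precision[0])
    if recall[-1] < 1.0:
        recall = np.append(recall, 1.0)
        precision = np.append(precision, precision[-1])
    return float(np.trapezoid(precision, recall))

print(prc_auc({"precision": [1.0, 1.0], "recall": [0.0, 1.0]}))  # 1.0, perfect detector
print(prc_auc({"precision": [0.5, 0.5], "recall": [0.0, 1.0]}))  # 0.5, constant precision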
tools/plot_scripts/results_ae_table.py (new file, 306 lines)
@@ -0,0 +1,306 @@
# ae_losses_table_from_df.py

from __future__ import annotations

import shutil
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Tuple

import numpy as np
import polars as pl

# CHANGE THIS IMPORT IF YOUR LOADER MODULE IS NAMED DIFFERENTLY
from load_results import load_pretraining_results_dataframe

# ----------------------------
# Config
# ----------------------------
ROOT = Path("/home/fedex/mt/results/copy")  # experiments root you pass to the loader
OUTPUT_DIR = Path("/home/fedex/mt/plots/results_ae_table")

# Which label field to use from the DF; "labels_exp_based" or "labels_manual_based"
LABEL_FIELD = "labels_exp_based"

# Which architectures to include (labels must match canonicalize_network)
WANTED_NETS = {"LeNet", "Efficient"}

# Formatting
DECIMALS = 4  # how many decimals to display for losses
BOLD_BEST = False  # set True to bold per-group best (lower is better)
LOWER_IS_BETTER = True  # for losses we want the minimum


# ----------------------------
# Helpers (ported/minified from your plotting script)
# ----------------------------
def canonicalize_network(name: str) -> str:
    low = (name or "").lower()
    if "lenet" in low:
        return "LeNet"
    if "efficient" in low:
        return "Efficient"
    return name or "unknown"


def calculate_batch_mean_loss(scores: np.ndarray, batch_size: int) -> float:
    n = len(scores)
    if n == 0:
        return np.nan
    if batch_size <= 0:
        batch_size = n
    n_batches = (n + batch_size - 1) // batch_size
    acc = 0.0
    for i in range(0, n, batch_size):
        acc += float(np.mean(scores[i : i + batch_size]))
    return acc / n_batches


def extract_batch_size(cfg_json: str) -> int:
    import json

    try:
        cfg = json.loads(cfg_json) if cfg_json else {}
    except Exception:
        cfg = {}
    return int(cfg.get("ae_batch_size") or cfg.get("batch_size") or 256)


@dataclass(frozen=True)
class Cell:
    mean: float | None
    std: float | None


def _fmt(mean: float | None) -> str:
    return "--" if (mean is None or not (mean == mean)) else f"{mean:.{DECIMALS}f}"


def _bold_mask_display(
    values: List[float | None], decimals: int, lower_is_better: bool
) -> List[bool]:
    """
    Tie-aware bolding mask based on *displayed* precision.
    For losses, lower is better (min). For metrics where higher is better, set lower_is_better=False.
    """

    def disp(v: float | None) -> float | None:
        if v is None or not (v == v):
            return None
        # use string → float to match display rounding exactly
        return float(f"{v:.{decimals}f}")

    rounded = [disp(v) for v in values]
    finite = [v for v in rounded if v is not None]
    if not finite:
        return [False] * len(values)
    target = min(finite) if lower_is_better else max(finite)
    return [(v is not None and v == target) for v in rounded]


# ----------------------------
# Core
# ----------------------------
def build_losses_table_from_df(
    df: pl.DataFrame, label_field: str
) -> Tuple[str, float | None]:
    """
    Build a LaTeX table showing Overall loss (LeNet, Efficient) and Anomaly loss (LeNet, Efficient)
    with one row per latent dimension. Returns (latex_table_string, max_std_overall).
    """
    # Basic validation
    required_cols = {"scores", "network", "latent_dim"}
    missing = required_cols - set(df.columns)
    if missing:
        raise ValueError(f"Missing required columns in AE dataframe: {missing}")
    if label_field not in df.columns:
        raise ValueError(f"Expected '{label_field}' column in AE dataframe.")

    # Canonicalize nets, compute per-row overall/anomaly losses
    rows: List[dict] = []
    for row in df.iter_rows(named=True):
        net = canonicalize_network(row["network"])
        if WANTED_NETS and net not in WANTED_NETS:
            continue
        dim = int(row["latent_dim"])
        batch_size = extract_batch_size(row.get("config_json"))
        scores = np.asarray(row["scores"] or [], dtype=float)

        labels = row.get(label_field)
        labels = np.asarray(labels, dtype=int) if labels is not None else None

        overall_loss = calculate_batch_mean_loss(scores, batch_size)

        anomaly_loss = np.nan
        if labels is not None and labels.size == scores.size:
            anomaly_scores = scores[labels == -1]
            if anomaly_scores.size > 0:
                anomaly_loss = calculate_batch_mean_loss(anomaly_scores, batch_size)

        rows.append(
            {
                "net": net,
                "latent_dim": dim,
                "overall": overall_loss,
                "anomaly": anomaly_loss,
            }
        )

    if not rows:
        raise ValueError(
            "No rows available after filtering; check WANTED_NETS or input data."
        )

    df2 = pl.DataFrame(rows)

    # Aggregate across folds per (net, latent_dim)
    agg = df2.group_by(["net", "latent_dim"]).agg(
        pl.col("overall").mean().alias("overall_mean"),
        pl.col("overall").std().alias("overall_std"),
        pl.col("anomaly").mean().alias("anomaly_mean"),
        pl.col("anomaly").std().alias("anomaly_std"),
    )

    # Collect union of dims across both nets
    dims = sorted(set(agg.get_column("latent_dim").to_list()))

    # Build lookup
    keymap: Dict[Tuple[str, int], Cell] = {}
    keymap_anom: Dict[Tuple[str, int], Cell] = {}

    max_std: float | None = None

    def push_std(v: float | None):
        nonlocal max_std
        if v is None or not (v == v):
            return
        if max_std is None or v > max_std:
            max_std = v

    for r in agg.iter_rows(named=True):
        k = (r["net"], int(r["latent_dim"]))
        keymap[k] = Cell(r.get("overall_mean"), r.get("overall_std"))
        keymap_anom[k] = Cell(r.get("anomaly_mean"), r.get("anomaly_std"))
        push_std(r.get("overall_std"))
        push_std(r.get("anomaly_std"))

    # Ensure nets order consistent
    nets_order = ["LeNet", "Efficient"]
    nets_present = [n for n in nets_order if any(k[0] == n for k in keymap.keys())]
    if not nets_present:
        nets_present = sorted({k[0] for k in keymap.keys()})

    # Build LaTeX table
    header_left = [r"LeNet", r"Efficient"]
    header_right = [r"LeNet", r"Efficient"]

    lines: List[str] = []
    lines.append(r"\begin{table}[t]")
    lines.append(r"\centering")
    lines.append(r"\setlength{\tabcolsep}{4pt}")
    lines.append(r"\renewcommand{\arraystretch}{1.2}")
    # vertical bar between the two groups
    lines.append(r"\begin{tabularx}{\textwidth}{c*{2}{Y}|*{2}{Y}}")
    lines.append(r"\toprule")
    lines.append(
        r" & \multicolumn{2}{c}{Overall loss} & \multicolumn{2}{c}{Anomaly loss} \\"
    )
    lines.append(r"\cmidrule(lr){2-3} \cmidrule(lr){4-5}")
    lines.append(
        r"Latent Dim. & "
        + " & ".join(header_left)
        + " & "
        + " & ".join(header_right)
        + r" \\"
    )
    lines.append(r"\midrule")

    for d in dims:
        # Gather values in order: Overall (LeNet, Efficient), Anomaly (LeNet, Efficient)
        overall_vals = [keymap.get((n, d), Cell(None, None)).mean for n in nets_present]
        anomaly_vals = [
            keymap_anom.get((n, d), Cell(None, None)).mean for n in nets_present
        ]
        overall_strs = [_fmt(v) for v in overall_vals]
        anomaly_strs = [_fmt(v) for v in anomaly_vals]

        if BOLD_BEST:
            mask_overall = _bold_mask_display(overall_vals, DECIMALS, LOWER_IS_BETTER)
            mask_anom = _bold_mask_display(anomaly_vals, DECIMALS, LOWER_IS_BETTER)
            overall_strs = [
                (r"\textbf{" + s + "}") if (m and s != "--") else s
                for s, m in zip(overall_strs, mask_overall)
            ]
            anomaly_strs = [
                (r"\textbf{" + s + "}") if (m and s != "--") else s
                for s, m in zip(anomaly_strs, mask_anom)
            ]

        lines.append(
            f"{d} & "
            + " & ".join(overall_strs)
            + " & "
            + " & ".join(anomaly_strs)
            + r" \\"
        )

    lines.append(r"\bottomrule")
    lines.append(r"\end{tabularx}")

    max_std_str = "n/a" if max_std is None else f"{max_std:.{DECIMALS}f}"
    lines.append(
        rf"\caption{{Autoencoder pre-training MSE losses (test split) across latent dimensions. "
        rf"Left: overall loss; Right: anomaly-only loss. "
        rf"Cells show means across folds (no $\pm$std). "
        rf"Maximum observed standard deviation across all cells (not shown): {max_std_str}.}}"
    )
    lines.append(r"\end{table}")

    return "\n".join(lines), max_std


# ----------------------------
# Entry
# ----------------------------
def main():
    df = load_pretraining_results_dataframe(ROOT, allow_cache=True)

    # Build LaTeX table
    tex, max_std = build_losses_table_from_df(df, LABEL_FIELD)

    # Output dirs
    OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
    ts_dir = OUTPUT_DIR / "archive" / datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    ts_dir.mkdir(parents=True, exist_ok=True)

    out_name = "ae_pretraining_losses_table.tex"
    out_path = ts_dir / out_name
    out_path.write_text(tex, encoding="utf-8")

    # Save a copy of this script
    script_path = Path(__file__)
    try:
        shutil.copy2(script_path, ts_dir / script_path.name)
    except Exception:
        pass

    # Mirror latest
    latest = OUTPUT_DIR / "latest"
    latest.mkdir(parents=True, exist_ok=True)
    # Clear
    for f in latest.iterdir():
        if f.is_file():
            f.unlink()
    # Copy
    for f in ts_dir.iterdir():
        if f.is_file():
            shutil.copy2(f, latest / f.name)

    print(f"Saved table to: {ts_dir}")
    print(f"Also updated: {latest}")
    print(f" - {out_name}")


if __name__ == "__main__":
    main()
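One detail worth flagging in the table script above: calculate_batch_mean_loss averages within batches first and then across batches, so it reproduces the batch-mean convention from training rather than a plain sample mean; the two differ whenever the last batch is short. A quick check (toy numbers):

import numpy as np

def calculate_batch_mean_loss(scores, batch_size):
    # identical logic to results_ae_table.py above
    n = len(scores)
    if n == 0:
        return np.nan
    if batch_size <= 0:
        batch_size = n
    n_batches = (n + batch_size - 1) // batch_size
    acc = 0.0
    for i in range(0, n, batch_size):
        acc += float(np.mean(scores[i : i + batch_size]))
    return acc / n_batches

scores = np.array([1.0, 1.0, 1.0, 4.0])      # second batch holds a single sample
print(np.mean(scores))                        # 1.75, plain mean over samples
print(calculate_batch_mean_loss(scores, 3))   # 2.5 = mean of batch means (1.0, 4.0)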
tools/plot_scripts/results_ap_over_latent.py (new file, 273 lines)
@@ -0,0 +1,273 @@
#!/usr/bin/env python3
from __future__ import annotations

import shutil
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Tuple

import matplotlib.pyplot as plt
import numpy as np
import polars as pl
from matplotlib.ticker import MaxNLocator

# =========================
# Config
# =========================
ROOT = Path("/home/fedex/mt/results/copy")
OUTPUT_DIR = Path("/home/fedex/mt/plots/results_ap_over_latent")

# Labeling regimes (shown as separate subplots)
SEMI_LABELING_REGIMES: list[tuple[int, int]] = [(0, 0), (50, 10), (500, 100)]

# Evaluations: separate figure per eval
EVALS: list[str] = ["exp_based", "manual_based"]

# X-axis (latent dims)
LATENT_DIMS: list[int] = [32, 64, 128, 256, 512, 768, 1024]

# Visual style
FIGSIZE = (8, 8)  # one tall figure with 3 compact subplots
MARKERSIZE = 7
SCATTER_ALPHA = 0.95
LINEWIDTH = 2.0
TREND_LINEWIDTH = 2.2
BAND_ALPHA = 0.18

# Toggle: show ±1 std bands (k-fold variability)
SHOW_STD_BANDS = True  # <<< set to False to hide the bands

# Colors for the two DeepSAD backbones
COLOR_LENET = "#1f77b4"  # blue
COLOR_EFFICIENT = "#ff7f0e"  # orange

# =========================
# Loader
# =========================
from load_results import load_results_dataframe


# =========================
# Helpers
# =========================
def _with_net_label(df: pl.DataFrame) -> pl.DataFrame:
    return df.with_columns(
        pl.when(
            pl.col("network").cast(pl.Utf8).str.to_lowercase().str.contains("lenet")
        )
        .then(pl.lit("LeNet"))
        .when(
            pl.col("network").cast(pl.Utf8).str.to_lowercase().str.contains("efficient")
        )
        .then(pl.lit("Efficient"))
        .otherwise(pl.col("network").cast(pl.Utf8))
        .alias("net_label")
    )


def _filter_deepsad(df: pl.DataFrame) -> pl.DataFrame:
    return df.filter(
        (pl.col("model") == "deepsad")
        & (pl.col("eval").is_in(EVALS))
        & (pl.col("latent_dim").is_in(LATENT_DIMS))
        & (pl.col("net_label").is_in(["LeNet", "Efficient"]))
    ).select(
        "eval",
        "net_label",
        "latent_dim",
        "semi_normals",
        "semi_anomalous",
        "fold",
        "ap",
    )


@dataclass(frozen=True)
class Agg:
    mean: float
    std: float


def aggregate_ap(df: pl.DataFrame) -> Dict[Tuple[str, str, int, int, int], Agg]:
    out: Dict[Tuple[str, str, int, int, int], Agg] = {}
    gb = (
        df.group_by(
            ["eval", "net_label", "latent_dim", "semi_normals", "semi_anomalous"]
        )
        .agg(pl.col("ap").mean().alias("mean"), pl.col("ap").std().alias("std"))
        .to_dicts()
    )
    for row in gb:
        key = (
            str(row["eval"]),
            str(row["net_label"]),
            int(row["latent_dim"]),
            int(row["semi_normals"]),
            int(row["semi_anomalous"]),
        )
        m = float(row["mean"]) if row["mean"] == row["mean"] else np.nan
        s = float(row["std"]) if row["std"] == row["std"] else np.nan
        out[key] = Agg(mean=m, std=s)
    return out


def _lin_trend(xs: List[int], ys: List[float]) -> Tuple[np.ndarray, np.ndarray]:
    if len(xs) < 2:
        return np.array(xs, dtype=float), np.array(ys, dtype=float)
    x = np.array(xs, dtype=float)
    y = np.array(ys, dtype=float)
    a, b = np.polyfit(x, y, 1)
    x_fit = np.linspace(x.min(), x.max(), 200)
    y_fit = a * x_fit + b
    return x_fit, y_fit


def _dynamic_ylim(all_vals: List[float], all_errs: List[float]) -> Tuple[float, float]:
    vals = np.array(all_vals, dtype=float)
    errs = np.array(all_errs, dtype=float) if SHOW_STD_BANDS else np.zeros_like(vals)
    valid = np.isfinite(vals)
    if not np.any(valid):
        return (0.0, 1.0)
    v, e = vals[valid], errs[valid]
    lo = np.min(v - e)
    hi = np.max(v + e)
    span = max(1e-3, hi - lo)
    pad = 0.08 * span
    y0 = max(0.0, lo - pad)
    y1 = min(1.0, hi + pad)
    if (y1 - y0) < 0.08:
        mid = 0.5 * (y0 + y1)
        y0 = max(0.0, mid - 0.04)
        y1 = min(1.0, mid + 0.04)
    return (float(y0), float(y1))


def _get_dim_mapping(dims: list[int]) -> dict[int, int]:
    """Map actual dimensions to evenly spaced positions (0, 1, 2, ...)"""
    return {dim: i for i, dim in enumerate(dims)}


def plot_eval(ev: str, agg: Dict[Tuple[str, str, int, int, int], Agg], outdir: Path):
    fig, axes = plt.subplots(
        len(SEMI_LABELING_REGIMES),
        1,
        figsize=FIGSIZE,
        constrained_layout=True,
        sharex=True,
    )

    if len(SEMI_LABELING_REGIMES) == 1:
        axes = [axes]

    # Create dimension mapping
    dim_mapping = _get_dim_mapping(LATENT_DIMS)

    for ax, regime in zip(axes, SEMI_LABELING_REGIMES):
        semi_n, semi_a = regime
        data = {}
        for net in ["LeNet", "Efficient"]:
            xs, ys, es = [], [], []
            for dim in LATENT_DIMS:
                key = (ev, net, dim, semi_n, semi_a)
                if key in agg:
                    xs.append(
                        dim_mapping[dim]
                    )  # Use mapped position instead of actual dim
                    ys.append(agg[key].mean)
                    es.append(agg[key].std)
            data[net] = (xs, ys, es)

        for net, color in [("LeNet", COLOR_LENET), ("Efficient", COLOR_EFFICIENT)]:
            xs, ys, es = data[net]
            if not xs:
                continue

            # Set evenly spaced ticks with actual dimension labels
            ax.set_xticks(list(dim_mapping.values()))
            ax.set_xticklabels(LATENT_DIMS)

            ax.yaxis.set_major_locator(MaxNLocator(nbins=5))
            ax.scatter(
                xs, ys, s=35, color=color, alpha=SCATTER_ALPHA, label=f"{net} (points)"
            )
            x_fit, y_fit = _lin_trend(xs, ys)  # Now using mapped positions
            ax.plot(
                x_fit,
                y_fit,
                color=color,
                linewidth=TREND_LINEWIDTH,
                label=f"{net} (trend)",
            )
            if SHOW_STD_BANDS and es and np.any(np.isfinite(es)):
                ylo = np.clip(np.array(ys) - np.array(es), 0.0, 1.0)
                yhi = np.clip(np.array(ys) + np.array(es), 0.0, 1.0)
                ax.fill_between(
                    xs, ylo, yhi, color=color, alpha=BAND_ALPHA, linewidth=0
                )

        all_vals, all_errs = [], []
        for net in ["LeNet", "Efficient"]:
            _, ys, es = data[net]
            all_vals.extend(ys)
            all_errs.extend(es)
        y0, y1 = _dynamic_ylim(all_vals, all_errs)
        ax.set_ylim(y0, y1)

        ax.set_title(f"Labeling regime {semi_n}/{semi_a}", fontsize=11)
        ax.grid(True, alpha=0.35)

    axes[-1].set_xlabel("Latent dimension")
    for ax in axes:
        ax.set_ylabel("AP")

    handles, labels = axes[0].get_legend_handles_labels()
    fig.legend(handles, labels, ncol=2, loc="upper center", bbox_to_anchor=(0.75, 0.97))
    fig.suptitle(f"AP vs. Latent Dimensionality — {ev.replace('_', ' ')}", y=1.05)

    fname = f"ap_trends_{ev}.png"
    fig.savefig(outdir / fname, dpi=150)
    plt.close(fig)


def plot_all(agg: Dict[Tuple[str, str, int, int, int], Agg], outdir: Path):
    outdir.mkdir(parents=True, exist_ok=True)
    for ev in EVALS:
        plot_eval(ev, agg, outdir)


def main():
    df = load_results_dataframe(ROOT, allow_cache=True)
    df = _with_net_label(df)
    df = _filter_deepsad(df)
    agg = aggregate_ap(df)

    OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
    archive_dir = OUTPUT_DIR / "archive"
    archive_dir.mkdir(parents=True, exist_ok=True)
    ts_dir = archive_dir / datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    ts_dir.mkdir(parents=True, exist_ok=True)

    plot_all(agg, ts_dir)

    try:
        script_path = Path(__file__)
        shutil.copy2(script_path, ts_dir / script_path.name)
    except Exception:
        pass

    latest = OUTPUT_DIR / "latest"
    latest.mkdir(parents=True, exist_ok=True)
    for f in latest.iterdir():
        if f.is_file():
            f.unlink()
    for f in ts_dir.iterdir():
        if f.is_file():
            shutil.copy2(f, latest / f.name)

    print(f"Saved plots to: {ts_dir}")
    print(f"Also updated: {latest}")


if __name__ == "__main__":
    main()
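The main thing this script adds over results_ap_over_semi.py below is _get_dim_mapping: latent dimensions are plotted at evenly spaced integer positions and the true values only come back as tick labels, so 32 and 64 are not crushed together next to 1024 and the linear trend is fitted in index space. Its behaviour in isolation:

def _get_dim_mapping(dims):
    """Map actual dimensions to evenly spaced positions (0, 1, 2, ...)."""
    return {dim: i for i, dim in enumerate(dims)}

dims = [32, 64, 128, 256, 512, 768, 1024]
mapping = _get_dim_mapping(dims)
print(mapping[32], mapping[64], mapping[1024])  # 0 1 6 -> uniform x spacing
# on an axis: ax.set_xticks(list(mapping.values())); ax.set_xticklabels(dims)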
tools/plot_scripts/results_ap_over_semi.py (new file, 260 lines)
@@ -0,0 +1,260 @@
#!/usr/bin/env python3
from __future__ import annotations

import shutil
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Tuple

import matplotlib.pyplot as plt
import numpy as np
import polars as pl
from matplotlib.ticker import MaxNLocator

# =========================
# Config
# =========================
ROOT = Path("/home/fedex/mt/results/copy")
OUTPUT_DIR = Path("/home/fedex/mt/plots/results_ap_over_semi")

# Labeling regimes (shown as separate subplots)
SEMI_LABELING_REGIMES: list[tuple[int, int]] = [(0, 0), (50, 10), (500, 100)]

# Evaluations: separate figure per eval
EVALS: list[str] = ["exp_based", "manual_based"]

# X-axis (latent dims)
LATENT_DIMS: list[int] = [32, 64, 128, 256, 512, 768, 1024]
LATENT_DIM: int = [32, 64, 128, 256, 512, 768, 1024]

# Visual style
FIGSIZE = (8, 8)  # one tall figure with 3 compact subplots
MARKERSIZE = 7
SCATTER_ALPHA = 0.95
LINEWIDTH = 2.0
TREND_LINEWIDTH = 2.2
BAND_ALPHA = 0.18

# Toggle: show ±1 std bands (k-fold variability)
SHOW_STD_BANDS = True  # <<< set to False to hide the bands

# Colors for the two DeepSAD backbones
COLOR_LENET = "#1f77b4"  # blue
COLOR_EFFICIENT = "#ff7f0e"  # orange

# =========================
# Loader
# =========================
from load_results import load_results_dataframe


# =========================
# Helpers
# =========================
def _with_net_label(df: pl.DataFrame) -> pl.DataFrame:
    return df.with_columns(
        pl.when(
            pl.col("network").cast(pl.Utf8).str.to_lowercase().str.contains("lenet")
        )
        .then(pl.lit("LeNet"))
        .when(
            pl.col("network").cast(pl.Utf8).str.to_lowercase().str.contains("efficient")
        )
        .then(pl.lit("Efficient"))
        .otherwise(pl.col("network").cast(pl.Utf8))
        .alias("net_label")
    )


def _filter_deepsad(df: pl.DataFrame) -> pl.DataFrame:
    return df.filter(
        (pl.col("model") == "deepsad")
        & (pl.col("eval").is_in(EVALS))
        & (pl.col("latent_dim").is_in(LATENT_DIMS))
        & (pl.col("net_label").is_in(["LeNet", "Efficient"]))
    ).select(
        "eval",
        "net_label",
        "latent_dim",
        "semi_normals",
        "semi_anomalous",
        "fold",
        "ap",
    )


@dataclass(frozen=True)
class Agg:
    mean: float
    std: float


def aggregate_ap(df: pl.DataFrame) -> Dict[Tuple[str, str, int, int, int], Agg]:
    out: Dict[Tuple[str, str, int, int, int], Agg] = {}
    gb = (
        df.group_by(
            ["eval", "net_label", "latent_dim", "semi_normals", "semi_anomalous"]
        )
        .agg(pl.col("ap").mean().alias("mean"), pl.col("ap").std().alias("std"))
        .to_dicts()
    )
    for row in gb:
        key = (
            str(row["eval"]),
            str(row["net_label"]),
            int(row["latent_dim"]),
            int(row["semi_normals"]),
            int(row["semi_anomalous"]),
        )
        m = float(row["mean"]) if row["mean"] == row["mean"] else np.nan
        s = float(row["std"]) if row["std"] == row["std"] else np.nan
        out[key] = Agg(mean=m, std=s)
    return out


def _lin_trend(xs: List[int], ys: List[float]) -> Tuple[np.ndarray, np.ndarray]:
    if len(xs) < 2:
        return np.array(xs, dtype=float), np.array(ys, dtype=float)
    x = np.array(xs, dtype=float)
    y = np.array(ys, dtype=float)
    a, b = np.polyfit(x, y, 1)
    x_fit = np.linspace(x.min(), x.max(), 200)
    y_fit = a * x_fit + b
    return x_fit, y_fit


def _dynamic_ylim(all_vals: List[float], all_errs: List[float]) -> Tuple[float, float]:
    vals = np.array(all_vals, dtype=float)
    errs = np.array(all_errs, dtype=float) if SHOW_STD_BANDS else np.zeros_like(vals)
    valid = np.isfinite(vals)
    if not np.any(valid):
        return (0.0, 1.0)
    v, e = vals[valid], errs[valid]
    lo = np.min(v - e)
    hi = np.max(v + e)
    span = max(1e-3, hi - lo)
    pad = 0.08 * span
    y0 = max(0.0, lo - pad)
    y1 = min(1.0, hi + pad)
    if (y1 - y0) < 0.08:
        mid = 0.5 * (y0 + y1)
        y0 = max(0.0, mid - 0.04)
        y1 = min(1.0, mid + 0.04)
    return (float(y0), float(y1))


def plot_eval(ev: str, agg: Dict[Tuple[str, str, int, int, int], Agg], outdir: Path):
    fig, axes = plt.subplots(
        len(SEMI_LABELING_REGIMES),
        1,
        figsize=FIGSIZE,
        constrained_layout=True,
        sharex=True,
    )

    if len(SEMI_LABELING_REGIMES) == 1:
        axes = [axes]

    for ax, regime in zip(axes, SEMI_LABELING_REGIMES):
        semi_n, semi_a = regime
        data = {}
        for net in ["LeNet", "Efficient"]:
            xs, ys, es = [], [], []
            for dim in LATENT_DIMS:
                key = (ev, net, dim, semi_n, semi_a)
                if key in agg:
                    xs.append(dim)
                    ys.append(agg[key].mean)
                    es.append(agg[key].std)
            data[net] = (xs, ys, es)

        for net, color in [("LeNet", COLOR_LENET), ("Efficient", COLOR_EFFICIENT)]:
            xs, ys, es = data[net]
            if not xs:
                continue
            ax.set_xticks(LATENT_DIMS)
            ax.yaxis.set_major_locator(MaxNLocator(nbins=5))  # e.g., always 5 ticks
            ax.scatter(
                xs, ys, s=35, color=color, alpha=SCATTER_ALPHA, label=f"{net} (points)"
            )
            x_fit, y_fit = _lin_trend(xs, ys)
            ax.plot(
                x_fit,
                y_fit,
                color=color,
                linewidth=TREND_LINEWIDTH,
                label=f"{net} (trend)",
            )
            if SHOW_STD_BANDS and es and np.any(np.isfinite(es)):
                ylo = np.clip(np.array(ys) - np.array(es), 0.0, 1.0)
                yhi = np.clip(np.array(ys) + np.array(es), 0.0, 1.0)
                ax.fill_between(
                    xs, ylo, yhi, color=color, alpha=BAND_ALPHA, linewidth=0
                )

        all_vals, all_errs = [], []
        for net in ["LeNet", "Efficient"]:
            _, ys, es = data[net]
            all_vals.extend(ys)
            all_errs.extend(es)
        y0, y1 = _dynamic_ylim(all_vals, all_errs)
        ax.set_ylim(y0, y1)

        ax.set_title(f"Labeling regime {semi_n}/{semi_a}", fontsize=11)
        ax.grid(True, alpha=0.35)

    axes[-1].set_xlabel("Latent dimension")
    for ax in axes:
        ax.set_ylabel("AP")

    handles, labels = axes[0].get_legend_handles_labels()
    fig.legend(handles, labels, ncol=2, loc="upper center", bbox_to_anchor=(0.75, 0.97))
    fig.suptitle(f"AP vs. Latent Dimensionality — {ev.replace('_', ' ')}", y=1.05)

    fname = f"ap_trends_{ev}.png"
    fig.savefig(outdir / fname, dpi=150)
    plt.close(fig)


def plot_all(agg: Dict[Tuple[str, str, int, int, int], Agg], outdir: Path):
    outdir.mkdir(parents=True, exist_ok=True)
    for ev in EVALS:
        plot_eval(ev, agg, outdir)


def main():
    df = load_results_dataframe(ROOT, allow_cache=True)
    df = _with_net_label(df)
    df = _filter_deepsad(df)
    agg = aggregate_ap(df)

    OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
    archive_dir = OUTPUT_DIR / "archive"
    archive_dir.mkdir(parents=True, exist_ok=True)
    ts_dir = archive_dir / datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    ts_dir.mkdir(parents=True, exist_ok=True)

    plot_all(agg, ts_dir)

    try:
        script_path = Path(__file__)
        shutil.copy2(script_path, ts_dir / script_path.name)
    except Exception:
        pass

    latest = OUTPUT_DIR / "latest"
    latest.mkdir(parents=True, exist_ok=True)
    for f in latest.iterdir():
        if f.is_file():
            f.unlink()
    for f in ts_dir.iterdir():
        if f.is_file():
            shutil.copy2(f, latest / f.name)

    print(f"Saved plots to: {ts_dir}")
    print(f"Also updated: {latest}")


if __name__ == "__main__":
    main()
||||
@@ -12,6 +12,8 @@ from typing import Dict, Optional, Tuple
import matplotlib.pyplot as plt
import numpy as np
import polars as pl
from load_results import load_inference_results_dataframe
from matplotlib.lines import Line2D

# =====================================
# User-configurable params
@@ -21,7 +23,7 @@ import polars as pl
INFERENCE_ROOT = Path("/home/fedex/mt/results/inference/copy")

# Cached stats + manual labels (same location as your earlier scripts)
CACHE_PATH = Path("/home/fedex/mt/plots/data_anomalies_timeline")
CACHE_PATH = Path("/home/fedex/mt/plots/results_inference_exp_compare")

# .bag directory (used only to rebuild experiment order for mapping stats)
ALL_DATA_PATH = Path("/home/fedex/mt/data/subter")
@@ -35,8 +37,8 @@ EXPERIMENT_DEGRADED = "3_smoke_human_walking_2023-01-23"

# Shared model configuration for BOTH experiments
LATENT_DIM = 32
SEMI_NORMALS = 50
SEMI_ANOMALOUS = 10
SEMI_NORMALS = 0
SEMI_ANOMALOUS = 0

# Comparison y-axis mode for methods: "baseline_z" or "baseline_tailprob"
Y_MODE = "baseline_z"
@@ -75,7 +77,6 @@ output_datetime_path.mkdir(exist_ok=True, parents=True)
# =====================================
# Load Polars DataFrame via your helper
# =====================================
from load_results import load_inference_results_dataframe

df: pl.DataFrame = load_inference_results_dataframe(INFERENCE_ROOT)

@@ -259,19 +260,19 @@ def baseline_transform(clean: np.ndarray, other: np.ndarray, mode: str):


def pick_method_series(gdf: pl.DataFrame, label: str) -> Optional[np.ndarray]:
    if label == "DeepSAD (LeNet)":
    if label == "DeepSAD LeNet":
        sel = gdf.filter(
            (pl.col("network") == "subter_LeNet") & (pl.col("model") == "deepsad")
        )
    elif label == "DeepSAD (efficient)":
    elif label == "DeepSAD Efficient":
        sel = gdf.filter(
            (pl.col("network") == "subter_efficient") & (pl.col("model") == "deepsad")
        )
    elif label == "OCSVM (LeNet)":
    elif label == "OCSVM":
        sel = gdf.filter(
            (pl.col("network") == "subter_LeNet") & (pl.col("model") == "ocsvm")
        )
    elif label == "IsoForest (LeNet)":
    elif label == "Isolation Forest":
        sel = gdf.filter(
            (pl.col("network") == "subter_LeNet") & (pl.col("model") == "isoforest")
        )
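# Sketch of the baseline_transform referenced in the hunk header above (its
# body lies outside this diff; for mode="baseline_z" the plausible reading,
# given Y_MODE above, is a z-score of each series against the clean run's
# statistics):
#   def baseline_transform(clean: np.ndarray, other: np.ndarray, mode: str):
#       if mode == "baseline_z":
#           mu, sd = float(np.mean(clean)), float(np.std(clean)) or 1.0
#           return (clean - mu) / sd, (other - mu) / sd
#       ...  # "baseline_tailprob" handled analogously
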
@@ -310,10 +311,10 @@ def compare_two_experiments_progress(
    include_stats: bool = True,
):
    methods = [
        "DeepSAD (LeNet)",
        "DeepSAD (efficient)",
        "OCSVM (LeNet)",
        "IsoForest (LeNet)",
        "DeepSAD LeNet",
        "DeepSAD Efficient",
        "OCSVM",
        "Isolation Forest",
    ]

    g_clean = group_slice(
@@ -359,117 +360,214 @@ def compare_two_experiments_progress(

    x = np.linspace(0, 100, PROGRESS_BINS)

    # ---- Figure 1: scores only
    fig1, ax1 = plt.subplots(figsize=(14, 6), constrained_layout=True)
    for label in methods:
    # Prep stats: absolute %, EMA, progress-binned
    def prep_stat_pair(a, b):
        if a is None or len(a) == 0 or b is None or len(b) == 0:
            return None, None
        a_s = ema(a.astype(float), EMA_ALPHA_STATS)
        b_s = ema(b.astype(float), EMA_ALPHA_STATS)
        return _bin_to_progress(a_s, PROGRESS_BINS), _bin_to_progress(
            b_s, PROGRESS_BINS
        )

    mp_c, mp_d = prep_stat_pair(mp_clean, mp_deg)
    ns_c, ns_d = prep_stat_pair(ns_clean, ns_deg)
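# Sketch of the ema helper used in prep_stat_pair (defined outside this hunk;
# a standard exponential moving average is the plausible implementation):
#   def ema(x: np.ndarray, alpha: float) -> np.ndarray:
#       out = np.empty_like(x)
#       out[0] = x[0]
#       for i in range(1, len(x)):
#           out[i] = alpha * x[i] + (1 - alpha) * out[i - 1]
#       return out
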
    # Colors & styles
    COLOR_METHOD = "#d62728"  # vibrant red
    COLOR_MISSING = "#9ecae1"  # pale blue
    COLOR_NEAR = "#a1d99b"  # pale green

    LS_CLEAN = "--"  # dashed for normal/clean
    LS_DEG = "-"  # solid for anomalous/degraded
    LW_METHOD = 1.8
    LW_METHOD_CLEAN = 1.2
    LW_STATS = 1.6
    ALPHA_STATS = 0.95

    # Build the 4x1 grid of subplots (one panel per method)
    fig, axes = plt.subplots(
        4, 1, figsize=(12, 16), constrained_layout=True, sharex=False
    )
    axes = axes.ravel()

    method_to_axidx = {
        "DeepSAD LeNet": 0,
        "DeepSAD Efficient": 1,
        "OCSVM": 2,
        "Isolation Forest": 3,
    }

    stats_available = (
        mp_c is not None and mp_d is not None and ns_c is not None and ns_d is not None
    )
    if not stats_available:
        print("[WARN] One or both stats missing. Subplots will include methods only.")

    letters = ["a", "b", "c", "d"]

    for label, axidx in method_to_axidx.items():
        ax = axes[axidx]
        yc = curves_clean.get(label)
        yd = curves_deg.get(label)
        if yc is None or yd is None:
            ax.text(
                0.5, 0.5, "No data", ha="center", va="center", transform=ax.transAxes
            )
            ax.set_title(f"({letters[axidx]}) {label}")
            ax.grid(True, alpha=0.3)
            continue
        ax1.plot(x, yd, label=f"{label} — degraded", linewidth=1.8)
        ax1.plot(x, yc, linestyle="--", label=f"{label} — clean", linewidth=1.2)
    ax1.set_xlabel("Progress through experiment (%)")
    ax1.set_ylabel(y_label)
    ax1.set_title(
        f"Methods across experiments (progress-normalized)\n"
        f"Clean: {experiment_clean} vs Degraded: {experiment_degraded}\n"
        f"Transform: {y_mode} | EMA(methods α={EMA_ALPHA_METHODS})"
    )
    ax1.grid(True, alpha=0.3)
    ax1.legend(ncol=2, loc="upper right")
    out1 = (
        f"compare_{experiment_clean}_vs_{experiment_degraded}"
        f"_ld{latent_dim}_sn{semi_normals}_sa{semi_anomalous}"
        f"_methods_{y_mode}.png"
    )
    fig1.savefig(output_datetime_path / out1, dpi=150)
    plt.close(fig1)

    made = 1
        # Left axis: method score (z or tailprob)
        ax.plot(
            x,
            yd,
            linestyle=LS_DEG,
            color=COLOR_METHOD,
            linewidth=LW_METHOD,
            label=f"{label} — degraded",
        )
        ax.plot(
            x,
            yc,
            linestyle=LS_CLEAN,
            color=COLOR_METHOD,
            linewidth=LW_METHOD_CLEAN,
            label=f"{label} — clean",
        )
        ax.set_ylabel(y_label)
        ax.set_title(label)
        ax.set_title(f"({letters[axidx]}) {label}")
        ax.grid(True, alpha=0.3)

        if include_stats:
            # Prep stats: absolute %, EMA, progress-binned
            def prep_stat_pair(a, b):
                if a is None or len(a) == 0 or b is None or len(b) == 0:
                    return None, None
                a_s = ema(a.astype(float), EMA_ALPHA_STATS)
                b_s = ema(b.astype(float), EMA_ALPHA_STATS)
                return _bin_to_progress(a_s, PROGRESS_BINS), _bin_to_progress(
                    b_s, PROGRESS_BINS
                )

            mp_c, mp_d = prep_stat_pair(mp_clean, mp_deg)
            ns_c, ns_d = prep_stat_pair(ns_clean, ns_deg)

    # ---- Figure 2: + Missing points (%)
            # Right axis #1 (closest to plot): Missing points (%)
            axy_miss = ax.twinx()
    if mp_c is not None and mp_d is not None:
        fig2, ax2 = plt.subplots(figsize=(14, 6), constrained_layout=True)
        axy2 = ax2.twinx()
        for label in methods:
            yc = curves_clean.get(label)
            yd = curves_deg.get(label)
            if yc is None or yd is None:
                continue
            ax2.plot(x, yd, label=f"{label} — degraded", linewidth=1.8)
            ax2.plot(x, yc, linestyle="--", label=f"{label} — clean", linewidth=1.2)
        axy2.plot(x, mp_d, linestyle="-.", label="Missing points — degraded (%)")
        axy2.plot(x, mp_c, linestyle=":", label="Missing points — clean (%)")
        ax2.set_xlabel("Progress through experiment (%)")
        ax2.set_ylabel(y_label)
        axy2.set_ylabel("Missing points (%)")
        ax2.set_title(
            f"Methods vs Missing points (absolute %) — progress-normalized\n"
            f"Clean: {experiment_clean} vs Degraded: {experiment_degraded}\n"
            f"Transform: {y_mode} | EMA(methods α={EMA_ALPHA_METHODS}, stats α={EMA_ALPHA_STATS})"
            axy_miss.plot(
                x,
                mp_d,
                linestyle=LS_DEG,
                color=COLOR_MISSING,
                alpha=ALPHA_STATS,
                linewidth=LW_STATS,
                label="Missing points — degraded (%)",
            )
        ax2.grid(True, alpha=0.3)
        L1, N1 = ax2.get_legend_handles_labels()
        L2, N2 = axy2.get_legend_handles_labels()
        ax2.legend(L1 + L2, N1 + N2, loc="upper right", ncol=2)
        out2 = (
            f"compare_{experiment_clean}_vs_{experiment_degraded}"
            f"_ld{latent_dim}_sn{semi_normals}_sa{semi_anomalous}"
            f"_{y_mode}_missing.png"
            axy_miss.plot(
                x,
                mp_c,
                linestyle=LS_CLEAN,
                color=COLOR_MISSING,
                alpha=ALPHA_STATS,
                linewidth=LW_STATS,
                label="Missing points — clean (%)",
            )
        fig2.savefig(output_datetime_path / out2, dpi=150)
        plt.close(fig2)
        made += 1
            axy_miss.set_ylabel("Missing points (%)")
            axy_miss.tick_params(axis="y")  # , colors=COLOR_MISSING)
            # axy_miss.spines["right"].set_edgecolor(COLOR_MISSING)

            # Right axis #2 (slightly offset): Near-sensor points (%)
            axy_near = ax.twinx()
            # push this spine outward so it doesn't overlap the first right axis
            axy_near.spines["right"].set_position(("axes", 1.08))
            # make patch invisible so only spine shows
            axy_near.set_frame_on(True)
            axy_near.patch.set_visible(False)

    # ---- Figure 3: + Near-sensor (%)
    if ns_c is not None and ns_d is not None:
        fig3, ax3 = plt.subplots(figsize=(14, 6), constrained_layout=True)
        axy3 = ax3.twinx()
        for label in methods:
            yc = curves_clean.get(label)
            yd = curves_deg.get(label)
            if yc is None or yd is None:
                continue
            ax3.plot(x, yd, label=f"{label} — degraded", linewidth=1.8)
            ax3.plot(x, yc, linestyle="--", label=f"{label} — clean", linewidth=1.2)
        axy3.plot(x, ns_d, linestyle="-.", label="Near-sensor — degraded (%)")
        axy3.plot(x, ns_c, linestyle=":", label="Near-sensor — clean (%)")
        ax3.set_xlabel("Progress through experiment (%)")
        ax3.set_ylabel(y_label)
        axy3.set_ylabel("Near-sensor points (%)")
        ax3.set_title(
            f"Methods vs Near-sensor (absolute %) — progress-normalized\n"
            f"Clean: {experiment_clean} vs Degraded: {experiment_degraded}\n"
            f"Transform: {y_mode} | EMA(methods α={EMA_ALPHA_METHODS}, stats α={EMA_ALPHA_STATS})"
            axy_near.plot(
                x,
                ns_d,
                linestyle=LS_DEG,
                color=COLOR_NEAR,
                alpha=ALPHA_STATS,
                linewidth=LW_STATS,
                label="Near-sensor — degraded (%)",
            )
        ax3.grid(True, alpha=0.3)
        L1, N1 = ax3.get_legend_handles_labels()
        L2, N2 = axy3.get_legend_handles_labels()
        ax3.legend(L1 + L2, N1 + N2, loc="upper right", ncol=2)
        out3 = (
            f"compare_{experiment_clean}_vs_{experiment_degraded}"
            f"_ld{latent_dim}_sn{semi_normals}_sa{semi_anomalous}"
            f"_{y_mode}_nearsensor.png"
            axy_near.plot(
                x,
                ns_c,
                linestyle=LS_CLEAN,
                color=COLOR_NEAR,
                alpha=ALPHA_STATS,
                linewidth=LW_STATS,
                label="Near-sensor — clean (%)",
            )
        fig3.savefig(output_datetime_path / out3, dpi=150)
        plt.close(fig3)
        made += 1
            axy_near.set_ylabel("Near-sensor points (%)")
            axy_near.tick_params(axis="y")  # , colors=COLOR_NEAR)
            # axy_near.spines["right"].set_edgecolor(COLOR_NEAR)

    return made
        # Compose legend: show *method name* explicitly, plus the two stats
        handles = [
            Line2D(
                [0],
                [0],
                color=COLOR_METHOD,
                lw=LW_METHOD,
                ls=LS_DEG,
                label=f"{label} — degraded",
            ),
            Line2D(
                [0],
                [0],
                color=COLOR_METHOD,
                lw=LW_METHOD_CLEAN,
                ls=LS_CLEAN,
                label=f"{label} — clean",
            ),
            Line2D(
                [0],
                [0],
                color=COLOR_MISSING,
                lw=LW_STATS,
                ls=LS_DEG,
                label="Missing points — degraded",
            ),
            Line2D(
                [0],
                [0],
                color=COLOR_MISSING,
                lw=LW_STATS,
                ls=LS_CLEAN,
                label="Missing points — clean",
            ),
            Line2D(
                [0],
                [0],
                color=COLOR_NEAR,
                lw=LW_STATS,
                ls=LS_DEG,
                label="Near-sensor — degraded",
            ),
            Line2D(
                [0],
                [0],
                color=COLOR_NEAR,
                lw=LW_STATS,
                ls=LS_CLEAN,
                label="Near-sensor — clean",
            ),
        ]
        ax.legend(handles=handles, loc="upper left", fontsize=9, framealpha=0.9)

    # Shared labels / super-title
    for ax in axes:
        ax.set_xlabel("Progress through experiment (%)")

    # fig.suptitle(
    #     f"AD Method vs Stats Inference — progress-normalized\n"
    #     f"Transform: z-score normalized to non-degraded experiment | EMA(α={EMA_ALPHA_METHODS})",
    #     fontsize=14,
    # )
    fig.tight_layout(rect=[0, 0, 1, 0.99])

    out_name = (
        f"4up_{EXPERIMENT_CLEAN}_vs_{EXPERIMENT_DEGRADED}"
        f"_ld{latent_dim}_sn{semi_normals}_sa{semi_anomalous}_{y_mode}_methods_vs_stats.png"
    )
    fig.savefig(output_datetime_path / out_name, dpi=150)
    plt.close(fig)

    return 1


# =====================================

@@ -7,10 +7,10 @@ from pathlib import Path
import matplotlib.pyplot as plt
import numpy as np
import polars as pl
from matplotlib.lines import Line2D

# CHANGE THIS IMPORT IF YOUR LOADER MODULE IS NAMED DIFFERENTLY
from plot_scripts.load_results import load_results_dataframe
from load_results import load_results_dataframe
from matplotlib.lines import Line2D

# ----------------------------
# Config
@@ -26,6 +26,10 @@ SEMI_ANOMALOUS = 10

# Which evaluation columns to plot
EVALS = ["exp_based", "manual_based"]
EVALS_LABELS = {
    "exp_based": "Experiment-Label-Based",
    "manual_based": "Manually-Labeled",
}

# Latent dimensions to show as 7 subplots
LATENT_DIMS = [32, 64, 128, 256, 512, 768, 1024]
@@ -157,7 +161,7 @@ def _ensure_dim_axes(fig_title: str):
    fig, axes = plt.subplots(
        nrows=4, ncols=2, figsize=(12, 16), constrained_layout=True
    )
    fig.suptitle(fig_title, fontsize=14)
    # fig.suptitle(fig_title, fontsize=14)
    axes = axes.ravel()
    return fig, axes

@@ -188,7 +192,7 @@ def plot_grid_from_df(
    Create a 4x2 grid of subplots, one per latent dim; the 8th panel holds the legend.
    kind: 'roc' or 'prc'
    """
    fig_title = f"{kind.upper()} — {eval_type} (semi = {semi_normals}/{semi_anomalous})"
    fig_title = f"{kind.upper()} — {EVALS_LABELS[eval_type]} (Semi-Labeling Regime = {semi_normals}/{semi_anomalous})"
    fig, axes = _ensure_dim_axes(fig_title)

    # plotting order & colors
@@ -209,11 +213,13 @@ def plot_grid_from_df(
    legend_labels = []
    have_legend = False

    letters = ["a", "b", "c", "d", "e", "f", "g", "h"]

    for i, dim in enumerate(LATENT_DIMS):
        if i >= 7:
            break  # last slot reserved for legend
        ax = axes[i]
        ax.set_title(f"latent_dim = {dim}")
        ax.set_title(f"({letters[i]}) Latent Dim. = {dim}")
        ax.grid(True, alpha=0.3)

        if kind == "roc":

505
tools/plot_scripts/results_latent_space_tables.py
Normal file
@@ -0,0 +1,505 @@
from __future__ import annotations

import shutil
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path

import numpy as np
import polars as pl

# CHANGE THIS IMPORT IF YOUR LOADER MODULE IS NAMED DIFFERENTLY
from load_results import load_results_dataframe

# ----------------------------
# Config
# ----------------------------
ROOT = Path("/home/fedex/mt/results/copy")  # experiments root you pass to the loader
OUTPUT_DIR = Path("/home/fedex/mt/plots/results_latent_space_tables")

# Semi-labeling regimes (semi_normals, semi_anomalous) in display order
SEMI_LABELING_REGIMES: list[tuple[int, int]] = [(0, 0), (50, 10), (500, 100)]

# Both evals are shown side-by-side in one table
EVALS_BOTH: tuple[str, str] = ("exp_based", "manual_based")

# Row order (latent dims)
LATENT_DIMS: list[int] = [32, 64, 128, 256, 512, 768, 1024]

# Column order (method shown to the user)
# We split DeepSAD into the two network backbones, like your plots.
METHOD_COLUMNS = [
    ("deepsad", "LeNet"),  # DeepSAD (LeNet)
    ("deepsad", "Efficient"),  # DeepSAD (Efficient)
    ("isoforest", "Efficient"),  # IsolationForest (Efficient baseline)
    ("ocsvm", "Efficient"),  # OC-SVM (Efficient baseline)
]

# Formatting
DECIMALS = 3  # cells look like 1.000 or 0.928 (3 decimals)


# ----------------------------
# Helpers
# ----------------------------


def _fmt_mean_std(mean: float | None, std: float | None) -> str:
    """Format mean ± std with 3 decimals (leading zero), or '--' if missing."""
    if mean is None or not (mean == mean):  # NaN check
        return "--"
    if std is None or not (std == std):
        return f"{mean:.3f}"
    return f"{mean:.3f}$\\,\\pm\\,{std:.3f}$"

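# Example outputs of _fmt_mean_std (illustrative):
#   _fmt_mean_std(0.928, 0.013)        -> "0.928$\,\pm\,0.013$"
#   _fmt_mean_std(0.928, float("nan")) -> "0.928"
#   _fmt_mean_std(None, 0.1)           -> "--"
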
def _with_net_label(df: pl.DataFrame) -> pl.DataFrame:
    """Add a canonical 'net_label' column like the plotting script (LeNet/Efficient/fallback)."""
    return df.with_columns(
        pl.when(
            pl.col("network").cast(pl.Utf8).str.to_lowercase().str.contains("lenet")
        )
        .then(pl.lit("LeNet"))
        .when(
            pl.col("network").cast(pl.Utf8).str.to_lowercase().str.contains("efficient")
        )
        .then(pl.lit("Efficient"))
        .otherwise(pl.col("network").cast(pl.Utf8))
        .alias("net_label")
    )


def _filter_base(df: pl.DataFrame) -> pl.DataFrame:
    """Restrict to valid dims/models and needed columns (no eval/regime filtering here)."""
    return df.filter(
        (pl.col("latent_dim").is_in(LATENT_DIMS))
        & (pl.col("model").is_in(["deepsad", "isoforest", "ocsvm"]))
        & (pl.col("eval").is_in(list(EVALS_BOTH)))
    ).select(
        "model",
        "net_label",
        "latent_dim",
        "fold",
        "ap",
        "eval",
        "semi_normals",
        "semi_anomalous",
    )


@dataclass(frozen=True)
class Cell:
    mean: float | None
    std: float | None


def _compute_cells(df: pl.DataFrame) -> dict[tuple[str, int, str, str, int, int], Cell]:
    """
    Compute per-(eval, latent_dim, model, net_label, semi_normals, semi_anomalous)
    mean/std for AP across folds.
    """
    if df.is_empty():
        return {}

    # For baselines (isoforest/ocsvm) constrain to Efficient backbone
    df = df.filter(
        pl.when(pl.col("model").is_in(["isoforest", "ocsvm"]))
        .then(pl.col("net_label") == "Efficient")
        .otherwise(True)
    )

    agg = (
        df.group_by(
            [
                "eval",
                "latent_dim",
                "model",
                "net_label",
                "semi_normals",
                "semi_anomalous",
            ]
        )
        .agg(pl.col("ap").mean().alias("mean_ap"), pl.col("ap").std().alias("std_ap"))
        .to_dicts()
    )

    out: dict[tuple[str, int, str, str, int, int], Cell] = {}
    for row in agg:
        key = (
            str(row["eval"]),
            int(row["latent_dim"]),
            str(row["model"]),
            str(row["net_label"]),
            int(row["semi_normals"]),
            int(row["semi_anomalous"]),
        )
        out[key] = Cell(mean=row.get("mean_ap"), std=row.get("std_ap"))
    return out


def method_label(model: str, net_label: str) -> str:
    """Map (model, net_label) to the four method names used in headers/caption."""
    if model == "deepsad" and net_label == "LeNet":
        return "DeepSAD (LeNet)"
    if model == "deepsad" and net_label == "Efficient":
        return "DeepSAD (Efficient)"
    if model == "isoforest":
        return "IsoForest"
    if model == "ocsvm":
        return "OC-SVM"
    # ignore anything else (e.g., other backbones)
    return ""


def per_method_median_std_from_cells(
    cells: dict[tuple[str, int, str, str, int, int], Cell],
) -> dict[str, float]:
    """Compute the median std across all cells, per method."""
    stds_by_method: dict[str, list[float]] = {
        "DeepSAD (LeNet)": [],
        "DeepSAD (Efficient)": [],
        "IsoForest": [],
        "OC-SVM": [],
    }

    for key, cell in cells.items():
        (ev, dim, model, net, semi_n, semi_a) = key
        name = method_label(model, net)
        if name and (cell.std is not None) and (cell.std == cell.std):  # not NaN
            stds_by_method[name].append(cell.std)

    return {
        name: float(np.median(vals)) if vals else float("nan")
        for name, vals in stds_by_method.items()
    }


def per_method_max_std_from_cells(
    cells: dict[tuple[str, int, str, str, int, int], Cell],
) -> tuple[dict[str, float], dict[str, tuple]]:
    """
    Scan the aggregated 'cells' and return:
      - max_std_by_method: dict {"DeepSAD (LeNet)": 0.037, ...}
      - argmax_key_by_method: which cell (eval, dim, model, net, semi_n, semi_a) produced that max
    Only considers the four methods shown in the table.
    """
    max_std_by_method: dict[str, float] = {
        "DeepSAD (LeNet)": float("nan"),
        "DeepSAD (Efficient)": float("nan"),
        "IsoForest": float("nan"),
        "OC-SVM": float("nan"),
    }
    argmax_key_by_method: dict[str, tuple] = {}

    for key, cell in cells.items():
        (ev, dim, model, net, semi_n, semi_a) = key
        name = method_label(model, net)
        if name == "" or cell.std is None or not (cell.std == cell.std):  # empty/NaN
            continue
        cur = max_std_by_method.get(name, float("nan"))
        if (cur != cur) or (cell.std > cur):  # handle NaN initial
            max_std_by_method[name] = cell.std
            argmax_key_by_method[name] = key

    # Replace remaining NaNs with 0.0 for nice formatting
    for k, v in list(max_std_by_method.items()):
        if not (v == v):  # NaN
            max_std_by_method[k] = 0.0

    return max_std_by_method, argmax_key_by_method


def _fmt_val(val: float | None) -> str:
    """
    Format value as:
      - '--' if None/NaN
      - '1.0' if exactly 1 (within 1e-9)
      - '.xx' otherwise (2 decimals, no leading 0)
    """
    if val is None or not (val == val):  # None or NaN
        return "--"
    if abs(val - 1.0) < 1e-9:
        return "1.0"
    return f"{val:.2f}".lstrip("0")


def _fmt_mean(mean: float | None) -> str:
    return "--" if (mean is None or not (mean == mean)) else f"{mean:.{DECIMALS}f}"


def _bold_best_mask_display(values: list[float | None], decimals: int) -> list[bool]:
    """
    Bolding mask based on *displayed* precision. Any entries that round (via f-string)
    to the maximum at 'decimals' places are bolded (ties bolded).
    """

    def disp(v: float | None) -> float | None:
        if v is None or not (v == v):
            return None
        return float(f"{v:.{decimals}f}")

    rounded = [disp(v) for v in values]
    finite = [v for v in rounded if v is not None]
    if not finite:
        return [False] * len(values)
    maxv = max(finite)
    return [(v is not None and v == maxv) for v in rounded]

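# Example of the display-precision bolding (illustrative): with decimals=3,
# [0.9284, 0.9276, 0.93, None] displays as [0.928, 0.928, 0.930, --], so
# _bold_best_mask_display returns [False, False, True, False]; entries that
# tie at the displayed precision would all be bolded.
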
def _build_exp_based_table(
    cells: dict[tuple[str, int, str, str, int, int], Cell],
    *,
    semi_labeling_regimes: list[tuple[int, int]],
) -> str:
    """
    Build LaTeX table with mean ± std values for experiment-based evaluation only.
    """

    header_cols = [
        r"\rotheader{DeepSAD\\(LeNet)}",
        r"\rotheader{DeepSAD\\(Efficient)}",
        r"\rotheader{IsoForest}",
        r"\rotheader{OC-SVM}",
    ]

    lines: list[str] = []
    lines.append(r"\begin{table}[t]")
    lines.append(r"\centering")
    lines.append(r"\setlength{\tabcolsep}{4pt}")
    lines.append(r"\renewcommand{\arraystretch}{1.2}")
    lines.append(r"\begin{tabularx}{\textwidth}{c*{4}{Y}}")
    lines.append(r"\toprule")
    lines.append(r"Latent Dim. & " + " & ".join(header_cols) + r" \\")
    lines.append(r"\midrule")

    for idx, (semi_n, semi_a) in enumerate(semi_labeling_regimes):
        # regime label row
        lines.append(
            rf"\multicolumn{{5}}{{l}}{{\textbf{{Labeling regime: }}\(\mathbf{{{semi_n}/{semi_a}}}\)}} \\"
        )
        lines.append(r"\addlinespace[2pt]")

        for dim in LATENT_DIMS:
            row_vals = []
            for model, net in METHOD_COLUMNS:
                key = ("exp_based", dim, model, net, semi_n, semi_a)
                cell = cells.get(key, Cell(None, None))
                row_vals.append(_fmt_mean_std(cell.mean, cell.std))

            lines.append(f"{dim} & " + " & ".join(row_vals) + r" \\")

        if idx < len(semi_labeling_regimes) - 1:
            lines.append(r"\midrule")

    lines.append(r"\bottomrule")
    lines.append(r"\end{tabularx}")
    lines.append(
        r"\caption{AP means $\pm$ std across 5 folds for experiment-based evaluation only, grouped by labeling regime.}"
    )
    lines.append(r"\end{table}")

    return "\n".join(lines)


def _build_single_table(
    cells: dict[tuple[str, int, str, str, int, int], Cell],
    *,
    semi_labeling_regimes: list[tuple[int, int]],
) -> tuple[str, float | None]:
    """
    Build the LaTeX table string with grouped headers and regime blocks.
    Returns (latex, max_std_overall).
    """

    # Rotated header labels (90° slanted)
    header_cols = [
        r"\rotheader{DeepSAD\\(LeNet)}",
        r"\rotheader{DeepSAD\\(Efficient)}",
        r"\rotheader{IsoForest}",
        r"\rotheader{OC-SVM}",
    ]

    # Track max std across all cells
    max_std: float | None = None

    def push_std(std_val: float | None):
        nonlocal max_std
        if std_val is None or not (std_val == std_val):
            return
        if max_std is None or std_val > max_std:
            max_std = std_val

    lines: list[str] = []

    # Table preamble / structure
    lines.append(r"\begin{table}[t]")
    lines.append(r"\centering")
    lines.append(r"\setlength{\tabcolsep}{4pt}")
    lines.append(r"\renewcommand{\arraystretch}{1.2}")
    # Vertical rule between the two groups for data/header rows:
    lines.append(r"\begin{tabularx}{\textwidth}{c*{4}{Y}|*{4}{Y}}")
    lines.append(r"\toprule")
    lines.append(
        r" & \multicolumn{4}{c}{Experiment-based eval.} & \multicolumn{4}{c}{Handlabeled eval.} \\"
    )
    lines.append(r"\cmidrule(lr){2-5} \cmidrule(lr){6-9}")
    lines.append(
        r"Latent Dim. & "
        + " & ".join(header_cols)
        + " & "
        + " & ".join(header_cols)
        + r" \\"
    )
    lines.append(r"\midrule")

    # Iterate regimes and rows
    for idx, (semi_n, semi_a) in enumerate(semi_labeling_regimes):
        # Regime label row (multicolumn suppresses the vertical bar in this row)
        lines.append(
            rf"\multicolumn{{9}}{{l}}{{\textbf{{Labeling regime: }}\(\mathbf{{{semi_n}/{semi_a}}}\) "
            rf"\textit{{(normal/anomalous samples labeled)}}}} \\"
        )
        lines.append(r"\addlinespace[2pt]")

        for dim in LATENT_DIMS:
            # Values in order: left group (exp_based) 4 cols, right group (manual_based) 4 cols
            means_left: list[float | None] = []
            means_right: list[float | None] = []
            cell_strs_left: list[str] = []
            cell_strs_right: list[str] = []

            # Left group: exp_based
            eval_type = EVALS_BOTH[0]
            for model, net in METHOD_COLUMNS:
                key = (eval_type, dim, model, net, semi_n, semi_a)
                cell = cells.get(key, Cell(None, None))
                means_left.append(cell.mean)
                cell_strs_left.append(_fmt_mean(cell.mean))
                # mean_str = _fmt_val(cell.mean)
                # std_str = _fmt_val(cell.std)
                # if mean_str == "--":
                #     cell_strs_left.append("--")
                # else:
                #     cell_strs_left.append(f"{mean_str} $\\textpm$ {std_str}")
                push_std(cell.std)

            # Right group: manual_based
            eval_type = EVALS_BOTH[1]
            for model, net in METHOD_COLUMNS:
                key = (eval_type, dim, model, net, semi_n, semi_a)
                cell = cells.get(key, Cell(None, None))
                means_right.append(cell.mean)
                cell_strs_right.append(_fmt_mean(cell.mean))
                # mean_str = _fmt_val(cell.mean)
                # std_str = _fmt_val(cell.std)
                # if mean_str == "--":
                #     cell_strs_right.append("--")
                # else:
                #     cell_strs_right.append(f"{mean_str} $\\textpm$ {std_str}")
                push_std(cell.std)

            # Bolding per group based on displayed precision
            mask_left = _bold_best_mask_display(means_left, DECIMALS)
            mask_right = _bold_best_mask_display(means_right, DECIMALS)

            pretty_left = [
                (r"\textbf{" + s + "}") if (do_bold and s != "--") else s
                for s, do_bold in zip(cell_strs_left, mask_left)
            ]
            pretty_right = [
                (r"\textbf{" + s + "}") if (do_bold and s != "--") else s
                for s, do_bold in zip(cell_strs_right, mask_right)
            ]

            # Join with the vertical bar between groups automatically handled by column spec
            lines.append(
                f"{dim} & "
                + " & ".join(pretty_left)
                + " & "
                + " & ".join(pretty_right)
                + r" \\"
            )

        # Separator between regime blocks (but not after the last one)
        if idx < len(semi_labeling_regimes) - 1:
            lines.append(r"\midrule")

    lines.append(r"\bottomrule")
    lines.append(r"\end{tabularx}")

    # Compute the per-method median std across everything included in the table
    # (the max-std variant is kept commented out for reference)
    # max_std_by_method, argmax_key = per_method_max_std_from_cells(cells)
    median_std_by_method = per_method_median_std_from_cells(cells)

    # Optional: print the per-method median std (helps verify the caption values)
    for name, v in median_std_by_method.items():
        print(f"[median-std] {name}: {v:.3f}")

    cap_parts = []
    for name in ["DeepSAD (LeNet)", "DeepSAD (Efficient)", "IsoForest", "OC-SVM"]:
        v = median_std_by_method.get(name, 0.0)
        cap_parts.append(f"{name} {v:.3f}")
    cap_str = "; ".join(cap_parts)

    lines.append(
        rf"\caption{{AP means across 5 folds for both evaluations, grouped by labeling regime. "
        rf"Median standard deviation per method (not shown in table): {cap_str}.}}"
    )
lines.append(r"\end{table}")
|
||||
|
||||
return "\n".join(lines), max_std
|
||||
|
||||
|
||||
def main():
|
||||
# Load full results DF (cache behavior handled by your loader)
|
||||
df = load_results_dataframe(ROOT, allow_cache=True)
|
||||
df = _with_net_label(df)
|
||||
df = _filter_base(df)
|
||||
|
||||
# Prepare output dirs
|
||||
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
|
||||
archive_dir = OUTPUT_DIR / "archive"
|
||||
archive_dir.mkdir(parents=True, exist_ok=True)
|
||||
ts_dir = archive_dir / datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
|
||||
ts_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Pre-compute aggregated cells (mean/std) for all evals/regimes
|
||||
cells = _compute_cells(df)
|
||||
|
||||
# Build the single big table
|
||||
tex, max_std = _build_single_table(
|
||||
cells, semi_labeling_regimes=SEMI_LABELING_REGIMES
|
||||
)
|
||||
|
||||
out_name = "ap_table_all_evals_all_regimes.tex"
|
||||
out_path = ts_dir / out_name
|
||||
out_path.write_text(tex, encoding="utf-8")
|
||||
|
||||
# Build experiment-based table with mean ± std
|
||||
tex_exp = _build_exp_based_table(cells, semi_labeling_regimes=SEMI_LABELING_REGIMES)
|
||||
|
||||
out_name_exp = "ap_table_exp_based_mean_std.tex"
|
||||
out_path_exp = ts_dir / out_name_exp
|
||||
out_path_exp.write_text(tex_exp, encoding="utf-8")
|
||||
|
||||
# Copy this script to preserve the code used for the outputs
|
||||
script_path = Path(__file__)
|
||||
shutil.copy2(script_path, ts_dir / script_path.name)
|
||||
|
||||
# Mirror latest
|
||||
latest = OUTPUT_DIR / "latest"
|
||||
latest.mkdir(exist_ok=True, parents=True)
|
||||
for f in latest.iterdir():
|
||||
if f.is_file():
|
||||
f.unlink()
|
||||
for f in ts_dir.iterdir():
|
||||
if f.is_file():
|
||||
shutil.copy2(f, latest / f.name)
|
||||
|
||||
print(f"Saved table to: {ts_dir}")
|
||||
print(f"Also updated: {latest}")
|
||||
print(f" - {out_name}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
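# Note on the emitted LaTeX (assumptions, not part of this diff): the tables
# rely on a custom \rotheader macro and a Y column type, which must be defined
# in the thesis preamble for the .tex files to compile. Plausible definitions:
#   \newcolumntype{Y}{>{\centering\arraybackslash}X}             % with tabularx
#   \newcommand{\rotheader}[1]{\rotatebox{90}{\shortstack{#1}}}  % with graphicx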
@@ -8,11 +8,11 @@ from pathlib import Path
import matplotlib.pyplot as plt
import numpy as np
import polars as pl
from matplotlib.lines import Line2D
from scipy.stats import sem, t

# CHANGE THIS IMPORT IF YOUR LOADER MODULE NAME IS DIFFERENT
from plot_scripts.load_results import load_results_dataframe
from load_results import load_results_dataframe
from matplotlib.lines import Line2D
from scipy.stats import sem, t

# ---------------------------------
# Config
@@ -23,6 +23,10 @@ OUTPUT_DIR = Path("/home/fedex/mt/plots/results_semi_labels_comparison")
LATENT_DIMS = [32, 64, 128, 256, 512, 768, 1024]
SEMI_REGIMES = [(0, 0), (50, 10), (500, 100)]
EVALS = ["exp_based", "manual_based"]
EVALS_LABELS = {
    "exp_based": "Experiment-Based Labels",
    "manual_based": "Manually-Labeled",
}

# Interp grids
ROC_GRID = np.linspace(0.0, 1.0, 200)
@@ -30,6 +34,10 @@ PRC_GRID = np.linspace(0.0, 1.0, 200)

# Baselines are duplicated across nets; use Efficient-only to avoid repetition
BASELINE_NET = "Efficient"
BASELINE_LABELS = {
    "isoforest": "Isolation Forest",
    "ocsvm": "One-Class SVM",
}

# Colors/styles
COLOR_BASELINES = {
@@ -147,12 +155,8 @@ def _select_rows(
    return df.filter(pl.all_horizontal(exprs))


def _auc_list(sub: pl.DataFrame) -> list[float]:
    return [x for x in sub.select("auc").to_series().to_list() if x is not None]


def _ap_list(sub: pl.DataFrame) -> list[float]:
    return [x for x in sub.select("ap").to_series().to_list() if x is not None]
def _auc_list(sub: pl.DataFrame, kind: str) -> list[float]:
    return [x for x in sub.select(f"{kind}_auc").to_series().to_list() if x is not None]

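# The unified helper derives the metric column from the panel kind:
# _auc_list(sub, "roc") selects "roc_auc" and _auc_list(sub, "prc") selects
# "prc_auc", replacing the old _auc_list/_ap_list pair.
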
def _plot_panel(
@@ -165,7 +169,7 @@ def _plot_panel(
    kind: str,
):
    """
    Plot one panel: DeepSAD (net_for_deepsad) with 3 regimes + baselines (from Efficient).
    Plot one panel: DeepSAD (net_for_deepsad) with 3 regimes + Baselines (from Efficient).
    Legend entries include mean±CI of AUC/AP.
    """
    ax.grid(True, alpha=0.3)
@@ -200,9 +204,9 @@ def _plot_panel(
            continue

        # Metric for legend
        metric_vals = _auc_list(sub_b) if kind == "roc" else _ap_list(sub_b)
        metric_vals = _auc_list(sub_b, kind)
        m, ci = mean_ci(metric_vals)
        lab = f"{model} ({'AUC' if kind == 'roc' else 'AP'}={m:.3f}±{ci:.3f})"
        lab = f"{BASELINE_LABELS[model]}\n(AUC={m:.3f}±{ci:.3f})"

        color = COLOR_BASELINES[model]
        h = ax.plot(grid, mean_y, lw=2, color=color, label=lab)[0]
@@ -230,9 +234,9 @@ def _plot_panel(
        if np.all(np.isnan(mean_y)):
            continue

        metric_vals = _auc_list(sub_d) if kind == "roc" else _ap_list(sub_d)
        metric_vals = _auc_list(sub_d, kind)
        m, ci = mean_ci(metric_vals)
        lab = f"DeepSAD {net_for_deepsad} — semi {sn}/{sa} ({'AUC' if kind == 'roc' else 'AP'}={m:.3f}±{ci:.3f})"
        lab = f"DeepSAD {net_for_deepsad} — {sn}/{sa}\n(AUC={m:.3f}±{ci:.3f})"

        color = COLOR_REGIMES[regime]
        ls = LINESTYLES[regime]
@@ -246,7 +250,7 @@ def _plot_panel(
        ax.plot([0, 1], [0, 1], "k--", alpha=0.6, label="Chance")

    # Legend
    ax.legend(loc="lower right", fontsize=9, frameon=True)
    ax.legend(loc="upper right", fontsize=9, frameon=True)


def make_figures_for_dim(
@@ -254,9 +258,11 @@ def make_figures_for_dim(
):
    # ROC: 2×1
    fig_roc, axes = plt.subplots(
        nrows=1, ncols=2, figsize=(14, 5), constrained_layout=True
        nrows=2, ncols=1, figsize=(7, 10), constrained_layout=True
    )
    fig_roc.suptitle(f"ROC — {eval_type} — latent_dim={latent_dim}", fontsize=14)
    # fig_roc.suptitle(
    #     f"ROC — {EVALS_LABELS[eval_type]} — Latent Dim.={latent_dim}", fontsize=14
    # )

    _plot_panel(
        axes[0],
@@ -266,7 +272,7 @@ def make_figures_for_dim(
        latent_dim=latent_dim,
        kind="roc",
    )
    axes[0].set_title("DeepSAD (LeNet) + baselines")
    axes[0].set_title("(a) DeepSAD (LeNet) + Baselines")

    _plot_panel(
        axes[1],
@@ -276,7 +282,7 @@ def make_figures_for_dim(
        latent_dim=latent_dim,
        kind="roc",
    )
    axes[1].set_title("DeepSAD (Efficient) + baselines")
    axes[1].set_title("(b) DeepSAD (Efficient) + Baselines")

    out_roc = out_dir / f"roc_{latent_dim}_{eval_type}.png"
    fig_roc.savefig(out_roc, dpi=150, bbox_inches="tight")
@@ -284,9 +290,11 @@ def make_figures_for_dim(

    # PRC: 2×1
    fig_prc, axes = plt.subplots(
        nrows=1, ncols=2, figsize=(14, 5), constrained_layout=True
        nrows=2, ncols=1, figsize=(7, 10), constrained_layout=True
    )
    fig_prc.suptitle(f"PRC — {eval_type} — latent_dim={latent_dim}", fontsize=14)
    # fig_prc.suptitle(
    #     f"PRC — {EVALS_LABELS[eval_type]} — Latent Dim.={latent_dim}", fontsize=14
    # )

    _plot_panel(
        axes[0],
@@ -296,7 +304,7 @@ def make_figures_for_dim(
        latent_dim=latent_dim,
        kind="prc",
    )
    axes[0].set_title("DeepSAD (LeNet) + baselines")
    axes[0].set_title("(a)")

    _plot_panel(
        axes[1],
@@ -306,7 +314,7 @@ def make_figures_for_dim(
        latent_dim=latent_dim,
        kind="prc",
    )
    axes[1].set_title("DeepSAD (Efficient) + baselines")
    axes[1].set_title("(b)")

    out_prc = out_dir / f"prc_{latent_dim}_{eval_type}.png"
    fig_prc.savefig(out_prc, dpi=150, bbox_inches="tight")
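# Sketch of the mean_ci helper used above (its definition lies outside this
# diff; a plausible implementation is a 95% Student-t half-width, matching the
# `sem, t` imports):
#   def mean_ci(vals, confidence=0.95):
#       arr = np.asarray(vals, dtype=float)
#       m = float(arr.mean())
#       if arr.size < 2:
#           return m, 0.0
#       half = float(sem(arr) * t.ppf((1 + confidence) / 2, arr.size - 1))
#       return m, half
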
@@ -6,6 +6,8 @@ readme = "README.md"
requires-python = ">=3.11.9"
dependencies = [
    "pandas>=2.3.2",
    "pointcloudset>=0.11.0",
    "polars>=1.33.0",
    "pyarrow>=21.0.0",
    "tabulate>=0.9.0",
]