Methods
parent 700d73c2ff
commit 99f0df9d41
@@ -172,5 +172,24 @@ cython_debug/
 # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
-#.idea/
+.idea/

+
+# Latex output
+*.pdf
+*.dvi
+*.out
+*.aux
+*.log
+*.toc
+*.synctex.gz
+*.ind
+*.ilg
+*.idx
+*.lof
+*.lot
+*.fls
+*.fdb_latexmk
+*.nav
+*.snm
+*.blg
Binary file not shown. (After: 48 KiB)
Binary file not shown. (After: 21 KiB)
Binary file not shown.
@@ -0,0 +1,83 @@
+\begin{thebibliography}{}
+
+\bibitem [\protect \citeauthoryear {%
+Alagoz%
+}{%
+Alagoz%
+}{%
+{\protect \APACyear {{\protect \bibnodate {}}}}%
+}]{%
+alagoz_comparative_2024}
+\APACinsertmetastar {%
+alagoz_comparative_2024}%
+\begin{APACrefauthors}%
+Alagoz, C.%
+\end{APACrefauthors}%
+\unskip\
+\newblock
+\APACrefYearMonthDay{{\protect \bibnodate {}}}{}{}.
+\newblock
+\APACrefbtitle {Comparative Analysis of {XGBoost} and Minirocket Algortihms for Human Activity Recognition} {Comparative analysis of {XGBoost} and minirocket algortihms for human activity recognition}\ (\BNUM\ {arXiv}:2402.18296).
+\newblock
+\APACaddressPublisher{}{{arXiv}}.
+\newblock
+\begin{APACrefURL} [{2024-12-01}]\url{http://arxiv.org/abs/2402.18296} \end{APACrefURL}
+\newblock
+\begin{APACrefDOI} \doi{10.48550/arXiv.2402.18296} \end{APACrefDOI}
+\PrintBackRefs{\CurrentBib}
+
+\bibitem [\protect \citeauthoryear {%
+Brownlee%
+}{%
+Brownlee%
+}{%
+{\protect \APACyear {{\protect \bibnodate {}}}}%
+}]{%
+brownlee_gentle_2018}
+\APACinsertmetastar {%
+brownlee_gentle_2018}%
+\begin{APACrefauthors}%
+Brownlee, J.%
+\end{APACrefauthors}%
+\unskip\
+\newblock
+\APACrefYearMonthDay{{\protect \bibnodate {}}}{}{}.
+\newblock
+\APACrefbtitle {A Gentle Introduction to a Standard Human Activity Recognition Problem.} {A gentle introduction to a standard human activity recognition problem.}
+\newblock
+\begin{APACrefURL} [{2024-12-01}]\url{https://www.machinelearningmastery.com/how-to-load-and-explore-a-standard-human-activity-recognition-problem/} \end{APACrefURL}
+\PrintBackRefs{\CurrentBib}
+
+\bibitem [\protect \citeauthoryear {%
+Sikder%
+, Chowdhury%
+, Arif%
+\BCBL {}\ \BBA {} Nahid%
+}{%
+Sikder%
+\ \protect \BOthers {.}}{%
+{\protect \APACyear {{\protect \bibnodate {}}}}%
+}]{%
+sikder_human_2021}
+\APACinsertmetastar {%
+sikder_human_2021}%
+\begin{APACrefauthors}%
+Sikder, N.%
+, Chowdhury, M\BPBI S.%
+, Arif, A\BPBI S\BPBI M.%
+\BCBL {}\ \BBA {} Nahid, A\BHBI A.%
+\end{APACrefauthors}%
+\unskip\
+\newblock
+\APACrefYearMonthDay{{\protect \bibnodate {}}}{}{}.
+\newblock
+\APACrefbtitle {Human Activity Recognition Using Multichannel Convolutional Neural Network} {Human activity recognition using multichannel convolutional neural network}\ (\BNUM\ {arXiv}:2101.06709).
+\newblock
+\APACaddressPublisher{}{{arXiv}}.
+\newblock
+\begin{APACrefURL} [{2024-12-01}]\url{http://arxiv.org/abs/2101.06709} \end{APACrefURL}
+\newblock
+\begin{APACrefDOI} \doi{10.48550/arXiv.2101.06709} \end{APACrefDOI}
+\PrintBackRefs{\CurrentBib}
+
+\end{thebibliography}
@@ -10,10 +10,12 @@
 \usepackage{hyperref}
 \usepackage{apacite}
 \usepackage{tabularx}
 \usepackage[affil-it]{authblk}
 %\usepackage{newtxtext}
 %\usepackage{newtxmath}
 \usepackage{newtx}
+\usepackage{graphicx}
+\usepackage{float}

 \usepackage{multicol}

@@ -36,4 +38,4 @@
 pdfborder=0 0 0
 }

 \bibliographystyle{apacite}

@@ -2,7 +2,39 @@
 %! Date = 01.12.24

 % Preamble

 \section{Research Questions and Methodology}\label{sec:forschungsfragen-und-methodik}
 The research question is how reliably activities can be recognized from smartphone sensor data.
-A baseline dataset~\cite{sikder_human_2021} served as the basis.
+The analysis is based on the ``UCI Human Activity Recognition (HAR) Dataset''~\cite{sikder_human_2021}. It contains smartphone sensor data (Samsung Galaxy S II) labelled with the performed activity. The data come from 30 subjects aged 19 to 48 who carried out six predefined activities:
+
+\begin{itemize}
+    \item WALKING
+    \item WALKING\_UPSTAIRS
+    \item WALKING\_DOWNSTAIRS
+    \item SITTING
+    \item STANDING
+    \item LAYING
+\end{itemize}
+
+The dataset is already split into a training set and a test set, with the training set containing 70\% and the test set 30\% of the data.
+Some data preparation was nevertheless necessary, because several features have names that Pandas does not consider unique.
+The following two features were treated as identical:
+\begin{itemize}
+    \item fBodyGyro-bandsEnergy()-33,40
+    \item fBodyGyro-bandsEnergy()-41,48
+\end{itemize}
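As an illustration of this preparation step, a minimal pandas sketch follows. It assumes the standard layout of the published UCI HAR archive (features.txt, train/X_train.txt, train/y_train.txt) and a simple counter suffix for repeated names; neither detail is taken from the project's own code.

    # Minimal sketch (assumed archive layout, not the project's code): load the
    # pre-split training data with pandas and make repeated feature names unique,
    # since features.txt contains names that are not unique.
    import pandas as pd

    feature_names = pd.read_csv(
        "UCI HAR Dataset/features.txt", sep=r"\s+", header=None
    )[1].tolist()

    # Append a running counter to every repeated name so each column label is unique.
    seen = {}
    unique_names = []
    for name in feature_names:
        n = seen.get(name, 0)
        unique_names.append(name if n == 0 else f"{name}_{n}")
        seen[name] = n + 1

    X_train = pd.read_csv(
        "UCI HAR Dataset/train/X_train.txt", sep=r"\s+", header=None, names=unique_names
    )
    y_train = pd.read_csv(
        "UCI HAR Dataset/train/y_train.txt", header=None, names=["activity"]
    )["activity"]

The test split would be loaded the same way with the identical unique_names list, so that training and test columns stay aligned.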
+
+Since the dataset contains a large number of features, feature selection was carried out to reduce the feature count: the correlation of each feature with the activity was computed, and the 20 features with the highest positive and negative correlation were selected.
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.8\textwidth]{../media/corr.png}
+    \caption{Correlation of the features with the activity}
+\end{figure}
+
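A sketch of such a correlation-based selection is given below. The function name is illustrative, the activity label is assumed to be numerically encoded (otherwise no Pearson correlation can be computed), and it keeps 20 features per direction, a reading the text leaves open.

    # Illustrative sketch of the described correlation-based feature selection;
    # assumes a numerically encoded activity label.
    import pandas as pd

    def select_by_correlation(X: pd.DataFrame, y: pd.Series, k: int = 20) -> list:
        corr = X.corrwith(y)  # Pearson correlation of every feature with the label
        top_positive = corr.nlargest(k).index.tolist()   # strongest positive correlations
        top_negative = corr.nsmallest(k).index.tolist()  # strongest negative correlations
        return top_positive + top_negative

    # Usage, continuing the loading sketch above:
    # selected = select_by_correlation(X_train, y_train)
    # X_train_sel = X_train[selected]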
+Three models were then trained on these features and evaluated with the F1-score:
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=0.8\textwidth]{../media/models.png}
+    \caption{Models and their F1-scores}
+\end{figure}
+
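The three models themselves are only identified in the referenced figure, so the classifiers in the sketch below are placeholders; the macro averaging of the F1-score is likewise an assumption, since the text does not specify it.

    # Sketch of the training/evaluation step. The three classifiers and the "macro"
    # averaging are placeholders; the text names neither.
    from sklearn.ensemble import RandomForestClassifier
    from sklearn.linear_model import LogisticRegression
    from sklearn.metrics import f1_score
    from sklearn.neighbors import KNeighborsClassifier

    def evaluate_models(X_train, y_train, X_test, y_test):
        models = {
            "logistic_regression": LogisticRegression(max_iter=1000),
            "random_forest": RandomForestClassifier(n_estimators=200, random_state=0),
            "knn": KNeighborsClassifier(),
        }
        scores = {}
        for name, model in models.items():
            model.fit(X_train, y_train)
            scores[name] = f1_score(y_test, model.predict(X_test), average="macro")
        return scores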