mirror of
https://gitlab.com/orbital-debris-research/directed-study/final-report.git
synced 2025-07-27 00:21:25 +00:00
setting up quarto
This commit is contained in:
460
report.tex
Normal file
460
report.tex
Normal file
@@ -0,0 +1,460 @@
|
||||
% Options for packages loaded elsewhere
|
||||
\PassOptionsToPackage{unicode}{hyperref}
|
||||
\PassOptionsToPackage{hyphens}{url}
|
||||
\PassOptionsToPackage{dvipsnames,svgnames,x11names}{xcolor}
|
||||
%
|
||||
\documentclass[
|
||||
letterpaper,
|
||||
DIV=11,
|
||||
numbers=noendperiod,
|
||||
oneside]{scrartcl}
|
||||
\usepackage{amsmath,amssymb}
|
||||
\usepackage{lmodern}
|
||||
\usepackage{iftex}
|
||||
\ifPDFTeX
|
||||
\usepackage[T1]{fontenc}
|
||||
\usepackage[utf8]{inputenc}
|
||||
\usepackage{textcomp} % provide euro and other symbols
|
||||
\else % if luatex or xetex
|
||||
\usepackage{unicode-math}
|
||||
\defaultfontfeatures{Scale=MatchLowercase}
|
||||
\defaultfontfeatures[\rmfamily]{Ligatures=TeX,Scale=1}
|
||||
\fi
|
||||
% Use upquote if available, for straight quotes in verbatim environments
|
||||
\IfFileExists{upquote.sty}{\usepackage{upquote}}{}
|
||||
\IfFileExists{microtype.sty}{% use microtype if available
|
||||
\usepackage[]{microtype}
|
||||
\UseMicrotypeSet[protrusion]{basicmath} % disable protrusion for tt fonts
|
||||
}{}
|
||||
\makeatletter
|
||||
\@ifundefined{KOMAClassName}{% if non-KOMA class
|
||||
\IfFileExists{parskip.sty}{%
|
||||
\usepackage{parskip}
|
||||
}{% else
|
||||
\setlength{\parindent}{0pt}
|
||||
\setlength{\parskip}{6pt plus 2pt minus 1pt}}
|
||||
}{% if KOMA class
|
||||
\KOMAoptions{parskip=half}}
|
||||
\makeatother
|
||||
\usepackage{xcolor}
|
||||
\IfFileExists{xurl.sty}{\usepackage{xurl}}{} % add URL line breaks if available
|
||||
\IfFileExists{bookmark.sty}{\usepackage{bookmark}}{\usepackage{hyperref}}
|
||||
\hypersetup{
|
||||
pdftitle={Characterization of Space Debris using Machine Learning Methods},
|
||||
pdfauthor={Anson Biggs},
|
||||
colorlinks=true,
|
||||
linkcolor={blue},
|
||||
filecolor={Maroon},
|
||||
citecolor={Blue},
|
||||
urlcolor={Blue},
|
||||
pdfcreator={LaTeX via pandoc}}
|
||||
\urlstyle{same} % disable monospaced font for URLs
|
||||
\usepackage[left=1in,marginparwidth=2.0666666666667in,textwidth=4.1333333333333in,marginparsep=0.3in]{geometry}
|
||||
\setlength{\emergencystretch}{3em} % prevent overfull lines
|
||||
\setcounter{secnumdepth}{-\maxdimen} % remove section numbering
|
||||
% Make \paragraph and \subparagraph free-standing
|
||||
\ifx\paragraph\undefined\else
|
||||
\let\oldparagraph\paragraph
|
||||
\renewcommand{\paragraph}[1]{\oldparagraph{#1}\mbox{}}
|
||||
\fi
|
||||
\ifx\subparagraph\undefined\else
|
||||
\let\oldsubparagraph\subparagraph
|
||||
\renewcommand{\subparagraph}[1]{\oldsubparagraph{#1}\mbox{}}
|
||||
\fi
|
||||
|
||||
\usepackage{color}
|
||||
\usepackage{fancyvrb}
|
||||
\newcommand{\VerbBar}{|}
|
||||
\newcommand{\VERB}{\Verb[commandchars=\\\{\}]}
|
||||
\DefineVerbatimEnvironment{Highlighting}{Verbatim}{commandchars=\\\{\}}
|
||||
% Add ',fontsize=\small' for more characters per line
|
||||
\usepackage{framed}
|
||||
\definecolor{shadecolor}{RGB}{241,243,245}
|
||||
\newenvironment{Shaded}{\begin{snugshade}}{\end{snugshade}}
|
||||
\newcommand{\AlertTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
|
||||
\newcommand{\AnnotationTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{#1}}
|
||||
\newcommand{\AttributeTok}[1]{\textcolor[rgb]{0.40,0.45,0.13}{#1}}
|
||||
\newcommand{\BaseNTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
|
||||
\newcommand{\BuiltInTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
|
||||
\newcommand{\CharTok}[1]{\textcolor[rgb]{0.13,0.47,0.30}{#1}}
|
||||
\newcommand{\CommentTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{#1}}
|
||||
\newcommand{\CommentVarTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{\textit{#1}}}
|
||||
\newcommand{\ConstantTok}[1]{\textcolor[rgb]{0.56,0.35,0.01}{#1}}
|
||||
\newcommand{\ControlFlowTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
|
||||
\newcommand{\DataTypeTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
|
||||
\newcommand{\DecValTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
|
||||
\newcommand{\DocumentationTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{\textit{#1}}}
|
||||
\newcommand{\ErrorTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
|
||||
\newcommand{\ExtensionTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
|
||||
\newcommand{\FloatTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
|
||||
\newcommand{\FunctionTok}[1]{\textcolor[rgb]{0.28,0.35,0.67}{#1}}
|
||||
\newcommand{\ImportTok}[1]{\textcolor[rgb]{0.00,0.46,0.62}{#1}}
|
||||
\newcommand{\InformationTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{#1}}
|
||||
\newcommand{\KeywordTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
|
||||
\newcommand{\NormalTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
|
||||
\newcommand{\OperatorTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{#1}}
|
||||
\newcommand{\OtherTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
|
||||
\newcommand{\PreprocessorTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
|
||||
\newcommand{\RegionMarkerTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
|
||||
\newcommand{\SpecialCharTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{#1}}
|
||||
\newcommand{\SpecialStringTok}[1]{\textcolor[rgb]{0.13,0.47,0.30}{#1}}
|
||||
\newcommand{\StringTok}[1]{\textcolor[rgb]{0.13,0.47,0.30}{#1}}
|
||||
\newcommand{\VariableTok}[1]{\textcolor[rgb]{0.07,0.07,0.07}{#1}}
|
||||
\newcommand{\VerbatimStringTok}[1]{\textcolor[rgb]{0.13,0.47,0.30}{#1}}
|
||||
\newcommand{\WarningTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{\textit{#1}}}
|
||||
|
||||
\providecommand{\tightlist}{%
|
||||
\setlength{\itemsep}{0pt}\setlength{\parskip}{0pt}}\usepackage{longtable,booktabs,array}
|
||||
\usepackage{calc} % for calculating minipage widths
|
||||
% Correct order of tables after \paragraph or \subparagraph
|
||||
\usepackage{etoolbox}
|
||||
\makeatletter
|
||||
\patchcmd\longtable{\par}{\if@noskipsec\mbox{}\fi\par}{}{}
|
||||
\makeatother
|
||||
% Allow footnotes in longtable head/foot
|
||||
\IfFileExists{footnotehyper.sty}{\usepackage{footnotehyper}}{\usepackage{footnote}}
|
||||
\makesavenoteenv{longtable}
|
||||
\usepackage{graphicx}
|
||||
\makeatletter
|
||||
\def\maxwidth{\ifdim\Gin@nat@width>\linewidth\linewidth\else\Gin@nat@width\fi}
|
||||
\def\maxheight{\ifdim\Gin@nat@height>\textheight\textheight\else\Gin@nat@height\fi}
|
||||
\makeatother
|
||||
% Scale images if necessary, so that they will not overflow the page
|
||||
% margins by default, and it is still possible to overwrite the defaults
|
||||
% using explicit options in \includegraphics[width, height, ...]{}
|
||||
\setkeys{Gin}{width=\maxwidth,height=\maxheight,keepaspectratio}
|
||||
% Set default figure placement to htbp
|
||||
\makeatletter
|
||||
\def\fps@figure{htbp}
|
||||
\makeatother
|
||||
\newlength{\cslhangindent}
|
||||
\setlength{\cslhangindent}{1.5em}
|
||||
\newlength{\csllabelwidth}
|
||||
\setlength{\csllabelwidth}{3em}
|
||||
\newlength{\cslentryspacingunit} % times entry-spacing
|
||||
\setlength{\cslentryspacingunit}{\parskip}
|
||||
\newenvironment{CSLReferences}[2] % #1 hanging-ident, #2 entry spacing
|
||||
{% don't indent paragraphs
|
||||
\setlength{\parindent}{0pt}
|
||||
% turn on hanging indent if param 1 is 1
|
||||
\ifodd #1
|
||||
\let\oldpar\par
|
||||
\def\par{\hangindent=\cslhangindent\oldpar}
|
||||
\fi
|
||||
% set entry spacing
|
||||
\setlength{\parskip}{#2\cslentryspacingunit}
|
||||
}%
|
||||
{}
|
||||
\usepackage{calc}
|
||||
\newcommand{\CSLBlock}[1]{#1\hfill\break}
|
||||
\newcommand{\CSLLeftMargin}[1]{\parbox[t]{\csllabelwidth}{#1}}
|
||||
\newcommand{\CSLRightInline}[1]{\parbox[t]{\linewidth - \csllabelwidth}{#1}\break}
|
||||
\newcommand{\CSLIndent}[1]{\hspace{\cslhangindent}#1}
|
||||
|
||||
\KOMAoption{captions}{tableheading}
|
||||
\makeatletter
|
||||
\makeatother
|
||||
\makeatletter
|
||||
\@ifpackageloaded{caption}{}{\usepackage{caption}}
|
||||
\AtBeginDocument{%
|
||||
\ifdefined\contentsname
|
||||
\renewcommand*\contentsname{Table of contents}
|
||||
\else
|
||||
\newcommand\contentsname{Table of contents}
|
||||
\fi
|
||||
\ifdefined\listfigurename
|
||||
\renewcommand*\listfigurename{List of Figures}
|
||||
\else
|
||||
\newcommand\listfigurename{List of Figures}
|
||||
\fi
|
||||
\ifdefined\listtablename
|
||||
\renewcommand*\listtablename{List of Tables}
|
||||
\else
|
||||
\newcommand\listtablename{List of Tables}
|
||||
\fi
|
||||
\ifdefined\figurename
|
||||
\renewcommand*\figurename{Figure}
|
||||
\else
|
||||
\newcommand\figurename{Figure}
|
||||
\fi
|
||||
\ifdefined\tablename
|
||||
\renewcommand*\tablename{Table}
|
||||
\else
|
||||
\newcommand\tablename{Table}
|
||||
\fi
|
||||
}
|
||||
\@ifpackageloaded{float}{}{\usepackage{float}}
|
||||
\floatstyle{ruled}
|
||||
\@ifundefined{c@chapter}{\newfloat{codelisting}{h}{lop}}{\newfloat{codelisting}{h}{lop}[chapter]}
|
||||
\floatname{codelisting}{Listing}
|
||||
\newcommand*\listoflistings{\listof{codelisting}{List of Listings}}
|
||||
\makeatother
|
||||
\makeatletter
|
||||
\@ifpackageloaded{caption}{}{\usepackage{caption}}
|
||||
\@ifpackageloaded{subcaption}{}{\usepackage{subcaption}}
|
||||
\makeatother
|
||||
\makeatletter
|
||||
\@ifpackageloaded{tcolorbox}{}{\usepackage[many]{tcolorbox}}
|
||||
\makeatother
|
||||
\makeatletter
|
||||
\@ifundefined{shadecolor}{\definecolor{shadecolor}{rgb}{.97, .97, .97}}{}
|
||||
\makeatother
|
||||
\makeatletter
|
||||
\@ifpackageloaded{sidenotes}{}{\usepackage{sidenotes}}
|
||||
\@ifpackageloaded{marginnote}{}{\usepackage{marginnote}}
|
||||
\makeatother
|
||||
\makeatletter
|
||||
\makeatother
|
||||
\ifLuaTeX
|
||||
\usepackage{selnolig} % disable illegal ligatures
|
||||
\fi
|
||||
|
||||
\title{Characterization of Space Debris using Machine Learning Methods}
|
||||
\usepackage{etoolbox}
|
||||
\makeatletter
|
||||
\providecommand{\subtitle}[1]{% add subtitle to \maketitle
|
||||
\apptocmd{\@title}{\par {\large #1 \par}}{}{}
|
||||
}
|
||||
\makeatother
|
||||
\subtitle{Advanced processing of 3D meshes using Julia, and data science
|
||||
in Matlab.}
|
||||
\author{Anson Biggs}
|
||||
\date{4/30/2022}
|
||||
|
||||
\begin{document}
|
||||
\maketitle
|
||||
|
||||
\ifdefined\Shaded\renewenvironment{Shaded}{\begin{tcolorbox}[interior hidden, borderline west={3pt}{0pt}{shadecolor}, boxrule=0pt, enhanced, breakable, sharp corners, frame hidden]}{\end{tcolorbox}}\fi
|
||||
|
||||
\hypertarget{gathering-data}{%
|
||||
\subsection{Gathering Data}\label{gathering-data}}
|
||||
|
||||
To get started on the project before any scans of the actual debris are
|
||||
made available, I opted to find 3D models online and process them as if
|
||||
they were data collected by my team. GrabCAD is an excellent source of
|
||||
high-quality 3D models, and all the models have, at worst, a
|
||||
non-commercial license making them suitable for this study. The current
|
||||
dataset uses three separate satellite assemblies found on GrabCAD, below
|
||||
is an example of one of the satellites that was used.
|
||||
|
||||
\begin{figure}
|
||||
|
||||
{\centering \includegraphics{Figures/assembly.jpg}
|
||||
|
||||
}
|
||||
|
||||
\caption{Example CubeSat Used for Analysis}
|
||||
|
||||
\end{figure}
|
||||
|
||||
\hypertarget{data-preparation}{%
|
||||
\subsection{Data Preparation}\label{data-preparation}}
|
||||
|
||||
The models were processed in Blender, which quickly converted the
|
||||
assemblies to \texttt{stl} files, giving 108 unique parts to be
|
||||
processed. Since the final size of the dataset is expected to
|
||||
be in the magnitude of the thousands, an algorithm capable of getting
|
||||
the required properties of each part is the only feasible solution. From
|
||||
the analysis performed in
|
||||
\href{https://gitlab.com/orbital-debris-research/directed-study/report-1/-/blob/main/README.md}{Report
|
||||
1}, we know that the essential debris property is the moments of inertia
|
||||
which helped narrow down potential algorithms. Unfortunately, this is
|
||||
one of the more complicated things to calculate from a mesh, but thanks
|
||||
to a paper from (Eberly
|
||||
2002)\marginpar{\begin{footnotesize}\leavevmode\vadjust pre{\protect\hypertarget{ref-eberlyPolyhedralMassProperties2002}{}}%
|
||||
Eberly, David. 2002. {``Polyhedral {Mass Properties} ({Revisited}).''}
|
||||
\url{https://www.geometrictools.com/Documentation/PolyhedralMassProperties.pdf}.\vspace{2mm}\par\end{footnotesize}}
|
||||
titled
|
||||
\href{https://www.geometrictools.com/Documentation/PolyhedralMassProperties.pdf}{Polyhedral
|
||||
Mass Properties}, his algorithm was implemented in the Julia programming
|
||||
language. The current implementation of the algorithm calculates a
|
||||
moment of inertia tensor, volume, center of gravity, characteristic
|
||||
length, and surface body dimensions in a few milliseconds per part. The
|
||||
library can be found
|
||||
\href{https://gitlab.com/MisterBiggs/stl-process}{here}. The
|
||||
characteristic length is a value that is heavily used by the NASA
|
||||
DebriSat project (Murray et al.
|
||||
2019)\marginpar{\begin{footnotesize}\leavevmode\vadjust pre{\protect\hypertarget{ref-DebriSat2019}{}}%
|
||||
Murray, James, Heather Cowardin, J-C Liou, Marlon Sorge, Norman
|
||||
Fitz-Coy, and Tom Huynh. 2019. {``Analysis of the DebriSat Fragments and
|
||||
Comparison to the NASA Standard Satellite Breakup Model.''} In
|
||||
\emph{International Orbital Debris Conference (IOC)}. JSC-E-DAA-TN73918.
|
||||
\url{https://ntrs.nasa.gov/citations/20190034081}.\vspace{2mm}\par\end{footnotesize}}
|
||||
that is doing very similar work to this project. The characteristic
|
||||
length takes the maximum orthogonal dimension of a body, sums the
|
||||
dimensions then divides by 3 to produce a single scalar value that can
|
||||
be used to get an idea of the size of a 3D object.
|
||||
|
||||
\begin{figure}
|
||||
|
||||
{\centering \includegraphics{Figures/current_process.pdf}
|
||||
|
||||
}
|
||||
|
||||
\caption{Current mesh processing pipeline}
|
||||
|
||||
\end{figure}
|
||||
|
||||
The algorithm's speed is critical not only for the eventual large number
|
||||
of debris pieces that have to be processed, but many of the data science
|
||||
algorithms we plan on performing on the compiled data need the data to
|
||||
be normalized. For the current dataset and properties, it makes the most
|
||||
sense to normalize the dataset based on volume. Volume was chosen for
|
||||
multiple reasons, namely because it was easy to implement an efficient
|
||||
algorithm to calculate volume, and currently, volume produces the least
|
||||
amount of variation out of the current set of properties calculated.
|
||||
Unfortunately, scaling a model to a specific volume is an iterative
|
||||
process, but can be done very efficiently using derivative-free
|
||||
numerical root-finding algorithms. The current implementation can scale
|
||||
and process all the properties using only 30\% more time than getting
|
||||
the properties without first scaling.
|
||||
|
||||
\begin{Shaded}
|
||||
\begin{Highlighting}[]
|
||||
\NormalTok{ Row │ variable mean min median max}
|
||||
\NormalTok{─────┼───────────────────────────────────────────────────────────────────}
|
||||
\NormalTok{ 1 │ surface\_area 25.2002 5.60865 13.3338 159.406}
|
||||
\NormalTok{ 2 │ characteristic\_length 79.5481 0.158521 1.55816 1582.23}
|
||||
\NormalTok{ 3 │ sbx 1.40222 0.0417367 0.967078 10.0663}
|
||||
\NormalTok{ 4 │ sby 3.3367 0.0125824 2.68461 9.68361}
|
||||
\NormalTok{ 5 │ sbz 3.91184 0.29006 1.8185 14.7434}
|
||||
\NormalTok{ 6 │ Ix 1.58725 0.0311782 0.23401 11.1335}
|
||||
\NormalTok{ 7 │ Iy 3.74345 0.178598 1.01592 24.6735}
|
||||
\NormalTok{ 8 │ Iz 5.20207 0.178686 1.742 32.0083}
|
||||
\end{Highlighting}
|
||||
\end{Shaded}
|
||||
|
||||
Above is a summary of the current 108 parts with scaling. Since all the
|
||||
volumes are the same, it is left out of the dataset; the center of
|
||||
gravity is also left out of the dataset since it currently is just an
|
||||
artifact of the \texttt{stl} file format. There are many ways to
|
||||
determine the `center' of a 3D mesh, but since only one is being
|
||||
implemented at the moment, comparisons to other properties don't make
|
||||
sense. The other notable part of the data is the model is rotated so
|
||||
that the magnitudes of \texttt{Iz}, \texttt{Iy}, and \texttt{Ix} are in
|
||||
descending order. This makes sure that the rotation of a model doesn't
|
||||
matter for characterization. The dataset is available for download here:
|
||||
|
||||
\begin{itemize}
|
||||
\tightlist
|
||||
\item
|
||||
\href{https://gitlab.com/orbital-debris-research/directed-study/report-3/-/blob/main/scaled_dataset.csv}{scaled\_dataset.csv}
|
||||
\end{itemize}
|
||||
|
||||
\hypertarget{characterization}{%
|
||||
\subsection{Characterization}\label{characterization}}
|
||||
|
||||
The first step toward characterization is to perform a principal
|
||||
component analysis to determine what properties of the data capture the
|
||||
most variation. \texttt{PCA} also requires that the data is scaled, so
|
||||
as discussed above the dataset that is scaled by \texttt{volume} will be
|
||||
used. \texttt{PCA} is implemented manually instead of the Matlab
|
||||
built-in function as shown below:
|
||||
|
||||
\begin{Shaded}
|
||||
\begin{Highlighting}[]
|
||||
\CommentTok{\% covariance matrix of data points}
|
||||
\VariableTok{S}\OperatorTok{=}\VariableTok{cov}\NormalTok{(}\VariableTok{scaled\_data}\NormalTok{)}\OperatorTok{;}
|
||||
|
||||
\CommentTok{\% eigenvalues of S}
|
||||
\VariableTok{eig\_vals} \OperatorTok{=} \VariableTok{eig}\NormalTok{(}\VariableTok{S}\NormalTok{)}\OperatorTok{;}
|
||||
|
||||
\CommentTok{\% sorting eigenvalues from largest to smallest}
|
||||
\NormalTok{[}\VariableTok{lambda}\OperatorTok{,} \VariableTok{sort\_index}\NormalTok{] }\OperatorTok{=} \VariableTok{sort}\NormalTok{(}\VariableTok{eig\_vals}\OperatorTok{,}\SpecialStringTok{\textquotesingle{}descend\textquotesingle{}}\NormalTok{)}\OperatorTok{;}
|
||||
|
||||
|
||||
\VariableTok{lambda\_ratio} \OperatorTok{=} \VariableTok{cumsum}\NormalTok{(}\VariableTok{lambda}\NormalTok{) }\OperatorTok{./} \VariableTok{sum}\NormalTok{(}\VariableTok{lambda}\NormalTok{)}
|
||||
\end{Highlighting}
|
||||
\end{Shaded}
|
||||
|
||||
Then plotting \texttt{lambda\_ratio}, which is the
|
||||
\texttt{cumsum}/\texttt{sum}, produces the following plot:
|
||||
|
||||
\begin{figure}
|
||||
|
||||
{\centering \includegraphics{Figures/pca.png}
|
||||
|
||||
}
|
||||
|
||||
\caption{PCA Plot}
|
||||
|
||||
\end{figure}
|
||||
|
||||
The current dataset can be described incredibly well just by looking at
|
||||
\texttt{Iz}, which again the models are rotated so that \texttt{Iz} is
|
||||
the largest moment of inertia. Then including \texttt{Iy} and
|
||||
\texttt{Ix} means that a 3D plot of the principal moments of inertia
|
||||
almost captures all the variation in the data.
|
||||
|
||||
The next step for characterization is to get only the inertias from the
|
||||
dataset. Since the current dataset is so small, the scaled dataset will
|
||||
be used for the rest of the characterization process. Once more parts are
|
||||
added to the database it will make sense to start looking at the raw
|
||||
dataset. Now we can proceed to cluster the data using the k-means method
|
||||
of clustering. To properly use k-means, a value of k, which is the number
|
||||
of clusters, needs to be determined. This can be done by creating an
|
||||
elbow plot using the following code:
|
||||
|
||||
\begin{Shaded}
|
||||
\begin{Highlighting}[]
|
||||
\KeywordTok{for} \VariableTok{ii}\OperatorTok{=}\FloatTok{1}\OperatorTok{:}\FloatTok{20}
|
||||
\NormalTok{ [}\VariableTok{idx}\OperatorTok{,\textasciitilde{},}\VariableTok{sumd}\NormalTok{] }\OperatorTok{=} \VariableTok{kmeans}\NormalTok{(}\VariableTok{inertia}\OperatorTok{,}\VariableTok{ii}\NormalTok{)}\OperatorTok{;}
|
||||
\VariableTok{J}\NormalTok{(}\VariableTok{ii}\NormalTok{)}\OperatorTok{=}\VariableTok{norm}\NormalTok{(}\VariableTok{sumd}\NormalTok{)}\OperatorTok{;}
|
||||
\KeywordTok{end}
|
||||
\end{Highlighting}
|
||||
\end{Shaded}
|
||||
|
||||
Which produces the following plot:
|
||||
|
||||
\begin{figure}
|
||||
|
||||
{\centering \includegraphics{Figures/kmeans.png}
|
||||
|
||||
}
|
||||
|
||||
\caption{Elbow method to determine the required number of clusters.}
|
||||
|
||||
\end{figure}
|
||||
|
||||
As can be seen in the above elbow plot, at 6 clusters there is an
|
||||
``elbow,'' where there is a large drop in the sum distance to
|
||||
the centroid of each cluster, which indicates the optimal number
|
||||
of clusters. Plotting the inertias with 6 k-means clusters
|
||||
produces the following plot:
|
||||
|
||||
\begin{figure}
|
||||
|
||||
{\centering \includegraphics{Figures/inertia3d.png}
|
||||
|
||||
}
|
||||
|
||||
\caption{Moments of Inertia plotted with 6 clusters.}
|
||||
|
||||
\end{figure}
|
||||
|
||||
From this plot it is immediately clear that there are clusters of
|
||||
outliers. These are due to the different shapes and the extreme values
|
||||
are slender rods or flat plates while the clusters closer to the center
|
||||
more closely resemble a sphere. As the dataset grows it should become
|
||||
more apparent what kind of clusters actually make up a satellite, and
|
||||
eventually space debris in general.
|
||||
|
||||
\hypertarget{next-steps}{%
|
||||
\subsection{Next Steps}\label{next-steps}}
|
||||
|
||||
The current dataset needs to be grown in both the amount of data and the
|
||||
variety of data. The most glaring issue with the current dataset is the
|
||||
lack of any debris since the parts are straight from satellite
|
||||
assemblies. Getting accurate properties from the current scans we have
|
||||
is an entire research project in itself, so hopefully, getting pieces
|
||||
that are easier to scan can help bring the project back on track. The
|
||||
other and harder-to-fix issue is finding/deriving more data properties.
|
||||
Properties such as cross-sectional or aerodynamic drag would be very
|
||||
insightful but are likely to be difficult to implement in code and
|
||||
significantly more resource intensive than the current properties the
|
||||
code can derive.
|
||||
|
||||
|
||||
|
||||
|
||||
\end{document}
|
Reference in New Issue
Block a user