Anson 2022-04-26 00:40:18 -07:00
parent 8d3aebc70c
commit 329e2b9fcc
10 changed files with 189 additions and 1333 deletions

.gitignore (new file)

@@ -0,0 +1,3 @@
.jupyter_cache
*.log
*.tex


@@ -1 +0,0 @@
0.5.0


@@ -1,248 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "a45c1344",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
"(process:28652): GLib-GIO-WARNING **: 12:50:31.472: Unexpectedly, UWP app `KDEe.V.Okular_21.1203.941.0_x64__7vt06qxq7ptv8' (AUMId `KDEe.V.Okular_7vt06qxq7ptv8!KDEe.V.Okular') supports 5 extensions but has no verbs\n"
]
}
],
"source": [
"import IJulia\n",
"\n",
"# The julia kernel has built in support for Revise.jl, so this is the \n",
"# recommended approach for long-running sessions:\n",
"# https://github.com/JuliaLang/IJulia.jl/blob/9b10fa9b879574bbf720f5285029e07758e50a5e/src/kernel.jl#L46-L51\n",
"\n",
"# Users should enable revise within .julia/config/startup_ijulia.jl:\n",
"# https://timholy.github.io/Revise.jl/stable/config/#Using-Revise-automatically-within-Jupyter/IJulia-1\n",
"\n",
"# clear console history\n",
"IJulia.clear_history()\n",
"\n",
"# Intialize Plots w/ default fig width/height\n",
"try\n",
" fig_width = 7\n",
" fig_height = 5\n",
" fig_format = :retina\n",
" fig_dpi = 96\n",
" # no retina format type, use svg for high quality type/marks\n",
" if fig_format == :retina\n",
" fig_format = :svg\n",
" # IJulia doesn't support PDF output so use png (if the DPI \n",
" # remains the default of 300 then set to 96)\n",
" elseif fig_format == :pdf\n",
" fig_format = :png\n",
" fig_dpi = 96\n",
" end\n",
" # convert inches to pixels\n",
" fig_width = fig_width * fig_dpi\n",
" fig_height = fig_height * fig_dpi\n",
" using Plots\n",
" gr(size=(fig_width, fig_height), fmt = fig_format, dpi = fig_dpi)\n",
"catch e\n",
" # @warn \"Plots init\" exception=(e, catch_backtrace())\n",
"end\n",
"\n",
"# Set run_path if specified\n",
"try\n",
" run_path = \"\"\n",
" if !isempty(run_path)\n",
" cd(run_path)\n",
" end\n",
"catch e\n",
" @warn \"Run path init:\" exception=(e, catch_backtrace())\n",
"end\n",
"\n",
"# don't return kernel dependencies (b/c Revise should take care of dependencies)\n",
"nothing\n"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "189db2ad",
"metadata": {},
"outputs": [],
"source": [
"#| code-fold: true\n",
"#| output: false\n",
"using FileIO\n",
"using MeshIO\n",
"\n",
"using stlProcess\n",
"\n",
"using CSV\n",
"using DataFrames\n",
"\n",
"using LinearAlgebra"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "5e47d862",
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div class=\"data-frame\"><p>0 rows × 8 columns (omitted printing of 1 columns)</p><table class=\"data-frame\"><thead><tr><th></th><th>surface_area</th><th>characteristic_length</th><th>sbx</th><th>sby</th><th>sbz</th><th>Ix</th><th>Iy</th></tr><tr><th></th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th></tr></thead><tbody></tbody></table></div>"
],
"text/latex": [
"\\begin{tabular}{r|cccccccc}\n",
"\t& surface\\_area & characteristic\\_length & sbx & sby & sbz & Ix & Iy & \\\\\n",
"\t\\hline\n",
"\t& Float64 & Float64 & Float64 & Float64 & Float64 & Float64 & Float64 & \\\\\n",
"\t\\hline\n",
"\\end{tabular}\n"
],
"text/plain": [
"\u001b[1m0×8 DataFrame\u001b[0m"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# local path to https://gitlab.com/orbital-debris-research/fake-satellite-dataset\n",
"dataset_path = raw\"C:\\Coding\\fake-satellite-dataset\"\n",
"\n",
"folders = [\"1_5U\", \"assembly1\", \"cubesat\"]\n",
"\n",
"df = DataFrame(;\n",
" surface_area=Float64[],\n",
" characteristic_length=Float64[],\n",
" sbx=Float64[],\n",
" sby=Float64[],\n",
" sbz=Float64[],\n",
" Ix=Float64[],\n",
" Iy=Float64[],\n",
" Iz=Float64[],\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "6c55224d",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Processing Path: C:\\Coding\\fake-satellite-dataset\\1_5U\n",
"Processing Path: C:\\Coding\\fake-satellite-dataset\\assembly1\n",
"Processing Path: C:\\Coding\\fake-satellite-dataset\\cubesat\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"┌ Warning: Characteristic Length Algorithm failed to converge, this usually means stl is flat. Setting length in dir to 0.\n",
"└ @ stlProcess C:\\Users\\albig\\.julia\\packages\\stlProcess\\8rsc7\\src\\stlProcess.jl:153\n"
]
}
],
"source": [
"for path in dataset_path * \"\\\\\" .* folders\n",
" println(\"Processing Path: \", path)\n",
" Threads.@threads for file in readdir(path)\n",
" stl = load(path * \"\\\\\" * file)\n",
" scale = find_scale(stl)\n",
" props = get_mass_properties(stl; scale=scale)\n",
"\n",
" eigs = eigvals(props.inertia)\n",
" sort_index = sortperm(eigs)\n",
" Ix, Iy, Iz = eigs[sort_index]\n",
" sbx, sby, sbz = props.sb_values[sort_index]\n",
"\n",
" push!(\n",
" df,\n",
" [props.surface_area, props.characteristic_length, sbx, sby, sbz, Ix, Iy, Iz],\n",
" )\n",
" end\n",
"end"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "2e520bb3",
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div class=\"data-frame\"><p>8 rows × 7 columns</p><table class=\"data-frame\"><thead><tr><th></th><th>variable</th><th>mean</th><th>min</th><th>median</th><th>max</th><th>nmissing</th><th>eltype</th></tr><tr><th></th><th title=\"Symbol\">Symbol</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Int64\">Int64</th><th title=\"DataType\">DataType</th></tr></thead><tbody><tr><th>1</th><td>surface_area</td><td>25.2002</td><td>5.60865</td><td>13.3338</td><td>159.406</td><td>0</td><td>Float64</td></tr><tr><th>2</th><td>characteristic_length</td><td>79.5481</td><td>0.158521</td><td>1.55816</td><td>1582.23</td><td>0</td><td>Float64</td></tr><tr><th>3</th><td>sbx</td><td>1.40222</td><td>0.0417367</td><td>0.967078</td><td>10.0663</td><td>0</td><td>Float64</td></tr><tr><th>4</th><td>sby</td><td>3.3367</td><td>0.0125824</td><td>2.68461</td><td>9.68361</td><td>0</td><td>Float64</td></tr><tr><th>5</th><td>sbz</td><td>3.91184</td><td>0.29006</td><td>1.8185</td><td>14.7434</td><td>0</td><td>Float64</td></tr><tr><th>6</th><td>Ix</td><td>1.58725</td><td>0.0311782</td><td>0.23401</td><td>11.1335</td><td>0</td><td>Float64</td></tr><tr><th>7</th><td>Iy</td><td>3.74345</td><td>0.178598</td><td>1.01592</td><td>24.6735</td><td>0</td><td>Float64</td></tr><tr><th>8</th><td>Iz</td><td>5.20207</td><td>0.178686</td><td>1.742</td><td>32.0083</td><td>0</td><td>Float64</td></tr></tbody></table></div>"
],
"text/latex": [
"\\begin{tabular}{r|ccccccc}\n",
"\t& variable & mean & min & median & max & nmissing & eltype\\\\\n",
"\t\\hline\n",
"\t& Symbol & Float64 & Float64 & Float64 & Float64 & Int64 & DataType\\\\\n",
"\t\\hline\n",
"\t1 & surface\\_area & 25.2002 & 5.60865 & 13.3338 & 159.406 & 0 & Float64 \\\\\n",
"\t2 & characteristic\\_length & 79.5481 & 0.158521 & 1.55816 & 1582.23 & 0 & Float64 \\\\\n",
"\t3 & sbx & 1.40222 & 0.0417367 & 0.967078 & 10.0663 & 0 & Float64 \\\\\n",
"\t4 & sby & 3.3367 & 0.0125824 & 2.68461 & 9.68361 & 0 & Float64 \\\\\n",
"\t5 & sbz & 3.91184 & 0.29006 & 1.8185 & 14.7434 & 0 & Float64 \\\\\n",
"\t6 & Ix & 1.58725 & 0.0311782 & 0.23401 & 11.1335 & 0 & Float64 \\\\\n",
"\t7 & Iy & 3.74345 & 0.178598 & 1.01592 & 24.6735 & 0 & Float64 \\\\\n",
"\t8 & Iz & 5.20207 & 0.178686 & 1.742 & 32.0083 & 0 & Float64 \\\\\n",
"\\end{tabular}\n"
],
"text/plain": [
"\u001b[1m8×7 DataFrame\u001b[0m\n",
"\u001b[1m Row \u001b[0m│\u001b[1m variable \u001b[0m\u001b[1m mean \u001b[0m\u001b[1m min \u001b[0m\u001b[1m median \u001b[0m\u001b[1m max \u001b[0m\u001b[1m nmis\u001b[0m ⋯\n",
"\u001b[1m \u001b[0m│\u001b[90m Symbol \u001b[0m\u001b[90m Float64 \u001b[0m\u001b[90m Float64 \u001b[0m\u001b[90m Float64 \u001b[0m\u001b[90m Float64 \u001b[0m\u001b[90m Int6\u001b[0m ⋯\n",
"─────┼──────────────────────────────────────────────────────────────────────────\n",
" 1 │ surface_area 25.2002 5.60865 13.3338 159.406 ⋯\n",
" 2 │ characteristic_length 79.5481 0.158521 1.55816 1582.23\n",
" 3 │ sbx 1.40222 0.0417367 0.967078 10.0663\n",
" 4 │ sby 3.3367 0.0125824 2.68461 9.68361\n",
" 5 │ sbz 3.91184 0.29006 1.8185 14.7434 ⋯\n",
" 6 │ Ix 1.58725 0.0311782 0.23401 11.1335\n",
" 7 │ Iy 3.74345 0.178598 1.01592 24.6735\n",
" 8 │ Iz 5.20207 0.178686 1.742 32.0083\n",
"\u001b[36m 2 columns omitted\u001b[0m"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"describe(df)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Julia 1.8.0-beta3",
"language": "julia",
"name": "julia-1.8"
},
"language_info": {
"file_extension": ".jl",
"mimetype": "application/julia",
"name": "julia",
"version": "1.8.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}


@@ -1,237 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "129f639c",
"metadata": {},
"outputs": [],
"source": [
"import IJulia\n",
"\n",
"# The julia kernel has built in support for Revise.jl, so this is the \n",
"# recommended approach for long-running sessions:\n",
"# https://github.com/JuliaLang/IJulia.jl/blob/9b10fa9b879574bbf720f5285029e07758e50a5e/src/kernel.jl#L46-L51\n",
"\n",
"# Users should enable revise within .julia/config/startup_ijulia.jl:\n",
"# https://timholy.github.io/Revise.jl/stable/config/#Using-Revise-automatically-within-Jupyter/IJulia-1\n",
"\n",
"# clear console history\n",
"IJulia.clear_history()\n",
"\n",
"# Intialize Plots w/ default fig width/height\n",
"try\n",
" fig_width = 7\n",
" fig_height = 5\n",
" fig_format = :retina\n",
" fig_dpi = 96\n",
" # no retina format type, use svg for high quality type/marks\n",
" if fig_format == :retina\n",
" fig_format = :svg\n",
" # IJulia doesn't support PDF output so use png (if the DPI \n",
" # remains the default of 300 then set to 96)\n",
" elseif fig_format == :pdf\n",
" fig_format = :png\n",
" fig_dpi = 96\n",
" end\n",
" # convert inches to pixels\n",
" fig_width = fig_width * fig_dpi\n",
" fig_height = fig_height * fig_dpi\n",
" using Plots\n",
" gr(size=(fig_width, fig_height), fmt = fig_format, dpi = fig_dpi)\n",
"catch e\n",
" # @warn \"Plots init\" exception=(e, catch_backtrace())\n",
"end\n",
"\n",
"# Set run_path if specified\n",
"try\n",
" run_path = \"\"\n",
" if !isempty(run_path)\n",
" cd(run_path)\n",
" end\n",
"catch e\n",
" @warn \"Run path init:\" exception=(e, catch_backtrace())\n",
"end\n",
"\n",
"# don't return kernel dependencies (b/c Revise should take care of dependencies)\n",
"nothing\n"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "8b15e8a8",
"metadata": {},
"outputs": [],
"source": [
"#| code-fold: true\n",
"#| output: false\n",
"\n",
"using FileIO\n",
"using MeshIO\n",
"\n",
"using stlProcess\n",
"\n",
"using CSV\n",
"using DataFrames\n",
"\n",
"using LinearAlgebra"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "f40c9594",
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div class=\"data-frame\"><p>0 rows × 8 columns (omitted printing of 1 columns)</p><table class=\"data-frame\"><thead><tr><th></th><th>surface_area</th><th>characteristic_length</th><th>sbx</th><th>sby</th><th>sbz</th><th>Ix</th><th>Iy</th></tr><tr><th></th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th></tr></thead><tbody></tbody></table></div>"
],
"text/latex": [
"\\begin{tabular}{r|cccccccc}\n",
"\t& surface\\_area & characteristic\\_length & sbx & sby & sbz & Ix & Iy & \\\\\n",
"\t\\hline\n",
"\t& Float64 & Float64 & Float64 & Float64 & Float64 & Float64 & Float64 & \\\\\n",
"\t\\hline\n",
"\\end{tabular}\n"
],
"text/plain": [
"\u001b[1m0×8 DataFrame\u001b[0m"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"#| code-fold: true\n",
"#| output: false\n",
"\n",
"# local path to https://gitlab.com/orbital-debris-research/fake-satellite-dataset\n",
"dataset_path = raw\"C:\\Coding\\fake-satellite-dataset\"\n",
"\n",
"folders = [\"1_5U\", \"assembly1\", \"cubesat\"]\n",
"\n",
"df = DataFrame(;\n",
" surface_area=Float64[],\n",
" characteristic_length=Float64[],\n",
" sbx=Float64[],\n",
" sby=Float64[],\n",
" sbz=Float64[],\n",
" Ix=Float64[],\n",
" Iy=Float64[],\n",
" Iz=Float64[],\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "bb8f7086",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Processing Path: C:\\Coding\\fake-satellite-dataset\\1_5U\n",
"Processing Path: C:\\Coding\\fake-satellite-dataset\\assembly1\n",
"Processing Path: C:\\Coding\\fake-satellite-dataset\\cubesat\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"┌ Warning: Characteristic Length Algorithm failed to converge, this usually means stl is flat. Setting length in dir to 0.\n",
"└ @ stlProcess C:\\Users\\albig\\.julia\\packages\\stlProcess\\8rsc7\\src\\stlProcess.jl:153\n"
]
}
],
"source": [
"for path in dataset_path * \"\\\\\" .* folders\n",
" println(\"Processing Path: \", path)\n",
" Threads.@threads for file in readdir(path)\n",
" stl = load(path * \"\\\\\" * file)\n",
" scale = find_scale(stl)\n",
" props = get_mass_properties(stl; scale=scale)\n",
"\n",
" eigs = eigvals(props.inertia)\n",
" sort_index = sortperm(eigs)\n",
" Ix, Iy, Iz = eigs[sort_index]\n",
" sbx, sby, sbz = props.sb_values[sort_index]\n",
"\n",
" push!(\n",
" df,\n",
" [props.surface_area, props.characteristic_length, sbx, sby, sbz, Ix, Iy, Iz],\n",
" )\n",
" end\n",
"end"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "a51a601f",
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div class=\"data-frame\"><p>8 rows × 7 columns</p><table class=\"data-frame\"><thead><tr><th></th><th>variable</th><th>mean</th><th>min</th><th>median</th><th>max</th><th>nmissing</th><th>eltype</th></tr><tr><th></th><th title=\"Symbol\">Symbol</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Float64\">Float64</th><th title=\"Int64\">Int64</th><th title=\"DataType\">DataType</th></tr></thead><tbody><tr><th>1</th><td>surface_area</td><td>25.2002</td><td>5.60865</td><td>13.3338</td><td>159.406</td><td>0</td><td>Float64</td></tr><tr><th>2</th><td>characteristic_length</td><td>79.5481</td><td>0.158521</td><td>1.55816</td><td>1582.23</td><td>0</td><td>Float64</td></tr><tr><th>3</th><td>sbx</td><td>1.40222</td><td>0.0417367</td><td>0.967078</td><td>10.0663</td><td>0</td><td>Float64</td></tr><tr><th>4</th><td>sby</td><td>3.3367</td><td>0.0125824</td><td>2.68461</td><td>9.68361</td><td>0</td><td>Float64</td></tr><tr><th>5</th><td>sbz</td><td>3.91184</td><td>0.29006</td><td>1.8185</td><td>14.7434</td><td>0</td><td>Float64</td></tr><tr><th>6</th><td>Ix</td><td>1.58725</td><td>0.0311782</td><td>0.23401</td><td>11.1335</td><td>0</td><td>Float64</td></tr><tr><th>7</th><td>Iy</td><td>3.74345</td><td>0.178598</td><td>1.01592</td><td>24.6735</td><td>0</td><td>Float64</td></tr><tr><th>8</th><td>Iz</td><td>5.20207</td><td>0.178686</td><td>1.742</td><td>32.0083</td><td>0</td><td>Float64</td></tr></tbody></table></div>"
],
"text/latex": [
"\\begin{tabular}{r|ccccccc}\n",
"\t& variable & mean & min & median & max & nmissing & eltype\\\\\n",
"\t\\hline\n",
"\t& Symbol & Float64 & Float64 & Float64 & Float64 & Int64 & DataType\\\\\n",
"\t\\hline\n",
"\t1 & surface\\_area & 25.2002 & 5.60865 & 13.3338 & 159.406 & 0 & Float64 \\\\\n",
"\t2 & characteristic\\_length & 79.5481 & 0.158521 & 1.55816 & 1582.23 & 0 & Float64 \\\\\n",
"\t3 & sbx & 1.40222 & 0.0417367 & 0.967078 & 10.0663 & 0 & Float64 \\\\\n",
"\t4 & sby & 3.3367 & 0.0125824 & 2.68461 & 9.68361 & 0 & Float64 \\\\\n",
"\t5 & sbz & 3.91184 & 0.29006 & 1.8185 & 14.7434 & 0 & Float64 \\\\\n",
"\t6 & Ix & 1.58725 & 0.0311782 & 0.23401 & 11.1335 & 0 & Float64 \\\\\n",
"\t7 & Iy & 3.74345 & 0.178598 & 1.01592 & 24.6735 & 0 & Float64 \\\\\n",
"\t8 & Iz & 5.20207 & 0.178686 & 1.742 & 32.0083 & 0 & Float64 \\\\\n",
"\\end{tabular}\n"
],
"text/plain": [
"\u001b[1m8×7 DataFrame\u001b[0m\n",
"\u001b[1m Row \u001b[0m│\u001b[1m variable \u001b[0m\u001b[1m mean \u001b[0m\u001b[1m min \u001b[0m\u001b[1m median \u001b[0m\u001b[1m max \u001b[0m\u001b[1m nmis\u001b[0m ⋯\n",
"\u001b[1m \u001b[0m│\u001b[90m Symbol \u001b[0m\u001b[90m Float64 \u001b[0m\u001b[90m Float64 \u001b[0m\u001b[90m Float64 \u001b[0m\u001b[90m Float64 \u001b[0m\u001b[90m Int6\u001b[0m ⋯\n",
"─────┼──────────────────────────────────────────────────────────────────────────\n",
" 1 │ surface_area 25.2002 5.60865 13.3338 159.406 ⋯\n",
" 2 │ characteristic_length 79.5481 0.158521 1.55816 1582.23\n",
" 3 │ sbx 1.40222 0.0417367 0.967078 10.0663\n",
" 4 │ sby 3.3367 0.0125824 2.68461 9.68361\n",
" 5 │ sbz 3.91184 0.29006 1.8185 14.7434 ⋯\n",
" 6 │ Ix 1.58725 0.0311782 0.23401 11.1335\n",
" 7 │ Iy 3.74345 0.178598 1.01592 24.6735\n",
" 8 │ Iz 5.20207 0.178686 1.742 32.0083\n",
"\u001b[36m 2 columns omitted\u001b[0m"
]
},
"execution_count": 16,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"describe(df)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Julia 1.8.0-beta3",
"language": "julia",
"name": "julia-1.8"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

Binary file not shown.


@@ -0,0 +1 @@
{"rule":"UPPERCASE_SENTENCE_START","sentence":"^\\Qmodels that are in the \\E(?:Dummy|Ina|Jimmy-)[0-9]+\\Q format.\\E$"}

File diff suppressed because one or more lines are too long


@@ -1,296 +0,0 @@
This is XeTeX, Version 3.141592653-2.6-0.999993 (MiKTeX 21.8) (preloaded format=xelatex 2021.10.31) 23 APR 2022 00:47
entering extended mode
**./report.tex
(report.tex
LaTeX2e <2021-06-01> patch level 1
L3 programming layer <2021-07-12> (C:\Users\albig\AppData\Roaming\MiKTeX\tex/la
tex/koma-script\scrartcl.cls
Document Class: scrartcl 2021/11/13 v3.35 KOMA-Script document class (article)
(C:\Users\albig\AppData\Roaming\MiKTeX\tex/latex/koma-script\scrkbase.sty
Package: scrkbase 2021/11/13 v3.35 KOMA-Script package (KOMA-Script-dependent b
asics and keyval usage)
(C:\Users\albig\AppData\Roaming\MiKTeX\tex/latex/koma-script\scrbase.sty
Package: scrbase 2021/11/13 v3.35 KOMA-Script package (KOMA-Script-independent
basics and keyval usage)
(C:\Users\albig\AppData\Roaming\MiKTeX\tex/latex/koma-script\scrlfile.sty
Package: scrlfile 2021/11/13 v3.35 KOMA-Script package (file load hooks)
(C:\Users\albig\AppData\Roaming\MiKTeX\tex/latex/koma-script\scrlfile-hook-3.34
.sty
Package: scrlfile-hook-3.34 2021/11/13 v3.35 KOMA-Script package (using LaTeX h
ooks)
(C:\Users\albig\AppData\Roaming\MiKTeX\tex/latex/koma-script\scrlogo.sty
Package: scrlogo 2021/11/13 v3.35 KOMA-Script package (logo)
))) (C:\Program Files\MiKTeX\tex/latex/graphics\keyval.sty
Package: keyval 2014/10/28 v1.15 key=value parser (DPC)
\KV@toks@=\toks16
)
Applying: [2021/05/01] Usage of raw or classic option list on input line 252.
Already applied: [0000/00/00] Usage of raw or classic option list on input line
368.
)) (C:\Users\albig\AppData\Roaming\MiKTeX\tex/latex/koma-script\tocbasic.sty
Package: tocbasic 2021/11/13 v3.35 KOMA-Script package (handling toc-files)
\scr@dte@tocline@numberwidth=\skip47
\scr@dte@tocline@numbox=\box50
)
Package tocbasic Info: babel extension for `toc' omitted
(tocbasic) because of missing \bbl@set@language on input line 135.
Class scrartcl Info: You've used standard option `oneside'.
(scrartcl) This is correct!
(scrartcl) Internally I'm using `twoside=false'.
(scrartcl) If you'd like to set the option with \KOMAoptions,
(scrartcl) you'd have to use `twoside=false' there
(scrartcl) instead of `oneside', too.
Class scrartcl Info: File `scrsize11pt.clo' used instead of
(scrartcl) file `scrsize11.clo' to setup font sizes on input line 224
2.
(C:\Users\albig\AppData\Roaming\MiKTeX\tex/latex/koma-script\scrsize11pt.clo
File: scrsize11pt.clo 2021/11/13 v3.35 KOMA-Script font size class option (11pt
)
) (C:\Users\albig\AppData\Roaming\MiKTeX\tex/latex/koma-script\typearea.sty
Package: typearea 2021/11/13 v3.35 KOMA-Script package (type area)
\ta@bcor=\skip48
\ta@div=\count178
Package typearea Info: You've used standard option `letterpaper'.
(typearea) This is correct!
(typearea) Internally I'm using `paper=letter'.
(typearea) If you'd like to set the option with \KOMAoptions,
(typearea) you'd have to use `paper=letter' there
(typearea) instead of `letterpaper', too.
Package typearea Info: You've used standard option `oneside'.
(typearea) This is correct!
(typearea) Internally I'm using `twoside=false'.
(typearea) If you'd like to set the option with \KOMAoptions,
(typearea) you'd have to use `twoside=false' there
(typearea) instead of `oneside', too.
\ta@hblk=\skip49
\ta@vblk=\skip50
\ta@temp=\skip51
\footheight=\skip52
Package typearea Info: These are the values describing the layout:
(typearea) DIV = 11
(typearea) BCOR = 0.0pt
(typearea) \paperwidth = 614.295pt
(typearea) \textwidth = 446.76004pt
(typearea) DIV departure = -14%
(typearea) \evensidemargin = 11.49748pt
(typearea) \oddsidemargin = 11.49748pt
(typearea) \paperheight = 794.96999pt
(typearea) \textheight = 582.20026pt
(typearea) \topmargin = -37.40001pt
(typearea) \headheight = 17.0pt
(typearea) \headsep = 20.40001pt
(typearea) \topskip = 11.0pt
(typearea) \footskip = 47.6pt
(typearea) \baselineskip = 13.6pt
(typearea) on input line 1743.
)
\c@part=\count179
\c@section=\count180
\c@subsection=\count181
\c@subsubsection=\count182
\c@paragraph=\count183
\c@subparagraph=\count184
\scr@dte@section@maxnumwidth=\skip53
Class scrartcl Info: using compatibility default `runin=bysign'
(scrartcl) for `\section on input line 4852.
Class scrartcl Info: using compatibility default `afterindent=bysign'
(scrartcl) for `\section on input line 4852.
\scr@dte@part@maxnumwidth=\skip54
Class scrartcl Info: using compatibility default `afterindent=false'
(scrartcl) for `\part on input line 4860.
\scr@dte@subsection@maxnumwidth=\skip55
Class scrartcl Info: using compatibility default `runin=bysign'
(scrartcl) for `\subsection on input line 4870.
Class scrartcl Info: using compatibility default `afterindent=bysign'
(scrartcl) for `\subsection on input line 4870.
\scr@dte@subsubsection@maxnumwidth=\skip56
Class scrartcl Info: using compatibility default `runin=bysign'
(scrartcl) for `\subsubsection on input line 4880.
Class scrartcl Info: using compatibility default `afterindent=bysign'
(scrartcl) for `\subsubsection on input line 4880.
\scr@dte@paragraph@maxnumwidth=\skip57
Class scrartcl Info: using compatibility default `runin=bysign'
(scrartcl) for `\paragraph on input line 4891.
Class scrartcl Info: using compatibility default `afterindent=bysign'
(scrartcl) for `\paragraph on input line 4891.
\scr@dte@subparagraph@maxnumwidth=\skip58
Class scrartcl Info: using compatibility default `runin=bysign'
(scrartcl) for `\subparagraph on input line 4901.
Class scrartcl Info: using compatibility default `afterindent=bysign'
(scrartcl) for `\subparagraph on input line 4901.
\abovecaptionskip=\skip59
\belowcaptionskip=\skip60
\c@pti@nb@sid@b@x=\box51
Package tocbasic Info: babel extension for `lof' omitted
(tocbasic) because of missing \bbl@set@language on input line 6076.
\scr@dte@figure@maxnumwidth=\skip61
\c@figure=\count185
Package tocbasic Info: babel extension for `lot' omitted
(tocbasic) because of missing \bbl@set@language on input line 6091.
\scr@dte@table@maxnumwidth=\skip62
\c@table=\count186
Class scrartcl Info: Redefining `\numberline' on input line 6258.
\bibindent=\dimen138
) (C:\Program Files\MiKTeX\tex/latex/amsmath\amsmath.sty
Package: amsmath 2021/04/20 v2.17j AMS math features
\@mathmargin=\skip63
For additional information on amsmath, use the `?' option.
(C:\Program Files\MiKTeX\tex/latex/amsmath\amstext.sty
Package: amstext 2000/06/29 v2.01 AMS text
(C:\Program Files\MiKTeX\tex/latex/amsmath\amsgen.sty
File: amsgen.sty 1999/11/30 v2.0 generic functions
\@emptytoks=\toks17
\ex@=\dimen139
)) (C:\Program Files\MiKTeX\tex/latex/amsmath\amsbsy.sty
Package: amsbsy 1999/11/29 v1.2d Bold Symbols
\pmbraise@=\dimen140
) (C:\Program Files\MiKTeX\tex/latex/amsmath\amsopn.sty
Package: amsopn 2016/03/08 v2.02 operator names
)
\inf@bad=\count187
LaTeX Info: Redefining \frac on input line 234.
\uproot@=\count188
\leftroot@=\count189
LaTeX Info: Redefining \overline on input line 399.
\classnum@=\count190
\DOTSCASE@=\count191
LaTeX Info: Redefining \ldots on input line 496.
LaTeX Info: Redefining \dots on input line 499.
LaTeX Info: Redefining \cdots on input line 620.
\Mathstrutbox@=\box52
\strutbox@=\box53
\big@size=\dimen141
LaTeX Font Info: Redeclaring font encoding OML on input line 743.
LaTeX Font Info: Redeclaring font encoding OMS on input line 744.
\macc@depth=\count192
\c@MaxMatrixCols=\count193
\dotsspace@=\muskip16
\c@parentequation=\count194
\dspbrk@lvl=\count195
\tag@help=\toks18
\row@=\count196
\column@=\count197
\maxfields@=\count198
\andhelp@=\toks19
\eqnshift@=\dimen142
\alignsep@=\dimen143
\tagshift@=\dimen144
\tagwidth@=\dimen145
\totwidth@=\dimen146
\lineht@=\dimen147
\@envbody=\toks20
\multlinegap=\skip64
\multlinetaggap=\skip65
\mathdisplay@stack=\toks21
LaTeX Info: Redefining \[ on input line 2923.
LaTeX Info: Redefining \] on input line 2924.
) (C:\Program Files\MiKTeX\tex/latex/amsfonts\amssymb.sty
Package: amssymb 2013/01/14 v3.01 AMS font symbols
(C:\Program Files\MiKTeX\tex/latex/amsfonts\amsfonts.sty
Package: amsfonts 2013/01/14 v3.01 Basic AMSFonts support
\symAMSa=\mathgroup4
\symAMSb=\mathgroup5
LaTeX Font Info: Redeclaring math symbol \hbar on input line 98.
LaTeX Font Info: Overwriting math alphabet `\mathfrak' in version `bold'
(Font) U/euf/m/n --> U/euf/b/n on input line 106.
)) (C:\Program Files\MiKTeX\tex/latex/lm\lmodern.sty
Package: lmodern 2009/10/30 v1.6 Latin Modern Fonts
LaTeX Font Info: Overwriting symbol font `operators' in version `normal'
(Font) OT1/cmr/m/n --> OT1/lmr/m/n on input line 22.
LaTeX Font Info: Overwriting symbol font `letters' in version `normal'
(Font) OML/cmm/m/it --> OML/lmm/m/it on input line 23.
LaTeX Font Info: Overwriting symbol font `symbols' in version `normal'
(Font) OMS/cmsy/m/n --> OMS/lmsy/m/n on input line 24.
LaTeX Font Info: Overwriting symbol font `largesymbols' in version `normal'
(Font) OMX/cmex/m/n --> OMX/lmex/m/n on input line 25.
LaTeX Font Info: Overwriting symbol font `operators' in version `bold'
(Font) OT1/cmr/bx/n --> OT1/lmr/bx/n on input line 26.
LaTeX Font Info: Overwriting symbol font `letters' in version `bold'
(Font) OML/cmm/b/it --> OML/lmm/b/it on input line 27.
LaTeX Font Info: Overwriting symbol font `symbols' in version `bold'
(Font) OMS/cmsy/b/n --> OMS/lmsy/b/n on input line 28.
LaTeX Font Info: Overwriting symbol font `largesymbols' in version `bold'
(Font) OMX/cmex/m/n --> OMX/lmex/m/n on input line 29.
LaTeX Font Info: Overwriting math alphabet `\mathbf' in version `normal'
(Font) OT1/cmr/bx/n --> OT1/lmr/bx/n on input line 31.
LaTeX Font Info: Overwriting math alphabet `\mathsf' in version `normal'
(Font) OT1/cmss/m/n --> OT1/lmss/m/n on input line 32.
LaTeX Font Info: Overwriting math alphabet `\mathit' in version `normal'
(Font) OT1/cmr/m/it --> OT1/lmr/m/it on input line 33.
LaTeX Font Info: Overwriting math alphabet `\mathtt' in version `normal'
(Font) OT1/cmtt/m/n --> OT1/lmtt/m/n on input line 34.
LaTeX Font Info: Overwriting math alphabet `\mathbf' in version `bold'
(Font) OT1/cmr/bx/n --> OT1/lmr/bx/n on input line 35.
LaTeX Font Info: Overwriting math alphabet `\mathsf' in version `bold'
(Font) OT1/cmss/bx/n --> OT1/lmss/bx/n on input line 36.
LaTeX Font Info: Overwriting math alphabet `\mathit' in version `bold'
(Font) OT1/cmr/bx/it --> OT1/lmr/bx/it on input line 37.
LaTeX Font Info: Overwriting math alphabet `\mathtt' in version `bold'
(Font) OT1/cmtt/m/n --> OT1/lmtt/m/n on input line 38.
) (C:\Program Files\MiKTeX\tex/generic/iftex\iftex.sty
Package: iftex 2020/03/06 v1.0d TeX engine tests
) (C:\Users\albig\AppData\Roaming\MiKTeX\tex/latex/unicode-math\unicode-math.st
y (C:\Program Files\MiKTeX\tex/latex/l3kernel\expl3.sty
Package: expl3 2021-07-12 L3 programming layer (loader)
(C:\Program Files\MiKTeX\tex/latex/l3backend\l3backend-xetex.def
File: l3backend-xetex.def 2021-08-04 L3 backend support: XeTeX
()
\c__kernel_sys_dvipdfmx_version_int=\count199
\l__color_backend_stack_int=\count266
\g__color_backend_stack_int=\count267
\g__graphics_track_int=\count268
\l__pdf_internal_box=\box54
\g__pdf_backend_object_int=\count269
\g__pdf_backend_annotation_int=\count270
\g__pdf_backend_link_int=\count271
))
Package: unicode-math 2020/01/31 v0.8q Unicode maths in XeLaTeX and LuaLaTeX
(C:\Users\albig\AppData\Roaming\MiKTeX\tex/latex/unicode-math\unicode-math-xete
x.sty
Package: unicode-math-xetex 2020/01/31 v0.8q Unicode maths in XeLaTeX and LuaLa
TeX
(C:\Program Files\MiKTeX\tex/latex/l3packages/xparse\xparse.sty
Package: xparse 2021-08-04 L3 Experimental document command parser
) (C:\Program Files\MiKTeX\tex/latex/l3packages/l3keys2e\l3keys2e.sty
Package: l3keys2e 2021-08-04 LaTeX2e option processing using LaTeX3 keys
) (C:\Program Files\MiKTeX\tex/latex/fontspec\fontspec.sty
Package: fontspec 2020/02/21 v2.7i Font selection for XeLaTeX and LuaLaTeX
(C:\Program Files\MiKTeX\tex/latex/fontspec\fontspec-xetex.sty
Package: fontspec-xetex 2020/02/21 v2.7i Font selection for XeLaTeX and LuaLaTe
X
\l__fontspec_script_int=\count272
\l__fontspec_language_int=\count273
\l__fontspec_strnum_int=\count274
\l__fontspec_tmp_int=\count275
\l__fontspec_tmpa_int=\count276
\l__fontspec_tmpb_int=\count277
\l__fontspec_tmpc_int=\count278
\l__fontspec_em_int=\count279
\l__fontspec_emdef_int=\count280
\l__fontspec_strong_int=\count281
\l__fontspec_strongdef_int=\count282
\l__fontspec_tmpa_dim=\dimen148
\l__fontspec_tmpb_dim=\dimen149
\l__fontspec_tmpc_dim=\dimen150
(C:\Program Files\MiKTeX\tex/latex/base\fontenc.sty
Package: fontenc 2021/04/29 v2.0v Standard LaTeX package
)
! Interruption.
\__keyval_trim:nN #1->\__keyval_trim_auxi:w #1
\s__keyval_nil \s__keyval_mark...
l.3532 \newfontlanguage{Moksha}{MOK}
Here is how much of TeX's memory you used:
6387 strings out of 411276
157379 string characters out of 2833273
600564 words of memory out of 3000000
26830 multiletter control sequences out of 15000+600000
403438 words of font info for 28 fonts, out of 8000000 for 9000
1348 hyphenation exceptions out of 8191
108i,1n,108p,10631b,270s stack positions out of 5000i,500n,10000p,200000b,80000s
No pages of output.


@@ -18,8 +18,54 @@ execute:
  cache: true
---
## Introduction
Orbital debris is a form of pollution that is growing at an exponential pace and puts current and
future space infrastructure at risk. Satellites are critical to military, commercial, and civil
operations. Unfortunately, the space that debris occupies is becoming increasingly crowded and
dangerous, potentially leading to a cascade event that could turn orbit around the Earth into an
unusable wasteland for decades unless proper mitigation is introduced. Existing models employed
by NASA rely on a dataset created from 2D images and are missing many crucial features required for
correctly modeling the space debris environment. This approach aims to use high-resolution 3D
scanning to fully capture the geometry of a piece of debris and allow a more advanced analysis of
each piece. Coupled with machine learning methods, the scans will allow advances beyond the current
cutting edge. Physical and photograph-based measurements are time-consuming, hard to replicate, and
lack precision. 3D scanning allows much more advanced and accurate analysis of each debris sample,
focusing on properties such as moment of inertia, cross-section, and drag. Once the characteristics
of space debris are more thoroughly understood, we can begin mitigating the creation and danger of
future space debris by implementing improved satellite construction methods and more advanced debris
avoidance measures.
### Current Progress
This project tackles difficult problems, and although great progress has been made, there is
still plenty of work to be done. Currently, an algorithm has been developed that can extract
many key features from solid ^[A mesh with a surface that is fully closed and has no holes in its
geometry.] models that are in the `stl` format. The algorithm for processing the 3D meshes is
implemented in the Julia programming language. Syntactically, the language is very similar to Python
and Matlab. Julia was chosen because it is nearly as performant as compiled languages like C, while
still having tooling geared towards engineers and scientists. The code produces a struct with all
the calculated properties as follows:
```julia
struct Properties
    # Volume of the mesh
    volume::Float64
    # Center of gravity; meshes are not always centered at [0,0,0]
    center_of_gravity::Vector{Float64}
    # Moment of inertia tensor
    inertia::Matrix{Float64}
    # Surface area of the mesh
    surface_area::Float64
    # Average orthogonal dimension of the mesh
    characteristic_length::Float64
    # Projected length of the two farthest points in the [x,y,z] directions
    solidbody_values::Vector{Float64}
end
```
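The snippet below is a minimal usage sketch for a single part. The file path and part name are hypothetical, and the calls (`load`, `find_scale`, `get_mass_properties`) and the sorted-eigenvalue step simply mirror the processing code shown later in this report rather than documented API:

```julia
using FileIO, MeshIO      # load() for stl files
using LinearAlgebra       # eigvals()
using stlProcess          # find_scale(), get_mass_properties()

# Hypothetical path to one solid stl part
stl = load(raw"C:\Coding\fake-satellite-dataset\cubesat\part1.stl")

# Scale factor that normalizes the part's volume, then compute its properties
scale = find_scale(stl)
props = get_mass_properties(stl; scale=scale)

# Principal moments of inertia, sorted so the largest comes last
Ix, Iy, Iz = sort(eigvals(props.inertia))

println(props.surface_area, " ", props.characteristic_length)
```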
```{julia}
-#| code-fold: true
+#| echo: false
#| output: false
using FileIO
@@ -29,8 +75,11 @@ using stlProcess
using CSV
using DataFrames
+using Plots
+theme(:ggplot2)
using LinearAlgebra
+using Statistics
```
```{julia}
@@ -40,7 +89,8 @@ using LinearAlgebra
# local path to https://gitlab.com/orbital-debris-research/fake-satellite-dataset
dataset_path = raw"C:\Coding\fake-satellite-dataset"
-folders = ["1_5U", "assembly1", "cubesat"]
+# folders = ["1_5U", "assembly1", "cubesat"]
+folders = ["cubesat"]
df = DataFrame(;
    surface_area=Float64[],
@@ -55,8 +105,9 @@ df = DataFrame(;
```
```{julia}
+#| output: false
for path in dataset_path * "\\" .* folders
-    println("Processing Path: ", path)
    Threads.@threads for file in readdir(path)
        stl = load(path * "\\" * file)
        scale = find_scale(stl)
@@ -64,21 +115,44 @@ for path in dataset_path * "\\" .* folders
        eigs = eigvals(props.inertia)
        sort_index = sortperm(eigs)
-        Ix, Iy, Iz = eigs[sort_index]
+        I1, I2, I3 = eigs[sort_index]
        sbx, sby, sbz = props.sb_values[sort_index]

        push!(
            df,
-            [props.surface_area, props.characteristic_length, sbx, sby, sbz, Ix, Iy, Iz],
+            [props.surface_area, props.characteristic_length, sbx, sby, sbz, I3, I2, I1],
        )
    end
end
```
+:::{.column-body-outset}
```{julia}
+#| echo: false
describe(df)
```
+:::
```{julia}
S = cov(Matrix(df))
eig_vals = eigvals(S);
# sorting eigenvalues from largest to smallest
sort_index = sortperm(eig_vals; rev=true)
lambda = eig_vals[sort_index]
names_sorted = names(df)[sort_index]
lambda_ratio = cumsum(lambda) ./ sum(lambda)
plot(lambda_ratio, marker=:x)
xticks!(sort_index,names(df), xrotation = 15)
```
## Gathering Data
To get started on the project before any scans of the actual debris are made available, I opted to
@@ -165,7 +239,7 @@ eig_vals = eig(S);
lambda_ratio = cumsum(lambda) ./ sum(lambda)
```
-Then plotting `lambda_ratio`, which is the `cumsum`/`sum` produces the following plot:
+Then plotting `lambda_ratio`, which is the `cumsum ./ sum` produces the following plot:
![PCA Plot](Figures/pca.png)


@@ -1,460 +0,0 @@
% Options for packages loaded elsewhere
\PassOptionsToPackage{unicode}{hyperref}
\PassOptionsToPackage{hyphens}{url}
\PassOptionsToPackage{dvipsnames,svgnames,x11names}{xcolor}
%
\documentclass[
letterpaper,
DIV=11,
numbers=noendperiod,
oneside]{scrartcl}
\usepackage{amsmath,amssymb}
\usepackage{lmodern}
\usepackage{iftex}
\ifPDFTeX
\usepackage[T1]{fontenc}
\usepackage[utf8]{inputenc}
\usepackage{textcomp} % provide euro and other symbols
\else % if luatex or xetex
\usepackage{unicode-math}
\defaultfontfeatures{Scale=MatchLowercase}
\defaultfontfeatures[\rmfamily]{Ligatures=TeX,Scale=1}
\fi
% Use upquote if available, for straight quotes in verbatim environments
\IfFileExists{upquote.sty}{\usepackage{upquote}}{}
\IfFileExists{microtype.sty}{% use microtype if available
\usepackage[]{microtype}
\UseMicrotypeSet[protrusion]{basicmath} % disable protrusion for tt fonts
}{}
\makeatletter
\@ifundefined{KOMAClassName}{% if non-KOMA class
\IfFileExists{parskip.sty}{%
\usepackage{parskip}
}{% else
\setlength{\parindent}{0pt}
\setlength{\parskip}{6pt plus 2pt minus 1pt}}
}{% if KOMA class
\KOMAoptions{parskip=half}}
\makeatother
\usepackage{xcolor}
\IfFileExists{xurl.sty}{\usepackage{xurl}}{} % add URL line breaks if available
\IfFileExists{bookmark.sty}{\usepackage{bookmark}}{\usepackage{hyperref}}
\hypersetup{
pdftitle={Characterization of Space Debris using Machine Learning Methods},
pdfauthor={Anson Biggs},
colorlinks=true,
linkcolor={blue},
filecolor={Maroon},
citecolor={Blue},
urlcolor={Blue},
pdfcreator={LaTeX via pandoc}}
\urlstyle{same} % disable monospaced font for URLs
\usepackage[left=1in,marginparwidth=2.0666666666667in,textwidth=4.1333333333333in,marginparsep=0.3in]{geometry}
\setlength{\emergencystretch}{3em} % prevent overfull lines
\setcounter{secnumdepth}{-\maxdimen} % remove section numbering
% Make \paragraph and \subparagraph free-standing
\ifx\paragraph\undefined\else
\let\oldparagraph\paragraph
\renewcommand{\paragraph}[1]{\oldparagraph{#1}\mbox{}}
\fi
\ifx\subparagraph\undefined\else
\let\oldsubparagraph\subparagraph
\renewcommand{\subparagraph}[1]{\oldsubparagraph{#1}\mbox{}}
\fi
\usepackage{color}
\usepackage{fancyvrb}
\newcommand{\VerbBar}{|}
\newcommand{\VERB}{\Verb[commandchars=\\\{\}]}
\DefineVerbatimEnvironment{Highlighting}{Verbatim}{commandchars=\\\{\}}
% Add ',fontsize=\small' for more characters per line
\usepackage{framed}
\definecolor{shadecolor}{RGB}{241,243,245}
\newenvironment{Shaded}{\begin{snugshade}}{\end{snugshade}}
\newcommand{\AlertTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
\newcommand{\AnnotationTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{#1}}
\newcommand{\AttributeTok}[1]{\textcolor[rgb]{0.40,0.45,0.13}{#1}}
\newcommand{\BaseNTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
\newcommand{\BuiltInTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
\newcommand{\CharTok}[1]{\textcolor[rgb]{0.13,0.47,0.30}{#1}}
\newcommand{\CommentTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{#1}}
\newcommand{\CommentVarTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{\textit{#1}}}
\newcommand{\ConstantTok}[1]{\textcolor[rgb]{0.56,0.35,0.01}{#1}}
\newcommand{\ControlFlowTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
\newcommand{\DataTypeTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
\newcommand{\DecValTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
\newcommand{\DocumentationTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{\textit{#1}}}
\newcommand{\ErrorTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
\newcommand{\ExtensionTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
\newcommand{\FloatTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
\newcommand{\FunctionTok}[1]{\textcolor[rgb]{0.28,0.35,0.67}{#1}}
\newcommand{\ImportTok}[1]{\textcolor[rgb]{0.00,0.46,0.62}{#1}}
\newcommand{\InformationTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{#1}}
\newcommand{\KeywordTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
\newcommand{\NormalTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
\newcommand{\OperatorTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{#1}}
\newcommand{\OtherTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
\newcommand{\PreprocessorTok}[1]{\textcolor[rgb]{0.68,0.00,0.00}{#1}}
\newcommand{\RegionMarkerTok}[1]{\textcolor[rgb]{0.00,0.23,0.31}{#1}}
\newcommand{\SpecialCharTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{#1}}
\newcommand{\SpecialStringTok}[1]{\textcolor[rgb]{0.13,0.47,0.30}{#1}}
\newcommand{\StringTok}[1]{\textcolor[rgb]{0.13,0.47,0.30}{#1}}
\newcommand{\VariableTok}[1]{\textcolor[rgb]{0.07,0.07,0.07}{#1}}
\newcommand{\VerbatimStringTok}[1]{\textcolor[rgb]{0.13,0.47,0.30}{#1}}
\newcommand{\WarningTok}[1]{\textcolor[rgb]{0.37,0.37,0.37}{\textit{#1}}}
\providecommand{\tightlist}{%
\setlength{\itemsep}{0pt}\setlength{\parskip}{0pt}}\usepackage{longtable,booktabs,array}
\usepackage{calc} % for calculating minipage widths
% Correct order of tables after \paragraph or \subparagraph
\usepackage{etoolbox}
\makeatletter
\patchcmd\longtable{\par}{\if@noskipsec\mbox{}\fi\par}{}{}
\makeatother
% Allow footnotes in longtable head/foot
\IfFileExists{footnotehyper.sty}{\usepackage{footnotehyper}}{\usepackage{footnote}}
\makesavenoteenv{longtable}
\usepackage{graphicx}
\makeatletter
\def\maxwidth{\ifdim\Gin@nat@width>\linewidth\linewidth\else\Gin@nat@width\fi}
\def\maxheight{\ifdim\Gin@nat@height>\textheight\textheight\else\Gin@nat@height\fi}
\makeatother
% Scale images if necessary, so that they will not overflow the page
% margins by default, and it is still possible to overwrite the defaults
% using explicit options in \includegraphics[width, height, ...]{}
\setkeys{Gin}{width=\maxwidth,height=\maxheight,keepaspectratio}
% Set default figure placement to htbp
\makeatletter
\def\fps@figure{htbp}
\makeatother
\newlength{\cslhangindent}
\setlength{\cslhangindent}{1.5em}
\newlength{\csllabelwidth}
\setlength{\csllabelwidth}{3em}
\newlength{\cslentryspacingunit} % times entry-spacing
\setlength{\cslentryspacingunit}{\parskip}
\newenvironment{CSLReferences}[2] % #1 hanging-ident, #2 entry spacing
{% don't indent paragraphs
\setlength{\parindent}{0pt}
% turn on hanging indent if param 1 is 1
\ifodd #1
\let\oldpar\par
\def\par{\hangindent=\cslhangindent\oldpar}
\fi
% set entry spacing
\setlength{\parskip}{#2\cslentryspacingunit}
}%
{}
\usepackage{calc}
\newcommand{\CSLBlock}[1]{#1\hfill\break}
\newcommand{\CSLLeftMargin}[1]{\parbox[t]{\csllabelwidth}{#1}}
\newcommand{\CSLRightInline}[1]{\parbox[t]{\linewidth - \csllabelwidth}{#1}\break}
\newcommand{\CSLIndent}[1]{\hspace{\cslhangindent}#1}
\KOMAoption{captions}{tableheading}
\makeatletter
\makeatother
\makeatletter
\@ifpackageloaded{caption}{}{\usepackage{caption}}
\AtBeginDocument{%
\ifdefined\contentsname
\renewcommand*\contentsname{Table of contents}
\else
\newcommand\contentsname{Table of contents}
\fi
\ifdefined\listfigurename
\renewcommand*\listfigurename{List of Figures}
\else
\newcommand\listfigurename{List of Figures}
\fi
\ifdefined\listtablename
\renewcommand*\listtablename{List of Tables}
\else
\newcommand\listtablename{List of Tables}
\fi
\ifdefined\figurename
\renewcommand*\figurename{Figure}
\else
\newcommand\figurename{Figure}
\fi
\ifdefined\tablename
\renewcommand*\tablename{Table}
\else
\newcommand\tablename{Table}
\fi
}
\@ifpackageloaded{float}{}{\usepackage{float}}
\floatstyle{ruled}
\@ifundefined{c@chapter}{\newfloat{codelisting}{h}{lop}}{\newfloat{codelisting}{h}{lop}[chapter]}
\floatname{codelisting}{Listing}
\newcommand*\listoflistings{\listof{codelisting}{List of Listings}}
\makeatother
\makeatletter
\@ifpackageloaded{caption}{}{\usepackage{caption}}
\@ifpackageloaded{subcaption}{}{\usepackage{subcaption}}
\makeatother
\makeatletter
\@ifpackageloaded{tcolorbox}{}{\usepackage[many]{tcolorbox}}
\makeatother
\makeatletter
\@ifundefined{shadecolor}{\definecolor{shadecolor}{rgb}{.97, .97, .97}}
\makeatother
\makeatletter
\@ifpackageloaded{sidenotes}{}{\usepackage{sidenotes}}
\@ifpackageloaded{marginnote}{}{\usepackage{marginnote}}
\makeatother
\makeatletter
\makeatother
\ifLuaTeX
\usepackage{selnolig} % disable illegal ligatures
\fi
\title{Characterization of Space Debris using Machine Learning Methods}
\usepackage{etoolbox}
\makeatletter
\providecommand{\subtitle}[1]{% add subtitle to \maketitle
\apptocmd{\@title}{\par {\large #1 \par}}{}{}
}
\makeatother
\subtitle{Advanced processing of 3D meshes using Julia, and data science
in Matlab.}
\author{Anson Biggs}
\date{4/30/2022}
\begin{document}
\maketitle
\ifdefined\Shaded\renewenvironment{Shaded}{\begin{tcolorbox}[interior hidden, borderline west={3pt}{0pt}{shadecolor}, boxrule=0pt, enhanced, breakable, sharp corners, frame hidden]}{\end{tcolorbox}}\fi
\hypertarget{gathering-data}{%
\subsection{Gathering Data}\label{gathering-data}}
To get started on the project before any scans of the actual debris are
made available, I opted to find 3D models online and process them as if
they were data collected by my team. GrabCAD is an excellent source of
high-quality 3D models, and all the models have, at worst, a
non-commercial license making them suitable for this study. The current
dataset uses three separate satellite assemblies found on GrabCAD; below
is an example of one of the satellites that was used.
\begin{figure}
{\centering \includegraphics{Figures/assembly.jpg}
}
\caption{Example CubeSat Used for Analysis}
\end{figure}
\hypertarget{data-preparation}{%
\subsection{Data Preparation}\label{data-preparation}}
The models were processed in Blender, which quickly converted the
assemblies to \texttt{stl} files, giving 108 unique parts to be
processed. Since the expected final size of the dataset is expected to
be in the magnitude of the thousands, an algorithm capable of getting
the required properties of each part is the only feasible solution. From
the analysis performed in
\href{https://gitlab.com/orbital-debris-research/directed-study/report-1/-/blob/main/README.md}{Report
1}, we know that the essential debris properties are the moments of inertia,
which helped narrow down potential algorithms. Unfortunately, this is
one of the more complicated things to calculate from a mesh, but thanks
to a paper from (Eberly
2002)\marginpar{\begin{footnotesize}\leavevmode\vadjust pre{\protect\hypertarget{ref-eberlyPolyhedralMassProperties2002}{}}%
Eberly, David. 2002. {``Polyhedral {Mass Properties} ({Revisited}).''}
\url{https://www.geometrictools.com/Documentation/PolyhedralMassProperties.pdf}.\vspace{2mm}\par\end{footnotesize}}
titled
\href{https://www.geometrictools.com/Documentation/PolyhedralMassProperties.pdf}{Polyhedral
Mass Properties}, his algorithm was implemented in the Julia programming
language. The current implementation of the algorithm calculates a
moment of inertia tensor, volume, center of gravity, characteristic
length, and surface body dimensions in a few milliseconds per part. The
library can be found
\href{https://gitlab.com/MisterBiggs/stl-process}{here.} The
characteristic length is a value that is heavily used by the NASA
DebriSat project (Murray et al.
2019)\marginpar{\begin{footnotesize}\leavevmode\vadjust pre{\protect\hypertarget{ref-DebriSat2019}{}}%
Murray, James, Heather Cowardin, J-C Liou, Marlon Sorge, Norman
Fitz-Coy, and Tom Huynh. 2019. {``Analysis of the DebriSat Fragments and
Comparison to the NASA Standard Satellite Breakup Model.''} In
\emph{International Orbital Debris Conference (IOC)}. JSC-E-DAA-TN73918.
\url{https://ntrs.nasa.gov/citations/20190034081}.\vspace{2mm}\par\end{footnotesize}}
that is doing very similar work to this project. The characteristic
length takes the maximum orthogonal dimensions of a body, sums them,
then divides by 3 to produce a single scalar value that can
be used to get an idea of the size of a 3D object.
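Written out explicitly (a sketch of the definition just stated, where
$x_{\max}$, $y_{\max}$, and $z_{\max}$ denote the maximum orthogonal
dimensions of the body):
\[
  L_{c} = \frac{x_{\max} + y_{\max} + z_{\max}}{3}.
\]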
\begin{figure}
{\centering \includegraphics{Figures/current_process.pdf}
}
\caption{Current mesh processing pipeline}
\end{figure}
The algorithm's speed is critical not only because of the eventual large number
of debris pieces that have to be processed, but also because many of the data science
algorithms we plan to run on the compiled data need the data to
be normalized. For the current dataset and properties, it makes the most
sense to normalize the dataset based on volume. Volume was chosen for
multiple reasons, namely because it was easy to implement an efficient
algorithm to calculate volume, and currently, volume produces the least
amount of variation out of the current set of properties calculated.
Unfortunately, scaling a model to a specific volume is an iterative
process, but can be done very efficiently using derivative-free
numerical root-finding algorithms. The current implementation can scale
and process all the properties using only 30\% more time than getting
the properties without first scaling.
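As an illustration of that idea only (a minimal sketch, not the actual
implementation: the \texttt{get\_mass\_properties} call and its
\texttt{volume} field are borrowed from the processing code elsewhere in
this commit, and \texttt{find\_zero} is the derivative-free root finder
from the Roots.jl package):
\begin{verbatim}
using Roots

target_volume = 1.0  # hypothetical volume every part is scaled to

# Find the scale factor whose scaled mesh matches the target volume
volume_error(s) = get_mass_properties(stl; scale=s).volume - target_volume
scale = find_zero(volume_error, 1.0)  # derivative-free, starting from 1
\end{verbatim}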
\begin{Shaded}
\begin{Highlighting}[]
\NormalTok{ Row │ variable mean min median max}
\NormalTok{─────┼───────────────────────────────────────────────────────────────────}
\NormalTok{ 1 │ surface\_area 25.2002 5.60865 13.3338 159.406}
\NormalTok{ 2 │ characteristic\_length 79.5481 0.158521 1.55816 1582.23}
\NormalTok{ 3 │ sbx 1.40222 0.0417367 0.967078 10.0663}
\NormalTok{ 4 │ sby 3.3367 0.0125824 2.68461 9.68361}
\NormalTok{ 5 │ sbz 3.91184 0.29006 1.8185 14.7434}
\NormalTok{ 6 │ Ix 1.58725 0.0311782 0.23401 11.1335}
\NormalTok{ 7 │ Iy 3.74345 0.178598 1.01592 24.6735}
\NormalTok{ 8 │ Iz 5.20207 0.178686 1.742 32.0083}
\end{Highlighting}
\end{Shaded}
Above is a summary of the current 108 parts with scaling. Since all the
volumes are the same, volume is left out of the dataset; the center of
gravity is also left out since it currently is just an
artifact of the \texttt{stl} file format. There are many ways to
determine the `center' of a 3D mesh, but since only one is being
implemented at the moment, comparisons to other properties don't make
sense. The other notable part of the data is that each model is rotated so
that the magnitudes of \texttt{Iz}, \texttt{Iy}, and \texttt{Ix} are in
descending order. This makes sure that the rotation of a model doesn't
matter for characterization. The dataset is available for download here:
\begin{itemize}
\tightlist
\item
\href{https://gitlab.com/orbital-debris-research/directed-study/report-3/-/blob/main/scaled_dataset.csv}{scaled\_dataset.csv}
\end{itemize}
\hypertarget{characterization}{%
\subsection{Characterization}\label{characterization}}
The first step toward characterization is to perform a principal
component analysis to determine what properties of the data capture the
most variation. \texttt{PCA} also requires that the data is scaled, so
as discussed above the dataset that is scaled by \texttt{volume} will be
used. \texttt{PCA} is implemented manually instead of the Matlab
built-in function as shown below:
\begin{Shaded}
\begin{Highlighting}[]
\CommentTok{\% covariance matrix of data points}
\VariableTok{S}\OperatorTok{=}\VariableTok{cov}\NormalTok{(}\VariableTok{scaled\_data}\NormalTok{)}\OperatorTok{;}
\CommentTok{\% eigenvalues of S}
\VariableTok{eig\_vals} \OperatorTok{=} \VariableTok{eig}\NormalTok{(}\VariableTok{S}\NormalTok{)}\OperatorTok{;}
\CommentTok{\% sorting eigenvalues from largest to smallest}
\NormalTok{[}\VariableTok{lambda}\OperatorTok{,} \VariableTok{sort\_index}\NormalTok{] }\OperatorTok{=} \VariableTok{sort}\NormalTok{(}\VariableTok{eig\_vals}\OperatorTok{,}\SpecialStringTok{\textquotesingle{}descend\textquotesingle{}}\NormalTok{)}\OperatorTok{;}
\VariableTok{lambda\_ratio} \OperatorTok{=} \VariableTok{cumsum}\NormalTok{(}\VariableTok{lambda}\NormalTok{) }\OperatorTok{./} \VariableTok{sum}\NormalTok{(}\VariableTok{lambda}\NormalTok{)}
\end{Highlighting}
\end{Shaded}
Then plotting \texttt{lambda\_ratio}, which is the
\texttt{cumsum}/\texttt{sum} produces the following plot:
\begin{figure}
{\centering \includegraphics{Figures/pca.png}
}
\caption{PCA Plot}
\end{figure}
The current dataset can be described incredibly well just by looking at
\texttt{Iz} (recall that the models are rotated so that \texttt{Iz} is
the largest moment of inertia). Then including \texttt{Iy} and
\texttt{Ix} means that a 3D plot of the principal moments of inertia
captures almost all the variation in the data.
The next step for characterization is to get only the inertias from the
dataset. Since the current dataset is so small, the scaled dataset will
be used for the rest of the characterization process. Once more parts are
added to the database it will make sense to start looking at the raw
dataset. Now we can proceed to cluster the data using the k-means method
of clustering. To properly use k-means a value of k, which is the number
of clusters, needs to be determined. This can be done by creating an
elbow plot using the following code:
\begin{Shaded}
\begin{Highlighting}[]
\KeywordTok{for} \VariableTok{ii}\OperatorTok{=}\FloatTok{1}\OperatorTok{:}\FloatTok{20}
\NormalTok{ [}\VariableTok{idx}\OperatorTok{,\textasciitilde{},}\VariableTok{sumd}\NormalTok{] }\OperatorTok{=} \VariableTok{kmeans}\NormalTok{(}\VariableTok{inertia}\OperatorTok{,}\VariableTok{ii}\NormalTok{)}\OperatorTok{;}
\VariableTok{J}\NormalTok{(}\VariableTok{ii}\NormalTok{)}\OperatorTok{=}\VariableTok{norm}\NormalTok{(}\VariableTok{sumd}\NormalTok{)}\OperatorTok{;}
\KeywordTok{end}
\end{Highlighting}
\end{Shaded}
Which produces the following plot:
\begin{figure}
{\centering \includegraphics{Figures/kmeans.png}
}
\caption{Elbow method to determine the required number of clusters.}
\end{figure}
As can be seen in the above elbow plot, at 6 clusters there is an
``elbow,'' where there is a large drop in the summed distance to
the centroid of each cluster, which means that 6 is the optimal number
of clusters. Plotting the inertias using 6 k-means clusters
produces the following plot:
\begin{figure}
{\centering \includegraphics{Figures/inertia3d.png}
}
\caption{Moments of Inertia plotted with 6 clusters.}
\end{figure}
From this plot it is immediately clear that there are clusters of
outliers. These are due to the different shapes: the extreme values
are slender rods or flat plates, while the clusters closer to the center
more closely resemble spheres. As the dataset grows it should become
more apparent what kind of clusters actually make up a satellite, and
eventually space debris in general.
\hypertarget{next-steps}{%
\subsection{Next Steps}\label{next-steps}}
The current dataset needs to be grown in both the amount of data and the
variety of data. The most glaring issue with the current dataset is the
lack of any debris since the parts are straight from satellite
assemblies. Getting accurate properties from the current scans we have
is an entire research project in itself, so hopefully, getting pieces
that are easier to scan can help bring the project back on track. The
other and harder-to-fix issue is finding/deriving more data properties.
Properties such as cross-sectional or aerodynamic drag would be very
insightful but are likely to be difficult to implement in code and
significantly more resource intensive than the current properties the
code can derive.
\end{document}