diff --git a/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/assembly.jpg b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/assembly.jpg new file mode 100644 index 0000000..4703d31 Binary files /dev/null and b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/assembly.jpg differ diff --git a/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/biplots.png b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/biplots.png new file mode 100644 index 0000000..86211f6 Binary files /dev/null and b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/biplots.png differ diff --git a/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/cg_biplot.png b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/cg_biplot.png new file mode 100644 index 0000000..4b7db39 Binary files /dev/null and b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/cg_biplot.png differ diff --git a/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/current_process.svg b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/current_process.svg new file mode 100644 index 0000000..433fdca --- /dev/null +++ b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/current_process.svg @@ -0,0 +1,16 @@ + \ No newline at end of file diff --git a/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/final_scatter.png b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/final_scatter.png new file mode 100644 index 0000000..9ef75b9 Binary files /dev/null and b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/final_scatter.png differ diff --git a/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/first_scatter.png b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/first_scatter.png new file mode 100644 index 0000000..f522490 Binary files /dev/null and b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/first_scatter.png differ diff --git a/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/inertia_biplot.png b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/inertia_biplot.png new file mode 100644 index 0000000..eeaf49c Binary files /dev/null and b/_posts/2022-04-03-machine-learning-directed-study-report-2/Figures/inertia_biplot.png differ diff --git a/_posts/2022-04-03-machine-learning-directed-study-report-2/citations.bib b/_posts/2022-04-03-machine-learning-directed-study-report-2/citations.bib new file mode 100644 index 0000000..56e90d0 --- /dev/null +++ b/_posts/2022-04-03-machine-learning-directed-study-report-2/citations.bib @@ -0,0 +1,11 @@ + +@misc{eberlyPolyhedralMassProperties2002, + title = {Polyhedral {{Mass Properties}} ({{Revisited}})}, + author = {Eberly, David}, + year = {2002}, + month = dec, + copyright = {CC BY 4.0}, + url = "https://www.geometrictools.com/Documentation/PolyhedralMassProperties.pdf" +} + + diff --git a/_posts/2022-04-03-machine-learning-directed-study-report-2/machine-learning-directed-study-report-2.Rmd b/_posts/2022-04-03-machine-learning-directed-study-report-2/machine-learning-directed-study-report-2.Rmd new file mode 100644 index 0000000..022bc8a --- /dev/null +++ b/_posts/2022-04-03-machine-learning-directed-study-report-2/machine-learning-directed-study-report-2.Rmd @@ -0,0 +1,170 @@ +--- +title: "Machine Learning Directed Study Report 2" +description: | + Advanced processing of 3D 
  meshes using Julia, and data science in Matlab.
author:
  - name: Anson Biggs
    url: https://ansonbiggs.com
repository_url: https://gitlab.com/orbital-debris-research/directed-study/report-2
date: 2022-04-03
output:
  distill::distill_article:
    self_contained: false
categories:
  - Matlab
  - Orbital Debris
  - Julia
preview: Figures/final_scatter.png
bibliography: citations.bib
draft: false
---

## Gathering Data

To get started on the project before any scans of the actual debris are made available, I opted to find 3D models online and process them as if they were data collected by my team. GrabCAD is an excellent source of high-quality 3D models, and all of the models have, at worst, a non-commercial license, making them suitable for this study. The current dataset uses three separate satellite assemblies found on GrabCAD; below is an example of one of the satellites that was used.

## Data Preparation

The models were processed in Blender, which quickly converted the assemblies to `stl` files, giving 108 unique parts to be processed. Since the final size of the dataset is expected to be on the order of thousands of parts, an algorithm capable of computing the required properties of each part is the only feasible solution. From the analysis performed in [Report 1](https://gitlab.com/orbital-debris-research/directed-study/report-1/-/blob/main/README.md), we know that the essential debris properties are the moments of inertia, which helped narrow down potential algorithms. Unfortunately, the inertia tensor is one of the more complicated properties to calculate from a mesh, but thanks to David Eberly's paper [Polyhedral Mass Properties](https://www.geometrictools.com/Documentation/PolyhedralMassProperties.pdf) [@eberlyPolyhedralMassProperties2002], his algorithm could be implemented in the Julia programming language. The current implementation calculates the moment of inertia tensor, volume, and center of gravity in a few milliseconds per part.

The algorithm's speed is critical not only because of the eventually large number of debris pieces that will have to be processed, but also because many of the data science algorithms we plan to run on the compiled data need the data to be normalized. I have decided that it makes the most sense to normalize the dataset based on volume, for two reasons: it was easy to implement an efficient algorithm to calculate volume, and, at the moment, volume seems to be the least essential property for the data analysis. Unfortunately, scaling a model to a specific volume is an iterative process, but it can be done very efficiently using derivative-free numerical root-finding algorithms; a simplified sketch of these steps is shown at the end of this section. The current implementation can scale and process all the properties using only 30% more time than computing the properties without scaling.

``` {.txt}
 Row │ variable  mean         min          median       max
     │ Symbol    Float64      Float64      Float64      Float64
─────┼────────────────────────────────────────────────────────────
   1 │ volume     0.00977609   1.05875e-10  2.0558e-5   0.893002
   2 │ cx        -0.836477    -3.13272     -0.00135877  0.0866989
   3 │ cy        -1.52983     -5.07001     -0.101678    0.177574
   4 │ cz         0.162855    -6.83716      0.00115068  7.60925
   5 │ Ix         0.00425039  -5.2943e-7    9.10038e-9  0.445278
   6 │ Iy         0.0108781    1.05468e-17  1.13704e-8  1.14249
   7 │ Iz         0.0111086    1.05596e-17  2.1906e-8   1.15363
```

Above is a summary of the current 108-part dataset without scaling. The max values are well above the medians, and given the dataset's small size, there are still significant outliers. For now, any significant outliers will be removed, with more explanation below; hopefully, this will become less necessary, and shrink the dataset less, as the dataset grows. As mentioned before, both a raw and a normalized dataset were prepared, and the data can be found below:

- [dataset.csv](https://gitlab.com/orbital-debris-research/directed-study/report-2/-/blob/main/dataset.csv)
- [scaled_dataset.csv](https://gitlab.com/orbital-debris-research/directed-study/report-2/-/blob/main/scaled_dataset.csv)
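
To make the processing described above more concrete, below is a minimal Julia sketch of the two ideas involved: computing volume and center of gravity from a closed, consistently oriented triangle mesh using signed tetrahedra (a simplification of Eberly's method, which additionally produces the full inertia tensor), and finding a uniform scale factor that hits a target volume with a derivative-free bisection search. The function names, the default search bracket, and the single-tetrahedron example are illustrative placeholders rather than the report's actual code.

``` {.julia}
using LinearAlgebra

# Volume and center of gravity of a closed, outward-oriented triangle mesh.
# `faces` is a vector of (a, b, c) tuples, each a 3-element vertex vector.
function volume_and_centroid(faces)
    V = 0.0
    C = zeros(3)
    for (a, b, c) in faces
        v = dot(a, cross(b, c)) / 6      # signed volume of tetrahedron (0, a, b, c)
        V += v
        C += v .* (a .+ b .+ c) ./ 4     # volume-weighted centroid of that tetrahedron
    end
    return V, C ./ V
end

# Uniform scale factor s so that the scaled mesh has the target volume.
# Volume grows as s^3, so s could be found analytically; the bisection mirrors
# the derivative-free root-finding used when the whole property pipeline is
# re-evaluated at each candidate scale.
function scale_to_volume(faces, target; lo=1e-3, hi=1e3, tol=1e-10)
    f(s) = volume_and_centroid([(s .* a, s .* b, s .* c) for (a, b, c) in faces])[1] - target
    for _ in 1:200
        mid = (lo + hi) / 2
        f(mid) > 0 ? (hi = mid) : (lo = mid)
        hi - lo < tol && break
    end
    return (lo + hi) / 2
end

# Tiny usage example: a tetrahedron with vertices at the origin and the unit axes.
tet = [([0.0, 0, 0], [0.0, 1, 0], [1.0, 0, 0]),
       ([0.0, 0, 0], [1.0, 0, 0], [0.0, 0, 1]),
       ([0.0, 0, 0], [0.0, 0, 1], [0.0, 1, 0]),
       ([1.0, 0, 0], [0.0, 1, 0], [0.0, 0, 1])]
V, C = volume_and_centroid(tet)   # V ≈ 1/6, C ≈ [0.25, 0.25, 0.25]
s = scale_to_volume(tet, 1.0)     # s ≈ cbrt(6) ≈ 1.817
```

On the real parts, the vertices would come from the `stl` files exported from Blender, and the inertia tensor would be accumulated in the same loop following Eberly's formulas.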

## Characterization

The first step toward characterization is to perform a principal component analysis to determine the essential properties. In the past, the moments of inertia have been the most important properties for capturing the variation in the data. However, since this dataset is significantly different from the previous one, it is essential to verify that inertia is still the most important. We begin by using the `pca` function in Matlab on our scaled dataset.

``` {.matlab}
[coeff,score,latent] = pca(scaled_data);
```

We can then put the `coeff` and `score` returned by the `pca` function into a biplot to easily visualize which properties are the most important. Unfortunately, we exist in a 3D world, so the centers of gravity and moments of inertia have to be analyzed in separate biplots.

The components of all six properties are represented in each of the biplots by the blue lines, and the red dots represent the score of each part. The data variation is captured fairly well for the current dataset by both the inertia and the center of gravity. I will continue using inertia since it performed slightly better here and was the best when the analysis was performed on just a single satellite. As the dataset grows and the model ingestion pipeline becomes more robust, more time will be spent analyzing the properties.

Now that it has been determined that inertia will be used, k-means clustering can be performed on the raw, unscaled dataset.

``` {.matlab}
[IDX, C] = kmeans(inertia,3);

histcounts(IDX) % Get the size of each cluster
    89    10     8
```

This data has four distinct groups, with much overlap in the larger groups. Therefore, to get a better view, only the smallest-magnitude group will be kept, since it seems to have the most variation, and k-means will be performed on it again to understand the data better.

``` {.matlab}
inertia = inertia(IDX == 1,:);
[IDX, C] = kmeans(inertia,3);

histcounts(IDX) % Get the size of each cluster
    76     6     7
```

This brings the dataset down to 89 parts from the original 108 and still leaves some small clusters. It highlights the need to grow the dataset by around 10x so that, hopefully, there will not be so many small, highly localized clusters.

## Next Steps

The current dataset needs to grow in both the amount and the variety of data. The most glaring issue is the lack of any actual debris, since the parts come straight from satellite assemblies. Getting accurate properties from the current scans we have is an entire research project in itself, so hopefully, acquiring pieces that are easier to scan can help bring the project back on track. The other, harder-to-fix issue is finding or deriving more data properties.
Properties such as cross-sectional area or aerodynamic drag would be very insightful but are likely to be difficult to implement in code and significantly more resource-intensive than the properties the code currently derives. Characteristic length is used heavily by NASA's DebriSat project and seems straightforward to implement, so it will be the next goal for the mesh-processing code; a rough sketch of one possible approach is included below. Before the next report, I would like to see this dataset grow closer to one thousand pieces.
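
The sketch below assumes the commonly cited definition of characteristic length from NASA's standard breakup model, which DebriSat follows: the average of the object's longest dimension, the largest dimension in the plane perpendicular to it, and the extent along the remaining orthogonal direction. The function name and the brute-force vertex search are placeholders, not the planned implementation.

``` {.julia}
using LinearAlgebra

# Characteristic length L_c = (X + Y + Z) / 3, where X is the longest
# dimension, Y the largest extent in the plane perpendicular to X, and Z the
# extent along the direction orthogonal to both. Brute-force O(n^2) search
# over the mesh vertices; adequate for a sketch.
function characteristic_length(vertices)
    X, u = 0.0, [1.0, 0.0, 0.0]
    for i in eachindex(vertices), j in i+1:length(vertices)
        d = vertices[j] - vertices[i]
        n = norm(d)
        n > X && ((X, u) = (n, d / n))
    end
    proj = [v - dot(v, u) * u for v in vertices]   # project onto plane perpendicular to u
    Y, w = 0.0, zeros(3)
    for i in eachindex(proj), j in i+1:length(proj)
        d = proj[j] - proj[i]
        n = norm(d)
        n > Y && ((Y, w) = (n, d / n))
    end
    z = cross(u, w)                                # third orthogonal direction
    heights = [dot(v, z) for v in vertices]
    Z = maximum(heights) - minimum(heights)
    return (X + Y + Z) / 3
end
```

Since the farthest pair of points always lies on the convex hull, switching the brute-force search to a hull-based one would be the natural optimization once the dataset grows.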