diff --git a/.Rbuildignore b/.Rbuildignore
index ab942c3..eb78e23 100644
--- a/.Rbuildignore
+++ b/.Rbuildignore
@@ -1,2 +1,5 @@
^data-raw$
^\.github$
+^_pkgdown\.yml$
+^docs$
+^pkgdown$
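These three patterns keep the pkgdown configuration and the rendered site out of the built package tarball. As a sketch of how such entries are typically added (assuming the usethis helper; the entries may equally have been written by hand):

# R sketch: usethis escapes the paths into the anchored regexes seen above
# (^_pkgdown\.yml$, ^docs$, ^pkgdown$).
usethis::use_build_ignore(c("_pkgdown.yml", "docs", "pkgdown"))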
diff --git a/.gitignore b/.gitignore
index 198dbfd..d454385 100644
--- a/.gitignore
+++ b/.gitignore
@@ -24,6 +24,9 @@
# produced vignettes
vignettes/*.html
vignettes/*.pdf
+## not ready yet
+vignettes/fuzzy_metrics.Rmd
+
# OAuth2 token, see https://github.com/hadley/httr/releases/tag/v0.3
.httr-oauth
@@ -39,8 +42,6 @@ vignettes/*.pdf
# R Environment Variables
.Renviron
-# pkgdown site
-docs/
# translation temp files
po/*~
diff --git a/_pkgdown.yml b/_pkgdown.yml
index ad09e58..cdd7f61 100644
--- a/_pkgdown.yml
+++ b/_pkgdown.yml
@@ -11,6 +11,13 @@ authors:
Siyuan Luo:
href: https://roseyuan.github.io/
+articles:
+- title: Articles
+ navbar: ~
+ contents:
+ - table
+ - SpatialExperiment
+
reference:
- title: Main functions
desc: >
@@ -32,6 +39,7 @@ reference:
- subtitle: Fuzzy metrics
contents:
- fuzzyHardMetrics
+ - fuzzyHardMetrics2
- fuzzyPartitionMetrics
- subtitle: Internal metrics for spatial domain detection
contents:
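The _pkgdown.yml changes add an article index (the table and SpatialExperiment vignettes) and list the new fuzzyHardMetrics2() entry under the fuzzy metrics reference subsection. A quick local check of the updated configuration could look like this (a sketch, assuming pkgdown is installed):

# R sketch: rebuild only the parts touched by this change, or the whole site.
pkgdown::build_reference_index()
pkgdown::build_articles()
# pkgdown::build_site()  # full rebuild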
diff --git a/docs/404.html b/docs/404.html
index 8184b90..465de02 100644
--- a/docs/404.html
+++ b/docs/404.html
@@ -20,7 +20,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -34,6 +34,7 @@
Articles
Reference
diff --git a/docs/LICENSE.html b/docs/LICENSE.html
index 3fb346e..519cd93 100644
--- a/docs/LICENSE.html
+++ b/docs/LICENSE.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
@@ -42,12 +43,12 @@ Articles
diff --git a/docs/articles/poem.html b/docs/articles/poem.html
index 9f9c816..9c01a36 100644
--- a/docs/articles/poem.html
+++ b/docs/articles/poem.html
@@ -20,7 +20,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -34,6 +34,7 @@
Articles
Reference
@@ -81,7 +82,7 @@
getSpatialExternalMetrics(true=sp_toys$label, pred=sp_toys$p1,
                          location=sp_toys[,c("x","y")], level="class")
## SpatialWH SpatialAWH SpatialWC SpatialAWC class cluster
-## 1 NA NA 0.779245 0.5302459 1 NA
-## 2 NA NA 1.000000 1.0000000 2 NA
-## 3 1.000000 1.0000000 NA NA NA 1
-## 4 0.804378 0.5911907 NA NA NA 2
+## 1 NA NA 0.8078698 0.5929504 1 NA
+## 2 NA NA 1.0000000 1.0000000 2 NA
+## 3 1.0000000 1.0000000 NA NA NA 1
+## 4 0.8323893 0.6493279 NA NA NA 2
res1.1 <- getSpatialExternalMetrics(true=sp_toys$label, pred=sp_toys$p1,
                                    location=sp_toys[,c("x","y")], level="dataset",
@@ -546,12 +546,15 @@ Internal metrics
sp_toys$c_elsa <- getSpatialInternalMetrics(label=sp_toys$label,
                                            location=sp_toys[,c("x","y")], level="element",
                                            metrics=c("ELSA"))$ELSA
+## the specified variable is considered as categorical...
sp_toys $ p1_elsa <- getSpatialInternalMetrics ( label= sp_toys $ p1 ,
location= sp_toys [ ,c ( "x" ,"y" ) ] , level= "element" ,
metrics= c ( "ELSA" ) ) $ ELSA
+## the specified variable is considered as categorical...
sp_toys $ p2_elsa <- getSpatialInternalMetrics ( label= sp_toys $ p2 ,
location= sp_toys [ ,c ( "x" ,"y" ) ] , level= "element" ,
- metrics= c ( "ELSA" ) ) $ ELSA
+ metrics= c ( "ELSA" ) ) $ ELSA
+## the specified variable is considered as categorical...
-## R version 4.4.1 (2024-06-14)
+## R version 4.4.2 (2024-10-31)
## Platform: x86_64-pc-linux-gnu
## Running under: Ubuntu 22.04.5 LTS
##
@@ -692,11 +693,11 @@ Session info## [16] rmarkdown_2.27 munsell_0.5.1 evaluate_0.24.0
## [19] jquerylib_0.1.4 kableExtra_1.4.0 fastmap_1.2.0
## [22] yaml_2.3.10 lifecycle_1.0.4 bookdown_0.40
-## [25] stringr_1.5.1 BiocManager_1.30.23 compiler_4.4.1
+## [25] stringr_1.5.1 BiocManager_1.30.23 compiler_4.4.2
## [28] fs_1.6.4 htmlwidgets_1.6.4 rstudioapi_0.16.0
## [31] systemfonts_1.1.0 digest_0.6.36 viridisLite_0.4.2
## [34] R6_2.5.1 magrittr_2.0.3 bslib_0.8.0
-## [37] tools_4.4.1 xml2_1.3.6 pkgdown_2.1.1
+## [37] tools_4.4.2 xml2_1.3.6 pkgdown_2.1.1
## [40] cachem_1.1.0 desc_1.4.3
On this page
diff --git a/docs/authors.html b/docs/authors.html
index 9966f7f..90183c1 100644
--- a/docs/authors.html
+++ b/docs/authors.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
@@ -60,13 +61,13 @@ Citation
Luo S, Germain P (2024).
poem: POpulation-based Evaluation Metrics .
-R package version 0.99.1, https://roseyuan.github.io/poem/ .
+R package version 0.99.2, https://roseyuan.github.io/poem/ .
@Manual{,
title = {poem: POpulation-based Evaluation Metrics},
author = {Siyuan Luo and Pierre-Luc Germain},
year = {2024},
- note = {R package version 0.99.1},
+ note = {R package version 0.99.2},
url = {https://roseyuan.github.io/poem/},
}
diff --git a/docs/index.html b/docs/index.html
index 841202b..7c74639 100644
--- a/docs/index.html
+++ b/docs/index.html
@@ -22,7 +22,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -36,6 +36,7 @@
Articles
Reference
diff --git a/docs/pkgdown.yml b/docs/pkgdown.yml
index ae5523d..16c01a3 100644
--- a/docs/pkgdown.yml
+++ b/docs/pkgdown.yml
@@ -3,8 +3,9 @@ pkgdown: 2.1.1
pkgdown_sha: ~
articles:
poem: poem.html
+ SpatialExperiment: SpatialExperiment.html
table: table.html
-last_built: 2024-11-15T07:00Z
+last_built: 2024-12-01T20:25Z
urls:
reference: https://roseyuan.github.io/poem/reference
article: https://roseyuan.github.io/poem/articles
diff --git a/docs/reference/CDbw.html b/docs/reference/CDbw.html
index 614b30d..16be10f 100644
--- a/docs/reference/CDbw.html
+++ b/docs/reference/CDbw.html
@@ -15,7 +15,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -27,6 +27,7 @@
Articles
Reference
diff --git a/docs/reference/CHAOS.html b/docs/reference/CHAOS.html
index fb0cda5..6ae675a 100644
--- a/docs/reference/CHAOS.html
+++ b/docs/reference/CHAOS.html
@@ -13,7 +13,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -25,6 +25,7 @@
Articles
Reference
diff --git a/docs/reference/ELSA.html b/docs/reference/ELSA.html
index 83410f3..c14af92 100644
--- a/docs/reference/ELSA.html
+++ b/docs/reference/ELSA.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
@@ -86,6 +87,7 @@ Examples
data(sp_toys)
data <- sp_toys
ELSA(data$label, data[,c("x","y")], k=6)
+#> the specified variable is considered as categorical...
#> Ea Ec ELSA
#> 1 0.00000000 0.0000000 0.00000000
#> 2 0.00000000 0.0000000 0.00000000
@@ -328,14 +330,15 @@ Examples#> 239 0.00000000 0.0000000 0.00000000
#> 240 0.00000000 0.0000000 0.00000000
ELSA(data$p1, data[,c("x","y")], k=6)
+#> the specified variable is considered as categorical...
#> Ea Ec ELSA
#> 1 0.00000000 0.0000000 0.00000000
#> 2 0.00000000 0.0000000 0.00000000
-#> 3 0.09090909 0.4138169 0.03761971
-#> 4 0.20000000 0.6840384 0.13680769
-#> 5 0.66666667 0.9709506 0.64730040
-#> 6 0.11111111 0.4689956 0.05211062
-#> 7 0.00000000 0.0000000 0.00000000
+#> 3 0.00000000 0.0000000 0.00000000
+#> 4 0.00000000 0.0000000 0.00000000
+#> 5 0.22222222 0.7219281 0.16042847
+#> 6 0.44444444 0.9709506 0.43153360
+#> 7 0.20000000 0.6840384 0.13680769
#> 8 0.00000000 0.0000000 0.00000000
#> 9 0.00000000 0.0000000 0.00000000
#> 10 0.00000000 0.0000000 0.00000000
@@ -346,11 +349,11 @@ Examples#> 15 0.00000000 0.0000000 0.00000000
#> 16 0.00000000 0.0000000 0.00000000
#> 17 0.00000000 0.0000000 0.00000000
-#> 18 0.13333333 0.5435644 0.07247526
-#> 19 0.28571429 0.8366407 0.23904021
-#> 20 0.53846154 1.0000000 0.53846154
-#> 21 0.07692308 0.3712323 0.02855633
-#> 22 0.00000000 0.0000000 0.00000000
+#> 18 0.00000000 0.0000000 0.00000000
+#> 19 0.07142857 0.3533593 0.02523995
+#> 20 0.30769231 0.8631206 0.26557556
+#> 21 0.38461538 0.9402860 0.36164845
+#> 22 0.14285714 0.5665095 0.08092993
#> 23 0.00000000 0.0000000 0.00000000
#> 24 0.00000000 0.0000000 0.00000000
#> 25 0.00000000 0.0000000 0.00000000
@@ -362,14 +365,14 @@ Examples#> 31 0.00000000 0.0000000 0.00000000
#> 32 0.00000000 0.0000000 0.00000000
#> 33 0.00000000 0.0000000 0.00000000
-#> 34 0.11764706 0.5032583 0.05920686
-#> 35 0.33333333 0.8960382 0.29867941
-#> 36 0.40000000 0.9544340 0.38177360
-#> 37 0.23529412 0.7642045 0.17981283
+#> 34 0.00000000 0.0000000 0.00000000
+#> 35 0.06666667 0.3372901 0.02248600
+#> 36 0.46666667 0.9886994 0.46139306
+#> 37 0.35294118 0.9182958 0.32410441
#> 38 0.16666667 0.6292492 0.10487487
-#> 39 0.17647059 0.6500224 0.11470984
-#> 40 0.13333333 0.5435644 0.07247526
-#> 41 0.07692308 0.3712323 0.02855633
+#> 39 0.00000000 0.0000000 0.00000000
+#> 40 0.00000000 0.0000000 0.00000000
+#> 41 0.00000000 0.0000000 0.00000000
#> 42 0.00000000 0.0000000 0.00000000
#> 43 0.00000000 0.0000000 0.00000000
#> 44 0.00000000 0.0000000 0.00000000
@@ -377,15 +380,15 @@ Examples#> 46 0.00000000 0.0000000 0.00000000
#> 47 0.00000000 0.0000000 0.00000000
#> 48 0.00000000 0.0000000 0.00000000
-#> 49 0.11764706 0.5032583 0.05920686
-#> 50 0.26666667 0.8112781 0.21634083
-#> 51 0.53333333 1.0000000 0.53333333
-#> 52 0.41176471 0.9640788 0.39697361
-#> 53 0.38888889 0.9494520 0.36923134
-#> 54 0.38888889 0.9494520 0.36923134
-#> 55 0.33333333 0.8960382 0.29867941
-#> 56 0.14285714 0.5665095 0.08092993
-#> 57 0.06666667 0.3372901 0.02248600
+#> 49 0.00000000 0.0000000 0.00000000
+#> 50 0.06666667 0.3372901 0.02248600
+#> 51 0.46666667 0.9886994 0.46139306
+#> 52 0.35294118 0.9182958 0.32410441
+#> 53 0.16666667 0.6292492 0.10487487
+#> 54 0.00000000 0.0000000 0.00000000
+#> 55 0.00000000 0.0000000 0.00000000
+#> 56 0.00000000 0.0000000 0.00000000
+#> 57 0.00000000 0.0000000 0.00000000
#> 58 0.00000000 0.0000000 0.00000000
#> 59 0.00000000 0.0000000 0.00000000
#> 60 0.00000000 0.0000000 0.00000000
@@ -393,15 +396,15 @@ Examples#> 62 0.00000000 0.0000000 0.00000000
#> 63 0.00000000 0.0000000 0.00000000
#> 64 0.00000000 0.0000000 0.00000000
-#> 65 0.06666667 0.3372901 0.02248600
-#> 66 0.26666667 0.8112781 0.21634083
-#> 67 0.35294118 0.9182958 0.32410441
+#> 65 0.00000000 0.0000000 0.00000000
+#> 66 0.20000000 0.6962123 0.13924245
+#> 67 0.41176471 0.9640788 0.39697361
#> 68 0.38888889 0.9494520 0.36923134
-#> 69 0.41176471 0.9640788 0.39697361
-#> 70 0.46666667 0.9886994 0.46139306
-#> 71 0.69230769 0.9402860 0.65096720
-#> 72 0.13333333 0.5435644 0.07247526
-#> 73 0.06250000 0.3227570 0.02017231
+#> 69 0.17647059 0.6500224 0.11470984
+#> 70 0.00000000 0.0000000 0.00000000
+#> 71 0.00000000 0.0000000 0.00000000
+#> 72 0.00000000 0.0000000 0.00000000
+#> 73 0.00000000 0.0000000 0.00000000
#> 74 0.00000000 0.0000000 0.00000000
#> 75 0.00000000 0.0000000 0.00000000
#> 76 0.00000000 0.0000000 0.00000000
@@ -409,13 +412,13 @@ Examples#> 78 0.00000000 0.0000000 0.00000000
#> 79 0.00000000 0.0000000 0.00000000
#> 80 0.00000000 0.0000000 0.00000000
-#> 81 0.13333333 0.5435644 0.07247526
-#> 82 0.17647059 0.6500224 0.11470984
-#> 83 0.16666667 0.6292492 0.10487487
-#> 84 0.22222222 0.7424876 0.16499724
-#> 85 0.33333333 0.8960382 0.29867941
-#> 86 0.42857143 0.9709506 0.41612168
-#> 87 0.13333333 0.5435644 0.07247526
+#> 81 0.20000000 0.6962123 0.13924245
+#> 82 0.41176471 0.9640788 0.39697361
+#> 83 0.38888889 0.9494520 0.36923134
+#> 84 0.16666667 0.6292492 0.10487487
+#> 85 0.00000000 0.0000000 0.00000000
+#> 86 0.00000000 0.0000000 0.00000000
+#> 87 0.00000000 0.0000000 0.00000000
#> 88 0.00000000 0.0000000 0.00000000
#> 89 0.00000000 0.0000000 0.00000000
#> 90 0.00000000 0.0000000 0.00000000
@@ -425,13 +428,13 @@ Examples#> 94 0.00000000 0.0000000 0.00000000
#> 95 0.00000000 0.0000000 0.00000000
#> 96 0.00000000 0.0000000 0.00000000
-#> 97 0.00000000 0.0000000 0.00000000
-#> 98 0.05555556 0.2974722 0.01652624
-#> 99 0.11764706 0.5032583 0.05920686
-#> 100 0.26666667 0.8112781 0.21634083
-#> 101 0.53846154 1.0000000 0.53846154
-#> 102 0.20000000 0.6962123 0.13924245
-#> 103 0.06250000 0.3227570 0.02017231
+#> 97 0.17647059 0.6500224 0.11470984
+#> 98 0.38888889 0.9494520 0.36923134
+#> 99 0.41176471 0.9640788 0.39697361
+#> 100 0.20000000 0.6962123 0.13924245
+#> 101 0.00000000 0.0000000 0.00000000
+#> 102 0.00000000 0.0000000 0.00000000
+#> 103 0.00000000 0.0000000 0.00000000
#> 104 0.00000000 0.0000000 0.00000000
#> 105 0.00000000 0.0000000 0.00000000
#> 106 0.00000000 0.0000000 0.00000000
@@ -440,12 +443,12 @@ Examples#> 109 0.00000000 0.0000000 0.00000000
#> 110 0.00000000 0.0000000 0.00000000
#> 111 0.00000000 0.0000000 0.00000000
-#> 112 0.05882353 0.3095434 0.01820844
-#> 113 0.16666667 0.6292492 0.10487487
-#> 114 0.33333333 0.8997438 0.29991459
-#> 115 0.53333333 1.0000000 0.53333333
-#> 116 0.28571429 0.8366407 0.23904021
-#> 117 0.06666667 0.3372901 0.02248600
+#> 112 0.17647059 0.6500224 0.11470984
+#> 113 0.38888889 0.9494520 0.36923134
+#> 114 0.38888889 0.9494520 0.36923134
+#> 115 0.20000000 0.6962123 0.13924245
+#> 116 0.00000000 0.0000000 0.00000000
+#> 117 0.00000000 0.0000000 0.00000000
#> 118 0.00000000 0.0000000 0.00000000
#> 119 0.00000000 0.0000000 0.00000000
#> 120 0.00000000 0.0000000 0.00000000
@@ -455,12 +458,12 @@ Examples#> 124 0.00000000 0.0000000 0.00000000
#> 125 0.00000000 0.0000000 0.00000000
#> 126 0.00000000 0.0000000 0.00000000
-#> 127 0.05882353 0.3095434 0.01820844
+#> 127 0.00000000 0.0000000 0.00000000
#> 128 0.16666667 0.6292492 0.10487487
-#> 129 0.76470588 0.8524052 0.65183925
-#> 130 0.53333333 1.0000000 0.53333333
-#> 131 0.23076923 0.7495953 0.17298352
-#> 132 0.06666667 0.3372901 0.02248600
+#> 129 0.35294118 0.9182958 0.32410441
+#> 130 0.46666667 0.9886994 0.46139306
+#> 131 0.07692308 0.3712323 0.02855633
+#> 132 0.00000000 0.0000000 0.00000000
#> 133 0.00000000 0.0000000 0.00000000
#> 134 0.00000000 0.0000000 0.00000000
#> 135 0.00000000 0.0000000 0.00000000
@@ -470,11 +473,11 @@ Examples#> 139 0.00000000 0.0000000 0.00000000
#> 140 0.00000000 0.0000000 0.00000000
#> 141 0.00000000 0.0000000 0.00000000
-#> 142 0.11764706 0.5032583 0.05920686
-#> 143 0.27777778 0.8314744 0.23096511
-#> 144 0.55555556 0.9980009 0.55444494
-#> 145 0.33333333 0.8960382 0.29867941
-#> 146 0.00000000 0.0000000 0.00000000
+#> 142 0.00000000 0.0000000 0.00000000
+#> 143 0.16666667 0.6292492 0.10487487
+#> 144 0.38888889 0.9494520 0.36923134
+#> 145 0.46666667 0.9886994 0.46139306
+#> 146 0.14285714 0.5665095 0.08092993
#> 147 0.00000000 0.0000000 0.00000000
#> 148 0.00000000 0.0000000 0.00000000
#> 149 0.00000000 0.0000000 0.00000000
@@ -486,11 +489,11 @@ Examples#> 155 0.00000000 0.0000000 0.00000000
#> 156 0.00000000 0.0000000 0.00000000
#> 157 0.00000000 0.0000000 0.00000000
-#> 158 0.16666667 0.6292492 0.10487487
-#> 159 0.35294118 0.9182958 0.32410441
-#> 160 0.46666667 0.9886994 0.46139306
-#> 161 0.07692308 0.3712323 0.02855633
-#> 162 0.00000000 0.0000000 0.00000000
+#> 158 0.00000000 0.0000000 0.00000000
+#> 159 0.11764706 0.5032583 0.05920686
+#> 160 0.33333333 0.8960382 0.29867941
+#> 161 0.38461538 0.9402860 0.36164845
+#> 162 0.06666667 0.3372901 0.02248600
#> 163 0.00000000 0.0000000 0.00000000
#> 164 0.00000000 0.0000000 0.00000000
#> 165 0.00000000 0.0000000 0.00000000
@@ -501,11 +504,11 @@ Examples#> 170 0.00000000 0.0000000 0.00000000
#> 171 0.00000000 0.0000000 0.00000000
#> 172 0.00000000 0.0000000 0.00000000
-#> 173 0.16666667 0.6292492 0.10487487
-#> 174 0.38888889 0.9494520 0.36923134
-#> 175 0.46666667 0.9886994 0.46139306
-#> 176 0.14285714 0.5665095 0.08092993
-#> 177 0.00000000 0.0000000 0.00000000
+#> 173 0.00000000 0.0000000 0.00000000
+#> 174 0.16666667 0.6292492 0.10487487
+#> 175 0.33333333 0.8960382 0.29867941
+#> 176 0.42857143 0.9709506 0.41612168
+#> 177 0.06666667 0.3372901 0.02248600
#> 178 0.00000000 0.0000000 0.00000000
#> 179 0.00000000 0.0000000 0.00000000
#> 180 0.00000000 0.0000000 0.00000000
@@ -517,11 +520,11 @@ Examples#> 186 0.00000000 0.0000000 0.00000000
#> 187 0.00000000 0.0000000 0.00000000
#> 188 0.00000000 0.0000000 0.00000000
-#> 189 0.11764706 0.5032583 0.05920686
-#> 190 0.33333333 0.8960382 0.29867941
-#> 191 0.38461538 0.9402860 0.36164845
-#> 192 0.06666667 0.3372901 0.02248600
-#> 193 0.00000000 0.0000000 0.00000000
+#> 189 0.00000000 0.0000000 0.00000000
+#> 190 0.06666667 0.3372901 0.02248600
+#> 191 0.46153846 0.9852281 0.45472068
+#> 192 0.33333333 0.8960382 0.29867941
+#> 193 0.18750000 0.6722948 0.12605528
#> 194 0.00000000 0.0000000 0.00000000
#> 195 0.00000000 0.0000000 0.00000000
#> 196 0.00000000 0.0000000 0.00000000
@@ -532,11 +535,11 @@ Examples#> 201 0.00000000 0.0000000 0.00000000
#> 202 0.00000000 0.0000000 0.00000000
#> 203 0.00000000 0.0000000 0.00000000
-#> 204 0.16666667 0.6292492 0.10487487
-#> 205 0.33333333 0.8960382 0.29867941
+#> 204 0.00000000 0.0000000 0.00000000
+#> 205 0.06666667 0.3372901 0.02248600
#> 206 0.42857143 0.9709506 0.41612168
-#> 207 0.06666667 0.3372901 0.02248600
-#> 208 0.00000000 0.0000000 0.00000000
+#> 207 0.33333333 0.8960382 0.29867941
+#> 208 0.18750000 0.6722948 0.12605528
#> 209 0.00000000 0.0000000 0.00000000
#> 210 0.00000000 0.0000000 0.00000000
#> 211 0.00000000 0.0000000 0.00000000
@@ -548,11 +551,11 @@ Examples#> 217 0.00000000 0.0000000 0.00000000
#> 218 0.00000000 0.0000000 0.00000000
#> 219 0.00000000 0.0000000 0.00000000
-#> 220 0.07692308 0.3712323 0.02855633
-#> 221 0.50000000 0.9957275 0.49786373
-#> 222 0.30769231 0.8631206 0.26557556
-#> 223 0.14285714 0.5665095 0.08092993
-#> 224 0.00000000 0.0000000 0.00000000
+#> 220 0.00000000 0.0000000 0.00000000
+#> 221 0.16666667 0.6193822 0.10323037
+#> 222 0.53846154 1.0000000 0.53846154
+#> 223 0.35714286 0.9182958 0.32796280
+#> 224 0.15384615 0.5916728 0.09102658
#> 225 0.00000000 0.0000000 0.00000000
#> 226 0.00000000 0.0000000 0.00000000
#> 227 0.00000000 0.0000000 0.00000000
@@ -563,21 +566,22 @@ Examples#> 232 0.00000000 0.0000000 0.00000000
#> 233 0.00000000 0.0000000 0.00000000
#> 234 0.00000000 0.0000000 0.00000000
-#> 235 0.11111111 0.4689956 0.05211062
-#> 236 0.55555556 1.0000000 0.55555556
-#> 237 0.22222222 0.7219281 0.16042847
-#> 238 0.10000000 0.4394970 0.04394970
-#> 239 0.00000000 0.0000000 0.00000000
+#> 235 0.00000000 0.0000000 0.00000000
+#> 236 0.22222222 0.7219281 0.16042847
+#> 237 0.66666667 0.9709506 0.64730040
+#> 238 0.30000000 0.8453509 0.25360528
+#> 239 0.12500000 0.5032583 0.06290729
#> 240 0.00000000 0.0000000 0.00000000
ELSA(data$p2, data[,c("x","y")], k=6)
+#> the specified variable is considered as categorical...
#> Ea Ec ELSA
#> 1 0.00000000 0.0000000 0.00000000
#> 2 0.00000000 0.0000000 0.00000000
-#> 3 0.00000000 0.0000000 0.00000000
-#> 4 0.00000000 0.0000000 0.00000000
-#> 5 0.22222222 0.7219281 0.16042847
-#> 6 0.44444444 0.9709506 0.43153360
-#> 7 0.20000000 0.6840384 0.13680769
+#> 3 0.09090909 0.4138169 0.03761971
+#> 4 0.20000000 0.6840384 0.13680769
+#> 5 0.66666667 0.9709506 0.64730040
+#> 6 0.11111111 0.4689956 0.05211062
+#> 7 0.00000000 0.0000000 0.00000000
#> 8 0.00000000 0.0000000 0.00000000
#> 9 0.00000000 0.0000000 0.00000000
#> 10 0.00000000 0.0000000 0.00000000
@@ -588,11 +592,11 @@ Examples#> 15 0.00000000 0.0000000 0.00000000
#> 16 0.00000000 0.0000000 0.00000000
#> 17 0.00000000 0.0000000 0.00000000
-#> 18 0.00000000 0.0000000 0.00000000
-#> 19 0.07142857 0.3533593 0.02523995
-#> 20 0.30769231 0.8631206 0.26557556
-#> 21 0.38461538 0.9402860 0.36164845
-#> 22 0.14285714 0.5665095 0.08092993
+#> 18 0.13333333 0.5435644 0.07247526
+#> 19 0.28571429 0.8366407 0.23904021
+#> 20 0.53846154 1.0000000 0.53846154
+#> 21 0.07692308 0.3712323 0.02855633
+#> 22 0.00000000 0.0000000 0.00000000
#> 23 0.00000000 0.0000000 0.00000000
#> 24 0.00000000 0.0000000 0.00000000
#> 25 0.00000000 0.0000000 0.00000000
@@ -604,14 +608,14 @@ Examples#> 31 0.00000000 0.0000000 0.00000000
#> 32 0.00000000 0.0000000 0.00000000
#> 33 0.00000000 0.0000000 0.00000000
-#> 34 0.00000000 0.0000000 0.00000000
-#> 35 0.06666667 0.3372901 0.02248600
-#> 36 0.46666667 0.9886994 0.46139306
-#> 37 0.35294118 0.9182958 0.32410441
+#> 34 0.11764706 0.5032583 0.05920686
+#> 35 0.33333333 0.8960382 0.29867941
+#> 36 0.40000000 0.9544340 0.38177360
+#> 37 0.23529412 0.7642045 0.17981283
#> 38 0.16666667 0.6292492 0.10487487
-#> 39 0.00000000 0.0000000 0.00000000
-#> 40 0.00000000 0.0000000 0.00000000
-#> 41 0.00000000 0.0000000 0.00000000
+#> 39 0.17647059 0.6500224 0.11470984
+#> 40 0.13333333 0.5435644 0.07247526
+#> 41 0.07692308 0.3712323 0.02855633
#> 42 0.00000000 0.0000000 0.00000000
#> 43 0.00000000 0.0000000 0.00000000
#> 44 0.00000000 0.0000000 0.00000000
@@ -619,15 +623,15 @@ Examples#> 46 0.00000000 0.0000000 0.00000000
#> 47 0.00000000 0.0000000 0.00000000
#> 48 0.00000000 0.0000000 0.00000000
-#> 49 0.00000000 0.0000000 0.00000000
-#> 50 0.06666667 0.3372901 0.02248600
-#> 51 0.46666667 0.9886994 0.46139306
-#> 52 0.35294118 0.9182958 0.32410441
-#> 53 0.16666667 0.6292492 0.10487487
-#> 54 0.00000000 0.0000000 0.00000000
-#> 55 0.00000000 0.0000000 0.00000000
-#> 56 0.00000000 0.0000000 0.00000000
-#> 57 0.00000000 0.0000000 0.00000000
+#> 49 0.11764706 0.5032583 0.05920686
+#> 50 0.26666667 0.8112781 0.21634083
+#> 51 0.53333333 1.0000000 0.53333333
+#> 52 0.41176471 0.9640788 0.39697361
+#> 53 0.38888889 0.9494520 0.36923134
+#> 54 0.38888889 0.9494520 0.36923134
+#> 55 0.33333333 0.8960382 0.29867941
+#> 56 0.14285714 0.5665095 0.08092993
+#> 57 0.06666667 0.3372901 0.02248600
#> 58 0.00000000 0.0000000 0.00000000
#> 59 0.00000000 0.0000000 0.00000000
#> 60 0.00000000 0.0000000 0.00000000
@@ -635,15 +639,15 @@ Examples#> 62 0.00000000 0.0000000 0.00000000
#> 63 0.00000000 0.0000000 0.00000000
#> 64 0.00000000 0.0000000 0.00000000
-#> 65 0.00000000 0.0000000 0.00000000
-#> 66 0.20000000 0.6962123 0.13924245
-#> 67 0.41176471 0.9640788 0.39697361
+#> 65 0.06666667 0.3372901 0.02248600
+#> 66 0.26666667 0.8112781 0.21634083
+#> 67 0.35294118 0.9182958 0.32410441
#> 68 0.38888889 0.9494520 0.36923134
-#> 69 0.17647059 0.6500224 0.11470984
-#> 70 0.00000000 0.0000000 0.00000000
-#> 71 0.00000000 0.0000000 0.00000000
-#> 72 0.00000000 0.0000000 0.00000000
-#> 73 0.00000000 0.0000000 0.00000000
+#> 69 0.41176471 0.9640788 0.39697361
+#> 70 0.46666667 0.9886994 0.46139306
+#> 71 0.69230769 0.9402860 0.65096720
+#> 72 0.13333333 0.5435644 0.07247526
+#> 73 0.06250000 0.3227570 0.02017231
#> 74 0.00000000 0.0000000 0.00000000
#> 75 0.00000000 0.0000000 0.00000000
#> 76 0.00000000 0.0000000 0.00000000
@@ -651,13 +655,13 @@ Examples#> 78 0.00000000 0.0000000 0.00000000
#> 79 0.00000000 0.0000000 0.00000000
#> 80 0.00000000 0.0000000 0.00000000
-#> 81 0.20000000 0.6962123 0.13924245
-#> 82 0.41176471 0.9640788 0.39697361
-#> 83 0.38888889 0.9494520 0.36923134
-#> 84 0.16666667 0.6292492 0.10487487
-#> 85 0.00000000 0.0000000 0.00000000
-#> 86 0.00000000 0.0000000 0.00000000
-#> 87 0.00000000 0.0000000 0.00000000
+#> 81 0.13333333 0.5435644 0.07247526
+#> 82 0.17647059 0.6500224 0.11470984
+#> 83 0.16666667 0.6292492 0.10487487
+#> 84 0.22222222 0.7424876 0.16499724
+#> 85 0.33333333 0.8960382 0.29867941
+#> 86 0.42857143 0.9709506 0.41612168
+#> 87 0.13333333 0.5435644 0.07247526
#> 88 0.00000000 0.0000000 0.00000000
#> 89 0.00000000 0.0000000 0.00000000
#> 90 0.00000000 0.0000000 0.00000000
@@ -667,13 +671,13 @@ Examples#> 94 0.00000000 0.0000000 0.00000000
#> 95 0.00000000 0.0000000 0.00000000
#> 96 0.00000000 0.0000000 0.00000000
-#> 97 0.17647059 0.6500224 0.11470984
-#> 98 0.38888889 0.9494520 0.36923134
-#> 99 0.41176471 0.9640788 0.39697361
-#> 100 0.20000000 0.6962123 0.13924245
-#> 101 0.00000000 0.0000000 0.00000000
-#> 102 0.00000000 0.0000000 0.00000000
-#> 103 0.00000000 0.0000000 0.00000000
+#> 97 0.00000000 0.0000000 0.00000000
+#> 98 0.05555556 0.2974722 0.01652624
+#> 99 0.11764706 0.5032583 0.05920686
+#> 100 0.26666667 0.8112781 0.21634083
+#> 101 0.53846154 1.0000000 0.53846154
+#> 102 0.20000000 0.6962123 0.13924245
+#> 103 0.06250000 0.3227570 0.02017231
#> 104 0.00000000 0.0000000 0.00000000
#> 105 0.00000000 0.0000000 0.00000000
#> 106 0.00000000 0.0000000 0.00000000
@@ -682,12 +686,12 @@ Examples#> 109 0.00000000 0.0000000 0.00000000
#> 110 0.00000000 0.0000000 0.00000000
#> 111 0.00000000 0.0000000 0.00000000
-#> 112 0.17647059 0.6500224 0.11470984
-#> 113 0.38888889 0.9494520 0.36923134
-#> 114 0.38888889 0.9494520 0.36923134
-#> 115 0.20000000 0.6962123 0.13924245
-#> 116 0.00000000 0.0000000 0.00000000
-#> 117 0.00000000 0.0000000 0.00000000
+#> 112 0.05882353 0.3095434 0.01820844
+#> 113 0.16666667 0.6292492 0.10487487
+#> 114 0.33333333 0.8997438 0.29991459
+#> 115 0.53333333 1.0000000 0.53333333
+#> 116 0.28571429 0.8366407 0.23904021
+#> 117 0.06666667 0.3372901 0.02248600
#> 118 0.00000000 0.0000000 0.00000000
#> 119 0.00000000 0.0000000 0.00000000
#> 120 0.00000000 0.0000000 0.00000000
@@ -697,12 +701,12 @@ Examples#> 124 0.00000000 0.0000000 0.00000000
#> 125 0.00000000 0.0000000 0.00000000
#> 126 0.00000000 0.0000000 0.00000000
-#> 127 0.00000000 0.0000000 0.00000000
+#> 127 0.05882353 0.3095434 0.01820844
#> 128 0.16666667 0.6292492 0.10487487
-#> 129 0.35294118 0.9182958 0.32410441
-#> 130 0.46666667 0.9886994 0.46139306
-#> 131 0.07692308 0.3712323 0.02855633
-#> 132 0.00000000 0.0000000 0.00000000
+#> 129 0.76470588 0.8524052 0.65183925
+#> 130 0.53333333 1.0000000 0.53333333
+#> 131 0.23076923 0.7495953 0.17298352
+#> 132 0.06666667 0.3372901 0.02248600
#> 133 0.00000000 0.0000000 0.00000000
#> 134 0.00000000 0.0000000 0.00000000
#> 135 0.00000000 0.0000000 0.00000000
@@ -712,11 +716,11 @@ Examples#> 139 0.00000000 0.0000000 0.00000000
#> 140 0.00000000 0.0000000 0.00000000
#> 141 0.00000000 0.0000000 0.00000000
-#> 142 0.00000000 0.0000000 0.00000000
-#> 143 0.16666667 0.6292492 0.10487487
-#> 144 0.38888889 0.9494520 0.36923134
-#> 145 0.46666667 0.9886994 0.46139306
-#> 146 0.14285714 0.5665095 0.08092993
+#> 142 0.11764706 0.5032583 0.05920686
+#> 143 0.27777778 0.8314744 0.23096511
+#> 144 0.55555556 0.9980009 0.55444494
+#> 145 0.33333333 0.8960382 0.29867941
+#> 146 0.00000000 0.0000000 0.00000000
#> 147 0.00000000 0.0000000 0.00000000
#> 148 0.00000000 0.0000000 0.00000000
#> 149 0.00000000 0.0000000 0.00000000
@@ -728,11 +732,11 @@ Examples#> 155 0.00000000 0.0000000 0.00000000
#> 156 0.00000000 0.0000000 0.00000000
#> 157 0.00000000 0.0000000 0.00000000
-#> 158 0.00000000 0.0000000 0.00000000
-#> 159 0.11764706 0.5032583 0.05920686
-#> 160 0.33333333 0.8960382 0.29867941
-#> 161 0.38461538 0.9402860 0.36164845
-#> 162 0.06666667 0.3372901 0.02248600
+#> 158 0.16666667 0.6292492 0.10487487
+#> 159 0.35294118 0.9182958 0.32410441
+#> 160 0.46666667 0.9886994 0.46139306
+#> 161 0.07692308 0.3712323 0.02855633
+#> 162 0.00000000 0.0000000 0.00000000
#> 163 0.00000000 0.0000000 0.00000000
#> 164 0.00000000 0.0000000 0.00000000
#> 165 0.00000000 0.0000000 0.00000000
@@ -743,11 +747,11 @@ Examples#> 170 0.00000000 0.0000000 0.00000000
#> 171 0.00000000 0.0000000 0.00000000
#> 172 0.00000000 0.0000000 0.00000000
-#> 173 0.00000000 0.0000000 0.00000000
-#> 174 0.16666667 0.6292492 0.10487487
-#> 175 0.33333333 0.8960382 0.29867941
-#> 176 0.42857143 0.9709506 0.41612168
-#> 177 0.06666667 0.3372901 0.02248600
+#> 173 0.16666667 0.6292492 0.10487487
+#> 174 0.38888889 0.9494520 0.36923134
+#> 175 0.46666667 0.9886994 0.46139306
+#> 176 0.14285714 0.5665095 0.08092993
+#> 177 0.00000000 0.0000000 0.00000000
#> 178 0.00000000 0.0000000 0.00000000
#> 179 0.00000000 0.0000000 0.00000000
#> 180 0.00000000 0.0000000 0.00000000
@@ -759,11 +763,11 @@ Examples#> 186 0.00000000 0.0000000 0.00000000
#> 187 0.00000000 0.0000000 0.00000000
#> 188 0.00000000 0.0000000 0.00000000
-#> 189 0.00000000 0.0000000 0.00000000
-#> 190 0.06666667 0.3372901 0.02248600
-#> 191 0.46153846 0.9852281 0.45472068
-#> 192 0.33333333 0.8960382 0.29867941
-#> 193 0.18750000 0.6722948 0.12605528
+#> 189 0.11764706 0.5032583 0.05920686
+#> 190 0.33333333 0.8960382 0.29867941
+#> 191 0.38461538 0.9402860 0.36164845
+#> 192 0.06666667 0.3372901 0.02248600
+#> 193 0.00000000 0.0000000 0.00000000
#> 194 0.00000000 0.0000000 0.00000000
#> 195 0.00000000 0.0000000 0.00000000
#> 196 0.00000000 0.0000000 0.00000000
@@ -774,11 +778,11 @@ Examples#> 201 0.00000000 0.0000000 0.00000000
#> 202 0.00000000 0.0000000 0.00000000
#> 203 0.00000000 0.0000000 0.00000000
-#> 204 0.00000000 0.0000000 0.00000000
-#> 205 0.06666667 0.3372901 0.02248600
+#> 204 0.16666667 0.6292492 0.10487487
+#> 205 0.33333333 0.8960382 0.29867941
#> 206 0.42857143 0.9709506 0.41612168
-#> 207 0.33333333 0.8960382 0.29867941
-#> 208 0.18750000 0.6722948 0.12605528
+#> 207 0.06666667 0.3372901 0.02248600
+#> 208 0.00000000 0.0000000 0.00000000
#> 209 0.00000000 0.0000000 0.00000000
#> 210 0.00000000 0.0000000 0.00000000
#> 211 0.00000000 0.0000000 0.00000000
@@ -790,11 +794,11 @@ Examples#> 217 0.00000000 0.0000000 0.00000000
#> 218 0.00000000 0.0000000 0.00000000
#> 219 0.00000000 0.0000000 0.00000000
-#> 220 0.00000000 0.0000000 0.00000000
-#> 221 0.16666667 0.6193822 0.10323037
-#> 222 0.53846154 1.0000000 0.53846154
-#> 223 0.35714286 0.9182958 0.32796280
-#> 224 0.15384615 0.5916728 0.09102658
+#> 220 0.07692308 0.3712323 0.02855633
+#> 221 0.50000000 0.9957275 0.49786373
+#> 222 0.30769231 0.8631206 0.26557556
+#> 223 0.14285714 0.5665095 0.08092993
+#> 224 0.00000000 0.0000000 0.00000000
#> 225 0.00000000 0.0000000 0.00000000
#> 226 0.00000000 0.0000000 0.00000000
#> 227 0.00000000 0.0000000 0.00000000
@@ -805,11 +809,11 @@ Examples#> 232 0.00000000 0.0000000 0.00000000
#> 233 0.00000000 0.0000000 0.00000000
#> 234 0.00000000 0.0000000 0.00000000
-#> 235 0.00000000 0.0000000 0.00000000
-#> 236 0.22222222 0.7219281 0.16042847
-#> 237 0.66666667 0.9709506 0.64730040
-#> 238 0.30000000 0.8453509 0.25360528
-#> 239 0.12500000 0.5032583 0.06290729
+#> 235 0.11111111 0.4689956 0.05211062
+#> 236 0.55555556 1.0000000 0.55555556
+#> 237 0.22222222 0.7219281 0.16042847
+#> 238 0.10000000 0.4394970 0.04394970
+#> 239 0.00000000 0.0000000 0.00000000
#> 240 0.00000000 0.0000000 0.00000000
diff --git a/docs/reference/FMeasure.html b/docs/reference/FMeasure.html
index 77e55c9..f26fa0c 100644
--- a/docs/reference/FMeasure.html
+++ b/docs/reference/FMeasure.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/PAS.html b/docs/reference/PAS.html
index da63f4a..c4aba44 100644
--- a/docs/reference/PAS.html
+++ b/docs/reference/PAS.html
@@ -11,7 +11,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -23,6 +23,7 @@
Articles
Reference
@@ -116,20 +117,20 @@ Examples#>
PAS(data$p1, data[,c("x","y")], k=6)
#> $PAS
-#> [1] 0.01666667
+#> [1] 0.004166667
#>
#> $abnormalty
-#> [1] FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
+#> [1] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [37] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [49] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
-#> [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE
+#> [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [73] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [85] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [97] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [109] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
-#> [121] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE
+#> [121] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [133] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [145] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [157] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
@@ -138,24 +139,24 @@ Examples#> [193] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [205] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [217] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
-#> [229] FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE FALSE
+#> [229] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE
#>
PAS(data$p2, data[,c("x","y")], k=6)
#> $PAS
-#> [1] 0.004166667
+#> [1] 0.01666667
#>
#> $abnormalty
-#> [1] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
+#> [1] FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [37] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [49] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
-#> [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
+#> [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE
#> [73] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [85] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [97] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [109] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
-#> [121] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
+#> [121] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE
#> [133] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [145] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [157] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
@@ -164,7 +165,7 @@ Examples#> [193] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [205] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#> [217] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
-#> [229] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE
+#> [229] FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE FALSE
#>
diff --git a/docs/reference/dbcv.html b/docs/reference/dbcv.html
index 6ac10ff..1cf1731 100644
--- a/docs/reference/dbcv.html
+++ b/docs/reference/dbcv.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/dot-check_duplicated_samples.html b/docs/reference/dot-check_duplicated_samples.html
index 955d4f1..ac613f6 100644
--- a/docs/reference/dot-check_duplicated_samples.html
+++ b/docs/reference/dot-check_duplicated_samples.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/dot-compute_cluster_core_distance.html b/docs/reference/dot-compute_cluster_core_distance.html
index 7b4855c..dfa6aa2 100644
--- a/docs/reference/dot-compute_cluster_core_distance.html
+++ b/docs/reference/dot-compute_cluster_core_distance.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/dot-compute_mutual_reach_dists.html b/docs/reference/dot-compute_mutual_reach_dists.html
index e2efd19..4b6c83f 100644
--- a/docs/reference/dot-compute_mutual_reach_dists.html
+++ b/docs/reference/dot-compute_mutual_reach_dists.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/dot-compute_pair_to_pair_dists.html b/docs/reference/dot-compute_pair_to_pair_dists.html
index 873f057..de42983 100644
--- a/docs/reference/dot-compute_pair_to_pair_dists.html
+++ b/docs/reference/dot-compute_pair_to_pair_dists.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/dot-convert_singleton_clusters_to_noise.html b/docs/reference/dot-convert_singleton_clusters_to_noise.html
index c57f29f..e4adf46 100644
--- a/docs/reference/dot-convert_singleton_clusters_to_noise.html
+++ b/docs/reference/dot-convert_singleton_clusters_to_noise.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/dot-fn_density_separation.html b/docs/reference/dot-fn_density_separation.html
index 43ff144..0c633b1 100644
--- a/docs/reference/dot-fn_density_separation.html
+++ b/docs/reference/dot-fn_density_separation.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/dot-fn_density_sparseness.html b/docs/reference/dot-fn_density_sparseness.html
index 82789a9..0d5c866 100644
--- a/docs/reference/dot-fn_density_sparseness.html
+++ b/docs/reference/dot-fn_density_sparseness.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/dot-get_internal_objects.html b/docs/reference/dot-get_internal_objects.html
index eca051a..cb0de3d 100644
--- a/docs/reference/dot-get_internal_objects.html
+++ b/docs/reference/dot-get_internal_objects.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/dot-get_submatrix.html b/docs/reference/dot-get_submatrix.html
index 3c45190..53976c4 100644
--- a/docs/reference/dot-get_submatrix.html
+++ b/docs/reference/dot-get_submatrix.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/emb2knn.html b/docs/reference/emb2knn.html
index ee246fe..8707d22 100644
--- a/docs/reference/emb2knn.html
+++ b/docs/reference/emb2knn.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/emb2snn.html b/docs/reference/emb2snn.html
index ff12053..d6d8b79 100644
--- a/docs/reference/emb2snn.html
+++ b/docs/reference/emb2snn.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
@@ -86,9 +87,9 @@ Value Examples
d1 <- mockData()
emb2snn(as.matrix(d1[,1:2]), k=5)
-#> IGRAPH 1ee951a U-W- 40 257 --
+#> IGRAPH 09455b4 U-W- 40 257 --
#> + attr: weight (e/n)
-#> + edges from 1ee951a:
+#> + edges from 09455b4:
#> [1] 1-- 3 3-- 4 2-- 4 1-- 5 3-- 6 1-- 6 4-- 6 5-- 7 1-- 7 6-- 7
#> [11] 3-- 7 4-- 7 4-- 8 3-- 8 7-- 8 5-- 9 7-- 9 8-- 9 2--10 3--10
#> [21] 6--10 9--11 8--11 7--11 4--11 3--11 6--12 7--12 1--12 3--12
diff --git a/docs/reference/findSpatialKNN.html b/docs/reference/findSpatialKNN.html
index 03a3e95..9a6b881 100644
--- a/docs/reference/findSpatialKNN.html
+++ b/docs/reference/findSpatialKNN.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/fuzzyHardMetrics.html b/docs/reference/fuzzyHardMetrics.html
index 5750b4d..3fae877 100644
--- a/docs/reference/fuzzyHardMetrics.html
+++ b/docs/reference/fuzzyHardMetrics.html
@@ -19,7 +19,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -31,6 +31,7 @@
Articles
Reference
@@ -90,7 +91,9 @@ ArgumentsfuzzyTrue
An object coercible to a numeric matrix with membership
probability of elements (rows) in clusters (columns). Must have the same
-number of rows as the length of hardTrue
.
+number of rows as the length of hardTrue
. Also note that the columns of
+fuzzyTrue
should be in the order of the levels (or integer values) of
+hardTrue
.
hardPred
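The added note makes the column-order requirement explicit: column j of fuzzyTrue must correspond to level (or integer value) j of hardTrue. A minimal illustration with hypothetical data (not taken from the package):

# R sketch: fuzzyTrue columns follow the level order of hardTrue.
hardTrue <- factor(c("A", "A", "B", "C"))
fuzzyTrue <- rbind(c(0.90, 0.05, 0.05),
                   c(0.80, 0.10, 0.10),
                   c(0.10, 0.85, 0.05),
                   c(0.05, 0.05, 0.90))
colnames(fuzzyTrue) <- levels(hardTrue)  # column j corresponds to level j
stopifnot(nrow(fuzzyTrue) == length(hardTrue),
          ncol(fuzzyTrue) == nlevels(hardTrue))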
diff --git a/docs/reference/fuzzyHardMetrics2.html b/docs/reference/fuzzyHardMetrics2.html
new file mode 100644
index 0000000..b34bcc8
--- /dev/null
+++ b/docs/reference/fuzzyHardMetrics2.html
@@ -0,0 +1,232 @@
+Compute fuzzy-hard metrics with lower memory requirement — fuzzyHardMetrics2 • poem
+poem
+0.99.2
Usage
+
fuzzyHardMetrics2(
+  hardTrue,
+  fuzzyTrue,
+  hardPred,
+  nperms = 10,
+  returnElementPairAccuracy = FALSE,
+  verbose = TRUE,
+  BPPARAM = BiocParallel::SerialParam()
+)
+
+
+
+
Arguments
+
+
+
hardTrue
+An atomic vector coercible to a factor or integer vector
+containing the true hard labels. Must have the same length as hardPred
.
+
+
+fuzzyTrue
+An object coercible to a numeric matrix with membership
+probability of elements (rows) in clusters (columns). Must have the same
+number of rows as the length of hardTrue
. Also note that the columns of
+fuzzyTrue
should be in the order of the levels (or integer values) of
+hardTrue
.
+
+
+hardPred
+An atomic vector coercible to a factor or integer vector
+containing the predicted hard labels.
+
+
+nperms
+The number of permutations (for correction for chance). If
+NULL (default), a first set of 10 permutations will be run to estimate
+whether the variation across permutations is above 0.0025, in which case
+more (max 1000) permutations will be run.
+
+
+returnElementPairAccuracy
+Logical. If TRUE, returns the per-element
+pair accuracy instead of the various partition-level and dataset-level metrics.
+Default FALSE.
+
+
+verbose
+Logical; whether to print info and warnings, including the
+standard error of the mean across permutations (giving an idea of the
+precision of the adjusted metrics).
+
+
+BPPARAM
+BiocParallel params for multithreading (default none)
+
+
+
+
Value
+
A list of metrics:
+
NDC
+Hullermeier's NDC (fuzzy rand index)
+
+ACI
+Ambrosio's Adjusted Concordance Index (ACI), i.e. a
+permutation-based fuzzy version of the adjusted Rand index.
+
+fuzzyWH
+Fuzzy Wallace Homogeneity index
+
+fuzzyWC
+Fuzzy Wallace Completeness index
+
+fuzzyAWH
+Adjusted fuzzy Wallace Homogeneity index
+
+fuzzyAWC
+Adjusted fuzzy Wallace Completeness index
+
+
+
+
References
+
Hullermeier et al. 2012; 10.1109/TFUZZ.2011.2179303;
+
D'Ambrosio et al. 2021; 10.1007/s00357-020-09367-0
+
+
+
See also
+
poem::fuzzyHardMetrics().
+
+
+
Author
+
Pierre-Luc Germain
+
+
+
+
Examples
+
# generate a fuzzy truth:
+fuzzyTrue <- matrix(c(
+  0.95, 0.025, 0.025,
+  0.98, 0.01, 0.01,
+  0.96, 0.02, 0.02,
+  0.95, 0.04, 0.01,
+  0.95, 0.01, 0.04,
+  0.99, 0.005, 0.005,
+  0.025, 0.95, 0.025,
+  0.97, 0.02, 0.01,
+  0.025, 0.025, 0.95),
+  ncol = 3, byrow = TRUE)
+# a hard truth:
+hardTrue <- apply(fuzzyTrue, 1, FUN = which.max)
+# some predicted labels:
+hardPred <- c(1, 1, 1, 1, 1, 1, 2, 2, 2)
+fuzzyHardMetrics2(hardTrue, fuzzyTrue, hardPred, nperms = 3)
+#> Standard error of the mean NDC across permutations:0.00016
+#> $NDC
+#> [1] 0.7581944
+#>
+#> $ACI
+#> [1] 0.4675841
+#>
+#> $fuzzyWH
+#> $fuzzyWH$global
+#> [1] 0.7581944
+#>
+#> $fuzzyWH$perPartition
+#> 1 2
+#> 0.8772917 0.5200000
+#>
+#>
+#> $fuzzyWC
+#> $fuzzyWC$global
+#> [1] 0.7647297
+#>
+#> $fuzzyWC$perPartition
+#> 1 2 3
+#> 0.7558929 0.7922222 0.7922222
+#>
+#>
+#> $fuzzyAWH
+#> $fuzzyAWH$global
+#> [1] 0.4675841
+#>
+#> $fuzzyAWH$perPartition
+#> 1 2
+#> 0.6921871 0.1507556
+#>
+#>
+#> $fuzzyAWC
+#> $fuzzyAWC$global
+#> [1] 0.4675841
+#>
+#> $fuzzyAWC$perPartition
+#> 1 2 3
+#> 0.4763455 0.3407756 0.5024390
+#>
+#>
+
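Since fuzzyHardMetrics2() is documented as the lower-memory counterpart of fuzzyHardMetrics(), a typical use is on larger datasets, where the permutations used for chance correction can also be distributed via BPPARAM. A sketch reusing the example objects above (MulticoreParam is a generic BiocParallel backend, not something specific to poem):

# R sketch: more permutations for a tighter chance correction, run in parallel;
# the returned metrics are interpreted as in fuzzyHardMetrics().
res <- fuzzyHardMetrics2(hardTrue, fuzzyTrue, hardPred,
                         nperms = 100,
                         BPPARAM = BiocParallel::MulticoreParam(workers = 2))
res$ACI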
diff --git a/docs/reference/fuzzyHardSpotConcordance.html b/docs/reference/fuzzyHardSpotConcordance.html
index d64f535..78f5ad3 100644
--- a/docs/reference/fuzzyHardSpotConcordance.html
+++ b/docs/reference/fuzzyHardSpotConcordance.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
@@ -114,8 +115,7 @@ Examples# some predicted labels:
hardPred <- c(1, 1, 1, 1, 1, 1, 2, 2, 2)
fuzzyHardSpotConcordance(hardTrue, fuzzyTrue, hardPred)
-#> 1 2 3 4 5 6 7 8 9
-#> 0.87750 0.87625 0.87625 0.87750 0.87875 0.87750 0.76625 0.02750 0.76625
+#> [1] 0.87750 0.87625 0.87625 0.87750 0.87875 0.87750 0.76625 0.02750 0.76625
On this page
diff --git a/docs/reference/fuzzyPartitionMetrics.html b/docs/reference/fuzzyPartitionMetrics.html
index 0f96553..bc33b96 100644
--- a/docs/reference/fuzzyPartitionMetrics.html
+++ b/docs/reference/fuzzyPartitionMetrics.html
@@ -15,7 +15,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -27,6 +27,7 @@
Articles
Reference
@@ -181,12 +182,12 @@ Examples
colnames(m1) <- colnames(m2) <- LETTERS[1:3]
fuzzyPartitionMetrics(m1, m2)
#> Running 100 extra permutations.
-#> Standard error of the mean NDC across permutations:0.00212
+#> Standard error of the mean NDC across permutations:0.00216
#> $NDC
#> [1] 0.5338889
#>
#> $ACI
-#> [1] 0.08344258
+#> [1] 0.08249493
#>
#> $fuzzyWH
#> $fuzzyWH$global
@@ -208,20 +209,20 @@ Examples#>
#> $fuzzyAWH
#> $fuzzyAWH$global
-#> [1] 0.2114734
+#> [1] 0.2110446
#>
#> $fuzzyAWH$perPartition
#> A B C
-#> 0.8554001 0.8021785 -1.1479958
+#> 0.8450279 0.8102834 -1.0825844
#>
#>
#> $fuzzyAWC
#> $fuzzyAWC$global
-#> [1] 0.04973933
+#> [1] 0.04921198
#>
#> $fuzzyAWC$perPartition
#> A B C
-#> 0.05005958 -0.03133296 0.02342049
+#> 0.04951609 -0.05389235 0.05126432
#>
#>
diff --git a/docs/reference/fuzzySpotConcordance.html b/docs/reference/fuzzySpotConcordance.html
index 63e2857..6dd55b8 100644
--- a/docs/reference/fuzzySpotConcordance.html
+++ b/docs/reference/fuzzySpotConcordance.html
@@ -1,5 +1,5 @@
-Per-element concordance between two fuzzy partitions — fuzzySpotConcordance • poem
+Per-element concordance between two fuzzy partitions — fuzzySpotConcordance • poem
Skip to contents
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
@@ -44,7 +45,7 @@ Per-element concordance between two fuzzy partitions
-
Per-element concordance between two fuzzy partitionings
+
Per-element concordance between two fuzzy partitions
diff --git a/docs/reference/getEmbeddingClassMetrics.html b/docs/reference/getEmbeddingClassMetrics.html
index b5001c1..59468de 100644
--- a/docs/reference/getEmbeddingClassMetrics.html
+++ b/docs/reference/getEmbeddingClassMetrics.html
@@ -7,7 +7,7 @@
poem
-
0.99.1
+
0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/getEmbeddingElementMetrics.html b/docs/reference/getEmbeddingElementMetrics.html
index 6a34cbf..d678421 100644
--- a/docs/reference/getEmbeddingElementMetrics.html
+++ b/docs/reference/getEmbeddingElementMetrics.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/getEmbeddingGlobalMetrics.html b/docs/reference/getEmbeddingGlobalMetrics.html
index d860a18..5b95755 100644
--- a/docs/reference/getEmbeddingGlobalMetrics.html
+++ b/docs/reference/getEmbeddingGlobalMetrics.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/getEmbeddingMetrics.html b/docs/reference/getEmbeddingMetrics.html
index c62deb8..018bb30 100644
--- a/docs/reference/getEmbeddingMetrics.html
+++ b/docs/reference/getEmbeddingMetrics.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
On this page
diff --git a/docs/reference/getFuzzyLabel.html b/docs/reference/getFuzzyLabel.html
index 32b333b..e78dcf2 100644
--- a/docs/reference/getFuzzyLabel.html
+++ b/docs/reference/getFuzzyLabel.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/getFuzzyPartitionElementMetrics.html b/docs/reference/getFuzzyPartitionElementMetrics.html
index dccc6a6..ecbc1f0 100644
--- a/docs/reference/getFuzzyPartitionElementMetrics.html
+++ b/docs/reference/getFuzzyPartitionElementMetrics.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/getFuzzyPartitionMetrics.html b/docs/reference/getFuzzyPartitionMetrics.html
index 57c9750..5e7e05c 100644
--- a/docs/reference/getFuzzyPartitionMetrics.html
+++ b/docs/reference/getFuzzyPartitionMetrics.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
@@ -176,14 +177,14 @@ Examples
getFuzzyPartitionMetrics(fuzzyTrue=m1, fuzzyPred=m2, level="class")
#> Comparing between a fuzzy truth and a fuzzy prediction...
#> Running 100 extra permutations.
-#> Standard error of the mean NDC across permutations:0.00234
+#> Standard error of the mean NDC across permutations:0.00239
#> fuzzyWC fuzzyAWC class fuzzyWH fuzzyAWH cluster
-#> 1 0.3445840 0.04752717 1 NA NA NA
-#> 2 0.7242508 -0.08734288 2 NA NA NA
-#> 3 0.7520319 0.03868490 3 NA NA NA
-#> 4 NA NA NA 0.9359492 0.8097549 1
-#> 5 NA NA NA 0.9214151 0.8187337 2
-#> 6 NA NA NA 0.1588990 -0.9221584 3
+#> 1 0.3445840 0.04564740 1 NA NA NA
+#> 2 0.7242508 -0.08932182 2 NA NA NA
+#> 3 0.7520319 0.03572075 3 NA NA NA
+#> 4 NA NA NA 0.9359492 0.8194385 1
+#> 5 NA NA NA 0.9214151 0.7916720 2
+#> 6 NA NA NA 0.1588990 -0.7970378 3
# generate a fuzzy truth:
fuzzyTrue <- matrix ( c (
@@ -203,14 +204,13 @@ Examples
hardPred <- c(1, 1, 1, 1, 1, 1, 2, 2, 2)
getFuzzyPartitionMetrics(hardPred=hardPred, hardTrue=hardTrue, fuzzyTrue=fuzzyTrue, nperms=3, level="class")
#> Comparing between a fuzzy truth and a hard prediction...
-#> Standard error of the mean NDC across permutations:0.0357
-#> You might want to increase the number of permutations to increase the robustness of the adjusted metrics.
-#> fuzzyWC fuzzyAWC class fuzzyWH fuzzyAWH cluster
-#> 1 0.7195238 0.4677711 1 NA NA NA
-#> 2 1.0000000 NaN 2 NA NA NA
-#> 3 1.0000000 NaN 3 NA NA NA
-#> 4 NA NA NA 1.00000000 1.000000 1
-#> 5 NA NA NA 0.06166667 -3.421466 2
+#> Standard error of the mean NDC across permutations:0.000424
+#> fuzzyWC fuzzyAWC class fuzzyWH fuzzyAWH cluster
+#> 1 0.7195238 0.4977974 1 NA NA NA
+#> 2 1.0000000 NaN 2 NA NA NA
+#> 3 1.0000000 NaN 3 NA NA NA
+#> 4 NA NA NA 1.00000000 1 1
+#> 5 NA NA NA 0.06166667 -Inf 2
diff --git a/docs/reference/getGraphClassMetrics.html b/docs/reference/getGraphClassMetrics.html
index 9dbbc13..d8caa58 100644
--- a/docs/reference/getGraphClassMetrics.html
+++ b/docs/reference/getGraphClassMetrics.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/getGraphElementMetrics.html b/docs/reference/getGraphElementMetrics.html
index 69ad6a0..6741a98 100644
--- a/docs/reference/getGraphElementMetrics.html
+++ b/docs/reference/getGraphElementMetrics.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/getGraphMetrics.html b/docs/reference/getGraphMetrics.html
index 2928763..197f527 100644
--- a/docs/reference/getGraphMetrics.html
+++ b/docs/reference/getGraphMetrics.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
@@ -108,40 +109,40 @@ Examples
data(sp_toys)
data <- sp_toys
getNeighboringPairConcordance(data$label, data$p1, data[,c("x","y")], k=6)
-#> [1] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
+#> [1] 1.0000000 1.0000000 1.0000000 0.7500000 0.2500000 0.5000000 1.0000000
#> [8] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [15] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
+#> [15] 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 0.3333333 0.6666667
#> [22] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [29] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [36] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
+#> [29] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667
+#> [36] 0.3333333 0.6666667 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
#> [43] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [50] 1.0000000 0.7500000 0.6666667 0.6666667 0.6666667 0.6666667 0.8333333
+#> [50] 0.6666667 0.3333333 0.6666667 1.0000000 1.0000000 1.0000000 1.0000000
#> [57] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [64] 1.0000000 1.0000000 0.8000000 0.3333333 0.6666667 0.6666667 0.6666667
-#> [71] 0.3333333 0.8333333 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [78] 1.0000000 1.0000000 1.0000000 0.6666667 0.6666667 1.0000000 1.0000000
-#> [85] 0.8333333 0.5000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [92] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 0.6666667
-#> [99] 1.0000000 1.0000000 0.5000000 0.8333333 1.0000000 1.0000000 1.0000000
+#> [64] 1.0000000 1.0000000 0.6666667 0.3333333 0.6666667 1.0000000 1.0000000
+#> [71] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
+#> [78] 1.0000000 1.0000000 1.0000000 0.6666667 0.3333333 0.6666667 1.0000000
+#> [85] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
+#> [92] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 0.3333333
+#> [99] 0.6666667 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
#> [106] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667
-#> [113] 0.5000000 0.6666667 0.5000000 0.6666667 1.0000000 1.0000000 1.0000000
+#> [113] 0.3333333 0.6666667 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
#> [120] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [127] 1.0000000 0.8000000 0.5000000 0.6666667 0.8333333 1.0000000 1.0000000
+#> [127] 1.0000000 0.6666667 0.3333333 0.6666667 1.0000000 1.0000000 1.0000000
#> [134] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [141] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
+#> [141] 1.0000000 1.0000000 0.6666667 0.3333333 0.6666667 1.0000000 1.0000000
#> [148] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [155] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
+#> [155] 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 0.3333333 0.6666667
#> [162] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [169] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [176] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
+#> [169] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 0.3333333
+#> [176] 0.6666667 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
#> [183] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [190] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
+#> [190] 0.6666667 0.3333333 0.6666667 1.0000000 1.0000000 1.0000000 1.0000000
#> [197] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [204] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
+#> [204] 1.0000000 0.6666667 0.3333333 0.6666667 1.0000000 1.0000000 1.0000000
#> [211] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [218] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
+#> [218] 1.0000000 1.0000000 1.0000000 0.6666667 0.3333333 0.6666667 1.0000000
#> [225] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
-#> [232] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000
+#> [232] 1.0000000 1.0000000 1.0000000 1.0000000 0.5000000 0.2500000 0.7500000
#> [239] 1.0000000 1.0000000
diff --git a/docs/reference/getPairConcordance.html b/docs/reference/getPairConcordance.html
index 55218b8..430cd46 100644
--- a/docs/reference/getPairConcordance.html
+++ b/docs/reference/getPairConcordance.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/getPartitionClassMetrics.html b/docs/reference/getPartitionClassMetrics.html
index d6aaf08..5af22e6 100644
--- a/docs/reference/getPartitionClassMetrics.html
+++ b/docs/reference/getPartitionClassMetrics.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/getPartitionElementMetrics.html b/docs/reference/getPartitionElementMetrics.html
index 65b3c20..fb26040 100644
--- a/docs/reference/getPartitionElementMetrics.html
+++ b/docs/reference/getPartitionElementMetrics.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/getPartitionGlobalMetrics.html b/docs/reference/getPartitionGlobalMetrics.html
index fd75b1e..8b1180d 100644
--- a/docs/reference/getPartitionGlobalMetrics.html
+++ b/docs/reference/getPartitionGlobalMetrics.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/getPartitionMetrics.html b/docs/reference/getPartitionMetrics.html
index 5e72bd1..b9516b3 100644
--- a/docs/reference/getPartitionMetrics.html
+++ b/docs/reference/getPartitionMetrics.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/getSpatialClassExternalMetrics.html b/docs/reference/getSpatialClassExternalMetrics.html
index 5ede1b4..d9025fe 100644
--- a/docs/reference/getSpatialClassExternalMetrics.html
+++ b/docs/reference/getSpatialClassExternalMetrics.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/getSpatialClassInternalMetrics.html b/docs/reference/getSpatialClassInternalMetrics.html
index 6c45fb5..8c3279e 100644
--- a/docs/reference/getSpatialClassInternalMetrics.html
+++ b/docs/reference/getSpatialClassInternalMetrics.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/getSpatialElementExternalMetrics.html b/docs/reference/getSpatialElementExternalMetrics.html
index d2e7435..f04faba 100644
--- a/docs/reference/getSpatialElementExternalMetrics.html
+++ b/docs/reference/getSpatialElementExternalMetrics.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/getSpatialElementInternalMetrics.html b/docs/reference/getSpatialElementInternalMetrics.html
index 4f01f1c..c632968 100644
--- a/docs/reference/getSpatialElementInternalMetrics.html
+++ b/docs/reference/getSpatialElementInternalMetrics.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/getSpatialExternalMetrics.html b/docs/reference/getSpatialExternalMetrics.html
index ded56e2..f803a60 100644
--- a/docs/reference/getSpatialExternalMetrics.html
+++ b/docs/reference/getSpatialExternalMetrics.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
@@ -137,12 +138,12 @@ Examples data <- sp_toys
getSpatialExternalMetrics ( data $ label , data $ p1 , data [ ,c ( "x" , "y" ) ] , k= 6 , level= "class" )
#> Comparing between a fuzzy truth and a hard prediction...
-#> Standard error of the mean NDC across permutations:0.000636
+#> Standard error of the mean NDC across permutations:0.000521
#> SpatialWH SpatialAWH SpatialWC SpatialAWC class cluster
-#> 1 NA NA 0.779245 0.5328745 1 NA
-#> 2 NA NA 1.000000 1.0000000 2 NA
-#> 3 1.000000 1.0000000 NA NA NA 1
-#> 4 0.804378 0.5906445 NA NA NA 2
+#> 1 NA NA 0.8078698 0.5987364 1 NA
+#> 2 NA NA 1.0000000 1.0000000 2 NA
+#> 3 1.0000000 1.0000000 NA NA NA 1
+#> 4 0.8323893 0.6502603 NA NA NA 2
diff --git a/docs/reference/getSpatialGlobalExternalMetrics.html b/docs/reference/getSpatialGlobalExternalMetrics.html
index 7e537e5..99daf6b 100644
--- a/docs/reference/getSpatialGlobalExternalMetrics.html
+++ b/docs/reference/getSpatialGlobalExternalMetrics.html
@@ -11,7 +11,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -23,6 +23,7 @@
Articles
Reference
diff --git a/docs/reference/getSpatialGlobalInternalMetrics.html b/docs/reference/getSpatialGlobalInternalMetrics.html
index eaad58c..cb3a84a 100644
--- a/docs/reference/getSpatialGlobalInternalMetrics.html
+++ b/docs/reference/getSpatialGlobalInternalMetrics.html
@@ -11,7 +11,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -23,6 +23,7 @@
Articles
Reference
diff --git a/docs/reference/getSpatialInternalMetrics.html b/docs/reference/getSpatialInternalMetrics.html
index 3c56e7e..f882a2d 100644
--- a/docs/reference/getSpatialInternalMetrics.html
+++ b/docs/reference/getSpatialInternalMetrics.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
@@ -115,6 +116,7 @@ Examples data ( sp_toys )
data <- sp_toys
getSpatialInternalMetrics ( data $ label , data [ ,c ( "x" , "y" ) ] , k= 6 , level= "class" )
+#> the specified variable is considered as categorical...
#> class PAS ELSA.Ea ELSA.Ec ELSA CHAOS
#> 1 1 0.008333333 0.07106927 0.1940084 0.06040663 0.2305866
#> 2 2 0.008333333 0.06918805 0.1944579 0.05840334 0.2305866
diff --git a/docs/reference/index.html b/docs/reference/index.html
index 3acb8ef..0cfb10e 100644
--- a/docs/reference/index.html
+++ b/docs/reference/index.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
@@ -136,6 +137,12 @@ Fuzzy metrics fuzzyHardMetrics2()
+
+
+ Compute fuzzy-hard metrics with lower memory requirement
+
+
fuzzyPartitionMetrics()
diff --git a/docs/reference/knnComposition.html b/docs/reference/knnComposition.html
index 7c143ae..a60e924 100644
--- a/docs/reference/knnComposition.html
+++ b/docs/reference/knnComposition.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/matchSets.html b/docs/reference/matchSets.html
index dd50d02..b939b46 100644
--- a/docs/reference/matchSets.html
+++ b/docs/reference/matchSets.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/metric_info.html b/docs/reference/metric_info.html
index 9f73512..2065da0 100644
--- a/docs/reference/metric_info.html
+++ b/docs/reference/metric_info.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/mockData.html b/docs/reference/mockData.html
index 76915bf..79931b3 100644
--- a/docs/reference/mockData.html
+++ b/docs/reference/mockData.html
@@ -9,7 +9,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -21,6 +21,7 @@
Articles
Reference
diff --git a/docs/reference/nnWeightedAccuracy.html b/docs/reference/nnWeightedAccuracy.html
index 84fc4b5..00e5dd4 100644
--- a/docs/reference/nnWeightedAccuracy.html
+++ b/docs/reference/nnWeightedAccuracy.html
@@ -13,7 +13,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -25,6 +25,7 @@
Articles
Reference
diff --git a/docs/reference/noisy_moon.html b/docs/reference/noisy_moon.html
index aa1850b..1aefc75 100644
--- a/docs/reference/noisy_moon.html
+++ b/docs/reference/noisy_moon.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/setMatchingAccuracy.html b/docs/reference/setMatchingAccuracy.html
index 9037545..6417331 100644
--- a/docs/reference/setMatchingAccuracy.html
+++ b/docs/reference/setMatchingAccuracy.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/sp_toys.html b/docs/reference/sp_toys.html
index 28b2e64..4de11bb 100644
--- a/docs/reference/sp_toys.html
+++ b/docs/reference/sp_toys.html
@@ -7,7 +7,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -19,6 +19,7 @@
Articles
Reference
diff --git a/docs/reference/toyExamples.html b/docs/reference/toyExamples.html
index ea5d174..bf1c133 100644
--- a/docs/reference/toyExamples.html
+++ b/docs/reference/toyExamples.html
@@ -13,7 +13,7 @@
poem
- 0.99.1
+ 0.99.2
@@ -25,6 +25,7 @@
Articles
Reference
diff --git a/docs/search.json b/docs/search.json
index 82b8491..2735d87 100644
--- a/docs/search.json
+++ b/docs/search.json
@@ -1 +1 @@
-[{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":null,"dir":"","previous_headings":"","what":"GNU General Public License","title":"GNU General Public License","text":"Version 3, 29 June 2007Copyright © 2007 Free Software Foundation, Inc. Everyone permitted copy distribute verbatim copies license document, changing allowed.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"preamble","dir":"","previous_headings":"","what":"Preamble","title":"GNU General Public License","text":"GNU General Public License free, copyleft license software kinds works. licenses software practical works designed take away freedom share change works. contrast, GNU General Public License intended guarantee freedom share change versions program–make sure remains free software users. , Free Software Foundation, use GNU General Public License software; applies also work released way authors. can apply programs, . speak free software, referring freedom, price. General Public Licenses designed make sure freedom distribute copies free software (charge wish), receive source code can get want , can change software use pieces new free programs, know can things. protect rights, need prevent others denying rights asking surrender rights. Therefore, certain responsibilities distribute copies software, modify : responsibilities respect freedom others. example, distribute copies program, whether gratis fee, must pass recipients freedoms received. must make sure , , receive can get source code. must show terms know rights. Developers use GNU GPL protect rights two steps: (1) assert copyright software, (2) offer License giving legal permission copy, distribute /modify . developers’ authors’ protection, GPL clearly explains warranty free software. users’ authors’ sake, GPL requires modified versions marked changed, problems attributed erroneously authors previous versions. devices designed deny users access install run modified versions software inside , although manufacturer can . fundamentally incompatible aim protecting users’ freedom change software. systematic pattern abuse occurs area products individuals use, precisely unacceptable. Therefore, designed version GPL prohibit practice products. problems arise substantially domains, stand ready extend provision domains future versions GPL, needed protect freedom users. Finally, every program threatened constantly software patents. States allow patents restrict development use software general-purpose computers, , wish avoid special danger patents applied free program make effectively proprietary. prevent , GPL assures patents used render program non-free. precise terms conditions copying, distribution modification follow.","code":""},{"path":[]},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_0-definitions","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"0. Definitions","title":"GNU General Public License","text":"“License” refers version 3 GNU General Public License. “Copyright” also means copyright-like laws apply kinds works, semiconductor masks. “Program” refers copyrightable work licensed License. licensee addressed “”. “Licensees” “recipients” may individuals organizations. “modify” work means copy adapt part work fashion requiring copyright permission, making exact copy. resulting work called “modified version” earlier work work “based ” earlier work. “covered work” means either unmodified Program work based Program. 
“propagate” work means anything , without permission, make directly secondarily liable infringement applicable copyright law, except executing computer modifying private copy. Propagation includes copying, distribution (without modification), making available public, countries activities well. “convey” work means kind propagation enables parties make receive copies. Mere interaction user computer network, transfer copy, conveying. interactive user interface displays “Appropriate Legal Notices” extent includes convenient prominently visible feature (1) displays appropriate copyright notice, (2) tells user warranty work (except extent warranties provided), licensees may convey work License, view copy License. interface presents list user commands options, menu, prominent item list meets criterion.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_1-source-code","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"1. Source Code","title":"GNU General Public License","text":"“source code” work means preferred form work making modifications . “Object code” means non-source form work. “Standard Interface” means interface either official standard defined recognized standards body, , case interfaces specified particular programming language, one widely used among developers working language. “System Libraries” executable work include anything, work whole, () included normal form packaging Major Component, part Major Component, (b) serves enable use work Major Component, implement Standard Interface implementation available public source code form. “Major Component”, context, means major essential component (kernel, window system, ) specific operating system () executable work runs, compiler used produce work, object code interpreter used run . “Corresponding Source” work object code form means source code needed generate, install, (executable work) run object code modify work, including scripts control activities. However, include work’s System Libraries, general-purpose tools generally available free programs used unmodified performing activities part work. example, Corresponding Source includes interface definition files associated source files work, source code shared libraries dynamically linked subprograms work specifically designed require, intimate data communication control flow subprograms parts work. Corresponding Source need include anything users can regenerate automatically parts Corresponding Source. Corresponding Source work source code form work.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_2-basic-permissions","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"2. Basic Permissions","title":"GNU General Public License","text":"rights granted License granted term copyright Program, irrevocable provided stated conditions met. License explicitly affirms unlimited permission run unmodified Program. output running covered work covered License output, given content, constitutes covered work. License acknowledges rights fair use equivalent, provided copyright law. may make, run propagate covered works convey, without conditions long license otherwise remains force. may convey covered works others sole purpose make modifications exclusively , provide facilities running works, provided comply terms License conveying material control copyright. thus making running covered works must exclusively behalf, direction control, terms prohibit making copies copyrighted material outside relationship . 
Conveying circumstances permitted solely conditions stated . Sublicensing allowed; section 10 makes unnecessary.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_3-protecting-users-legal-rights-from-anti-circumvention-law","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"3. Protecting Users’ Legal Rights From Anti-Circumvention Law","title":"GNU General Public License","text":"covered work shall deemed part effective technological measure applicable law fulfilling obligations article 11 WIPO copyright treaty adopted 20 December 1996, similar laws prohibiting restricting circumvention measures. convey covered work, waive legal power forbid circumvention technological measures extent circumvention effected exercising rights License respect covered work, disclaim intention limit operation modification work means enforcing, work’s users, third parties’ legal rights forbid circumvention technological measures.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_4-conveying-verbatim-copies","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"4. Conveying Verbatim Copies","title":"GNU General Public License","text":"may convey verbatim copies Program’s source code receive , medium, provided conspicuously appropriately publish copy appropriate copyright notice; keep intact notices stating License non-permissive terms added accord section 7 apply code; keep intact notices absence warranty; give recipients copy License along Program. may charge price price copy convey, may offer support warranty protection fee.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_5-conveying-modified-source-versions","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"5. Conveying Modified Source Versions","title":"GNU General Public License","text":"may convey work based Program, modifications produce Program, form source code terms section 4, provided also meet conditions: ) work must carry prominent notices stating modified , giving relevant date. b) work must carry prominent notices stating released License conditions added section 7. requirement modifies requirement section 4 “keep intact notices”. c) must license entire work, whole, License anyone comes possession copy. License therefore apply, along applicable section 7 additional terms, whole work, parts, regardless packaged. License gives permission license work way, invalidate permission separately received . d) work interactive user interfaces, must display Appropriate Legal Notices; however, Program interactive interfaces display Appropriate Legal Notices, work need make . compilation covered work separate independent works, nature extensions covered work, combined form larger program, volume storage distribution medium, called “aggregate” compilation resulting copyright used limit access legal rights compilation’s users beyond individual works permit. Inclusion covered work aggregate cause License apply parts aggregate.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_6-conveying-non-source-forms","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"6. 
Conveying Non-Source Forms","title":"GNU General Public License","text":"may convey covered work object code form terms sections 4 5, provided also convey machine-readable Corresponding Source terms License, one ways: ) Convey object code , embodied , physical product (including physical distribution medium), accompanied Corresponding Source fixed durable physical medium customarily used software interchange. b) Convey object code , embodied , physical product (including physical distribution medium), accompanied written offer, valid least three years valid long offer spare parts customer support product model, give anyone possesses object code either (1) copy Corresponding Source software product covered License, durable physical medium customarily used software interchange, price reasonable cost physically performing conveying source, (2) access copy Corresponding Source network server charge. c) Convey individual copies object code copy written offer provide Corresponding Source. alternative allowed occasionally noncommercially, received object code offer, accord subsection 6b. d) Convey object code offering access designated place (gratis charge), offer equivalent access Corresponding Source way place charge. need require recipients copy Corresponding Source along object code. place copy object code network server, Corresponding Source may different server (operated third party) supports equivalent copying facilities, provided maintain clear directions next object code saying find Corresponding Source. Regardless server hosts Corresponding Source, remain obligated ensure available long needed satisfy requirements. e) Convey object code using peer--peer transmission, provided inform peers object code Corresponding Source work offered general public charge subsection 6d. separable portion object code, whose source code excluded Corresponding Source System Library, need included conveying object code work. “User Product” either (1) “consumer product”, means tangible personal property normally used personal, family, household purposes, (2) anything designed sold incorporation dwelling. determining whether product consumer product, doubtful cases shall resolved favor coverage. particular product received particular user, “normally used” refers typical common use class product, regardless status particular user way particular user actually uses, expects expected use, product. product consumer product regardless whether product substantial commercial, industrial non-consumer uses, unless uses represent significant mode use product. “Installation Information” User Product means methods, procedures, authorization keys, information required install execute modified versions covered work User Product modified version Corresponding Source. information must suffice ensure continued functioning modified object code case prevented interfered solely modification made. convey object code work section , , specifically use , User Product, conveying occurs part transaction right possession use User Product transferred recipient perpetuity fixed term (regardless transaction characterized), Corresponding Source conveyed section must accompanied Installation Information. requirement apply neither third party retains ability install modified object code User Product (example, work installed ROM). requirement provide Installation Information include requirement continue provide support service, warranty, updates work modified installed recipient, User Product modified installed. 
Access network may denied modification materially adversely affects operation network violates rules protocols communication across network. Corresponding Source conveyed, Installation Information provided, accord section must format publicly documented (implementation available public source code form), must require special password key unpacking, reading copying.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_7-additional-terms","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"7. Additional Terms","title":"GNU General Public License","text":"“Additional permissions” terms supplement terms License making exceptions one conditions. Additional permissions applicable entire Program shall treated though included License, extent valid applicable law. additional permissions apply part Program, part may used separately permissions, entire Program remains governed License without regard additional permissions. convey copy covered work, may option remove additional permissions copy, part . (Additional permissions may written require removal certain cases modify work.) may place additional permissions material, added covered work, can give appropriate copyright permission. Notwithstanding provision License, material add covered work, may (authorized copyright holders material) supplement terms License terms: ) Disclaiming warranty limiting liability differently terms sections 15 16 License; b) Requiring preservation specified reasonable legal notices author attributions material Appropriate Legal Notices displayed works containing ; c) Prohibiting misrepresentation origin material, requiring modified versions material marked reasonable ways different original version; d) Limiting use publicity purposes names licensors authors material; e) Declining grant rights trademark law use trade names, trademarks, service marks; f) Requiring indemnification licensors authors material anyone conveys material (modified versions ) contractual assumptions liability recipient, liability contractual assumptions directly impose licensors authors. non-permissive additional terms considered “restrictions” within meaning section 10. Program received , part , contains notice stating governed License along term restriction, may remove term. license document contains restriction permits relicensing conveying License, may add covered work material governed terms license document, provided restriction survive relicensing conveying. add terms covered work accord section, must place, relevant source files, statement additional terms apply files, notice indicating find applicable terms. Additional terms, permissive non-permissive, may stated form separately written license, stated exceptions; requirements apply either way.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_8-termination","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"8. Termination","title":"GNU General Public License","text":"may propagate modify covered work except expressly provided License. attempt otherwise propagate modify void, automatically terminate rights License (including patent licenses granted third paragraph section 11). However, cease violation License, license particular copyright holder reinstated () provisionally, unless copyright holder explicitly finally terminates license, (b) permanently, copyright holder fails notify violation reasonable means prior 60 days cessation. 
Moreover, license particular copyright holder reinstated permanently copyright holder notifies violation reasonable means, first time received notice violation License (work) copyright holder, cure violation prior 30 days receipt notice. Termination rights section terminate licenses parties received copies rights License. rights terminated permanently reinstated, qualify receive new licenses material section 10.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_9-acceptance-not-required-for-having-copies","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"9. Acceptance Not Required for Having Copies","title":"GNU General Public License","text":"required accept License order receive run copy Program. Ancillary propagation covered work occurring solely consequence using peer--peer transmission receive copy likewise require acceptance. However, nothing License grants permission propagate modify covered work. actions infringe copyright accept License. Therefore, modifying propagating covered work, indicate acceptance License .","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_10-automatic-licensing-of-downstream-recipients","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"10. Automatic Licensing of Downstream Recipients","title":"GNU General Public License","text":"time convey covered work, recipient automatically receives license original licensors, run, modify propagate work, subject License. responsible enforcing compliance third parties License. “entity transaction” transaction transferring control organization, substantially assets one, subdividing organization, merging organizations. propagation covered work results entity transaction, party transaction receives copy work also receives whatever licenses work party’s predecessor interest give previous paragraph, plus right possession Corresponding Source work predecessor interest, predecessor can get reasonable efforts. may impose restrictions exercise rights granted affirmed License. example, may impose license fee, royalty, charge exercise rights granted License, may initiate litigation (including cross-claim counterclaim lawsuit) alleging patent claim infringed making, using, selling, offering sale, importing Program portion .","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_11-patents","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"11. Patents","title":"GNU General Public License","text":"“contributor” copyright holder authorizes use License Program work Program based. work thus licensed called contributor’s “contributor version”. contributor’s “essential patent claims” patent claims owned controlled contributor, whether already acquired hereafter acquired, infringed manner, permitted License, making, using, selling contributor version, include claims infringed consequence modification contributor version. purposes definition, “control” includes right grant patent sublicenses manner consistent requirements License. contributor grants non-exclusive, worldwide, royalty-free patent license contributor’s essential patent claims, make, use, sell, offer sale, import otherwise run, modify propagate contents contributor version. following three paragraphs, “patent license” express agreement commitment, however denominated, enforce patent (express permission practice patent covenant sue patent infringement). “grant” patent license party means make agreement commitment enforce patent party. 
convey covered work, knowingly relying patent license, Corresponding Source work available anyone copy, free charge terms License, publicly available network server readily accessible means, must either (1) cause Corresponding Source available, (2) arrange deprive benefit patent license particular work, (3) arrange, manner consistent requirements License, extend patent license downstream recipients. “Knowingly relying” means actual knowledge , patent license, conveying covered work country, recipient’s use covered work country, infringe one identifiable patents country reason believe valid. , pursuant connection single transaction arrangement, convey, propagate procuring conveyance , covered work, grant patent license parties receiving covered work authorizing use, propagate, modify convey specific copy covered work, patent license grant automatically extended recipients covered work works based . patent license “discriminatory” include within scope coverage, prohibits exercise , conditioned non-exercise one rights specifically granted License. may convey covered work party arrangement third party business distributing software, make payment third party based extent activity conveying work, third party grants, parties receive covered work , discriminatory patent license () connection copies covered work conveyed (copies made copies), (b) primarily connection specific products compilations contain covered work, unless entered arrangement, patent license granted, prior 28 March 2007. Nothing License shall construed excluding limiting implied license defenses infringement may otherwise available applicable patent law.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_12-no-surrender-of-others-freedom","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"12. No Surrender of Others’ Freedom","title":"GNU General Public License","text":"conditions imposed (whether court order, agreement otherwise) contradict conditions License, excuse conditions License. convey covered work satisfy simultaneously obligations License pertinent obligations, consequence may convey . example, agree terms obligate collect royalty conveying convey Program, way satisfy terms License refrain entirely conveying Program.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_13-use-with-the-gnu-affero-general-public-license","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"13. Use with the GNU Affero General Public License","title":"GNU General Public License","text":"Notwithstanding provision License, permission link combine covered work work licensed version 3 GNU Affero General Public License single combined work, convey resulting work. terms License continue apply part covered work, special requirements GNU Affero General Public License, section 13, concerning interaction network apply combination .","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_14-revised-versions-of-this-license","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"14. Revised Versions of this License","title":"GNU General Public License","text":"Free Software Foundation may publish revised /new versions GNU General Public License time time. new versions similar spirit present version, may differ detail address new problems concerns. version given distinguishing version number. 
Program specifies certain numbered version GNU General Public License “later version” applies , option following terms conditions either numbered version later version published Free Software Foundation. Program specify version number GNU General Public License, may choose version ever published Free Software Foundation. Program specifies proxy can decide future versions GNU General Public License can used, proxy’s public statement acceptance version permanently authorizes choose version Program. Later license versions may give additional different permissions. However, additional obligations imposed author copyright holder result choosing follow later version.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_15-disclaimer-of-warranty","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"15. Disclaimer of Warranty","title":"GNU General Public License","text":"WARRANTY PROGRAM, EXTENT PERMITTED APPLICABLE LAW. EXCEPT OTHERWISE STATED WRITING COPYRIGHT HOLDERS /PARTIES PROVIDE PROGRAM “” WITHOUT WARRANTY KIND, EITHER EXPRESSED IMPLIED, INCLUDING, LIMITED , IMPLIED WARRANTIES MERCHANTABILITY FITNESS PARTICULAR PURPOSE. ENTIRE RISK QUALITY PERFORMANCE PROGRAM . PROGRAM PROVE DEFECTIVE, ASSUME COST NECESSARY SERVICING, REPAIR CORRECTION.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_16-limitation-of-liability","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"16. Limitation of Liability","title":"GNU General Public License","text":"EVENT UNLESS REQUIRED APPLICABLE LAW AGREED WRITING COPYRIGHT HOLDER, PARTY MODIFIES /CONVEYS PROGRAM PERMITTED , LIABLE DAMAGES, INCLUDING GENERAL, SPECIAL, INCIDENTAL CONSEQUENTIAL DAMAGES ARISING USE INABILITY USE PROGRAM (INCLUDING LIMITED LOSS DATA DATA RENDERED INACCURATE LOSSES SUSTAINED THIRD PARTIES FAILURE PROGRAM OPERATE PROGRAMS), EVEN HOLDER PARTY ADVISED POSSIBILITY DAMAGES.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_17-interpretation-of-sections-15-and-16","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"17. Interpretation of Sections 15 and 16","title":"GNU General Public License","text":"disclaimer warranty limitation liability provided given local legal effect according terms, reviewing courts shall apply local law closely approximates absolute waiver civil liability connection Program, unless warranty assumption liability accompanies copy Program return fee. END TERMS CONDITIONS","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"how-to-apply-these-terms-to-your-new-programs","dir":"","previous_headings":"","what":"How to Apply These Terms to Your New Programs","title":"GNU General Public License","text":"develop new program, want greatest possible use public, best way achieve make free software everyone can redistribute change terms. , attach following notices program. safest attach start source file effectively state exclusion warranty; file least “copyright” line pointer full notice found. Also add information contact electronic paper mail. program terminal interaction, make output short notice like starts interactive mode: hypothetical commands show w show c show appropriate parts General Public License. course, program’s commands might different; GUI interface, use “box”. also get employer (work programmer) school, , sign “copyright disclaimer” program, necessary. information , apply follow GNU GPL, see . GNU General Public License permit incorporating program proprietary programs. 
program subroutine library, may consider useful permit linking proprietary applications library. want , use GNU Lesser General Public License instead License. first, please read .","code":" Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Copyright (C) This program comes with ABSOLUTELY NO WARRANTY; for details type 'show w'. This is free software, and you are welcome to redistribute it under certain conditions; type 'show c' for details."},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"installation-loading","dir":"Articles","previous_headings":"","what":"Installation & loading","title":"Introduction to `poem`","text":"","code":"if (!requireNamespace(\"devtools\", quietly = TRUE)) install.packages(\"devtools\") devtools::install_github(\"RoseYuan/poem\") library(poem) library(ggplot2) library(dplyr) library(tidyr) library(ggnetwork) library(igraph) library(cowplot)"},{"path":[]},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"what-is-this-package-for","dir":"Articles","previous_headings":"Introduction","what":"What is this package for?","title":"Introduction to `poem`","text":"package provides multiple approaches comparing two partitions1 dataset, evaluating alignment dataset’s embedding/graph representations partition. Besides, package offers methods comparing two fuzzy partitions2 well comparing hard partition fuzzy partition. allows evaluation fuzzy partition results assessing agreement fuzzy hard ground-truth partition. Finally, package implements visualization evaluation metrics tailored domain detection spatially-resolved -omics data. include especially external evaluation metrics (.e. based comparison ground truth labels), also internal metrics.","code":""},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"main-functions","dir":"Articles","previous_headings":"Introduction","what":"Main functions","title":"Introduction to `poem`","text":"package poem includes many metrics perform different kinds evaluations, metrics can retrieved via 6 main wrapper functions. Unless specified, “partition” means “hard” partition. : getEmbeddingMetrics(): Metrics compare embedding data points partition data points. getGraphMetrics(): Metrics compare graph (e.g. kNN/sNN) partition, nodes graph data points partition. getPartitionMetrics(): Metrics compare two partitions dataset. getfuzzyPartitionMetrics(): Metrics compare two fuzzy partitions, compare fuzzy hard partition dataset. getSpatialExternalMetrics(): External metrics evaluating spatial clustering results spatial-aware fashion. non-spatial-aware evaluation, one can directly use getPartitionMetrics(). getSpatialInternalMetrics(): Internal metrics evaluating spatial clustering results spatial-aware fashion. 3 different levels one can perform -mentioned evaluation: element-level, class-level, dataset-level. 
Element-level evaluation reports metric values data point; Class-level evaluation reports metrics classes3 clusters4; dataset-level evaluation returns single metric value whole dataset. following table illustrates available metrics different evaluation levels, main functions used retrieve .","code":"data(metric_info) DT::datatable(metric_info)"},{"path":[]},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"example-data","dir":"Articles","previous_headings":"Getting started","what":"Example data","title":"Introduction to `poem`","text":"showcase main functions, use simulated datasets examples vignette. two datasets, g1 g2, contain 80 data points x y coordinates 4 different classes. plot :","code":"data(toyExamples) g1 <- toyExamples[toyExamples$graph==\"graph1\",] g2 <- toyExamples[toyExamples$graph==\"graph2\",] head(g1) ## graph x y class ## 641 graph1 -0.6290416 -0.487293 class1 ## 642 graph1 -2.5646982 -1.742079 class1 ## 643 graph1 -1.6368716 -1.911560 class1 ## 644 graph1 -1.3671374 -2.120897 class1 ## 645 graph1 -1.5957317 -3.194329 class1 ## 646 graph1 -2.1061245 -1.388003 class1 ggplot(rbind(g1,g2), aes(x,y,color=class, shape=class)) + geom_point() + facet_wrap(~graph) + theme_bw()"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"embedding-evaluation","dir":"Articles","previous_headings":"","what":"Embedding evaluation","title":"Introduction to `poem`","text":"Let’s assume g1 g2 contain two different embeddings set objects. “good” embedding put objects class together, objects different class apart. Since know ground-truth class object, one can evaluation “goodness” embedding calculating embedding evaluation metrics. One can calculate metrics element-wise, class/cluster, whole dataset.","code":""},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"element-level-evaluation","dir":"Articles","previous_headings":"Embedding evaluation","what":"Element-level evaluation","title":"Introduction to `poem`","text":"example, element level, one can calculate Silhouette Width specifying level=\"element\" metrics=c(\"SW\"): output data.frame containing metric values specified level.","code":"sw <- getEmbeddingMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class, metrics=c(\"SW\"), level=\"element\") head(sw) ## class SW ## 641 class1 0.2986628 ## 642 class1 0.5818507 ## 643 class1 0.6299871 ## 644 class1 0.5867285 ## 645 class1 0.5191290 ## 646 class1 0.5679847 g1$sw <- getEmbeddingMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class, metrics=c(\"SW\"), level=\"element\")$SW g2$sw <- getEmbeddingMetrics(x=g2[,c(\"x\",\"y\")], labels=g2$class, metrics=c(\"SW\"), level=\"element\")$SW ggplot(rbind(g1,g2), aes(x, y, color=sw, shape=class)) + geom_point() + facet_wrap(~graph) + theme_bw()"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"class-level-evaluation","dir":"Articles","previous_headings":"Embedding evaluation","what":"Class-level evaluation","title":"Introduction to `poem`","text":"One can also evaluate class level, specifying level=\"class\". Check ?getEmbeddingMetrics see allowed metrics class level. 
example:","code":"cl <- getEmbeddingMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class, metrics=c(\"dbcv\", \"meanSW\"), level=\"class\") head(cl) ## class meanSW dbcv ## 1 class1 0.4240817 -0.37367780 ## 2 class2 0.4897828 -0.34617982 ## 3 class3 0.5021555 0.07752233 ## 4 class4 0.5957709 0.26757842 res1 <- getEmbeddingMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class, metrics=c(\"dbcv\", \"meanSW\"), level=\"class\") res2 <- getEmbeddingMetrics(x=g2[,c(\"x\",\"y\")], labels=g2$class, metrics=c(\"dbcv\", \"meanSW\"), level=\"class\") bind_rows(list(graph1=res1, graph2=res2), .id=\"graph\") %>% pivot_longer(cols=c(\"meanSW\",\"dbcv\"), names_to=\"metric\",values_to=\"value\") %>% ggplot(aes(class, value, fill=graph, group=graph)) + geom_bar(position = \"dodge\", stat = \"identity\") + facet_wrap(~metric) + theme_bw()"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"dataset-level-evaluation","dir":"Articles","previous_headings":"Embedding evaluation","what":"Dataset-level evaluation","title":"Introduction to `poem`","text":"Similarly, one can evaluate dataset level specifying level=\"dataset\". example:","code":"getEmbeddingMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class, level=\"dataset\", metrics=c(\"meanSW\", \"meanClassSW\", \"pnSW\", \"minClassSW\", \"cdbw\", \"cohesion\", \"compactness\", \"sep\", \"dbcv\")) ## meanSW meanClassSW pnSW minClassSW cdbw cohesion compactness ## 1 0.5029477 0.5029477 0.0375 0.4240817 0.0553208 0.2732925 0.2800803 ## sep dbcv ## 1 0.7227335 -0.09368922"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"graph-evaluation","dir":"Articles","previous_headings":"","what":"Graph evaluation","title":"Introduction to `poem`","text":"Instead directly using distances densities embedding space evaluation, one may want evaluate connectivity stand point looking graph structure constructed datasets. getGraphMetrics() can perform k nearest neighbor (KNN) graph shared nearest neighbor graph (SNN) construction embedding apply graph-based evaluation metrics. examples g1 g2, constructed graphs look like: Use ?getGraphMetrics() check optional arguments KNN/SNN graph construction. Similarly, level can \"element\", \"class\" \"dataset\". 
Alternatively, getGraphMetrics() can take igraph object x, enables application evaluation metrics general graph, list nearest neighbors x, accelerate computation large datasets.","code":"# Some functions for plotting plotGraphs <- function(d, k=7){ gn <- dplyr::bind_rows(lapply(split(d[,-1],d$graph), FUN=function(d1){ nn <- emb2knn(as.matrix(d1[,c(\"x\",\"y\")]), k=k) g <- poem:::.nn2graph(nn, labels=d1$class) ggnetwork(g, layout=as.matrix(d1[,1:2]), scale=FALSE) }), .id=\"graph\") ggplot(gn, aes(x = x, y = y, xend = xend, yend = yend)) + theme_blank() + theme(legend.position = \"right\") + geom_edges(alpha=0.5, colour=\"grey\") + geom_nodes(aes(colour=class, shape=class), size=2) + facet_wrap(~graph, nrow=1) } plotGraphs(bind_rows(list(g1,g2), .id=\"graph\")) getGraphMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class, metrics=c(\"PWC\",\"ISI\"), level=\"class\", directed=FALSE, k=7, shared=FALSE) ## class PWC ISI ## class1 class1 0.05 1.186272 ## class2 class2 0.10 1.224188 ## class3 class3 0.05 1.149098 ## class4 class4 0.05 1.251146 res1 <- getGraphMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class,metrics=c(\"PWC\",\"ISI\"), level=\"class\", directed=FALSE, k=7, shared=FALSE) res2 <- getGraphMetrics(x=g2[,c(\"x\",\"y\")], labels=g2$class, metrics=c(\"PWC\",\"ISI\"), level=\"class\", directed=FALSE, k=7, shared=FALSE) bind_rows(list(graph1=res1, graph2=res2), .id=\"graph\") %>% pivot_longer(cols=c(\"PWC\",\"ISI\"), names_to=\"metric\",values_to=\"value\") %>% ggplot(aes(class, value, fill=graph, group=graph)) + geom_bar(position = \"dodge\", stat = \"identity\") + facet_wrap(~metric) + theme_bw()"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"partition-evaluation","dir":"Articles","previous_headings":"","what":"Partition evaluation","title":"Introduction to `poem`","text":"construct SNN graph g1 g2 embeddings, apply Louvain algorithm get partitions . 
compare predictions known labels using partition metrics: Note class-level metrics, reported per class, (specifically, “WH”, “AWH) reported per cluster.","code":"k <- 7 r <- 0.5 snn1 <- emb2snn(as.matrix(g1[,c(\"x\",\"y\")]), k=k) snn2 <- emb2snn(as.matrix(g2[,c(\"x\",\"y\")]), k=k) g1$cluster <- factor(igraph::cluster_louvain(snn1, resolution = r)$membership) g2$cluster <- factor(igraph::cluster_louvain(snn2, resolution = r)$membership) ggplot(rbind(g1,g2), aes(x,y,color=cluster, shape=class)) + geom_point() + facet_wrap(~graph) + theme_bw() # for g1 getPartitionMetrics(true=g1$class, pred=g1$cluster, level=\"dataset\", metrics = c(\"RI\", \"WC\", \"WH\", \"ARI\", \"AWC\", \"AWH\", \"FM\", \"AMI\")) ## RI WC WH ARI AWC AWH FM AMI ## 1 0.9636076 0.925 0.9237845 0.9004285 0.9012088 0.8996496 0.9624922 0.8872892 # for g2 getPartitionMetrics(true=g2$class, pred=g2$cluster, level=\"dataset\", metrics = c(\"RI\", \"WC\", \"WH\", \"ARI\", \"AWC\", \"AWH\", \"FM\", \"AMI\")) ## RI WC WH ARI AWC AWH FM AMI ## 1 0.721519 0.95 0.4616368 0.4400954 0.9010025 0.2911552 0.6501669 0.4193846 getPartitionMetrics(true=g1$class, pred=g2$cluster, level=\"class\") ## WC AWC FM class WH AWH cluster ## 1 0.9 0.802005 0.6551724 class1 NA NA ## 2 0.9 0.802005 0.6551724 class2 NA NA ## 3 1.0 1.000000 0.6451613 class3 NA NA ## 4 1.0 1.000000 0.6451613 class4 NA NA ## 5 NA NA NA 0.4864865 0.3238739 1 ## 6 NA NA NA 0.4413473 0.2644406 2"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"fuzzy-partition-evaluation","dir":"Articles","previous_headings":"","what":"Fuzzy partition evaluation","title":"Introduction to `poem`","text":"comparing two fuzzy partitions comparing fuzzy partition hard patition, one can use getFuzzyPartitionMetrics(). fuzzy reprensentation partion look like following, row data point, value class memberships class. row sums 1. using input hardPred, hardTrue, fuzzyPred, fuzzyTrue, one can control whether fuzzy hard version two partitions used comparison. example, fuzzyTrue fuzzyPred NULL, metrics comparing two fuzzy partitions used.","code":"fuzzyTrue <- matrix(c( 0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) # a hard truth: hardTrue <- apply(fuzzyTrue,1,FUN=which.max) # some predicted labels: hardPred <- c(1,1,1,1,1,1,2,2,2) getFuzzyPartitionMetrics(hardPred=hardPred, hardTrue=hardTrue, fuzzyTrue=fuzzyTrue, nperms=3, level=\"class\") ## fuzzyWC fuzzyAWC class fuzzyWH fuzzyAWH cluster ## 1 0.7195238 0.3542847 1 NA NA NA ## 2 1.0000000 NaN 2 NA NA NA ## 3 1.0000000 NaN 3 NA NA NA ## 4 NA NA NA 1.00000000 1.0000000 1 ## 5 NA NA NA 0.06166667 -0.8064171 2"},{"path":[]},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"example-data-1","dir":"Articles","previous_headings":"Spatial clustering evaluation","what":"Example data","title":"Introduction to `poem`","text":"use another toy example dataset package, sp_toys, illustrate spatial clustering evaluation. C, spots colored ground-truth class. P1 P2, color inside spot according ground-truth class, color border according clustering predictions. 
P1 P2 misclassified amount red spots blue cluster.","code":"data(sp_toys) s <- 3 st <- 1 p1 <- ggplot(sp_toys, aes(x, y, color=label)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + geom_point(shape = 1, size = s, stroke = st, aes(color=p1)) + labs(x=\"\",y=\"\", title=\"P1\") p0 <- ggplot(sp_toys, aes(x, y, color=label)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + geom_point(shape = 1, size = s, stroke = st, aes(color=label)) + labs(x=\"\",y=\"\", title=\"C\") p2 <- ggplot(sp_toys, aes(x, y, color=label)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + geom_point(shape = 1, size = s, stroke = st, aes(color=p2)) + labs(x=\"\",y=\"\", title=\"P2\") plot_grid(p0 + theme(legend.position = \"none\", plot.title = element_text(hjust = 0.5)), p1 + theme(legend.position = \"none\", plot.title = element_text(hjust = 0.5)), p2 + theme(legend.position = \"none\", plot.title = element_text(hjust = 0.5)), ncol = 3)"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"external-metrics","dir":"Articles","previous_headings":"Spatial clustering evaluation","what":"External metrics","title":"Introduction to `poem`","text":"Let’s quantify calculating external spatial metrics: specifying fuzzy_true fuzzy_pred, one can control whether fuzzy hard version true pred used comparison. fuzzy_true fuzzy_pred TRUE, spatial neighborhood information used construct fuzzy representation class/cluster memberships. See xxx details. evaluation non-spatial-aware, P1 P2 get ARI score. However, spatial-aware metrics like SpatialARI SpatialAccuracy, P2 gets higher scores P1.","code":"getSpatialExternalMetrics(true=sp_toys$label, pred=sp_toys$p1, location=sp_toys[,c(\"x\",\"y\")], level=\"dataset\", metrics=c(\"SpatialARI\",\"SpatialAccuracy\"), fuzzy_true = TRUE, fuzzy_pred = FALSE) ## SpatialARI SpatialAccuracy ## 1 0.7532872 0.9388889 getSpatialExternalMetrics(true=sp_toys$label, pred=sp_toys$p1, location=sp_toys[,c(\"x\",\"y\")], level=\"class\") ## SpatialWH SpatialAWH SpatialWC SpatialAWC class cluster ## 1 NA NA 0.779245 0.5302459 1 NA ## 2 NA NA 1.000000 1.0000000 2 NA ## 3 1.000000 1.0000000 NA NA NA 1 ## 4 0.804378 0.5911907 NA NA NA 2 res1.1 <- getSpatialExternalMetrics(true=sp_toys$label, pred=sp_toys$p1, location=sp_toys[,c(\"x\",\"y\")], level=\"dataset\", metrics=c(\"SpatialARI\",\"SpatialAccuracy\"), fuzzy_true = TRUE, fuzzy_pred = FALSE) res2.1 <- getSpatialExternalMetrics(true=sp_toys$label, pred=sp_toys$p2, location=sp_toys[,c(\"x\",\"y\")], level=\"dataset\", metrics=c(\"SpatialARI\",\"SpatialAccuracy\"), fuzzy_true = TRUE, fuzzy_pred = FALSE) res1.2 <- getPartitionMetrics(true=sp_toys$label, pred=sp_toys$p1, level=\"dataset\", metrics=c(\"ARI\")) res2.2 <- getPartitionMetrics(true=sp_toys$label, pred=sp_toys$p2, level=\"dataset\", metrics=c(\"ARI\")) cbind(bind_rows(list(res1.1, res2.1), .id=\"P\"), bind_rows(list(res1.2, res2.2), .id=\"P\")) %>% pivot_longer(cols=c(\"SpatialARI\", \"SpatialAccuracy\", \"ARI\"), names_to=\"metric\", values_to=\"value\") %>% ggplot(aes(x=P, y=value, group=metric)) + geom_point(size=3, aes(color=P)) + facet_wrap(~metric) + theme_bw() + labs(x=\"Prediction\")"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"internal-metrics","dir":"Articles","previous_headings":"Spatial clustering evaluation","what":"Internal metrics","title":"Introduction to `poem`","text":"Last least, internal metrics spatial clustering evaluation:","code":"sp_toys$c_elsa <- 
getSpatialInternalMetrics(label=sp_toys$label, location=sp_toys[,c(\"x\",\"y\")], level=\"element\", metrics=c(\"ELSA\"))$ELSA sp_toys$p1_elsa <- getSpatialInternalMetrics(label=sp_toys$p1, location=sp_toys[,c(\"x\",\"y\")], level=\"element\", metrics=c(\"ELSA\"))$ELSA sp_toys$p2_elsa <- getSpatialInternalMetrics(label=sp_toys$p2, location=sp_toys[,c(\"x\",\"y\")], level=\"element\", metrics=c(\"ELSA\"))$ELSA s <- 3 st <- 1 p1 <- ggplot(sp_toys, aes(x, y, color=p1_elsa)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + labs(x=\"\",y=\"\", title=\"P1\", color=\"ELSA\") + scico::scale_color_scico(palette = \"roma\", limits = c(0, 1), direction=-1) p0 <- ggplot(sp_toys, aes(x, y, color=c_elsa)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + labs(x=\"\",y=\"\", title=\"C\", color=\"ELSA\") + scico::scale_color_scico(palette = \"roma\", limits = c(0, 1), direction=-1) p2 <- ggplot(sp_toys, aes(x, y, color=p2_elsa)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + labs(x=\"\",y=\"\", title=\"P2\", color=\"ELSA\") + scico::scale_color_scico(palette = \"roma\", limits = c(0, 1), direction=-1) plot_grid(p0 + theme(plot.title = element_text(hjust = 0.5)), p1 + theme(plot.title = element_text(hjust = 0.5)), p2 + theme(plot.title = element_text(hjust = 0.5)), nrow=1, rel_width=c(1,1,1))"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"session-info","dir":"Articles","previous_headings":"","what":"Session info","title":"Introduction to `poem`","text":"","code":"sessionInfo() ## R version 4.4.1 (2024-06-14) ## Platform: x86_64-pc-linux-gnu ## Running under: Ubuntu 22.04.5 LTS ## ## Matrix products: default ## BLAS: /usr/lib/x86_64-linux-gnu/blas/libblas.so.3.10.0 ## LAPACK: /usr/lib/x86_64-linux-gnu/lapack/liblapack.so.3.10.0 ## ## locale: ## [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C ## [3] LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8 ## [5] LC_MONETARY=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 ## [7] LC_PAPER=en_US.UTF-8 LC_NAME=C ## [9] LC_ADDRESS=C LC_TELEPHONE=C ## [11] LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C ## ## time zone: Europe/Zurich ## tzcode source: system (glibc) ## ## attached base packages: ## [1] stats graphics grDevices utils datasets methods base ## ## other attached packages: ## [1] cowplot_1.1.3 igraph_2.1.1 ggnetwork_0.5.13 tidyr_1.3.1 ## [5] dplyr_1.1.4 ggplot2_3.5.1 poem_0.99.1 BiocStyle_2.32.1 ## ## loaded via a namespace (and not attached): ## [1] tidyselect_1.2.1 farver_2.1.2 fastmap_1.2.0 ## [4] bluster_1.14.0 digest_0.6.36 lifecycle_1.0.4 ## [7] sf_1.0-6 cluster_2.1.6 terra_1.5-21 ## [10] dbscan_1.2-0 magrittr_2.0.3 compiler_4.4.1 ## [13] rlang_1.1.4 sass_0.4.9 tools_4.4.1 ## [16] mclustcomp_0.3.3 utf8_1.2.4 yaml_2.3.10 ## [19] knitr_1.48 labeling_0.4.3 htmlwidgets_1.6.4 ## [22] sp_2.1-4 classInt_0.4-10 scico_1.5.0 ## [25] BiocParallel_1.38.0 KernSmooth_2.23-24 fclust_2.1.1.1 ## [28] elsa_1.1-28 withr_3.0.1 purrr_1.0.2 ## [31] BiocGenerics_0.50.0 desc_1.4.3 grid_4.4.1 ## [34] stats4_4.4.1 fansi_1.0.6 e1071_1.7-9 ## [37] colorspace_2.1-1 aricode_1.0.3 scales_1.3.0 ## [40] MASS_7.3-61 cli_3.6.3 rmarkdown_2.27 ## [43] ragg_1.3.2 generics_0.1.3 rstudioapi_0.16.0 ## [46] spdep_1.3-6 DBI_1.2.3 cachem_1.1.0 ## [49] proxy_0.4-27 parallel_4.4.1 BiocManager_1.30.23 ## [52] s2_1.1.7 vctrs_0.6.5 boot_1.3-30 ## [55] Matrix_1.7-0 jsonlite_1.8.8 spData_2.3.3 ## [58] bookdown_0.40 clevr_0.1.2 S4Vectors_0.42.1 ## [61] BiocNeighbors_1.22.0 clue_0.3-65 crosstalk_1.2.1 ## [64] systemfonts_1.1.0 jquerylib_0.1.4 units_0.8-0 ## 
[67] glue_1.8.0 pkgdown_2.1.1 codetools_0.2-20 ## [70] DT_0.33 gtable_0.3.5 deldir_2.0-4 ## [73] raster_3.5-15 munsell_0.5.1 tibble_3.2.1 ## [76] pillar_1.9.0 htmltools_0.5.8.1 R6_2.5.1 ## [79] wk_0.9.4 textshaping_0.3.6 Rdpack_2.6.1 ## [82] evaluate_0.24.0 lattice_0.22-6 highr_0.11 ## [85] rbibutils_2.3 bslib_0.8.0 class_7.3-22 ## [88] Rcpp_1.0.13 xfun_0.46 fs_1.6.4 ## [91] pkgconfig_2.0.3"},{"path":"https://roseyuan.github.io/poem/articles/table.html","id":"partition-based-metrics","dir":"Articles","previous_headings":"","what":"Partition-based metrics","title":"Details about each evaluation metrics","text":"Partition-based metrics. notation used common throughout table: consider comparing predicted partition PP ground-truth partition GG; aa number pairs group PP GG; bb number pairs class GG different clusters PP; cc number pairs different classes GG cluster PP; dd number pairs different groups PP GG; nn total number objects; EE expectation operator; H(⋅)H(⋅) Shannon entropy; $eta$ ratio weight attributed homogeneity vs completeness; expactation value RI, WH, WC calculated assuming generalized hypergeometric model.","code":""},{"path":"https://roseyuan.github.io/poem/articles/table.html","id":"embedding-based-metrics","dir":"Articles","previous_headings":"","what":"Embedding-based metrics","title":"Details about each evaluation metrics","text":"Embedding-based metrics.","code":""},{"path":"https://roseyuan.github.io/poem/articles/table.html","id":"graph-based-metrics","dir":"Articles","previous_headings":"","what":"Graph-based metrics","title":"Details about each evaluation metrics","text":"Graph-based metrics.","code":""},{"path":"https://roseyuan.github.io/poem/articles/table.html","id":"metrics-for-spatial-clusterings","dir":"Articles","previous_headings":"","what":"Metrics for spatial clusterings","title":"Details about each evaluation metrics","text":"Metrics spatial clusterings.","code":""},{"path":"https://roseyuan.github.io/poem/articles/table.html","id":"session-info","dir":"Articles","previous_headings":"","what":"Session info","title":"Details about each evaluation metrics","text":"","code":"sessionInfo() ## R version 4.4.1 (2024-06-14) ## Platform: x86_64-pc-linux-gnu ## Running under: Ubuntu 22.04.5 LTS ## ## Matrix products: default ## BLAS: /usr/lib/x86_64-linux-gnu/blas/libblas.so.3.10.0 ## LAPACK: /usr/lib/x86_64-linux-gnu/lapack/liblapack.so.3.10.0 ## ## locale: ## [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C ## [3] LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8 ## [5] LC_MONETARY=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 ## [7] LC_PAPER=en_US.UTF-8 LC_NAME=C ## [9] LC_ADDRESS=C LC_TELEPHONE=C ## [11] LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C ## ## time zone: Europe/Zurich ## tzcode source: system (glibc) ## ## attached base packages: ## [1] stats graphics grDevices utils datasets methods base ## ## other attached packages: ## [1] BiocStyle_2.32.1 ## ## loaded via a namespace (and not attached): ## [1] vctrs_0.6.5 svglite_2.1.3 cli_3.6.3 ## [4] knitr_1.48 rlang_1.1.4 xfun_0.46 ## [7] stringi_1.8.4 textshaping_0.3.6 jsonlite_1.8.8 ## [10] glue_1.8.0 colorspace_2.1-1 htmltools_0.5.8.1 ## [13] ragg_1.3.2 sass_0.4.9 scales_1.3.0 ## [16] rmarkdown_2.27 munsell_0.5.1 evaluate_0.24.0 ## [19] jquerylib_0.1.4 kableExtra_1.4.0 fastmap_1.2.0 ## [22] yaml_2.3.10 lifecycle_1.0.4 bookdown_0.40 ## [25] stringr_1.5.1 BiocManager_1.30.23 compiler_4.4.1 ## [28] fs_1.6.4 htmlwidgets_1.6.4 rstudioapi_0.16.0 ## [31] systemfonts_1.1.0 digest_0.6.36 viridisLite_0.4.2 ## [34] R6_2.5.1 magrittr_2.0.3 
bslib_0.8.0 ## [37] tools_4.4.1 xml2_1.3.6 pkgdown_2.1.1 ## [40] cachem_1.1.0 desc_1.4.3"},{"path":"https://roseyuan.github.io/poem/authors.html","id":null,"dir":"","previous_headings":"","what":"Authors","title":"Authors and Citation","text":"Siyuan Luo. Maintainer, author. Pierre-Luc Germain. Author, contributor.","code":""},{"path":"https://roseyuan.github.io/poem/authors.html","id":"citation","dir":"","previous_headings":"","what":"Citation","title":"Authors and Citation","text":"Luo S, Germain P (2024). poem: POpulation-based Evaluation Metrics. R package version 0.99.1, https://roseyuan.github.io/poem/.","code":"@Manual{, title = {poem: POpulation-based Evaluation Metrics}, author = {Siyuan Luo and Pierre-Luc Germain}, year = {2024}, note = {R package version 0.99.1}, url = {https://roseyuan.github.io/poem/}, }"},{"path":[]},{"path":"https://roseyuan.github.io/poem/index.html","id":"overview","dir":"","previous_headings":"","what":"Overview","title":"POpulation-based Evaluation Metrics","text":"poem package provides multiple metrics evaluating subpopulation structure identification dataset. include: Metrics comparing two partitions dataset, metrics evaluating alignment dataset's embedding graph representations partition. Metrics comparing two fuzzy partitions, comparing hard partition fuzzy partition. allows evaluation fuzzy partition results assessing agreement fuzzy hard ground-truth partition. Metrics tailored domain detection spatially-resolved omics data. include especially external evaluation metrics (i.e. based comparison ground truth labels), also internal metrics. detailed introduction package, see online docs.","code":""},{"path":"https://roseyuan.github.io/poem/index.html","id":"installation","dir":"","previous_headings":"","what":"Installation","title":"POpulation-based Evaluation Metrics","text":"can install development version poem GitHub:","code":"# install.packages(\"devtools\") devtools::install_github(\"RoseYuan/poem\")"},{"path":"https://roseyuan.github.io/poem/index.html","id":"contact","dir":"","previous_headings":"","what":"Contact","title":"POpulation-based Evaluation Metrics","text":"case questions suggestions poem, please consider opening issue GitHub repository.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate CDbw index — CDbw","title":"Calculate CDbw index — CDbw","text":"Computes CDbw-index (Halkidi Vazirgiannis 2008; Halkidi, Vazirgiannis Hennig, 2015). function directly copied fpc CRAN package written Christian Hennig. included reduce package dependencies (since fpc --light dependencies required ).","code":""},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate CDbw index — CDbw","text":"","code":"CDbw( x, labels, r = 10, s = seq(0.1, 0.8, by = 0.1), clusterstdev = TRUE, trace = FALSE )"},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate CDbw index — CDbw","text":"x Something can coerced numerical matrix, elements rows. labels vector integers length =nrow(x) indicating cluster observation. r Number cluster border representatives. s Vector shrinking factors. clusterstdev Logical. TRUE, neighborhood radius intra-cluster density within-cluster estimated squared distance mean cluster; otherwise average clusters. 
trace Logical; whether print processing info.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate CDbw index — CDbw","text":"vector following values (see refs details): cdbw value CDbw index (higher better). cohesion cohesion. compactness compactness. sep separation.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Calculate CDbw index — CDbw","text":"Halkidi, M. Vazirgiannis, M. (2008) density-based cluster validity approach using multi-representatives. Pattern Recognition Letters 29, 773-786. Halkidi, M., Vazirgiannis, M. Hennig, C. (2015) Method-independent indices cluster validation. C. Hennig, M. Meila, F. Murtagh, R. Rocci (eds.) Handbook Cluster Analysis, CRC Press/Taylor & Francis, Boca Raton.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":"author","dir":"Reference","previous_headings":"","what":"Author","title":"Calculate CDbw index — CDbw","text":"Christian Hennig","code":""},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Calculate CDbw index — CDbw","text":"","code":"d1 <- mockData() CDbw(d1[,1:2], d1[,3]) #> cdbw cohesion compactness sep #> 0.03805049 0.23717624 0.24202068 0.66288255"},{"path":"https://roseyuan.github.io/poem/reference/CHAOS.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate CHAOS score — CHAOS","title":"Calculate CHAOS score — CHAOS","text":"CHAOS score measures clustering performance calculating mean length graph edges 1-nearest neighbor (1NN) graph cluster, averaged across clusters. Lower CHAOS score indicates better spatial domain clustering performance.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CHAOS.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate CHAOS score — CHAOS","text":"","code":"CHAOS(labels, location, BNPARAM = NULL)"},{"path":"https://roseyuan.github.io/poem/reference/CHAOS.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate CHAOS score — CHAOS","text":"labels Cluster labels. location numeric data matrix containing location information, rows points columns location dimensions. BNPARAM BNPARAM object passed findKNN specifying kNN approximation method use. 
Defaults exact small datasets, Annoy larger ones.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CHAOS.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate CHAOS score — CHAOS","text":"numeric value CHAOS score.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CHAOS.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Calculate CHAOS score — CHAOS","text":"","code":"data(sp_toys) data <- sp_toys CHAOS(data$label, data[,c(\"x\", \"y\")]) #> $CHAOS #> [1] 0.2305866 #> #> $CHAOS_class #> 2 1 #> 0.2305866 0.2305866 #> CHAOS(data$p1, data[,c(\"x\", \"y\")]) #> $CHAOS #> [1] 0.2305866 #> #> $CHAOS_class #> 2 1 #> 0.2305866 0.2305866 #> CHAOS(data$p2, data[,c(\"x\", \"y\")]) #> $CHAOS #> [1] 0.2305866 #> #> $CHAOS_class #> 2 1 #> 0.2305866 0.2305866 #>"},{"path":"https://roseyuan.github.io/poem/reference/ELSA.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate ELSA scores — ELSA","title":"Calculate ELSA scores — ELSA","text":"Calculating Entropy-based Local indicator Spatial Association (ELSA) scores, consist Ea, Ec overall ELSA.","code":""},{"path":"https://roseyuan.github.io/poem/reference/ELSA.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate ELSA scores — ELSA","text":"","code":"ELSA(labels, location, k = 10)"},{"path":"https://roseyuan.github.io/poem/reference/ELSA.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate ELSA scores — ELSA","text":"labels Cluster labels. location numerical matrix containing location information, rows samples columns location dimensions. k Number nearest neighbors.","code":""},{"path":"https://roseyuan.github.io/poem/reference/ELSA.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate ELSA scores — ELSA","text":"dataframe containing Ea, Ec ELSA samples dataset.","code":""},{"path":"https://roseyuan.github.io/poem/reference/ELSA.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Calculate ELSA scores — ELSA","text":"Naimi, Babak, et al., 2019; 10.1016/j.spasta.2018.10.001","code":""},{"path":"https://roseyuan.github.io/poem/reference/ELSA.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Calculate ELSA scores — ELSA","text":"","code":"data(sp_toys) data <- sp_toys ELSA(data$label, data[,c(\"x\", \"y\")], k=6) #> Ea Ec ELSA #> 1 0.00000000 0.0000000 0.00000000 #> 2 0.00000000 0.0000000 0.00000000 #> 3 0.09090909 0.4138169 0.03761971 #> 4 0.20000000 0.6840384 0.13680769 #> 5 0.66666667 0.9709506 0.64730040 #> 6 0.11111111 0.4689956 0.05211062 #> 7 0.00000000 0.0000000 0.00000000 #> 8 0.00000000 0.0000000 0.00000000 #> 9 0.00000000 0.0000000 0.00000000 #> 10 0.00000000 0.0000000 0.00000000 #> 11 0.00000000 0.0000000 0.00000000 #> 12 0.00000000 0.0000000 0.00000000 #> 13 0.00000000 0.0000000 0.00000000 #> 14 0.00000000 0.0000000 0.00000000 #> 15 0.00000000 0.0000000 0.00000000 #> 16 0.00000000 0.0000000 0.00000000 #> 17 0.00000000 0.0000000 0.00000000 #> 18 0.13333333 0.5435644 0.07247526 #> 19 0.28571429 0.8366407 0.23904021 #> 20 0.53846154 1.0000000 0.53846154 #> 21 0.07692308 0.3712323 0.02855633 #> 22 0.00000000 0.0000000 0.00000000 #> 23 0.00000000 0.0000000 0.00000000 #> 24 0.00000000 0.0000000 0.00000000 #> 25 0.00000000 0.0000000 0.00000000 #> 26 0.00000000 0.0000000 0.00000000 #> 27 0.00000000 0.0000000 
0.00000000 #> 28 0.00000000 0.0000000 0.00000000 #> 29 0.00000000 0.0000000 0.00000000 #> 30 0.00000000 0.0000000 0.00000000 #> 31 0.00000000 0.0000000 0.00000000 #> 32 0.00000000 0.0000000 0.00000000 #> 33 0.00000000 0.0000000 0.00000000 #> 34 0.11764706 0.5032583 0.05920686 #> 35 0.33333333 0.8960382 0.29867941 #> 36 0.33333333 0.8960382 0.29867941 #> 37 0.11764706 0.5032583 0.05920686 #> 38 0.00000000 0.0000000 0.00000000 #> 39 0.00000000 0.0000000 0.00000000 #> 40 0.00000000 0.0000000 0.00000000 #> 41 0.00000000 0.0000000 0.00000000 #> 42 0.00000000 0.0000000 0.00000000 #> 43 0.00000000 0.0000000 0.00000000 #> 44 0.00000000 0.0000000 0.00000000 #> 45 0.00000000 0.0000000 0.00000000 #> 46 0.00000000 0.0000000 0.00000000 #> 47 0.00000000 0.0000000 0.00000000 #> 48 0.00000000 0.0000000 0.00000000 #> 49 0.11764706 0.5032583 0.05920686 #> 50 0.33333333 0.8960382 0.29867941 #> 51 0.33333333 0.8960382 0.29867941 #> 52 0.11764706 0.5032583 0.05920686 #> 53 0.00000000 0.0000000 0.00000000 #> 54 0.00000000 0.0000000 0.00000000 #> 55 0.00000000 0.0000000 0.00000000 #> 56 0.00000000 0.0000000 0.00000000 #> 57 0.00000000 0.0000000 0.00000000 #> 58 0.00000000 0.0000000 0.00000000 #> 59 0.00000000 0.0000000 0.00000000 #> 60 0.00000000 0.0000000 0.00000000 #> 61 0.00000000 0.0000000 0.00000000 #> 62 0.00000000 0.0000000 0.00000000 #> 63 0.00000000 0.0000000 0.00000000 #> 64 0.00000000 0.0000000 0.00000000 #> 65 0.06666667 0.3372901 0.02248600 #> 66 0.46666667 0.9886994 0.46139306 #> 67 0.35294118 0.9182958 0.32410441 #> 68 0.16666667 0.6292492 0.10487487 #> 69 0.00000000 0.0000000 0.00000000 #> 70 0.00000000 0.0000000 0.00000000 #> 71 0.00000000 0.0000000 0.00000000 #> 72 0.00000000 0.0000000 0.00000000 #> 73 0.00000000 0.0000000 0.00000000 #> 74 0.00000000 0.0000000 0.00000000 #> 75 0.00000000 0.0000000 0.00000000 #> 76 0.00000000 0.0000000 0.00000000 #> 77 0.00000000 0.0000000 0.00000000 #> 78 0.00000000 0.0000000 0.00000000 #> 79 0.00000000 0.0000000 0.00000000 #> 80 0.06666667 0.3372901 0.02248600 #> 81 0.46666667 0.9886994 0.46139306 #> 82 0.35294118 0.9182958 0.32410441 #> 83 0.16666667 0.6292492 0.10487487 #> 84 0.00000000 0.0000000 0.00000000 #> 85 0.00000000 0.0000000 0.00000000 #> 86 0.00000000 0.0000000 0.00000000 #> 87 0.00000000 0.0000000 0.00000000 #> 88 0.00000000 0.0000000 0.00000000 #> 89 0.00000000 0.0000000 0.00000000 #> 90 0.00000000 0.0000000 0.00000000 #> 91 0.00000000 0.0000000 0.00000000 #> 92 0.00000000 0.0000000 0.00000000 #> 93 0.00000000 0.0000000 0.00000000 #> 94 0.00000000 0.0000000 0.00000000 #> 95 0.00000000 0.0000000 0.00000000 #> 96 0.20000000 0.6962123 0.13924245 #> 97 0.41176471 0.9640788 0.39697361 #> 98 0.38888889 0.9494520 0.36923134 #> 99 0.17647059 0.6500224 0.11470984 #> 100 0.00000000 0.0000000 0.00000000 #> 101 0.00000000 0.0000000 0.00000000 #> 102 0.00000000 0.0000000 0.00000000 #> 103 0.00000000 0.0000000 0.00000000 #> 104 0.00000000 0.0000000 0.00000000 #> 105 0.00000000 0.0000000 0.00000000 #> 106 0.00000000 0.0000000 0.00000000 #> 107 0.00000000 0.0000000 0.00000000 #> 108 0.00000000 0.0000000 0.00000000 #> 109 0.00000000 0.0000000 0.00000000 #> 110 0.00000000 0.0000000 0.00000000 #> 111 0.20000000 0.6962123 0.13924245 #> 112 0.41176471 0.9640788 0.39697361 #> 113 0.38888889 0.9494520 0.36923134 #> 114 0.16666667 0.6292492 0.10487487 #> 115 0.00000000 0.0000000 0.00000000 #> 116 0.00000000 0.0000000 0.00000000 #> 117 0.00000000 0.0000000 0.00000000 #> 118 0.00000000 0.0000000 0.00000000 #> 119 0.00000000 0.0000000 0.00000000 #> 120 0.00000000 0.0000000 
0.00000000 #> 121 0.00000000 0.0000000 0.00000000 #> 122 0.00000000 0.0000000 0.00000000 #> 123 0.00000000 0.0000000 0.00000000 #> 124 0.00000000 0.0000000 0.00000000 #> 125 0.00000000 0.0000000 0.00000000 #> 126 0.00000000 0.0000000 0.00000000 #> 127 0.17647059 0.6500224 0.11470984 #> 128 0.38888889 0.9494520 0.36923134 #> 129 0.41176471 0.9640788 0.39697361 #> 130 0.20000000 0.6962123 0.13924245 #> 131 0.00000000 0.0000000 0.00000000 #> 132 0.00000000 0.0000000 0.00000000 #> 133 0.00000000 0.0000000 0.00000000 #> 134 0.00000000 0.0000000 0.00000000 #> 135 0.00000000 0.0000000 0.00000000 #> 136 0.00000000 0.0000000 0.00000000 #> 137 0.00000000 0.0000000 0.00000000 #> 138 0.00000000 0.0000000 0.00000000 #> 139 0.00000000 0.0000000 0.00000000 #> 140 0.00000000 0.0000000 0.00000000 #> 141 0.00000000 0.0000000 0.00000000 #> 142 0.17647059 0.6500224 0.11470984 #> 143 0.38888889 0.9494520 0.36923134 #> 144 0.38888889 0.9494520 0.36923134 #> 145 0.20000000 0.6962123 0.13924245 #> 146 0.00000000 0.0000000 0.00000000 #> 147 0.00000000 0.0000000 0.00000000 #> 148 0.00000000 0.0000000 0.00000000 #> 149 0.00000000 0.0000000 0.00000000 #> 150 0.00000000 0.0000000 0.00000000 #> 151 0.00000000 0.0000000 0.00000000 #> 152 0.00000000 0.0000000 0.00000000 #> 153 0.00000000 0.0000000 0.00000000 #> 154 0.00000000 0.0000000 0.00000000 #> 155 0.00000000 0.0000000 0.00000000 #> 156 0.00000000 0.0000000 0.00000000 #> 157 0.00000000 0.0000000 0.00000000 #> 158 0.16666667 0.6292492 0.10487487 #> 159 0.35294118 0.9182958 0.32410441 #> 160 0.46666667 0.9886994 0.46139306 #> 161 0.07692308 0.3712323 0.02855633 #> 162 0.00000000 0.0000000 0.00000000 #> 163 0.00000000 0.0000000 0.00000000 #> 164 0.00000000 0.0000000 0.00000000 #> 165 0.00000000 0.0000000 0.00000000 #> 166 0.00000000 0.0000000 0.00000000 #> 167 0.00000000 0.0000000 0.00000000 #> 168 0.00000000 0.0000000 0.00000000 #> 169 0.00000000 0.0000000 0.00000000 #> 170 0.00000000 0.0000000 0.00000000 #> 171 0.00000000 0.0000000 0.00000000 #> 172 0.00000000 0.0000000 0.00000000 #> 173 0.16666667 0.6292492 0.10487487 #> 174 0.38888889 0.9494520 0.36923134 #> 175 0.46666667 0.9886994 0.46139306 #> 176 0.14285714 0.5665095 0.08092993 #> 177 0.00000000 0.0000000 0.00000000 #> 178 0.00000000 0.0000000 0.00000000 #> 179 0.00000000 0.0000000 0.00000000 #> 180 0.00000000 0.0000000 0.00000000 #> 181 0.00000000 0.0000000 0.00000000 #> 182 0.00000000 0.0000000 0.00000000 #> 183 0.00000000 0.0000000 0.00000000 #> 184 0.00000000 0.0000000 0.00000000 #> 185 0.00000000 0.0000000 0.00000000 #> 186 0.00000000 0.0000000 0.00000000 #> 187 0.00000000 0.0000000 0.00000000 #> 188 0.00000000 0.0000000 0.00000000 #> 189 0.11764706 0.5032583 0.05920686 #> 190 0.33333333 0.8960382 0.29867941 #> 191 0.38461538 0.9402860 0.36164845 #> 192 0.06666667 0.3372901 0.02248600 #> 193 0.00000000 0.0000000 0.00000000 #> 194 0.00000000 0.0000000 0.00000000 #> 195 0.00000000 0.0000000 0.00000000 #> 196 0.00000000 0.0000000 0.00000000 #> 197 0.00000000 0.0000000 0.00000000 #> 198 0.00000000 0.0000000 0.00000000 #> 199 0.00000000 0.0000000 0.00000000 #> 200 0.00000000 0.0000000 0.00000000 #> 201 0.00000000 0.0000000 0.00000000 #> 202 0.00000000 0.0000000 0.00000000 #> 203 0.00000000 0.0000000 0.00000000 #> 204 0.16666667 0.6292492 0.10487487 #> 205 0.33333333 0.8960382 0.29867941 #> 206 0.42857143 0.9709506 0.41612168 #> 207 0.06666667 0.3372901 0.02248600 #> 208 0.00000000 0.0000000 0.00000000 #> 209 0.00000000 0.0000000 0.00000000 #> 210 0.00000000 0.0000000 0.00000000 #> 211 0.00000000 0.0000000 
0.00000000 #> 212 0.00000000 0.0000000 0.00000000 #> 213 0.00000000 0.0000000 0.00000000 #> 214 0.00000000 0.0000000 0.00000000 #> 215 0.00000000 0.0000000 0.00000000 #> 216 0.00000000 0.0000000 0.00000000 #> 217 0.00000000 0.0000000 0.00000000 #> 218 0.00000000 0.0000000 0.00000000 #> 219 0.00000000 0.0000000 0.00000000 #> 220 0.07692308 0.3712323 0.02855633 #> 221 0.50000000 0.9957275 0.49786373 #> 222 0.30769231 0.8631206 0.26557556 #> 223 0.14285714 0.5665095 0.08092993 #> 224 0.00000000 0.0000000 0.00000000 #> 225 0.00000000 0.0000000 0.00000000 #> 226 0.00000000 0.0000000 0.00000000 #> 227 0.00000000 0.0000000 0.00000000 #> 228 0.00000000 0.0000000 0.00000000 #> 229 0.00000000 0.0000000 0.00000000 #> 230 0.00000000 0.0000000 0.00000000 #> 231 0.00000000 0.0000000 0.00000000 #> 232 0.00000000 0.0000000 0.00000000 #> 233 0.00000000 0.0000000 0.00000000 #> 234 0.00000000 0.0000000 0.00000000 #> 235 0.11111111 0.4689956 0.05211062 #> 236 0.55555556 1.0000000 0.55555556 #> 237 0.22222222 0.7219281 0.16042847 #> 238 0.10000000 0.4394970 0.04394970 #> 239 0.00000000 0.0000000 0.00000000 #> 240 0.00000000 0.0000000 0.00000000 ELSA(data$p1, data[,c(\"x\", \"y\")], k=6) #> Ea Ec ELSA #> 1 0.00000000 0.0000000 0.00000000 #> 2 0.00000000 0.0000000 0.00000000 #> 3 0.09090909 0.4138169 0.03761971 #> 4 0.20000000 0.6840384 0.13680769 #> 5 0.66666667 0.9709506 0.64730040 #> 6 0.11111111 0.4689956 0.05211062 #> 7 0.00000000 0.0000000 0.00000000 #> 8 0.00000000 0.0000000 0.00000000 #> 9 0.00000000 0.0000000 0.00000000 #> 10 0.00000000 0.0000000 0.00000000 #> 11 0.00000000 0.0000000 0.00000000 #> 12 0.00000000 0.0000000 0.00000000 #> 13 0.00000000 0.0000000 0.00000000 #> 14 0.00000000 0.0000000 0.00000000 #> 15 0.00000000 0.0000000 0.00000000 #> 16 0.00000000 0.0000000 0.00000000 #> 17 0.00000000 0.0000000 0.00000000 #> 18 0.13333333 0.5435644 0.07247526 #> 19 0.28571429 0.8366407 0.23904021 #> 20 0.53846154 1.0000000 0.53846154 #> 21 0.07692308 0.3712323 0.02855633 #> 22 0.00000000 0.0000000 0.00000000 #> 23 0.00000000 0.0000000 0.00000000 #> 24 0.00000000 0.0000000 0.00000000 #> 25 0.00000000 0.0000000 0.00000000 #> 26 0.00000000 0.0000000 0.00000000 #> 27 0.00000000 0.0000000 0.00000000 #> 28 0.00000000 0.0000000 0.00000000 #> 29 0.00000000 0.0000000 0.00000000 #> 30 0.00000000 0.0000000 0.00000000 #> 31 0.00000000 0.0000000 0.00000000 #> 32 0.00000000 0.0000000 0.00000000 #> 33 0.00000000 0.0000000 0.00000000 #> 34 0.11764706 0.5032583 0.05920686 #> 35 0.33333333 0.8960382 0.29867941 #> 36 0.40000000 0.9544340 0.38177360 #> 37 0.23529412 0.7642045 0.17981283 #> 38 0.16666667 0.6292492 0.10487487 #> 39 0.17647059 0.6500224 0.11470984 #> 40 0.13333333 0.5435644 0.07247526 #> 41 0.07692308 0.3712323 0.02855633 #> 42 0.00000000 0.0000000 0.00000000 #> 43 0.00000000 0.0000000 0.00000000 #> 44 0.00000000 0.0000000 0.00000000 #> 45 0.00000000 0.0000000 0.00000000 #> 46 0.00000000 0.0000000 0.00000000 #> 47 0.00000000 0.0000000 0.00000000 #> 48 0.00000000 0.0000000 0.00000000 #> 49 0.11764706 0.5032583 0.05920686 #> 50 0.26666667 0.8112781 0.21634083 #> 51 0.53333333 1.0000000 0.53333333 #> 52 0.41176471 0.9640788 0.39697361 #> 53 0.38888889 0.9494520 0.36923134 #> 54 0.38888889 0.9494520 0.36923134 #> 55 0.33333333 0.8960382 0.29867941 #> 56 0.14285714 0.5665095 0.08092993 #> 57 0.06666667 0.3372901 0.02248600 #> 58 0.00000000 0.0000000 0.00000000 #> 59 0.00000000 0.0000000 0.00000000 #> 60 0.00000000 0.0000000 0.00000000 #> 61 0.00000000 0.0000000 0.00000000 #> 62 0.00000000 0.0000000 0.00000000 #> 63 
0.00000000 0.0000000 0.00000000 #> 64 0.00000000 0.0000000 0.00000000 #> 65 0.06666667 0.3372901 0.02248600 #> 66 0.26666667 0.8112781 0.21634083 #> 67 0.35294118 0.9182958 0.32410441 #> 68 0.38888889 0.9494520 0.36923134 #> 69 0.41176471 0.9640788 0.39697361 #> 70 0.46666667 0.9886994 0.46139306 #> 71 0.69230769 0.9402860 0.65096720 #> 72 0.13333333 0.5435644 0.07247526 #> 73 0.06250000 0.3227570 0.02017231 #> 74 0.00000000 0.0000000 0.00000000 #> 75 0.00000000 0.0000000 0.00000000 #> 76 0.00000000 0.0000000 0.00000000 #> 77 0.00000000 0.0000000 0.00000000 #> 78 0.00000000 0.0000000 0.00000000 #> 79 0.00000000 0.0000000 0.00000000 #> 80 0.00000000 0.0000000 0.00000000 #> 81 0.13333333 0.5435644 0.07247526 #> 82 0.17647059 0.6500224 0.11470984 #> 83 0.16666667 0.6292492 0.10487487 #> 84 0.22222222 0.7424876 0.16499724 #> 85 0.33333333 0.8960382 0.29867941 #> 86 0.42857143 0.9709506 0.41612168 #> 87 0.13333333 0.5435644 0.07247526 #> 88 0.00000000 0.0000000 0.00000000 #> 89 0.00000000 0.0000000 0.00000000 #> 90 0.00000000 0.0000000 0.00000000 #> 91 0.00000000 0.0000000 0.00000000 #> 92 0.00000000 0.0000000 0.00000000 #> 93 0.00000000 0.0000000 0.00000000 #> 94 0.00000000 0.0000000 0.00000000 #> 95 0.00000000 0.0000000 0.00000000 #> 96 0.00000000 0.0000000 0.00000000 #> 97 0.00000000 0.0000000 0.00000000 #> 98 0.05555556 0.2974722 0.01652624 #> 99 0.11764706 0.5032583 0.05920686 #> 100 0.26666667 0.8112781 0.21634083 #> 101 0.53846154 1.0000000 0.53846154 #> 102 0.20000000 0.6962123 0.13924245 #> 103 0.06250000 0.3227570 0.02017231 #> 104 0.00000000 0.0000000 0.00000000 #> 105 0.00000000 0.0000000 0.00000000 #> 106 0.00000000 0.0000000 0.00000000 #> 107 0.00000000 0.0000000 0.00000000 #> 108 0.00000000 0.0000000 0.00000000 #> 109 0.00000000 0.0000000 0.00000000 #> 110 0.00000000 0.0000000 0.00000000 #> 111 0.00000000 0.0000000 0.00000000 #> 112 0.05882353 0.3095434 0.01820844 #> 113 0.16666667 0.6292492 0.10487487 #> 114 0.33333333 0.8997438 0.29991459 #> 115 0.53333333 1.0000000 0.53333333 #> 116 0.28571429 0.8366407 0.23904021 #> 117 0.06666667 0.3372901 0.02248600 #> 118 0.00000000 0.0000000 0.00000000 #> 119 0.00000000 0.0000000 0.00000000 #> 120 0.00000000 0.0000000 0.00000000 #> 121 0.00000000 0.0000000 0.00000000 #> 122 0.00000000 0.0000000 0.00000000 #> 123 0.00000000 0.0000000 0.00000000 #> 124 0.00000000 0.0000000 0.00000000 #> 125 0.00000000 0.0000000 0.00000000 #> 126 0.00000000 0.0000000 0.00000000 #> 127 0.05882353 0.3095434 0.01820844 #> 128 0.16666667 0.6292492 0.10487487 #> 129 0.76470588 0.8524052 0.65183925 #> 130 0.53333333 1.0000000 0.53333333 #> 131 0.23076923 0.7495953 0.17298352 #> 132 0.06666667 0.3372901 0.02248600 #> 133 0.00000000 0.0000000 0.00000000 #> 134 0.00000000 0.0000000 0.00000000 #> 135 0.00000000 0.0000000 0.00000000 #> 136 0.00000000 0.0000000 0.00000000 #> 137 0.00000000 0.0000000 0.00000000 #> 138 0.00000000 0.0000000 0.00000000 #> 139 0.00000000 0.0000000 0.00000000 #> 140 0.00000000 0.0000000 0.00000000 #> 141 0.00000000 0.0000000 0.00000000 #> 142 0.11764706 0.5032583 0.05920686 #> 143 0.27777778 0.8314744 0.23096511 #> 144 0.55555556 0.9980009 0.55444494 #> 145 0.33333333 0.8960382 0.29867941 #> 146 0.00000000 0.0000000 0.00000000 #> 147 0.00000000 0.0000000 0.00000000 #> 148 0.00000000 0.0000000 0.00000000 #> 149 0.00000000 0.0000000 0.00000000 #> 150 0.00000000 0.0000000 0.00000000 #> 151 0.00000000 0.0000000 0.00000000 #> 152 0.00000000 0.0000000 0.00000000 #> 153 0.00000000 0.0000000 0.00000000 #> 154 0.00000000 0.0000000 0.00000000 #> 155 
0.00000000 0.0000000 0.00000000 #> 156 0.00000000 0.0000000 0.00000000 #> 157 0.00000000 0.0000000 0.00000000 #> 158 0.16666667 0.6292492 0.10487487 #> 159 0.35294118 0.9182958 0.32410441 #> 160 0.46666667 0.9886994 0.46139306 #> 161 0.07692308 0.3712323 0.02855633 #> 162 0.00000000 0.0000000 0.00000000 #> 163 0.00000000 0.0000000 0.00000000 #> 164 0.00000000 0.0000000 0.00000000 #> 165 0.00000000 0.0000000 0.00000000 #> 166 0.00000000 0.0000000 0.00000000 #> 167 0.00000000 0.0000000 0.00000000 #> 168 0.00000000 0.0000000 0.00000000 #> 169 0.00000000 0.0000000 0.00000000 #> 170 0.00000000 0.0000000 0.00000000 #> 171 0.00000000 0.0000000 0.00000000 #> 172 0.00000000 0.0000000 0.00000000 #> 173 0.16666667 0.6292492 0.10487487 #> 174 0.38888889 0.9494520 0.36923134 #> 175 0.46666667 0.9886994 0.46139306 #> 176 0.14285714 0.5665095 0.08092993 #> 177 0.00000000 0.0000000 0.00000000 #> 178 0.00000000 0.0000000 0.00000000 #> 179 0.00000000 0.0000000 0.00000000 #> 180 0.00000000 0.0000000 0.00000000 #> 181 0.00000000 0.0000000 0.00000000 #> 182 0.00000000 0.0000000 0.00000000 #> 183 0.00000000 0.0000000 0.00000000 #> 184 0.00000000 0.0000000 0.00000000 #> 185 0.00000000 0.0000000 0.00000000 #> 186 0.00000000 0.0000000 0.00000000 #> 187 0.00000000 0.0000000 0.00000000 #> 188 0.00000000 0.0000000 0.00000000 #> 189 0.11764706 0.5032583 0.05920686 #> 190 0.33333333 0.8960382 0.29867941 #> 191 0.38461538 0.9402860 0.36164845 #> 192 0.06666667 0.3372901 0.02248600 #> 193 0.00000000 0.0000000 0.00000000 #> 194 0.00000000 0.0000000 0.00000000 #> 195 0.00000000 0.0000000 0.00000000 #> 196 0.00000000 0.0000000 0.00000000 #> 197 0.00000000 0.0000000 0.00000000 #> 198 0.00000000 0.0000000 0.00000000 #> 199 0.00000000 0.0000000 0.00000000 #> 200 0.00000000 0.0000000 0.00000000 #> 201 0.00000000 0.0000000 0.00000000 #> 202 0.00000000 0.0000000 0.00000000 #> 203 0.00000000 0.0000000 0.00000000 #> 204 0.16666667 0.6292492 0.10487487 #> 205 0.33333333 0.8960382 0.29867941 #> 206 0.42857143 0.9709506 0.41612168 #> 207 0.06666667 0.3372901 0.02248600 #> 208 0.00000000 0.0000000 0.00000000 #> 209 0.00000000 0.0000000 0.00000000 #> 210 0.00000000 0.0000000 0.00000000 #> 211 0.00000000 0.0000000 0.00000000 #> 212 0.00000000 0.0000000 0.00000000 #> 213 0.00000000 0.0000000 0.00000000 #> 214 0.00000000 0.0000000 0.00000000 #> 215 0.00000000 0.0000000 0.00000000 #> 216 0.00000000 0.0000000 0.00000000 #> 217 0.00000000 0.0000000 0.00000000 #> 218 0.00000000 0.0000000 0.00000000 #> 219 0.00000000 0.0000000 0.00000000 #> 220 0.07692308 0.3712323 0.02855633 #> 221 0.50000000 0.9957275 0.49786373 #> 222 0.30769231 0.8631206 0.26557556 #> 223 0.14285714 0.5665095 0.08092993 #> 224 0.00000000 0.0000000 0.00000000 #> 225 0.00000000 0.0000000 0.00000000 #> 226 0.00000000 0.0000000 0.00000000 #> 227 0.00000000 0.0000000 0.00000000 #> 228 0.00000000 0.0000000 0.00000000 #> 229 0.00000000 0.0000000 0.00000000 #> 230 0.00000000 0.0000000 0.00000000 #> 231 0.00000000 0.0000000 0.00000000 #> 232 0.00000000 0.0000000 0.00000000 #> 233 0.00000000 0.0000000 0.00000000 #> 234 0.00000000 0.0000000 0.00000000 #> 235 0.11111111 0.4689956 0.05211062 #> 236 0.55555556 1.0000000 0.55555556 #> 237 0.22222222 0.7219281 0.16042847 #> 238 0.10000000 0.4394970 0.04394970 #> 239 0.00000000 0.0000000 0.00000000 #> 240 0.00000000 0.0000000 0.00000000 ELSA(data$p2, data[,c(\"x\", \"y\")], k=6) #> Ea Ec ELSA #> 1 0.00000000 0.0000000 0.00000000 #> 2 0.00000000 0.0000000 0.00000000 #> 3 0.00000000 0.0000000 0.00000000 #> 4 0.00000000 0.0000000 0.00000000 #> 
5 0.22222222 0.7219281 0.16042847 #> 6 0.44444444 0.9709506 0.43153360 #> 7 0.20000000 0.6840384 0.13680769 #> 8 0.00000000 0.0000000 0.00000000 #> 9 0.00000000 0.0000000 0.00000000 #> 10 0.00000000 0.0000000 0.00000000 #> 11 0.00000000 0.0000000 0.00000000 #> 12 0.00000000 0.0000000 0.00000000 #> 13 0.00000000 0.0000000 0.00000000 #> 14 0.00000000 0.0000000 0.00000000 #> 15 0.00000000 0.0000000 0.00000000 #> 16 0.00000000 0.0000000 0.00000000 #> 17 0.00000000 0.0000000 0.00000000 #> 18 0.00000000 0.0000000 0.00000000 #> 19 0.07142857 0.3533593 0.02523995 #> 20 0.30769231 0.8631206 0.26557556 #> 21 0.38461538 0.9402860 0.36164845 #> 22 0.14285714 0.5665095 0.08092993 #> 23 0.00000000 0.0000000 0.00000000 #> 24 0.00000000 0.0000000 0.00000000 #> 25 0.00000000 0.0000000 0.00000000 #> 26 0.00000000 0.0000000 0.00000000 #> 27 0.00000000 0.0000000 0.00000000 #> 28 0.00000000 0.0000000 0.00000000 #> 29 0.00000000 0.0000000 0.00000000 #> 30 0.00000000 0.0000000 0.00000000 #> 31 0.00000000 0.0000000 0.00000000 #> 32 0.00000000 0.0000000 0.00000000 #> 33 0.00000000 0.0000000 0.00000000 #> 34 0.00000000 0.0000000 0.00000000 #> 35 0.06666667 0.3372901 0.02248600 #> 36 0.46666667 0.9886994 0.46139306 #> 37 0.35294118 0.9182958 0.32410441 #> 38 0.16666667 0.6292492 0.10487487 #> 39 0.00000000 0.0000000 0.00000000 #> 40 0.00000000 0.0000000 0.00000000 #> 41 0.00000000 0.0000000 0.00000000 #> 42 0.00000000 0.0000000 0.00000000 #> 43 0.00000000 0.0000000 0.00000000 #> 44 0.00000000 0.0000000 0.00000000 #> 45 0.00000000 0.0000000 0.00000000 #> 46 0.00000000 0.0000000 0.00000000 #> 47 0.00000000 0.0000000 0.00000000 #> 48 0.00000000 0.0000000 0.00000000 #> 49 0.00000000 0.0000000 0.00000000 #> 50 0.06666667 0.3372901 0.02248600 #> 51 0.46666667 0.9886994 0.46139306 #> 52 0.35294118 0.9182958 0.32410441 #> 53 0.16666667 0.6292492 0.10487487 #> 54 0.00000000 0.0000000 0.00000000 #> 55 0.00000000 0.0000000 0.00000000 #> 56 0.00000000 0.0000000 0.00000000 #> 57 0.00000000 0.0000000 0.00000000 #> 58 0.00000000 0.0000000 0.00000000 #> 59 0.00000000 0.0000000 0.00000000 #> 60 0.00000000 0.0000000 0.00000000 #> 61 0.00000000 0.0000000 0.00000000 #> 62 0.00000000 0.0000000 0.00000000 #> 63 0.00000000 0.0000000 0.00000000 #> 64 0.00000000 0.0000000 0.00000000 #> 65 0.00000000 0.0000000 0.00000000 #> 66 0.20000000 0.6962123 0.13924245 #> 67 0.41176471 0.9640788 0.39697361 #> 68 0.38888889 0.9494520 0.36923134 #> 69 0.17647059 0.6500224 0.11470984 #> 70 0.00000000 0.0000000 0.00000000 #> 71 0.00000000 0.0000000 0.00000000 #> 72 0.00000000 0.0000000 0.00000000 #> 73 0.00000000 0.0000000 0.00000000 #> 74 0.00000000 0.0000000 0.00000000 #> 75 0.00000000 0.0000000 0.00000000 #> 76 0.00000000 0.0000000 0.00000000 #> 77 0.00000000 0.0000000 0.00000000 #> 78 0.00000000 0.0000000 0.00000000 #> 79 0.00000000 0.0000000 0.00000000 #> 80 0.00000000 0.0000000 0.00000000 #> 81 0.20000000 0.6962123 0.13924245 #> 82 0.41176471 0.9640788 0.39697361 #> 83 0.38888889 0.9494520 0.36923134 #> 84 0.16666667 0.6292492 0.10487487 #> 85 0.00000000 0.0000000 0.00000000 #> 86 0.00000000 0.0000000 0.00000000 #> 87 0.00000000 0.0000000 0.00000000 #> 88 0.00000000 0.0000000 0.00000000 #> 89 0.00000000 0.0000000 0.00000000 #> 90 0.00000000 0.0000000 0.00000000 #> 91 0.00000000 0.0000000 0.00000000 #> 92 0.00000000 0.0000000 0.00000000 #> 93 0.00000000 0.0000000 0.00000000 #> 94 0.00000000 0.0000000 0.00000000 #> 95 0.00000000 0.0000000 0.00000000 #> 96 0.00000000 0.0000000 0.00000000 #> 97 0.17647059 0.6500224 0.11470984 #> 98 0.38888889 0.9494520 
0.36923134 #> 99 0.41176471 0.9640788 0.39697361 #> 100 0.20000000 0.6962123 0.13924245 #> 101 0.00000000 0.0000000 0.00000000 #> 102 0.00000000 0.0000000 0.00000000 #> 103 0.00000000 0.0000000 0.00000000 #> 104 0.00000000 0.0000000 0.00000000 #> 105 0.00000000 0.0000000 0.00000000 #> 106 0.00000000 0.0000000 0.00000000 #> 107 0.00000000 0.0000000 0.00000000 #> 108 0.00000000 0.0000000 0.00000000 #> 109 0.00000000 0.0000000 0.00000000 #> 110 0.00000000 0.0000000 0.00000000 #> 111 0.00000000 0.0000000 0.00000000 #> 112 0.17647059 0.6500224 0.11470984 #> 113 0.38888889 0.9494520 0.36923134 #> 114 0.38888889 0.9494520 0.36923134 #> 115 0.20000000 0.6962123 0.13924245 #> 116 0.00000000 0.0000000 0.00000000 #> 117 0.00000000 0.0000000 0.00000000 #> 118 0.00000000 0.0000000 0.00000000 #> 119 0.00000000 0.0000000 0.00000000 #> 120 0.00000000 0.0000000 0.00000000 #> 121 0.00000000 0.0000000 0.00000000 #> 122 0.00000000 0.0000000 0.00000000 #> 123 0.00000000 0.0000000 0.00000000 #> 124 0.00000000 0.0000000 0.00000000 #> 125 0.00000000 0.0000000 0.00000000 #> 126 0.00000000 0.0000000 0.00000000 #> 127 0.00000000 0.0000000 0.00000000 #> 128 0.16666667 0.6292492 0.10487487 #> 129 0.35294118 0.9182958 0.32410441 #> 130 0.46666667 0.9886994 0.46139306 #> 131 0.07692308 0.3712323 0.02855633 #> 132 0.00000000 0.0000000 0.00000000 #> 133 0.00000000 0.0000000 0.00000000 #> 134 0.00000000 0.0000000 0.00000000 #> 135 0.00000000 0.0000000 0.00000000 #> 136 0.00000000 0.0000000 0.00000000 #> 137 0.00000000 0.0000000 0.00000000 #> 138 0.00000000 0.0000000 0.00000000 #> 139 0.00000000 0.0000000 0.00000000 #> 140 0.00000000 0.0000000 0.00000000 #> 141 0.00000000 0.0000000 0.00000000 #> 142 0.00000000 0.0000000 0.00000000 #> 143 0.16666667 0.6292492 0.10487487 #> 144 0.38888889 0.9494520 0.36923134 #> 145 0.46666667 0.9886994 0.46139306 #> 146 0.14285714 0.5665095 0.08092993 #> 147 0.00000000 0.0000000 0.00000000 #> 148 0.00000000 0.0000000 0.00000000 #> 149 0.00000000 0.0000000 0.00000000 #> 150 0.00000000 0.0000000 0.00000000 #> 151 0.00000000 0.0000000 0.00000000 #> 152 0.00000000 0.0000000 0.00000000 #> 153 0.00000000 0.0000000 0.00000000 #> 154 0.00000000 0.0000000 0.00000000 #> 155 0.00000000 0.0000000 0.00000000 #> 156 0.00000000 0.0000000 0.00000000 #> 157 0.00000000 0.0000000 0.00000000 #> 158 0.00000000 0.0000000 0.00000000 #> 159 0.11764706 0.5032583 0.05920686 #> 160 0.33333333 0.8960382 0.29867941 #> 161 0.38461538 0.9402860 0.36164845 #> 162 0.06666667 0.3372901 0.02248600 #> 163 0.00000000 0.0000000 0.00000000 #> 164 0.00000000 0.0000000 0.00000000 #> 165 0.00000000 0.0000000 0.00000000 #> 166 0.00000000 0.0000000 0.00000000 #> 167 0.00000000 0.0000000 0.00000000 #> 168 0.00000000 0.0000000 0.00000000 #> 169 0.00000000 0.0000000 0.00000000 #> 170 0.00000000 0.0000000 0.00000000 #> 171 0.00000000 0.0000000 0.00000000 #> 172 0.00000000 0.0000000 0.00000000 #> 173 0.00000000 0.0000000 0.00000000 #> 174 0.16666667 0.6292492 0.10487487 #> 175 0.33333333 0.8960382 0.29867941 #> 176 0.42857143 0.9709506 0.41612168 #> 177 0.06666667 0.3372901 0.02248600 #> 178 0.00000000 0.0000000 0.00000000 #> 179 0.00000000 0.0000000 0.00000000 #> 180 0.00000000 0.0000000 0.00000000 #> 181 0.00000000 0.0000000 0.00000000 #> 182 0.00000000 0.0000000 0.00000000 #> 183 0.00000000 0.0000000 0.00000000 #> 184 0.00000000 0.0000000 0.00000000 #> 185 0.00000000 0.0000000 0.00000000 #> 186 0.00000000 0.0000000 0.00000000 #> 187 0.00000000 0.0000000 0.00000000 #> 188 0.00000000 0.0000000 0.00000000 #> 189 0.00000000 0.0000000 
0.00000000 #> 190 0.06666667 0.3372901 0.02248600 #> 191 0.46153846 0.9852281 0.45472068 #> 192 0.33333333 0.8960382 0.29867941 #> 193 0.18750000 0.6722948 0.12605528 #> 194 0.00000000 0.0000000 0.00000000 #> 195 0.00000000 0.0000000 0.00000000 #> 196 0.00000000 0.0000000 0.00000000 #> 197 0.00000000 0.0000000 0.00000000 #> 198 0.00000000 0.0000000 0.00000000 #> 199 0.00000000 0.0000000 0.00000000 #> 200 0.00000000 0.0000000 0.00000000 #> 201 0.00000000 0.0000000 0.00000000 #> 202 0.00000000 0.0000000 0.00000000 #> 203 0.00000000 0.0000000 0.00000000 #> 204 0.00000000 0.0000000 0.00000000 #> 205 0.06666667 0.3372901 0.02248600 #> 206 0.42857143 0.9709506 0.41612168 #> 207 0.33333333 0.8960382 0.29867941 #> 208 0.18750000 0.6722948 0.12605528 #> 209 0.00000000 0.0000000 0.00000000 #> 210 0.00000000 0.0000000 0.00000000 #> 211 0.00000000 0.0000000 0.00000000 #> 212 0.00000000 0.0000000 0.00000000 #> 213 0.00000000 0.0000000 0.00000000 #> 214 0.00000000 0.0000000 0.00000000 #> 215 0.00000000 0.0000000 0.00000000 #> 216 0.00000000 0.0000000 0.00000000 #> 217 0.00000000 0.0000000 0.00000000 #> 218 0.00000000 0.0000000 0.00000000 #> 219 0.00000000 0.0000000 0.00000000 #> 220 0.00000000 0.0000000 0.00000000 #> 221 0.16666667 0.6193822 0.10323037 #> 222 0.53846154 1.0000000 0.53846154 #> 223 0.35714286 0.9182958 0.32796280 #> 224 0.15384615 0.5916728 0.09102658 #> 225 0.00000000 0.0000000 0.00000000 #> 226 0.00000000 0.0000000 0.00000000 #> 227 0.00000000 0.0000000 0.00000000 #> 228 0.00000000 0.0000000 0.00000000 #> 229 0.00000000 0.0000000 0.00000000 #> 230 0.00000000 0.0000000 0.00000000 #> 231 0.00000000 0.0000000 0.00000000 #> 232 0.00000000 0.0000000 0.00000000 #> 233 0.00000000 0.0000000 0.00000000 #> 234 0.00000000 0.0000000 0.00000000 #> 235 0.00000000 0.0000000 0.00000000 #> 236 0.22222222 0.7219281 0.16042847 #> 237 0.66666667 0.9709506 0.64730040 #> 238 0.30000000 0.8453509 0.25360528 #> 239 0.12500000 0.5032583 0.06290729 #> 240 0.00000000 0.0000000 0.00000000"},{"path":"https://roseyuan.github.io/poem/reference/FMeasure.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate F measure — FMeasure","title":"Calculate F measure — FMeasure","text":"Compute F measure two clustering results. directly copied package FlowSOM.","code":""},{"path":"https://roseyuan.github.io/poem/reference/FMeasure.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate F measure — FMeasure","text":"","code":"FMeasure(true, pred, silent = TRUE)"},{"path":"https://roseyuan.github.io/poem/reference/FMeasure.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate F measure — FMeasure","text":"true Array containing real cluster labels sample pred Array containing predicted cluster labels sample silent Logical, FALSE, print information precision recall","code":""},{"path":"https://roseyuan.github.io/poem/reference/FMeasure.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate F measure — FMeasure","text":"F measure score","code":""},{"path":"https://roseyuan.github.io/poem/reference/PAS.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate PAS score — PAS","title":"Calculate PAS score — PAS","text":"PAS score measures clustering performance calculating randomness spots located outside spatial region clustered . 
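The FMeasure entry above documents usage and arguments but, unlike most reference pages in this index, carries no example section; the following is a minimal hedged sketch of how it might be called, where the toy true/pred label vectors are invented for illustration and are not part of the generated reference:

library(poem)
# Hypothetical illustration of FMeasure(); `true` and `pred` are made-up label vectors.
true <- c(1, 1, 1, 2, 2, 2, 3, 3)
pred <- c(1, 1, 2, 2, 2, 2, 3, 3)
FMeasure(true, pred)                  # returns the F measure score
FMeasure(true, pred, silent = FALSE)  # when silent = FALSE, also prints precision and recall information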
Lower PAS score indicates better spatial domian clustering performance.","code":""},{"path":"https://roseyuan.github.io/poem/reference/PAS.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate PAS score — PAS","text":"","code":"PAS(labels, location, k = 10, ...)"},{"path":"https://roseyuan.github.io/poem/reference/PAS.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate PAS score — PAS","text":"labels Cluster labels. location numerical matrix containing location information, rows samples columns location dimensions. k Number nearest neighbors. ... Optional params findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/PAS.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate PAS score — PAS","text":"numeric value PAS score, boolean vector abnormal spots.","code":""},{"path":"https://roseyuan.github.io/poem/reference/PAS.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Calculate PAS score — PAS","text":"","code":"data(sp_toys) data <- sp_toys PAS(data$label, data[,c(\"x\", \"y\")], k=6) #> $PAS #> [1] 0.008333333 #> #> $abnormalty #> [1] FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [37] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [49] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [73] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [85] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [97] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [109] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [121] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [133] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [145] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [157] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [169] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [181] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [193] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [205] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [217] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [229] FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE FALSE #> PAS(data$p1, data[,c(\"x\", \"y\")], k=6) #> $PAS #> [1] 0.01666667 #> #> $abnormalty #> [1] FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [37] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [49] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE #> [73] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [85] FALSE FALSE FALSE FALSE FALSE FALSE FALSE 
FALSE FALSE FALSE FALSE FALSE #> [97] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [109] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [121] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE #> [133] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [145] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [157] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [169] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [181] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [193] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [205] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [217] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [229] FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE FALSE #> PAS(data$p2, data[,c(\"x\", \"y\")], k=6) #> $PAS #> [1] 0.004166667 #> #> $abnormalty #> [1] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [37] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [49] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [73] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [85] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [97] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [109] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [121] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [133] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [145] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [157] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [169] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [181] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [193] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [205] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [217] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [229] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE #>"},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate DBCV Metric — dbcv","title":"Calculate DBCV Metric — dbcv","text":"Compute DBCV (Density-Based Clustering Validation) metric.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate DBCV Metric — dbcv","text":"","code":"dbcv( X, labels, distance = \"euclidean\", noise_id = -1, check_duplicates = FALSE, use_igraph_mst = TRUE, BPPARAM = BiocParallel::SerialParam(), ... )"},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate DBCV Metric — dbcv","text":"X Numeric matrix samples. 
labels Integer vector cluster IDs. distance String specifying distance metric. \"sqeuclidean\", possible method stats::dist(). default \"euclidean\". noise_id Integer, cluster ID y noise (default -1). check_duplicates Logical flag check duplicate samples. use_igraph_mst Logical flag use igraph's MST implementation. Currently mst igraph implemented. BPPARAM BiocParallel params multithreading (default none) ... Ignored","code":""},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate DBCV Metric — dbcv","text":"list: vcs Numeric vector validity index cluster. dbcv Numeric value representing overall DBCV metric.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Calculate DBCV Metric — dbcv","text":"implementation does not fully reproduce results existing implementations (e.g. https://github.com/FelSiq/DBCV) due different algorithms used computing Minimum Spanning Tree.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Calculate DBCV Metric — dbcv","text":"Davoud Moulavi, et al. 2014; 10.1137/1.9781611973440.96.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Calculate DBCV Metric — dbcv","text":"","code":"data(noisy_moon) data <- noisy_moon dbcv(data[, c(\"x\", \"y\")], data$kmeans_label) #> $vcs #> [1] -0.4383721 -0.4077112 #> #> $dbcv #> [1] -0.4230416 #> dbcv(data[, c(\"x\", \"y\")], data$hdbscan_label) #> $vcs #> [1] -0.5889023 0.3726825 0.5500422 0.7884686 0.4887283 0.7682203 #> [7] 0.7246492 0.7246492 0.9349664 0.4749650 0.5500422 0.3726825 #> #> $dbcv #> [1] 0.4214685 #>"},{"path":"https://roseyuan.github.io/poem/reference/dot-check_duplicated_samples.html","id":null,"dir":"Reference","previous_headings":"","what":"Check Duplicated Samples — .check_duplicated_samples","title":"Check Duplicated Samples — .check_duplicated_samples","text":"Checks duplicated samples matrix X.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-check_duplicated_samples.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check Duplicated Samples — .check_duplicated_samples","text":"","code":".check_duplicated_samples(X, threshold = 1e-09)"},{"path":"https://roseyuan.github.io/poem/reference/dot-check_duplicated_samples.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Check Duplicated Samples — .check_duplicated_samples","text":"X Numeric matrix samples. 
threshold Numeric, distance threshold consider samples duplicates.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-check_duplicated_samples.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Check Duplicated Samples — .check_duplicated_samples","text":"None","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_cluster_core_distance.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute Cluster Core Distance — .compute_cluster_core_distance","title":"Compute Cluster Core Distance — .compute_cluster_core_distance","text":"Computes core distance point cluster.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_cluster_core_distance.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute Cluster Core Distance — .compute_cluster_core_distance","text":"","code":".compute_cluster_core_distance(dists, d)"},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_cluster_core_distance.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute Cluster Core Distance — .compute_cluster_core_distance","text":"dists Numeric matrix distances. d Integer, dimensionality.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_cluster_core_distance.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute Cluster Core Distance — .compute_cluster_core_distance","text":"Numeric vector core distances point.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_mutual_reach_dists.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute Mutual Reachability Distances — .compute_mutual_reach_dists","title":"Compute Mutual Reachability Distances — .compute_mutual_reach_dists","text":"Computes mutual reachability distances points.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_mutual_reach_dists.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute Mutual Reachability Distances — .compute_mutual_reach_dists","text":"","code":".compute_mutual_reach_dists(dists, d)"},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_mutual_reach_dists.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute Mutual Reachability Distances — .compute_mutual_reach_dists","text":"dists Numeric matrix distances. 
d Float, dimensionality.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_mutual_reach_dists.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute Mutual Reachability Distances — .compute_mutual_reach_dists","text":"list containing core distances mutual reachability distances.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_pair_to_pair_dists.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute Pair to Pair Distances — .compute_pair_to_pair_dists","title":"Compute Pair to Pair Distances — .compute_pair_to_pair_dists","text":"Compute pairwise distances points matrix X.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_pair_to_pair_dists.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute Pair to Pair Distances — .compute_pair_to_pair_dists","text":"","code":".compute_pair_to_pair_dists(X, distance = \"euclidean\")"},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_pair_to_pair_dists.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute Pair to Pair Distances — .compute_pair_to_pair_dists","text":"X Numeric matrix. distance String specifying metric compute distances.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_pair_to_pair_dists.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute Pair to Pair Distances — .compute_pair_to_pair_dists","text":"Numeric matrix pairwise distances self-distances set Inf.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-convert_singleton_clusters_to_noise.html","id":null,"dir":"Reference","previous_headings":"","what":"Convert Singleton Clusters to Noise — .convert_singleton_clusters_to_noise","title":"Convert Singleton Clusters to Noise — .convert_singleton_clusters_to_noise","text":"Converts clusters containing single instance noise.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-convert_singleton_clusters_to_noise.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Convert Singleton Clusters to Noise — .convert_singleton_clusters_to_noise","text":"","code":".convert_singleton_clusters_to_noise(labels, noise_id)"},{"path":"https://roseyuan.github.io/poem/reference/dot-convert_singleton_clusters_to_noise.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Convert Singleton Clusters to Noise — .convert_singleton_clusters_to_noise","text":"labels Integer vector cluster IDs. 
noise_id Integer, ID noise.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-convert_singleton_clusters_to_noise.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Convert Singleton Clusters to Noise — .convert_singleton_clusters_to_noise","text":"Integer vector singleton clusters converted noise.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_separation.html","id":null,"dir":"Reference","previous_headings":"","what":"Density Separation of a Pair of Clusters — .fn_density_separation","title":"Density Separation of a Pair of Clusters — .fn_density_separation","text":"Computes density separation two clusters.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_separation.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Density Separation of a Pair of Clusters — .fn_density_separation","text":"","code":".fn_density_separation( cls_i, cls_j, dists, internal_core_dists_i, internal_core_dists_j )"},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_separation.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Density Separation of a Pair of Clusters — .fn_density_separation","text":"cls_i Integer, first cluster index. cls_j Integer, second cluster index. dists Numeric matrix distances. internal_core_dists_i Numeric vector core distances cluster . internal_core_dists_j Numeric vector core distances cluster j.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_separation.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Density Separation of a Pair of Clusters — .fn_density_separation","text":"list containing cluster indices density separation.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_sparseness.html","id":null,"dir":"Reference","previous_headings":"","what":"Density Sparseness of a Cluster — .fn_density_sparseness","title":"Density Sparseness of a Cluster — .fn_density_sparseness","text":"Computes density sparseness given cluster.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_sparseness.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Density Sparseness of a Cluster — .fn_density_sparseness","text":"","code":".fn_density_sparseness(cls_inds, dists, d, use_igraph_mst)"},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_sparseness.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Density Sparseness of a Cluster — .fn_density_sparseness","text":"cls_inds Integer vector cluster indices. dists Numeric matrix distances. d Integer, dimensionality. use_igraph_mst Logical flag use MST implementation igraph. 
Currently mst igraph implemented.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_sparseness.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Density Sparseness of a Cluster — .fn_density_sparseness","text":"list containing density sparseness, internal core distances, internal node indices.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-get_internal_objects.html","id":null,"dir":"Reference","previous_headings":"","what":"Get Internal Objects — .get_internal_objects","title":"Get Internal Objects — .get_internal_objects","text":"Computes internal nodes edges using Minimum Spanning Tree.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-get_internal_objects.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get Internal Objects — .get_internal_objects","text":"","code":".get_internal_objects(mutual_reach_dists, use_igraph_mst = TRUE)"},{"path":"https://roseyuan.github.io/poem/reference/dot-get_internal_objects.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get Internal Objects — .get_internal_objects","text":"mutual_reach_dists Numeric matrix representing mutual reachability distances. use_igraph_mst Logical flag use MST implementation igraph. Currently mst igraph implemented.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-get_internal_objects.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Get Internal Objects — .get_internal_objects","text":"list containing indices internal nodes edge weights.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-get_submatrix.html","id":null,"dir":"Reference","previous_headings":"","what":"Get Sub matrix — .get_submatrix","title":"Get Sub matrix — .get_submatrix","text":"Extract sub matrix matrix based optional row column indices.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-get_submatrix.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get Sub matrix — .get_submatrix","text":"","code":".get_submatrix(arr, inds_a = NULL, inds_b = NULL)"},{"path":"https://roseyuan.github.io/poem/reference/dot-get_submatrix.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get Sub matrix — .get_submatrix","text":"arr Numeric matrix. inds_a Optional integer vector row indices. 
inds_b Optional integer vector column indices.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-get_submatrix.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Get Sub matrix — .get_submatrix","text":"Numeric matrix representing sub matrix.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2knn.html","id":null,"dir":"Reference","previous_headings":"","what":"Computes k nearest neighbors from embedding — emb2knn","title":"Computes k nearest neighbors from embedding — emb2knn","text":"Computes k nearest neighbors embedding.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2knn.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Computes k nearest neighbors from embedding — emb2knn","text":"","code":"emb2knn(x, k, BNPARAM = NULL)"},{"path":"https://roseyuan.github.io/poem/reference/emb2knn.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Computes k nearest neighbors from embedding — emb2knn","text":"x numeric matrix (features columns items rows) nearest neighbors computed. k number nearest neighbors. BNPARAM BiocNeighbors parameter object compute kNNs. Ignored unless input matrix data.frame. omitted, Annoy approximation used 500 elements.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2knn.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Computes k nearest neighbors from embedding — emb2knn","text":"knn list.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2knn.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Computes k nearest neighbors from embedding — emb2knn","text":"","code":"d1 <- mockData() emb2knn(as.matrix(d1[,1:2]),k=5) #> $index #> [,1] [,2] [,3] [,4] [,5] #> [1,] 19 3 12 16 2 #> [2,] 16 13 19 12 6 #> [3,] 12 19 4 2 16 #> [4,] 13 12 6 2 18 #> [5,] 10 20 21 8 24 #> [6,] 13 18 2 35 4 #> [7,] 25 24 23 5 10 #> [8,] 21 20 10 5 17 #> [9,] 39 15 30 31 18 #> [10,] 20 5 21 8 24 #> [11,] 31 30 15 14 39 #> [12,] 19 2 3 4 16 #> [13,] 6 18 2 35 4 #> [14,] 31 11 17 30 15 #> [15,] 30 11 31 39 9 #> [16,] 2 19 6 12 13 #> [17,] 22 14 8 31 21 #> [18,] 35 6 13 39 9 #> [19,] 12 2 16 3 13 #> [20,] 10 21 5 8 24 #> [21,] 20 8 5 10 17 #> [22,] 17 26 21 8 14 #> [23,] 25 24 10 5 3 #> [24,] 5 10 20 25 21 #> [25,] 7 24 23 5 10 #> [26,] 22 28 17 14 21 #> [27,] 33 29 34 40 28 #> [28,] 26 32 34 27 22 #> [29,] 33 27 40 36 37 #> [30,] 15 11 31 39 9 #> [31,] 11 30 15 14 9 #> [32,] 38 28 34 26 27 #> [33,] 29 27 40 37 36 #> [34,] 27 28 32 38 29 #> [35,] 18 39 6 13 9 #> [36,] 37 40 33 11 39 #> [37,] 40 11 30 15 31 #> [38,] 32 34 28 26 27 #> [39,] 15 9 30 35 18 #> [40,] 37 11 31 14 30 #> #> $distance #> [,1] [,2] [,3] [,4] [,5] #> [1,] 1.29566420 1.3421067 1.4715387 1.6048505 1.7086970 #> [2,] 0.37452175 0.4444869 0.4458890 0.4612560 0.4666427 #> [3,] 0.62968457 0.7546696 0.9842686 1.0909192 1.3163584 #> [4,] 0.60121886 0.6580306 0.6860728 0.7425212 0.8131569 #> [5,] 0.18657183 0.2548209 0.3146041 0.5396107 0.7037336 #> [6,] 0.08575145 0.3500797 0.4666427 0.4716242 0.6860728 #> [7,] 0.93513257 1.8240751 2.0100110 2.1228417 2.2680189 #> [8,] 0.25218300 0.3587982 0.5079719 0.5396107 0.6249692 #> [9,] 0.36773351 0.4199126 0.4412507 0.5610582 0.5664558 #> [10,] 0.15120930 0.1865718 0.3548648 0.5079719 0.7077195 #> [11,] 0.15292110 0.1869890 0.2106084 0.4888427 0.5383705 #> [12,] 0.25837036 0.4612560 0.6296846 0.6580306 0.7248265 #> [13,] 0.08575145 
0.3825643 0.4444869 0.5295547 0.6012189 #> [14,] 0.40406159 0.4888427 0.5499476 0.6172360 0.6360720 #> [15,] 0.02363391 0.2106084 0.2321602 0.3481019 0.4199126 #> [16,] 0.37452175 0.5664998 0.7091540 0.7248265 0.7269356 #> [17,] 0.43197293 0.5499476 0.6249692 0.6805494 0.6933050 #> [18,] 0.20180947 0.3500797 0.3825643 0.4731934 0.5664558 #> [19,] 0.25837036 0.4458890 0.5664998 0.7546696 0.8562987 #> [20,] 0.15120930 0.2471735 0.2548209 0.3587982 0.8588908 #> [21,] 0.24717347 0.2521830 0.3146041 0.3548648 0.6933050 #> [22,] 0.43197293 0.5359545 0.7748010 0.8458200 0.8870181 #> [23,] 1.13716937 1.1403808 1.8144949 1.8405530 1.8694539 #> [24,] 0.70373359 0.7077195 0.8588908 0.9841743 1.0148838 #> [25,] 0.93513257 0.9841743 1.1371694 1.4900812 1.5841110 #> [26,] 0.53595454 0.8217170 0.8963990 1.1594005 1.2948579 #> [27,] 0.81728422 0.8768693 0.9860296 1.1192155 1.1657922 #> [28,] 0.82171700 0.9478323 1.0768017 1.1657922 1.1850271 #> [29,] 0.37241440 0.8768693 1.3225420 1.4892807 1.5601800 #> [30,] 0.02363391 0.1869890 0.2140727 0.3687323 0.4412507 #> [31,] 0.15292110 0.2140727 0.2321602 0.4040616 0.5610582 #> [32,] 0.55686537 0.9478323 1.1745553 1.3934585 1.8566828 #> [33,] 0.37241440 0.8172842 0.9605452 1.1882100 1.1913554 #> [34,] 0.98602961 1.0768017 1.1745553 1.2587367 1.6696433 #> [35,] 0.20180947 0.4178786 0.4716242 0.5295547 0.6398327 #> [36,] 0.78929140 1.0616034 1.1913554 1.3796854 1.4111328 #> [37,] 0.42274741 0.5916806 0.6659964 0.6777386 0.7444686 #> [38,] 0.55686537 1.2587367 1.4407066 1.9503217 2.1388739 #> [39,] 0.34810191 0.3677335 0.3687323 0.4178786 0.4731934 #> [40,] 0.42274741 0.5691441 0.6853185 0.7250634 0.7344199 #>"},{"path":"https://roseyuan.github.io/poem/reference/emb2snn.html","id":null,"dir":"Reference","previous_headings":"","what":"Computes shared nearest neighbors from embedding — emb2snn","title":"Computes shared nearest neighbors from embedding — emb2snn","text":"computes shared nearest neighbors embedding.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2snn.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Computes shared nearest neighbors from embedding — emb2snn","text":"","code":"emb2snn(x, k, type = \"rank\", BNPARAM = NULL)"},{"path":"https://roseyuan.github.io/poem/reference/emb2snn.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Computes shared nearest neighbors from embedding — emb2snn","text":"x numeric matrix (features columns items rows) nearest neighbors computed. k number nearest neighbors. type string specifying type weighting scheme use shared neighbors. Possible choices include \"rank\", \"number\", \"jaccard\". See type bluster::neighborsToSNNGraph() details. BNPARAM BiocNeighbors parameter object compute kNNs. Ignored unless input matrix data.frame. 
omitted, Annoy approximation used 500 elements.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2snn.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Computes shared nearest neighbors from embedding — emb2snn","text":"igraph object.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2snn.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Computes shared nearest neighbors from embedding — emb2snn","text":"","code":"d1 <- mockData() emb2snn(as.matrix(d1[,1:2]),k=5) #> IGRAPH 1ee951a U-W- 40 257 -- #> + attr: weight (e/n) #> + edges from 1ee951a: #> [1] 1-- 3 3-- 4 2-- 4 1-- 5 3-- 6 1-- 6 4-- 6 5-- 7 1-- 7 6-- 7 #> [11] 3-- 7 4-- 7 4-- 8 3-- 8 7-- 8 5-- 9 7-- 9 8-- 9 2--10 3--10 #> [21] 6--10 9--11 8--11 7--11 4--11 3--11 6--12 7--12 1--12 3--12 #> [31] 4--12 11--12 8--12 2--13 10--13 4--13 8--14 4--14 11--14 9--14 #> [41] 13--15 2--15 10--15 4--15 8--15 3--15 15--16 2--16 13--16 4--16 #> [51] 10--16 8--16 3--16 3--17 10--17 6--17 12--17 4--17 1--17 7--17 #> [61] 17--18 6--18 1--18 12--18 3--18 7--18 9--19 11--19 7--19 8--19 #> [71] 5--19 4--19 3--19 12--19 14--19 1--19 2--20 10--20 13--20 15--20 #> + ... omitted several edges"},{"path":"https://roseyuan.github.io/poem/reference/findSpatialKNN.html","id":null,"dir":"Reference","previous_headings":"","what":"Find the k nearest spatial neighbors — findSpatialKNN","title":"Find the k nearest spatial neighbors — findSpatialKNN","text":"given dataset, find k nearest neighbors object based spatial locations, option handling ties.","code":""},{"path":"https://roseyuan.github.io/poem/reference/findSpatialKNN.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Find the k nearest spatial neighbors — findSpatialKNN","text":"","code":"findSpatialKNN( location, k, keep_ties = TRUE, useMedianDist = FALSE, BNPARAM = NULL )"},{"path":"https://roseyuan.github.io/poem/reference/findSpatialKNN.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Find the k nearest spatial neighbors — findSpatialKNN","text":"location numeric data matrix containing location information, rows points columns location dimensions. k number nearest neighbors look . keep_ties Boolean indicating ties counted . TRUE, neighbors distances included even means returning k neighbors. useMedianDist Use median distance k nearest neighbor maximum distance included. Ignored keep_ties=FALSE. BNPARAM BNPARAM object passed findKNN specifying kNN approximation method use. 
Defaults exact small datasets, Annoy larger ones.","code":""},{"path":"https://roseyuan.github.io/poem/reference/findSpatialKNN.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Find the k nearest spatial neighbors — findSpatialKNN","text":"list indices.","code":""},{"path":"https://roseyuan.github.io/poem/reference/findSpatialKNN.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Find the k nearest spatial neighbors — findSpatialKNN","text":"","code":"data(sp_toys) data <- sp_toys findSpatialKNN(data[,c(\"x\", \"y\")], k=6) #> [[1]] #> [1] 2 16 17 31 32 3 #> #> [[2]] #> [1] 1 16 17 3 18 32 #> #> [[3]] #> [1] 4 18 17 2 19 16 33 #> #> [[4]] #> [1] 3 5 18 19 17 20 34 #> #> [[5]] #> [1] 4 19 20 6 18 35 #> #> [[6]] #> [1] 7 20 21 5 22 36 #> #> [[7]] #> [1] 6 8 21 22 20 23 37 #> #> [[8]] #> [1] 7 9 22 23 21 24 38 #> #> [[9]] #> [1] 8 10 23 24 22 25 #> #> [[10]] #> [1] 9 24 25 11 23 26 40 #> #> [[11]] #> [1] 26 25 10 12 24 27 41 #> #> [[12]] #> [1] 13 27 26 11 28 42 #> #> [[13]] #> [1] 12 27 28 14 26 29 43 #> #> [[14]] #> [1] 15 29 28 13 30 27 44 #> #> [[15]] #> [1] 14 29 30 28 45 13 44 #> #> [[16]] #> [1] 17 2 32 1 31 3 33 46 #> #> [[17]] #> [1] 16 2 32 3 33 18 #> #> [[18]] #> [1] 19 3 4 33 34 17 #> #> [[19]] #> [1] 18 4 5 34 35 20 #> #> [[20]] #> [1] 5 6 35 36 19 21 #> #> [[21]] #> [1] 7 22 37 6 36 20 #> #> [[22]] #> [1] 8 21 23 38 7 37 #> #> [[23]] #> [1] 9 22 24 39 8 38 #> #> [[24]] #> [1] 25 10 23 40 9 39 #> #> [[25]] #> [1] 24 10 40 11 41 26 #> #> [[26]] #> [1] 11 41 12 42 25 27 #> #> [[27]] #> [1] 28 12 13 42 43 26 #> #> [[28]] #> [1] 27 13 43 14 44 29 #> #> [[29]] #> [1] 30 14 15 44 45 28 #> #> [[30]] #> [1] 29 15 45 14 44 60 #> #> [[31]] #> [1] 32 16 46 17 47 1 61 #> #> [[32]] #> [1] 16 17 31 46 47 33 #> #> [[33]] #> [1] 34 18 48 17 47 32 #> #> [[34]] #> [1] 33 35 18 19 48 49 #> #> [[35]] #> [1] 34 19 49 20 50 36 #> #> [[36]] #> [1] 37 20 21 50 51 35 #> #> [[37]] #> [1] 21 36 38 51 22 52 #> #> [[38]] #> [1] 22 37 39 52 23 53 #> #> [[39]] #> [1] 23 38 40 53 24 54 #> #> [[40]] #> [1] 24 25 39 54 55 41 #> #> [[41]] #> [1] 26 56 25 55 40 42 #> #> [[42]] #> [1] 43 27 57 26 56 41 #> #> [[43]] #> [1] 42 27 28 57 58 44 #> #> [[44]] #> [1] 45 29 59 28 58 43 #> #> [[45]] #> [1] 44 29 30 59 60 15 28 58 75 #> #> [[46]] #> [1] 47 32 62 31 61 16 33 63 76 #> #> [[47]] #> [1] 46 32 62 33 63 48 #> #> [[48]] #> [1] 49 33 34 63 64 47 #> #> [[49]] #> [1] 48 34 35 64 65 50 #> #> [[50]] #> [1] 35 36 65 66 49 51 #> #> [[51]] #> [1] 37 52 67 36 66 50 #> #> [[52]] #> [1] 38 51 53 68 37 67 #> #> [[53]] #> [1] 39 52 54 69 38 68 #> #> [[54]] #> [1] 55 40 53 70 39 69 #> #> [[55]] #> [1] 54 40 70 41 71 56 #> #> [[56]] #> [1] 41 71 42 72 55 57 #> #> [[57]] #> [1] 58 42 43 72 73 56 #> #> [[58]] #> [1] 57 43 73 44 74 59 #> #> [[59]] #> [1] 60 44 45 74 75 58 #> #> [[60]] #> [1] 59 45 75 44 74 30 90 #> #> [[61]] #> [1] 62 46 76 47 77 31 91 #> #> [[62]] #> [1] 46 47 61 76 77 63 #> #> [[63]] #> [1] 64 48 78 47 77 62 #> #> [[64]] #> [1] 63 65 48 49 78 79 #> #> [[65]] #> [1] 64 49 79 50 80 66 #> #> [[66]] #> [1] 67 50 51 80 81 65 #> #> [[67]] #> [1] 51 66 68 81 52 82 #> #> [[68]] #> [1] 52 67 69 82 53 83 #> #> [[69]] #> [1] 53 68 70 83 54 84 #> #> [[70]] #> [1] 54 55 69 84 85 71 #> #> [[71]] #> [1] 56 86 55 85 70 72 #> #> [[72]] #> [1] 73 57 87 56 86 71 #> #> [[73]] #> [1] 72 57 58 87 88 74 #> #> [[74]] #> [1] 75 59 89 58 88 73 #> #> [[75]] #> [1] 74 59 60 89 90 45 58 88 105 #> #> [[76]] #> [1] 77 62 92 61 91 46 63 93 106 #> #> [[77]] #> [1] 76 62 
92 63 93 78 #> #> [[78]] #> [1] 79 63 64 93 94 77 #> #> [[79]] #> [1] 78 64 65 94 95 80 #> #> [[80]] #> [1] 65 66 95 96 79 81 #> #> [[81]] #> [1] 67 82 97 66 96 80 #> #> [[82]] #> [1] 68 81 83 98 67 97 #> #> [[83]] #> [1] 69 82 84 99 68 98 #> #> [[84]] #> [1] 85 70 83 100 69 99 #> #> [[85]] #> [1] 84 70 100 71 101 86 #> #> [[86]] #> [1] 71 101 72 102 85 87 #> #> [[87]] #> [1] 88 72 73 102 103 86 #> #> [[88]] #> [1] 87 73 103 74 104 89 #> #> [[89]] #> [1] 90 74 75 104 105 88 #> #> [[90]] #> [1] 89 75 105 74 104 60 120 #> #> [[91]] #> [1] 92 76 106 77 107 61 121 #> #> [[92]] #> [1] 76 77 91 106 107 93 #> #> [[93]] #> [1] 94 78 108 77 107 92 #> #> [[94]] #> [1] 93 95 78 79 108 109 #> #> [[95]] #> [1] 94 79 109 80 110 96 #> #> [[96]] #> [1] 97 80 81 110 111 95 #> #> [[97]] #> [1] 81 96 98 111 82 112 #> #> [[98]] #> [1] 82 97 99 112 83 113 #> #> [[99]] #> [1] 83 98 100 113 84 114 #> #> [[100]] #> [1] 84 85 99 114 115 101 #> #> [[101]] #> [1] 86 116 85 115 100 102 #> #> [[102]] #> [1] 103 87 117 86 116 101 #> #> [[103]] #> [1] 102 87 88 117 118 104 #> #> [[104]] #> [1] 105 89 119 88 118 103 #> #> [[105]] #> [1] 104 89 90 119 120 75 88 118 135 #> #> [[106]] #> [1] 107 92 122 91 121 76 93 123 136 #> #> [[107]] #> [1] 106 92 122 93 123 108 #> #> [[108]] #> [1] 109 93 94 123 124 107 #> #> [[109]] #> [1] 108 94 95 124 125 110 #> #> [[110]] #> [1] 95 96 125 126 109 111 #> #> [[111]] #> [1] 97 112 127 96 126 110 #> #> [[112]] #> [1] 98 111 113 128 97 127 #> #> [[113]] #> [1] 99 112 114 129 98 128 #> #> [[114]] #> [1] 115 100 113 130 99 129 #> #> [[115]] #> [1] 114 100 130 101 131 116 #> #> [[116]] #> [1] 101 131 102 132 115 117 #> #> [[117]] #> [1] 118 102 103 132 133 116 #> #> [[118]] #> [1] 117 103 133 104 134 119 #> #> [[119]] #> [1] 120 104 105 134 135 118 #> #> [[120]] #> [1] 119 105 135 104 134 90 150 #> #> [[121]] #> [1] 122 106 136 107 137 91 151 #> #> [[122]] #> [1] 106 107 121 136 137 123 #> #> [[123]] #> [1] 124 108 138 107 137 122 #> #> [[124]] #> [1] 123 125 108 109 138 139 #> #> [[125]] #> [1] 124 109 139 110 140 126 #> #> [[126]] #> [1] 127 110 111 140 141 125 #> #> [[127]] #> [1] 111 126 128 141 112 142 #> #> [[128]] #> [1] 112 127 129 142 113 143 #> #> [[129]] #> [1] 113 128 130 143 114 144 #> #> [[130]] #> [1] 114 115 129 144 145 131 #> #> [[131]] #> [1] 116 146 115 145 130 132 #> #> [[132]] #> [1] 133 117 147 116 146 131 #> #> [[133]] #> [1] 132 117 118 147 148 134 #> #> [[134]] #> [1] 135 119 149 118 148 133 #> #> [[135]] #> [1] 134 119 120 149 150 105 118 148 165 #> #> [[136]] #> [1] 137 122 152 121 151 106 123 153 166 #> #> [[137]] #> [1] 136 122 152 123 153 138 #> #> [[138]] #> [1] 139 123 124 153 154 137 #> #> [[139]] #> [1] 138 124 125 154 155 140 #> #> [[140]] #> [1] 125 126 155 156 139 141 #> #> [[141]] #> [1] 127 142 157 126 156 140 #> #> [[142]] #> [1] 128 141 143 158 127 157 #> #> [[143]] #> [1] 129 142 144 159 128 158 #> #> [[144]] #> [1] 145 130 143 160 129 159 #> #> [[145]] #> [1] 144 130 160 131 161 146 #> #> [[146]] #> [1] 131 161 132 162 145 147 #> #> [[147]] #> [1] 148 132 133 162 163 146 #> #> [[148]] #> [1] 147 133 163 134 164 149 #> #> [[149]] #> [1] 150 134 135 164 165 148 #> #> [[150]] #> [1] 149 135 165 134 164 120 180 #> #> [[151]] #> [1] 152 136 166 137 167 121 181 #> #> [[152]] #> [1] 136 137 151 166 167 153 #> #> [[153]] #> [1] 154 138 168 137 167 152 #> #> [[154]] #> [1] 153 155 138 139 168 169 #> #> [[155]] #> [1] 154 139 169 140 170 156 #> #> [[156]] #> [1] 157 140 141 170 171 155 #> #> [[157]] #> [1] 141 156 158 171 142 172 #> #> [[158]] #> [1] 142 157 
159 172 143 173 #> #> [[159]] #> [1] 143 158 160 173 144 174 #> #> [[160]] #> [1] 144 145 159 174 175 161 #> #> [[161]] #> [1] 146 176 145 175 160 162 #> #> [[162]] #> [1] 163 147 177 146 176 161 #> #> [[163]] #> [1] 162 147 148 177 178 164 #> #> [[164]] #> [1] 165 149 179 148 178 163 #> #> [[165]] #> [1] 164 149 150 179 180 135 148 178 195 #> #> [[166]] #> [1] 167 152 182 151 181 136 153 183 196 #> #> [[167]] #> [1] 166 152 182 153 183 168 #> #> [[168]] #> [1] 169 153 154 183 184 167 #> #> [[169]] #> [1] 168 154 155 184 185 170 #> #> [[170]] #> [1] 155 156 185 186 169 171 #> #> [[171]] #> [1] 157 172 187 156 186 170 #> #> [[172]] #> [1] 158 171 173 188 157 187 #> #> [[173]] #> [1] 159 172 174 189 158 188 #> #> [[174]] #> [1] 175 160 173 190 159 189 #> #> [[175]] #> [1] 174 160 190 161 191 176 #> #> [[176]] #> [1] 161 191 162 192 175 177 #> #> [[177]] #> [1] 178 162 163 192 193 176 #> #> [[178]] #> [1] 177 163 193 164 194 179 #> #> [[179]] #> [1] 180 164 165 194 195 178 #> #> [[180]] #> [1] 179 165 195 164 194 150 210 #> #> [[181]] #> [1] 182 166 196 167 197 151 211 #> #> [[182]] #> [1] 166 167 181 196 197 183 #> #> [[183]] #> [1] 184 168 198 167 197 182 #> #> [[184]] #> [1] 183 185 168 169 198 199 #> #> [[185]] #> [1] 184 169 199 170 200 186 #> #> [[186]] #> [1] 187 170 171 200 201 185 #> #> [[187]] #> [1] 171 186 188 201 172 202 #> #> [[188]] #> [1] 172 187 189 202 173 203 #> #> [[189]] #> [1] 173 188 190 203 174 204 #> #> [[190]] #> [1] 174 175 189 204 205 191 #> #> [[191]] #> [1] 176 206 175 205 190 192 #> #> [[192]] #> [1] 193 177 207 176 206 191 #> #> [[193]] #> [1] 192 177 178 207 208 194 #> #> [[194]] #> [1] 195 179 209 178 208 193 #> #> [[195]] #> [1] 194 179 180 209 210 165 178 208 225 #> #> [[196]] #> [1] 197 182 212 181 211 166 183 213 226 #> #> [[197]] #> [1] 196 182 212 183 213 198 #> #> [[198]] #> [1] 199 183 184 213 214 197 #> #> [[199]] #> [1] 198 184 185 214 215 200 #> #> [[200]] #> [1] 185 186 215 216 199 201 #> #> [[201]] #> [1] 187 202 217 186 216 200 #> #> [[202]] #> [1] 188 201 203 218 187 217 #> #> [[203]] #> [1] 189 202 204 219 188 218 #> #> [[204]] #> [1] 205 190 203 220 189 219 #> #> [[205]] #> [1] 204 190 220 191 221 206 #> #> [[206]] #> [1] 191 221 192 222 205 207 #> #> [[207]] #> [1] 208 192 193 222 223 206 #> #> [[208]] #> [1] 207 193 223 194 224 209 #> #> [[209]] #> [1] 210 194 195 224 225 208 #> #> [[210]] #> [1] 209 195 225 194 224 180 240 #> #> [[211]] #> [1] 212 196 226 197 227 181 #> #> [[212]] #> [1] 196 197 211 226 227 213 #> #> [[213]] #> [1] 214 198 228 197 227 212 #> #> [[214]] #> [1] 213 215 198 199 228 229 #> #> [[215]] #> [1] 214 199 229 200 230 216 #> #> [[216]] #> [1] 217 200 201 230 231 215 #> #> [[217]] #> [1] 201 216 218 231 202 232 #> #> [[218]] #> [1] 202 217 219 232 203 233 #> #> [[219]] #> [1] 203 218 220 233 204 234 #> #> [[220]] #> [1] 204 205 219 234 235 221 #> #> [[221]] #> [1] 206 236 205 235 220 222 #> #> [[222]] #> [1] 223 207 237 206 236 221 #> #> [[223]] #> [1] 222 207 208 237 238 224 #> #> [[224]] #> [1] 225 209 239 208 238 223 #> #> [[225]] #> [1] 224 209 210 239 240 195 208 238 #> #> [[226]] #> [1] 227 212 211 196 213 197 228 #> #> [[227]] #> [1] 226 212 213 228 211 197 214 #> #> [[228]] #> [1] 229 213 214 227 215 198 212 #> #> [[229]] #> [1] 228 214 215 230 213 199 #> #> [[230]] #> [1] 215 216 229 231 200 214 217 #> #> [[231]] #> [1] 217 232 216 230 218 201 #> #> [[232]] #> [1] 218 231 233 217 219 202 216 #> #> [[233]] #> [1] 219 232 234 218 220 203 217 #> #> [[234]] #> [1] 235 220 233 219 204 218 221 #> #> [[235]] #> [1] 
234 220 221 236 219 205 #> #> [[236]] #> [1] 221 222 235 237 206 220 223 #> #> [[237]] #> [1] 238 222 223 236 207 221 224 #> #> [[238]] #> [1] 237 223 224 239 222 208 225 #> #> [[239]] #> [1] 240 224 225 238 209 223 #> #> [[240]] #> [1] 239 225 224 210 209 238 #>"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"Computes fuzzy-hard versions pair-sorting partition metrics compare hard clustering fuzzy hard truth. especially designed cases fuzzy truth represents uncertainty hard truth. Briefly put, maximum pair concordance clustering either hard fuzzy truth used, hard truth used compute completeness. See fuzzyPartitionMetrics standard implementation metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"","code":"fuzzyHardMetrics( hardTrue, fuzzyTrue, hardPred, nperms = NULL, returnElementPairAccuracy = FALSE, verbose = TRUE, BPPARAM = BiocParallel::SerialParam() )"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"hardTrue atomic vector coercible factor integer vector containing true hard labels. Must length hardPred. fuzzyTrue object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows length hardTrue. hardPred atomic vector coercible factor integer vector containing predicted hard labels. nperms number permutations (correction chance). NULL (default), first set 10 permutations run estimate whether variation across permutations 0.0025, case (max 1000) permutations run. returnElementPairAccuracy Logical. TRUE, returns per-element pair accuracy instead various parition-level dataset-level metrics. Default FALSE. verbose Logical; whether print info warnings, including standard error mean across permutations (giving idea precision adjusted metrics). BPPARAM BiocParallel params multithreading (default none)","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"list metrics: NDC Hullermeier's NDC (fuzzy rand index) ACI Ambrosio's Adjusted Concordance Index (ACI), .e. permutation-based fuzzy version adjusted Rand index. fuzzyWH Fuzzy Wallace Homogeneity index fuzzyWC Fuzzy Wallace Completeness index fuzzyAWH Adjusted fuzzy Wallace Homogeneity index fuzzyAWC Adjusted fuzzy Wallace Completeness index","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"Hullermeier et al. 2012; 10.1109/TFUZZ.2011.2179303; D'Ambrosio et al. 
2021; 10.1007/s00357-020-09367-0","code":""},{"path":[]},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":"author","dir":"Reference","previous_headings":"","what":"Author","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"Pierre-Luc Germain","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"","code":"# generate a fuzzy truth: fuzzyTrue <- matrix(c( 0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) # a hard truth: hardTrue <- apply(fuzzyTrue,1,FUN=which.max) # some predicted labels: hardPred <- c(1,1,1,1,1,1,2,2,2) fuzzyHardMetrics(hardTrue, fuzzyTrue, hardPred, nperms=3) #> Standard error of the mean NDC across permutations:0.0352 #> You might want to increase the number of permutations to increase the robustness of the adjusted metrics. #> $NDC #> [1] 0.7581944 #> #> $ACI #> [1] 0.5394992 #> #> $fuzzyWH #> $fuzzyWH$global #> [1] 0.8436111 #> #> $fuzzyWH$perPartition #> 1 2 #> 1.00000000 0.06166667 #> #> #> $fuzzyWC #> $fuzzyWC$global #> [1] 0.7322727 #> #> $fuzzyWC$perPartition #> 1 2 3 #> 0.7195238 1.0000000 1.0000000 #> #> #> $fuzzyAWH #> $fuzzyAWH$global #> [1] 0.6403322 #> #> $fuzzyAWH$perPartition #> 1 2 #> 1.000000 -3.456464 #> #> #> $fuzzyAWC #> $fuzzyAWC$global #> [1] 0.4682516 #> #> $fuzzyAWC$perPartition #> 1 2 3 #> 0.4682516 NaN NaN #> #>"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotAgreement.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element maximal agreement between a hard and a fuzzy partition — fuzzyHardSpotAgreement","title":"Per-element maximal agreement between a hard and a fuzzy partition — fuzzyHardSpotAgreement","text":"Per-element maximal agreement hard clustering hard fuzzy ground truth labels.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotAgreement.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element maximal agreement between a hard and a fuzzy partition — fuzzyHardSpotAgreement","text":"","code":"fuzzyHardSpotAgreement( hardTrue, fuzzyTrue, hardPred, useNegatives = TRUE, verbose = TRUE )"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotAgreement.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element maximal agreement between a hard and a fuzzy partition — fuzzyHardSpotAgreement","text":"hardTrue vector true cluster labels fuzzyTrue object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows length hardTrue. hardPred vector predicted cluster labels useNegatives Logical; whether include negative pairs agreement score (tends result larger overall agreement lower dynamic range score). Default TRUE. 
verbose Logical; whether print expected memory usage large datasets.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotAgreement.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element maximal agreement between a hard and a fuzzy partition — fuzzyHardSpotAgreement","text":"numeric vector agreement scores element hardPred","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotAgreement.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Per-element maximal agreement between a hard and a fuzzy partition — fuzzyHardSpotAgreement","text":"","code":"# generate a fuzzy truth: fuzzyTrue <- matrix(c( 0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) # a hard truth: hardTrue <- apply(fuzzyTrue,1,FUN=which.max) # some predicted labels: hardPred <- c(1,1,1,1,1,1,2,2,2) fuzzyHardSpotAgreement(hardTrue, fuzzyTrue, hardPred) #> 1 2 3 4 5 6 7 8 9 #> 0.87750 0.87625 0.87625 0.87750 0.87875 0.87750 0.76625 0.02750 0.76625"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotConcordance.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element maximal concordance between a hard and a fuzzy partition — fuzzyHardSpotConcordance","title":"Per-element maximal concordance between a hard and a fuzzy partition — fuzzyHardSpotConcordance","text":"Per-element maximal concordance hard clustering hard fuzzy ground truth labels.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotConcordance.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element maximal concordance between a hard and a fuzzy partition — fuzzyHardSpotConcordance","text":"","code":"fuzzyHardSpotConcordance( hardTrue, fuzzyTrue, hardPred, useNegatives = TRUE, verbose = TRUE )"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotConcordance.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element maximal concordance between a hard and a fuzzy partition — fuzzyHardSpotConcordance","text":"hardTrue vector true cluster labels fuzzyTrue object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows length hardTrue. hardPred vector predicted cluster labels useNegatives Logical; whether include negative pairs concordance score (tends result larger overall concordance lower dynamic range score). Default TRUE. 
verbose Logical; whether print expected memory usage large datasets.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotConcordance.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element maximal concordance between a hard and a fuzzy partition — fuzzyHardSpotConcordance","text":"numeric vector concordance scores element hardPred","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotConcordance.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Per-element maximal concordance between a hard and a fuzzy partition — fuzzyHardSpotConcordance","text":"","code":"# generate a fuzzy truth: fuzzyTrue <- matrix(c( 0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) # a hard truth: hardTrue <- apply(fuzzyTrue,1,FUN=which.max) # some predicted labels: hardPred <- c(1,1,1,1,1,1,2,2,2) fuzzyHardSpotConcordance(hardTrue, fuzzyTrue, hardPred) #> 1 2 3 4 5 6 7 8 9 #> 0.87750 0.87625 0.87625 0.87750 0.87875 0.87750 0.76625 0.02750 0.76625"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"Computes fuzzy versions pair-sorting partition metrics. largely based permutation-based implementation Antonio D'Ambrosio ConsRankClass package, modified also compute fuzzy versions adjusted Wallace indices, implement multithreading, adjust number permutations according variability.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"","code":"fuzzyPartitionMetrics( P, Q, computeWallace = TRUE, nperms = NULL, verbose = TRUE, returnElementPairAccuracy = FALSE, BPPARAM = BiocParallel::SerialParam(), tnorm = c(\"product\", \"min\", \"lukasiewicz\") )"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"P object coercible numeric matrix membership probability elements (rows) ground-truth classes (columns). Q object coercible numeric matrix membership probability elements (rows) predicted clusters (columns). Must number rows P. computeWallace Logical; whether compute individual fuzzy versions Wallace indices (increases running time). nperms number permutations (correction chance). NULL (default), first set 10 permutations run estimate whether variation across permutations 0.0025, case (max 1000) permutations run. verbose Logical; whether print info warnings, including standard error mean across permutations (giving idea precision adjusted metrics). returnElementPairAccuracy Logical. TRUE, returns per-element pair accuracy instead various parition-level dataset-level metrics. Default FALSE. BPPARAM BiocParallel params multithreading (default none) tnorm type t-norm operation use class membership pairs (either product, min, lukasiewicz) calculating Wallace indices. 
influence NDC/ACI metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"returnElementPairAccuracy FALSE, return list metrics: NDC Hullermeier's NDC (fuzzy rand index) ACI Ambrosio's Adjusted Concordance Index (ACI), .e. permutation-based fuzzy version adjusted Rand index. fuzzyWH Fuzzy Wallace Homogeneity index fuzzyWC Fuzzy Wallace Completeness index fuzzyAWH Adjusted fuzzy Wallace Homogeneity index fuzzyAWC Adjusted fuzzy Wallace Completeness index","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"Hullermeier et al. 2012; 10.1109/TFUZZ.2011.2179303; D'Ambrosio et al. 2021; 10.1007/s00357-020-09367-0","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":"author","dir":"Reference","previous_headings":"","what":"Author","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"Pierre-Luc Germain","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"","code":"# generate fuzzy partitions: m1 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) m2 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.025, 0.95, 0.025, 0.02, 0.96, 0.02, 0.01, 0.98, 0.01, 0.05, 0.05, 0.95, 0.02, 0.02, 0.96, 0.01, 0.01, 0.98), ncol = 3, byrow=TRUE) colnames(m1) <- colnames(m2) <- LETTERS[1:3] fuzzyPartitionMetrics(m1,m2) #> Running 100 extra permutations. 
#> Standard error of the mean NDC across permutations:0.00212 #> $NDC #> [1] 0.5338889 #> #> $ACI #> [1] 0.08344258 #> #> $fuzzyWH #> $fuzzyWH$global #> [1] 0.6761188 #> #> $fuzzyWH$perPartition #> A B C #> 0.9359492 0.9214151 0.1588990 #> #> #> $fuzzyWC #> $fuzzyWC$global #> [1] 0.3505049 #> #> $fuzzyWC$perPartition #> A B C #> 0.3445840 0.7242508 0.7520319 #> #> #> $fuzzyAWH #> $fuzzyAWH$global #> [1] 0.2114734 #> #> $fuzzyAWH$perPartition #> A B C #> 0.8554001 0.8021785 -1.1479958 #> #> #> $fuzzyAWC #> $fuzzyAWC$global #> [1] 0.04973933 #> #> $fuzzyAWC$perPartition #> A B C #> 0.05005958 -0.03133296 0.02342049 #> #>"},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotAgreement.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element agreement between two fuzzy partitions — fuzzySpotAgreement","title":"Per-element agreement between two fuzzy partitions — fuzzySpotAgreement","text":"Per-element agreement two fuzzy partitionings","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotAgreement.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element agreement between two fuzzy partitions — fuzzySpotAgreement","text":"","code":"fuzzySpotAgreement(P, Q)"},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotAgreement.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element agreement between two fuzzy partitions — fuzzySpotAgreement","text":"P object coercible numeric matrix membership probability elements (rows) clusters (columns) Q object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows P","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotAgreement.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element agreement between two fuzzy partitions — fuzzySpotAgreement","text":"numeric vector agreement scores row P.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotAgreement.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Per-element agreement between two fuzzy partitions — fuzzySpotAgreement","text":"","code":"# generate fuzzy partitions: m1 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) m2 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.025, 0.95, 0.025, 0.02, 0.96, 0.02, 0.01, 0.98, 0.01, 0.05, 0.05, 0.95, 0.02, 0.02, 0.96, 0.01, 0.01, 0.98), ncol = 3, byrow=TRUE) colnames(m1) <- colnames(m2) <- LETTERS[1:3] fuzzySpotAgreement(m1,m2) #> 1 2 3 4 5 6 7 8 #> 0.538750 0.527500 0.535000 0.536250 0.530000 0.526250 0.775625 0.075625 #> 9 #> 0.760000"},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotConcordance.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element concordance between two fuzzy partitions — fuzzySpotConcordance","title":"Per-element concordance between two fuzzy partitions — fuzzySpotConcordance","text":"Per-element concordance two fuzzy partitionings","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotConcordance.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element concordance between two fuzzy partitions — fuzzySpotConcordance","text":"","code":"fuzzySpotConcordance(P, 
Q)"},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotConcordance.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element concordance between two fuzzy partitions — fuzzySpotConcordance","text":"P object coercible numeric matrix membership probability elements (rows) clusters (columns) Q object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows P","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotConcordance.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element concordance between two fuzzy partitions — fuzzySpotConcordance","text":"numeric vector concordance scores row P.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotConcordance.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Per-element concordance between two fuzzy partitions — fuzzySpotConcordance","text":"","code":"# generate fuzzy partitions: m1 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) m2 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.025, 0.95, 0.025, 0.02, 0.96, 0.02, 0.01, 0.98, 0.01, 0.05, 0.05, 0.95, 0.02, 0.02, 0.96, 0.01, 0.01, 0.98), ncol = 3, byrow=TRUE) colnames(m1) <- colnames(m2) <- LETTERS[1:3] fuzzySpotConcordance(m1,m2) #> 1 2 3 4 5 6 7 8 #> 0.538750 0.527500 0.535000 0.536250 0.530000 0.526250 0.775625 0.075625 #> 9 #> 0.760000"},{"path":"https://roseyuan.github.io/poem/reference/getAgreement.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element agreement score — getAgreement","title":"Per-element agreement score — getAgreement","text":"Per-element agreement clustering ground truth","code":""},{"path":"https://roseyuan.github.io/poem/reference/getAgreement.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element agreement score — getAgreement","text":"","code":"getAgreement(true, pred, usePairs = TRUE, useNegatives = FALSE, adjust = FALSE)"},{"path":"https://roseyuan.github.io/poem/reference/getAgreement.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element agreement score — getAgreement","text":"true vector true class labels pred vector predicted clusters usePairs Logical; whether compute pairs instead elements Recommended TRUE default. useNegatives Logical; whether include consistency negative pairs score (default FALSE). adjust Logical; whether adjust chance. 
implemented useNegatives=FALSE (make sense element-level otherwise).","code":""},{"path":"https://roseyuan.github.io/poem/reference/getAgreement.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element agreement score — getAgreement","text":"vector agreement scores","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingClassMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getEmbeddingClassMetrics — getEmbeddingClassMetrics","title":"getEmbeddingClassMetrics — getEmbeddingClassMetrics","text":"Computes class-level, embedding-based metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingClassMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getEmbeddingClassMetrics — getEmbeddingClassMetrics","text":"","code":"getEmbeddingClassMetrics( x, labels, metrics = c(\"meanSW\", \"minSW\", \"pnSW\", \"dbcv\"), distance = \"euclidean\", ... )"},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingClassMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getEmbeddingClassMetrics — getEmbeddingClassMetrics","text":"x data.frame matrix (features columns items rows) metrics computed. labels vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. distance distance metric use (default euclidean). ... Optional arguments. See details.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingClassMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getEmbeddingClassMetrics — getEmbeddingClassMetrics","text":"data.frame metrics node/element x.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingElementMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getEmbeddingElementMetrics — getEmbeddingElementMetrics","title":"getEmbeddingElementMetrics — getEmbeddingElementMetrics","text":"Computes element-level, embedding-based metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingElementMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getEmbeddingElementMetrics — getEmbeddingElementMetrics","text":"","code":"getEmbeddingElementMetrics( x, labels, metrics = c(\"SW\"), distance = \"euclidean\", ... )"},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingElementMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getEmbeddingElementMetrics — getEmbeddingElementMetrics","text":"x data.frame matrix (features columns items rows) metrics computed. labels vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. Currently, silhouette width supported node-level. distance distance metric use (default euclidean). ... Optional arguments. 
See details.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingElementMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getEmbeddingElementMetrics — getEmbeddingElementMetrics","text":"data.frame metrics node/element x.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingGlobalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getEmbeddingGlobalMetrics — getEmbeddingGlobalMetrics","title":"getEmbeddingGlobalMetrics — getEmbeddingGlobalMetrics","text":"Computes dataset-level, embedding-based metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingGlobalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getEmbeddingGlobalMetrics — getEmbeddingGlobalMetrics","text":"","code":"getEmbeddingGlobalMetrics( x, labels, metrics = c(\"meanSW\", \"meanClassSW\", \"pnSW\", \"minClassSW\", \"cdbw\", \"cohesion\", \"compactness\", \"sep\", \"dbcv\"), distance = \"euclidean\", ... )"},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingGlobalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getEmbeddingGlobalMetrics — getEmbeddingGlobalMetrics","text":"x data.frame matrix (features columns items rows) metrics computed. labels vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. distance distance metric use (default euclidean). ... Optional arguments. See details.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingGlobalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getEmbeddingGlobalMetrics — getEmbeddingGlobalMetrics","text":"data.frame (1 row) metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute embedding-based metrics — getEmbeddingMetrics","title":"Compute embedding-based metrics — getEmbeddingMetrics","text":"Computes embedding-based metrics specified level.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute embedding-based metrics — getEmbeddingMetrics","text":"","code":"getEmbeddingMetrics( x, labels, metrics = c(\"meanSW\", \"minSW\", \"pnSW\", \"dbcv\"), distance = \"euclidean\", level = \"class\", ... )"},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute embedding-based metrics — getEmbeddingMetrics","text":"x data.frame matrix (features columns items rows) metrics computed. labels vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. See details. distance distance metric use (default euclidean). level level calculate metrics. Options include \"element\", \"class\" \"dataset\". ... Optional arguments. 
See details.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute embedding-based metrics — getEmbeddingMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingMetrics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Compute embedding-based metrics — getEmbeddingMetrics","text":"allowed values metrics depend value level: level = \"element\", allowed metrics : \"SW\". level = \"class\", allowed metrics : \"meanSW\", \"minSW\", \"pnSW\", \"dbcv\". level = \"dataset\", allowed metrics : \"meanSW\", \"meanClassSW\", \"pnSW\", \"minClassSW\", \"cdbw\", \"cohesion\", \"compactness\", \"sep\", \"dbcv\". function(s) optional arguments ... passed depend value level: level = \"element\", optional arguments passed stats::dist(). level = \"class\", optional arguments passed dbcv(). level = \"dataset\", optional arguments passed dbcv() CDbw().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute embedding-based metrics — getEmbeddingMetrics","text":"","code":"d1 <- mockData() getEmbeddingMetrics(d1[,1:2], labels=d1$class, metrics=c(\"meanSW\", \"minSW\", \"pnSW\", \"dbcv\"), level=\"class\") #> class meanSW minSW pnSW dbcv #> 1 A 0.3855611 -0.42303778 0.12000000 -0.2003344 #> 2 B 0.2349886 -0.01783369 0.06666667 -0.5888114"},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyLabel.html","id":null,"dir":"Reference","previous_headings":"","what":"Get fuzzy representation of labels — getFuzzyLabel","title":"Get fuzzy representation of labels — getFuzzyLabel","text":"Get fuzzy representation labels according spatial neighborhood label composition.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyLabel.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get fuzzy representation of labels — getFuzzyLabel","text":"","code":"getFuzzyLabel(labels, location, k = 6, alpha = 0.5, ...)"},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyLabel.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get fuzzy representation of labels — getFuzzyLabel","text":"labels anomic vector cluster labels location matrix data.frame coordinates k wished number nearest neighbors alpha parameter control extend spot contribute class composition calculation. \"equal\" means weighted neighbors. numeric value 0 1 means weight frequency contribution spot , frequency contribution knn 1-alpha. default 0.5. ... 
Passed findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyLabel.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Get fuzzy representation of labels — getFuzzyLabel","text":"matrix fuzzy memberships.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyLabel.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Get fuzzy representation of labels — getFuzzyLabel","text":"","code":"data(sp_toys) data <- sp_toys getFuzzyLabel(data$label, data[,c(\"x\", \"y\")], k=6) #> 1 2 #> 1 0.0000000 1.0000000 #> 2 0.0000000 1.0000000 #> 3 0.0000000 1.0000000 #> 4 0.1428571 0.8571429 #> 5 0.6666667 0.3333333 #> 6 1.0000000 0.0000000 #> 7 1.0000000 0.0000000 #> 8 1.0000000 0.0000000 #> 9 1.0000000 0.0000000 #> 10 1.0000000 0.0000000 #> 11 1.0000000 0.0000000 #> 12 1.0000000 0.0000000 #> 13 1.0000000 0.0000000 #> 14 1.0000000 0.0000000 #> 15 1.0000000 0.0000000 #> 16 0.0000000 1.0000000 #> 17 0.0000000 1.0000000 #> 18 0.0000000 1.0000000 #> 19 0.1666667 0.8333333 #> 20 0.8333333 0.1666667 #> 21 1.0000000 0.0000000 #> 22 1.0000000 0.0000000 #> 23 1.0000000 0.0000000 #> 24 1.0000000 0.0000000 #> 25 1.0000000 0.0000000 #> 26 1.0000000 0.0000000 #> 27 1.0000000 0.0000000 #> 28 1.0000000 0.0000000 #> 29 1.0000000 0.0000000 #> 30 1.0000000 0.0000000 #> 31 0.0000000 1.0000000 #> 32 0.0000000 1.0000000 #> 33 0.0000000 1.0000000 #> 34 0.0000000 1.0000000 #> 35 0.1666667 0.8333333 #> 36 0.8333333 0.1666667 #> 37 1.0000000 0.0000000 #> 38 1.0000000 0.0000000 #> 39 1.0000000 0.0000000 #> 40 1.0000000 0.0000000 #> 41 1.0000000 0.0000000 #> 42 1.0000000 0.0000000 #> 43 1.0000000 0.0000000 #> 44 1.0000000 0.0000000 #> 45 1.0000000 0.0000000 #> 46 0.0000000 1.0000000 #> 47 0.0000000 1.0000000 #> 48 0.0000000 1.0000000 #> 49 0.0000000 1.0000000 #> 50 0.1666667 0.8333333 #> 51 0.8333333 0.1666667 #> 52 1.0000000 0.0000000 #> 53 1.0000000 0.0000000 #> 54 1.0000000 0.0000000 #> 55 1.0000000 0.0000000 #> 56 1.0000000 0.0000000 #> 57 1.0000000 0.0000000 #> 58 1.0000000 0.0000000 #> 59 1.0000000 0.0000000 #> 60 1.0000000 0.0000000 #> 61 0.0000000 1.0000000 #> 62 0.0000000 1.0000000 #> 63 0.0000000 1.0000000 #> 64 0.0000000 1.0000000 #> 65 0.0000000 1.0000000 #> 66 0.1666667 0.8333333 #> 67 0.8333333 0.1666667 #> 68 1.0000000 0.0000000 #> 69 1.0000000 0.0000000 #> 70 1.0000000 0.0000000 #> 71 1.0000000 0.0000000 #> 72 1.0000000 0.0000000 #> 73 1.0000000 0.0000000 #> 74 1.0000000 0.0000000 #> 75 1.0000000 0.0000000 #> 76 0.0000000 1.0000000 #> 77 0.0000000 1.0000000 #> 78 0.0000000 1.0000000 #> 79 0.0000000 1.0000000 #> 80 0.0000000 1.0000000 #> 81 0.1666667 0.8333333 #> 82 0.8333333 0.1666667 #> 83 1.0000000 0.0000000 #> 84 1.0000000 0.0000000 #> 85 1.0000000 0.0000000 #> 86 1.0000000 0.0000000 #> 87 1.0000000 0.0000000 #> 88 1.0000000 0.0000000 #> 89 1.0000000 0.0000000 #> 90 1.0000000 0.0000000 #> 91 0.0000000 1.0000000 #> 92 0.0000000 1.0000000 #> 93 0.0000000 1.0000000 #> 94 0.0000000 1.0000000 #> 95 0.0000000 1.0000000 #> 96 0.0000000 1.0000000 #> 97 0.1666667 0.8333333 #> 98 0.8333333 0.1666667 #> 99 1.0000000 0.0000000 #> 100 1.0000000 0.0000000 #> 101 1.0000000 0.0000000 #> 102 1.0000000 0.0000000 #> 103 1.0000000 0.0000000 #> 104 1.0000000 0.0000000 #> 105 1.0000000 0.0000000 #> 106 0.0000000 1.0000000 #> 107 0.0000000 1.0000000 #> 108 0.0000000 1.0000000 #> 109 0.0000000 1.0000000 #> 110 0.0000000 1.0000000 #> 111 0.0000000 1.0000000 #> 112 0.1666667 0.8333333 #> 113 0.8333333 0.1666667 #> 114 
1.0000000 0.0000000 #> 115 1.0000000 0.0000000 #> 116 1.0000000 0.0000000 #> 117 1.0000000 0.0000000 #> 118 1.0000000 0.0000000 #> 119 1.0000000 0.0000000 #> 120 1.0000000 0.0000000 #> 121 0.0000000 1.0000000 #> 122 0.0000000 1.0000000 #> 123 0.0000000 1.0000000 #> 124 0.0000000 1.0000000 #> 125 0.0000000 1.0000000 #> 126 0.0000000 1.0000000 #> 127 0.0000000 1.0000000 #> 128 0.1666667 0.8333333 #> 129 0.8333333 0.1666667 #> 130 1.0000000 0.0000000 #> 131 1.0000000 0.0000000 #> 132 1.0000000 0.0000000 #> 133 1.0000000 0.0000000 #> 134 1.0000000 0.0000000 #> 135 1.0000000 0.0000000 #> 136 0.0000000 1.0000000 #> 137 0.0000000 1.0000000 #> 138 0.0000000 1.0000000 #> 139 0.0000000 1.0000000 #> 140 0.0000000 1.0000000 #> 141 0.0000000 1.0000000 #> 142 0.0000000 1.0000000 #> 143 0.1666667 0.8333333 #> 144 0.8333333 0.1666667 #> 145 1.0000000 0.0000000 #> 146 1.0000000 0.0000000 #> 147 1.0000000 0.0000000 #> 148 1.0000000 0.0000000 #> 149 1.0000000 0.0000000 #> 150 1.0000000 0.0000000 #> 151 0.0000000 1.0000000 #> 152 0.0000000 1.0000000 #> 153 0.0000000 1.0000000 #> 154 0.0000000 1.0000000 #> 155 0.0000000 1.0000000 #> 156 0.0000000 1.0000000 #> 157 0.0000000 1.0000000 #> 158 0.0000000 1.0000000 #> 159 0.1666667 0.8333333 #> 160 0.8333333 0.1666667 #> 161 1.0000000 0.0000000 #> 162 1.0000000 0.0000000 #> 163 1.0000000 0.0000000 #> 164 1.0000000 0.0000000 #> 165 1.0000000 0.0000000 #> 166 0.0000000 1.0000000 #> 167 0.0000000 1.0000000 #> 168 0.0000000 1.0000000 #> 169 0.0000000 1.0000000 #> 170 0.0000000 1.0000000 #> 171 0.0000000 1.0000000 #> 172 0.0000000 1.0000000 #> 173 0.0000000 1.0000000 #> 174 0.1666667 0.8333333 #> 175 0.8333333 0.1666667 #> 176 1.0000000 0.0000000 #> 177 1.0000000 0.0000000 #> 178 1.0000000 0.0000000 #> 179 1.0000000 0.0000000 #> 180 1.0000000 0.0000000 #> 181 0.0000000 1.0000000 #> 182 0.0000000 1.0000000 #> 183 0.0000000 1.0000000 #> 184 0.0000000 1.0000000 #> 185 0.0000000 1.0000000 #> 186 0.0000000 1.0000000 #> 187 0.0000000 1.0000000 #> 188 0.0000000 1.0000000 #> 189 0.0000000 1.0000000 #> 190 0.1666667 0.8333333 #> 191 0.8333333 0.1666667 #> 192 1.0000000 0.0000000 #> 193 1.0000000 0.0000000 #> 194 1.0000000 0.0000000 #> 195 1.0000000 0.0000000 #> 196 0.0000000 1.0000000 #> 197 0.0000000 1.0000000 #> 198 0.0000000 1.0000000 #> 199 0.0000000 1.0000000 #> 200 0.0000000 1.0000000 #> 201 0.0000000 1.0000000 #> 202 0.0000000 1.0000000 #> 203 0.0000000 1.0000000 #> 204 0.0000000 1.0000000 #> 205 0.1666667 0.8333333 #> 206 0.8333333 0.1666667 #> 207 1.0000000 0.0000000 #> 208 1.0000000 0.0000000 #> 209 1.0000000 0.0000000 #> 210 1.0000000 0.0000000 #> 211 0.0000000 1.0000000 #> 212 0.0000000 1.0000000 #> 213 0.0000000 1.0000000 #> 214 0.0000000 1.0000000 #> 215 0.0000000 1.0000000 #> 216 0.0000000 1.0000000 #> 217 0.0000000 1.0000000 #> 218 0.0000000 1.0000000 #> 219 0.0000000 1.0000000 #> 220 0.0000000 1.0000000 #> 221 0.1666667 0.8333333 #> 222 0.8333333 0.1666667 #> 223 1.0000000 0.0000000 #> 224 1.0000000 0.0000000 #> 225 1.0000000 0.0000000 #> 226 0.0000000 1.0000000 #> 227 0.0000000 1.0000000 #> 228 0.0000000 1.0000000 #> 229 0.0000000 1.0000000 #> 230 0.0000000 1.0000000 #> 231 0.0000000 1.0000000 #> 232 0.0000000 1.0000000 #> 233 0.0000000 1.0000000 #> 234 0.0000000 1.0000000 #> 235 0.0000000 1.0000000 #> 236 0.2857143 0.7142857 #> 237 0.8571429 0.1428571 #> 238 1.0000000 0.0000000 #> 239 1.0000000 0.0000000 #> 240 1.0000000 
0.0000000"},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionElementMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getFuzzyPartitionElementMetrics — getFuzzyPartitionElementMetrics","title":"getFuzzyPartitionElementMetrics — getFuzzyPartitionElementMetrics","text":"Computes selection external fuzzy clustering evaluation metrics element level.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionElementMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getFuzzyPartitionElementMetrics — getFuzzyPartitionElementMetrics","text":"","code":"getFuzzyPartitionElementMetrics( hardTrue = NULL, fuzzyTrue = NULL, hardPred = NULL, fuzzyPred = NULL, fuzzy_true = TRUE, fuzzy_pred = FALSE, metrics = c(\"fuzzySPC\"), useNegatives = TRUE, verbose = TRUE, usePairs = TRUE )"},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionElementMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getFuzzyPartitionElementMetrics — getFuzzyPartitionElementMetrics","text":"hardTrue vector true cluster labels fuzzyTrue object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows length hardTrue. hardPred vector predicted cluster labels fuzzyPred object coercible numeric matrix membership probability elements (rows) clusters (columns). fuzzy_true Logical; whether truth fuzzy. fuzzy_pred Logical; whether prediction fuzzy. metrics metrics compute. Currently \"fuzzySPC\" included element level. useNegatives Logical; whether include negative pairs concordance score (tends result larger overall concordance lower dynamic range score). Default TRUE. verbose Logical; whether print expected memory usage large datasets. usePairs Logical; whether compute pairs instead elements. useful fuzzy_true=TRUE fuzzy_pred=FALSE.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionElementMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getFuzzyPartitionElementMetrics — getFuzzyPartitionElementMetrics","text":"dataframe metric values.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","title":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","text":"Computes selection external fuzzy clustering evaluation metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","text":"","code":"getFuzzyPartitionMetrics( hardTrue = NULL, fuzzyTrue = NULL, hardPred = NULL, fuzzyPred = NULL, metrics = c(\"fuzzyWH\", \"fuzzyAWH\", \"fuzzyWC\", \"fuzzyAWC\"), level = \"class\", nperms = NULL, verbose = TRUE, returnElementPairAccuracy = FALSE, BPPARAM = BiocParallel::SerialParam(), useNegatives = TRUE, usePairs = NULL, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","text":"hardTrue atomic vector coercible factor integer vector containing true hard labels. 
fuzzyTrue object coercible numeric matrix membership probability elements (rows) clusters (columns). hardPred atomic vector coercible factor integer vector containing predicted hard labels. fuzzyPred object coercible numeric matrix membership probability elements (rows) clusters (columns). metrics metrics compute. See details. level level calculate metrics. Options include \"element\", \"class\" \"dataset\". nperms number permutations (correction chance). NULL (default), first set 10 permutations run estimate whether variation across permutations 0.0025, case (max 1000) permutations run. verbose Logical; whether print info warnings, including standard error mean across permutations (giving idea precision adjusted metrics). returnElementPairAccuracy Logical. TRUE, returns per-element pair accuracy instead various partition-level dataset-level metrics. Default FALSE. BPPARAM BiocParallel params multithreading (default none) useNegatives Logical; whether include negative pairs concordance score (tends result larger overall concordance lower dynamic range score). Default TRUE. usePairs Logical; whether compute pairs instead elements Recommended TRUE default. ... Optional arguments poem::FuzzyPartitionMetrics(): tnorm. useful fuzzy_true=TRUE fuzzy_pred=TRUE.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","text":"dataframe metric results.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionMetrics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","text":"allowed values metrics depend value level: level = \"element\", allowed metrics : \"fuzzySPC\". level = \"class\", allowed metrics : \"fuzzyWH\", \"fuzzyAWH\", \"fuzzyWC\", \"fuzzyAWC\". level = \"dataset\", allowed metrics : \"fuzzyRI\", \"fuzzyARI\", \"fuzzyWH\", \"fuzzyAWH\", \"fuzzyWC\", \"fuzzyAWC\".","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","text":"","code":"# generate fuzzy partitions: m1 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) m2 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.025, 0.95, 0.025, 0.02, 0.96, 0.02, 0.01, 0.98, 0.01, 0.05, 0.05, 0.95, 0.02, 0.02, 0.96, 0.01, 0.01, 0.98), ncol = 3, byrow=TRUE) colnames(m1) <- colnames(m2) <- LETTERS[1:3] getFuzzyPartitionMetrics(fuzzyTrue=m1,fuzzyPred=m2, level=\"class\") #> Comparing between a fuzzy truth and a fuzzy prediction... #> Running 100 extra permutations.
#> Standard error of the mean NDC across permutations:0.00234 #> fuzzyWC fuzzyAWC class fuzzyWH fuzzyAWH cluster #> 1 0.3445840 0.04752717 1 NA NA NA #> 2 0.7242508 -0.08734288 2 NA NA NA #> 3 0.7520319 0.03868490 3 NA NA NA #> 4 NA NA NA 0.9359492 0.8097549 1 #> 5 NA NA NA 0.9214151 0.8187337 2 #> 6 NA NA NA 0.1588990 -0.9221584 3 # generate a fuzzy truth: fuzzyTrue <- matrix(c( 0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) # a hard truth: hardTrue <- apply(fuzzyTrue,1,FUN=which.max) # some predicted labels: hardPred <- c(1,1,1,1,1,1,2,2,2) getFuzzyPartitionMetrics(hardPred=hardPred, hardTrue=hardTrue, fuzzyTrue=fuzzyTrue, nperms=3, level=\"class\") #> Comparing between a fuzzy truth and a hard prediction... #> Standard error of the mean NDC across permutations:0.0357 #> You might want to increase the number of permutations to increase the robustness of the adjusted metrics. #> fuzzyWC fuzzyAWC class fuzzyWH fuzzyAWH cluster #> 1 0.7195238 0.4677711 1 NA NA NA #> 2 1.0000000 NaN 2 NA NA NA #> 3 1.0000000 NaN 3 NA NA NA #> 4 NA NA NA 1.00000000 1.000000 1 #> 5 NA NA NA 0.06166667 -3.421466 2"},{"path":"https://roseyuan.github.io/poem/reference/getGraphClassMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getGraphClassMetrics — getGraphClassMetrics","title":"getGraphClassMetrics — getGraphClassMetrics","text":"Computes selection supervised graph evaluation metrics using ground truth class labels. metrics reported (average) per class.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphClassMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getGraphClassMetrics — getGraphClassMetrics","text":"","code":"getGraphClassMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"AMSP\", \"PWC\", \"NCE\"), directed = NULL, ... ) # S4 method for class 'list' getGraphClassMetrics(x, labels, metrics, directed = NULL, k = NULL, ...) # S4 method for class 'data.frame' getGraphClassMetrics( x, labels, metrics, directed = NULL, k, shared = FALSE, ... ) # S4 method for class 'matrix' getGraphClassMetrics( x, labels, metrics, directed = NULL, k, shared = FALSE, ... ) # S4 method for class 'igraph' getGraphClassMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"AMSP\", \"PWC\", \"NCE\"), directed = NULL, ... ) # S4 method for class 'dist' getGraphClassMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"AMSP\", \"PWC\", \"NCE\"), directed = NULL, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getGraphClassMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getGraphClassMetrics — getGraphClassMetrics","text":"x Either igraph object, list nearest neighbors (see details ), data.frame matrix (features columns items rows) nearest neighbors computed. labels Either factor character vector indicating true class label element (.e. row vertex) x. metrics metrics compute. See details. directed Logical; whether compute metrics directed fashion. left NULL, conventional choices made per metric (adhesion, cohesion, PWC AMSP undirected, others directed). ... Optional arguments emb2knn() emb2snn(). k number nearest neighbors compute /use. Can omitted x graph list nearest neighbors. shared Logical; whether use shared nearest neighbor network instead nearest neighbor network. 
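None of the getGraphClassMetrics methods above are accompanied by an example. A minimal sketch, assuming the poem package is attached and reusing the mockData() helper documented later in this index; the choice of k=10 and the explicit metric set are illustrative, and the output is not shown:
library(poem)
d1 <- mockData()  # toy 2-class data: a 2-dimensional embedding plus a class column
# per-class metrics from a kNN graph built on the embedding;
# k must be supplied explicitly for data.frame/matrix input
getGraphClassMetrics(d1[,1:2], labels=d1$class,
                     metrics=c("SI","NP","AMSP","PWC","NCE"), k=10)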
Ignored x embedding dist object.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphClassMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getGraphClassMetrics — getGraphClassMetrics","text":"data.frame metrics class.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphElementMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getGraphElementMetrics — getGraphElementMetrics","title":"getGraphElementMetrics — getGraphElementMetrics","text":"Computes selection supervised graph evaluation metrics using ground truth class labels. metrics reported (average) per node/element.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphElementMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getGraphElementMetrics — getGraphElementMetrics","text":"","code":"getGraphElementMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"NCE\"), directed = NULL, ... ) # S4 method for class 'list' getGraphElementMetrics(x, labels, metrics, directed = NULL, k = NULL, ...) # S4 method for class 'data.frame' getGraphElementMetrics( x, labels, metrics, directed = NULL, k, shared = FALSE, ... ) # S4 method for class 'matrix' getGraphElementMetrics( x, labels, metrics, directed = NULL, k, shared = FALSE, ... ) # S4 method for class 'igraph' getGraphElementMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"NCE\"), directed = NULL, ... ) # S4 method for class 'dist' getGraphElementMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"NCE\"), directed = NULL, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getGraphElementMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getGraphElementMetrics — getGraphElementMetrics","text":"x Either igraph object, list nearest neighbors (see details ), data.frame matrix (features columns items rows) nearest neighbors computed. labels Either factor character vector indicating true class label element (.e. row vertex) x. metrics metrics compute. See details. directed Logical; whether compute metrics directed fashion. left NULL, conventional choices made per metric (adhesion, cohesion, PWC AMSP undirected, others directed). ... Optional arguments emb2knn() emb2snn(). k number nearest neighbors compute /use. Can omitted x graph list nearest neighbors. shared Logical; whether use shared nearest neighbor network instead nearest neighbor network. Ignored x embedding dist object.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphElementMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getGraphElementMetrics — getGraphElementMetrics","text":"data.frame metrics node/element x.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute graph-based metrics — getGraphMetrics","title":"Compute graph-based metrics — getGraphMetrics","text":"Computes selection graph evaluation metrics using class labels.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute graph-based metrics — getGraphMetrics","text":"","code":"getGraphMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"AMSP\", \"PWC\", \"NCE\"), directed = NULL, k = 10, shared = FALSE, level = \"class\", ... 
)"},{"path":"https://roseyuan.github.io/poem/reference/getGraphMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute graph-based metrics — getGraphMetrics","text":"x Either igraph object, list nearest neighbors (see details ), data.frame matrix (features columns items rows) nearest neighbors computed. labels Either factor character vector indicating true class label element (.e. row vertex) x. metrics metrics compute. See details. directed Logical; whether compute metrics directed fashion. left NULL, conventional choices made per metric (adhesion, cohesion, PWC AMSP undirected, others directed). k number nearest neighbors compute /use. Can omitted x graph list nearest neighbors. shared Logical; whether use shared nearest neighbor network instead nearest neighbor network. Ignored x embedding dist object. level level calculate metrics. Options include \"element\", \"class\" \"dataset\". ... Optional arguments emb2knn() emb2snn().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute graph-based metrics — getGraphMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphMetrics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Compute graph-based metrics — getGraphMetrics","text":"allowed values metrics depend value level: level = \"element\", allowed metrics : \"SI\",\"ISI\",\"NP\",\"NCE\" (see details). level = \"class\", allowed metrics : \"SI\": Simpson’s Index. \"ISI\": Inverse Simpson’s Index \"NP\": Neighborhood Purity \"AMSP\": Adjusted Mean Shortest Path \"PWC\": Proportion Weakly Connected \"NCE\": Neighborhood Class Enrichment \"adhesion\": adhesion graph, minumum number nodes must removed split graph. \"cohesion\": cohesion graph, minumum number edges must removed split graph. 
level = \"dataset\", allowed metrics : \"SI\",\"ISI\",\"NP\",\"AMSP\",\"PWC\",\"NCE\", \"adhesion\",\"cohesion\".","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute graph-based metrics — getGraphMetrics","text":"","code":"d1 <- mockData() getGraphMetrics(d1[,1:2], labels=d1$class, level=\"class\") #> class SI NP AMSP PWC NCE #> A A 0.8456000 0.8200000 0.1097333 0.2400000 0.2588020 #> B B 0.5746667 0.6466667 0.1600000 0.2666667 0.6306445"},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairAgreement.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element local agreement between a clustering and a ground truth — getNeighboringPairAgreement","title":"Per-element local agreement between a clustering and a ground truth — getNeighboringPairAgreement","text":"Per-element local agreement clustering ground truth","code":""},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairAgreement.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element local agreement between a clustering and a ground truth — getNeighboringPairAgreement","text":"","code":"getNeighboringPairAgreement( true, pred, location, k = 20L, useNegatives = FALSE, distWeights = TRUE, BNPARAM = NULL )"},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairAgreement.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element local agreement between a clustering and a ground truth — getNeighboringPairAgreement","text":"true vector true class labels pred vector predicted clusters location matrix data.frame spatial dimensions columns. Alternatively, nearest neighbor object produced findKNN. k Approximate number nearest neighbors consider useNegatives Logical; whether include consistency negative pairs score (default FALSE). distWeights Logical; whether weight agreement distance (default TRUE). BNPARAM BiocNeighbors parameter object compute kNNs. Ignored unless input matrix data.frame. 
omitted, Annoy approximation used 500 elements.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairAgreement.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element local agreement between a clustering and a ground truth — getNeighboringPairAgreement","text":"vector agreement scores","code":""},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairAgreement.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Per-element local agreement between a clustering and a ground truth — getNeighboringPairAgreement","text":"","code":"data(sp_toys) data <- sp_toys getNeighboringPairAgreement(data$label, data$p1, data[,c(\"x\", \"y\")], k=6) #> [1] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [8] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [15] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [22] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [29] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [36] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [43] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [50] 1.0000000 0.7500000 0.6666667 0.6666667 0.6666667 0.6666667 0.8333333 #> [57] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [64] 1.0000000 1.0000000 0.8000000 0.3333333 0.6666667 0.6666667 0.6666667 #> [71] 0.3333333 0.8333333 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [78] 1.0000000 1.0000000 1.0000000 0.6666667 0.6666667 1.0000000 1.0000000 #> [85] 0.8333333 0.5000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [92] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 0.6666667 #> [99] 1.0000000 1.0000000 0.5000000 0.8333333 1.0000000 1.0000000 1.0000000 #> [106] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 #> [113] 0.5000000 0.6666667 0.5000000 0.6666667 1.0000000 1.0000000 1.0000000 #> [120] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [127] 1.0000000 0.8000000 0.5000000 0.6666667 0.8333333 1.0000000 1.0000000 #> [134] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [141] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [148] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [155] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [162] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [169] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [176] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [183] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [190] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [197] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [204] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [211] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [218] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [225] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [232] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [239] 1.0000000 
1.0000000"},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairConcordance.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element local concordance between a clustering and a ground truth — getNeighboringPairConcordance","title":"Per-element local concordance between a clustering and a ground truth — getNeighboringPairConcordance","text":"Per-element local concordance clustering ground truth","code":""},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairConcordance.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element local concordance between a clustering and a ground truth — getNeighboringPairConcordance","text":"","code":"getNeighboringPairConcordance( true, pred, location, k = 20L, useNegatives = FALSE, distWeights = TRUE, BNPARAM = NULL )"},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairConcordance.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element local concordance between a clustering and a ground truth — getNeighboringPairConcordance","text":"true vector true class labels pred vector predicted clusters location matrix data.frame spatial dimensions columns. Alternatively, nearest neighbor object produced findKNN. k Approximate number nearest neighbors consider useNegatives Logical; whether include concordance negative pairs score (default FALSE). distWeights Logical; whether weight concordance distance (default TRUE). BNPARAM BiocNeighbors parameter object compute kNNs. Ignored unless input matrix data.frame. omitted, Annoy approximation used 500 elements.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairConcordance.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element local concordance between a clustering and a ground truth — getNeighboringPairConcordance","text":"vector concordance scores","code":""},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairConcordance.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Per-element local concordance between a clustering and a ground truth — getNeighboringPairConcordance","text":"","code":"data(sp_toys) data <- sp_toys getNeighboringPairConcordance(data$label, data$p1, data[,c(\"x\", \"y\")], k=6) #> [1] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [8] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [15] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [22] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [29] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [36] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [43] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [50] 1.0000000 0.7500000 0.6666667 0.6666667 0.6666667 0.6666667 0.8333333 #> [57] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [64] 1.0000000 1.0000000 0.8000000 0.3333333 0.6666667 0.6666667 0.6666667 #> [71] 0.3333333 0.8333333 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [78] 1.0000000 1.0000000 1.0000000 0.6666667 0.6666667 1.0000000 1.0000000 #> [85] 0.8333333 0.5000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [92] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 0.6666667 #> [99] 1.0000000 1.0000000 0.5000000 0.8333333 1.0000000 
1.0000000 1.0000000 #> [106] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 #> [113] 0.5000000 0.6666667 0.5000000 0.6666667 1.0000000 1.0000000 1.0000000 #> [120] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [127] 1.0000000 0.8000000 0.5000000 0.6666667 0.8333333 1.0000000 1.0000000 #> [134] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [141] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [148] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [155] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [162] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [169] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [176] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [183] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [190] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [197] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [204] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [211] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [218] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [225] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [232] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [239] 1.0000000 1.0000000"},{"path":"https://roseyuan.github.io/poem/reference/getPairConcordance.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element pair concordance score — getPairConcordance","title":"Per-element pair concordance score — getPairConcordance","text":"Per-element pair concordance clustering ground truth","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPairConcordance.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element pair concordance score — getPairConcordance","text":"","code":"getPairConcordance( true, pred, usePairs = TRUE, useNegatives = FALSE, adjust = FALSE )"},{"path":"https://roseyuan.github.io/poem/reference/getPairConcordance.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element pair concordance score — getPairConcordance","text":"true vector true class labels pred vector predicted clusters usePairs Logical; whether compute pairs instead elements Recommended TRUE default. useNegatives Logical; whether include consistency negative pairs score (default FALSE). adjust Logical; whether adjust chance. implemented useNegatives=FALSE (make sense element-level otherwise).","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPairConcordance.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element pair concordance score — getPairConcordance","text":"vector concordance scores","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionClassMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getPartitionClassMetrics — getPartitionClassMetrics","title":"getPartitionClassMetrics — getPartitionClassMetrics","text":"Computes selection external evaluation metrics partition. 
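The getPairConcordance entry above lists no example. A minimal sketch, reusing the toy true/pred vectors from the getPartitionMetrics example further below; the call follows the documented usage, but the snippet itself is illustrative and its output is not shown:
library(poem)
true <- rep(LETTERS[1:3], each=10)
pred <- c(rep("A",8), rep("B",9), rep("C",3), rep("D",10))
# one concordance score per element; adjust=TRUE would additionally correct for
# chance (only implemented for useNegatives=FALSE)
getPairConcordance(true, pred, usePairs=TRUE, useNegatives=FALSE)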
metrics reported per class.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionClassMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getPartitionClassMetrics — getPartitionClassMetrics","text":"","code":"getPartitionClassMetrics( true, pred, metrics = c(\"WC\", \"WH\", \"AWC\", \"AWH\", \"FM\") )"},{"path":"https://roseyuan.github.io/poem/reference/getPartitionClassMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getPartitionClassMetrics — getPartitionClassMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. omitted, main metrics computed.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionClassMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getPartitionClassMetrics — getPartitionClassMetrics","text":"dataframe metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionElementMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getPartitionElementMetrics — getPartitionElementMetrics","title":"getPartitionElementMetrics — getPartitionElementMetrics","text":"Computes selection external evaluation metrics partition. metrics reported per element.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionElementMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getPartitionElementMetrics — getPartitionElementMetrics","text":"","code":"getPartitionElementMetrics( true, pred, metrics = c(\"ASPC\"), usePairs = TRUE, useNegatives = TRUE )"},{"path":"https://roseyuan.github.io/poem/reference/getPartitionElementMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getPartitionElementMetrics — getPartitionElementMetrics","text":"true vector true class labels pred vector predicted clusters metrics metrics compute. usePairs Logical; whether compute pairs instead elements Recommended TRUE default. useNegatives Logical; whether include consistency negative pairs score (default FALSE).","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionElementMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getPartitionElementMetrics — getPartitionElementMetrics","text":"dataframe metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionGlobalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getPartitionGlobalMetrics — getPartitionGlobalMetrics","title":"getPartitionGlobalMetrics — getPartitionGlobalMetrics","text":"Computes selection external evaluation metrics partition. metrics reported per dataset.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionGlobalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getPartitionGlobalMetrics — getPartitionGlobalMetrics","text":"","code":"getPartitionGlobalMetrics( true, pred, metrics = c(\"RI\", \"WC\", \"WH\", \"ARI\", \"NCR\", \"AWC\", \"AWH\", \"MI\", \"AMI\", \"VI\", \"EH\", \"EC\", \"VM\", \"FM\"), ... 
)"},{"path":"https://roseyuan.github.io/poem/reference/getPartitionGlobalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getPartitionGlobalMetrics — getPartitionGlobalMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. omitted, main metrics computed. See details. ... Optional arguments MI, VI, VM. See clevr::mutual_info(), clevr::variation_info() clevr::v_measure() details.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionGlobalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getPartitionGlobalMetrics — getPartitionGlobalMetrics","text":"dataframe metric results. Possible metrics : RI Rand Index WC Wallace Completeness WH Wallace Homogeneity ARI Adjusted Rand Index AWC Adjusted Wallace Completeness AWH Adjusted Wallace Homogeneity NCR Normalized class size Rand index MI Mutual Information AMI Adjusted Mutual Information VI Variation Information EH (Entropy-based) Homogeneity EC (Entropy-based) Completeness VM V-measure FM F-measure/weighted average F1 score VDM Van Dongen Measure MHM Meila-Heckerman Measure MMM Maximum-Match Measure Mirkin Mirkin Metric Accuracy Set Matching Accuracy","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute partition-based metrics — getPartitionMetrics","title":"Compute partition-based metrics — getPartitionMetrics","text":"Computes selection external evaluation metrics partition.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute partition-based metrics — getPartitionMetrics","text":"","code":"getPartitionMetrics( true, pred, metrics = c(\"WC\", \"WH\", \"AWC\", \"AWH\", \"FM\"), level = \"class\", ... )"},{"path":"https://roseyuan.github.io/poem/reference/getPartitionMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute partition-based metrics — getPartitionMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. omitted, main metrics computed. See details. level level calculate metrics. Options include \"element\", \"class\" \"dataset\". ... Optional arguments MI, VI, VM. See clevr::mutual_info(), clevr::variation_info() clevr::v_measure() details.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute partition-based metrics — getPartitionMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionMetrics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Compute partition-based metrics — getPartitionMetrics","text":"allowed values metrics depend value level: level = \"element\", allowed metrics : \"SPC\": Spot-wise Pair Concordance. \"ASPC\": Adjusted Spot-wise Pair Concordance. level = \"class\", allowed metrics : \"WC\",\"WH\",\"AWC\",\"AWH\",\"FM\" (see details). 
level = \"dataset\", allowed metrics : \"RI\": Rand Index \"WC\": Wallace Completeness \"WH\": Wallace Homogeneity \"ARI\": Adjusted Rand Index \"AWC\": Adjusted Wallace Completeness \"AWH\": Adjusted Wallace Homogeneity \"NCR\": Normalized class size Rand index \"MI\": Mutual Information \"AMI\": Adjusted Mutual Information \"VI\": Variation Information \"EH\": (Entropy-based) Homogeneity \"EC\": (Entropy-based) Completeness \"VM\": V-measure \"FM\": F-measure/weighted average F1 score \"VDM\": Van Dongen Measure \"MHM\": Meila-Heckerman Measure \"MMM\": Maximum-Match Measure \"Mirkin\": Mirkin Metric \"Accuracy\": Set Matching Accuracy","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute partition-based metrics — getPartitionMetrics","text":"","code":"true <- rep(LETTERS[1:3], each=10) pred <- c(rep(\"A\", 8), rep(\"B\", 9), rep(\"C\", 3), rep(\"D\", 10)) getPartitionMetrics(true, pred, level=\"class\") #> WC AWC FM class WH AWH cluster #> 1 0.6444444 0.5211558 0.8888889 A NA NA #> 2 0.5333333 0.3715170 0.7368421 B NA NA #> 3 1.0000000 1.0000000 1.0000000 C NA NA #> 4 NA NA NA 1.0000000 1.0000000 A #> 5 NA NA NA 0.6111111 0.4361111 B #> 6 NA NA NA 1.0000000 1.0000000 C #> 7 NA NA NA 1.0000000 1.0000000 D getPartitionMetrics(true, pred, level=\"dataset\") #> WC WH AWC AWH FM #> 1 0.7259259 0.875 0.630891 0.81875 0.8752437"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassExternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute class-level external evaluation metrics for spatially-resolved data — getSpatialClassExternalMetrics","title":"Compute class-level external evaluation metrics for spatially-resolved data — getSpatialClassExternalMetrics","text":"Computes selection external clustering evaluation metrics spatial data class/cluster level.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassExternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute class-level external evaluation metrics for spatially-resolved data — getSpatialClassExternalMetrics","text":"","code":"getSpatialClassExternalMetrics( true, pred, location, k = 6, alpha = 0.5, metrics = c(\"SpatialWH\", \"SpatialAWH\", \"SpatialWC\", \"SpatialAWC\"), fuzzy_true = TRUE, fuzzy_pred = FALSE, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassExternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute class-level external evaluation metrics for spatially-resolved data — getSpatialClassExternalMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. location matrix data.frame coordinates k number neighbors used calculating fuzzy class memberships fuzzy metrics. alpha parameter control extend spot contribute class composition calculation. \"equal\" means weighted neighbors. numeric value 0 1 means weight frequency contribution spot , frequency contribution knn 1-alpha. default 0.5. metrics vector metric names compute. fuzzy_true Logical; whether compute fuzzy class memberships true. fuzzy_pred Logical; whether compute fuzzy class memberships pred. ... 
Optional params poem::FuzzyPartitionMetrics() findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassExternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute class-level external evaluation metrics for spatially-resolved data — getSpatialClassExternalMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassInternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute class-level internal evaluation metrics for spatially-resolved data — getSpatialClassInternalMetrics","title":"Compute class-level internal evaluation metrics for spatially-resolved data — getSpatialClassInternalMetrics","text":"Computes selection internal clustering evaluation metrics spatial data class.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassInternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute class-level internal evaluation metrics for spatially-resolved data — getSpatialClassInternalMetrics","text":"","code":"getSpatialClassInternalMetrics( labels, location, k = 6, metrics = c(\"CHAOS\", \"PAS\", \"ELSA\"), ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassInternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute class-level internal evaluation metrics for spatially-resolved data — getSpatialClassInternalMetrics","text":"labels vector containing labels evaluated. location numerical matrix containing location information, rows samples columns location dimensions. k size spatial neighborhood look spot. used calculating PAS ELSA scores. metrics Possible metrics: \"CHAOS\", \"PAS\" \"ELSA\". ... Optional params PAS().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassInternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute class-level internal evaluation metrics for spatially-resolved data — getSpatialClassInternalMetrics","text":"dataframe metric values.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementExternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getSpatialElementExternalMetrics — getSpatialElementExternalMetrics","title":"getSpatialElementExternalMetrics — getSpatialElementExternalMetrics","text":"Computes selection external clustering evaluation metrics spatial data element level.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementExternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getSpatialElementExternalMetrics — getSpatialElementExternalMetrics","text":"","code":"getSpatialElementExternalMetrics( true, pred, location, k = 6, alpha = 0.5, metrics = c(\"SpatialSPC\", \"SpatialNPC\"), fuzzy_true = TRUE, fuzzy_pred = FALSE, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementExternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getSpatialElementExternalMetrics — getSpatialElementExternalMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. 
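The getSpatialClassInternalMetrics entry above has no example of its own. A minimal sketch on the sp_toys data used throughout these pages, mirroring the getSpatialInternalMetrics(level="class") example shown further below; the snippet is an illustration rather than part of the package reference:
library(poem)
data(sp_toys)
# per-class CHAOS, PAS and ELSA scores for the toy ground-truth labels
getSpatialClassInternalMetrics(labels=sp_toys$label,
                               location=sp_toys[,c("x","y")], k=6)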
location matrix data.frame coordinates k number neighbors used calculating fuzzy class memberships fuzzy metrics, calculating weighted accuracy. alpha parameter control extend spot contribute class composition calculation. \"equal\" means weighted neighbors. numeric value 0 1 means weight frequency contribution spot , frequency contribution knn 1-alpha. default 0.5. metrics vector metric names compute. fuzzy_true Logical; whether compute fuzzy class memberships true. fuzzy_pred Logical; whether compute fuzzy class memberships pred. ... Optional params getFuzzyPartitionElementMetrics() findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementExternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getSpatialElementExternalMetrics — getSpatialElementExternalMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementInternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute spot-level internal evaluation metrics for spatially-resolved data — getSpatialElementInternalMetrics","title":"Compute spot-level internal evaluation metrics for spatially-resolved data — getSpatialElementInternalMetrics","text":"Computes selection internal clustering evaluation metrics spatial data spot level.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementInternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute spot-level internal evaluation metrics for spatially-resolved data — getSpatialElementInternalMetrics","text":"","code":"getSpatialElementInternalMetrics( labels, location, k = 6, metrics = c(\"PAS\", \"ELSA\"), ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementInternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute spot-level internal evaluation metrics for spatially-resolved data — getSpatialElementInternalMetrics","text":"labels vector containing labels evaluated. location numerical matrix containing location information, rows samples columns location dimensions. k size spatial neighborhood look spot. used calculating PAS ELSA scores. metrics Possible metrics: \"PAS\" \"ELSA\". ... Optional params PAS().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementInternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute spot-level internal evaluation metrics for spatially-resolved data — getSpatialElementInternalMetrics","text":"dataframe containing metric values samples dataset. PAS calculated, value Boolean abnormality spot. 
ELSA calculated, Ea, Ec ELSA spots returned.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialExternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute external metrics for spatial data — getSpatialExternalMetrics","title":"Compute external metrics for spatial data — getSpatialExternalMetrics","text":"Computes selection external clustering evaluation metrics spatial data.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialExternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute external metrics for spatial data — getSpatialExternalMetrics","text":"","code":"getSpatialExternalMetrics( true, pred, location, k = 6, alpha = 0.5, level = \"class\", metrics = c(\"SpatialWH\", \"SpatialAWH\", \"SpatialWC\", \"SpatialAWC\"), fuzzy_true = TRUE, fuzzy_pred = FALSE, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialExternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute external metrics for spatial data — getSpatialExternalMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. location matrix data.frame coordinates k number neighbors used calculating fuzzy class memberships fuzzy metrics, calculating weighted accuracy. alpha parameter control extend spot contribute class composition calculation. \"equal\" means weighted neighbors. numeric value 0 1 means weight frequency contribution spot , frequency contribution knn 1-alpha. default 0.5. level level calculate metrics. Options include \"element\", \"class\" \"dataset\". metrics metrics compute. See details. fuzzy_true Logical; whether compute fuzzy class memberships true. fuzzy_pred Logical; whether compute fuzzy class memberships pred. ... Optional params getFuzzyPartitionElementMetrics() findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialExternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute external metrics for spatial data — getSpatialExternalMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialExternalMetrics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Compute external metrics for spatial data — getSpatialExternalMetrics","text":"allowed values metrics depend value level: level = \"element\", allowed metrics : \"SpatialSPC\", \"SpatialNPC\". level = \"class\", allowed metrics : \"SpatialWH\",\"SpatialAWH\", \"SpatialWC\",\"SpatialAWC\". level = \"dataset\", allowed metrics : \"SpatialRI\",\"SpatialARI\",\"SpatialWH\",\"SpatialAWH\", \"SpatialWC\",\"SpatialAWC\",\"SpatialAccuracy\".","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialExternalMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute external metrics for spatial data — getSpatialExternalMetrics","text":"","code":"data(sp_toys) data <- sp_toys getSpatialExternalMetrics(data$label, data$p1, data[,c(\"x\", \"y\")], k=6, level=\"class\") #> Comparing between a fuzzy truth and a hard prediction... 
#> Standard error of the mean NDC across permutations:0.000636 #> SpatialWH SpatialAWH SpatialWC SpatialAWC class cluster #> 1 NA NA 0.779245 0.5328745 1 NA #> 2 NA NA 1.000000 1.0000000 2 NA #> 3 1.000000 1.0000000 NA NA NA 1 #> 4 0.804378 0.5906445 NA NA NA 2"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalExternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute dataset-level external evaluation metrics for spatially-resolved data — getSpatialGlobalExternalMetrics","title":"Compute dataset-level external evaluation metrics for spatially-resolved data — getSpatialGlobalExternalMetrics","text":"Computes selection external clustering evaluation metrics spatial data dataset level. Options include series fuzzy pair-counting metrics set matching-based accuracy.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalExternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute dataset-level external evaluation metrics for spatially-resolved data — getSpatialGlobalExternalMetrics","text":"","code":"getSpatialGlobalExternalMetrics( true, pred, location, k = 6, alpha = 0.5, metrics = c(\"SpatialRI\", \"SpatialARI\", \"SpatialWH\", \"SpatialAWH\", \"SpatialWC\", \"SpatialAWC\", \"SpatialAccuracy\"), fuzzy_true = TRUE, fuzzy_pred = FALSE, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalExternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute dataset-level external evaluation metrics for spatially-resolved data — getSpatialGlobalExternalMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. location matrix data.frame coordinates k number neighbors used calculating fuzzy class memberships fuzzy metrics, calculating weighted accuracy. alpha parameter control extend spot contribute class composition calculation. \"equal\" means weighted neighbors. numeric value 0 1 means weight frequency contribution spot , frequency contribution knn 1-alpha. default 0.5. metrics vector metric names compute. fuzzy_true Logical; whether compute fuzzy class memberships true. fuzzy_pred Logical; whether compute fuzzy class memberships pred. ... Optional params poem::FuzzyPartitionMetrics() findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalExternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute dataset-level external evaluation metrics for spatially-resolved data — getSpatialGlobalExternalMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalInternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute dataset-level internal evaluation metrics for spatially-resolved data — getSpatialGlobalInternalMetrics","title":"Compute dataset-level internal evaluation metrics for spatially-resolved data — getSpatialGlobalInternalMetrics","text":"Computes selection internal clustering evaluation metrics spatial data dataset level. 
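The getSpatialGlobalExternalMetrics entry above also has no example. A minimal sketch on the sp_toys data, evaluating the toy prediction p1 at the dataset level; the call follows the documented usage with the default metrics, alpha and fuzzy_true/fuzzy_pred settings, and is illustrative only (output not shown):
library(poem)
data(sp_toys)
# dataset-level fuzzy pair-counting metrics plus set-matching accuracy for p1
getSpatialGlobalExternalMetrics(true=sp_toys$label, pred=sp_toys$p1,
                                location=sp_toys[,c("x","y")], k=6)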
MPC, PC PE internal metrics fuzzy clustering, implementations package fclust used.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalInternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute dataset-level internal evaluation metrics for spatially-resolved data — getSpatialGlobalInternalMetrics","text":"","code":"getSpatialGlobalInternalMetrics( labels, location, k = 6, metrics = c(\"PAS\", \"ELSA\", \"CHAOS\"), ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalInternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute dataset-level internal evaluation metrics for spatially-resolved data — getSpatialGlobalInternalMetrics","text":"labels vector containing labels evaluated. location numerical matrix containing location information, rows samples columns location dimensions. k size spatial neighborhood look spot. used calculating PAS ELSA scores. metrics metrics compute. See details. ... Optional arguments PAS().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalInternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute dataset-level internal evaluation metrics for spatially-resolved data — getSpatialGlobalInternalMetrics","text":"named vector containing metric values. Possible metrics : PAS Proportion abnormal spots (PAS score). ELSA Entropy-based Local indicator Spatial Association (ELSA score). CHAOS Spatial Chaos Score. MPC Modified partition coefficient PC Partition coefficient PE Partition entropy","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalInternalMetrics.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Compute dataset-level internal evaluation metrics for spatially-resolved data — getSpatialGlobalInternalMetrics","text":"Yuan, Zhiyuan, et al., 2024; 10.1038/s41592-024-02215-8 Naimi, Babak, et al., 2019; 10.1016/j.spasta.2018.10.001 Wang, et al., 2022; 10.1016/j.ins.2022.11.010","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialInternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute internal metrics for spatial data — getSpatialInternalMetrics","title":"Compute internal metrics for spatial data — getSpatialInternalMetrics","text":"Computes selection internal clustering evaluation metrics spatial data.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialInternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute internal metrics for spatial data — getSpatialInternalMetrics","text":"","code":"getSpatialInternalMetrics( labels, location, k = 6, level = \"class\", metrics = c(\"CHAOS\", \"PAS\", \"ELSA\"), ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialInternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute internal metrics for spatial data — getSpatialInternalMetrics","text":"labels vector containing labels evaluated. location numerical matrix containing location information, rows samples columns location dimensions. k size spatial neighborhood look spot. used calculating PAS ELSA scores. level level calculate metrics. Options include \"element\", \"class\" \"dataset\". metrics metrics compute. See details. ... 
Optional params PAS().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialInternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute internal metrics for spatial data — getSpatialInternalMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialInternalMetrics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Compute internal metrics for spatial data — getSpatialInternalMetrics","text":"allowed values metrics depend value level: level = \"element\", allowed metrics : \"PAS\", \"ELSA\". level = \"class\", allowed metrics : \"CHAOS\", \"PAS\", \"ELSA\". level = \"dataset\", allowed metrics : \"PAS\": Proportion abnormal spots (PAS score) \"ELSA\": Entropy-based Local indicator Spatial Association (ELSA score) \"CHAOS\": Spatial Chaos Score. \"MPC\": Modified partition coefficient \"PC\": Partition coefficient \"PE\": Partition entropy","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialInternalMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute internal metrics for spatial data — getSpatialInternalMetrics","text":"","code":"data(sp_toys) data <- sp_toys getSpatialInternalMetrics(data$label, data[,c(\"x\", \"y\")], k=6, level=\"class\") #> class PAS ELSA.Ea ELSA.Ec ELSA CHAOS #> 1 1 0.008333333 0.07106927 0.1940084 0.06040663 0.2305866 #> 2 2 0.008333333 0.06918805 0.1944579 0.05840334 0.2305866"},{"path":"https://roseyuan.github.io/poem/reference/knnComposition.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute neighborhood composition — knnComposition","title":"Compute neighborhood composition — knnComposition","text":"given dataset locations labels, compute label composition neighborhood sample.","code":""},{"path":"https://roseyuan.github.io/poem/reference/knnComposition.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute neighborhood composition — knnComposition","text":"","code":"knnComposition(location, k = 6, labels, alpha = 0.5, ...)"},{"path":"https://roseyuan.github.io/poem/reference/knnComposition.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute neighborhood composition — knnComposition","text":"location numeric data matrix containing location information, rows points columns location dimensions. k number nearest neighbors look . labels vector containing label dataset. alpha parameter control extend spot contribute class composition calculation. \"equal\" means weighted neighbors. numeric value 0 1 means weight frequency contribution spot , frequency contribution knn 1-alpha. default 0.5. ... 
Optional arguments findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/knnComposition.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute neighborhood composition — knnComposition","text":"numerical matrix indicating composition, rows correspond samples columns correspond classes label.","code":""},{"path":"https://roseyuan.github.io/poem/reference/knnComposition.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute neighborhood composition — knnComposition","text":"","code":"data(sp_toys) data <- sp_toys knnComposition(data[,c(\"x\", \"y\")], k=6, data$label) #> 1 2 #> 1 0.0000000 1.0000000 #> 2 0.0000000 1.0000000 #> 3 0.0000000 1.0000000 #> 4 0.1428571 0.8571429 #> 5 0.6666667 0.3333333 #> 6 1.0000000 0.0000000 #> 7 1.0000000 0.0000000 #> 8 1.0000000 0.0000000 #> 9 1.0000000 0.0000000 #> 10 1.0000000 0.0000000 #> 11 1.0000000 0.0000000 #> 12 1.0000000 0.0000000 #> 13 1.0000000 0.0000000 #> 14 1.0000000 0.0000000 #> 15 1.0000000 0.0000000 #> 16 0.0000000 1.0000000 #> 17 0.0000000 1.0000000 #> 18 0.0000000 1.0000000 #> 19 0.1666667 0.8333333 #> 20 0.8333333 0.1666667 #> 21 1.0000000 0.0000000 #> 22 1.0000000 0.0000000 #> 23 1.0000000 0.0000000 #> 24 1.0000000 0.0000000 #> 25 1.0000000 0.0000000 #> 26 1.0000000 0.0000000 #> 27 1.0000000 0.0000000 #> 28 1.0000000 0.0000000 #> 29 1.0000000 0.0000000 #> 30 1.0000000 0.0000000 #> 31 0.0000000 1.0000000 #> 32 0.0000000 1.0000000 #> 33 0.0000000 1.0000000 #> 34 0.0000000 1.0000000 #> 35 0.1666667 0.8333333 #> 36 0.8333333 0.1666667 #> 37 1.0000000 0.0000000 #> 38 1.0000000 0.0000000 #> 39 1.0000000 0.0000000 #> 40 1.0000000 0.0000000 #> 41 1.0000000 0.0000000 #> 42 1.0000000 0.0000000 #> 43 1.0000000 0.0000000 #> 44 1.0000000 0.0000000 #> 45 1.0000000 0.0000000 #> 46 0.0000000 1.0000000 #> 47 0.0000000 1.0000000 #> 48 0.0000000 1.0000000 #> 49 0.0000000 1.0000000 #> 50 0.1666667 0.8333333 #> 51 0.8333333 0.1666667 #> 52 1.0000000 0.0000000 #> 53 1.0000000 0.0000000 #> 54 1.0000000 0.0000000 #> 55 1.0000000 0.0000000 #> 56 1.0000000 0.0000000 #> 57 1.0000000 0.0000000 #> 58 1.0000000 0.0000000 #> 59 1.0000000 0.0000000 #> 60 1.0000000 0.0000000 #> 61 0.0000000 1.0000000 #> 62 0.0000000 1.0000000 #> 63 0.0000000 1.0000000 #> 64 0.0000000 1.0000000 #> 65 0.0000000 1.0000000 #> 66 0.1666667 0.8333333 #> 67 0.8333333 0.1666667 #> 68 1.0000000 0.0000000 #> 69 1.0000000 0.0000000 #> 70 1.0000000 0.0000000 #> 71 1.0000000 0.0000000 #> 72 1.0000000 0.0000000 #> 73 1.0000000 0.0000000 #> 74 1.0000000 0.0000000 #> 75 1.0000000 0.0000000 #> 76 0.0000000 1.0000000 #> 77 0.0000000 1.0000000 #> 78 0.0000000 1.0000000 #> 79 0.0000000 1.0000000 #> 80 0.0000000 1.0000000 #> 81 0.1666667 0.8333333 #> 82 0.8333333 0.1666667 #> 83 1.0000000 0.0000000 #> 84 1.0000000 0.0000000 #> 85 1.0000000 0.0000000 #> 86 1.0000000 0.0000000 #> 87 1.0000000 0.0000000 #> 88 1.0000000 0.0000000 #> 89 1.0000000 0.0000000 #> 90 1.0000000 0.0000000 #> 91 0.0000000 1.0000000 #> 92 0.0000000 1.0000000 #> 93 0.0000000 1.0000000 #> 94 0.0000000 1.0000000 #> 95 0.0000000 1.0000000 #> 96 0.0000000 1.0000000 #> 97 0.1666667 0.8333333 #> 98 0.8333333 0.1666667 #> 99 1.0000000 0.0000000 #> 100 1.0000000 0.0000000 #> 101 1.0000000 0.0000000 #> 102 1.0000000 0.0000000 #> 103 1.0000000 0.0000000 #> 104 1.0000000 0.0000000 #> 105 1.0000000 0.0000000 #> 106 0.0000000 1.0000000 #> 107 0.0000000 1.0000000 #> 108 0.0000000 1.0000000 #> 109 0.0000000 1.0000000 #> 110 0.0000000 1.0000000 
#> 111 0.0000000 1.0000000 #> 112 0.1666667 0.8333333 #> 113 0.8333333 0.1666667 #> 114 1.0000000 0.0000000 #> 115 1.0000000 0.0000000 #> 116 1.0000000 0.0000000 #> 117 1.0000000 0.0000000 #> 118 1.0000000 0.0000000 #> 119 1.0000000 0.0000000 #> 120 1.0000000 0.0000000 #> 121 0.0000000 1.0000000 #> 122 0.0000000 1.0000000 #> 123 0.0000000 1.0000000 #> 124 0.0000000 1.0000000 #> 125 0.0000000 1.0000000 #> 126 0.0000000 1.0000000 #> 127 0.0000000 1.0000000 #> 128 0.1666667 0.8333333 #> 129 0.8333333 0.1666667 #> 130 1.0000000 0.0000000 #> 131 1.0000000 0.0000000 #> 132 1.0000000 0.0000000 #> 133 1.0000000 0.0000000 #> 134 1.0000000 0.0000000 #> 135 1.0000000 0.0000000 #> 136 0.0000000 1.0000000 #> 137 0.0000000 1.0000000 #> 138 0.0000000 1.0000000 #> 139 0.0000000 1.0000000 #> 140 0.0000000 1.0000000 #> 141 0.0000000 1.0000000 #> 142 0.0000000 1.0000000 #> 143 0.1666667 0.8333333 #> 144 0.8333333 0.1666667 #> 145 1.0000000 0.0000000 #> 146 1.0000000 0.0000000 #> 147 1.0000000 0.0000000 #> 148 1.0000000 0.0000000 #> 149 1.0000000 0.0000000 #> 150 1.0000000 0.0000000 #> 151 0.0000000 1.0000000 #> 152 0.0000000 1.0000000 #> 153 0.0000000 1.0000000 #> 154 0.0000000 1.0000000 #> 155 0.0000000 1.0000000 #> 156 0.0000000 1.0000000 #> 157 0.0000000 1.0000000 #> 158 0.0000000 1.0000000 #> 159 0.1666667 0.8333333 #> 160 0.8333333 0.1666667 #> 161 1.0000000 0.0000000 #> 162 1.0000000 0.0000000 #> 163 1.0000000 0.0000000 #> 164 1.0000000 0.0000000 #> 165 1.0000000 0.0000000 #> 166 0.0000000 1.0000000 #> 167 0.0000000 1.0000000 #> 168 0.0000000 1.0000000 #> 169 0.0000000 1.0000000 #> 170 0.0000000 1.0000000 #> 171 0.0000000 1.0000000 #> 172 0.0000000 1.0000000 #> 173 0.0000000 1.0000000 #> 174 0.1666667 0.8333333 #> 175 0.8333333 0.1666667 #> 176 1.0000000 0.0000000 #> 177 1.0000000 0.0000000 #> 178 1.0000000 0.0000000 #> 179 1.0000000 0.0000000 #> 180 1.0000000 0.0000000 #> 181 0.0000000 1.0000000 #> 182 0.0000000 1.0000000 #> 183 0.0000000 1.0000000 #> 184 0.0000000 1.0000000 #> 185 0.0000000 1.0000000 #> 186 0.0000000 1.0000000 #> 187 0.0000000 1.0000000 #> 188 0.0000000 1.0000000 #> 189 0.0000000 1.0000000 #> 190 0.1666667 0.8333333 #> 191 0.8333333 0.1666667 #> 192 1.0000000 0.0000000 #> 193 1.0000000 0.0000000 #> 194 1.0000000 0.0000000 #> 195 1.0000000 0.0000000 #> 196 0.0000000 1.0000000 #> 197 0.0000000 1.0000000 #> 198 0.0000000 1.0000000 #> 199 0.0000000 1.0000000 #> 200 0.0000000 1.0000000 #> 201 0.0000000 1.0000000 #> 202 0.0000000 1.0000000 #> 203 0.0000000 1.0000000 #> 204 0.0000000 1.0000000 #> 205 0.1666667 0.8333333 #> 206 0.8333333 0.1666667 #> 207 1.0000000 0.0000000 #> 208 1.0000000 0.0000000 #> 209 1.0000000 0.0000000 #> 210 1.0000000 0.0000000 #> 211 0.0000000 1.0000000 #> 212 0.0000000 1.0000000 #> 213 0.0000000 1.0000000 #> 214 0.0000000 1.0000000 #> 215 0.0000000 1.0000000 #> 216 0.0000000 1.0000000 #> 217 0.0000000 1.0000000 #> 218 0.0000000 1.0000000 #> 219 0.0000000 1.0000000 #> 220 0.0000000 1.0000000 #> 221 0.1666667 0.8333333 #> 222 0.8333333 0.1666667 #> 223 1.0000000 0.0000000 #> 224 1.0000000 0.0000000 #> 225 1.0000000 0.0000000 #> 226 0.0000000 1.0000000 #> 227 0.0000000 1.0000000 #> 228 0.0000000 1.0000000 #> 229 0.0000000 1.0000000 #> 230 0.0000000 1.0000000 #> 231 0.0000000 1.0000000 #> 232 0.0000000 1.0000000 #> 233 0.0000000 1.0000000 #> 234 0.0000000 1.0000000 #> 235 0.0000000 1.0000000 #> 236 0.2857143 0.7142857 #> 237 0.8571429 0.1428571 #> 238 1.0000000 0.0000000 #> 239 1.0000000 0.0000000 #> 240 1.0000000 
0.0000000"},{"path":"https://roseyuan.github.io/poem/reference/matchSets.html","id":null,"dir":"Reference","previous_headings":"","what":"Match two partitions using Hungarian algorithm — matchSets","title":"Match two partitions using Hungarian algorithm — matchSets","text":"Match sets partitions reference partition using Hungarian algorithm optimize F1 scores.","code":""},{"path":"https://roseyuan.github.io/poem/reference/matchSets.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Match two partitions using Hungarian algorithm — matchSets","text":"","code":"matchSets(pred, true, forceMatch = TRUE, returnIndices = is.integer(true))"},{"path":"https://roseyuan.github.io/poem/reference/matchSets.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Match two partitions using Hungarian algorithm — matchSets","text":"pred integer factor cluster labels true integer factor reference labels forceMatch Logical; whether enforce match every set pred returnIndices Logical; whether return indices rather levels","code":""},{"path":"https://roseyuan.github.io/poem/reference/matchSets.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Match two partitions using Hungarian algorithm — matchSets","text":"vector matching sets (.e. level) true every set (.e. level) pred.","code":""},{"path":"https://roseyuan.github.io/poem/reference/metric_info.html","id":null,"dir":"Reference","previous_headings":"","what":"Metrics Information — metric_info","title":"Metrics Information — metric_info","text":"dataframe storing information metrics","code":""},{"path":"https://roseyuan.github.io/poem/reference/metric_info.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Metrics Information — metric_info","text":"","code":"metric_info"},{"path":[]},{"path":"https://roseyuan.github.io/poem/reference/metric_info.html","id":"metric-info","dir":"Reference","previous_headings":"","what":"metric_info","title":"Metrics Information — metric_info","text":"data frame.","code":""},{"path":"https://roseyuan.github.io/poem/reference/mockData.html","id":null,"dir":"Reference","previous_headings":"","what":"Generate mock multidimensional data — mockData","title":"Generate mock multidimensional data — mockData","text":"Generates mock multidimensional data given number classes points, testing.","code":""},{"path":"https://roseyuan.github.io/poem/reference/mockData.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Generate mock multidimensional data — mockData","text":"","code":"mockData( Ns = c(25, 15), classDiff = 2, Sds = 1, ndims = 2, spread = c(1, 2), rndFn = rnorm )"},{"path":"https://roseyuan.github.io/poem/reference/mockData.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Generate mock multidimensional data — mockData","text":"Ns vector one positive integers specifying number elements class. classDiff distances classes. 2 classes, can dist object symmetric matrix length(Ns)-1 columns/rows lower triangle indicates desired distances classes. Sds standard deviation. Can either fixed value, value per class, matrix values class (rows) dimension (column). ndims number dimensions generate (default 2). spread spread points. Can either fixed value, value per class, matrix values class (rows) dimension (col). 
rndFn random function, default rnorm, also work rlnorm similar.","code":""},{"path":"https://roseyuan.github.io/poem/reference/mockData.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Generate mock multidimensional data — mockData","text":"data.frame coordinates class column.","code":""},{"path":"https://roseyuan.github.io/poem/reference/mockData.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Generate mock multidimensional data — mockData","text":"","code":"d <- mockData()"},{"path":"https://roseyuan.github.io/poem/reference/nnWeightedAccuracy.html","id":null,"dir":"Reference","previous_headings":"","what":"nnWeightedAccuracy — nnWeightedAccuracy","title":"nnWeightedAccuracy — nnWeightedAccuracy","text":"Computes accuracy score weighs elements/spots misclassified proportion (spatial) neighborhood element/spot's predicted class. reduces weight misclassifications happening boundary domains.","code":""},{"path":"https://roseyuan.github.io/poem/reference/nnWeightedAccuracy.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"nnWeightedAccuracy — nnWeightedAccuracy","text":"","code":"nnWeightedAccuracy(true, pred, location, k = 5, ...)"},{"path":"https://roseyuan.github.io/poem/reference/nnWeightedAccuracy.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"nnWeightedAccuracy — nnWeightedAccuracy","text":"true True class labels (vector coercible factor) pred Predicted labels (vector coercible factor) location spatial coordinates compute nearest neighbors. k Number nearest neighbors ... Optional params passed findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/nnWeightedAccuracy.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"nnWeightedAccuracy — nnWeightedAccuracy","text":"scalar representing weighted accuracy.","code":""},{"path":"https://roseyuan.github.io/poem/reference/noisy_moon.html","id":null,"dir":"Reference","previous_headings":"","what":"The noisy moon dataset — noisy_moon","title":"The noisy moon dataset — noisy_moon","text":"simple toy dataset consists two interleaving half circles.","code":""},{"path":"https://roseyuan.github.io/poem/reference/noisy_moon.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"The noisy moon dataset — noisy_moon","text":"","code":"noisy_moon"},{"path":[]},{"path":"https://roseyuan.github.io/poem/reference/noisy_moon.html","id":"noisy-moon","dir":"Reference","previous_headings":"","what":"noisy_moon","title":"The noisy moon dataset — noisy_moon","text":"data frame 100 rows 5 columns: x, y Coordinates observations. label Ground truth labels. Either 1 2. kmeans_label Predicted clustering labels using kmeans 2 centers. 
hdbscan_label Predicted clustering labels using hdbscan minPts = 5.","code":""},{"path":"https://roseyuan.github.io/poem/reference/setMatchingAccuracy.html","id":null,"dir":"Reference","previous_headings":"","what":"The non-spatially-weighted counterpart of nnWeightedAccuracy — setMatchingAccuracy","title":"The non-spatially-weighted counterpart of nnWeightedAccuracy — setMatchingAccuracy","text":"non-spatially-weighted counterpart nnWeightedAccuracy","code":""},{"path":"https://roseyuan.github.io/poem/reference/setMatchingAccuracy.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"The non-spatially-weighted counterpart of nnWeightedAccuracy — setMatchingAccuracy","text":"","code":"setMatchingAccuracy(true, pred)"},{"path":"https://roseyuan.github.io/poem/reference/setMatchingAccuracy.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"The non-spatially-weighted counterpart of nnWeightedAccuracy — setMatchingAccuracy","text":"true True class labels (vector coercible factor) pred Predicted labels (vector coercible factor)","code":""},{"path":"https://roseyuan.github.io/poem/reference/setMatchingAccuracy.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"The non-spatially-weighted counterpart of nnWeightedAccuracy — setMatchingAccuracy","text":"scalar representing weighted accuracy.","code":""},{"path":"https://roseyuan.github.io/poem/reference/sp_toys.html","id":null,"dir":"Reference","previous_headings":"","what":"Toy examples of spatial data — sp_toys","title":"Toy examples of spatial data — sp_toys","text":"Toy examples spatial data.","code":""},{"path":"https://roseyuan.github.io/poem/reference/sp_toys.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Toy examples of spatial data — sp_toys","text":"","code":"sp_toys"},{"path":[]},{"path":"https://roseyuan.github.io/poem/reference/sp_toys.html","id":"sp-toys","dir":"Reference","previous_headings":"","what":"sp_toys","title":"Toy examples of spatial data — sp_toys","text":"data frame 240 rows 11 columns, representing 16 x 15 array spots: x, y Coordinates spots. row, col row column index spots. label Ground truth labels. Either 1 2. p1-p6 Hypothetical predicted spatial clustering labels.","code":""},{"path":"https://roseyuan.github.io/poem/reference/toyExamples.html","id":null,"dir":"Reference","previous_headings":"","what":"Toy embedding examples — toyExamples","title":"Toy embedding examples — toyExamples","text":"Toy example 2D embeddings elements different classes, varying mixing spread. Graphs 1-3 20 elements 4 classes, mixed different fashion embedding space. Graphs 4-7 100 elements class1 60 class2, class1 elements vary spread.","code":""},{"path":"https://roseyuan.github.io/poem/reference/toyExamples.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Toy embedding examples — toyExamples","text":"","code":"toyExamples"},{"path":[]},{"path":"https://roseyuan.github.io/poem/reference/toyExamples.html","id":"toyexamples","dir":"Reference","previous_headings":"","what":"toyExamples","title":"Toy embedding examples — toyExamples","text":"data frame. graph name embedding element belongs. x, y Coordinates 2D embedding. class class element belongs.","code":""}]
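The reference entries above describe mockData(), knnComposition(), matchSets(), nnWeightedAccuracy() and setMatchingAccuracy() one at a time; the lines below are a minimal sketch of how they can be combined on the bundled sp_toys data (assuming poem is attached and these helpers are exported as their reference pages indicate; the choice of k = 6 and of the p1 prediction column is purely illustrative):

library(poem)
data(sp_toys)
toy <- sp_toys

## Plain set-matching accuracy: every misclassified spot counts equally.
setMatchingAccuracy(true = toy$label, pred = toy$p1)

## Spatially weighted counterpart: misclassifications at domain boundaries
## are down-weighted, based on the k nearest spatial neighbours of each spot.
nnWeightedAccuracy(true = toy$label, pred = toy$p1,
                   location = toy[, c("x", "y")], k = 6)

## Hungarian matching of predicted clusters to reference classes, and the
## label composition of each spot's spatial neighbourhood.
matchSets(pred = toy$p1, true = toy$label)
head(knnComposition(toy[, c("x", "y")], k = 6, labels = toy$label))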
+[{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":null,"dir":"","previous_headings":"","what":"GNU General Public License","title":"GNU General Public License","text":"Version 3, 29 June 2007Copyright © 2007 Free Software Foundation, Inc. Everyone permitted copy distribute verbatim copies license document, changing allowed.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"preamble","dir":"","previous_headings":"","what":"Preamble","title":"GNU General Public License","text":"GNU General Public License free, copyleft license software kinds works. licenses software practical works designed take away freedom share change works. contrast, GNU General Public License intended guarantee freedom share change versions program–make sure remains free software users. , Free Software Foundation, use GNU General Public License software; applies also work released way authors. can apply programs, . speak free software, referring freedom, price. General Public Licenses designed make sure freedom distribute copies free software (charge wish), receive source code can get want , can change software use pieces new free programs, know can things. protect rights, need prevent others denying rights asking surrender rights. Therefore, certain responsibilities distribute copies software, modify : responsibilities respect freedom others. example, distribute copies program, whether gratis fee, must pass recipients freedoms received. must make sure , , receive can get source code. must show terms know rights. Developers use GNU GPL protect rights two steps: (1) assert copyright software, (2) offer License giving legal permission copy, distribute /modify . developers’ authors’ protection, GPL clearly explains warranty free software. users’ authors’ sake, GPL requires modified versions marked changed, problems attributed erroneously authors previous versions. devices designed deny users access install run modified versions software inside , although manufacturer can . fundamentally incompatible aim protecting users’ freedom change software. systematic pattern abuse occurs area products individuals use, precisely unacceptable. Therefore, designed version GPL prohibit practice products. problems arise substantially domains, stand ready extend provision domains future versions GPL, needed protect freedom users. Finally, every program threatened constantly software patents. States allow patents restrict development use software general-purpose computers, , wish avoid special danger patents applied free program make effectively proprietary. prevent , GPL assures patents used render program non-free. precise terms conditions copying, distribution modification follow.","code":""},{"path":[]},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_0-definitions","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"0. Definitions","title":"GNU General Public License","text":"“License” refers version 3 GNU General Public License. “Copyright” also means copyright-like laws apply kinds works, semiconductor masks. “Program” refers copyrightable work licensed License. licensee addressed “”. “Licensees” “recipients” may individuals organizations. “modify” work means copy adapt part work fashion requiring copyright permission, making exact copy. resulting work called “modified version” earlier work work “based ” earlier work. “covered work” means either unmodified Program work based Program. 
“propagate” work means anything , without permission, make directly secondarily liable infringement applicable copyright law, except executing computer modifying private copy. Propagation includes copying, distribution (without modification), making available public, countries activities well. “convey” work means kind propagation enables parties make receive copies. Mere interaction user computer network, transfer copy, conveying. interactive user interface displays “Appropriate Legal Notices” extent includes convenient prominently visible feature (1) displays appropriate copyright notice, (2) tells user warranty work (except extent warranties provided), licensees may convey work License, view copy License. interface presents list user commands options, menu, prominent item list meets criterion.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_1-source-code","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"1. Source Code","title":"GNU General Public License","text":"“source code” work means preferred form work making modifications . “Object code” means non-source form work. “Standard Interface” means interface either official standard defined recognized standards body, , case interfaces specified particular programming language, one widely used among developers working language. “System Libraries” executable work include anything, work whole, () included normal form packaging Major Component, part Major Component, (b) serves enable use work Major Component, implement Standard Interface implementation available public source code form. “Major Component”, context, means major essential component (kernel, window system, ) specific operating system () executable work runs, compiler used produce work, object code interpreter used run . “Corresponding Source” work object code form means source code needed generate, install, (executable work) run object code modify work, including scripts control activities. However, include work’s System Libraries, general-purpose tools generally available free programs used unmodified performing activities part work. example, Corresponding Source includes interface definition files associated source files work, source code shared libraries dynamically linked subprograms work specifically designed require, intimate data communication control flow subprograms parts work. Corresponding Source need include anything users can regenerate automatically parts Corresponding Source. Corresponding Source work source code form work.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_2-basic-permissions","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"2. Basic Permissions","title":"GNU General Public License","text":"rights granted License granted term copyright Program, irrevocable provided stated conditions met. License explicitly affirms unlimited permission run unmodified Program. output running covered work covered License output, given content, constitutes covered work. License acknowledges rights fair use equivalent, provided copyright law. may make, run propagate covered works convey, without conditions long license otherwise remains force. may convey covered works others sole purpose make modifications exclusively , provide facilities running works, provided comply terms License conveying material control copyright. thus making running covered works must exclusively behalf, direction control, terms prohibit making copies copyrighted material outside relationship . 
Conveying circumstances permitted solely conditions stated . Sublicensing allowed; section 10 makes unnecessary.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_3-protecting-users-legal-rights-from-anti-circumvention-law","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"3. Protecting Users’ Legal Rights From Anti-Circumvention Law","title":"GNU General Public License","text":"covered work shall deemed part effective technological measure applicable law fulfilling obligations article 11 WIPO copyright treaty adopted 20 December 1996, similar laws prohibiting restricting circumvention measures. convey covered work, waive legal power forbid circumvention technological measures extent circumvention effected exercising rights License respect covered work, disclaim intention limit operation modification work means enforcing, work’s users, third parties’ legal rights forbid circumvention technological measures.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_4-conveying-verbatim-copies","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"4. Conveying Verbatim Copies","title":"GNU General Public License","text":"may convey verbatim copies Program’s source code receive , medium, provided conspicuously appropriately publish copy appropriate copyright notice; keep intact notices stating License non-permissive terms added accord section 7 apply code; keep intact notices absence warranty; give recipients copy License along Program. may charge price price copy convey, may offer support warranty protection fee.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_5-conveying-modified-source-versions","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"5. Conveying Modified Source Versions","title":"GNU General Public License","text":"may convey work based Program, modifications produce Program, form source code terms section 4, provided also meet conditions: ) work must carry prominent notices stating modified , giving relevant date. b) work must carry prominent notices stating released License conditions added section 7. requirement modifies requirement section 4 “keep intact notices”. c) must license entire work, whole, License anyone comes possession copy. License therefore apply, along applicable section 7 additional terms, whole work, parts, regardless packaged. License gives permission license work way, invalidate permission separately received . d) work interactive user interfaces, must display Appropriate Legal Notices; however, Program interactive interfaces display Appropriate Legal Notices, work need make . compilation covered work separate independent works, nature extensions covered work, combined form larger program, volume storage distribution medium, called “aggregate” compilation resulting copyright used limit access legal rights compilation’s users beyond individual works permit. Inclusion covered work aggregate cause License apply parts aggregate.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_6-conveying-non-source-forms","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"6. 
Conveying Non-Source Forms","title":"GNU General Public License","text":"may convey covered work object code form terms sections 4 5, provided also convey machine-readable Corresponding Source terms License, one ways: ) Convey object code , embodied , physical product (including physical distribution medium), accompanied Corresponding Source fixed durable physical medium customarily used software interchange. b) Convey object code , embodied , physical product (including physical distribution medium), accompanied written offer, valid least three years valid long offer spare parts customer support product model, give anyone possesses object code either (1) copy Corresponding Source software product covered License, durable physical medium customarily used software interchange, price reasonable cost physically performing conveying source, (2) access copy Corresponding Source network server charge. c) Convey individual copies object code copy written offer provide Corresponding Source. alternative allowed occasionally noncommercially, received object code offer, accord subsection 6b. d) Convey object code offering access designated place (gratis charge), offer equivalent access Corresponding Source way place charge. need require recipients copy Corresponding Source along object code. place copy object code network server, Corresponding Source may different server (operated third party) supports equivalent copying facilities, provided maintain clear directions next object code saying find Corresponding Source. Regardless server hosts Corresponding Source, remain obligated ensure available long needed satisfy requirements. e) Convey object code using peer--peer transmission, provided inform peers object code Corresponding Source work offered general public charge subsection 6d. separable portion object code, whose source code excluded Corresponding Source System Library, need included conveying object code work. “User Product” either (1) “consumer product”, means tangible personal property normally used personal, family, household purposes, (2) anything designed sold incorporation dwelling. determining whether product consumer product, doubtful cases shall resolved favor coverage. particular product received particular user, “normally used” refers typical common use class product, regardless status particular user way particular user actually uses, expects expected use, product. product consumer product regardless whether product substantial commercial, industrial non-consumer uses, unless uses represent significant mode use product. “Installation Information” User Product means methods, procedures, authorization keys, information required install execute modified versions covered work User Product modified version Corresponding Source. information must suffice ensure continued functioning modified object code case prevented interfered solely modification made. convey object code work section , , specifically use , User Product, conveying occurs part transaction right possession use User Product transferred recipient perpetuity fixed term (regardless transaction characterized), Corresponding Source conveyed section must accompanied Installation Information. requirement apply neither third party retains ability install modified object code User Product (example, work installed ROM). requirement provide Installation Information include requirement continue provide support service, warranty, updates work modified installed recipient, User Product modified installed. 
Access network may denied modification materially adversely affects operation network violates rules protocols communication across network. Corresponding Source conveyed, Installation Information provided, accord section must format publicly documented (implementation available public source code form), must require special password key unpacking, reading copying.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_7-additional-terms","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"7. Additional Terms","title":"GNU General Public License","text":"“Additional permissions” terms supplement terms License making exceptions one conditions. Additional permissions applicable entire Program shall treated though included License, extent valid applicable law. additional permissions apply part Program, part may used separately permissions, entire Program remains governed License without regard additional permissions. convey copy covered work, may option remove additional permissions copy, part . (Additional permissions may written require removal certain cases modify work.) may place additional permissions material, added covered work, can give appropriate copyright permission. Notwithstanding provision License, material add covered work, may (authorized copyright holders material) supplement terms License terms: ) Disclaiming warranty limiting liability differently terms sections 15 16 License; b) Requiring preservation specified reasonable legal notices author attributions material Appropriate Legal Notices displayed works containing ; c) Prohibiting misrepresentation origin material, requiring modified versions material marked reasonable ways different original version; d) Limiting use publicity purposes names licensors authors material; e) Declining grant rights trademark law use trade names, trademarks, service marks; f) Requiring indemnification licensors authors material anyone conveys material (modified versions ) contractual assumptions liability recipient, liability contractual assumptions directly impose licensors authors. non-permissive additional terms considered “restrictions” within meaning section 10. Program received , part , contains notice stating governed License along term restriction, may remove term. license document contains restriction permits relicensing conveying License, may add covered work material governed terms license document, provided restriction survive relicensing conveying. add terms covered work accord section, must place, relevant source files, statement additional terms apply files, notice indicating find applicable terms. Additional terms, permissive non-permissive, may stated form separately written license, stated exceptions; requirements apply either way.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_8-termination","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"8. Termination","title":"GNU General Public License","text":"may propagate modify covered work except expressly provided License. attempt otherwise propagate modify void, automatically terminate rights License (including patent licenses granted third paragraph section 11). However, cease violation License, license particular copyright holder reinstated () provisionally, unless copyright holder explicitly finally terminates license, (b) permanently, copyright holder fails notify violation reasonable means prior 60 days cessation. 
Moreover, license particular copyright holder reinstated permanently copyright holder notifies violation reasonable means, first time received notice violation License (work) copyright holder, cure violation prior 30 days receipt notice. Termination rights section terminate licenses parties received copies rights License. rights terminated permanently reinstated, qualify receive new licenses material section 10.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_9-acceptance-not-required-for-having-copies","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"9. Acceptance Not Required for Having Copies","title":"GNU General Public License","text":"required accept License order receive run copy Program. Ancillary propagation covered work occurring solely consequence using peer--peer transmission receive copy likewise require acceptance. However, nothing License grants permission propagate modify covered work. actions infringe copyright accept License. Therefore, modifying propagating covered work, indicate acceptance License .","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_10-automatic-licensing-of-downstream-recipients","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"10. Automatic Licensing of Downstream Recipients","title":"GNU General Public License","text":"time convey covered work, recipient automatically receives license original licensors, run, modify propagate work, subject License. responsible enforcing compliance third parties License. “entity transaction” transaction transferring control organization, substantially assets one, subdividing organization, merging organizations. propagation covered work results entity transaction, party transaction receives copy work also receives whatever licenses work party’s predecessor interest give previous paragraph, plus right possession Corresponding Source work predecessor interest, predecessor can get reasonable efforts. may impose restrictions exercise rights granted affirmed License. example, may impose license fee, royalty, charge exercise rights granted License, may initiate litigation (including cross-claim counterclaim lawsuit) alleging patent claim infringed making, using, selling, offering sale, importing Program portion .","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_11-patents","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"11. Patents","title":"GNU General Public License","text":"“contributor” copyright holder authorizes use License Program work Program based. work thus licensed called contributor’s “contributor version”. contributor’s “essential patent claims” patent claims owned controlled contributor, whether already acquired hereafter acquired, infringed manner, permitted License, making, using, selling contributor version, include claims infringed consequence modification contributor version. purposes definition, “control” includes right grant patent sublicenses manner consistent requirements License. contributor grants non-exclusive, worldwide, royalty-free patent license contributor’s essential patent claims, make, use, sell, offer sale, import otherwise run, modify propagate contents contributor version. following three paragraphs, “patent license” express agreement commitment, however denominated, enforce patent (express permission practice patent covenant sue patent infringement). “grant” patent license party means make agreement commitment enforce patent party. 
convey covered work, knowingly relying patent license, Corresponding Source work available anyone copy, free charge terms License, publicly available network server readily accessible means, must either (1) cause Corresponding Source available, (2) arrange deprive benefit patent license particular work, (3) arrange, manner consistent requirements License, extend patent license downstream recipients. “Knowingly relying” means actual knowledge , patent license, conveying covered work country, recipient’s use covered work country, infringe one identifiable patents country reason believe valid. , pursuant connection single transaction arrangement, convey, propagate procuring conveyance , covered work, grant patent license parties receiving covered work authorizing use, propagate, modify convey specific copy covered work, patent license grant automatically extended recipients covered work works based . patent license “discriminatory” include within scope coverage, prohibits exercise , conditioned non-exercise one rights specifically granted License. may convey covered work party arrangement third party business distributing software, make payment third party based extent activity conveying work, third party grants, parties receive covered work , discriminatory patent license () connection copies covered work conveyed (copies made copies), (b) primarily connection specific products compilations contain covered work, unless entered arrangement, patent license granted, prior 28 March 2007. Nothing License shall construed excluding limiting implied license defenses infringement may otherwise available applicable patent law.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_12-no-surrender-of-others-freedom","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"12. No Surrender of Others’ Freedom","title":"GNU General Public License","text":"conditions imposed (whether court order, agreement otherwise) contradict conditions License, excuse conditions License. convey covered work satisfy simultaneously obligations License pertinent obligations, consequence may convey . example, agree terms obligate collect royalty conveying convey Program, way satisfy terms License refrain entirely conveying Program.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_13-use-with-the-gnu-affero-general-public-license","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"13. Use with the GNU Affero General Public License","title":"GNU General Public License","text":"Notwithstanding provision License, permission link combine covered work work licensed version 3 GNU Affero General Public License single combined work, convey resulting work. terms License continue apply part covered work, special requirements GNU Affero General Public License, section 13, concerning interaction network apply combination .","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_14-revised-versions-of-this-license","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"14. Revised Versions of this License","title":"GNU General Public License","text":"Free Software Foundation may publish revised /new versions GNU General Public License time time. new versions similar spirit present version, may differ detail address new problems concerns. version given distinguishing version number. 
Program specifies certain numbered version GNU General Public License “later version” applies , option following terms conditions either numbered version later version published Free Software Foundation. Program specify version number GNU General Public License, may choose version ever published Free Software Foundation. Program specifies proxy can decide future versions GNU General Public License can used, proxy’s public statement acceptance version permanently authorizes choose version Program. Later license versions may give additional different permissions. However, additional obligations imposed author copyright holder result choosing follow later version.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_15-disclaimer-of-warranty","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"15. Disclaimer of Warranty","title":"GNU General Public License","text":"WARRANTY PROGRAM, EXTENT PERMITTED APPLICABLE LAW. EXCEPT OTHERWISE STATED WRITING COPYRIGHT HOLDERS /PARTIES PROVIDE PROGRAM “” WITHOUT WARRANTY KIND, EITHER EXPRESSED IMPLIED, INCLUDING, LIMITED , IMPLIED WARRANTIES MERCHANTABILITY FITNESS PARTICULAR PURPOSE. ENTIRE RISK QUALITY PERFORMANCE PROGRAM . PROGRAM PROVE DEFECTIVE, ASSUME COST NECESSARY SERVICING, REPAIR CORRECTION.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_16-limitation-of-liability","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"16. Limitation of Liability","title":"GNU General Public License","text":"EVENT UNLESS REQUIRED APPLICABLE LAW AGREED WRITING COPYRIGHT HOLDER, PARTY MODIFIES /CONVEYS PROGRAM PERMITTED , LIABLE DAMAGES, INCLUDING GENERAL, SPECIAL, INCIDENTAL CONSEQUENTIAL DAMAGES ARISING USE INABILITY USE PROGRAM (INCLUDING LIMITED LOSS DATA DATA RENDERED INACCURATE LOSSES SUSTAINED THIRD PARTIES FAILURE PROGRAM OPERATE PROGRAMS), EVEN HOLDER PARTY ADVISED POSSIBILITY DAMAGES.","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"id_17-interpretation-of-sections-15-and-16","dir":"","previous_headings":"TERMS AND CONDITIONS","what":"17. Interpretation of Sections 15 and 16","title":"GNU General Public License","text":"disclaimer warranty limitation liability provided given local legal effect according terms, reviewing courts shall apply local law closely approximates absolute waiver civil liability connection Program, unless warranty assumption liability accompanies copy Program return fee. END TERMS CONDITIONS","code":""},{"path":"https://roseyuan.github.io/poem/LICENSE.html","id":"how-to-apply-these-terms-to-your-new-programs","dir":"","previous_headings":"","what":"How to Apply These Terms to Your New Programs","title":"GNU General Public License","text":"develop new program, want greatest possible use public, best way achieve make free software everyone can redistribute change terms. , attach following notices program. safest attach start source file effectively state exclusion warranty; file least “copyright” line pointer full notice found. Also add information contact electronic paper mail. program terminal interaction, make output short notice like starts interactive mode: hypothetical commands show w show c show appropriate parts General Public License. course, program’s commands might different; GUI interface, use “box”. also get employer (work programmer) school, , sign “copyright disclaimer” program, necessary. information , apply follow GNU GPL, see . GNU General Public License permit incorporating program proprietary programs. 
program subroutine library, may consider useful permit linking proprietary applications library. want , use GNU Lesser General Public License instead License. first, please read .","code":" Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Copyright (C) This program comes with ABSOLUTELY NO WARRANTY; for details type 'show w'. This is free software, and you are welcome to redistribute it under certain conditions; type 'show c' for details."},{"path":"https://roseyuan.github.io/poem/articles/SpatialExperiment.html","id":"prepare-dataset","dir":"Articles","previous_headings":"","what":"Prepare dataset","title":"Working with SpatialExperiment","text":"package poem can easily integrated workflow SpatialExperiment objects. use Visium_humanDLPFC dataset package STexampleData illustration. Load : SpatialExperiment object, take location information (accessible via spatialCoords) manual annotation colData store dataframe: manual annotation looks like : generate hypothetical domain detection predictions randomly permuting manual annotation. simulate prediction results random noise well merging splitting domains: visualize :","code":"my_cols <-c(\"#D55E00\", \"#CC79A7\",\"#E69F00\",\"#0072B2\",\"#009E73\",\"#F0E442\",\"#56B4E9\",\"#000000\") names(my_cols) <- as.character(seq(my_cols)) spe <- Visium_humanDLPFC() spe ## class: SpatialExperiment ## dim: 33538 4992 ## metadata(0): ## assays(1): counts ## rownames(33538): ENSG00000243485 ENSG00000237613 ... ENSG00000277475 ## ENSG00000268674 ## rowData names(3): gene_id gene_name feature_type ## colnames(4992): AAACAACGAATAGTTC-1 AAACAAGTATCTCCCA-1 ... ## TTGTTTGTATTACACG-1 TTGTTTGTGTAAATTC-1 ## colData names(8): barcode_id sample_id ... 
reference cell_count ## reducedDimNames(0): ## mainExpName: NULL ## altExpNames(0): ## spatialCoords names(2) : pxl_col_in_fullres pxl_row_in_fullres ## imgData names(4): sample_id image_id data scaleFactor data <- data.frame(spatialCoords(spe)) data$reference <- colData(spe)$reference data <- na.omit(data) data$reference <- factor(data$reference, levels=c(\"WM\", \"Layer6\", \"Layer5\", \"Layer4\", \"Layer3\", \"Layer2\", \"Layer1\")) p1 <- ggplot(data) + geom_point(aes(x = pxl_col_in_fullres, y = -pxl_row_in_fullres, color = reference), size=0.3) + labs(x = \"\", y = \"\", color=\"\", title=\"Manual annotation\") + theme_minimal() + scale_color_manual(values = unname(my_cols)) + theme( legend.box.background = element_rect(fill = \"grey90\", color = \"black\", size = 0.1), legend.box.margin = margin(-1, -1, -1, -1), axis.title.x=element_blank(), legend.position = \"bottom\", legend.box.spacing = margin(0), axis.text.x=element_blank(), axis.ticks.x=element_blank(), axis.text.y=element_blank(), axis.ticks.y=element_blank(), panel.spacing.x = unit(-0.5, \"cm\"), panel.grid.major = element_blank(), panel.grid.minor = element_blank(), plot.title = element_text(hjust = 0.5, size=12, margin = margin(b = 5, t = 15))) + guides(color = guide_legend(keywidth = 1, keyheight = 0.8, override.aes = list(size = 3))) p1 set.seed(123) # For reproducibility # Given a factor vector representing clustering results, simulate clustering variations including merging two clusters and adding random noise. simulate_clustering_variation <- function(clusters, split_cluster = NULL, merge_clusters = NULL, noise_level = 0.1) { # Convert to numeric for easier manipulation merge_clusters <- which(levels(clusters) %in% merge_clusters) clusters <- as.numeric(clusters) # 1. Merging two clusters if (!is.null(merge_clusters)) { clusters[clusters %in% merge_clusters] <- merge_clusters[1] # Rename both to the same label } # 2. 
Adding random noise n <- length(clusters) n_noise <- round(n * noise_level) # Number of elements to replace if (n_noise > 0) { noise_indices <- sample(seq_len(n), n_noise) # Random indices to replace existing_levels <- unique(clusters) clusters[noise_indices] <- sample(existing_levels, n_noise, replace = TRUE) # Replace with random levels } # Convert back to factor and return factor(clusters) } # P1: add random noise data$P1 <- simulate_clustering_variation( data$reference, noise_level = 0.1 ) # P2: split Layer 3 into 2 domains, add random noise data$P2 <- as.numeric(data$reference) data$P2[data$reference==\"Layer3\" & data$pxl_col_in_fullres < 8000] <- 8 data$P2 <- factor(as.numeric(factor(data$P2))) data$P2 <- simulate_clustering_variation( data$P2, noise_level = 0.1 ) # P3: merge Layer 4 and Layer 5, add random noise data$P3 <- simulate_clustering_variation( data$reference, merge_clusters = c(\"Layer4\", \"Layer5\"), noise_level = 0.1 ) p2 <- data %>% pivot_longer(cols=-c(\"pxl_col_in_fullres\",\"pxl_row_in_fullres\"), names_to=\"prediction\", values_to=\"domain\") %>% dplyr::filter(prediction != \"reference\") %>% ggplot() + geom_point(aes(x = pxl_col_in_fullres, y = -pxl_row_in_fullres, color = domain), size=0.4) + facet_wrap(~prediction, nrow=2) + labs(x = \"\", y = \"\", color=\"\", title=\"\") + theme_minimal() + scale_color_manual(values = unname(my_cols)) + theme( legend.box.background = element_rect(fill = \"grey90\", color = \"black\", size = 0.1), legend.box.margin = margin(-1, -1, -1, -1), axis.title.x=element_blank(), legend.position = \"bottom\", legend.justification=c(0, 0), legend.box.spacing = margin(0), axis.text.x=element_blank(), axis.ticks.x=element_blank(), axis.text.y=element_blank(), axis.ticks.y=element_blank(), panel.spacing.x = unit(-0.5, \"cm\"), panel.grid.major = element_blank(), panel.grid.minor = element_blank(), plot.title = element_text(hjust = 0.5, size=10)) + guides(color = guide_legend(keywidth = 1, keyheight = 0.8, override.aes = list(size = 3))) ggdraw() + draw_plot(p2 + theme(plot.margin = margin(0, 2, 2, 2))) + # Main plot draw_plot(p1, x = 0.5, y = -0.01, width = 0.5, height = 0.56) # Inset plot"},{"path":"https://roseyuan.github.io/poem/articles/SpatialExperiment.html","id":"calculate-external-spatial-metrics","dir":"Articles","previous_headings":"","what":"Calculate external spatial metrics","title":"Working with SpatialExperiment","text":"can compare P1-P3 manual annotation using external spatial metrics.","code":""},{"path":"https://roseyuan.github.io/poem/articles/SpatialExperiment.html","id":"dataset-level","dir":"Articles","previous_headings":"Calculate external spatial metrics","what":"Dataset level","title":"Working with SpatialExperiment","text":"Let’s first calculate two dataset-level metrics, SpatialARI SpatialAccuracy:","code":"res3 <- getSpatialExternalMetrics(true=data$reference, pred=data$P3, location=data[,c(\"pxl_col_in_fullres\",\"pxl_row_in_fullres\")], level=\"dataset\", metrics=c(\"SpatialARI\",\"SpatialAccuracy\"), k=6, fuzzy_true = TRUE, fuzzy_pred = FALSE) res2 <- getSpatialExternalMetrics(true=data$reference, pred=data$P2, location=data[,c(\"pxl_col_in_fullres\",\"pxl_row_in_fullres\")], level=\"dataset\", metrics=c(\"SpatialARI\",\"SpatialAccuracy\"), k=6, fuzzy_true = TRUE, fuzzy_pred = FALSE) res1 <- getSpatialExternalMetrics(true=data$reference, pred=data$P1, location=data[,c(\"pxl_col_in_fullres\",\"pxl_row_in_fullres\")], level=\"dataset\", metrics=c(\"SpatialARI\",\"SpatialAccuracy\"), k=6, fuzzy_true = TRUE, 
fuzzy_pred = FALSE) cbind(bind_rows(list(res1, res2, res3), .id=\"P\")) %>% pivot_longer(cols=c(\"SpatialARI\", \"SpatialAccuracy\"), names_to=\"metric\", values_to=\"value\") %>% ggplot(aes(x=P, y=value, group=metric)) + geom_point(size=3, aes(color=P)) + facet_wrap(~metric, scales = \"free\") + theme_bw() + labs(x=\"Prediction\")"},{"path":"https://roseyuan.github.io/poem/articles/SpatialExperiment.html","id":"classcluster-level","dir":"Articles","previous_headings":"Calculate external spatial metrics","what":"Class/cluster level","title":"Working with SpatialExperiment","text":"can calculate class/cluster-level metrics, SpatialAWH SpatialAWC, get insights errors predictions make: Note indices columns “class” “cluster” correspond levels original factors passed true pred. align back previous factor values, plot heatmap: class-level AWC highlights P2, Layer3 low completeness. align simulation Layer3 splitted 3 clusters P2. Similarly, cluster-level AWH highlights P3, cluster 3 low homogeneity, consistent merging layer 4 5.","code":"res3 <- getSpatialExternalMetrics(true=data$reference, pred=data$P3, location=data[,c(\"pxl_col_in_fullres\",\"pxl_row_in_fullres\")], level=\"class\", metrics=c(\"SpatialAWH\",\"SpatialAWC\"), k=6, fuzzy_true = TRUE, fuzzy_pred = FALSE) res2 <- getSpatialExternalMetrics(true=data$reference, pred=data$P2, location=data[,c(\"pxl_col_in_fullres\",\"pxl_row_in_fullres\")], level=\"class\", metrics=c(\"SpatialAWH\",\"SpatialAWC\"), k=6, fuzzy_true = TRUE, fuzzy_pred = FALSE) res1 <- getSpatialExternalMetrics(true=data$reference, pred=data$P1, location=data[,c(\"pxl_col_in_fullres\",\"pxl_row_in_fullres\")], level=\"class\", metrics=c(\"SpatialAWH\",\"SpatialAWC\"), k=6, fuzzy_true = TRUE, fuzzy_pred = FALSE) res1 ## SpatialAWH SpatialAWC class cluster ## 1 NA 0.7963689 1 NA ## 2 NA 0.8145576 2 NA ## 3 NA 0.7561886 3 NA ## 4 NA 0.8184988 4 NA ## 5 NA 0.7949846 5 NA ## 6 NA 0.8179805 6 NA ## 7 NA 0.7884382 7 NA ## 8 0.7704762 NA NA 1 ## 9 0.8727733 NA NA 2 ## 10 0.8274801 NA NA 3 ## 11 0.5853277 NA NA 4 ## 12 0.8989078 NA NA 5 ## 13 0.6277691 NA NA 6 ## 14 0.6240081 NA NA 7 awh1 <- na.omit(res1[,c(\"SpatialAWH\", \"cluster\")]) %>% mutate(cluster = levels(data$P1)[cluster]) awh2 <- na.omit(res2[,c(\"SpatialAWH\", \"cluster\")]) %>% mutate(cluster = levels(data$P2)[cluster]) awh3 <- na.omit(res3[,c(\"SpatialAWH\", \"cluster\")]) %>% mutate(cluster = levels(data$P3)[cluster]) awh <- cbind(bind_rows(list(awh1, awh2, awh3), .id=\"P\")) %>% pivot_wider(names_from = cluster, values_from = SpatialAWH) %>% subset(select = -c(P)) awh <- as.matrix(awh) rownames(awh) <- c(\"P1\", \"P2\", \"P3\") awh <- awh[,c(\"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\")] column_ha = HeatmapAnnotation(class=c(\"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\"), col = list(class = my_cols), show_annotation_name = FALSE, show_legend = FALSE) p4 <- awh %>% Heatmap( name = \"AWH\", cluster_columns = FALSE, cluster_rows = FALSE, column_order = c(\"1\",\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\"), bottom_annotation = column_ha, width = ncol(awh)*unit(6, \"mm\"), height = nrow(awh)*unit(7, \"mm\")) p4 <- grid.grabExpr({ draw(p4, merge_legend = TRUE, padding = unit(c(12, 2, 2, 2), \"mm\")) }) awc1 <- na.omit(res1[,c(\"SpatialAWC\", \"class\")]) %>% mutate(class = levels(data$reference)[class]) awc2 <- na.omit(res2[,c(\"SpatialAWC\", \"class\")]) %>% mutate(class = levels(data$reference)[class]) awc3 <- na.omit(res3[,c(\"SpatialAWC\", \"class\")]) %>% mutate(class = 
levels(data$reference)[class]) awc <- cbind(bind_rows(list(awc1, awc2, awc3), .id=\"P\")) %>% pivot_wider(names_from = class, values_from = SpatialAWC) %>% subset(select = -c(P)) awc <- as.matrix(awc) rownames(awc) <- c(\"P1\", \"P2\", \"P3\") col_ls <- my_cols[1:7] names(col_ls) <- c(\"WM\", \"Layer6\", \"Layer5\", \"Layer4\", \"Layer3\", \"Layer2\", \"Layer1\") column_ha = HeatmapAnnotation(class=c(\"WM\", \"Layer6\", \"Layer5\", \"Layer4\", \"Layer3\", \"Layer2\", \"Layer1\"), col = list(class = col_ls), show_annotation_name = FALSE, show_legend = FALSE) p5 <- awc %>% Heatmap( name = \"AWC\", cluster_columns = FALSE, cluster_rows = FALSE, column_order = c(\"WM\", \"Layer6\", \"Layer5\", \"Layer4\", \"Layer3\", \"Layer2\", \"Layer1\"), bottom_annotation = column_ha, width = ncol(awc)*unit(6, \"mm\"), height = nrow(awc)*unit(7, \"mm\")) p5 <- grid.grabExpr({ draw(p5, merge_legend = TRUE) }) plot_grid(p4, p5, rel_widths=c(1,1), scale=c(1, 1))"},{"path":"https://roseyuan.github.io/poem/articles/SpatialExperiment.html","id":"element-level","dir":"Articles","previous_headings":"Calculate external spatial metrics","what":"Element level","title":"Working with SpatialExperiment","text":"One can also calculate element-level metric, SPC, visualization. clear highlights low concordance regions prediction expected.","code":"res1 <- cbind(getSpatialExternalMetrics(true=data$reference, pred=data$P1, location=data[,c(\"pxl_col_in_fullres\",\"pxl_row_in_fullres\")], level=\"element\", metrics=c(\"SpatialSPC\"), k=6, fuzzy_true = TRUE, fuzzy_pred = FALSE, useNegatives = FALSE), data[,c(\"pxl_col_in_fullres\", \"pxl_row_in_fullres\")]) res2 <- cbind(getSpatialExternalMetrics(true=data$reference, pred=data$P2, location=data[,c(\"pxl_col_in_fullres\",\"pxl_row_in_fullres\")], level=\"element\", metrics=c(\"SpatialSPC\"), k=6, fuzzy_true = TRUE, fuzzy_pred = FALSE, useNegatives = FALSE), data[,c(\"pxl_col_in_fullres\", \"pxl_row_in_fullres\")]) res3 <- cbind(getSpatialExternalMetrics(true=data$reference, pred=data$P3, location=data[,c(\"pxl_col_in_fullres\",\"pxl_row_in_fullres\")], level=\"element\", metrics=c(\"SpatialSPC\"), k=6, fuzzy_true = TRUE, fuzzy_pred = FALSE, useNegatives = FALSE), data[,c(\"pxl_col_in_fullres\", \"pxl_row_in_fullres\")]) cbind(bind_rows(list(res1, res2, res3), .id=\"P\")) %>% pivot_longer(cols=c(\"SpatialSPC\"), names_to=\"metric\", values_to=\"value\") %>% ggplot(aes(x = pxl_col_in_fullres, y = - pxl_row_in_fullres, color = value)) + scale_colour_gradient(high=\"white\", low =\"deeppink4\") + geom_point(size=0.3) + facet_wrap(~P, scales = \"free\") + theme_bw() + labs(x=\"Prediction\", y=\"\", color=\"SpatialSPC\")"},{"path":"https://roseyuan.github.io/poem/articles/SpatialExperiment.html","id":"calculate-internal-spatial-metrics","dir":"Articles","previous_headings":"","what":"Calculate internal spatial metrics","title":"Working with SpatialExperiment","text":"manual annotation available, one can use internal metrics, CHAOS, ELSA PAS, understand domain continuity local homogeneity domain detection result. illustrate , simulate P4 P5 20% 30% random noise, respectively. calculate internal spatial metrics P1-P5: lower scores, smoother predictions. expected, smoothness decrease P3 P5 noise level increase. internal metrics can also calculated element level. 
example can calculate element-wise ELSA score, score local diversity can regarded edge detector:","code":"# P4: add 20% random noise data$P4 <- simulate_clustering_variation( data$reference, noise_level = 0.2 ) # P5: add 30% random noise data$P5 <- simulate_clustering_variation( data$reference, noise_level = 0.3 ) internal <-lapply(setNames(c(\"reference\",\"P1\",\"P2\",\"P3\",\"P4\",\"P5\"), c(\"reference\",\"P1\",\"P2\",\"P3\",\"P4\",\"P5\")), function(x){getSpatialInternalMetrics(data[[x]], location=data[,c(\"pxl_col_in_fullres\",\"pxl_row_in_fullres\")], k=6, level=\"dataset\", metrics=c(\"PAS\", \"ELSA\", \"CHAOS\"))}) ## the specified variable is considered as categorical... ## the specified variable is considered as categorical... ## the specified variable is considered as categorical... ## the specified variable is considered as categorical... ## the specified variable is considered as categorical... ## the specified variable is considered as categorical... internal <- bind_rows(internal,.id = \"prediction\") internal %>% pivot_longer(cols=-c(\"prediction\"), names_to=\"metric\", values_to=\"value\") %>% filter(metric %in% c(\"ELSA\", \"PAS\", \"CHAOS\")) %>% ggplot(aes(x=prediction, y=value, group=metric)) + geom_point(size=3, aes(color=prediction)) + facet_wrap(~metric, scales = \"free\") + theme_bw() + labs(x=\"\", color=\"\") + theme(legend.position=\"None\", axis.text.x = element_text(angle = 45, vjust = 0.5, hjust = 1)) internal <-lapply(setNames(c(\"reference\",\"P1\",\"P2\",\"P3\",\"P4\",\"P5\"), c(\"reference\",\"P1\",\"P2\",\"P3\",\"P4\",\"P5\")), function(x){cbind( getSpatialInternalMetrics(data[[x]], location = data[,c(\"pxl_col_in_fullres\",\"pxl_row_in_fullres\")], k=6, level=\"element\", metrics=c( \"ELSA\")), data[,c(\"pxl_col_in_fullres\", \"pxl_row_in_fullres\")])}) ## the specified variable is considered as categorical... ## the specified variable is considered as categorical... ## the specified variable is considered as categorical... ## the specified variable is considered as categorical... ## the specified variable is considered as categorical... ## the specified variable is considered as categorical... 
internal <- bind_rows(internal,.id = \"prediction\") internal %>% ggplot(aes(x = pxl_col_in_fullres, y = - pxl_row_in_fullres, color = ELSA)) + scale_colour_gradient(low=\"white\", high=\"deeppink4\") + geom_point(size=0.4) + facet_wrap(~prediction, scales = \"free\") + theme_bw() + labs(x=\"\", y=\"\", color=\"ELSA\")"},{"path":"https://roseyuan.github.io/poem/articles/SpatialExperiment.html","id":"session-info","dir":"Articles","previous_headings":"","what":"Session info","title":"Working with SpatialExperiment","text":"","code":"sessionInfo() ## R version 4.4.2 (2024-10-31) ## Platform: x86_64-pc-linux-gnu ## Running under: Ubuntu 22.04.5 LTS ## ## Matrix products: default ## BLAS: /usr/lib/x86_64-linux-gnu/blas/libblas.so.3.10.0 ## LAPACK: /usr/lib/x86_64-linux-gnu/lapack/liblapack.so.3.10.0 ## ## locale: ## [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C ## [3] LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8 ## [5] LC_MONETARY=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 ## [7] LC_PAPER=en_US.UTF-8 LC_NAME=C ## [9] LC_ADDRESS=C LC_TELEPHONE=C ## [11] LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C ## ## time zone: Europe/Zurich ## tzcode source: system (glibc) ## ## attached base packages: ## [1] stats4 grid stats graphics grDevices utils datasets ## [8] methods base ## ## other attached packages: ## [1] tidyr_1.3.1 dplyr_1.1.4 ## [3] STexampleData_1.12.3 ExperimentHub_2.12.0 ## [5] AnnotationHub_3.12.0 BiocFileCache_2.12.0 ## [7] dbplyr_2.5.0 SpatialExperiment_1.14.0 ## [9] SingleCellExperiment_1.26.0 SummarizedExperiment_1.34.0 ## [11] Biobase_2.64.0 GenomicRanges_1.56.1 ## [13] GenomeInfoDb_1.40.1 IRanges_2.38.1 ## [15] S4Vectors_0.42.1 BiocGenerics_0.50.0 ## [17] MatrixGenerics_1.16.0 matrixStats_1.3.0 ## [19] ComplexHeatmap_2.20.0 cowplot_1.1.3 ## [21] ggplot2_3.5.1 poem_0.99.2 ## [23] BiocStyle_2.32.1 ## ## loaded via a namespace (and not attached): ## [1] RColorBrewer_1.1-3 rstudioapi_0.16.0 jsonlite_1.8.8 ## [4] shape_1.4.6.1 wk_0.9.4 magrittr_2.0.3 ## [7] magick_2.8.4 farver_2.1.2 rmarkdown_2.27 ## [10] GlobalOptions_0.1.2 fs_1.6.4 zlibbioc_1.50.0 ## [13] ragg_1.3.2 vctrs_0.6.5 spdep_1.3-6 ## [16] Cairo_1.6-2 memoise_2.0.1 elsa_1.1-28 ## [19] terra_1.5-21 htmltools_0.5.8.1 S4Arrays_1.4.1 ## [22] curl_5.2.1 BiocNeighbors_1.22.0 raster_3.5-15 ## [25] s2_1.1.7 SparseArray_1.4.8 sass_0.4.9 ## [28] spData_2.3.3 KernSmooth_2.23-24 bslib_0.8.0 ## [31] htmlwidgets_1.6.4 desc_1.4.3 cachem_1.1.0 ## [34] igraph_2.1.1 mime_0.12 lifecycle_1.0.4 ## [37] iterators_1.0.14 pkgconfig_2.0.3 Matrix_1.7-0 ## [40] R6_2.5.1 fastmap_1.2.0 GenomeInfoDbData_1.2.12 ## [43] rbibutils_2.3 aricode_1.0.3 clue_0.3-65 ## [46] digest_0.6.36 colorspace_2.1-1 AnnotationDbi_1.66.0 ## [49] textshaping_0.3.6 RSQLite_2.3.7 labeling_0.4.3 ## [52] filelock_1.0.3 fansi_1.0.6 httr_1.4.7 ## [55] abind_1.4-5 compiler_4.4.2 proxy_0.4-27 ## [58] bit64_4.0.5 withr_3.0.1 doParallel_1.0.17 ## [61] BiocParallel_1.38.0 DBI_1.2.3 highr_0.11 ## [64] MASS_7.3-61 rappdirs_0.3.3 DelayedArray_0.30.1 ## [67] rjson_0.2.21 classInt_0.4-10 bluster_1.14.0 ## [70] tools_4.4.2 units_0.8-0 glue_1.8.0 ## [73] dbscan_1.2-0 sf_1.0-6 cluster_2.1.6 ## [76] generics_0.1.3 gtable_0.3.5 clevr_0.1.2 ## [79] class_7.3-22 fclust_2.1.1.1 sp_2.1-4 ## [82] utf8_1.2.4 XVector_0.44.0 BiocVersion_3.19.1 ## [85] foreach_1.5.2 pillar_1.9.0 circlize_0.4.16 ## [88] lattice_0.22-6 bit_4.0.5 deldir_2.0-4 ## [91] tidyselect_1.2.1 Biostrings_2.72.1 knitr_1.48 ## [94] bookdown_0.40 xfun_0.46 UCSC.utils_1.0.0 ## [97] yaml_2.3.10 boot_1.3-30 evaluate_0.24.0 ## [100] codetools_0.2-20 
tibble_3.2.1 mclustcomp_0.3.3 ## [103] BiocManager_1.30.23 cli_3.6.3 systemfonts_1.1.0 ## [106] Rdpack_2.6.1 munsell_0.5.1 jquerylib_0.1.4 ## [109] Rcpp_1.0.13 png_0.1-8 parallel_4.4.2 ## [112] pkgdown_2.1.1 blob_1.2.4 scales_1.3.0 ## [115] e1071_1.7-9 purrr_1.0.2 crayon_1.5.3 ## [118] GetoptLong_1.0.5 rlang_1.1.4 KEGGREST_1.44.1"},{"path":"https://roseyuan.github.io/poem/articles/fuzzy_metrics.html","id":"load-the-data","dir":"Articles","previous_headings":"","what":"Load the data","title":"Fuzzy metrics","text":"","code":"data(\"sp_toys\") data <- sp_toys s <- 3 st <- 1 p0 <- ggplot(data, aes(x, y, color=label)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + geom_point(shape = 1, size = s, stroke = st, aes(color=label)) + labs(x=\"\",y=\"\", title=\"GT1\") p1 <- ggplot(data, aes(x, y, color=label)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + geom_point(shape = 1, size = s, stroke = st, aes(color=p1)) + labs(x=\"\",y=\"\", title=\"P1\") p2 <- ggplot(data, aes(x, y, color=label)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + geom_point(shape = 1, size = s, stroke = st, aes(color=p2)) + labs(x=\"\",y=\"\", title=\"P2\") p3 <- ggplot(data, aes(x, y, color=label)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + geom_point(shape = 1, size = s, stroke = st, aes(color=p3)) + labs(x=\"\",y=\"\", title=\"P3\") ps1 <- plot_grid( p0 + theme(legend.position = \"none\", plot.title = element_text(hjust = 0.5), plot.margin = margin(5, 2, -2, 0), axis.text.x = element_blank(), axis.text.y = element_blank(),axis.ticks.x = element_blank(), axis.ticks.y = element_blank()), p1 + theme(legend.position = \"none\", plot.title = element_text(hjust = 0.5), plot.margin = margin(5, 2, -2, 0), axis.text.x = element_blank(), axis.text.y = element_blank(),axis.ticks.x = element_blank(), axis.ticks.y = element_blank()), p2 + theme(legend.position = \"none\", plot.title = element_text(hjust = 0.5), plot.margin = margin(5, 2, -2, 0), axis.text.x = element_blank(), axis.text.y = element_blank(),axis.ticks.x = element_blank(), axis.ticks.y = element_blank()), p3 + theme(legend.position = \"none\", plot.title = element_text(hjust = 0.5), plot.margin = margin(5, 2, -2, 0), axis.text.x = element_blank(), axis.text.y = element_blank(),axis.ticks.x = element_blank(), axis.ticks.y = element_blank()), ncol = 4) plot(ps1) gt1 <- getFuzzyLabel(data$label, data[,c(\"x\",\"y\")], k=6, alpha=0.5) gt1 ## 1 2 ## 1 0.0000000 1.0000000 ## 2 0.0000000 1.0000000 ## 3 0.0000000 1.0000000 ## 4 0.1428571 0.8571429 ## 5 0.6666667 0.3333333 ## 6 1.0000000 0.0000000 ## 7 1.0000000 0.0000000 ## 8 1.0000000 0.0000000 ## 9 1.0000000 0.0000000 ## 10 1.0000000 0.0000000 ## 11 1.0000000 0.0000000 ## 12 1.0000000 0.0000000 ## 13 1.0000000 0.0000000 ## 14 1.0000000 0.0000000 ## 15 1.0000000 0.0000000 ## 16 0.0000000 1.0000000 ## 17 0.0000000 1.0000000 ## 18 0.0000000 1.0000000 ## 19 0.1666667 0.8333333 ## 20 0.8333333 0.1666667 ## 21 1.0000000 0.0000000 ## 22 1.0000000 0.0000000 ## 23 1.0000000 0.0000000 ## 24 1.0000000 0.0000000 ## 25 1.0000000 0.0000000 ## 26 1.0000000 0.0000000 ## 27 1.0000000 0.0000000 ## 28 1.0000000 0.0000000 ## 29 1.0000000 0.0000000 ## 30 1.0000000 0.0000000 ## 31 0.0000000 1.0000000 ## 32 0.0000000 1.0000000 ## 33 0.0000000 1.0000000 ## 34 0.0000000 1.0000000 ## 35 0.1666667 0.8333333 ## 36 0.8333333 0.1666667 ## 37 1.0000000 0.0000000 ## 38 1.0000000 0.0000000 ## 39 1.0000000 0.0000000 ## 40 1.0000000 0.0000000 ## 41 1.0000000 0.0000000 ## 42 
1.0000000 0.0000000 ## 43 1.0000000 0.0000000 ## 44 1.0000000 0.0000000 ## 45 1.0000000 0.0000000 ## 46 0.0000000 1.0000000 ## 47 0.0000000 1.0000000 ## 48 0.0000000 1.0000000 ## 49 0.0000000 1.0000000 ## 50 0.1666667 0.8333333 ## 51 0.8333333 0.1666667 ## 52 1.0000000 0.0000000 ## 53 1.0000000 0.0000000 ## 54 1.0000000 0.0000000 ## 55 1.0000000 0.0000000 ## 56 1.0000000 0.0000000 ## 57 1.0000000 0.0000000 ## 58 1.0000000 0.0000000 ## 59 1.0000000 0.0000000 ## 60 1.0000000 0.0000000 ## 61 0.0000000 1.0000000 ## 62 0.0000000 1.0000000 ## 63 0.0000000 1.0000000 ## 64 0.0000000 1.0000000 ## 65 0.0000000 1.0000000 ## 66 0.1666667 0.8333333 ## 67 0.8333333 0.1666667 ## 68 1.0000000 0.0000000 ## 69 1.0000000 0.0000000 ## 70 1.0000000 0.0000000 ## 71 1.0000000 0.0000000 ## 72 1.0000000 0.0000000 ## 73 1.0000000 0.0000000 ## 74 1.0000000 0.0000000 ## 75 1.0000000 0.0000000 ## 76 0.0000000 1.0000000 ## 77 0.0000000 1.0000000 ## 78 0.0000000 1.0000000 ## 79 0.0000000 1.0000000 ## 80 0.0000000 1.0000000 ## 81 0.1666667 0.8333333 ## 82 0.8333333 0.1666667 ## 83 1.0000000 0.0000000 ## 84 1.0000000 0.0000000 ## 85 1.0000000 0.0000000 ## 86 1.0000000 0.0000000 ## 87 1.0000000 0.0000000 ## 88 1.0000000 0.0000000 ## 89 1.0000000 0.0000000 ## 90 1.0000000 0.0000000 ## 91 0.0000000 1.0000000 ## 92 0.0000000 1.0000000 ## 93 0.0000000 1.0000000 ## 94 0.0000000 1.0000000 ## 95 0.0000000 1.0000000 ## 96 0.0000000 1.0000000 ## 97 0.1666667 0.8333333 ## 98 0.8333333 0.1666667 ## 99 1.0000000 0.0000000 ## 100 1.0000000 0.0000000 ## 101 1.0000000 0.0000000 ## 102 1.0000000 0.0000000 ## 103 1.0000000 0.0000000 ## 104 1.0000000 0.0000000 ## 105 1.0000000 0.0000000 ## 106 0.0000000 1.0000000 ## 107 0.0000000 1.0000000 ## 108 0.0000000 1.0000000 ## 109 0.0000000 1.0000000 ## 110 0.0000000 1.0000000 ## 111 0.0000000 1.0000000 ## 112 0.1666667 0.8333333 ## 113 0.8333333 0.1666667 ## 114 1.0000000 0.0000000 ## 115 1.0000000 0.0000000 ## 116 1.0000000 0.0000000 ## 117 1.0000000 0.0000000 ## 118 1.0000000 0.0000000 ## 119 1.0000000 0.0000000 ## 120 1.0000000 0.0000000 ## 121 0.0000000 1.0000000 ## 122 0.0000000 1.0000000 ## 123 0.0000000 1.0000000 ## 124 0.0000000 1.0000000 ## 125 0.0000000 1.0000000 ## 126 0.0000000 1.0000000 ## 127 0.0000000 1.0000000 ## 128 0.1666667 0.8333333 ## 129 0.8333333 0.1666667 ## 130 1.0000000 0.0000000 ## 131 1.0000000 0.0000000 ## 132 1.0000000 0.0000000 ## 133 1.0000000 0.0000000 ## 134 1.0000000 0.0000000 ## 135 1.0000000 0.0000000 ## 136 0.0000000 1.0000000 ## 137 0.0000000 1.0000000 ## 138 0.0000000 1.0000000 ## 139 0.0000000 1.0000000 ## 140 0.0000000 1.0000000 ## 141 0.0000000 1.0000000 ## 142 0.0000000 1.0000000 ## 143 0.1666667 0.8333333 ## 144 0.8333333 0.1666667 ## 145 1.0000000 0.0000000 ## 146 1.0000000 0.0000000 ## 147 1.0000000 0.0000000 ## 148 1.0000000 0.0000000 ## 149 1.0000000 0.0000000 ## 150 1.0000000 0.0000000 ## 151 0.0000000 1.0000000 ## 152 0.0000000 1.0000000 ## 153 0.0000000 1.0000000 ## 154 0.0000000 1.0000000 ## 155 0.0000000 1.0000000 ## 156 0.0000000 1.0000000 ## 157 0.0000000 1.0000000 ## 158 0.0000000 1.0000000 ## 159 0.1666667 0.8333333 ## 160 0.8333333 0.1666667 ## 161 1.0000000 0.0000000 ## 162 1.0000000 0.0000000 ## 163 1.0000000 0.0000000 ## 164 1.0000000 0.0000000 ## 165 1.0000000 0.0000000 ## 166 0.0000000 1.0000000 ## 167 0.0000000 1.0000000 ## 168 0.0000000 1.0000000 ## 169 0.0000000 1.0000000 ## 170 0.0000000 1.0000000 ## 171 0.0000000 1.0000000 ## 172 0.0000000 1.0000000 ## 173 0.0000000 1.0000000 ## 174 0.1666667 0.8333333 ## 175 0.8333333 0.1666667 
## 176 1.0000000 0.0000000 ## 177 1.0000000 0.0000000 ## 178 1.0000000 0.0000000 ## 179 1.0000000 0.0000000 ## 180 1.0000000 0.0000000 ## 181 0.0000000 1.0000000 ## 182 0.0000000 1.0000000 ## 183 0.0000000 1.0000000 ## 184 0.0000000 1.0000000 ## 185 0.0000000 1.0000000 ## 186 0.0000000 1.0000000 ## 187 0.0000000 1.0000000 ## 188 0.0000000 1.0000000 ## 189 0.0000000 1.0000000 ## 190 0.1666667 0.8333333 ## 191 0.8333333 0.1666667 ## 192 1.0000000 0.0000000 ## 193 1.0000000 0.0000000 ## 194 1.0000000 0.0000000 ## 195 1.0000000 0.0000000 ## 196 0.0000000 1.0000000 ## 197 0.0000000 1.0000000 ## 198 0.0000000 1.0000000 ## 199 0.0000000 1.0000000 ## 200 0.0000000 1.0000000 ## 201 0.0000000 1.0000000 ## 202 0.0000000 1.0000000 ## 203 0.0000000 1.0000000 ## 204 0.0000000 1.0000000 ## 205 0.1666667 0.8333333 ## 206 0.8333333 0.1666667 ## 207 1.0000000 0.0000000 ## 208 1.0000000 0.0000000 ## 209 1.0000000 0.0000000 ## 210 1.0000000 0.0000000 ## 211 0.0000000 1.0000000 ## 212 0.0000000 1.0000000 ## 213 0.0000000 1.0000000 ## 214 0.0000000 1.0000000 ## 215 0.0000000 1.0000000 ## 216 0.0000000 1.0000000 ## 217 0.0000000 1.0000000 ## 218 0.0000000 1.0000000 ## 219 0.0000000 1.0000000 ## 220 0.0000000 1.0000000 ## 221 0.1666667 0.8333333 ## 222 0.8333333 0.1666667 ## 223 1.0000000 0.0000000 ## 224 1.0000000 0.0000000 ## 225 1.0000000 0.0000000 ## 226 0.0000000 1.0000000 ## 227 0.0000000 1.0000000 ## 228 0.0000000 1.0000000 ## 229 0.0000000 1.0000000 ## 230 0.0000000 1.0000000 ## 231 0.0000000 1.0000000 ## 232 0.0000000 1.0000000 ## 233 0.0000000 1.0000000 ## 234 0.0000000 1.0000000 ## 235 0.0000000 1.0000000 ## 236 0.2857143 0.7142857 ## 237 0.8571429 0.1428571 ## 238 1.0000000 0.0000000 ## 239 1.0000000 0.0000000 ## 240 1.0000000 0.0000000"},{"path":[]},{"path":[]},{"path":"https://roseyuan.github.io/poem/articles/fuzzy_metrics.html","id":"reflexive-property","dir":"Articles","previous_headings":"External metrics","what":"Reflexive property","title":"Fuzzy metrics","text":"comparison clustering result achieves maximum index.","code":"fuzzyPartitionMetrics(gt1, gt1) ## $NDC ## [1] 1 ## ## $ACI ## [1] 1 ## ## $fuzzyWH ## $fuzzyWH$global ## [1] 1 ## ## $fuzzyWH$perPartition ## 1 2 ## 1 1 ## ## ## $fuzzyWC ## $fuzzyWC$global ## [1] 1 ## ## $fuzzyWC$perPartition ## 1 2 ## 1 1 ## ## ## $fuzzyAWH ## $fuzzyAWH$global ## [1] 1 ## ## $fuzzyAWH$perPartition ## 1 2 ## 1 1 ## ## ## $fuzzyAWC ## $fuzzyAWC$global ## [1] 1 ## ## $fuzzyAWC$perPartition ## 1 2 ## 1 1 fuzzyHardMetrics(data$label, gt1, data$label) ## $NDC ## [1] 1 ## ## $ACI ## [1] 1 ## ## $fuzzyWH ## $fuzzyWH$global ## [1] 1 ## ## $fuzzyWH$perPartition ## 1 2 ## 1 1 ## ## ## $fuzzyWC ## $fuzzyWC$global ## [1] 1 ## ## $fuzzyWC$perPartition ## 1 2 ## 1 1 ## ## ## $fuzzyAWH ## $fuzzyAWH$global ## [1] 1 ## ## $fuzzyAWH$perPartition ## 1 2 ## 1 1 ## ## ## $fuzzyAWC ## $fuzzyAWC$global ## [1] 1 ## ## $fuzzyAWC$perPartition ## 1 2 ## 1 1"},{"path":[]},{"path":"https://roseyuan.github.io/poem/articles/fuzzy_metrics.html","id":"session-info","dir":"Articles","previous_headings":"","what":"Session info","title":"Fuzzy metrics","text":"","code":"sessionInfo() ## R version 4.4.2 (2024-10-31) ## Platform: x86_64-pc-linux-gnu ## Running under: Ubuntu 22.04.5 LTS ## ## Matrix products: default ## BLAS: /usr/lib/x86_64-linux-gnu/blas/libblas.so.3.10.0 ## LAPACK: /usr/lib/x86_64-linux-gnu/lapack/liblapack.so.3.10.0 ## ## locale: ## [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C ## [3] LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8 ## [5] LC_MONETARY=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 
## [7] LC_PAPER=en_US.UTF-8 LC_NAME=C ## [9] LC_ADDRESS=C LC_TELEPHONE=C ## [11] LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C ## ## time zone: Europe/Zurich ## tzcode source: system (glibc) ## ## attached base packages: ## [1] stats graphics grDevices utils datasets methods base ## ## other attached packages: ## [1] cowplot_1.1.3 ggplot2_3.5.1 poem_0.99.2 BiocStyle_2.32.1 ## ## loaded via a namespace (and not attached): ## [1] tidyselect_1.2.1 dplyr_1.1.4 farver_2.1.2 ## [4] fastmap_1.2.0 bluster_1.14.0 digest_0.6.36 ## [7] lifecycle_1.0.4 sf_1.0-6 cluster_2.1.6 ## [10] terra_1.5-21 magrittr_2.0.3 compiler_4.4.2 ## [13] rlang_1.1.4 sass_0.4.9 tools_4.4.2 ## [16] mclustcomp_0.3.3 igraph_2.1.1 utf8_1.2.4 ## [19] yaml_2.3.10 knitr_1.48 labeling_0.4.3 ## [22] htmlwidgets_1.6.4 sp_2.1-4 classInt_0.4-10 ## [25] BiocParallel_1.38.0 KernSmooth_2.23-24 fclust_2.1.1.1 ## [28] elsa_1.1-28 withr_3.0.1 BiocGenerics_0.50.0 ## [31] desc_1.4.3 grid_4.4.2 stats4_4.4.2 ## [34] fansi_1.0.6 e1071_1.7-9 colorspace_2.1-1 ## [37] aricode_1.0.3 scales_1.3.0 MASS_7.3-61 ## [40] cli_3.6.3 rmarkdown_2.27 ragg_1.3.2 ## [43] generics_0.1.3 rstudioapi_0.16.0 spdep_1.3-6 ## [46] DBI_1.2.3 cachem_1.1.0 proxy_0.4-27 ## [49] parallel_4.4.2 BiocManager_1.30.23 s2_1.1.7 ## [52] vctrs_0.6.5 boot_1.3-30 Matrix_1.7-0 ## [55] jsonlite_1.8.8 spData_2.3.3 bookdown_0.40 ## [58] clevr_0.1.2 S4Vectors_0.42.1 BiocNeighbors_1.22.0 ## [61] clue_0.3-65 systemfonts_1.1.0 jquerylib_0.1.4 ## [64] units_0.8-0 glue_1.8.0 pkgdown_2.1.1 ## [67] codetools_0.2-20 gtable_0.3.5 deldir_2.0-4 ## [70] raster_3.5-15 munsell_0.5.1 tibble_3.2.1 ## [73] pillar_1.9.0 htmltools_0.5.8.1 R6_2.5.1 ## [76] wk_0.9.4 textshaping_0.3.6 Rdpack_2.6.1 ## [79] evaluate_0.24.0 lattice_0.22-6 highr_0.11 ## [82] rbibutils_2.3 bslib_0.8.0 class_7.3-22 ## [85] Rcpp_1.0.13 xfun_0.46 fs_1.6.4 ## [88] pkgconfig_2.0.3"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"installation-loading","dir":"Articles","previous_headings":"","what":"Installation & loading","title":"Introduction to `poem`","text":"","code":"if (!requireNamespace(\"devtools\", quietly = TRUE)) install.packages(\"devtools\") devtools::install_github(\"RoseYuan/poem\") library(poem) library(ggplot2) library(dplyr) library(tidyr) library(ggnetwork) library(igraph) library(cowplot)"},{"path":[]},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"what-is-this-package-for","dir":"Articles","previous_headings":"Introduction","what":"What is this package for?","title":"Introduction to `poem`","text":"package provides multiple approaches comparing two partitions1 dataset, evaluating alignment dataset’s embedding/graph representations partition. Besides, package offers methods comparing two fuzzy partitions2 well comparing hard partition fuzzy partition. allows evaluation fuzzy partition results assessing agreement fuzzy hard ground-truth partition. Finally, package implements visualization evaluation metrics tailored domain detection spatially-resolved -omics data. include especially external evaluation metrics (.e. based comparison ground truth labels), also internal metrics.","code":""},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"main-functions","dir":"Articles","previous_headings":"Introduction","what":"Main functions","title":"Introduction to `poem`","text":"package poem includes many metrics perform different kinds evaluations, metrics can retrieved via 6 main wrapper functions. Unless specified, “partition” means “hard” partition. 
: getEmbeddingMetrics(): Metrics compare embedding data points partition data points. getGraphMetrics(): Metrics compare graph (e.g. kNN/sNN) partition, nodes graph data points partition. getPartitionMetrics(): Metrics compare two partitions dataset. getfuzzyPartitionMetrics(): Metrics compare two fuzzy partitions, compare fuzzy hard partition dataset. getSpatialExternalMetrics(): External metrics evaluating spatial clustering results spatial-aware fashion. non-spatial-aware evaluation, one can directly use getPartitionMetrics(). getSpatialInternalMetrics(): Internal metrics evaluating spatial clustering results spatial-aware fashion. 3 different levels one can perform -mentioned evaluation: element-level, class-level, dataset-level. Element-level evaluation reports metric values data point; Class-level evaluation reports metrics classes3 clusters4; dataset-level evaluation returns single metric value whole dataset. following table illustrates available metrics different evaluation levels, main functions used retrieve .","code":"data(metric_info) DT::datatable(metric_info)"},{"path":[]},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"example-data","dir":"Articles","previous_headings":"Getting started","what":"Example data","title":"Introduction to `poem`","text":"showcase main functions, use simulated datasets examples vignette. two datasets, g1 g2, contain 80 data points x y coordinates 4 different classes. plot :","code":"data(toyExamples) g1 <- toyExamples[toyExamples$graph==\"graph1\",] g2 <- toyExamples[toyExamples$graph==\"graph2\",] head(g1) ## graph x y class ## 641 graph1 -0.6290416 -0.487293 class1 ## 642 graph1 -2.5646982 -1.742079 class1 ## 643 graph1 -1.6368716 -1.911560 class1 ## 644 graph1 -1.3671374 -2.120897 class1 ## 645 graph1 -1.5957317 -3.194329 class1 ## 646 graph1 -2.1061245 -1.388003 class1 ggplot(rbind(g1,g2), aes(x,y,color=class, shape=class)) + geom_point() + facet_wrap(~graph) + theme_bw()"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"embedding-evaluation","dir":"Articles","previous_headings":"","what":"Embedding evaluation","title":"Introduction to `poem`","text":"Let’s assume g1 g2 contain two different embeddings set objects. “good” embedding put objects class together, objects different class apart. Since know ground-truth class object, one can evaluation “goodness” embedding calculating embedding evaluation metrics. 
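To make the three evaluation levels concrete, the sketch below (composed only of calls shown elsewhere in this vignette) runs getEmbeddingMetrics() on g1 at the element, class, and dataset levels:

library(poem)
data(toyExamples)
g1 <- toyExamples[toyExamples$graph=="graph1",]
# element level: one value per data point
head(getEmbeddingMetrics(x=g1[,c("x","y")], labels=g1$class, metrics=c("SW"), level="element"))
# class level: one value per class
getEmbeddingMetrics(x=g1[,c("x","y")], labels=g1$class, metrics=c("dbcv","meanSW"), level="class")
# dataset level: a single value per metric for the whole dataset
getEmbeddingMetrics(x=g1[,c("x","y")], labels=g1$class, metrics=c("meanSW","dbcv"), level="dataset")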
One can calculate metrics element-wise, class/cluster, whole dataset.","code":""},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"element-level-evaluation","dir":"Articles","previous_headings":"Embedding evaluation","what":"Element-level evaluation","title":"Introduction to `poem`","text":"example, element level, one can calculate Silhouette Width specifying level=\"element\" metrics=c(\"SW\"): output data.frame containing metric values specified level.","code":"sw <- getEmbeddingMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class, metrics=c(\"SW\"), level=\"element\") head(sw) ## class SW ## 641 class1 0.2986628 ## 642 class1 0.5818507 ## 643 class1 0.6299871 ## 644 class1 0.5867285 ## 645 class1 0.5191290 ## 646 class1 0.5679847 g1$sw <- getEmbeddingMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class, metrics=c(\"SW\"), level=\"element\")$SW g2$sw <- getEmbeddingMetrics(x=g2[,c(\"x\",\"y\")], labels=g2$class, metrics=c(\"SW\"), level=\"element\")$SW ggplot(rbind(g1,g2), aes(x, y, color=sw, shape=class)) + geom_point() + facet_wrap(~graph) + theme_bw()"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"class-level-evaluation","dir":"Articles","previous_headings":"Embedding evaluation","what":"Class-level evaluation","title":"Introduction to `poem`","text":"One can also evaluate class level, specifying level=\"class\". Check ?getEmbeddingMetrics see allowed metrics class level. example:","code":"cl <- getEmbeddingMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class, metrics=c(\"dbcv\", \"meanSW\"), level=\"class\") head(cl) ## class meanSW dbcv ## 1 class1 0.4240817 -0.37367780 ## 2 class2 0.4897828 -0.34617982 ## 3 class3 0.5021555 0.07752233 ## 4 class4 0.5957709 0.26757842 res1 <- getEmbeddingMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class, metrics=c(\"dbcv\", \"meanSW\"), level=\"class\") res2 <- getEmbeddingMetrics(x=g2[,c(\"x\",\"y\")], labels=g2$class, metrics=c(\"dbcv\", \"meanSW\"), level=\"class\") bind_rows(list(graph1=res1, graph2=res2), .id=\"graph\") %>% pivot_longer(cols=c(\"meanSW\",\"dbcv\"), names_to=\"metric\",values_to=\"value\") %>% ggplot(aes(class, value, fill=graph, group=graph)) + geom_bar(position = \"dodge\", stat = \"identity\") + facet_wrap(~metric) + theme_bw()"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"dataset-level-evaluation","dir":"Articles","previous_headings":"Embedding evaluation","what":"Dataset-level evaluation","title":"Introduction to `poem`","text":"Similarly, one can evaluate dataset level specifying level=\"dataset\". example:","code":"getEmbeddingMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class, level=\"dataset\", metrics=c(\"meanSW\", \"meanClassSW\", \"pnSW\", \"minClassSW\", \"cdbw\", \"cohesion\", \"compactness\", \"sep\", \"dbcv\")) ## meanSW meanClassSW pnSW minClassSW cdbw cohesion compactness ## 1 0.5029477 0.5029477 0.0375 0.4240817 0.0553208 0.2732925 0.2800803 ## sep dbcv ## 1 0.7227335 -0.09368922"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"graph-evaluation","dir":"Articles","previous_headings":"","what":"Graph evaluation","title":"Introduction to `poem`","text":"Instead directly using distances densities embedding space evaluation, one may want evaluate connectivity stand point looking graph structure constructed datasets. getGraphMetrics() can perform k nearest neighbor (KNN) graph shared nearest neighbor graph (SNN) construction embedding apply graph-based evaluation metrics. 
examples g1 g2, constructed graphs look like: Use ?getGraphMetrics() check optional arguments KNN/SNN graph construction. Similarly, level can \"element\", \"class\" \"dataset\". Alternatively, getGraphMetrics() can take igraph object x, enables application evaluation metrics general graph, list nearest neighbors x, accelerate computation large datasets.","code":"# Some functions for plotting plotGraphs <- function(d, k=7){ gn <- dplyr::bind_rows(lapply(split(d[,-1],d$graph), FUN=function(d1){ nn <- emb2knn(as.matrix(d1[,c(\"x\",\"y\")]), k=k) g <- poem:::.nn2graph(nn, labels=d1$class) ggnetwork(g, layout=as.matrix(d1[,1:2]), scale=FALSE) }), .id=\"graph\") ggplot(gn, aes(x = x, y = y, xend = xend, yend = yend)) + theme_blank() + theme(legend.position = \"right\") + geom_edges(alpha=0.5, colour=\"grey\") + geom_nodes(aes(colour=class, shape=class), size=2) + facet_wrap(~graph, nrow=1) } plotGraphs(bind_rows(list(g1,g2), .id=\"graph\")) getGraphMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class, metrics=c(\"PWC\",\"ISI\"), level=\"class\", directed=FALSE, k=7, shared=FALSE) ## class PWC ISI ## class1 class1 0.05 1.186272 ## class2 class2 0.10 1.224188 ## class3 class3 0.05 1.149098 ## class4 class4 0.05 1.251146 res1 <- getGraphMetrics(x=g1[,c(\"x\",\"y\")], labels=g1$class,metrics=c(\"PWC\",\"ISI\"), level=\"class\", directed=FALSE, k=7, shared=FALSE) res2 <- getGraphMetrics(x=g2[,c(\"x\",\"y\")], labels=g2$class, metrics=c(\"PWC\",\"ISI\"), level=\"class\", directed=FALSE, k=7, shared=FALSE) bind_rows(list(graph1=res1, graph2=res2), .id=\"graph\") %>% pivot_longer(cols=c(\"PWC\",\"ISI\"), names_to=\"metric\",values_to=\"value\") %>% ggplot(aes(class, value, fill=graph, group=graph)) + geom_bar(position = \"dodge\", stat = \"identity\") + facet_wrap(~metric) + theme_bw()"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"partition-evaluation","dir":"Articles","previous_headings":"","what":"Partition evaluation","title":"Introduction to `poem`","text":"construct SNN graph g1 g2 embeddings, apply Louvain algorithm get partitions . 
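As an aside, since emb2snn() returns an igraph object, the same precomputed graph can also be passed directly to getGraphMetrics(), as mentioned in the previous section. A minimal sketch, assuming that when a graph is supplied as x the graph-construction arguments (k, shared, directed) are not needed:

library(poem)
data(toyExamples)
g1 <- toyExamples[toyExamples$graph=="graph1",]
# build the SNN graph once and reuse it
snn1 <- emb2snn(as.matrix(g1[,c("x","y")]), k=7)
# pass the igraph object as `x` instead of the embedding coordinates
getGraphMetrics(x=snn1, labels=g1$class, metrics=c("PWC","ISI"), level="class")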
compare predictions known labels using partition metrics: Note class-level metrics, reported per class, (specifically, “WH”, “AWH) reported per cluster.","code":"k <- 7 r <- 0.5 snn1 <- emb2snn(as.matrix(g1[,c(\"x\",\"y\")]), k=k) snn2 <- emb2snn(as.matrix(g2[,c(\"x\",\"y\")]), k=k) g1$cluster <- factor(igraph::cluster_louvain(snn1, resolution = r)$membership) g2$cluster <- factor(igraph::cluster_louvain(snn2, resolution = r)$membership) ggplot(rbind(g1,g2), aes(x,y,color=cluster, shape=class)) + geom_point() + facet_wrap(~graph) + theme_bw() # for g1 getPartitionMetrics(true=g1$class, pred=g1$cluster, level=\"dataset\", metrics = c(\"RI\", \"WC\", \"WH\", \"ARI\", \"AWC\", \"AWH\", \"FM\", \"AMI\")) ## RI WC WH ARI AWC AWH FM AMI ## 1 0.9636076 0.925 0.9237845 0.9004285 0.9012088 0.8996496 0.9624922 0.8872892 # for g2 getPartitionMetrics(true=g2$class, pred=g2$cluster, level=\"dataset\", metrics = c(\"RI\", \"WC\", \"WH\", \"ARI\", \"AWC\", \"AWH\", \"FM\", \"AMI\")) ## RI WC WH ARI AWC AWH FM AMI ## 1 0.721519 0.95 0.4616368 0.4400954 0.9010025 0.2911552 0.6501669 0.4193846 getPartitionMetrics(true=g1$class, pred=g2$cluster, level=\"class\") ## WC AWC FM class WH AWH cluster ## 1 0.9 0.802005 0.6551724 class1 NA NA ## 2 0.9 0.802005 0.6551724 class2 NA NA ## 3 1.0 1.000000 0.6451613 class3 NA NA ## 4 1.0 1.000000 0.6451613 class4 NA NA ## 5 NA NA NA 0.4864865 0.3238739 1 ## 6 NA NA NA 0.4413473 0.2644406 2"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"fuzzy-partition-evaluation","dir":"Articles","previous_headings":"","what":"Fuzzy partition evaluation","title":"Introduction to `poem`","text":"comparing two fuzzy partitions comparing fuzzy partition hard patition, one can use getFuzzyPartitionMetrics(). fuzzy reprensentation partion look like following, row data point, value class memberships class. row sums 1. using input hardPred, hardTrue, fuzzyPred, fuzzyTrue, one can control whether fuzzy hard version two partitions used comparison. example, fuzzyTrue fuzzyPred NULL, metrics comparing two fuzzy partitions used.","code":"fuzzyTrue <- matrix(c( 0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) # a hard truth: hardTrue <- apply(fuzzyTrue,1,FUN=which.max) # some predicted labels: hardPred <- c(1,1,1,1,1,1,2,2,2) getFuzzyPartitionMetrics(hardPred=hardPred, hardTrue=hardTrue, fuzzyTrue=fuzzyTrue, nperms=3, level=\"class\") ## fuzzyWC fuzzyAWC class fuzzyWH fuzzyAWH cluster ## 1 0.7195238 0.3542847 1 NA NA NA ## 2 1.0000000 NaN 2 NA NA NA ## 3 1.0000000 NaN 3 NA NA NA ## 4 NA NA NA 1.00000000 1.0000000 1 ## 5 NA NA NA 0.06166667 -0.8064171 2"},{"path":[]},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"example-data-1","dir":"Articles","previous_headings":"Spatial clustering evaluation","what":"Example data","title":"Introduction to `poem`","text":"use another toy example dataset package, sp_toys, illustrate spatial clustering evaluation. C, spots colored ground-truth class. P1 P2, color inside spot according ground-truth class, color border according clustering predictions. 
P1 P2 misclassified amount red spots blue cluster.","code":"data(sp_toys) s <- 3 st <- 1 p1 <- ggplot(sp_toys, aes(x, y, color=label)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + geom_point(shape = 1, size = s, stroke = st, aes(color=p1)) + labs(x=\"\",y=\"\", title=\"P1\") p0 <- ggplot(sp_toys, aes(x, y, color=label)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + geom_point(shape = 1, size = s, stroke = st, aes(color=label)) + labs(x=\"\",y=\"\", title=\"C\") p2 <- ggplot(sp_toys, aes(x, y, color=label)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + geom_point(shape = 1, size = s, stroke = st, aes(color=p2)) + labs(x=\"\",y=\"\", title=\"P2\") plot_grid(p0 + theme(legend.position = \"none\", plot.title = element_text(hjust = 0.5)), p1 + theme(legend.position = \"none\", plot.title = element_text(hjust = 0.5)), p2 + theme(legend.position = \"none\", plot.title = element_text(hjust = 0.5)), ncol = 3)"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"external-metrics","dir":"Articles","previous_headings":"Spatial clustering evaluation","what":"External metrics","title":"Introduction to `poem`","text":"Let’s quantify calculating external spatial metrics: specifying fuzzy_true fuzzy_pred, one can control whether fuzzy hard version true pred used comparison. fuzzy_true fuzzy_pred TRUE, spatial neighborhood information used construct fuzzy representation class/cluster memberships. evaluation non-spatial-aware, P1 P2 get ARI score. However, spatial-aware metrics like SpatialARI SpatialAccuracy, P2 gets higher scores P1.","code":"getSpatialExternalMetrics(true=sp_toys$label, pred=sp_toys$p1, location=sp_toys[,c(\"x\",\"y\")], level=\"dataset\", metrics=c(\"SpatialARI\",\"SpatialAccuracy\"), fuzzy_true = TRUE, fuzzy_pred = FALSE) ## SpatialARI SpatialAccuracy ## 1 0.7871135 0.956746 getSpatialExternalMetrics(true=sp_toys$label, pred=sp_toys$p1, location=sp_toys[,c(\"x\",\"y\")], level=\"class\") ## SpatialWH SpatialAWH SpatialWC SpatialAWC class cluster ## 1 NA NA 0.8078698 0.5929504 1 NA ## 2 NA NA 1.0000000 1.0000000 2 NA ## 3 1.0000000 1.0000000 NA NA NA 1 ## 4 0.8323893 0.6493279 NA NA NA 2 res1.1 <- getSpatialExternalMetrics(true=sp_toys$label, pred=sp_toys$p1, location=sp_toys[,c(\"x\",\"y\")], level=\"dataset\", metrics=c(\"SpatialARI\",\"SpatialAccuracy\"), fuzzy_true = TRUE, fuzzy_pred = FALSE) res2.1 <- getSpatialExternalMetrics(true=sp_toys$label, pred=sp_toys$p2, location=sp_toys[,c(\"x\",\"y\")], level=\"dataset\", metrics=c(\"SpatialARI\",\"SpatialAccuracy\"), fuzzy_true = TRUE, fuzzy_pred = FALSE) res1.2 <- getPartitionMetrics(true=sp_toys$label, pred=sp_toys$p1, level=\"dataset\", metrics=c(\"ARI\")) res2.2 <- getPartitionMetrics(true=sp_toys$label, pred=sp_toys$p2, level=\"dataset\", metrics=c(\"ARI\")) cbind(bind_rows(list(res1.1, res2.1), .id=\"P\"), bind_rows(list(res1.2, res2.2), .id=\"P\")) %>% pivot_longer(cols=c(\"SpatialARI\", \"SpatialAccuracy\", \"ARI\"), names_to=\"metric\", values_to=\"value\") %>% ggplot(aes(x=P, y=value, group=metric)) + geom_point(size=3, aes(color=P)) + facet_wrap(~metric) + theme_bw() + labs(x=\"Prediction\")"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"internal-metrics","dir":"Articles","previous_headings":"Spatial clustering evaluation","what":"Internal metrics","title":"Introduction to `poem`","text":"Last least, internal metrics spatial clustering evaluation:","code":"sp_toys$c_elsa <- getSpatialInternalMetrics(label=sp_toys$label, 
location=sp_toys[,c(\"x\",\"y\")], level=\"element\", metrics=c(\"ELSA\"))$ELSA ## the specified variable is considered as categorical... sp_toys$p1_elsa <- getSpatialInternalMetrics(label=sp_toys$p1, location=sp_toys[,c(\"x\",\"y\")], level=\"element\", metrics=c(\"ELSA\"))$ELSA ## the specified variable is considered as categorical... sp_toys$p2_elsa <- getSpatialInternalMetrics(label=sp_toys$p2, location=sp_toys[,c(\"x\",\"y\")], level=\"element\", metrics=c(\"ELSA\"))$ELSA ## the specified variable is considered as categorical... s <- 3 st <- 1 p1 <- ggplot(sp_toys, aes(x, y, color=p1_elsa)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + labs(x=\"\",y=\"\", title=\"P1\", color=\"ELSA\") + scico::scale_color_scico(palette = \"roma\", limits = c(0, 1), direction=-1) p0 <- ggplot(sp_toys, aes(x, y, color=c_elsa)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + labs(x=\"\",y=\"\", title=\"C\", color=\"ELSA\") + scico::scale_color_scico(palette = \"roma\", limits = c(0, 1), direction=-1) p2 <- ggplot(sp_toys, aes(x, y, color=p2_elsa)) + geom_point(size=s, alpha=0.5) + scale_y_reverse() + theme_bw() + labs(x=\"\",y=\"\", title=\"P2\", color=\"ELSA\") + scico::scale_color_scico(palette = \"roma\", limits = c(0, 1), direction=-1) plot_grid(p0 + theme(plot.title = element_text(hjust = 0.5)), p1 + theme(plot.title = element_text(hjust = 0.5)), p2 + theme(plot.title = element_text(hjust = 0.5)), nrow=1, rel_width=c(1,1,1))"},{"path":"https://roseyuan.github.io/poem/articles/poem.html","id":"session-info","dir":"Articles","previous_headings":"","what":"Session info","title":"Introduction to `poem`","text":"","code":"sessionInfo() ## R version 4.4.2 (2024-10-31) ## Platform: x86_64-pc-linux-gnu ## Running under: Ubuntu 22.04.5 LTS ## ## Matrix products: default ## BLAS: /usr/lib/x86_64-linux-gnu/blas/libblas.so.3.10.0 ## LAPACK: /usr/lib/x86_64-linux-gnu/lapack/liblapack.so.3.10.0 ## ## locale: ## [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C ## [3] LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8 ## [5] LC_MONETARY=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 ## [7] LC_PAPER=en_US.UTF-8 LC_NAME=C ## [9] LC_ADDRESS=C LC_TELEPHONE=C ## [11] LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C ## ## time zone: Europe/Zurich ## tzcode source: system (glibc) ## ## attached base packages: ## [1] stats graphics grDevices utils datasets methods base ## ## other attached packages: ## [1] cowplot_1.1.3 igraph_2.1.1 ggnetwork_0.5.13 tidyr_1.3.1 ## [5] dplyr_1.1.4 ggplot2_3.5.1 poem_0.99.2 BiocStyle_2.32.1 ## ## loaded via a namespace (and not attached): ## [1] tidyselect_1.2.1 farver_2.1.2 fastmap_1.2.0 ## [4] bluster_1.14.0 digest_0.6.36 lifecycle_1.0.4 ## [7] sf_1.0-6 cluster_2.1.6 terra_1.5-21 ## [10] dbscan_1.2-0 magrittr_2.0.3 compiler_4.4.2 ## [13] rlang_1.1.4 sass_0.4.9 tools_4.4.2 ## [16] mclustcomp_0.3.3 utf8_1.2.4 yaml_2.3.10 ## [19] knitr_1.48 labeling_0.4.3 htmlwidgets_1.6.4 ## [22] sp_2.1-4 classInt_0.4-10 scico_1.5.0 ## [25] BiocParallel_1.38.0 KernSmooth_2.23-24 fclust_2.1.1.1 ## [28] elsa_1.1-28 withr_3.0.1 purrr_1.0.2 ## [31] BiocGenerics_0.50.0 desc_1.4.3 grid_4.4.2 ## [34] stats4_4.4.2 fansi_1.0.6 e1071_1.7-9 ## [37] colorspace_2.1-1 aricode_1.0.3 scales_1.3.0 ## [40] MASS_7.3-61 cli_3.6.3 rmarkdown_2.27 ## [43] ragg_1.3.2 generics_0.1.3 rstudioapi_0.16.0 ## [46] spdep_1.3-6 DBI_1.2.3 cachem_1.1.0 ## [49] proxy_0.4-27 parallel_4.4.2 BiocManager_1.30.23 ## [52] s2_1.1.7 vctrs_0.6.5 boot_1.3-30 ## [55] Matrix_1.7-0 jsonlite_1.8.8 spData_2.3.3 ## [58] bookdown_0.40 clevr_0.1.2 
S4Vectors_0.42.1 ## [61] BiocNeighbors_1.22.0 clue_0.3-65 crosstalk_1.2.1 ## [64] systemfonts_1.1.0 jquerylib_0.1.4 units_0.8-0 ## [67] glue_1.8.0 pkgdown_2.1.1 codetools_0.2-20 ## [70] DT_0.33 gtable_0.3.5 deldir_2.0-4 ## [73] raster_3.5-15 munsell_0.5.1 tibble_3.2.1 ## [76] pillar_1.9.0 htmltools_0.5.8.1 R6_2.5.1 ## [79] wk_0.9.4 textshaping_0.3.6 Rdpack_2.6.1 ## [82] evaluate_0.24.0 lattice_0.22-6 highr_0.11 ## [85] rbibutils_2.3 bslib_0.8.0 class_7.3-22 ## [88] Rcpp_1.0.13 xfun_0.46 fs_1.6.4 ## [91] pkgconfig_2.0.3"},{"path":"https://roseyuan.github.io/poem/articles/table.html","id":"partition-based-metrics","dir":"Articles","previous_headings":"","what":"Partition-based metrics","title":"Details about each evaluation metrics","text":"Partition-based metrics. notation used common throughout table: consider comparing predicted partition PP ground-truth partition GG; aa number pairs group PP GG; bb number pairs class GG different clusters PP; cc number pairs different classes GG cluster PP; dd number pairs different groups PP GG; nn total number objects; EE expectation operator; H(⋅)H(⋅) Shannon entropy; $eta$ ratio weight attributed homogeneity vs completeness; expactation value RI, WH, WC calculated assuming generalized hypergeometric model.","code":""},{"path":"https://roseyuan.github.io/poem/articles/table.html","id":"embedding-based-metrics","dir":"Articles","previous_headings":"","what":"Embedding-based metrics","title":"Details about each evaluation metrics","text":"Embedding-based metrics.","code":""},{"path":"https://roseyuan.github.io/poem/articles/table.html","id":"graph-based-metrics","dir":"Articles","previous_headings":"","what":"Graph-based metrics","title":"Details about each evaluation metrics","text":"Graph-based metrics.","code":""},{"path":"https://roseyuan.github.io/poem/articles/table.html","id":"metrics-for-spatial-clusterings","dir":"Articles","previous_headings":"","what":"Metrics for spatial clusterings","title":"Details about each evaluation metrics","text":"Metrics spatial clusterings.","code":""},{"path":"https://roseyuan.github.io/poem/articles/table.html","id":"session-info","dir":"Articles","previous_headings":"","what":"Session info","title":"Details about each evaluation metrics","text":"","code":"sessionInfo() ## R version 4.4.2 (2024-10-31) ## Platform: x86_64-pc-linux-gnu ## Running under: Ubuntu 22.04.5 LTS ## ## Matrix products: default ## BLAS: /usr/lib/x86_64-linux-gnu/blas/libblas.so.3.10.0 ## LAPACK: /usr/lib/x86_64-linux-gnu/lapack/liblapack.so.3.10.0 ## ## locale: ## [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C ## [3] LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8 ## [5] LC_MONETARY=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 ## [7] LC_PAPER=en_US.UTF-8 LC_NAME=C ## [9] LC_ADDRESS=C LC_TELEPHONE=C ## [11] LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C ## ## time zone: Europe/Zurich ## tzcode source: system (glibc) ## ## attached base packages: ## [1] stats graphics grDevices utils datasets methods base ## ## other attached packages: ## [1] BiocStyle_2.32.1 ## ## loaded via a namespace (and not attached): ## [1] vctrs_0.6.5 svglite_2.1.3 cli_3.6.3 ## [4] knitr_1.48 rlang_1.1.4 xfun_0.46 ## [7] stringi_1.8.4 textshaping_0.3.6 jsonlite_1.8.8 ## [10] glue_1.8.0 colorspace_2.1-1 htmltools_0.5.8.1 ## [13] ragg_1.3.2 sass_0.4.9 scales_1.3.0 ## [16] rmarkdown_2.27 munsell_0.5.1 evaluate_0.24.0 ## [19] jquerylib_0.1.4 kableExtra_1.4.0 fastmap_1.2.0 ## [22] yaml_2.3.10 lifecycle_1.0.4 bookdown_0.40 ## [25] stringr_1.5.1 BiocManager_1.30.23 compiler_4.4.2 ## [28] fs_1.6.4 
htmlwidgets_1.6.4 rstudioapi_0.16.0 ## [31] systemfonts_1.1.0 digest_0.6.36 viridisLite_0.4.2 ## [34] R6_2.5.1 magrittr_2.0.3 bslib_0.8.0 ## [37] tools_4.4.2 xml2_1.3.6 pkgdown_2.1.1 ## [40] cachem_1.1.0 desc_1.4.3"},{"path":"https://roseyuan.github.io/poem/authors.html","id":null,"dir":"","previous_headings":"","what":"Authors","title":"Authors and Citation","text":"Siyuan Luo. Maintainer, author. Pierre-Luc Germain. Author, contributor.","code":""},{"path":"https://roseyuan.github.io/poem/authors.html","id":"citation","dir":"","previous_headings":"","what":"Citation","title":"Authors and Citation","text":"Luo S, Germain P (2024). poem: POpulation-based Evaluation Metrics. R package version 0.99.2, https://roseyuan.github.io/poem/.","code":"@Manual{, title = {poem: POpulation-based Evaluation Metrics}, author = {Siyuan Luo and Pierre-Luc Germain}, year = {2024}, note = {R package version 0.99.2}, url = {https://roseyuan.github.io/poem/}, }"},{"path":[]},{"path":"https://roseyuan.github.io/poem/index.html","id":"overview","dir":"","previous_headings":"","what":"Overview","title":"POpulation-based Evaluation Metrics","text":"poem package provides multiple metrics evaluating subpopulation structure identification dataset. include: Metrics comparing two partitions1 dataset, metrics evaluating alignment dataset’s embedding graph representations partition. Metrics comparing two fuzzy partitions2, comparing hard partition fuzzy partition. allows evaluation fuzzy partition results assessing agreement fuzzy hard ground-truth partition. Metrics tailored domain detection spatially-resolved omics data. include especially external evaluation metrics (.e. based comparison ground truth labels), also internal metrics. detailed introduction package, see online docs.","code":""},{"path":"https://roseyuan.github.io/poem/index.html","id":"installation","dir":"","previous_headings":"","what":"Installation","title":"POpulation-based Evaluation Metrics","text":"can install development version poem GitHub :","code":"# install.packages(\"devtools\") devtools::install_github(\"RoseYuan/poem\")"},{"path":"https://roseyuan.github.io/poem/index.html","id":"contact","dir":"","previous_headings":"","what":"Contact","title":"POpulation-based Evaluation Metrics","text":"case questions suggestions poem, please consider opening issue GitHub repository.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate CDbw index — CDbw","title":"Calculate CDbw index — CDbw","text":"Computes CDbw-index (Halkidi Vazirgiannis 2008; Halkidi, Vazirgiannis Hennig, 2015). function directly copied fpc CRAN package written Christian Hennig. included reduce package dependencies (since fpc --light dependencies required ).","code":""},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate CDbw index — CDbw","text":"","code":"CDbw( x, labels, r = 10, s = seq(0.1, 0.8, by = 0.1), clusterstdev = TRUE, trace = FALSE )"},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate CDbw index — CDbw","text":"x Something can coerced numerical matrix, elements rows. labels vector integers length =nrow(x) indicating cluster observation. r Number cluster border representatives. s Vector shrinking factors. clusterstdev Logical. 
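As a small, non-authoritative illustration of these arguments (reusing the mockData() example data from the Examples section below), r and s can be varied from their documented defaults:

library(poem)
d1 <- mockData()
# fewer border representatives and a coarser grid of shrinking factors
CDbw(d1[,1:2], d1[,3], r = 5, s = seq(0.2, 0.8, by = 0.2))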
TRUE, neighborhood radius intra-cluster density within-cluster estimated squared distance mean cluster; otherwise average clusters. trace Logical; whether print processing info.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate CDbw index — CDbw","text":"vector following values (see refs details): cdbw value CDbw index (higher better). cohesion cohesion. compactness compactness. sep separation.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Calculate CDbw index — CDbw","text":"Halkidi, M. Vazirgiannis, M. (2008) density-based cluster validity approach using multi-representatives. Pattern Recognition Letters 29, 773-786. Halkidi, M., Vazirgiannis, M. Hennig, C. (2015) Method-independent indices cluster validation. C. Hennig, M. Meila, F. Murtagh, R. Rocci (eds.) Handbook Cluster Analysis, CRC Press/Taylor & Francis, Boca Raton.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":"author","dir":"Reference","previous_headings":"","what":"Author","title":"Calculate CDbw index — CDbw","text":"Christian Hennig","code":""},{"path":"https://roseyuan.github.io/poem/reference/CDbw.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Calculate CDbw index — CDbw","text":"","code":"d1 <- mockData() CDbw(d1[,1:2], d1[,3]) #> cdbw cohesion compactness sep #> 0.03805049 0.23717624 0.24202068 0.66288255"},{"path":"https://roseyuan.github.io/poem/reference/CHAOS.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate CHAOS score — CHAOS","title":"Calculate CHAOS score — CHAOS","text":"CHAOS score measures clustering performance calculating mean length graph edges 1-nearest neighbor (1NN) graph cluster, averaged across clusters. Lower CHAOS score indicates better spatial domain clustering performance.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CHAOS.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate CHAOS score — CHAOS","text":"","code":"CHAOS(labels, location, BNPARAM = NULL)"},{"path":"https://roseyuan.github.io/poem/reference/CHAOS.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate CHAOS score — CHAOS","text":"labels Cluster labels. location numeric data matrix containing location information, rows points columns location dimensions. BNPARAM BNPARAM object passed findKNN specifying kNN approximation method use. 
Defaults exact small datasets, Annoy larger ones.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CHAOS.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate CHAOS score — CHAOS","text":"numeric value CHAOS score.","code":""},{"path":"https://roseyuan.github.io/poem/reference/CHAOS.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Calculate CHAOS score — CHAOS","text":"","code":"data(sp_toys) data <- sp_toys CHAOS(data$label, data[,c(\"x\", \"y\")]) #> $CHAOS #> [1] 0.2305866 #> #> $CHAOS_class #> 2 1 #> 0.2305866 0.2305866 #> CHAOS(data$p1, data[,c(\"x\", \"y\")]) #> $CHAOS #> [1] 0.2305866 #> #> $CHAOS_class #> 2 1 #> 0.2305866 0.2305866 #> CHAOS(data$p2, data[,c(\"x\", \"y\")]) #> $CHAOS #> [1] 0.2305866 #> #> $CHAOS_class #> 2 1 #> 0.2305866 0.2305866 #>"},{"path":"https://roseyuan.github.io/poem/reference/ELSA.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate ELSA scores — ELSA","title":"Calculate ELSA scores — ELSA","text":"Calculating Entropy-based Local indicator Spatial Association (ELSA) scores, consist Ea, Ec overall ELSA.","code":""},{"path":"https://roseyuan.github.io/poem/reference/ELSA.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate ELSA scores — ELSA","text":"","code":"ELSA(labels, location, k = 10)"},{"path":"https://roseyuan.github.io/poem/reference/ELSA.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate ELSA scores — ELSA","text":"labels Cluster labels. location numerical matrix containing location information, rows samples columns location dimensions. k Number nearest neighbors.","code":""},{"path":"https://roseyuan.github.io/poem/reference/ELSA.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate ELSA scores — ELSA","text":"dataframe containing Ea, Ec ELSA samples dataset.","code":""},{"path":"https://roseyuan.github.io/poem/reference/ELSA.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Calculate ELSA scores — ELSA","text":"Naimi, Babak, et al., 2019; 10.1016/j.spasta.2018.10.001","code":""},{"path":"https://roseyuan.github.io/poem/reference/ELSA.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Calculate ELSA scores — ELSA","text":"","code":"data(sp_toys) data <- sp_toys ELSA(data$label, data[,c(\"x\", \"y\")], k=6) #> the specified variable is considered as categorical... 
#> Ea Ec ELSA #> 1 0.00000000 0.0000000 0.00000000 #> 2 0.00000000 0.0000000 0.00000000 #> 3 0.09090909 0.4138169 0.03761971 #> 4 0.20000000 0.6840384 0.13680769 #> 5 0.66666667 0.9709506 0.64730040 #> 6 0.11111111 0.4689956 0.05211062 #> 7 0.00000000 0.0000000 0.00000000 #> 8 0.00000000 0.0000000 0.00000000 #> 9 0.00000000 0.0000000 0.00000000 #> 10 0.00000000 0.0000000 0.00000000 #> 11 0.00000000 0.0000000 0.00000000 #> 12 0.00000000 0.0000000 0.00000000 #> 13 0.00000000 0.0000000 0.00000000 #> 14 0.00000000 0.0000000 0.00000000 #> 15 0.00000000 0.0000000 0.00000000 #> 16 0.00000000 0.0000000 0.00000000 #> 17 0.00000000 0.0000000 0.00000000 #> 18 0.13333333 0.5435644 0.07247526 #> 19 0.28571429 0.8366407 0.23904021 #> 20 0.53846154 1.0000000 0.53846154 #> 21 0.07692308 0.3712323 0.02855633 #> 22 0.00000000 0.0000000 0.00000000 #> 23 0.00000000 0.0000000 0.00000000 #> 24 0.00000000 0.0000000 0.00000000 #> 25 0.00000000 0.0000000 0.00000000 #> 26 0.00000000 0.0000000 0.00000000 #> 27 0.00000000 0.0000000 0.00000000 #> 28 0.00000000 0.0000000 0.00000000 #> 29 0.00000000 0.0000000 0.00000000 #> 30 0.00000000 0.0000000 0.00000000 #> 31 0.00000000 0.0000000 0.00000000 #> 32 0.00000000 0.0000000 0.00000000 #> 33 0.00000000 0.0000000 0.00000000 #> 34 0.11764706 0.5032583 0.05920686 #> 35 0.33333333 0.8960382 0.29867941 #> 36 0.33333333 0.8960382 0.29867941 #> 37 0.11764706 0.5032583 0.05920686 #> 38 0.00000000 0.0000000 0.00000000 #> 39 0.00000000 0.0000000 0.00000000 #> 40 0.00000000 0.0000000 0.00000000 #> 41 0.00000000 0.0000000 0.00000000 #> 42 0.00000000 0.0000000 0.00000000 #> 43 0.00000000 0.0000000 0.00000000 #> 44 0.00000000 0.0000000 0.00000000 #> 45 0.00000000 0.0000000 0.00000000 #> 46 0.00000000 0.0000000 0.00000000 #> 47 0.00000000 0.0000000 0.00000000 #> 48 0.00000000 0.0000000 0.00000000 #> 49 0.11764706 0.5032583 0.05920686 #> 50 0.33333333 0.8960382 0.29867941 #> 51 0.33333333 0.8960382 0.29867941 #> 52 0.11764706 0.5032583 0.05920686 #> 53 0.00000000 0.0000000 0.00000000 #> 54 0.00000000 0.0000000 0.00000000 #> 55 0.00000000 0.0000000 0.00000000 #> 56 0.00000000 0.0000000 0.00000000 #> 57 0.00000000 0.0000000 0.00000000 #> 58 0.00000000 0.0000000 0.00000000 #> 59 0.00000000 0.0000000 0.00000000 #> 60 0.00000000 0.0000000 0.00000000 #> 61 0.00000000 0.0000000 0.00000000 #> 62 0.00000000 0.0000000 0.00000000 #> 63 0.00000000 0.0000000 0.00000000 #> 64 0.00000000 0.0000000 0.00000000 #> 65 0.06666667 0.3372901 0.02248600 #> 66 0.46666667 0.9886994 0.46139306 #> 67 0.35294118 0.9182958 0.32410441 #> 68 0.16666667 0.6292492 0.10487487 #> 69 0.00000000 0.0000000 0.00000000 #> 70 0.00000000 0.0000000 0.00000000 #> 71 0.00000000 0.0000000 0.00000000 #> 72 0.00000000 0.0000000 0.00000000 #> 73 0.00000000 0.0000000 0.00000000 #> 74 0.00000000 0.0000000 0.00000000 #> 75 0.00000000 0.0000000 0.00000000 #> 76 0.00000000 0.0000000 0.00000000 #> 77 0.00000000 0.0000000 0.00000000 #> 78 0.00000000 0.0000000 0.00000000 #> 79 0.00000000 0.0000000 0.00000000 #> 80 0.06666667 0.3372901 0.02248600 #> 81 0.46666667 0.9886994 0.46139306 #> 82 0.35294118 0.9182958 0.32410441 #> 83 0.16666667 0.6292492 0.10487487 #> 84 0.00000000 0.0000000 0.00000000 #> 85 0.00000000 0.0000000 0.00000000 #> 86 0.00000000 0.0000000 0.00000000 #> 87 0.00000000 0.0000000 0.00000000 #> 88 0.00000000 0.0000000 0.00000000 #> 89 0.00000000 0.0000000 0.00000000 #> 90 0.00000000 0.0000000 0.00000000 #> 91 0.00000000 0.0000000 0.00000000 #> 92 0.00000000 0.0000000 0.00000000 #> 93 0.00000000 0.0000000 0.00000000 #> 94 
0.00000000 0.0000000 0.00000000 #> 95 0.00000000 0.0000000 0.00000000 #> 96 0.20000000 0.6962123 0.13924245 #> 97 0.41176471 0.9640788 0.39697361 #> 98 0.38888889 0.9494520 0.36923134 #> 99 0.17647059 0.6500224 0.11470984 #> 100 0.00000000 0.0000000 0.00000000 #> 101 0.00000000 0.0000000 0.00000000 #> 102 0.00000000 0.0000000 0.00000000 #> 103 0.00000000 0.0000000 0.00000000 #> 104 0.00000000 0.0000000 0.00000000 #> 105 0.00000000 0.0000000 0.00000000 #> 106 0.00000000 0.0000000 0.00000000 #> 107 0.00000000 0.0000000 0.00000000 #> 108 0.00000000 0.0000000 0.00000000 #> 109 0.00000000 0.0000000 0.00000000 #> 110 0.00000000 0.0000000 0.00000000 #> 111 0.20000000 0.6962123 0.13924245 #> 112 0.41176471 0.9640788 0.39697361 #> 113 0.38888889 0.9494520 0.36923134 #> 114 0.16666667 0.6292492 0.10487487 #> 115 0.00000000 0.0000000 0.00000000 #> 116 0.00000000 0.0000000 0.00000000 #> 117 0.00000000 0.0000000 0.00000000 #> 118 0.00000000 0.0000000 0.00000000 #> 119 0.00000000 0.0000000 0.00000000 #> 120 0.00000000 0.0000000 0.00000000 #> 121 0.00000000 0.0000000 0.00000000 #> 122 0.00000000 0.0000000 0.00000000 #> 123 0.00000000 0.0000000 0.00000000 #> 124 0.00000000 0.0000000 0.00000000 #> 125 0.00000000 0.0000000 0.00000000 #> 126 0.00000000 0.0000000 0.00000000 #> 127 0.17647059 0.6500224 0.11470984 #> 128 0.38888889 0.9494520 0.36923134 #> 129 0.41176471 0.9640788 0.39697361 #> 130 0.20000000 0.6962123 0.13924245 #> 131 0.00000000 0.0000000 0.00000000 #> 132 0.00000000 0.0000000 0.00000000 #> 133 0.00000000 0.0000000 0.00000000 #> 134 0.00000000 0.0000000 0.00000000 #> 135 0.00000000 0.0000000 0.00000000 #> 136 0.00000000 0.0000000 0.00000000 #> 137 0.00000000 0.0000000 0.00000000 #> 138 0.00000000 0.0000000 0.00000000 #> 139 0.00000000 0.0000000 0.00000000 #> 140 0.00000000 0.0000000 0.00000000 #> 141 0.00000000 0.0000000 0.00000000 #> 142 0.17647059 0.6500224 0.11470984 #> 143 0.38888889 0.9494520 0.36923134 #> 144 0.38888889 0.9494520 0.36923134 #> 145 0.20000000 0.6962123 0.13924245 #> 146 0.00000000 0.0000000 0.00000000 #> 147 0.00000000 0.0000000 0.00000000 #> 148 0.00000000 0.0000000 0.00000000 #> 149 0.00000000 0.0000000 0.00000000 #> 150 0.00000000 0.0000000 0.00000000 #> 151 0.00000000 0.0000000 0.00000000 #> 152 0.00000000 0.0000000 0.00000000 #> 153 0.00000000 0.0000000 0.00000000 #> 154 0.00000000 0.0000000 0.00000000 #> 155 0.00000000 0.0000000 0.00000000 #> 156 0.00000000 0.0000000 0.00000000 #> 157 0.00000000 0.0000000 0.00000000 #> 158 0.16666667 0.6292492 0.10487487 #> 159 0.35294118 0.9182958 0.32410441 #> 160 0.46666667 0.9886994 0.46139306 #> 161 0.07692308 0.3712323 0.02855633 #> 162 0.00000000 0.0000000 0.00000000 #> 163 0.00000000 0.0000000 0.00000000 #> 164 0.00000000 0.0000000 0.00000000 #> 165 0.00000000 0.0000000 0.00000000 #> 166 0.00000000 0.0000000 0.00000000 #> 167 0.00000000 0.0000000 0.00000000 #> 168 0.00000000 0.0000000 0.00000000 #> 169 0.00000000 0.0000000 0.00000000 #> 170 0.00000000 0.0000000 0.00000000 #> 171 0.00000000 0.0000000 0.00000000 #> 172 0.00000000 0.0000000 0.00000000 #> 173 0.16666667 0.6292492 0.10487487 #> 174 0.38888889 0.9494520 0.36923134 #> 175 0.46666667 0.9886994 0.46139306 #> 176 0.14285714 0.5665095 0.08092993 #> 177 0.00000000 0.0000000 0.00000000 #> 178 0.00000000 0.0000000 0.00000000 #> 179 0.00000000 0.0000000 0.00000000 #> 180 0.00000000 0.0000000 0.00000000 #> 181 0.00000000 0.0000000 0.00000000 #> 182 0.00000000 0.0000000 0.00000000 #> 183 0.00000000 0.0000000 0.00000000 #> 184 0.00000000 0.0000000 0.00000000 #> 185 0.00000000 
0.0000000 0.00000000 #> 186 0.00000000 0.0000000 0.00000000 #> 187 0.00000000 0.0000000 0.00000000 #> 188 0.00000000 0.0000000 0.00000000 #> 189 0.11764706 0.5032583 0.05920686 #> 190 0.33333333 0.8960382 0.29867941 #> 191 0.38461538 0.9402860 0.36164845 #> 192 0.06666667 0.3372901 0.02248600 #> 193 0.00000000 0.0000000 0.00000000 #> 194 0.00000000 0.0000000 0.00000000 #> 195 0.00000000 0.0000000 0.00000000 #> 196 0.00000000 0.0000000 0.00000000 #> 197 0.00000000 0.0000000 0.00000000 #> 198 0.00000000 0.0000000 0.00000000 #> 199 0.00000000 0.0000000 0.00000000 #> 200 0.00000000 0.0000000 0.00000000 #> 201 0.00000000 0.0000000 0.00000000 #> 202 0.00000000 0.0000000 0.00000000 #> 203 0.00000000 0.0000000 0.00000000 #> 204 0.16666667 0.6292492 0.10487487 #> 205 0.33333333 0.8960382 0.29867941 #> 206 0.42857143 0.9709506 0.41612168 #> 207 0.06666667 0.3372901 0.02248600 #> 208 0.00000000 0.0000000 0.00000000 #> 209 0.00000000 0.0000000 0.00000000 #> 210 0.00000000 0.0000000 0.00000000 #> 211 0.00000000 0.0000000 0.00000000 #> 212 0.00000000 0.0000000 0.00000000 #> 213 0.00000000 0.0000000 0.00000000 #> 214 0.00000000 0.0000000 0.00000000 #> 215 0.00000000 0.0000000 0.00000000 #> 216 0.00000000 0.0000000 0.00000000 #> 217 0.00000000 0.0000000 0.00000000 #> 218 0.00000000 0.0000000 0.00000000 #> 219 0.00000000 0.0000000 0.00000000 #> 220 0.07692308 0.3712323 0.02855633 #> 221 0.50000000 0.9957275 0.49786373 #> 222 0.30769231 0.8631206 0.26557556 #> 223 0.14285714 0.5665095 0.08092993 #> 224 0.00000000 0.0000000 0.00000000 #> 225 0.00000000 0.0000000 0.00000000 #> 226 0.00000000 0.0000000 0.00000000 #> 227 0.00000000 0.0000000 0.00000000 #> 228 0.00000000 0.0000000 0.00000000 #> 229 0.00000000 0.0000000 0.00000000 #> 230 0.00000000 0.0000000 0.00000000 #> 231 0.00000000 0.0000000 0.00000000 #> 232 0.00000000 0.0000000 0.00000000 #> 233 0.00000000 0.0000000 0.00000000 #> 234 0.00000000 0.0000000 0.00000000 #> 235 0.11111111 0.4689956 0.05211062 #> 236 0.55555556 1.0000000 0.55555556 #> 237 0.22222222 0.7219281 0.16042847 #> 238 0.10000000 0.4394970 0.04394970 #> 239 0.00000000 0.0000000 0.00000000 #> 240 0.00000000 0.0000000 0.00000000 ELSA(data$p1, data[,c(\"x\", \"y\")], k=6) #> the specified variable is considered as categorical... 
#> Ea Ec ELSA #> 1 0.00000000 0.0000000 0.00000000 #> 2 0.00000000 0.0000000 0.00000000 #> 3 0.00000000 0.0000000 0.00000000 #> 4 0.00000000 0.0000000 0.00000000 #> 5 0.22222222 0.7219281 0.16042847 #> 6 0.44444444 0.9709506 0.43153360 #> 7 0.20000000 0.6840384 0.13680769 #> 8 0.00000000 0.0000000 0.00000000 #> 9 0.00000000 0.0000000 0.00000000 #> 10 0.00000000 0.0000000 0.00000000 #> 11 0.00000000 0.0000000 0.00000000 #> 12 0.00000000 0.0000000 0.00000000 #> 13 0.00000000 0.0000000 0.00000000 #> 14 0.00000000 0.0000000 0.00000000 #> 15 0.00000000 0.0000000 0.00000000 #> 16 0.00000000 0.0000000 0.00000000 #> 17 0.00000000 0.0000000 0.00000000 #> 18 0.00000000 0.0000000 0.00000000 #> 19 0.07142857 0.3533593 0.02523995 #> 20 0.30769231 0.8631206 0.26557556 #> 21 0.38461538 0.9402860 0.36164845 #> 22 0.14285714 0.5665095 0.08092993 #> 23 0.00000000 0.0000000 0.00000000 #> 24 0.00000000 0.0000000 0.00000000 #> 25 0.00000000 0.0000000 0.00000000 #> 26 0.00000000 0.0000000 0.00000000 #> 27 0.00000000 0.0000000 0.00000000 #> 28 0.00000000 0.0000000 0.00000000 #> 29 0.00000000 0.0000000 0.00000000 #> 30 0.00000000 0.0000000 0.00000000 #> 31 0.00000000 0.0000000 0.00000000 #> 32 0.00000000 0.0000000 0.00000000 #> 33 0.00000000 0.0000000 0.00000000 #> 34 0.00000000 0.0000000 0.00000000 #> 35 0.06666667 0.3372901 0.02248600 #> 36 0.46666667 0.9886994 0.46139306 #> 37 0.35294118 0.9182958 0.32410441 #> 38 0.16666667 0.6292492 0.10487487 #> 39 0.00000000 0.0000000 0.00000000 #> 40 0.00000000 0.0000000 0.00000000 #> 41 0.00000000 0.0000000 0.00000000 #> 42 0.00000000 0.0000000 0.00000000 #> 43 0.00000000 0.0000000 0.00000000 #> 44 0.00000000 0.0000000 0.00000000 #> 45 0.00000000 0.0000000 0.00000000 #> 46 0.00000000 0.0000000 0.00000000 #> 47 0.00000000 0.0000000 0.00000000 #> 48 0.00000000 0.0000000 0.00000000 #> 49 0.00000000 0.0000000 0.00000000 #> 50 0.06666667 0.3372901 0.02248600 #> 51 0.46666667 0.9886994 0.46139306 #> 52 0.35294118 0.9182958 0.32410441 #> 53 0.16666667 0.6292492 0.10487487 #> 54 0.00000000 0.0000000 0.00000000 #> 55 0.00000000 0.0000000 0.00000000 #> 56 0.00000000 0.0000000 0.00000000 #> 57 0.00000000 0.0000000 0.00000000 #> 58 0.00000000 0.0000000 0.00000000 #> 59 0.00000000 0.0000000 0.00000000 #> 60 0.00000000 0.0000000 0.00000000 #> 61 0.00000000 0.0000000 0.00000000 #> 62 0.00000000 0.0000000 0.00000000 #> 63 0.00000000 0.0000000 0.00000000 #> 64 0.00000000 0.0000000 0.00000000 #> 65 0.00000000 0.0000000 0.00000000 #> 66 0.20000000 0.6962123 0.13924245 #> 67 0.41176471 0.9640788 0.39697361 #> 68 0.38888889 0.9494520 0.36923134 #> 69 0.17647059 0.6500224 0.11470984 #> 70 0.00000000 0.0000000 0.00000000 #> 71 0.00000000 0.0000000 0.00000000 #> 72 0.00000000 0.0000000 0.00000000 #> 73 0.00000000 0.0000000 0.00000000 #> 74 0.00000000 0.0000000 0.00000000 #> 75 0.00000000 0.0000000 0.00000000 #> 76 0.00000000 0.0000000 0.00000000 #> 77 0.00000000 0.0000000 0.00000000 #> 78 0.00000000 0.0000000 0.00000000 #> 79 0.00000000 0.0000000 0.00000000 #> 80 0.00000000 0.0000000 0.00000000 #> 81 0.20000000 0.6962123 0.13924245 #> 82 0.41176471 0.9640788 0.39697361 #> 83 0.38888889 0.9494520 0.36923134 #> 84 0.16666667 0.6292492 0.10487487 #> 85 0.00000000 0.0000000 0.00000000 #> 86 0.00000000 0.0000000 0.00000000 #> 87 0.00000000 0.0000000 0.00000000 #> 88 0.00000000 0.0000000 0.00000000 #> 89 0.00000000 0.0000000 0.00000000 #> 90 0.00000000 0.0000000 0.00000000 #> 91 0.00000000 0.0000000 0.00000000 #> 92 0.00000000 0.0000000 0.00000000 #> 93 0.00000000 0.0000000 0.00000000 #> 94 
0.00000000 0.0000000 0.00000000 #> 95 0.00000000 0.0000000 0.00000000 #> 96 0.00000000 0.0000000 0.00000000 #> 97 0.17647059 0.6500224 0.11470984 #> 98 0.38888889 0.9494520 0.36923134 #> 99 0.41176471 0.9640788 0.39697361 #> 100 0.20000000 0.6962123 0.13924245 #> 101 0.00000000 0.0000000 0.00000000 #> 102 0.00000000 0.0000000 0.00000000 #> 103 0.00000000 0.0000000 0.00000000 #> 104 0.00000000 0.0000000 0.00000000 #> 105 0.00000000 0.0000000 0.00000000 #> 106 0.00000000 0.0000000 0.00000000 #> 107 0.00000000 0.0000000 0.00000000 #> 108 0.00000000 0.0000000 0.00000000 #> 109 0.00000000 0.0000000 0.00000000 #> 110 0.00000000 0.0000000 0.00000000 #> 111 0.00000000 0.0000000 0.00000000 #> 112 0.17647059 0.6500224 0.11470984 #> 113 0.38888889 0.9494520 0.36923134 #> 114 0.38888889 0.9494520 0.36923134 #> 115 0.20000000 0.6962123 0.13924245 #> 116 0.00000000 0.0000000 0.00000000 #> 117 0.00000000 0.0000000 0.00000000 #> 118 0.00000000 0.0000000 0.00000000 #> 119 0.00000000 0.0000000 0.00000000 #> 120 0.00000000 0.0000000 0.00000000 #> 121 0.00000000 0.0000000 0.00000000 #> 122 0.00000000 0.0000000 0.00000000 #> 123 0.00000000 0.0000000 0.00000000 #> 124 0.00000000 0.0000000 0.00000000 #> 125 0.00000000 0.0000000 0.00000000 #> 126 0.00000000 0.0000000 0.00000000 #> 127 0.00000000 0.0000000 0.00000000 #> 128 0.16666667 0.6292492 0.10487487 #> 129 0.35294118 0.9182958 0.32410441 #> 130 0.46666667 0.9886994 0.46139306 #> 131 0.07692308 0.3712323 0.02855633 #> 132 0.00000000 0.0000000 0.00000000 #> 133 0.00000000 0.0000000 0.00000000 #> 134 0.00000000 0.0000000 0.00000000 #> 135 0.00000000 0.0000000 0.00000000 #> 136 0.00000000 0.0000000 0.00000000 #> 137 0.00000000 0.0000000 0.00000000 #> 138 0.00000000 0.0000000 0.00000000 #> 139 0.00000000 0.0000000 0.00000000 #> 140 0.00000000 0.0000000 0.00000000 #> 141 0.00000000 0.0000000 0.00000000 #> 142 0.00000000 0.0000000 0.00000000 #> 143 0.16666667 0.6292492 0.10487487 #> 144 0.38888889 0.9494520 0.36923134 #> 145 0.46666667 0.9886994 0.46139306 #> 146 0.14285714 0.5665095 0.08092993 #> 147 0.00000000 0.0000000 0.00000000 #> 148 0.00000000 0.0000000 0.00000000 #> 149 0.00000000 0.0000000 0.00000000 #> 150 0.00000000 0.0000000 0.00000000 #> 151 0.00000000 0.0000000 0.00000000 #> 152 0.00000000 0.0000000 0.00000000 #> 153 0.00000000 0.0000000 0.00000000 #> 154 0.00000000 0.0000000 0.00000000 #> 155 0.00000000 0.0000000 0.00000000 #> 156 0.00000000 0.0000000 0.00000000 #> 157 0.00000000 0.0000000 0.00000000 #> 158 0.00000000 0.0000000 0.00000000 #> 159 0.11764706 0.5032583 0.05920686 #> 160 0.33333333 0.8960382 0.29867941 #> 161 0.38461538 0.9402860 0.36164845 #> 162 0.06666667 0.3372901 0.02248600 #> 163 0.00000000 0.0000000 0.00000000 #> 164 0.00000000 0.0000000 0.00000000 #> 165 0.00000000 0.0000000 0.00000000 #> 166 0.00000000 0.0000000 0.00000000 #> 167 0.00000000 0.0000000 0.00000000 #> 168 0.00000000 0.0000000 0.00000000 #> 169 0.00000000 0.0000000 0.00000000 #> 170 0.00000000 0.0000000 0.00000000 #> 171 0.00000000 0.0000000 0.00000000 #> 172 0.00000000 0.0000000 0.00000000 #> 173 0.00000000 0.0000000 0.00000000 #> 174 0.16666667 0.6292492 0.10487487 #> 175 0.33333333 0.8960382 0.29867941 #> 176 0.42857143 0.9709506 0.41612168 #> 177 0.06666667 0.3372901 0.02248600 #> 178 0.00000000 0.0000000 0.00000000 #> 179 0.00000000 0.0000000 0.00000000 #> 180 0.00000000 0.0000000 0.00000000 #> 181 0.00000000 0.0000000 0.00000000 #> 182 0.00000000 0.0000000 0.00000000 #> 183 0.00000000 0.0000000 0.00000000 #> 184 0.00000000 0.0000000 0.00000000 #> 185 0.00000000 
0.0000000 0.00000000 #> 186 0.00000000 0.0000000 0.00000000 #> 187 0.00000000 0.0000000 0.00000000 #> 188 0.00000000 0.0000000 0.00000000 #> 189 0.00000000 0.0000000 0.00000000 #> 190 0.06666667 0.3372901 0.02248600 #> 191 0.46153846 0.9852281 0.45472068 #> 192 0.33333333 0.8960382 0.29867941 #> 193 0.18750000 0.6722948 0.12605528 #> 194 0.00000000 0.0000000 0.00000000 #> 195 0.00000000 0.0000000 0.00000000 #> 196 0.00000000 0.0000000 0.00000000 #> 197 0.00000000 0.0000000 0.00000000 #> 198 0.00000000 0.0000000 0.00000000 #> 199 0.00000000 0.0000000 0.00000000 #> 200 0.00000000 0.0000000 0.00000000 #> 201 0.00000000 0.0000000 0.00000000 #> 202 0.00000000 0.0000000 0.00000000 #> 203 0.00000000 0.0000000 0.00000000 #> 204 0.00000000 0.0000000 0.00000000 #> 205 0.06666667 0.3372901 0.02248600 #> 206 0.42857143 0.9709506 0.41612168 #> 207 0.33333333 0.8960382 0.29867941 #> 208 0.18750000 0.6722948 0.12605528 #> 209 0.00000000 0.0000000 0.00000000 #> 210 0.00000000 0.0000000 0.00000000 #> 211 0.00000000 0.0000000 0.00000000 #> 212 0.00000000 0.0000000 0.00000000 #> 213 0.00000000 0.0000000 0.00000000 #> 214 0.00000000 0.0000000 0.00000000 #> 215 0.00000000 0.0000000 0.00000000 #> 216 0.00000000 0.0000000 0.00000000 #> 217 0.00000000 0.0000000 0.00000000 #> 218 0.00000000 0.0000000 0.00000000 #> 219 0.00000000 0.0000000 0.00000000 #> 220 0.00000000 0.0000000 0.00000000 #> 221 0.16666667 0.6193822 0.10323037 #> 222 0.53846154 1.0000000 0.53846154 #> 223 0.35714286 0.9182958 0.32796280 #> 224 0.15384615 0.5916728 0.09102658 #> 225 0.00000000 0.0000000 0.00000000 #> 226 0.00000000 0.0000000 0.00000000 #> 227 0.00000000 0.0000000 0.00000000 #> 228 0.00000000 0.0000000 0.00000000 #> 229 0.00000000 0.0000000 0.00000000 #> 230 0.00000000 0.0000000 0.00000000 #> 231 0.00000000 0.0000000 0.00000000 #> 232 0.00000000 0.0000000 0.00000000 #> 233 0.00000000 0.0000000 0.00000000 #> 234 0.00000000 0.0000000 0.00000000 #> 235 0.00000000 0.0000000 0.00000000 #> 236 0.22222222 0.7219281 0.16042847 #> 237 0.66666667 0.9709506 0.64730040 #> 238 0.30000000 0.8453509 0.25360528 #> 239 0.12500000 0.5032583 0.06290729 #> 240 0.00000000 0.0000000 0.00000000 ELSA(data$p2, data[,c(\"x\", \"y\")], k=6) #> the specified variable is considered as categorical... 
#> Ea Ec ELSA #> 1 0.00000000 0.0000000 0.00000000 #> 2 0.00000000 0.0000000 0.00000000 #> 3 0.09090909 0.4138169 0.03761971 #> 4 0.20000000 0.6840384 0.13680769 #> 5 0.66666667 0.9709506 0.64730040 #> 6 0.11111111 0.4689956 0.05211062 #> 7 0.00000000 0.0000000 0.00000000 #> 8 0.00000000 0.0000000 0.00000000 #> 9 0.00000000 0.0000000 0.00000000 #> 10 0.00000000 0.0000000 0.00000000 #> 11 0.00000000 0.0000000 0.00000000 #> 12 0.00000000 0.0000000 0.00000000 #> 13 0.00000000 0.0000000 0.00000000 #> 14 0.00000000 0.0000000 0.00000000 #> 15 0.00000000 0.0000000 0.00000000 #> 16 0.00000000 0.0000000 0.00000000 #> 17 0.00000000 0.0000000 0.00000000 #> 18 0.13333333 0.5435644 0.07247526 #> 19 0.28571429 0.8366407 0.23904021 #> 20 0.53846154 1.0000000 0.53846154 #> 21 0.07692308 0.3712323 0.02855633 #> 22 0.00000000 0.0000000 0.00000000 #> 23 0.00000000 0.0000000 0.00000000 #> 24 0.00000000 0.0000000 0.00000000 #> 25 0.00000000 0.0000000 0.00000000 #> 26 0.00000000 0.0000000 0.00000000 #> 27 0.00000000 0.0000000 0.00000000 #> 28 0.00000000 0.0000000 0.00000000 #> 29 0.00000000 0.0000000 0.00000000 #> 30 0.00000000 0.0000000 0.00000000 #> 31 0.00000000 0.0000000 0.00000000 #> 32 0.00000000 0.0000000 0.00000000 #> 33 0.00000000 0.0000000 0.00000000 #> 34 0.11764706 0.5032583 0.05920686 #> 35 0.33333333 0.8960382 0.29867941 #> 36 0.40000000 0.9544340 0.38177360 #> 37 0.23529412 0.7642045 0.17981283 #> 38 0.16666667 0.6292492 0.10487487 #> 39 0.17647059 0.6500224 0.11470984 #> 40 0.13333333 0.5435644 0.07247526 #> 41 0.07692308 0.3712323 0.02855633 #> 42 0.00000000 0.0000000 0.00000000 #> 43 0.00000000 0.0000000 0.00000000 #> 44 0.00000000 0.0000000 0.00000000 #> 45 0.00000000 0.0000000 0.00000000 #> 46 0.00000000 0.0000000 0.00000000 #> 47 0.00000000 0.0000000 0.00000000 #> 48 0.00000000 0.0000000 0.00000000 #> 49 0.11764706 0.5032583 0.05920686 #> 50 0.26666667 0.8112781 0.21634083 #> 51 0.53333333 1.0000000 0.53333333 #> 52 0.41176471 0.9640788 0.39697361 #> 53 0.38888889 0.9494520 0.36923134 #> 54 0.38888889 0.9494520 0.36923134 #> 55 0.33333333 0.8960382 0.29867941 #> 56 0.14285714 0.5665095 0.08092993 #> 57 0.06666667 0.3372901 0.02248600 #> 58 0.00000000 0.0000000 0.00000000 #> 59 0.00000000 0.0000000 0.00000000 #> 60 0.00000000 0.0000000 0.00000000 #> 61 0.00000000 0.0000000 0.00000000 #> 62 0.00000000 0.0000000 0.00000000 #> 63 0.00000000 0.0000000 0.00000000 #> 64 0.00000000 0.0000000 0.00000000 #> 65 0.06666667 0.3372901 0.02248600 #> 66 0.26666667 0.8112781 0.21634083 #> 67 0.35294118 0.9182958 0.32410441 #> 68 0.38888889 0.9494520 0.36923134 #> 69 0.41176471 0.9640788 0.39697361 #> 70 0.46666667 0.9886994 0.46139306 #> 71 0.69230769 0.9402860 0.65096720 #> 72 0.13333333 0.5435644 0.07247526 #> 73 0.06250000 0.3227570 0.02017231 #> 74 0.00000000 0.0000000 0.00000000 #> 75 0.00000000 0.0000000 0.00000000 #> 76 0.00000000 0.0000000 0.00000000 #> 77 0.00000000 0.0000000 0.00000000 #> 78 0.00000000 0.0000000 0.00000000 #> 79 0.00000000 0.0000000 0.00000000 #> 80 0.00000000 0.0000000 0.00000000 #> 81 0.13333333 0.5435644 0.07247526 #> 82 0.17647059 0.6500224 0.11470984 #> 83 0.16666667 0.6292492 0.10487487 #> 84 0.22222222 0.7424876 0.16499724 #> 85 0.33333333 0.8960382 0.29867941 #> 86 0.42857143 0.9709506 0.41612168 #> 87 0.13333333 0.5435644 0.07247526 #> 88 0.00000000 0.0000000 0.00000000 #> 89 0.00000000 0.0000000 0.00000000 #> 90 0.00000000 0.0000000 0.00000000 #> 91 0.00000000 0.0000000 0.00000000 #> 92 0.00000000 0.0000000 0.00000000 #> 93 0.00000000 0.0000000 0.00000000 #> 94 
0.00000000 0.0000000 0.00000000 #> 95 0.00000000 0.0000000 0.00000000 #> 96 0.00000000 0.0000000 0.00000000 #> 97 0.00000000 0.0000000 0.00000000 #> 98 0.05555556 0.2974722 0.01652624 #> 99 0.11764706 0.5032583 0.05920686 #> 100 0.26666667 0.8112781 0.21634083 #> 101 0.53846154 1.0000000 0.53846154 #> 102 0.20000000 0.6962123 0.13924245 #> 103 0.06250000 0.3227570 0.02017231 #> 104 0.00000000 0.0000000 0.00000000 #> 105 0.00000000 0.0000000 0.00000000 #> 106 0.00000000 0.0000000 0.00000000 #> 107 0.00000000 0.0000000 0.00000000 #> 108 0.00000000 0.0000000 0.00000000 #> 109 0.00000000 0.0000000 0.00000000 #> 110 0.00000000 0.0000000 0.00000000 #> 111 0.00000000 0.0000000 0.00000000 #> 112 0.05882353 0.3095434 0.01820844 #> 113 0.16666667 0.6292492 0.10487487 #> 114 0.33333333 0.8997438 0.29991459 #> 115 0.53333333 1.0000000 0.53333333 #> 116 0.28571429 0.8366407 0.23904021 #> 117 0.06666667 0.3372901 0.02248600 #> 118 0.00000000 0.0000000 0.00000000 #> 119 0.00000000 0.0000000 0.00000000 #> 120 0.00000000 0.0000000 0.00000000 #> 121 0.00000000 0.0000000 0.00000000 #> 122 0.00000000 0.0000000 0.00000000 #> 123 0.00000000 0.0000000 0.00000000 #> 124 0.00000000 0.0000000 0.00000000 #> 125 0.00000000 0.0000000 0.00000000 #> 126 0.00000000 0.0000000 0.00000000 #> 127 0.05882353 0.3095434 0.01820844 #> 128 0.16666667 0.6292492 0.10487487 #> 129 0.76470588 0.8524052 0.65183925 #> 130 0.53333333 1.0000000 0.53333333 #> 131 0.23076923 0.7495953 0.17298352 #> 132 0.06666667 0.3372901 0.02248600 #> 133 0.00000000 0.0000000 0.00000000 #> 134 0.00000000 0.0000000 0.00000000 #> 135 0.00000000 0.0000000 0.00000000 #> 136 0.00000000 0.0000000 0.00000000 #> 137 0.00000000 0.0000000 0.00000000 #> 138 0.00000000 0.0000000 0.00000000 #> 139 0.00000000 0.0000000 0.00000000 #> 140 0.00000000 0.0000000 0.00000000 #> 141 0.00000000 0.0000000 0.00000000 #> 142 0.11764706 0.5032583 0.05920686 #> 143 0.27777778 0.8314744 0.23096511 #> 144 0.55555556 0.9980009 0.55444494 #> 145 0.33333333 0.8960382 0.29867941 #> 146 0.00000000 0.0000000 0.00000000 #> 147 0.00000000 0.0000000 0.00000000 #> 148 0.00000000 0.0000000 0.00000000 #> 149 0.00000000 0.0000000 0.00000000 #> 150 0.00000000 0.0000000 0.00000000 #> 151 0.00000000 0.0000000 0.00000000 #> 152 0.00000000 0.0000000 0.00000000 #> 153 0.00000000 0.0000000 0.00000000 #> 154 0.00000000 0.0000000 0.00000000 #> 155 0.00000000 0.0000000 0.00000000 #> 156 0.00000000 0.0000000 0.00000000 #> 157 0.00000000 0.0000000 0.00000000 #> 158 0.16666667 0.6292492 0.10487487 #> 159 0.35294118 0.9182958 0.32410441 #> 160 0.46666667 0.9886994 0.46139306 #> 161 0.07692308 0.3712323 0.02855633 #> 162 0.00000000 0.0000000 0.00000000 #> 163 0.00000000 0.0000000 0.00000000 #> 164 0.00000000 0.0000000 0.00000000 #> 165 0.00000000 0.0000000 0.00000000 #> 166 0.00000000 0.0000000 0.00000000 #> 167 0.00000000 0.0000000 0.00000000 #> 168 0.00000000 0.0000000 0.00000000 #> 169 0.00000000 0.0000000 0.00000000 #> 170 0.00000000 0.0000000 0.00000000 #> 171 0.00000000 0.0000000 0.00000000 #> 172 0.00000000 0.0000000 0.00000000 #> 173 0.16666667 0.6292492 0.10487487 #> 174 0.38888889 0.9494520 0.36923134 #> 175 0.46666667 0.9886994 0.46139306 #> 176 0.14285714 0.5665095 0.08092993 #> 177 0.00000000 0.0000000 0.00000000 #> 178 0.00000000 0.0000000 0.00000000 #> 179 0.00000000 0.0000000 0.00000000 #> 180 0.00000000 0.0000000 0.00000000 #> 181 0.00000000 0.0000000 0.00000000 #> 182 0.00000000 0.0000000 0.00000000 #> 183 0.00000000 0.0000000 0.00000000 #> 184 0.00000000 0.0000000 0.00000000 #> 185 0.00000000 
0.0000000 0.00000000 #> 186 0.00000000 0.0000000 0.00000000 #> 187 0.00000000 0.0000000 0.00000000 #> 188 0.00000000 0.0000000 0.00000000 #> 189 0.11764706 0.5032583 0.05920686 #> 190 0.33333333 0.8960382 0.29867941 #> 191 0.38461538 0.9402860 0.36164845 #> 192 0.06666667 0.3372901 0.02248600 #> 193 0.00000000 0.0000000 0.00000000 #> 194 0.00000000 0.0000000 0.00000000 #> 195 0.00000000 0.0000000 0.00000000 #> 196 0.00000000 0.0000000 0.00000000 #> 197 0.00000000 0.0000000 0.00000000 #> 198 0.00000000 0.0000000 0.00000000 #> 199 0.00000000 0.0000000 0.00000000 #> 200 0.00000000 0.0000000 0.00000000 #> 201 0.00000000 0.0000000 0.00000000 #> 202 0.00000000 0.0000000 0.00000000 #> 203 0.00000000 0.0000000 0.00000000 #> 204 0.16666667 0.6292492 0.10487487 #> 205 0.33333333 0.8960382 0.29867941 #> 206 0.42857143 0.9709506 0.41612168 #> 207 0.06666667 0.3372901 0.02248600 #> 208 0.00000000 0.0000000 0.00000000 #> 209 0.00000000 0.0000000 0.00000000 #> 210 0.00000000 0.0000000 0.00000000 #> 211 0.00000000 0.0000000 0.00000000 #> 212 0.00000000 0.0000000 0.00000000 #> 213 0.00000000 0.0000000 0.00000000 #> 214 0.00000000 0.0000000 0.00000000 #> 215 0.00000000 0.0000000 0.00000000 #> 216 0.00000000 0.0000000 0.00000000 #> 217 0.00000000 0.0000000 0.00000000 #> 218 0.00000000 0.0000000 0.00000000 #> 219 0.00000000 0.0000000 0.00000000 #> 220 0.07692308 0.3712323 0.02855633 #> 221 0.50000000 0.9957275 0.49786373 #> 222 0.30769231 0.8631206 0.26557556 #> 223 0.14285714 0.5665095 0.08092993 #> 224 0.00000000 0.0000000 0.00000000 #> 225 0.00000000 0.0000000 0.00000000 #> 226 0.00000000 0.0000000 0.00000000 #> 227 0.00000000 0.0000000 0.00000000 #> 228 0.00000000 0.0000000 0.00000000 #> 229 0.00000000 0.0000000 0.00000000 #> 230 0.00000000 0.0000000 0.00000000 #> 231 0.00000000 0.0000000 0.00000000 #> 232 0.00000000 0.0000000 0.00000000 #> 233 0.00000000 0.0000000 0.00000000 #> 234 0.00000000 0.0000000 0.00000000 #> 235 0.11111111 0.4689956 0.05211062 #> 236 0.55555556 1.0000000 0.55555556 #> 237 0.22222222 0.7219281 0.16042847 #> 238 0.10000000 0.4394970 0.04394970 #> 239 0.00000000 0.0000000 0.00000000 #> 240 0.00000000 0.0000000 0.00000000"},{"path":"https://roseyuan.github.io/poem/reference/FMeasure.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate F measure — FMeasure","title":"Calculate F measure — FMeasure","text":"Compute F measure two clustering results. 
It is directly copied from the package FlowSOM.","code":""},{"path":"https://roseyuan.github.io/poem/reference/FMeasure.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate F measure — FMeasure","text":"","code":"FMeasure(true, pred, silent = TRUE)"},{"path":"https://roseyuan.github.io/poem/reference/FMeasure.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate F measure — FMeasure","text":"true Array containing the real cluster labels for each sample. pred Array containing the predicted cluster labels for each sample. silent Logical; if FALSE, print information about the precision and recall.","code":""},{"path":"https://roseyuan.github.io/poem/reference/FMeasure.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate F measure — FMeasure","text":"The F measure score.","code":""},{"path":"https://roseyuan.github.io/poem/reference/PAS.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate PAS score — PAS","title":"Calculate PAS score — PAS","text":"The PAS score measures clustering performance by calculating the randomness of the spots that are located outside of the spatial region they were clustered to. A lower PAS score indicates better spatial domain clustering performance.","code":""},{"path":"https://roseyuan.github.io/poem/reference/PAS.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate PAS score — PAS","text":"","code":"PAS(labels, location, k = 10, ...)"},{"path":"https://roseyuan.github.io/poem/reference/PAS.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate PAS score — PAS","text":"labels Cluster labels. location A numerical matrix containing location information, with rows as samples and columns as location dimensions. k Number of nearest neighbors. ...
Optional params findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/PAS.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate PAS score — PAS","text":"numeric value PAS score, boolean vector abnormal spots.","code":""},{"path":"https://roseyuan.github.io/poem/reference/PAS.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Calculate PAS score — PAS","text":"","code":"data(sp_toys) data <- sp_toys PAS(data$label, data[,c(\"x\", \"y\")], k=6) #> $PAS #> [1] 0.008333333 #> #> $abnormalty #> [1] FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [37] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [49] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [73] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [85] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [97] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [109] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [121] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [133] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [145] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [157] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [169] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [181] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [193] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [205] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [217] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [229] FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE FALSE #> PAS(data$p1, data[,c(\"x\", \"y\")], k=6) #> $PAS #> [1] 0.004166667 #> #> $abnormalty #> [1] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [37] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [49] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [73] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [85] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [97] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [109] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [121] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [133] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [145] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [157] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [169] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [181] FALSE FALSE FALSE FALSE FALSE FALSE 
FALSE FALSE FALSE FALSE FALSE FALSE #> [193] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [205] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [217] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [229] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE #> PAS(data$p2, data[,c(\"x\", \"y\")], k=6) #> $PAS #> [1] 0.01666667 #> #> $abnormalty #> [1] FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [37] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [49] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE #> [73] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [85] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [97] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [109] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [121] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE #> [133] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [145] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [157] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [169] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [181] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [193] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [205] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [217] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE #> [229] FALSE FALSE FALSE FALSE FALSE FALSE FALSE TRUE FALSE FALSE FALSE FALSE #>"},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate DBCV Metric — dbcv","title":"Calculate DBCV Metric — dbcv","text":"Compute DBCV (Density-Based Clustering Validation) metric.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate DBCV Metric — dbcv","text":"","code":"dbcv( X, labels, distance = \"euclidean\", noise_id = -1, check_duplicates = FALSE, use_igraph_mst = TRUE, BPPARAM = BiocParallel::SerialParam(), ... )"},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate DBCV Metric — dbcv","text":"X Numeric matrix samples. labels Integer vector cluster IDs. distance String specifying distance metric. \"sqeuclidean\", possible method stats::dist(). default \"euclidean\". noise_id Integer, cluster ID y noise (default -1). check_duplicates Logical flag check duplicate samples. use_igraph_mst Logical flag use igraph's MST implementation. Currently mst igraph implemented. BPPARAM BiocParallel params multithreading (default none) ... Ignored","code":""},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate DBCV Metric — dbcv","text":"list: vcs Numeric vector validity index cluster. 
dbcv A numeric value representing the overall DBCV metric.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Calculate DBCV Metric — dbcv","text":"This implementation may not fully reproduce the results of existing implementations (e.g. https://github.com/FelSiq/DBCV) due to the different algorithms used for computing the Minimum Spanning Tree.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Calculate DBCV Metric — dbcv","text":"Davoud Moulavi, et al. 2014; 10.1137/1.9781611973440.96.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dbcv.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Calculate DBCV Metric — dbcv","text":"","code":"data(noisy_moon) data <- noisy_moon dbcv(data[, c(\"x\", \"y\")], data$kmeans_label) #> $vcs #> [1] -0.4383721 -0.4077112 #> #> $dbcv #> [1] -0.4230416 #> dbcv(data[, c(\"x\", \"y\")], data$hdbscan_label) #> $vcs #> [1] -0.5889023 0.3726825 0.5500422 0.7884686 0.4887283 0.7682203 #> [7] 0.7246492 0.7246492 0.9349664 0.4749650 0.5500422 0.3726825 #> #> $dbcv #> [1] 0.4214685 #>"},{"path":"https://roseyuan.github.io/poem/reference/dot-check_duplicated_samples.html","id":null,"dir":"Reference","previous_headings":"","what":"Check Duplicated Samples — .check_duplicated_samples","title":"Check Duplicated Samples — .check_duplicated_samples","text":"Checks for duplicated samples in the matrix X.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-check_duplicated_samples.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check Duplicated Samples — .check_duplicated_samples","text":"","code":".check_duplicated_samples(X, threshold = 1e-09)"},{"path":"https://roseyuan.github.io/poem/reference/dot-check_duplicated_samples.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Check Duplicated Samples — .check_duplicated_samples","text":"X Numeric matrix of samples. threshold Numeric, the distance threshold to consider samples as duplicates.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-check_duplicated_samples.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Check Duplicated Samples — .check_duplicated_samples","text":"None","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_cluster_core_distance.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute Cluster Core Distance — .compute_cluster_core_distance","title":"Compute Cluster Core Distance — .compute_cluster_core_distance","text":"Computes the core distance for each point in a cluster.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_cluster_core_distance.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute Cluster Core Distance — .compute_cluster_core_distance","text":"","code":".compute_cluster_core_distance(dists, d)"},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_cluster_core_distance.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute Cluster Core Distance — .compute_cluster_core_distance","text":"dists Numeric matrix of distances.
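The dbcv() entry above also documents a configurable distance and a BPPARAM argument for parallelization, which the shipped examples leave at their defaults. A minimal sketch of how they might be combined on the same toy data; the choice of "sqeuclidean" and of two workers is purely illustrative and not part of the package examples:

library(poem)
library(BiocParallel)
data(noisy_moon)
# same toy data as in the example above, but using the documented
# "sqeuclidean" distance and a parallel backend with two workers
dbcv(noisy_moon[, c("x", "y")], noisy_moon$hdbscan_label,
     distance = "sqeuclidean", BPPARAM = MulticoreParam(2))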
d Integer, the dimensionality.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_cluster_core_distance.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute Cluster Core Distance — .compute_cluster_core_distance","text":"Numeric vector of core distances for each point.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_mutual_reach_dists.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute Mutual Reachability Distances — .compute_mutual_reach_dists","title":"Compute Mutual Reachability Distances — .compute_mutual_reach_dists","text":"Computes the mutual reachability distances between points.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_mutual_reach_dists.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute Mutual Reachability Distances — .compute_mutual_reach_dists","text":"","code":".compute_mutual_reach_dists(dists, d)"},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_mutual_reach_dists.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute Mutual Reachability Distances — .compute_mutual_reach_dists","text":"dists Numeric matrix of distances. d Float, the dimensionality.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_mutual_reach_dists.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute Mutual Reachability Distances — .compute_mutual_reach_dists","text":"A list containing the core distances and the mutual reachability distances.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_pair_to_pair_dists.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute Pair to Pair Distances — .compute_pair_to_pair_dists","title":"Compute Pair to Pair Distances — .compute_pair_to_pair_dists","text":"Compute the pairwise distances between the points in the matrix X.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_pair_to_pair_dists.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute Pair to Pair Distances — .compute_pair_to_pair_dists","text":"","code":".compute_pair_to_pair_dists(X, distance = \"euclidean\")"},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_pair_to_pair_dists.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute Pair to Pair Distances — .compute_pair_to_pair_dists","text":"X Numeric matrix.
distance String specifying the metric used to compute the distances.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-compute_pair_to_pair_dists.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute Pair to Pair Distances — .compute_pair_to_pair_dists","text":"Numeric matrix of pairwise distances, with self-distances set to Inf.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-convert_singleton_clusters_to_noise.html","id":null,"dir":"Reference","previous_headings":"","what":"Convert Singleton Clusters to Noise — .convert_singleton_clusters_to_noise","title":"Convert Singleton Clusters to Noise — .convert_singleton_clusters_to_noise","text":"Converts clusters containing only a single instance to noise.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-convert_singleton_clusters_to_noise.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Convert Singleton Clusters to Noise — .convert_singleton_clusters_to_noise","text":"","code":".convert_singleton_clusters_to_noise(labels, noise_id)"},{"path":"https://roseyuan.github.io/poem/reference/dot-convert_singleton_clusters_to_noise.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Convert Singleton Clusters to Noise — .convert_singleton_clusters_to_noise","text":"labels Integer vector of cluster IDs. noise_id Integer, the ID used for noise.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-convert_singleton_clusters_to_noise.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Convert Singleton Clusters to Noise — .convert_singleton_clusters_to_noise","text":"Integer vector with singleton clusters converted to noise.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_separation.html","id":null,"dir":"Reference","previous_headings":"","what":"Density Separation of a Pair of Clusters — .fn_density_separation","title":"Density Separation of a Pair of Clusters — .fn_density_separation","text":"Computes the density separation between two clusters.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_separation.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Density Separation of a Pair of Clusters — .fn_density_separation","text":"","code":".fn_density_separation( cls_i, cls_j, dists, internal_core_dists_i, internal_core_dists_j )"},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_separation.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Density Separation of a Pair of Clusters — .fn_density_separation","text":"cls_i Integer, the first cluster index. cls_j Integer, the second cluster index. dists Numeric matrix of distances. internal_core_dists_i Numeric vector of core distances for cluster i.
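As a plain-R illustration of what .compute_pair_to_pair_dists() is described as returning above (a pairwise distance matrix whose self-distances are set to Inf), here is a small sketch using only base R; it is not a call to the internal helper itself:

# toy data: 10 points in 2 dimensions
X <- matrix(rnorm(20), ncol = 2)
# full pairwise Euclidean distance matrix
D <- as.matrix(dist(X, method = "euclidean"))
# mimic the helper's convention of setting self-distances to Inf
diag(D) <- Inf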
internal_core_dists_j Numeric vector of core distances for cluster j.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_separation.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Density Separation of a Pair of Clusters — .fn_density_separation","text":"A list containing the cluster indices and the density separation.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_sparseness.html","id":null,"dir":"Reference","previous_headings":"","what":"Density Sparseness of a Cluster — .fn_density_sparseness","title":"Density Sparseness of a Cluster — .fn_density_sparseness","text":"Computes the density sparseness of a given cluster.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_sparseness.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Density Sparseness of a Cluster — .fn_density_sparseness","text":"","code":".fn_density_sparseness(cls_inds, dists, d, use_igraph_mst)"},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_sparseness.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Density Sparseness of a Cluster — .fn_density_sparseness","text":"cls_inds Integer vector of cluster indices. dists Numeric matrix of distances. d Integer, the dimensionality. use_igraph_mst Logical flag on whether to use the MST implementation from igraph. Currently only the MST from igraph is implemented.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-fn_density_sparseness.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Density Sparseness of a Cluster — .fn_density_sparseness","text":"A list containing the density sparseness, the internal core distances, and the internal node indices.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-get_internal_objects.html","id":null,"dir":"Reference","previous_headings":"","what":"Get Internal Objects — .get_internal_objects","title":"Get Internal Objects — .get_internal_objects","text":"Computes the internal nodes and edges using a Minimum Spanning Tree.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-get_internal_objects.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get Internal Objects — .get_internal_objects","text":"","code":".get_internal_objects(mutual_reach_dists, use_igraph_mst = TRUE)"},{"path":"https://roseyuan.github.io/poem/reference/dot-get_internal_objects.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get Internal Objects — .get_internal_objects","text":"mutual_reach_dists Numeric matrix representing the mutual reachability distances. use_igraph_mst Logical flag on whether to use the MST implementation from igraph.
Currently only the MST from igraph is implemented.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-get_internal_objects.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Get Internal Objects — .get_internal_objects","text":"A list containing the indices of the internal nodes and the edge weights.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-get_submatrix.html","id":null,"dir":"Reference","previous_headings":"","what":"Get Sub matrix — .get_submatrix","title":"Get Sub matrix — .get_submatrix","text":"Extract a sub matrix from a matrix based on optional row and column indices.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-get_submatrix.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get Sub matrix — .get_submatrix","text":"","code":".get_submatrix(arr, inds_a = NULL, inds_b = NULL)"},{"path":"https://roseyuan.github.io/poem/reference/dot-get_submatrix.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get Sub matrix — .get_submatrix","text":"arr Numeric matrix. inds_a Optional integer vector of row indices. inds_b Optional integer vector of column indices.","code":""},{"path":"https://roseyuan.github.io/poem/reference/dot-get_submatrix.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Get Sub matrix — .get_submatrix","text":"Numeric matrix representing the sub matrix.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2knn.html","id":null,"dir":"Reference","previous_headings":"","what":"Computes k nearest neighbors from embedding — emb2knn","title":"Computes k nearest neighbors from embedding — emb2knn","text":"Computes the k nearest neighbors from an embedding.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2knn.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Computes k nearest neighbors from embedding — emb2knn","text":"","code":"emb2knn(x, k, BNPARAM = NULL)"},{"path":"https://roseyuan.github.io/poem/reference/emb2knn.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Computes k nearest neighbors from embedding — emb2knn","text":"x A numeric matrix (with features as columns and items as rows) from which the nearest neighbors are computed. k The number of nearest neighbors. BNPARAM The BiocNeighbors parameter object used to compute the kNNs. Ignored unless the input is a matrix or data.frame. If
omitted, Annoy approximation used 500 elements.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2knn.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Computes k nearest neighbors from embedding — emb2knn","text":"knn list.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2knn.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Computes k nearest neighbors from embedding — emb2knn","text":"","code":"d1 <- mockData() emb2knn(as.matrix(d1[,1:2]),k=5) #> $index #> [,1] [,2] [,3] [,4] [,5] #> [1,] 19 3 12 16 2 #> [2,] 16 13 19 12 6 #> [3,] 12 19 4 2 16 #> [4,] 13 12 6 2 18 #> [5,] 10 20 21 8 24 #> [6,] 13 18 2 35 4 #> [7,] 25 24 23 5 10 #> [8,] 21 20 10 5 17 #> [9,] 39 15 30 31 18 #> [10,] 20 5 21 8 24 #> [11,] 31 30 15 14 39 #> [12,] 19 2 3 4 16 #> [13,] 6 18 2 35 4 #> [14,] 31 11 17 30 15 #> [15,] 30 11 31 39 9 #> [16,] 2 19 6 12 13 #> [17,] 22 14 8 31 21 #> [18,] 35 6 13 39 9 #> [19,] 12 2 16 3 13 #> [20,] 10 21 5 8 24 #> [21,] 20 8 5 10 17 #> [22,] 17 26 21 8 14 #> [23,] 25 24 10 5 3 #> [24,] 5 10 20 25 21 #> [25,] 7 24 23 5 10 #> [26,] 22 28 17 14 21 #> [27,] 33 29 34 40 28 #> [28,] 26 32 34 27 22 #> [29,] 33 27 40 36 37 #> [30,] 15 11 31 39 9 #> [31,] 11 30 15 14 9 #> [32,] 38 28 34 26 27 #> [33,] 29 27 40 37 36 #> [34,] 27 28 32 38 29 #> [35,] 18 39 6 13 9 #> [36,] 37 40 33 11 39 #> [37,] 40 11 30 15 31 #> [38,] 32 34 28 26 27 #> [39,] 15 9 30 35 18 #> [40,] 37 11 31 14 30 #> #> $distance #> [,1] [,2] [,3] [,4] [,5] #> [1,] 1.29566420 1.3421067 1.4715387 1.6048505 1.7086970 #> [2,] 0.37452175 0.4444869 0.4458890 0.4612560 0.4666427 #> [3,] 0.62968457 0.7546696 0.9842686 1.0909192 1.3163584 #> [4,] 0.60121886 0.6580306 0.6860728 0.7425212 0.8131569 #> [5,] 0.18657183 0.2548209 0.3146041 0.5396107 0.7037336 #> [6,] 0.08575145 0.3500797 0.4666427 0.4716242 0.6860728 #> [7,] 0.93513257 1.8240751 2.0100110 2.1228417 2.2680189 #> [8,] 0.25218300 0.3587982 0.5079719 0.5396107 0.6249692 #> [9,] 0.36773351 0.4199126 0.4412507 0.5610582 0.5664558 #> [10,] 0.15120930 0.1865718 0.3548648 0.5079719 0.7077195 #> [11,] 0.15292110 0.1869890 0.2106084 0.4888427 0.5383705 #> [12,] 0.25837036 0.4612560 0.6296846 0.6580306 0.7248265 #> [13,] 0.08575145 0.3825643 0.4444869 0.5295547 0.6012189 #> [14,] 0.40406159 0.4888427 0.5499476 0.6172360 0.6360720 #> [15,] 0.02363391 0.2106084 0.2321602 0.3481019 0.4199126 #> [16,] 0.37452175 0.5664998 0.7091540 0.7248265 0.7269356 #> [17,] 0.43197293 0.5499476 0.6249692 0.6805494 0.6933050 #> [18,] 0.20180947 0.3500797 0.3825643 0.4731934 0.5664558 #> [19,] 0.25837036 0.4458890 0.5664998 0.7546696 0.8562987 #> [20,] 0.15120930 0.2471735 0.2548209 0.3587982 0.8588908 #> [21,] 0.24717347 0.2521830 0.3146041 0.3548648 0.6933050 #> [22,] 0.43197293 0.5359545 0.7748010 0.8458200 0.8870181 #> [23,] 1.13716937 1.1403808 1.8144949 1.8405530 1.8694539 #> [24,] 0.70373359 0.7077195 0.8588908 0.9841743 1.0148838 #> [25,] 0.93513257 0.9841743 1.1371694 1.4900812 1.5841110 #> [26,] 0.53595454 0.8217170 0.8963990 1.1594005 1.2948579 #> [27,] 0.81728422 0.8768693 0.9860296 1.1192155 1.1657922 #> [28,] 0.82171700 0.9478323 1.0768017 1.1657922 1.1850271 #> [29,] 0.37241440 0.8768693 1.3225420 1.4892807 1.5601800 #> [30,] 0.02363391 0.1869890 0.2140727 0.3687323 0.4412507 #> [31,] 0.15292110 0.2140727 0.2321602 0.4040616 0.5610582 #> [32,] 0.55686537 0.9478323 1.1745553 1.3934585 1.8566828 #> [33,] 0.37241440 0.8172842 0.9605452 1.1882100 1.1913554 #> [34,] 0.98602961 1.0768017 
1.1745553 1.2587367 1.6696433 #> [35,] 0.20180947 0.4178786 0.4716242 0.5295547 0.6398327 #> [36,] 0.78929140 1.0616034 1.1913554 1.3796854 1.4111328 #> [37,] 0.42274741 0.5916806 0.6659964 0.6777386 0.7444686 #> [38,] 0.55686537 1.2587367 1.4407066 1.9503217 2.1388739 #> [39,] 0.34810191 0.3677335 0.3687323 0.4178786 0.4731934 #> [40,] 0.42274741 0.5691441 0.6853185 0.7250634 0.7344199 #>"},{"path":"https://roseyuan.github.io/poem/reference/emb2snn.html","id":null,"dir":"Reference","previous_headings":"","what":"Computes shared nearest neighbors from embedding — emb2snn","title":"Computes shared nearest neighbors from embedding — emb2snn","text":"computes shared nearest neighbors embedding.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2snn.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Computes shared nearest neighbors from embedding — emb2snn","text":"","code":"emb2snn(x, k, type = \"rank\", BNPARAM = NULL)"},{"path":"https://roseyuan.github.io/poem/reference/emb2snn.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Computes shared nearest neighbors from embedding — emb2snn","text":"x numeric matrix (features columns items rows) nearest neighbors computed. k number nearest neighbors. type string specifying type weighting scheme use shared neighbors. Possible choices include \"rank\", \"number\", \"jaccard\". See type bluster::neighborsToSNNGraph() details. BNPARAM BiocNeighbors parameter object compute kNNs. Ignored unless input matrix data.frame. omitted, Annoy approximation used 500 elements.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2snn.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Computes shared nearest neighbors from embedding — emb2snn","text":"igraph object.","code":""},{"path":"https://roseyuan.github.io/poem/reference/emb2snn.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Computes shared nearest neighbors from embedding — emb2snn","text":"","code":"d1 <- mockData() emb2snn(as.matrix(d1[,1:2]),k=5) #> IGRAPH 09455b4 U-W- 40 257 -- #> + attr: weight (e/n) #> + edges from 09455b4: #> [1] 1-- 3 3-- 4 2-- 4 1-- 5 3-- 6 1-- 6 4-- 6 5-- 7 1-- 7 6-- 7 #> [11] 3-- 7 4-- 7 4-- 8 3-- 8 7-- 8 5-- 9 7-- 9 8-- 9 2--10 3--10 #> [21] 6--10 9--11 8--11 7--11 4--11 3--11 6--12 7--12 1--12 3--12 #> [31] 4--12 11--12 8--12 2--13 10--13 4--13 8--14 4--14 11--14 9--14 #> [41] 13--15 2--15 10--15 4--15 8--15 3--15 15--16 2--16 13--16 4--16 #> [51] 10--16 8--16 3--16 3--17 10--17 6--17 12--17 4--17 1--17 7--17 #> [61] 17--18 6--18 1--18 12--18 3--18 7--18 9--19 11--19 7--19 8--19 #> [71] 5--19 4--19 3--19 12--19 14--19 1--19 2--20 10--20 13--20 15--20 #> + ... 
omitted several edges"},{"path":"https://roseyuan.github.io/poem/reference/findSpatialKNN.html","id":null,"dir":"Reference","previous_headings":"","what":"Find the k nearest spatial neighbors — findSpatialKNN","title":"Find the k nearest spatial neighbors — findSpatialKNN","text":"given dataset, find k nearest neighbors object based spatial locations, option handling ties.","code":""},{"path":"https://roseyuan.github.io/poem/reference/findSpatialKNN.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Find the k nearest spatial neighbors — findSpatialKNN","text":"","code":"findSpatialKNN( location, k, keep_ties = TRUE, useMedianDist = FALSE, BNPARAM = NULL )"},{"path":"https://roseyuan.github.io/poem/reference/findSpatialKNN.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Find the k nearest spatial neighbors — findSpatialKNN","text":"location numeric data matrix containing location information, rows points columns location dimensions. k number nearest neighbors look . keep_ties Boolean indicating ties counted . TRUE, neighbors distances included even means returning k neighbors. useMedianDist Use median distance k nearest neighbor maximum distance included. Ignored keep_ties=FALSE. BNPARAM BNPARAM object passed findKNN specifying kNN approximation method use. Defaults exact small datasets, Annoy larger ones.","code":""},{"path":"https://roseyuan.github.io/poem/reference/findSpatialKNN.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Find the k nearest spatial neighbors — findSpatialKNN","text":"list indices.","code":""},{"path":"https://roseyuan.github.io/poem/reference/findSpatialKNN.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Find the k nearest spatial neighbors — findSpatialKNN","text":"","code":"data(sp_toys) data <- sp_toys findSpatialKNN(data[,c(\"x\", \"y\")], k=6) #> [[1]] #> [1] 2 16 17 31 32 3 #> #> [[2]] #> [1] 1 16 17 3 18 32 #> #> [[3]] #> [1] 4 18 17 2 19 16 33 #> #> [[4]] #> [1] 3 5 18 19 17 20 34 #> #> [[5]] #> [1] 4 19 20 6 18 35 #> #> [[6]] #> [1] 7 20 21 5 22 36 #> #> [[7]] #> [1] 6 8 21 22 20 23 37 #> #> [[8]] #> [1] 7 9 22 23 21 24 38 #> #> [[9]] #> [1] 8 10 23 24 22 25 #> #> [[10]] #> [1] 9 24 25 11 23 26 40 #> #> [[11]] #> [1] 26 25 10 12 24 27 41 #> #> [[12]] #> [1] 13 27 26 11 28 42 #> #> [[13]] #> [1] 12 27 28 14 26 29 43 #> #> [[14]] #> [1] 15 29 28 13 30 27 44 #> #> [[15]] #> [1] 14 29 30 28 45 13 44 #> #> [[16]] #> [1] 17 2 32 1 31 3 33 46 #> #> [[17]] #> [1] 16 2 32 3 33 18 #> #> [[18]] #> [1] 19 3 4 33 34 17 #> #> [[19]] #> [1] 18 4 5 34 35 20 #> #> [[20]] #> [1] 5 6 35 36 19 21 #> #> [[21]] #> [1] 7 22 37 6 36 20 #> #> [[22]] #> [1] 8 21 23 38 7 37 #> #> [[23]] #> [1] 9 22 24 39 8 38 #> #> [[24]] #> [1] 25 10 23 40 9 39 #> #> [[25]] #> [1] 24 10 40 11 41 26 #> #> [[26]] #> [1] 11 41 12 42 25 27 #> #> [[27]] #> [1] 28 12 13 42 43 26 #> #> [[28]] #> [1] 27 13 43 14 44 29 #> #> [[29]] #> [1] 30 14 15 44 45 28 #> #> [[30]] #> [1] 29 15 45 14 44 60 #> #> [[31]] #> [1] 32 16 46 17 47 1 61 #> #> [[32]] #> [1] 16 17 31 46 47 33 #> #> [[33]] #> [1] 34 18 48 17 47 32 #> #> [[34]] #> [1] 33 35 18 19 48 49 #> #> [[35]] #> [1] 34 19 49 20 50 36 #> #> [[36]] #> [1] 37 20 21 50 51 35 #> #> [[37]] #> [1] 21 36 38 51 22 52 #> #> [[38]] #> [1] 22 37 39 52 23 53 #> #> [[39]] #> [1] 23 38 40 53 24 54 #> #> [[40]] #> [1] 24 25 39 54 55 41 #> #> [[41]] #> [1] 26 56 25 55 40 42 #> #> [[42]] #> [1] 43 27 57 26 56 41 #> 
#> [[43]] #> [1] 42 27 28 57 58 44 #> #> [[44]] #> [1] 45 29 59 28 58 43 #> #> [[45]] #> [1] 44 29 30 59 60 15 28 58 75 #> #> [[46]] #> [1] 47 32 62 31 61 16 33 63 76 #> #> [[47]] #> [1] 46 32 62 33 63 48 #> #> [[48]] #> [1] 49 33 34 63 64 47 #> #> [[49]] #> [1] 48 34 35 64 65 50 #> #> [[50]] #> [1] 35 36 65 66 49 51 #> #> [[51]] #> [1] 37 52 67 36 66 50 #> #> [[52]] #> [1] 38 51 53 68 37 67 #> #> [[53]] #> [1] 39 52 54 69 38 68 #> #> [[54]] #> [1] 55 40 53 70 39 69 #> #> [[55]] #> [1] 54 40 70 41 71 56 #> #> [[56]] #> [1] 41 71 42 72 55 57 #> #> [[57]] #> [1] 58 42 43 72 73 56 #> #> [[58]] #> [1] 57 43 73 44 74 59 #> #> [[59]] #> [1] 60 44 45 74 75 58 #> #> [[60]] #> [1] 59 45 75 44 74 30 90 #> #> [[61]] #> [1] 62 46 76 47 77 31 91 #> #> [[62]] #> [1] 46 47 61 76 77 63 #> #> [[63]] #> [1] 64 48 78 47 77 62 #> #> [[64]] #> [1] 63 65 48 49 78 79 #> #> [[65]] #> [1] 64 49 79 50 80 66 #> #> [[66]] #> [1] 67 50 51 80 81 65 #> #> [[67]] #> [1] 51 66 68 81 52 82 #> #> [[68]] #> [1] 52 67 69 82 53 83 #> #> [[69]] #> [1] 53 68 70 83 54 84 #> #> [[70]] #> [1] 54 55 69 84 85 71 #> #> [[71]] #> [1] 56 86 55 85 70 72 #> #> [[72]] #> [1] 73 57 87 56 86 71 #> #> [[73]] #> [1] 72 57 58 87 88 74 #> #> [[74]] #> [1] 75 59 89 58 88 73 #> #> [[75]] #> [1] 74 59 60 89 90 45 58 88 105 #> #> [[76]] #> [1] 77 62 92 61 91 46 63 93 106 #> #> [[77]] #> [1] 76 62 92 63 93 78 #> #> [[78]] #> [1] 79 63 64 93 94 77 #> #> [[79]] #> [1] 78 64 65 94 95 80 #> #> [[80]] #> [1] 65 66 95 96 79 81 #> #> [[81]] #> [1] 67 82 97 66 96 80 #> #> [[82]] #> [1] 68 81 83 98 67 97 #> #> [[83]] #> [1] 69 82 84 99 68 98 #> #> [[84]] #> [1] 85 70 83 100 69 99 #> #> [[85]] #> [1] 84 70 100 71 101 86 #> #> [[86]] #> [1] 71 101 72 102 85 87 #> #> [[87]] #> [1] 88 72 73 102 103 86 #> #> [[88]] #> [1] 87 73 103 74 104 89 #> #> [[89]] #> [1] 90 74 75 104 105 88 #> #> [[90]] #> [1] 89 75 105 74 104 60 120 #> #> [[91]] #> [1] 92 76 106 77 107 61 121 #> #> [[92]] #> [1] 76 77 91 106 107 93 #> #> [[93]] #> [1] 94 78 108 77 107 92 #> #> [[94]] #> [1] 93 95 78 79 108 109 #> #> [[95]] #> [1] 94 79 109 80 110 96 #> #> [[96]] #> [1] 97 80 81 110 111 95 #> #> [[97]] #> [1] 81 96 98 111 82 112 #> #> [[98]] #> [1] 82 97 99 112 83 113 #> #> [[99]] #> [1] 83 98 100 113 84 114 #> #> [[100]] #> [1] 84 85 99 114 115 101 #> #> [[101]] #> [1] 86 116 85 115 100 102 #> #> [[102]] #> [1] 103 87 117 86 116 101 #> #> [[103]] #> [1] 102 87 88 117 118 104 #> #> [[104]] #> [1] 105 89 119 88 118 103 #> #> [[105]] #> [1] 104 89 90 119 120 75 88 118 135 #> #> [[106]] #> [1] 107 92 122 91 121 76 93 123 136 #> #> [[107]] #> [1] 106 92 122 93 123 108 #> #> [[108]] #> [1] 109 93 94 123 124 107 #> #> [[109]] #> [1] 108 94 95 124 125 110 #> #> [[110]] #> [1] 95 96 125 126 109 111 #> #> [[111]] #> [1] 97 112 127 96 126 110 #> #> [[112]] #> [1] 98 111 113 128 97 127 #> #> [[113]] #> [1] 99 112 114 129 98 128 #> #> [[114]] #> [1] 115 100 113 130 99 129 #> #> [[115]] #> [1] 114 100 130 101 131 116 #> #> [[116]] #> [1] 101 131 102 132 115 117 #> #> [[117]] #> [1] 118 102 103 132 133 116 #> #> [[118]] #> [1] 117 103 133 104 134 119 #> #> [[119]] #> [1] 120 104 105 134 135 118 #> #> [[120]] #> [1] 119 105 135 104 134 90 150 #> #> [[121]] #> [1] 122 106 136 107 137 91 151 #> #> [[122]] #> [1] 106 107 121 136 137 123 #> #> [[123]] #> [1] 124 108 138 107 137 122 #> #> [[124]] #> [1] 123 125 108 109 138 139 #> #> [[125]] #> [1] 124 109 139 110 140 126 #> #> [[126]] #> [1] 127 110 111 140 141 125 #> #> [[127]] #> [1] 111 126 128 141 112 142 #> #> [[128]] #> [1] 112 127 129 142 113 143 #> #> 
[[129]] #> [1] 113 128 130 143 114 144 #> #> [[130]] #> [1] 114 115 129 144 145 131 #> #> [[131]] #> [1] 116 146 115 145 130 132 #> #> [[132]] #> [1] 133 117 147 116 146 131 #> #> [[133]] #> [1] 132 117 118 147 148 134 #> #> [[134]] #> [1] 135 119 149 118 148 133 #> #> [[135]] #> [1] 134 119 120 149 150 105 118 148 165 #> #> [[136]] #> [1] 137 122 152 121 151 106 123 153 166 #> #> [[137]] #> [1] 136 122 152 123 153 138 #> #> [[138]] #> [1] 139 123 124 153 154 137 #> #> [[139]] #> [1] 138 124 125 154 155 140 #> #> [[140]] #> [1] 125 126 155 156 139 141 #> #> [[141]] #> [1] 127 142 157 126 156 140 #> #> [[142]] #> [1] 128 141 143 158 127 157 #> #> [[143]] #> [1] 129 142 144 159 128 158 #> #> [[144]] #> [1] 145 130 143 160 129 159 #> #> [[145]] #> [1] 144 130 160 131 161 146 #> #> [[146]] #> [1] 131 161 132 162 145 147 #> #> [[147]] #> [1] 148 132 133 162 163 146 #> #> [[148]] #> [1] 147 133 163 134 164 149 #> #> [[149]] #> [1] 150 134 135 164 165 148 #> #> [[150]] #> [1] 149 135 165 134 164 120 180 #> #> [[151]] #> [1] 152 136 166 137 167 121 181 #> #> [[152]] #> [1] 136 137 151 166 167 153 #> #> [[153]] #> [1] 154 138 168 137 167 152 #> #> [[154]] #> [1] 153 155 138 139 168 169 #> #> [[155]] #> [1] 154 139 169 140 170 156 #> #> [[156]] #> [1] 157 140 141 170 171 155 #> #> [[157]] #> [1] 141 156 158 171 142 172 #> #> [[158]] #> [1] 142 157 159 172 143 173 #> #> [[159]] #> [1] 143 158 160 173 144 174 #> #> [[160]] #> [1] 144 145 159 174 175 161 #> #> [[161]] #> [1] 146 176 145 175 160 162 #> #> [[162]] #> [1] 163 147 177 146 176 161 #> #> [[163]] #> [1] 162 147 148 177 178 164 #> #> [[164]] #> [1] 165 149 179 148 178 163 #> #> [[165]] #> [1] 164 149 150 179 180 135 148 178 195 #> #> [[166]] #> [1] 167 152 182 151 181 136 153 183 196 #> #> [[167]] #> [1] 166 152 182 153 183 168 #> #> [[168]] #> [1] 169 153 154 183 184 167 #> #> [[169]] #> [1] 168 154 155 184 185 170 #> #> [[170]] #> [1] 155 156 185 186 169 171 #> #> [[171]] #> [1] 157 172 187 156 186 170 #> #> [[172]] #> [1] 158 171 173 188 157 187 #> #> [[173]] #> [1] 159 172 174 189 158 188 #> #> [[174]] #> [1] 175 160 173 190 159 189 #> #> [[175]] #> [1] 174 160 190 161 191 176 #> #> [[176]] #> [1] 161 191 162 192 175 177 #> #> [[177]] #> [1] 178 162 163 192 193 176 #> #> [[178]] #> [1] 177 163 193 164 194 179 #> #> [[179]] #> [1] 180 164 165 194 195 178 #> #> [[180]] #> [1] 179 165 195 164 194 150 210 #> #> [[181]] #> [1] 182 166 196 167 197 151 211 #> #> [[182]] #> [1] 166 167 181 196 197 183 #> #> [[183]] #> [1] 184 168 198 167 197 182 #> #> [[184]] #> [1] 183 185 168 169 198 199 #> #> [[185]] #> [1] 184 169 199 170 200 186 #> #> [[186]] #> [1] 187 170 171 200 201 185 #> #> [[187]] #> [1] 171 186 188 201 172 202 #> #> [[188]] #> [1] 172 187 189 202 173 203 #> #> [[189]] #> [1] 173 188 190 203 174 204 #> #> [[190]] #> [1] 174 175 189 204 205 191 #> #> [[191]] #> [1] 176 206 175 205 190 192 #> #> [[192]] #> [1] 193 177 207 176 206 191 #> #> [[193]] #> [1] 192 177 178 207 208 194 #> #> [[194]] #> [1] 195 179 209 178 208 193 #> #> [[195]] #> [1] 194 179 180 209 210 165 178 208 225 #> #> [[196]] #> [1] 197 182 212 181 211 166 183 213 226 #> #> [[197]] #> [1] 196 182 212 183 213 198 #> #> [[198]] #> [1] 199 183 184 213 214 197 #> #> [[199]] #> [1] 198 184 185 214 215 200 #> #> [[200]] #> [1] 185 186 215 216 199 201 #> #> [[201]] #> [1] 187 202 217 186 216 200 #> #> [[202]] #> [1] 188 201 203 218 187 217 #> #> [[203]] #> [1] 189 202 204 219 188 218 #> #> [[204]] #> [1] 205 190 203 220 189 219 #> #> [[205]] #> [1] 204 190 220 191 221 206 #> #> 
[[206]] #> [1] 191 221 192 222 205 207 #> #> [[207]] #> [1] 208 192 193 222 223 206 #> #> [[208]] #> [1] 207 193 223 194 224 209 #> #> [[209]] #> [1] 210 194 195 224 225 208 #> #> [[210]] #> [1] 209 195 225 194 224 180 240 #> #> [[211]] #> [1] 212 196 226 197 227 181 #> #> [[212]] #> [1] 196 197 211 226 227 213 #> #> [[213]] #> [1] 214 198 228 197 227 212 #> #> [[214]] #> [1] 213 215 198 199 228 229 #> #> [[215]] #> [1] 214 199 229 200 230 216 #> #> [[216]] #> [1] 217 200 201 230 231 215 #> #> [[217]] #> [1] 201 216 218 231 202 232 #> #> [[218]] #> [1] 202 217 219 232 203 233 #> #> [[219]] #> [1] 203 218 220 233 204 234 #> #> [[220]] #> [1] 204 205 219 234 235 221 #> #> [[221]] #> [1] 206 236 205 235 220 222 #> #> [[222]] #> [1] 223 207 237 206 236 221 #> #> [[223]] #> [1] 222 207 208 237 238 224 #> #> [[224]] #> [1] 225 209 239 208 238 223 #> #> [[225]] #> [1] 224 209 210 239 240 195 208 238 #> #> [[226]] #> [1] 227 212 211 196 213 197 228 #> #> [[227]] #> [1] 226 212 213 228 211 197 214 #> #> [[228]] #> [1] 229 213 214 227 215 198 212 #> #> [[229]] #> [1] 228 214 215 230 213 199 #> #> [[230]] #> [1] 215 216 229 231 200 214 217 #> #> [[231]] #> [1] 217 232 216 230 218 201 #> #> [[232]] #> [1] 218 231 233 217 219 202 216 #> #> [[233]] #> [1] 219 232 234 218 220 203 217 #> #> [[234]] #> [1] 235 220 233 219 204 218 221 #> #> [[235]] #> [1] 234 220 221 236 219 205 #> #> [[236]] #> [1] 221 222 235 237 206 220 223 #> #> [[237]] #> [1] 238 222 223 236 207 221 224 #> #> [[238]] #> [1] 237 223 224 239 222 208 225 #> #> [[239]] #> [1] 240 224 225 238 209 223 #> #> [[240]] #> [1] 239 225 224 210 209 238 #>"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"Computes fuzzy-hard versions pair-sorting partition metrics compare hard clustering fuzzy hard truth. especially designed cases fuzzy truth represents uncertainty hard truth. Briefly put, maximum pair concordance clustering either hard fuzzy truth used, hard truth used compute completeness. See fuzzyPartitionMetrics standard implementation metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"","code":"fuzzyHardMetrics( hardTrue, fuzzyTrue, hardPred, nperms = NULL, returnElementPairAccuracy = FALSE, verbose = TRUE, BPPARAM = BiocParallel::SerialParam() )"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"hardTrue atomic vector coercible factor integer vector containing true hard labels. Must length hardPred. fuzzyTrue object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows length hardTrue. Also note columns fuzzyTrue order levels (integer values) hardTrue. hardPred atomic vector coercible factor integer vector containing predicted hard labels. nperms number permutations (correction chance). NULL (default), first set 10 permutations run estimate whether variation across permutations 0.0025, case (max 1000) permutations run. 
returnElementPairAccuracy Logical. TRUE, returns per-element pair accuracy instead various partition-level dataset-level metrics. Default FALSE. verbose Logical; whether print info warnings, including standard error mean across permutations (giving idea precision adjusted metrics). BPPARAM BiocParallel params multithreading (default none)","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"list metrics: NDC Hullermeier's NDC (fuzzy rand index) ACI Ambrosio's Adjusted Concordance Index (ACI), .e. permutation-based fuzzy version adjusted Rand index. fuzzyWH Fuzzy Wallace Homogeneity index fuzzyWC Fuzzy Wallace Completeness index fuzzyAWH Adjusted fuzzy Wallace Homogeneity index fuzzyAWC Adjusted fuzzy Wallace Completeness index","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"Hullermeier et al. 2012; 10.1109/TFUZZ.2011.2179303; D'Ambrosio et al. 2021; 10.1007/s00357-020-09367-0","code":""},{"path":[]},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":"author","dir":"Reference","previous_headings":"","what":"Author","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"Pierre-Luc Germain","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute fuzzy-hard versions of pair-sorting partition metrics — fuzzyHardMetrics","text":"","code":"# generate a fuzzy truth: fuzzyTrue <- matrix(c( 0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) # a hard truth: hardTrue <- apply(fuzzyTrue,1,FUN=which.max) # some predicted labels: hardPred <- c(1,1,1,1,1,1,2,2,2) fuzzyHardMetrics(hardTrue, fuzzyTrue, hardPred, nperms=3) #> Standard error of the mean NDC across permutations:0.0352 #> You might want to increase the number of permutations to increase the robustness of the adjusted metrics. 
#> $NDC #> [1] 0.7581944 #> #> $ACI #> [1] 0.5394992 #> #> $fuzzyWH #> $fuzzyWH$global #> [1] 0.8436111 #> #> $fuzzyWH$perPartition #> 1 2 #> 1.00000000 0.06166667 #> #> #> $fuzzyWC #> $fuzzyWC$global #> [1] 0.7322727 #> #> $fuzzyWC$perPartition #> 1 2 3 #> 0.7195238 1.0000000 1.0000000 #> #> #> $fuzzyAWH #> $fuzzyAWH$global #> [1] 0.6403322 #> #> $fuzzyAWH$perPartition #> 1 2 #> 1.000000 -3.456464 #> #> #> $fuzzyAWC #> $fuzzyAWC$global #> [1] 0.4682516 #> #> $fuzzyAWC$perPartition #> 1 2 3 #> 0.4682516 NaN NaN #> #>"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics2.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute fuzzy-hard metrics with lower memory requirement — fuzzyHardMetrics2","title":"Compute fuzzy-hard metrics with lower memory requirement — fuzzyHardMetrics2","text":"slightly slower, low-memory version fuzzyHardMetrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics2.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute fuzzy-hard metrics with lower memory requirement — fuzzyHardMetrics2","text":"","code":"fuzzyHardMetrics2( hardTrue, fuzzyTrue, hardPred, nperms = 10, returnElementPairAccuracy = FALSE, verbose = TRUE, BPPARAM = BiocParallel::SerialParam() )"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics2.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute fuzzy-hard metrics with lower memory requirement — fuzzyHardMetrics2","text":"hardTrue atomic vector coercible factor integer vector containing true hard labels. Must length hardPred. fuzzyTrue object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows length hardTrue. Also note columns fuzzyTrue order levels (integer values) hardTrue. hardPred atomic vector coercible factor integer vector containing predicted hard labels. nperms number permutations (correction chance). NULL (default), first set 10 permutations run estimate whether variation across permutations 0.0025, case (max 1000) permutations run. returnElementPairAccuracy Logical. TRUE, returns per-element pair accuracy instead various parition-level dataset-level metrics. Default FALSE. verbose Logical; whether print info warnings, including standard error mean across permutations (giving idea precision adjusted metrics). BPPARAM BiocParallel params multithreading (default none)","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics2.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute fuzzy-hard metrics with lower memory requirement — fuzzyHardMetrics2","text":"list metrics: NDC Hullermeier's NDC (fuzzy rand index) ACI Ambrosio's Adjusted Concordance Index (ACI), .e. permutation-based fuzzy version adjusted Rand index. fuzzyWH Fuzzy Wallace Homogeneity index fuzzyWC Fuzzy Wallace Completeness index fuzzyAWH Adjusted fuzzy Wallace Homogeneity index fuzzyAWC Adjusted fuzzy Wallace Completeness index","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics2.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Compute fuzzy-hard metrics with lower memory requirement — fuzzyHardMetrics2","text":"Hullermeier et al. 2012; 10.1109/TFUZZ.2011.2179303; D'Ambrosio et al. 
2021; 10.1007/s00357-020-09367-0","code":""},{"path":[]},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics2.html","id":"author","dir":"Reference","previous_headings":"","what":"Author","title":"Compute fuzzy-hard metrics with lower memory requirement — fuzzyHardMetrics2","text":"Pierre-Luc Germain","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardMetrics2.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute fuzzy-hard metrics with lower memory requirement — fuzzyHardMetrics2","text":"","code":"# generate a fuzzy truth: fuzzyTrue <- matrix(c( 0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) # a hard truth: hardTrue <- apply(fuzzyTrue,1,FUN=which.max) # some predicted labels: hardPred <- c(1,1,1,1,1,1,2,2,2) fuzzyHardMetrics2(hardTrue, fuzzyTrue, hardPred, nperms=3) #> Standard error of the mean NDC across permutations:0.00016 #> $NDC #> [1] 0.7581944 #> #> $ACI #> [1] 0.4675841 #> #> $fuzzyWH #> $fuzzyWH$global #> [1] 0.7581944 #> #> $fuzzyWH$perPartition #> 1 2 #> 0.8772917 0.5200000 #> #> #> $fuzzyWC #> $fuzzyWC$global #> [1] 0.7647297 #> #> $fuzzyWC$perPartition #> 1 2 3 #> 0.7558929 0.7922222 0.7922222 #> #> #> $fuzzyAWH #> $fuzzyAWH$global #> [1] 0.4675841 #> #> $fuzzyAWH$perPartition #> 1 2 #> 0.6921871 0.1507556 #> #> #> $fuzzyAWC #> $fuzzyAWC$global #> [1] 0.4675841 #> #> $fuzzyAWC$perPartition #> 1 2 3 #> 0.4763455 0.3407756 0.5024390 #> #>"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotAgreement.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element maximal agreement between a hard and a fuzzy partition — fuzzyHardSpotAgreement","title":"Per-element maximal agreement between a hard and a fuzzy partition — fuzzyHardSpotAgreement","text":"Per-element maximal agreement hard clustering hard fuzzy ground truth labels.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotAgreement.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element maximal agreement between a hard and a fuzzy partition — fuzzyHardSpotAgreement","text":"","code":"fuzzyHardSpotAgreement( hardTrue, fuzzyTrue, hardPred, useNegatives = TRUE, verbose = TRUE )"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotAgreement.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element maximal agreement between a hard and a fuzzy partition — fuzzyHardSpotAgreement","text":"hardTrue vector true cluster labels fuzzyTrue object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows length hardTrue. hardPred vector predicted cluster labels useNegatives Logical; whether include negative pairs agreement score (tends result larger overall agreement lower dynamic range score). Default TRUE. 
verbose Logical; whether print expected memory usage large datasets.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotAgreement.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element maximal agreement between a hard and a fuzzy partition — fuzzyHardSpotAgreement","text":"numeric vector agreement scores element hardPred","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotAgreement.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Per-element maximal agreement between a hard and a fuzzy partition — fuzzyHardSpotAgreement","text":"","code":"# generate a fuzzy truth: fuzzyTrue <- matrix(c( 0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) # a hard truth: hardTrue <- apply(fuzzyTrue,1,FUN=which.max) # some predicted labels: hardPred <- c(1,1,1,1,1,1,2,2,2) fuzzyHardSpotAgreement(hardTrue, fuzzyTrue, hardPred) #> 1 2 3 4 5 6 7 8 9 #> 0.87750 0.87625 0.87625 0.87750 0.87875 0.87750 0.76625 0.02750 0.76625"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotConcordance.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element maximal concordance between a hard and a fuzzy partition — fuzzyHardSpotConcordance","title":"Per-element maximal concordance between a hard and a fuzzy partition — fuzzyHardSpotConcordance","text":"Per-element maximal concordance hard clustering hard fuzzy ground truth labels.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotConcordance.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element maximal concordance between a hard and a fuzzy partition — fuzzyHardSpotConcordance","text":"","code":"fuzzyHardSpotConcordance( hardTrue, fuzzyTrue, hardPred, useNegatives = TRUE, verbose = TRUE )"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotConcordance.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element maximal concordance between a hard and a fuzzy partition — fuzzyHardSpotConcordance","text":"hardTrue vector true cluster labels fuzzyTrue object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows length hardTrue. hardPred vector predicted cluster labels useNegatives Logical; whether include negative pairs concordance score (tends result larger overall concordance lower dynamic range score). Default TRUE. 
verbose Logical; whether print expected memory usage large datasets.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotConcordance.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element maximal concordance between a hard and a fuzzy partition — fuzzyHardSpotConcordance","text":"numeric vector concordance scores element hardPred","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyHardSpotConcordance.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Per-element maximal concordance between a hard and a fuzzy partition — fuzzyHardSpotConcordance","text":"","code":"# generate a fuzzy truth: fuzzyTrue <- matrix(c( 0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) # a hard truth: hardTrue <- apply(fuzzyTrue,1,FUN=which.max) # some predicted labels: hardPred <- c(1,1,1,1,1,1,2,2,2) fuzzyHardSpotConcordance(hardTrue, fuzzyTrue, hardPred) #> [1] 0.87750 0.87625 0.87625 0.87750 0.87875 0.87750 0.76625 0.02750 0.76625"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"Computes fuzzy versions pair-sorting partition metrics. largely based permutation-based implementation Antonio D'Ambrosio ConsRankClass package, modified also compute fuzzy versions adjusted Wallace indices, implement multithreading, adjust number permutations according variability.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"","code":"fuzzyPartitionMetrics( P, Q, computeWallace = TRUE, nperms = NULL, verbose = TRUE, returnElementPairAccuracy = FALSE, BPPARAM = BiocParallel::SerialParam(), tnorm = c(\"product\", \"min\", \"lukasiewicz\") )"},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"P object coercible numeric matrix membership probability elements (rows) ground-truth classes (columns). Q object coercible numeric matrix membership probability elements (rows) predicted clusters (columns). Must number rows P. computeWallace Logical; whether compute individual fuzzy versions Wallace indices (increases running time). nperms number permutations (correction chance). NULL (default), first set 10 permutations run estimate whether variation across permutations 0.0025, case (max 1000) permutations run. verbose Logical; whether print info warnings, including standard error mean across permutations (giving idea precision adjusted metrics). returnElementPairAccuracy Logical. TRUE, returns per-element pair accuracy instead various parition-level dataset-level metrics. Default FALSE. BPPARAM BiocParallel params multithreading (default none) tnorm type t-norm operation use class membership pairs (either product, min, lukasiewicz) calculating Wallace indices. 
influence NDC/ACI metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"returnElementPairAccuracy FALSE, return list metrics: NDC Hullermeier's NDC (fuzzy rand index) ACI Ambrosio's Adjusted Concordance Index (ACI), .e. permutation-based fuzzy version adjusted Rand index. fuzzyWH Fuzzy Wallace Homogeneity index fuzzyWC Fuzzy Wallace Completeness index fuzzyAWH Adjusted fuzzy Wallace Homogeneity index fuzzyAWC Adjusted fuzzy Wallace Completeness index","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"Hullermeier et al. 2012; 10.1109/TFUZZ.2011.2179303; D'Ambrosio et al. 2021; 10.1007/s00357-020-09367-0","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":"author","dir":"Reference","previous_headings":"","what":"Author","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"Pierre-Luc Germain","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute fuzzy-fuzzy versions of pair-sorting partition metrics — fuzzyPartitionMetrics","text":"","code":"# generate fuzzy partitions: m1 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) m2 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.025, 0.95, 0.025, 0.02, 0.96, 0.02, 0.01, 0.98, 0.01, 0.05, 0.05, 0.95, 0.02, 0.02, 0.96, 0.01, 0.01, 0.98), ncol = 3, byrow=TRUE) colnames(m1) <- colnames(m2) <- LETTERS[1:3] fuzzyPartitionMetrics(m1,m2) #> Running 100 extra permutations. 
#> Standard error of the mean NDC across permutations:0.00216 #> $NDC #> [1] 0.5338889 #> #> $ACI #> [1] 0.08249493 #> #> $fuzzyWH #> $fuzzyWH$global #> [1] 0.6761188 #> #> $fuzzyWH$perPartition #> A B C #> 0.9359492 0.9214151 0.1588990 #> #> #> $fuzzyWC #> $fuzzyWC$global #> [1] 0.3505049 #> #> $fuzzyWC$perPartition #> A B C #> 0.3445840 0.7242508 0.7520319 #> #> #> $fuzzyAWH #> $fuzzyAWH$global #> [1] 0.2110446 #> #> $fuzzyAWH$perPartition #> A B C #> 0.8450279 0.8102834 -1.0825844 #> #> #> $fuzzyAWC #> $fuzzyAWC$global #> [1] 0.04921198 #> #> $fuzzyAWC$perPartition #> A B C #> 0.04951609 -0.05389235 0.05126432 #> #>"},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotAgreement.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element agreement between two fuzzy partitions — fuzzySpotAgreement","title":"Per-element agreement between two fuzzy partitions — fuzzySpotAgreement","text":"Per-element agreement two fuzzy partitionings","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotAgreement.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element agreement between two fuzzy partitions — fuzzySpotAgreement","text":"","code":"fuzzySpotAgreement(P, Q)"},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotAgreement.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element agreement between two fuzzy partitions — fuzzySpotAgreement","text":"P object coercible numeric matrix membership probability elements (rows) clusters (columns) Q object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows P","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotAgreement.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element agreement between two fuzzy partitions — fuzzySpotAgreement","text":"numeric vector agreement scores row P.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotAgreement.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Per-element agreement between two fuzzy partitions — fuzzySpotAgreement","text":"","code":"# generate fuzzy partitions: m1 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) m2 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.025, 0.95, 0.025, 0.02, 0.96, 0.02, 0.01, 0.98, 0.01, 0.05, 0.05, 0.95, 0.02, 0.02, 0.96, 0.01, 0.01, 0.98), ncol = 3, byrow=TRUE) colnames(m1) <- colnames(m2) <- LETTERS[1:3] fuzzySpotAgreement(m1,m2) #> 1 2 3 4 5 6 7 8 #> 0.538750 0.527500 0.535000 0.536250 0.530000 0.526250 0.775625 0.075625 #> 9 #> 0.760000"},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotConcordance.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element concordance between two fuzzy partitions — fuzzySpotConcordance","title":"Per-element concordance between two fuzzy partitions — fuzzySpotConcordance","text":"Per-element concordance two fuzzy partitions","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotConcordance.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element concordance between two fuzzy partitions — fuzzySpotConcordance","text":"","code":"fuzzySpotConcordance(P, 
Q)"},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotConcordance.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element concordance between two fuzzy partitions — fuzzySpotConcordance","text":"P object coercible numeric matrix membership probability elements (rows) clusters (columns) Q object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows P","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotConcordance.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element concordance between two fuzzy partitions — fuzzySpotConcordance","text":"numeric vector concordance scores row P.","code":""},{"path":"https://roseyuan.github.io/poem/reference/fuzzySpotConcordance.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Per-element concordance between two fuzzy partitions — fuzzySpotConcordance","text":"","code":"# generate fuzzy partitions: m1 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) m2 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.025, 0.95, 0.025, 0.02, 0.96, 0.02, 0.01, 0.98, 0.01, 0.05, 0.05, 0.95, 0.02, 0.02, 0.96, 0.01, 0.01, 0.98), ncol = 3, byrow=TRUE) colnames(m1) <- colnames(m2) <- LETTERS[1:3] fuzzySpotConcordance(m1,m2) #> 1 2 3 4 5 6 7 8 #> 0.538750 0.527500 0.535000 0.536250 0.530000 0.526250 0.775625 0.075625 #> 9 #> 0.760000"},{"path":"https://roseyuan.github.io/poem/reference/getAgreement.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element agreement score — getAgreement","title":"Per-element agreement score — getAgreement","text":"Per-element agreement clustering ground truth","code":""},{"path":"https://roseyuan.github.io/poem/reference/getAgreement.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element agreement score — getAgreement","text":"","code":"getAgreement(true, pred, usePairs = TRUE, useNegatives = FALSE, adjust = FALSE)"},{"path":"https://roseyuan.github.io/poem/reference/getAgreement.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element agreement score — getAgreement","text":"true vector true class labels pred vector predicted clusters usePairs Logical; whether compute pairs instead elements Recommended TRUE default. useNegatives Logical; whether include consistency negative pairs score (default FALSE). adjust Logical; whether adjust chance. 
implemented useNegatives=FALSE (make sense element-level otherwise).","code":""},{"path":"https://roseyuan.github.io/poem/reference/getAgreement.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element agreement score — getAgreement","text":"vector agreement scores","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingClassMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getEmbeddingClassMetrics — getEmbeddingClassMetrics","title":"getEmbeddingClassMetrics — getEmbeddingClassMetrics","text":"Computes class-level, embedding-based metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingClassMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getEmbeddingClassMetrics — getEmbeddingClassMetrics","text":"","code":"getEmbeddingClassMetrics( x, labels, metrics = c(\"meanSW\", \"minSW\", \"pnSW\", \"dbcv\"), distance = \"euclidean\", ... )"},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingClassMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getEmbeddingClassMetrics — getEmbeddingClassMetrics","text":"x data.frame matrix (features columns items rows) metrics computed. labels vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. distance distance metric use (default euclidean). ... Optional arguments. See details.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingClassMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getEmbeddingClassMetrics — getEmbeddingClassMetrics","text":"data.frame metrics node/element x.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingElementMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getEmbeddingElementMetrics — getEmbeddingElementMetrics","title":"getEmbeddingElementMetrics — getEmbeddingElementMetrics","text":"Computes element-level, embedding-based metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingElementMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getEmbeddingElementMetrics — getEmbeddingElementMetrics","text":"","code":"getEmbeddingElementMetrics( x, labels, metrics = c(\"SW\"), distance = \"euclidean\", ... )"},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingElementMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getEmbeddingElementMetrics — getEmbeddingElementMetrics","text":"x data.frame matrix (features columns items rows) metrics computed. labels vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. Currently, silhouette width supported node-level. distance distance metric use (default euclidean). ... Optional arguments. 
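getAgreement is documented above without a runnable example. The following minimal sketch is not from the package documentation; the toy label vectors are invented for illustration and only arguments shown in the usage section are used:
library(poem)
# two invented toy partitions of six elements (illustrative values only)
true <- c(1, 1, 1, 2, 2, 2)
pred <- c(1, 1, 2, 2, 2, 2)
# per-element, pair-based agreement scores (negative pairs excluded by default)
getAgreement(true, pred)
# chance-adjusted variant; per the arguments above, adjustment is only
# implemented when useNegatives=FALSE (the default)
getAgreement(true, pred, adjust = TRUE)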
See details.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingElementMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getEmbeddingElementMetrics — getEmbeddingElementMetrics","text":"data.frame metrics node/element x.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingGlobalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getEmbeddingGlobalMetrics — getEmbeddingGlobalMetrics","title":"getEmbeddingGlobalMetrics — getEmbeddingGlobalMetrics","text":"Computes dataset-level, embedding-based metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingGlobalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getEmbeddingGlobalMetrics — getEmbeddingGlobalMetrics","text":"","code":"getEmbeddingGlobalMetrics( x, labels, metrics = c(\"meanSW\", \"meanClassSW\", \"pnSW\", \"minClassSW\", \"cdbw\", \"cohesion\", \"compactness\", \"sep\", \"dbcv\"), distance = \"euclidean\", ... )"},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingGlobalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getEmbeddingGlobalMetrics — getEmbeddingGlobalMetrics","text":"x data.frame matrix (features columns items rows) metrics computed. labels vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. distance distance metric use (default euclidean). ... Optional arguments. See details.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingGlobalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getEmbeddingGlobalMetrics — getEmbeddingGlobalMetrics","text":"data.frame (1 row) metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute embedding-based metrics — getEmbeddingMetrics","title":"Compute embedding-based metrics — getEmbeddingMetrics","text":"Computes embedding-based metrics specified level.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute embedding-based metrics — getEmbeddingMetrics","text":"","code":"getEmbeddingMetrics( x, labels, metrics = c(\"meanSW\", \"minSW\", \"pnSW\", \"dbcv\"), distance = \"euclidean\", level = \"class\", ... )"},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute embedding-based metrics — getEmbeddingMetrics","text":"x data.frame matrix (features columns items rows) metrics computed. labels vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. See details. distance distance metric use (default euclidean). level level calculate metrics. Options include \"element\", \"class\" \"dataset\". ... Optional arguments. 
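The level-specific wrappers getEmbeddingElementMetrics, getEmbeddingClassMetrics and getEmbeddingGlobalMetrics are documented above without examples. A minimal sketch, reusing the mockData() toy dataset from the package's other examples (the metric selections are illustrative, taken from the allowed values listed in the getEmbeddingMetrics details):
library(poem)
d1 <- mockData()
# element level: silhouette width of each element in the 2D embedding
getEmbeddingElementMetrics(d1[, 1:2], labels = d1$class, metrics = "SW")
# class level: per-class summaries of the silhouette widths
getEmbeddingClassMetrics(d1[, 1:2], labels = d1$class, metrics = c("meanSW", "minSW"))
# dataset level: a single row of global metrics
getEmbeddingGlobalMetrics(d1[, 1:2], labels = d1$class, metrics = c("meanSW", "pnSW"))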
See details.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute embedding-based metrics — getEmbeddingMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingMetrics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Compute embedding-based metrics — getEmbeddingMetrics","text":"allowed values metrics depend value level: level = \"element\", allowed metrics : \"SW\". level = \"class\", allowed metrics : \"meanSW\", \"minSW\", \"pnSW\", \"dbcv\". level = \"dataset\", allowed metrics : \"meanSW\", \"meanClassSW\", \"pnSW\", \"minClassSW\", \"cdbw\", \"cohesion\", \"compactness\", \"sep\", \"dbcv\". function(s) optional arguments ... passed depend value level: level = \"element\", optional arguments passed stats::dist(). level = \"class\", optional arguments passed dbcv(). level = \"dataset\", optional arguments passed dbcv() CDbw().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getEmbeddingMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute embedding-based metrics — getEmbeddingMetrics","text":"","code":"d1 <- mockData() getEmbeddingMetrics(d1[,1:2], labels=d1$class, metrics=c(\"meanSW\", \"minSW\", \"pnSW\", \"dbcv\"), level=\"class\") #> class meanSW minSW pnSW dbcv #> 1 A 0.3750742 -0.1625053 0.1600000 0.006068187 #> 2 B 0.2784612 -0.3653234 0.1333333 -0.270849655"},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyLabel.html","id":null,"dir":"Reference","previous_headings":"","what":"Get fuzzy representation of labels — getFuzzyLabel","title":"Get fuzzy representation of labels — getFuzzyLabel","text":"Get fuzzy representation labels according spatial neighborhood label composition.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyLabel.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get fuzzy representation of labels — getFuzzyLabel","text":"","code":"getFuzzyLabel(labels, location, k = 6, alpha = 0.5, ...)"},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyLabel.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get fuzzy representation of labels — getFuzzyLabel","text":"labels atomic vector cluster labels location matrix data.frame coordinates k wished number nearest neighbors alpha parameter control extent spot contribute class composition calculation. \"equal\" means weighted neighbors. numeric value 0 1 means weight frequency contribution spot, frequency contribution knn 1-alpha. default 0.5. ... 
Passed findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyLabel.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Get fuzzy representation of labels — getFuzzyLabel","text":"matrix fuzzy memberships.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyLabel.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Get fuzzy representation of labels — getFuzzyLabel","text":"","code":"data(sp_toys) data <- sp_toys getFuzzyLabel(data$label, data[,c(\"x\", \"y\")], k=6) #> 1 2 #> 1 0.0000000 1.0000000 #> 2 0.0000000 1.0000000 #> 3 0.0000000 1.0000000 #> 4 0.1428571 0.8571429 #> 5 0.6666667 0.3333333 #> 6 1.0000000 0.0000000 #> 7 1.0000000 0.0000000 #> 8 1.0000000 0.0000000 #> 9 1.0000000 0.0000000 #> 10 1.0000000 0.0000000 #> 11 1.0000000 0.0000000 #> 12 1.0000000 0.0000000 #> 13 1.0000000 0.0000000 #> 14 1.0000000 0.0000000 #> 15 1.0000000 0.0000000 #> 16 0.0000000 1.0000000 #> 17 0.0000000 1.0000000 #> 18 0.0000000 1.0000000 #> 19 0.1666667 0.8333333 #> 20 0.8333333 0.1666667 #> 21 1.0000000 0.0000000 #> 22 1.0000000 0.0000000 #> 23 1.0000000 0.0000000 #> 24 1.0000000 0.0000000 #> 25 1.0000000 0.0000000 #> 26 1.0000000 0.0000000 #> 27 1.0000000 0.0000000 #> 28 1.0000000 0.0000000 #> 29 1.0000000 0.0000000 #> 30 1.0000000 0.0000000 #> 31 0.0000000 1.0000000 #> 32 0.0000000 1.0000000 #> 33 0.0000000 1.0000000 #> 34 0.0000000 1.0000000 #> 35 0.1666667 0.8333333 #> 36 0.8333333 0.1666667 #> 37 1.0000000 0.0000000 #> 38 1.0000000 0.0000000 #> 39 1.0000000 0.0000000 #> 40 1.0000000 0.0000000 #> 41 1.0000000 0.0000000 #> 42 1.0000000 0.0000000 #> 43 1.0000000 0.0000000 #> 44 1.0000000 0.0000000 #> 45 1.0000000 0.0000000 #> 46 0.0000000 1.0000000 #> 47 0.0000000 1.0000000 #> 48 0.0000000 1.0000000 #> 49 0.0000000 1.0000000 #> 50 0.1666667 0.8333333 #> 51 0.8333333 0.1666667 #> 52 1.0000000 0.0000000 #> 53 1.0000000 0.0000000 #> 54 1.0000000 0.0000000 #> 55 1.0000000 0.0000000 #> 56 1.0000000 0.0000000 #> 57 1.0000000 0.0000000 #> 58 1.0000000 0.0000000 #> 59 1.0000000 0.0000000 #> 60 1.0000000 0.0000000 #> 61 0.0000000 1.0000000 #> 62 0.0000000 1.0000000 #> 63 0.0000000 1.0000000 #> 64 0.0000000 1.0000000 #> 65 0.0000000 1.0000000 #> 66 0.1666667 0.8333333 #> 67 0.8333333 0.1666667 #> 68 1.0000000 0.0000000 #> 69 1.0000000 0.0000000 #> 70 1.0000000 0.0000000 #> 71 1.0000000 0.0000000 #> 72 1.0000000 0.0000000 #> 73 1.0000000 0.0000000 #> 74 1.0000000 0.0000000 #> 75 1.0000000 0.0000000 #> 76 0.0000000 1.0000000 #> 77 0.0000000 1.0000000 #> 78 0.0000000 1.0000000 #> 79 0.0000000 1.0000000 #> 80 0.0000000 1.0000000 #> 81 0.1666667 0.8333333 #> 82 0.8333333 0.1666667 #> 83 1.0000000 0.0000000 #> 84 1.0000000 0.0000000 #> 85 1.0000000 0.0000000 #> 86 1.0000000 0.0000000 #> 87 1.0000000 0.0000000 #> 88 1.0000000 0.0000000 #> 89 1.0000000 0.0000000 #> 90 1.0000000 0.0000000 #> 91 0.0000000 1.0000000 #> 92 0.0000000 1.0000000 #> 93 0.0000000 1.0000000 #> 94 0.0000000 1.0000000 #> 95 0.0000000 1.0000000 #> 96 0.0000000 1.0000000 #> 97 0.1666667 0.8333333 #> 98 0.8333333 0.1666667 #> 99 1.0000000 0.0000000 #> 100 1.0000000 0.0000000 #> 101 1.0000000 0.0000000 #> 102 1.0000000 0.0000000 #> 103 1.0000000 0.0000000 #> 104 1.0000000 0.0000000 #> 105 1.0000000 0.0000000 #> 106 0.0000000 1.0000000 #> 107 0.0000000 1.0000000 #> 108 0.0000000 1.0000000 #> 109 0.0000000 1.0000000 #> 110 0.0000000 1.0000000 #> 111 0.0000000 1.0000000 #> 112 0.1666667 0.8333333 #> 113 0.8333333 0.1666667 #> 114 
1.0000000 0.0000000 #> 115 1.0000000 0.0000000 #> 116 1.0000000 0.0000000 #> 117 1.0000000 0.0000000 #> 118 1.0000000 0.0000000 #> 119 1.0000000 0.0000000 #> 120 1.0000000 0.0000000 #> 121 0.0000000 1.0000000 #> 122 0.0000000 1.0000000 #> 123 0.0000000 1.0000000 #> 124 0.0000000 1.0000000 #> 125 0.0000000 1.0000000 #> 126 0.0000000 1.0000000 #> 127 0.0000000 1.0000000 #> 128 0.1666667 0.8333333 #> 129 0.8333333 0.1666667 #> 130 1.0000000 0.0000000 #> 131 1.0000000 0.0000000 #> 132 1.0000000 0.0000000 #> 133 1.0000000 0.0000000 #> 134 1.0000000 0.0000000 #> 135 1.0000000 0.0000000 #> 136 0.0000000 1.0000000 #> 137 0.0000000 1.0000000 #> 138 0.0000000 1.0000000 #> 139 0.0000000 1.0000000 #> 140 0.0000000 1.0000000 #> 141 0.0000000 1.0000000 #> 142 0.0000000 1.0000000 #> 143 0.1666667 0.8333333 #> 144 0.8333333 0.1666667 #> 145 1.0000000 0.0000000 #> 146 1.0000000 0.0000000 #> 147 1.0000000 0.0000000 #> 148 1.0000000 0.0000000 #> 149 1.0000000 0.0000000 #> 150 1.0000000 0.0000000 #> 151 0.0000000 1.0000000 #> 152 0.0000000 1.0000000 #> 153 0.0000000 1.0000000 #> 154 0.0000000 1.0000000 #> 155 0.0000000 1.0000000 #> 156 0.0000000 1.0000000 #> 157 0.0000000 1.0000000 #> 158 0.0000000 1.0000000 #> 159 0.1666667 0.8333333 #> 160 0.8333333 0.1666667 #> 161 1.0000000 0.0000000 #> 162 1.0000000 0.0000000 #> 163 1.0000000 0.0000000 #> 164 1.0000000 0.0000000 #> 165 1.0000000 0.0000000 #> 166 0.0000000 1.0000000 #> 167 0.0000000 1.0000000 #> 168 0.0000000 1.0000000 #> 169 0.0000000 1.0000000 #> 170 0.0000000 1.0000000 #> 171 0.0000000 1.0000000 #> 172 0.0000000 1.0000000 #> 173 0.0000000 1.0000000 #> 174 0.1666667 0.8333333 #> 175 0.8333333 0.1666667 #> 176 1.0000000 0.0000000 #> 177 1.0000000 0.0000000 #> 178 1.0000000 0.0000000 #> 179 1.0000000 0.0000000 #> 180 1.0000000 0.0000000 #> 181 0.0000000 1.0000000 #> 182 0.0000000 1.0000000 #> 183 0.0000000 1.0000000 #> 184 0.0000000 1.0000000 #> 185 0.0000000 1.0000000 #> 186 0.0000000 1.0000000 #> 187 0.0000000 1.0000000 #> 188 0.0000000 1.0000000 #> 189 0.0000000 1.0000000 #> 190 0.1666667 0.8333333 #> 191 0.8333333 0.1666667 #> 192 1.0000000 0.0000000 #> 193 1.0000000 0.0000000 #> 194 1.0000000 0.0000000 #> 195 1.0000000 0.0000000 #> 196 0.0000000 1.0000000 #> 197 0.0000000 1.0000000 #> 198 0.0000000 1.0000000 #> 199 0.0000000 1.0000000 #> 200 0.0000000 1.0000000 #> 201 0.0000000 1.0000000 #> 202 0.0000000 1.0000000 #> 203 0.0000000 1.0000000 #> 204 0.0000000 1.0000000 #> 205 0.1666667 0.8333333 #> 206 0.8333333 0.1666667 #> 207 1.0000000 0.0000000 #> 208 1.0000000 0.0000000 #> 209 1.0000000 0.0000000 #> 210 1.0000000 0.0000000 #> 211 0.0000000 1.0000000 #> 212 0.0000000 1.0000000 #> 213 0.0000000 1.0000000 #> 214 0.0000000 1.0000000 #> 215 0.0000000 1.0000000 #> 216 0.0000000 1.0000000 #> 217 0.0000000 1.0000000 #> 218 0.0000000 1.0000000 #> 219 0.0000000 1.0000000 #> 220 0.0000000 1.0000000 #> 221 0.1666667 0.8333333 #> 222 0.8333333 0.1666667 #> 223 1.0000000 0.0000000 #> 224 1.0000000 0.0000000 #> 225 1.0000000 0.0000000 #> 226 0.0000000 1.0000000 #> 227 0.0000000 1.0000000 #> 228 0.0000000 1.0000000 #> 229 0.0000000 1.0000000 #> 230 0.0000000 1.0000000 #> 231 0.0000000 1.0000000 #> 232 0.0000000 1.0000000 #> 233 0.0000000 1.0000000 #> 234 0.0000000 1.0000000 #> 235 0.0000000 1.0000000 #> 236 0.2857143 0.7142857 #> 237 0.8571429 0.1428571 #> 238 1.0000000 0.0000000 #> 239 1.0000000 0.0000000 #> 240 1.0000000 
0.0000000"},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionElementMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getFuzzyPartitionElementMetrics — getFuzzyPartitionElementMetrics","title":"getFuzzyPartitionElementMetrics — getFuzzyPartitionElementMetrics","text":"Computes selection external fuzzy clustering evaluation metrics element level.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionElementMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getFuzzyPartitionElementMetrics — getFuzzyPartitionElementMetrics","text":"","code":"getFuzzyPartitionElementMetrics( hardTrue = NULL, fuzzyTrue = NULL, hardPred = NULL, fuzzyPred = NULL, fuzzy_true = TRUE, fuzzy_pred = FALSE, metrics = c(\"fuzzySPC\"), useNegatives = TRUE, verbose = TRUE, usePairs = TRUE )"},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionElementMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getFuzzyPartitionElementMetrics — getFuzzyPartitionElementMetrics","text":"hardTrue vector true cluster labels fuzzyTrue object coercible numeric matrix membership probability elements (rows) clusters (columns). Must number rows length hardTrue. hardPred vector predicted cluster labels fuzzyPred object coercible numeric matrix membership probability elements (rows) clusters (columns). fuzzy_true Logical; whether truth fuzzy. fuzzy_pred Logical; whether prediction fuzzy. metrics metrics compute. Currently \"fuzzySPC\" included element level. useNegatives Logical; whether include negative pairs concordance score (tends result larger overall concordance lower dynamic range score). Default TRUE. verbose Logical; whether print expected memory usage large datasets. usePairs Logical; whether compute pairs instead elements. useful fuzzy_true=TRUE fuzzy_pred=FALSE.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionElementMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getFuzzyPartitionElementMetrics — getFuzzyPartitionElementMetrics","text":"dataframe metric values.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","title":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","text":"Computes selection external fuzzy clustering evaluation metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","text":"","code":"getFuzzyPartitionMetrics( hardTrue = NULL, fuzzyTrue = NULL, hardPred = NULL, fuzzyPred = NULL, metrics = c(\"fuzzyWH\", \"fuzzyAWH\", \"fuzzyWC\", \"fuzzyAWC\"), level = \"class\", nperms = NULL, verbose = TRUE, returnElementPairAccuracy = FALSE, BPPARAM = BiocParallel::SerialParam(), useNegatives = TRUE, usePairs = NULL, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","text":"hardTrue atomic vector coercible factor integer vector containing true hard labels. 
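getFuzzyPartitionElementMetrics is documented above without an example. A minimal sketch, reusing the fuzzy truth and hard prediction from the fuzzyHardMetrics example (the argument values are illustrative):
library(poem)
# fuzzy truth over 3 classes for 9 elements, as in the fuzzyHardMetrics example
fuzzyTrue <- matrix(c(0.95, 0.025, 0.025,  0.98, 0.01, 0.01,  0.96, 0.02, 0.02,
                      0.95, 0.04, 0.01,    0.95, 0.01, 0.04,  0.99, 0.005, 0.005,
                      0.025, 0.95, 0.025,  0.97, 0.02, 0.01,  0.025, 0.025, 0.95),
                    ncol = 3, byrow = TRUE)
hardTrue <- apply(fuzzyTrue, 1, FUN = which.max)
hardPred <- c(1, 1, 1, 1, 1, 1, 2, 2, 2)
# element-level fuzzy-hard concordance (fuzzySPC), one score per element
getFuzzyPartitionElementMetrics(hardTrue = hardTrue, fuzzyTrue = fuzzyTrue,
                                hardPred = hardPred, fuzzy_true = TRUE,
                                fuzzy_pred = FALSE, metrics = "fuzzySPC")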
fuzzyTrue object coercible numeric matrix membership probability elements (rows) clusters (columns). hardPred atomic vector coercible factor integer vector containing predicted hard labels. fuzzyPred object coercible numeric matrix membership probability elements (rows) clusters (columns). metrics metrics compute. See details. level level calculate metrics. Options include \"element\", \"class\" \"dataset\". nperms number permutations (correction chance). NULL (default), first set 10 permutations run estimate whether variation across permutations 0.0025, case (max 1000) permutations run. verbose Logical; whether print info warnings, including standard error mean across permutations (giving idea precision adjusted metrics). returnElementPairAccuracy Logical. TRUE, returns per-element pair accuracy instead various parition-level dataset-level metrics. Default FALSE. BPPARAM BiocParallel params multithreading (default none) useNegatives Logical; whether include negative pairs concordance score (tends result larger overall concordance lower dynamic range score). Default TRUE. usePairs Logical; whether compute pairs instead elements Recommended TRUE default. ... Optional arguments poem::FuzzyPartitionMetrics(): tnorm. useful fuzzy_true=TRUE fuzzy_pred=TRUE.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","text":"dataframe metric results.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionMetrics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","text":"allowed values metrics depend value level: level = \"element\", allowed metrics : \"fuzzySPC\". level = \"class\", allowed metrics : \"fuzzyWH\", \"fuzzyAWH\", \"fuzzyWC\", \"fuzzyAWC\". level = \"dataset\", allowed metrics : \"fuzzyRI\", \"fuzzyARI\", \"fuzzyWH\", \"fuzzyAWH\", \"fuzzyWC\", \"fuzzyAWC\".","code":""},{"path":"https://roseyuan.github.io/poem/reference/getFuzzyPartitionMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute external metrics for fuzzy clusterings — getFuzzyPartitionMetrics","text":"","code":"# generate fuzzy partitions: m1 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) m2 <- matrix(c(0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.025, 0.95, 0.025, 0.02, 0.96, 0.02, 0.01, 0.98, 0.01, 0.05, 0.05, 0.95, 0.02, 0.02, 0.96, 0.01, 0.01, 0.98), ncol = 3, byrow=TRUE) colnames(m1) <- colnames(m2) <- LETTERS[1:3] getFuzzyPartitionMetrics(fuzzyTrue=m1,fuzzyPred=m2, level=\"class\") #> Comparing between a fuzzy truth and a fuzzy prediction... #> Running 100 extra permutations. 
#> Standard error of the mean NDC across permutations:0.00239 #> fuzzyWC fuzzyAWC class fuzzyWH fuzzyAWH cluster #> 1 0.3445840 0.04564740 1 NA NA NA #> 2 0.7242508 -0.08932182 2 NA NA NA #> 3 0.7520319 0.03572075 3 NA NA NA #> 4 NA NA NA 0.9359492 0.8194385 1 #> 5 NA NA NA 0.9214151 0.7916720 2 #> 6 NA NA NA 0.1588990 -0.7970378 3 # generate a fuzzy truth: fuzzyTrue <- matrix(c( 0.95, 0.025, 0.025, 0.98, 0.01, 0.01, 0.96, 0.02, 0.02, 0.95, 0.04, 0.01, 0.95, 0.01, 0.04, 0.99, 0.005, 0.005, 0.025, 0.95, 0.025, 0.97, 0.02, 0.01, 0.025, 0.025, 0.95), ncol = 3, byrow=TRUE) # a hard truth: hardTrue <- apply(fuzzyTrue,1,FUN=which.max) # some predicted labels: hardPred <- c(1,1,1,1,1,1,2,2,2) getFuzzyPartitionMetrics(hardPred=hardPred, hardTrue=hardTrue, fuzzyTrue=fuzzyTrue, nperms=3, level=\"class\") #> Comparing between a fuzzy truth and a hard prediction... #> Standard error of the mean NDC across permutations:0.000424 #> fuzzyWC fuzzyAWC class fuzzyWH fuzzyAWH cluster #> 1 0.7195238 0.4977974 1 NA NA NA #> 2 1.0000000 NaN 2 NA NA NA #> 3 1.0000000 NaN 3 NA NA NA #> 4 NA NA NA 1.00000000 1 1 #> 5 NA NA NA 0.06166667 -Inf 2"},{"path":"https://roseyuan.github.io/poem/reference/getGraphClassMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getGraphClassMetrics — getGraphClassMetrics","title":"getGraphClassMetrics — getGraphClassMetrics","text":"Computes selection supervised graph evaluation metrics using ground truth class labels. metrics reported (average) per class.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphClassMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getGraphClassMetrics — getGraphClassMetrics","text":"","code":"getGraphClassMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"AMSP\", \"PWC\", \"NCE\"), directed = NULL, ... ) # S4 method for class 'list' getGraphClassMetrics(x, labels, metrics, directed = NULL, k = NULL, ...) # S4 method for class 'data.frame' getGraphClassMetrics( x, labels, metrics, directed = NULL, k, shared = FALSE, ... ) # S4 method for class 'matrix' getGraphClassMetrics( x, labels, metrics, directed = NULL, k, shared = FALSE, ... ) # S4 method for class 'igraph' getGraphClassMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"AMSP\", \"PWC\", \"NCE\"), directed = NULL, ... ) # S4 method for class 'dist' getGraphClassMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"AMSP\", \"PWC\", \"NCE\"), directed = NULL, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getGraphClassMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getGraphClassMetrics — getGraphClassMetrics","text":"x Either igraph object, list nearest neighbors (see details ), data.frame matrix (features columns items rows) nearest neighbors computed. labels Either factor character vector indicating true class label element (.e. row vertex) x. metrics metrics compute. See details. directed Logical; whether compute metrics directed fashion. left NULL, conventional choices made per metric (adhesion, cohesion, PWC AMSP undirected, others directed). ... Optional arguments emb2knn() emb2snn(). k number nearest neighbors compute /use. Can omitted x graph list nearest neighbors. shared Logical; whether use shared nearest neighbor network instead nearest neighbor network. 
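getGraphClassMetrics and its per-element counterpart getGraphElementMetrics are documented here without examples. A minimal sketch on the same mockData() toy data used by getGraphMetrics (the metric selection and k = 10 are illustrative):
library(poem)
d1 <- mockData()
# class-level graph metrics from a kNN graph (k = 10) built on the 2D embedding
getGraphClassMetrics(d1[, 1:2], labels = d1$class,
                     metrics = c("SI", "NP", "PWC"), k = 10)
# element-level metrics on the same kind of graph
getGraphElementMetrics(d1[, 1:2], labels = d1$class,
                       metrics = c("SI", "NP"), k = 10)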
Ignored x embedding dist object.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphClassMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getGraphClassMetrics — getGraphClassMetrics","text":"data.frame metrics class.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphElementMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getGraphElementMetrics — getGraphElementMetrics","title":"getGraphElementMetrics — getGraphElementMetrics","text":"Computes selection supervised graph evaluation metrics using ground truth class labels. metrics reported (average) per node/element.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphElementMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getGraphElementMetrics — getGraphElementMetrics","text":"","code":"getGraphElementMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"NCE\"), directed = NULL, ... ) # S4 method for class 'list' getGraphElementMetrics(x, labels, metrics, directed = NULL, k = NULL, ...) # S4 method for class 'data.frame' getGraphElementMetrics( x, labels, metrics, directed = NULL, k, shared = FALSE, ... ) # S4 method for class 'matrix' getGraphElementMetrics( x, labels, metrics, directed = NULL, k, shared = FALSE, ... ) # S4 method for class 'igraph' getGraphElementMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"NCE\"), directed = NULL, ... ) # S4 method for class 'dist' getGraphElementMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"NCE\"), directed = NULL, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getGraphElementMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getGraphElementMetrics — getGraphElementMetrics","text":"x Either igraph object, list nearest neighbors (see details ), data.frame matrix (features columns items rows) nearest neighbors computed. labels Either factor character vector indicating true class label element (.e. row vertex) x. metrics metrics compute. See details. directed Logical; whether compute metrics directed fashion. left NULL, conventional choices made per metric (adhesion, cohesion, PWC AMSP undirected, others directed). ... Optional arguments emb2knn() emb2snn(). k number nearest neighbors compute /use. Can omitted x graph list nearest neighbors. shared Logical; whether use shared nearest neighbor network instead nearest neighbor network. Ignored x embedding dist object.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphElementMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getGraphElementMetrics — getGraphElementMetrics","text":"data.frame metrics node/element x.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute graph-based metrics — getGraphMetrics","title":"Compute graph-based metrics — getGraphMetrics","text":"Computes selection graph evaluation metrics using class labels.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute graph-based metrics — getGraphMetrics","text":"","code":"getGraphMetrics( x, labels, metrics = c(\"SI\", \"NP\", \"AMSP\", \"PWC\", \"NCE\"), directed = NULL, k = 10, shared = FALSE, level = \"class\", ... 
)"},{"path":"https://roseyuan.github.io/poem/reference/getGraphMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute graph-based metrics — getGraphMetrics","text":"x Either igraph object, list nearest neighbors (see details ), data.frame matrix (features columns items rows) nearest neighbors computed. labels Either factor character vector indicating true class label element (.e. row vertex) x. metrics metrics compute. See details. directed Logical; whether compute metrics directed fashion. left NULL, conventional choices made per metric (adhesion, cohesion, PWC AMSP undirected, others directed). k number nearest neighbors compute /use. Can omitted x graph list nearest neighbors. shared Logical; whether use shared nearest neighbor network instead nearest neighbor network. Ignored x embedding dist object. level level calculate metrics. Options include \"element\", \"class\" \"dataset\". ... Optional arguments emb2knn() emb2snn().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute graph-based metrics — getGraphMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphMetrics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Compute graph-based metrics — getGraphMetrics","text":"allowed values metrics depend value level: level = \"element\", allowed metrics : \"SI\",\"ISI\",\"NP\",\"NCE\" (see details). level = \"class\", allowed metrics : \"SI\": Simpson’s Index. \"ISI\": Inverse Simpson’s Index \"NP\": Neighborhood Purity \"AMSP\": Adjusted Mean Shortest Path \"PWC\": Proportion Weakly Connected \"NCE\": Neighborhood Class Enrichment \"adhesion\": adhesion graph, minumum number nodes must removed split graph. \"cohesion\": cohesion graph, minumum number edges must removed split graph. 
level = \"dataset\", allowed metrics : \"SI\",\"ISI\",\"NP\",\"AMSP\",\"PWC\",\"NCE\", \"adhesion\",\"cohesion\".","code":""},{"path":"https://roseyuan.github.io/poem/reference/getGraphMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute graph-based metrics — getGraphMetrics","text":"","code":"d1 <- mockData() getGraphMetrics(d1[,1:2], labels=d1$class, level=\"class\") #> class SI NP AMSP PWC NCE #> A A 0.7752000 0.7960000 0.1109333 0.16 0.2405360 #> B B 0.5853333 0.5133333 0.1752381 0.40 0.2216192"},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairAgreement.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element local agreement between a clustering and a ground truth — getNeighboringPairAgreement","title":"Per-element local agreement between a clustering and a ground truth — getNeighboringPairAgreement","text":"Per-element local agreement clustering ground truth","code":""},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairAgreement.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element local agreement between a clustering and a ground truth — getNeighboringPairAgreement","text":"","code":"getNeighboringPairAgreement( true, pred, location, k = 20L, useNegatives = FALSE, distWeights = TRUE, BNPARAM = NULL )"},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairAgreement.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element local agreement between a clustering and a ground truth — getNeighboringPairAgreement","text":"true vector true class labels pred vector predicted clusters location matrix data.frame spatial dimensions columns. Alternatively, nearest neighbor object produced findKNN. k Approximate number nearest neighbors consider useNegatives Logical; whether include consistency negative pairs score (default FALSE). distWeights Logical; whether weight agreement distance (default TRUE). BNPARAM BiocNeighbors parameter object compute kNNs. Ignored unless input matrix data.frame. 
omitted, Annoy approximation used 500 elements.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairAgreement.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element local agreement between a clustering and a ground truth — getNeighboringPairAgreement","text":"vector agreement scores","code":""},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairAgreement.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Per-element local agreement between a clustering and a ground truth — getNeighboringPairAgreement","text":"","code":"data(sp_toys) data <- sp_toys getNeighboringPairAgreement(data$label, data$p1, data[,c(\"x\", \"y\")], k=6) #> [1] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [8] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [15] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [22] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [29] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [36] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [43] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [50] 1.0000000 0.7500000 0.6666667 0.6666667 0.6666667 0.6666667 0.8333333 #> [57] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [64] 1.0000000 1.0000000 0.8000000 0.3333333 0.6666667 0.6666667 0.6666667 #> [71] 0.3333333 0.8333333 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [78] 1.0000000 1.0000000 1.0000000 0.6666667 0.6666667 1.0000000 1.0000000 #> [85] 0.8333333 0.5000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [92] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 0.6666667 #> [99] 1.0000000 1.0000000 0.5000000 0.8333333 1.0000000 1.0000000 1.0000000 #> [106] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 #> [113] 0.5000000 0.6666667 0.5000000 0.6666667 1.0000000 1.0000000 1.0000000 #> [120] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [127] 1.0000000 0.8000000 0.5000000 0.6666667 0.8333333 1.0000000 1.0000000 #> [134] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [141] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [148] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [155] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [162] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [169] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [176] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [183] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [190] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [197] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [204] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [211] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [218] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [225] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [232] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [239] 1.0000000 
1.0000000"},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairConcordance.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element local concordance between a clustering and a ground truth — getNeighboringPairConcordance","title":"Per-element local concordance between a clustering and a ground truth — getNeighboringPairConcordance","text":"Per-element local concordance clustering ground truth","code":""},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairConcordance.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element local concordance between a clustering and a ground truth — getNeighboringPairConcordance","text":"","code":"getNeighboringPairConcordance( true, pred, location, k = 20L, useNegatives = FALSE, distWeights = TRUE, BNPARAM = NULL )"},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairConcordance.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element local concordance between a clustering and a ground truth — getNeighboringPairConcordance","text":"true vector true class labels pred vector predicted clusters location matrix data.frame spatial dimensions columns. Alternatively, nearest neighbor object produced findKNN. k Approximate number nearest neighbors consider useNegatives Logical; whether include concordance negative pairs score (default FALSE). distWeights Logical; whether weight concordance distance (default TRUE). BNPARAM BiocNeighbors parameter object compute kNNs. Ignored unless input matrix data.frame. omitted, Annoy approximation used 500 elements.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairConcordance.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element local concordance between a clustering and a ground truth — getNeighboringPairConcordance","text":"vector concordance scores","code":""},{"path":"https://roseyuan.github.io/poem/reference/getNeighboringPairConcordance.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Per-element local concordance between a clustering and a ground truth — getNeighboringPairConcordance","text":"","code":"data(sp_toys) data <- sp_toys getNeighboringPairConcordance(data$label, data$p1, data[,c(\"x\", \"y\")], k=6) #> [1] 1.0000000 1.0000000 1.0000000 0.7500000 0.2500000 0.5000000 1.0000000 #> [8] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [15] 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 0.3333333 0.6666667 #> [22] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [29] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 #> [36] 0.3333333 0.6666667 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [43] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [50] 0.6666667 0.3333333 0.6666667 1.0000000 1.0000000 1.0000000 1.0000000 #> [57] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [64] 1.0000000 1.0000000 0.6666667 0.3333333 0.6666667 1.0000000 1.0000000 #> [71] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [78] 1.0000000 1.0000000 1.0000000 0.6666667 0.3333333 0.6666667 1.0000000 #> [85] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [92] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 0.3333333 #> [99] 0.6666667 1.0000000 1.0000000 1.0000000 1.0000000 
1.0000000 1.0000000 #> [106] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 #> [113] 0.3333333 0.6666667 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [120] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [127] 1.0000000 0.6666667 0.3333333 0.6666667 1.0000000 1.0000000 1.0000000 #> [134] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [141] 1.0000000 1.0000000 0.6666667 0.3333333 0.6666667 1.0000000 1.0000000 #> [148] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [155] 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 0.3333333 0.6666667 #> [162] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [169] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 0.6666667 0.3333333 #> [176] 0.6666667 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [183] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [190] 0.6666667 0.3333333 0.6666667 1.0000000 1.0000000 1.0000000 1.0000000 #> [197] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [204] 1.0000000 0.6666667 0.3333333 0.6666667 1.0000000 1.0000000 1.0000000 #> [211] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [218] 1.0000000 1.0000000 1.0000000 0.6666667 0.3333333 0.6666667 1.0000000 #> [225] 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 1.0000000 #> [232] 1.0000000 1.0000000 1.0000000 1.0000000 0.5000000 0.2500000 0.7500000 #> [239] 1.0000000 1.0000000"},{"path":"https://roseyuan.github.io/poem/reference/getPairConcordance.html","id":null,"dir":"Reference","previous_headings":"","what":"Per-element pair concordance score — getPairConcordance","title":"Per-element pair concordance score — getPairConcordance","text":"Per-element pair concordance clustering ground truth","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPairConcordance.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Per-element pair concordance score — getPairConcordance","text":"","code":"getPairConcordance( true, pred, usePairs = TRUE, useNegatives = FALSE, adjust = FALSE )"},{"path":"https://roseyuan.github.io/poem/reference/getPairConcordance.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Per-element pair concordance score — getPairConcordance","text":"true vector true class labels pred vector predicted clusters usePairs Logical; whether compute pairs instead elements Recommended TRUE default. useNegatives Logical; whether include consistency negative pairs score (default FALSE). adjust Logical; whether adjust chance. implemented useNegatives=FALSE (make sense element-level otherwise).","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPairConcordance.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Per-element pair concordance score — getPairConcordance","text":"vector concordance scores","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionClassMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getPartitionClassMetrics — getPartitionClassMetrics","title":"getPartitionClassMetrics — getPartitionClassMetrics","text":"Computes selection external evaluation metrics partition. 
metrics reported per class.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionClassMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getPartitionClassMetrics — getPartitionClassMetrics","text":"","code":"getPartitionClassMetrics( true, pred, metrics = c(\"WC\", \"WH\", \"AWC\", \"AWH\", \"FM\") )"},{"path":"https://roseyuan.github.io/poem/reference/getPartitionClassMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getPartitionClassMetrics — getPartitionClassMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. omitted, main metrics computed.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionClassMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getPartitionClassMetrics — getPartitionClassMetrics","text":"dataframe metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionElementMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getPartitionElementMetrics — getPartitionElementMetrics","title":"getPartitionElementMetrics — getPartitionElementMetrics","text":"Computes selection external evaluation metrics partition. metrics reported per element.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionElementMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getPartitionElementMetrics — getPartitionElementMetrics","text":"","code":"getPartitionElementMetrics( true, pred, metrics = c(\"ASPC\"), usePairs = TRUE, useNegatives = TRUE )"},{"path":"https://roseyuan.github.io/poem/reference/getPartitionElementMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getPartitionElementMetrics — getPartitionElementMetrics","text":"true vector true class labels pred vector predicted clusters metrics metrics compute. usePairs Logical; whether compute pairs instead elements Recommended TRUE default. useNegatives Logical; whether include consistency negative pairs score (default FALSE).","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionElementMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getPartitionElementMetrics — getPartitionElementMetrics","text":"dataframe metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionGlobalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getPartitionGlobalMetrics — getPartitionGlobalMetrics","title":"getPartitionGlobalMetrics — getPartitionGlobalMetrics","text":"Computes selection external evaluation metrics partition. metrics reported per dataset.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionGlobalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getPartitionGlobalMetrics — getPartitionGlobalMetrics","text":"","code":"getPartitionGlobalMetrics( true, pred, metrics = c(\"RI\", \"WC\", \"WH\", \"ARI\", \"NCR\", \"AWC\", \"AWH\", \"MI\", \"AMI\", \"VI\", \"EH\", \"EC\", \"VM\", \"FM\"), ... 
)"},{"path":"https://roseyuan.github.io/poem/reference/getPartitionGlobalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getPartitionGlobalMetrics — getPartitionGlobalMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. omitted, main metrics computed. See details. ... Optional arguments MI, VI, VM. See clevr::mutual_info(), clevr::variation_info() clevr::v_measure() details.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionGlobalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getPartitionGlobalMetrics — getPartitionGlobalMetrics","text":"dataframe metric results. Possible metrics : RI Rand Index WC Wallace Completeness WH Wallace Homogeneity ARI Adjusted Rand Index AWC Adjusted Wallace Completeness AWH Adjusted Wallace Homogeneity NCR Normalized class size Rand index MI Mutual Information AMI Adjusted Mutual Information VI Variation Information EH (Entropy-based) Homogeneity EC (Entropy-based) Completeness VM V-measure FM F-measure/weighted average F1 score VDM Van Dongen Measure MHM Meila-Heckerman Measure MMM Maximum-Match Measure Mirkin Mirkin Metric Accuracy Set Matching Accuracy","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute partition-based metrics — getPartitionMetrics","title":"Compute partition-based metrics — getPartitionMetrics","text":"Computes selection external evaluation metrics partition.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute partition-based metrics — getPartitionMetrics","text":"","code":"getPartitionMetrics( true, pred, metrics = c(\"WC\", \"WH\", \"AWC\", \"AWH\", \"FM\"), level = \"class\", ... )"},{"path":"https://roseyuan.github.io/poem/reference/getPartitionMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute partition-based metrics — getPartitionMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. metrics metrics compute. omitted, main metrics computed. See details. level level calculate metrics. Options include \"element\", \"class\" \"dataset\". ... Optional arguments MI, VI, VM. See clevr::mutual_info(), clevr::variation_info() clevr::v_measure() details.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute partition-based metrics — getPartitionMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionMetrics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Compute partition-based metrics — getPartitionMetrics","text":"allowed values metrics depend value level: level = \"element\", allowed metrics : \"SPC\": Spot-wise Pair Concordance. \"ASPC\": Adjusted Spot-wise Pair Concordance. level = \"class\", allowed metrics : \"WC\",\"WH\",\"AWC\",\"AWH\",\"FM\" (see details). 
level = \"dataset\", allowed metrics : \"RI\": Rand Index \"WC\": Wallace Completeness \"WH\": Wallace Homogeneity \"ARI\": Adjusted Rand Index \"AWC\": Adjusted Wallace Completeness \"AWH\": Adjusted Wallace Homogeneity \"NCR\": Normalized class size Rand index \"MI\": Mutual Information \"AMI\": Adjusted Mutual Information \"VI\": Variation Information \"EH\": (Entropy-based) Homogeneity \"EC\": (Entropy-based) Completeness \"VM\": V-measure \"FM\": F-measure/weighted average F1 score \"VDM\": Van Dongen Measure \"MHM\": Meila-Heckerman Measure \"MMM\": Maximum-Match Measure \"Mirkin\": Mirkin Metric \"Accuracy\": Set Matching Accuracy","code":""},{"path":"https://roseyuan.github.io/poem/reference/getPartitionMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute partition-based metrics — getPartitionMetrics","text":"","code":"true <- rep(LETTERS[1:3], each=10) pred <- c(rep(\"A\", 8), rep(\"B\", 9), rep(\"C\", 3), rep(\"D\", 10)) getPartitionMetrics(true, pred, level=\"class\") #> WC AWC FM class WH AWH cluster #> 1 0.6444444 0.5211558 0.8888889 A NA NA #> 2 0.5333333 0.3715170 0.7368421 B NA NA #> 3 1.0000000 1.0000000 1.0000000 C NA NA #> 4 NA NA NA 1.0000000 1.0000000 A #> 5 NA NA NA 0.6111111 0.4361111 B #> 6 NA NA NA 1.0000000 1.0000000 C #> 7 NA NA NA 1.0000000 1.0000000 D getPartitionMetrics(true, pred, level=\"dataset\") #> WC WH AWC AWH FM #> 1 0.7259259 0.875 0.630891 0.81875 0.8752437"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassExternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute class-level external evaluation metrics for spatially-resolved data — getSpatialClassExternalMetrics","title":"Compute class-level external evaluation metrics for spatially-resolved data — getSpatialClassExternalMetrics","text":"Computes selection external clustering evaluation metrics spatial data class/cluster level.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassExternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute class-level external evaluation metrics for spatially-resolved data — getSpatialClassExternalMetrics","text":"","code":"getSpatialClassExternalMetrics( true, pred, location, k = 6, alpha = 0.5, metrics = c(\"SpatialWH\", \"SpatialAWH\", \"SpatialWC\", \"SpatialAWC\"), fuzzy_true = TRUE, fuzzy_pred = FALSE, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassExternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute class-level external evaluation metrics for spatially-resolved data — getSpatialClassExternalMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. location matrix data.frame coordinates k number neighbors used calculating fuzzy class memberships fuzzy metrics. alpha parameter control extend spot contribute class composition calculation. \"equal\" means weighted neighbors. numeric value 0 1 means weight frequency contribution spot , frequency contribution knn 1-alpha. default 0.5. metrics vector metric names compute. fuzzy_true Logical; whether compute fuzzy class memberships true. fuzzy_pred Logical; whether compute fuzzy class memberships pred. ... 
Optional params poem::FuzzyPartitionMetrics() findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassExternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute class-level external evaluation metrics for spatially-resolved data — getSpatialClassExternalMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassInternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute class-level internal evaluation metrics for spatially-resolved data — getSpatialClassInternalMetrics","title":"Compute class-level internal evaluation metrics for spatially-resolved data — getSpatialClassInternalMetrics","text":"Computes selection internal clustering evaluation metrics spatial data class.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassInternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute class-level internal evaluation metrics for spatially-resolved data — getSpatialClassInternalMetrics","text":"","code":"getSpatialClassInternalMetrics( labels, location, k = 6, metrics = c(\"CHAOS\", \"PAS\", \"ELSA\"), ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassInternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute class-level internal evaluation metrics for spatially-resolved data — getSpatialClassInternalMetrics","text":"labels vector containing labels evaluated. location numerical matrix containing location information, rows samples columns location dimensions. k size spatial neighborhood look spot. used calculating PAS ELSA scores. metrics Possible metrics: \"CHAOS\", \"PAS\" \"ELSA\". ... Optional params PAS().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialClassInternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute class-level internal evaluation metrics for spatially-resolved data — getSpatialClassInternalMetrics","text":"dataframe metric values.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementExternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"getSpatialElementExternalMetrics — getSpatialElementExternalMetrics","title":"getSpatialElementExternalMetrics — getSpatialElementExternalMetrics","text":"Computes selection external clustering evaluation metrics spatial data element level.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementExternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"getSpatialElementExternalMetrics — getSpatialElementExternalMetrics","text":"","code":"getSpatialElementExternalMetrics( true, pred, location, k = 6, alpha = 0.5, metrics = c(\"SpatialSPC\", \"SpatialNPC\"), fuzzy_true = TRUE, fuzzy_pred = FALSE, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementExternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"getSpatialElementExternalMetrics — getSpatialElementExternalMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. 
location matrix data.frame coordinates k number neighbors used calculating fuzzy class memberships fuzzy metrics, calculating weighted accuracy. alpha parameter control extend spot contribute class composition calculation. \"equal\" means weighted neighbors. numeric value 0 1 means weight frequency contribution spot , frequency contribution knn 1-alpha. default 0.5. metrics vector metric names compute. fuzzy_true Logical; whether compute fuzzy class memberships true. fuzzy_pred Logical; whether compute fuzzy class memberships pred. ... Optional params getFuzzyPartitionElementMetrics() findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementExternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"getSpatialElementExternalMetrics — getSpatialElementExternalMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementInternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute spot-level internal evaluation metrics for spatially-resolved data — getSpatialElementInternalMetrics","title":"Compute spot-level internal evaluation metrics for spatially-resolved data — getSpatialElementInternalMetrics","text":"Computes selection internal clustering evaluation metrics spatial data spot level.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementInternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute spot-level internal evaluation metrics for spatially-resolved data — getSpatialElementInternalMetrics","text":"","code":"getSpatialElementInternalMetrics( labels, location, k = 6, metrics = c(\"PAS\", \"ELSA\"), ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementInternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute spot-level internal evaluation metrics for spatially-resolved data — getSpatialElementInternalMetrics","text":"labels vector containing labels evaluated. location numerical matrix containing location information, rows samples columns location dimensions. k size spatial neighborhood look spot. used calculating PAS ELSA scores. metrics Possible metrics: \"PAS\" \"ELSA\". ... Optional params PAS().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialElementInternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute spot-level internal evaluation metrics for spatially-resolved data — getSpatialElementInternalMetrics","text":"dataframe containing metric values samples dataset. PAS calculated, value Boolean abnormality spot. 
ELSA calculated, Ea, Ec ELSA spots returned.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialExternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute external metrics for spatial data — getSpatialExternalMetrics","title":"Compute external metrics for spatial data — getSpatialExternalMetrics","text":"Computes selection external clustering evaluation metrics spatial data.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialExternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute external metrics for spatial data — getSpatialExternalMetrics","text":"","code":"getSpatialExternalMetrics( true, pred, location, k = 6, alpha = 0.5, level = \"class\", metrics = c(\"SpatialWH\", \"SpatialAWH\", \"SpatialWC\", \"SpatialAWC\"), fuzzy_true = TRUE, fuzzy_pred = FALSE, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialExternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute external metrics for spatial data — getSpatialExternalMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. location matrix data.frame coordinates k number neighbors used calculating fuzzy class memberships fuzzy metrics, calculating weighted accuracy. alpha parameter control extend spot contribute class composition calculation. \"equal\" means weighted neighbors. numeric value 0 1 means weight frequency contribution spot , frequency contribution knn 1-alpha. default 0.5. level level calculate metrics. Options include \"element\", \"class\" \"dataset\". metrics metrics compute. See details. fuzzy_true Logical; whether compute fuzzy class memberships true. fuzzy_pred Logical; whether compute fuzzy class memberships pred. ... Optional params getFuzzyPartitionElementMetrics() findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialExternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute external metrics for spatial data — getSpatialExternalMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialExternalMetrics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Compute external metrics for spatial data — getSpatialExternalMetrics","text":"allowed values metrics depend value level: level = \"element\", allowed metrics : \"SpatialSPC\", \"SpatialNPC\". level = \"class\", allowed metrics : \"SpatialWH\",\"SpatialAWH\", \"SpatialWC\",\"SpatialAWC\". level = \"dataset\", allowed metrics : \"SpatialRI\",\"SpatialARI\",\"SpatialWH\",\"SpatialAWH\", \"SpatialWC\",\"SpatialAWC\",\"SpatialAccuracy\".","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialExternalMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute external metrics for spatial data — getSpatialExternalMetrics","text":"","code":"data(sp_toys) data <- sp_toys getSpatialExternalMetrics(data$label, data$p1, data[,c(\"x\", \"y\")], k=6, level=\"class\") #> Comparing between a fuzzy truth and a hard prediction... 
#> Standard error of the mean NDC across permutations:0.000521 #> SpatialWH SpatialAWH SpatialWC SpatialAWC class cluster #> 1 NA NA 0.8078698 0.5987364 1 NA #> 2 NA NA 1.0000000 1.0000000 2 NA #> 3 1.0000000 1.0000000 NA NA NA 1 #> 4 0.8323893 0.6502603 NA NA NA 2"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalExternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute dataset-level external evaluation metrics for spatially-resolved data — getSpatialGlobalExternalMetrics","title":"Compute dataset-level external evaluation metrics for spatially-resolved data — getSpatialGlobalExternalMetrics","text":"Computes selection external clustering evaluation metrics spatial data dataset level. Options include series fuzzy pair-counting metrics set matching-based accuracy.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalExternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute dataset-level external evaluation metrics for spatially-resolved data — getSpatialGlobalExternalMetrics","text":"","code":"getSpatialGlobalExternalMetrics( true, pred, location, k = 6, alpha = 0.5, metrics = c(\"SpatialRI\", \"SpatialARI\", \"SpatialWH\", \"SpatialAWH\", \"SpatialWC\", \"SpatialAWC\", \"SpatialAccuracy\"), fuzzy_true = TRUE, fuzzy_pred = FALSE, ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalExternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute dataset-level external evaluation metrics for spatially-resolved data — getSpatialGlobalExternalMetrics","text":"true vector containing labels true classes. Must vector characters, integers, numerics, factor, list. pred vector containing labels predicted clusters. Must vector characters, integers, numerics, factor, list. location matrix data.frame coordinates k number neighbors used calculating fuzzy class memberships fuzzy metrics, calculating weighted accuracy. alpha parameter control extend spot contribute class composition calculation. \"equal\" means weighted neighbors. numeric value 0 1 means weight frequency contribution spot , frequency contribution knn 1-alpha. default 0.5. metrics vector metric names compute. fuzzy_true Logical; whether compute fuzzy class memberships true. fuzzy_pred Logical; whether compute fuzzy class memberships pred. ... Optional params poem::FuzzyPartitionMetrics() findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalExternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute dataset-level external evaluation metrics for spatially-resolved data — getSpatialGlobalExternalMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalInternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute dataset-level internal evaluation metrics for spatially-resolved data — getSpatialGlobalInternalMetrics","title":"Compute dataset-level internal evaluation metrics for spatially-resolved data — getSpatialGlobalInternalMetrics","text":"Computes selection internal clustering evaluation metrics spatial data dataset level. 
MPC, PC PE internal metrics fuzzy clustering, implementations package fclust used.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalInternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute dataset-level internal evaluation metrics for spatially-resolved data — getSpatialGlobalInternalMetrics","text":"","code":"getSpatialGlobalInternalMetrics( labels, location, k = 6, metrics = c(\"PAS\", \"ELSA\", \"CHAOS\"), ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalInternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute dataset-level internal evaluation metrics for spatially-resolved data — getSpatialGlobalInternalMetrics","text":"labels vector containing labels evaluated. location numerical matrix containing location information, rows samples columns location dimensions. k size spatial neighborhood look spot. used calculating PAS ELSA scores. metrics metrics compute. See details. ... Optional arguments PAS().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalInternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute dataset-level internal evaluation metrics for spatially-resolved data — getSpatialGlobalInternalMetrics","text":"named vector containing metric values. Possible metrics : PAS Proportion abnormal spots (PAS score). ELSA Entropy-based Local indicator Spatial Association (ELSA score). CHAOS Spatial Chaos Score. MPC Modified partition coefficient PC Partition coefficient PE Partition entropy","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialGlobalInternalMetrics.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Compute dataset-level internal evaluation metrics for spatially-resolved data — getSpatialGlobalInternalMetrics","text":"Yuan, Zhiyuan, et al., 2024; 10.1038/s41592-024-02215-8 Naimi, Babak, et al., 2019; 10.1016/j.spasta.2018.10.001 Wang, et al., 2022; 10.1016/j.ins.2022.11.010","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialInternalMetrics.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute internal metrics for spatial data — getSpatialInternalMetrics","title":"Compute internal metrics for spatial data — getSpatialInternalMetrics","text":"Computes selection internal clustering evaluation metrics spatial data.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialInternalMetrics.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute internal metrics for spatial data — getSpatialInternalMetrics","text":"","code":"getSpatialInternalMetrics( labels, location, k = 6, level = \"class\", metrics = c(\"CHAOS\", \"PAS\", \"ELSA\"), ... )"},{"path":"https://roseyuan.github.io/poem/reference/getSpatialInternalMetrics.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute internal metrics for spatial data — getSpatialInternalMetrics","text":"labels vector containing labels evaluated. location numerical matrix containing location information, rows samples columns location dimensions. k size spatial neighborhood look spot. used calculating PAS ELSA scores. level level calculate metrics. Options include \"element\", \"class\" \"dataset\". metrics metrics compute. See details. ... 
Optional params PAS().","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialInternalMetrics.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute internal metrics for spatial data — getSpatialInternalMetrics","text":"data.frame metrics.","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialInternalMetrics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Compute internal metrics for spatial data — getSpatialInternalMetrics","text":"allowed values metrics depend value level: level = \"element\", allowed metrics : \"PAS\", \"ELSA\". level = \"class\", allowed metrics : \"CHAOS\", \"PAS\", \"ELSA\". level = \"dataset\", allowed metrics : \"PAS\": Proportion abnormal spots (PAS score) \"ELSA\": Entropy-based Local indicator Spatial Association (ELSA score) \"CHAOS\": Spatial Chaos Score. \"MPC\": Modified partition coefficient \"PC\": Partition coefficient \"PE\": Partition entropy","code":""},{"path":"https://roseyuan.github.io/poem/reference/getSpatialInternalMetrics.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute internal metrics for spatial data — getSpatialInternalMetrics","text":"","code":"data(sp_toys) data <- sp_toys getSpatialInternalMetrics(data$label, data[,c(\"x\", \"y\")], k=6, level=\"class\") #> the specified variable is considered as categorical... #> class PAS ELSA.Ea ELSA.Ec ELSA CHAOS #> 1 1 0.008333333 0.07106927 0.1940084 0.06040663 0.2305866 #> 2 2 0.008333333 0.06918805 0.1944579 0.05840334 0.2305866"},{"path":"https://roseyuan.github.io/poem/reference/knnComposition.html","id":null,"dir":"Reference","previous_headings":"","what":"Compute neighborhood composition — knnComposition","title":"Compute neighborhood composition — knnComposition","text":"given dataset locations labels, compute label composition neighborhood sample.","code":""},{"path":"https://roseyuan.github.io/poem/reference/knnComposition.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Compute neighborhood composition — knnComposition","text":"","code":"knnComposition(location, k = 6, labels, alpha = 0.5, ...)"},{"path":"https://roseyuan.github.io/poem/reference/knnComposition.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Compute neighborhood composition — knnComposition","text":"location numeric data matrix containing location information, rows points columns location dimensions. k number nearest neighbors look . labels vector containing label dataset. alpha parameter control extend spot contribute class composition calculation. \"equal\" means weighted neighbors. numeric value 0 1 means weight frequency contribution spot , frequency contribution knn 1-alpha. default 0.5. ... 
Optional arguments findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/knnComposition.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Compute neighborhood composition — knnComposition","text":"numerical matrix indicating composition, rows correspond samples columns correspond classes label.","code":""},{"path":"https://roseyuan.github.io/poem/reference/knnComposition.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Compute neighborhood composition — knnComposition","text":"","code":"data(sp_toys) data <- sp_toys knnComposition(data[,c(\"x\", \"y\")], k=6, data$label) #> 1 2 #> 1 0.0000000 1.0000000 #> 2 0.0000000 1.0000000 #> 3 0.0000000 1.0000000 #> 4 0.1428571 0.8571429 #> 5 0.6666667 0.3333333 #> 6 1.0000000 0.0000000 #> 7 1.0000000 0.0000000 #> 8 1.0000000 0.0000000 #> 9 1.0000000 0.0000000 #> 10 1.0000000 0.0000000 #> 11 1.0000000 0.0000000 #> 12 1.0000000 0.0000000 #> 13 1.0000000 0.0000000 #> 14 1.0000000 0.0000000 #> 15 1.0000000 0.0000000 #> 16 0.0000000 1.0000000 #> 17 0.0000000 1.0000000 #> 18 0.0000000 1.0000000 #> 19 0.1666667 0.8333333 #> 20 0.8333333 0.1666667 #> 21 1.0000000 0.0000000 #> 22 1.0000000 0.0000000 #> 23 1.0000000 0.0000000 #> 24 1.0000000 0.0000000 #> 25 1.0000000 0.0000000 #> 26 1.0000000 0.0000000 #> 27 1.0000000 0.0000000 #> 28 1.0000000 0.0000000 #> 29 1.0000000 0.0000000 #> 30 1.0000000 0.0000000 #> 31 0.0000000 1.0000000 #> 32 0.0000000 1.0000000 #> 33 0.0000000 1.0000000 #> 34 0.0000000 1.0000000 #> 35 0.1666667 0.8333333 #> 36 0.8333333 0.1666667 #> 37 1.0000000 0.0000000 #> 38 1.0000000 0.0000000 #> 39 1.0000000 0.0000000 #> 40 1.0000000 0.0000000 #> 41 1.0000000 0.0000000 #> 42 1.0000000 0.0000000 #> 43 1.0000000 0.0000000 #> 44 1.0000000 0.0000000 #> 45 1.0000000 0.0000000 #> 46 0.0000000 1.0000000 #> 47 0.0000000 1.0000000 #> 48 0.0000000 1.0000000 #> 49 0.0000000 1.0000000 #> 50 0.1666667 0.8333333 #> 51 0.8333333 0.1666667 #> 52 1.0000000 0.0000000 #> 53 1.0000000 0.0000000 #> 54 1.0000000 0.0000000 #> 55 1.0000000 0.0000000 #> 56 1.0000000 0.0000000 #> 57 1.0000000 0.0000000 #> 58 1.0000000 0.0000000 #> 59 1.0000000 0.0000000 #> 60 1.0000000 0.0000000 #> 61 0.0000000 1.0000000 #> 62 0.0000000 1.0000000 #> 63 0.0000000 1.0000000 #> 64 0.0000000 1.0000000 #> 65 0.0000000 1.0000000 #> 66 0.1666667 0.8333333 #> 67 0.8333333 0.1666667 #> 68 1.0000000 0.0000000 #> 69 1.0000000 0.0000000 #> 70 1.0000000 0.0000000 #> 71 1.0000000 0.0000000 #> 72 1.0000000 0.0000000 #> 73 1.0000000 0.0000000 #> 74 1.0000000 0.0000000 #> 75 1.0000000 0.0000000 #> 76 0.0000000 1.0000000 #> 77 0.0000000 1.0000000 #> 78 0.0000000 1.0000000 #> 79 0.0000000 1.0000000 #> 80 0.0000000 1.0000000 #> 81 0.1666667 0.8333333 #> 82 0.8333333 0.1666667 #> 83 1.0000000 0.0000000 #> 84 1.0000000 0.0000000 #> 85 1.0000000 0.0000000 #> 86 1.0000000 0.0000000 #> 87 1.0000000 0.0000000 #> 88 1.0000000 0.0000000 #> 89 1.0000000 0.0000000 #> 90 1.0000000 0.0000000 #> 91 0.0000000 1.0000000 #> 92 0.0000000 1.0000000 #> 93 0.0000000 1.0000000 #> 94 0.0000000 1.0000000 #> 95 0.0000000 1.0000000 #> 96 0.0000000 1.0000000 #> 97 0.1666667 0.8333333 #> 98 0.8333333 0.1666667 #> 99 1.0000000 0.0000000 #> 100 1.0000000 0.0000000 #> 101 1.0000000 0.0000000 #> 102 1.0000000 0.0000000 #> 103 1.0000000 0.0000000 #> 104 1.0000000 0.0000000 #> 105 1.0000000 0.0000000 #> 106 0.0000000 1.0000000 #> 107 0.0000000 1.0000000 #> 108 0.0000000 1.0000000 #> 109 0.0000000 1.0000000 #> 110 0.0000000 1.0000000 
#> 111 0.0000000 1.0000000 #> 112 0.1666667 0.8333333 #> 113 0.8333333 0.1666667 #> 114 1.0000000 0.0000000 #> 115 1.0000000 0.0000000 #> 116 1.0000000 0.0000000 #> 117 1.0000000 0.0000000 #> 118 1.0000000 0.0000000 #> 119 1.0000000 0.0000000 #> 120 1.0000000 0.0000000 #> 121 0.0000000 1.0000000 #> 122 0.0000000 1.0000000 #> 123 0.0000000 1.0000000 #> 124 0.0000000 1.0000000 #> 125 0.0000000 1.0000000 #> 126 0.0000000 1.0000000 #> 127 0.0000000 1.0000000 #> 128 0.1666667 0.8333333 #> 129 0.8333333 0.1666667 #> 130 1.0000000 0.0000000 #> 131 1.0000000 0.0000000 #> 132 1.0000000 0.0000000 #> 133 1.0000000 0.0000000 #> 134 1.0000000 0.0000000 #> 135 1.0000000 0.0000000 #> 136 0.0000000 1.0000000 #> 137 0.0000000 1.0000000 #> 138 0.0000000 1.0000000 #> 139 0.0000000 1.0000000 #> 140 0.0000000 1.0000000 #> 141 0.0000000 1.0000000 #> 142 0.0000000 1.0000000 #> 143 0.1666667 0.8333333 #> 144 0.8333333 0.1666667 #> 145 1.0000000 0.0000000 #> 146 1.0000000 0.0000000 #> 147 1.0000000 0.0000000 #> 148 1.0000000 0.0000000 #> 149 1.0000000 0.0000000 #> 150 1.0000000 0.0000000 #> 151 0.0000000 1.0000000 #> 152 0.0000000 1.0000000 #> 153 0.0000000 1.0000000 #> 154 0.0000000 1.0000000 #> 155 0.0000000 1.0000000 #> 156 0.0000000 1.0000000 #> 157 0.0000000 1.0000000 #> 158 0.0000000 1.0000000 #> 159 0.1666667 0.8333333 #> 160 0.8333333 0.1666667 #> 161 1.0000000 0.0000000 #> 162 1.0000000 0.0000000 #> 163 1.0000000 0.0000000 #> 164 1.0000000 0.0000000 #> 165 1.0000000 0.0000000 #> 166 0.0000000 1.0000000 #> 167 0.0000000 1.0000000 #> 168 0.0000000 1.0000000 #> 169 0.0000000 1.0000000 #> 170 0.0000000 1.0000000 #> 171 0.0000000 1.0000000 #> 172 0.0000000 1.0000000 #> 173 0.0000000 1.0000000 #> 174 0.1666667 0.8333333 #> 175 0.8333333 0.1666667 #> 176 1.0000000 0.0000000 #> 177 1.0000000 0.0000000 #> 178 1.0000000 0.0000000 #> 179 1.0000000 0.0000000 #> 180 1.0000000 0.0000000 #> 181 0.0000000 1.0000000 #> 182 0.0000000 1.0000000 #> 183 0.0000000 1.0000000 #> 184 0.0000000 1.0000000 #> 185 0.0000000 1.0000000 #> 186 0.0000000 1.0000000 #> 187 0.0000000 1.0000000 #> 188 0.0000000 1.0000000 #> 189 0.0000000 1.0000000 #> 190 0.1666667 0.8333333 #> 191 0.8333333 0.1666667 #> 192 1.0000000 0.0000000 #> 193 1.0000000 0.0000000 #> 194 1.0000000 0.0000000 #> 195 1.0000000 0.0000000 #> 196 0.0000000 1.0000000 #> 197 0.0000000 1.0000000 #> 198 0.0000000 1.0000000 #> 199 0.0000000 1.0000000 #> 200 0.0000000 1.0000000 #> 201 0.0000000 1.0000000 #> 202 0.0000000 1.0000000 #> 203 0.0000000 1.0000000 #> 204 0.0000000 1.0000000 #> 205 0.1666667 0.8333333 #> 206 0.8333333 0.1666667 #> 207 1.0000000 0.0000000 #> 208 1.0000000 0.0000000 #> 209 1.0000000 0.0000000 #> 210 1.0000000 0.0000000 #> 211 0.0000000 1.0000000 #> 212 0.0000000 1.0000000 #> 213 0.0000000 1.0000000 #> 214 0.0000000 1.0000000 #> 215 0.0000000 1.0000000 #> 216 0.0000000 1.0000000 #> 217 0.0000000 1.0000000 #> 218 0.0000000 1.0000000 #> 219 0.0000000 1.0000000 #> 220 0.0000000 1.0000000 #> 221 0.1666667 0.8333333 #> 222 0.8333333 0.1666667 #> 223 1.0000000 0.0000000 #> 224 1.0000000 0.0000000 #> 225 1.0000000 0.0000000 #> 226 0.0000000 1.0000000 #> 227 0.0000000 1.0000000 #> 228 0.0000000 1.0000000 #> 229 0.0000000 1.0000000 #> 230 0.0000000 1.0000000 #> 231 0.0000000 1.0000000 #> 232 0.0000000 1.0000000 #> 233 0.0000000 1.0000000 #> 234 0.0000000 1.0000000 #> 235 0.0000000 1.0000000 #> 236 0.2857143 0.7142857 #> 237 0.8571429 0.1428571 #> 238 1.0000000 0.0000000 #> 239 1.0000000 0.0000000 #> 240 1.0000000 
0.0000000"},{"path":"https://roseyuan.github.io/poem/reference/matchSets.html","id":null,"dir":"Reference","previous_headings":"","what":"Match two partitions using Hungarian algorithm — matchSets","title":"Match two partitions using Hungarian algorithm — matchSets","text":"Match sets partitions reference partition using Hungarian algorithm optimize F1 scores.","code":""},{"path":"https://roseyuan.github.io/poem/reference/matchSets.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Match two partitions using Hungarian algorithm — matchSets","text":"","code":"matchSets(pred, true, forceMatch = TRUE, returnIndices = is.integer(true))"},{"path":"https://roseyuan.github.io/poem/reference/matchSets.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Match two partitions using Hungarian algorithm — matchSets","text":"pred integer factor cluster labels true integer factor reference labels forceMatch Logical; whether enforce match every set pred returnIndices Logical; whether return indices rather levels","code":""},{"path":"https://roseyuan.github.io/poem/reference/matchSets.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Match two partitions using Hungarian algorithm — matchSets","text":"vector matching sets (.e. level) true every set (.e. level) pred.","code":""},{"path":"https://roseyuan.github.io/poem/reference/metric_info.html","id":null,"dir":"Reference","previous_headings":"","what":"Metrics Information — metric_info","title":"Metrics Information — metric_info","text":"dataframe storing information metrics","code":""},{"path":"https://roseyuan.github.io/poem/reference/metric_info.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Metrics Information — metric_info","text":"","code":"metric_info"},{"path":[]},{"path":"https://roseyuan.github.io/poem/reference/metric_info.html","id":"metric-info","dir":"Reference","previous_headings":"","what":"metric_info","title":"Metrics Information — metric_info","text":"data frame.","code":""},{"path":"https://roseyuan.github.io/poem/reference/mockData.html","id":null,"dir":"Reference","previous_headings":"","what":"Generate mock multidimensional data — mockData","title":"Generate mock multidimensional data — mockData","text":"Generates mock multidimensional data given number classes points, testing.","code":""},{"path":"https://roseyuan.github.io/poem/reference/mockData.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Generate mock multidimensional data — mockData","text":"","code":"mockData( Ns = c(25, 15), classDiff = 2, Sds = 1, ndims = 2, spread = c(1, 2), rndFn = rnorm )"},{"path":"https://roseyuan.github.io/poem/reference/mockData.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Generate mock multidimensional data — mockData","text":"Ns vector one positive integers specifying number elements class. classDiff distances classes. 2 classes, can dist object symmetric matrix length(Ns)-1 columns/rows lower triangle indicates desired distances classes. Sds standard deviation. Can either fixed value, value per class, matrix values class (rows) dimension (column). ndims number dimensions generate (default 2). spread spread points. Can either fixed value, value per class, matrix values class (rows) dimension (col). 
rndFn random function, default rnorm, also work rlnorm similar.","code":""},{"path":"https://roseyuan.github.io/poem/reference/mockData.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Generate mock multidimensional data — mockData","text":"data.frame coordinates class column.","code":""},{"path":"https://roseyuan.github.io/poem/reference/mockData.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Generate mock multidimensional data — mockData","text":"","code":"d <- mockData()"},{"path":"https://roseyuan.github.io/poem/reference/nnWeightedAccuracy.html","id":null,"dir":"Reference","previous_headings":"","what":"nnWeightedAccuracy — nnWeightedAccuracy","title":"nnWeightedAccuracy — nnWeightedAccuracy","text":"Computes accuracy score weighs elements/spots misclassified proportion (spatial) neighborhood element/spot's predicted class. reduces weight misclassifications happening boundary domains.","code":""},{"path":"https://roseyuan.github.io/poem/reference/nnWeightedAccuracy.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"nnWeightedAccuracy — nnWeightedAccuracy","text":"","code":"nnWeightedAccuracy(true, pred, location, k = 5, ...)"},{"path":"https://roseyuan.github.io/poem/reference/nnWeightedAccuracy.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"nnWeightedAccuracy — nnWeightedAccuracy","text":"true True class labels (vector coercible factor) pred Predicted labels (vector coercible factor) location spatial coordinates compute nearest neighbors. k Number nearest neighbors ... Optional params passed findSpatialKNN().","code":""},{"path":"https://roseyuan.github.io/poem/reference/nnWeightedAccuracy.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"nnWeightedAccuracy — nnWeightedAccuracy","text":"scalar representing weighted accuracy.","code":""},{"path":"https://roseyuan.github.io/poem/reference/noisy_moon.html","id":null,"dir":"Reference","previous_headings":"","what":"The noisy moon dataset — noisy_moon","title":"The noisy moon dataset — noisy_moon","text":"simple toy dataset consists two interleaving half circles.","code":""},{"path":"https://roseyuan.github.io/poem/reference/noisy_moon.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"The noisy moon dataset — noisy_moon","text":"","code":"noisy_moon"},{"path":[]},{"path":"https://roseyuan.github.io/poem/reference/noisy_moon.html","id":"noisy-moon","dir":"Reference","previous_headings":"","what":"noisy_moon","title":"The noisy moon dataset — noisy_moon","text":"data frame 100 rows 5 columns: x, y Coordinates observations. label Ground truth labels. Either 1 2. kmeans_label Predicted clustering labels using kmeans 2 centers. 
hdbscan_label Predicted clustering labels using hdbscan minPts = 5.","code":""},{"path":"https://roseyuan.github.io/poem/reference/setMatchingAccuracy.html","id":null,"dir":"Reference","previous_headings":"","what":"The non-spatially-weighted counterpart of nnWeightedAccuracy — setMatchingAccuracy","title":"The non-spatially-weighted counterpart of nnWeightedAccuracy — setMatchingAccuracy","text":"non-spatially-weighted counterpart nnWeightedAccuracy","code":""},{"path":"https://roseyuan.github.io/poem/reference/setMatchingAccuracy.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"The non-spatially-weighted counterpart of nnWeightedAccuracy — setMatchingAccuracy","text":"","code":"setMatchingAccuracy(true, pred)"},{"path":"https://roseyuan.github.io/poem/reference/setMatchingAccuracy.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"The non-spatially-weighted counterpart of nnWeightedAccuracy — setMatchingAccuracy","text":"true True class labels (vector coercible factor) pred Predicted labels (vector coercible factor)","code":""},{"path":"https://roseyuan.github.io/poem/reference/setMatchingAccuracy.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"The non-spatially-weighted counterpart of nnWeightedAccuracy — setMatchingAccuracy","text":"scalar representing weighted accuracy.","code":""},{"path":"https://roseyuan.github.io/poem/reference/sp_toys.html","id":null,"dir":"Reference","previous_headings":"","what":"Toy examples of spatial data — sp_toys","title":"Toy examples of spatial data — sp_toys","text":"Toy examples spatial data.","code":""},{"path":"https://roseyuan.github.io/poem/reference/sp_toys.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Toy examples of spatial data — sp_toys","text":"","code":"sp_toys"},{"path":[]},{"path":"https://roseyuan.github.io/poem/reference/sp_toys.html","id":"sp-toys","dir":"Reference","previous_headings":"","what":"sp_toys","title":"Toy examples of spatial data — sp_toys","text":"data frame 240 rows 11 columns, representing 16 x 15 array spots: x, y Coordinates spots. row, col row column index spots. label Ground truth labels. Either 1 2. p1-p6 Hypothetical predicted spatial clustering labels.","code":""},{"path":"https://roseyuan.github.io/poem/reference/toyExamples.html","id":null,"dir":"Reference","previous_headings":"","what":"Toy embedding examples — toyExamples","title":"Toy embedding examples — toyExamples","text":"Toy example 2D embeddings elements different classes, varying mixing spread. Graphs 1-3 20 elements 4 classes, mixed different fashion embedding space. Graphs 4-7 100 elements class1 60 class2, class1 elements vary spread.","code":""},{"path":"https://roseyuan.github.io/poem/reference/toyExamples.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Toy embedding examples — toyExamples","text":"","code":"toyExamples"},{"path":[]},{"path":"https://roseyuan.github.io/poem/reference/toyExamples.html","id":"toyexamples","dir":"Reference","previous_headings":"","what":"toyExamples","title":"Toy embedding examples — toyExamples","text":"data frame. graph name embedding element belongs. x, y Coordinates 2D embedding. class class element belongs.","code":""}]
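Several of the helpers indexed above (`setMatchingAccuracy`, `nnWeightedAccuracy`, `getPairConcordance`) are documented without a worked example. A minimal sketch of how they might be called on the bundled `sp_toys` data, assuming only the signatures and argument names listed in the entries above:

```r
# Illustrative sketch; argument names follow the reference entries indexed above.
library(poem)
data(sp_toys)
d <- sp_toys

# Set-matching accuracy between the ground truth and one hypothetical prediction
setMatchingAccuracy(true = d$label, pred = d$p1)

# Spatially-weighted counterpart: misclassifications at domain boundaries count less
nnWeightedAccuracy(true = d$label, pred = d$p1, location = d[, c("x", "y")], k = 6)

# Per-element pair concordance scores (first few elements only)
head(getPairConcordance(true = d$label, pred = d$p1, usePairs = TRUE))
```

Per the documented return values, the two accuracy helpers each return a single scalar, while `getPairConcordance` returns one score per element, which is why only the first few values are shown.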
diff --git a/docs/sitemap.xml b/docs/sitemap.xml
index 9b35b20..eeb7cde 100644
--- a/docs/sitemap.xml
+++ b/docs/sitemap.xml
@@ -1,6 +1,8 @@
https://roseyuan.github.io/poem/404.html
https://roseyuan.github.io/poem/LICENSE.html
+https://roseyuan.github.io/poem/articles/SpatialExperiment.html
+https://roseyuan.github.io/poem/articles/fuzzy_metrics.html
https://roseyuan.github.io/poem/articles/index.html
https://roseyuan.github.io/poem/articles/poem.html
https://roseyuan.github.io/poem/articles/table.html
@@ -26,6 +28,7 @@
https://roseyuan.github.io/poem/reference/emb2snn.html
https://roseyuan.github.io/poem/reference/findSpatialKNN.html
https://roseyuan.github.io/poem/reference/fuzzyHardMetrics.html
+https://roseyuan.github.io/poem/reference/fuzzyHardMetrics2.html
https://roseyuan.github.io/poem/reference/fuzzyHardSpotAgreement.html
https://roseyuan.github.io/poem/reference/fuzzyHardSpotConcordance.html
https://roseyuan.github.io/poem/reference/fuzzyPartitionMetrics.html
diff --git a/vignettes/SpatialExperiment.Rmd b/vignettes/SpatialExperiment.Rmd
index 33c4c25..4471086 100644
--- a/vignettes/SpatialExperiment.Rmd
+++ b/vignettes/SpatialExperiment.Rmd
@@ -45,14 +45,14 @@ my_cols <-c("#D55E00", "#CC79A7","#E69F00","#0072B2","#009E73","#F0E442","#56B4E
names(my_cols) <- as.character(seq(my_cols))
```
-Here we use the `Visium_humanDLPFC` dataset from package `STexampleData` for illustration. Load it:
+Our package `poem` can be easily integrated into workflows built around `SpatialExperiment` objects. Here we use the `Visium_humanDLPFC` dataset from the `STexampleData` package for illustration. Load it:
```{r}
spe <- Visium_humanDLPFC()
spe
```
-From this `SpatialExperiment` object, we take the location information (accessible via `spatialCoords()`) and the manual annotation in `colData` and store them as a dataframe:
+From this `SpatialExperiment` object, we take the location information (accessible via `spatialCoords`) and the manual annotation in `colData`, and store them in a data frame:
```{r}
data <- data.frame(spatialCoords(spe))
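The rest of this chunk is not shown in this hunk; the annotation from `colData` is attached in the elided lines. As a rough sketch only, using the `spe` object loaded above and assuming the annotation sits in a `colData` column named `ground_truth` (the actual column name is not shown in this excerpt):

```r
library(SpatialExperiment)

# Coordinates from the SpatialExperiment object
data <- data.frame(spatialCoords(spe))
# Manual annotation from colData (column name assumed; adjust as needed)
data$reference <- colData(spe)$ground_truth
head(data)
```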
@@ -64,7 +64,8 @@ data$reference <- factor(data$reference, levels=c("WM", "Layer6", "Layer5",
```
The manual annotation looks like this:
-```{r, fig.height = 4, fig.width = 4}
+
+```{r, fig.height = 4, fig.width = 4, fig.small = TRUE}
p1 <- ggplot(data) +
geom_point(aes(x = pxl_col_in_fullres, y = -pxl_row_in_fullres, color = reference), size=0.3) +
labs(x = "", y = "", color="", title="Manual annotation") +
@@ -88,6 +89,7 @@ p1 <- ggplot(data) +
p1
```
+
We then generate some hypothetical domain detection predictions by randomly permuting the manual annotation.
```{r}
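The body of this chunk is not shown in this hunk. As a minimal sketch of the idea described above, not the vignette's own code, and with an arbitrarily chosen fraction of spots to permute:

```r
# Mimic a noisy prediction by permuting 10% of the annotated spots
set.seed(123)
pred_noisy <- as.character(data$reference)
idx <- sample(length(pred_noisy), size = round(0.1 * length(pred_noisy)))
pred_noisy[idx] <- sample(pred_noisy[idx])
# Approximate fraction of spots whose label changed
mean(pred_noisy != as.character(data$reference), na.rm = TRUE)
```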
@@ -118,7 +120,7 @@ simulate_clustering_variation <- function(clusters, split_cluster = NULL, merge_
}
```
-Simulate some prediction results with random noise as well as merging or splitting of domains:
+Below we simulate some prediction results with random noise as well as merging or splitting of domains:
```{r}
# P1: add random noise
@@ -298,7 +300,7 @@ p5 <- grid.grabExpr({
})
```
-```{r, fig.height = 2.5, fig.width = 6}
+```{r, fig.height = 3, fig.width = 8}
plot_grid(p4, p5, rel_widths=c(1,1), scale=c(1, 1))
```
@@ -385,7 +387,7 @@ The lower the scores, the smoother the predictions. As expected, the smoothness
The internal metrics can also be calculated at the element level. For example, we can calculate the element-wise ELSA score, which measures local diversity and can be regarded as an edge detector:
-```{r}
+```{r, fig.height = 4, fig.width = 8}
internal <-lapply(setNames(c("reference","P1","P2","P3","P4","P5"), c("reference","P1","P2","P3","P4","P5")),
function(x){cbind(
getSpatialInternalMetrics(data[[x]],