Skip to content
Snippets Groups Projects
Commit df484cc9 authored by Paul Best's avatar Paul Best
Browse files

update

parent 85b04279
No related branches found
No related tags found
No related merge requests found
...@@ -13,3 +13,4 @@ annot_distrib.pdf ...@@ -13,3 +13,4 @@ annot_distrib.pdf
annot_distrib.tex annot_distrib.tex
humpback/annot humpback/annot
humpback_CARIMAM/ humpback_CARIMAM/
dolphin/zips
fn,label,dur,fs,pos
FB79/85FB79_19.wav,FB79,1.49865625,96000,0.749328125
FB79/85FB79_16.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_14.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_12.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_07.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_17.wav,FB79,1.515125,96000,0.7575625
FB79/85FB79_03.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_08.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_04.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_05.wav,FB79,1.4799375,96000,0.73996875
FB79/85FB79_20.wav,FB79,1.515125,96000,0.7575625
FB79/85FB79_09.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_11.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_18.wav,FB79,1.49865625,96000,0.749328125
FB79/85FB79_02.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_10.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_01.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_13.wav,FB79,1.50734375,96000,0.753671875
FB79/85FB79_06.wav,FB79,1.53475,96000,0.767375
FB79/85FB79_15.wav,FB79,1.50734375,96000,0.753671875
FB25/89FB25_13.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_06.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_17.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_12.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_20.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_11.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_05.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_08.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_04.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_02.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_07.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_09.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_03.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_15.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_10.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_19.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_16.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_14.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_18.wav,FB25,2.0085,96000,1.00425
FB25/89FB25_01.wav,FB25,2.0085,96000,1.00425
FB02/92FB2_17.wav,FB02,1.51025,96000,0.755125
FB02/92FB2_06.wav,FB02,1.50875,96000,0.754375
FB02/92FB2_13.wav,FB02,1.51025,96000,0.755125
FB02/92FB2_11.wav,FB02,1.50875,96000,0.754375
FB02/92FB2_15.wav,FB02,1.51025,96000,0.755125
FB02/92FB2_12.wav,FB02,1.50875,96000,0.754375
FB02/92FB2_10.wav,FB02,1.50875,96000,0.754375
FB02/92FB2_16.wav,FB02,1.5282291666666667,96000,0.7641145833333334
FB02/92FB2_14.wav,FB02,1.4922708333333334,96000,0.7461354166666667
FB02/92FB2_08.wav,FB02,1.50875,96000,0.754375
FB02/92FB2_07.wav,FB02,1.50875,96000,0.754375
FB02/92FB2_18.wav,FB02,1.4922708333333334,96000,0.7461354166666667
FB02/92FB2_20.wav,FB02,1.51025,96000,0.755125
FB02/92FB2_01.wav,FB02,1.50875,96000,0.754375
FB02/92FB2_19.wav,FB02,1.4922708333333334,96000,0.7461354166666667
FB02/92FB2_04.wav,FB02,1.4865625,96000,0.74328125
FB02/92FB2_02.wav,FB02,1.50875,96000,0.754375
FB02/92FB2_09.wav,FB02,1.50875,96000,0.754375
FB02/92FB2_05.wav,FB02,1.50875,96000,0.754375
FB02/92FB2_03.wav,FB02,1.50875,96000,0.754375
FB92/89FB92_03.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_06.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_19.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_13.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_15.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_02.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_09.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_10.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_04.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_20.wav,FB92,2.0171875,96000,1.00859375
FB92/89FB92_01.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_11.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_18.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_08.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_05.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_14.wav,FB92,1.9902916666666666,96000,0.9951458333333333
FB92/89FB92_17.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_12.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_16.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB92/89FB92_07.wav,FB92,2.003739583333333,96000,1.0018697916666666
FB09/85FB9_02.wav,FB09,1.97834375,96000,0.989171875
FB09/85FB9_10.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_13.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_08.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_07.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_05.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_14.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_09.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_20.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_11.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_15.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_17.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_01.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_06.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_03.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_12.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_19.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_18.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_16.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB09/85FB9_04.wav,FB09,2.0010833333333333,96000,1.0005416666666667
FB07/87FB7_14.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_13.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_20.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_18.wav,FB07,1.9881145833333334,96000,0.9940572916666667
FB07/87FB7_07.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_05.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_11.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_02.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_08.wav,FB07,1.9881145833333334,96000,0.9940572916666667
FB07/87FB7_09.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_15.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_12.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_01.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_06.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_10.wav,FB07,1.9881145833333334,96000,0.9940572916666667
FB07/87FB7_16.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_19.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_04.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_17.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB07/87FB7_03.wav,FB07,2.0063541666666667,96000,1.0031770833333333
FB35/89FB35_11.wav,FB35,2.002,96000,1.001
FB35/89FB35_02.wav,FB35,1.9970833333333333,96000,0.9985416666666667
FB35/89FB35_03.wav,FB35,1.9970833333333333,96000,0.9985416666666667
FB35/89FB35_06.wav,FB35,1.9970833333333333,96000,0.9985416666666667
FB35/89FB35_07.wav,FB35,1.9970833333333333,96000,0.9985416666666667
FB35/89FB35_16.wav,FB35,2.002,96000,1.001
FB35/89FB35_04.wav,FB35,1.9970833333333333,96000,0.9985416666666667
FB35/89FB35_13.wav,FB35,2.002,96000,1.001
FB35/89FB35_10.wav,FB35,1.9970833333333333,96000,0.9985416666666667
FB35/89FB35_17.wav,FB35,2.002,96000,1.001
FB35/89FB35_18.wav,FB35,2.002,96000,1.001
FB35/89FB35_01.wav,FB35,1.9970833333333333,96000,0.9985416666666667
FB35/89FB35_15.wav,FB35,2.002,96000,1.001
FB35/89FB35_05.wav,FB35,1.9970833333333333,96000,0.9985416666666667
FB35/89FB35_20.wav,FB35,2.002,96000,1.001
FB35/89FB35_19.wav,FB35,2.002,96000,1.001
FB35/89FB35_09.wav,FB35,1.9970833333333333,96000,0.9985416666666667
FB35/89FB35_08.wav,FB35,1.9970833333333333,96000,0.9985416666666667
FB35/89FB35_12.wav,FB35,2.002,96000,1.001
FB35/89FB35_14.wav,FB35,2.002,96000,1.001
FB55/02FB55_14.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_20.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_15.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_16.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_04.wav,FB55,1.9930612244897958,88200,0.9965306122448979
FB55/02FB55_11.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_18.wav,FB55,1.9930612244897958,88200,0.9965306122448979
FB55/02FB55_13.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_10.wav,FB55,2.028027210884354,88200,1.014013605442177
FB55/02FB55_03.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_05.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_08.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_06.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_19.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_07.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_02.wav,FB55,1.9930612244897958,88200,0.9965306122448979
FB55/02FB55_01.wav,FB55,1.9930612244897958,88200,0.9965306122448979
FB55/02FB55_09.wav,FB55,1.975578231292517,88200,0.9877891156462585
FB55/02FB55_12.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB55/02FB55_17.wav,FB55,2.010544217687075,88200,1.0052721088435375
FB33/84FB33_01.wav,FB33,2.491125,96000,1.2455625
FB33/84FB33_05.wav,FB33,2.5108958333333335,96000,1.2554479166666668
FB33/84FB33_18.wav,FB33,2.4934375,96000,1.24671875
FB33/84FB33_20.wav,FB33,2.518125,96000,1.2590625
FB33/84FB33_17.wav,FB33,2.4934375,96000,1.24671875
FB33/84FB33_16.wav,FB33,2.4934375,96000,1.24671875
FB33/84FB33_19.wav,FB33,2.4934375,96000,1.24671875
FB33/84FB33_02.wav,FB33,2.5108958333333335,96000,1.2554479166666668
FB33/84FB33_03.wav,FB33,2.491125,96000,1.2455625
FB33/84FB33_04.wav,FB33,2.5108958333333335,96000,1.2554479166666668
FB33/84FB33_08.wav,FB33,2.491125,96000,1.2455625
FB33/84FB33_14.wav,FB33,2.518125,96000,1.2590625
FB33/84FB33_13.wav,FB33,2.518125,96000,1.2590625
FB33/84FB33_09.wav,FB33,2.491125,96000,1.2455625
FB33/84FB33_15.wav,FB33,2.50578125,96000,1.252890625
FB33/84FB33_06.wav,FB33,2.5108958333333335,96000,1.2554479166666668
FB33/84FB33_07.wav,FB33,2.491125,96000,1.2455625
FB33/84FB33_11.wav,FB33,2.491125,96000,1.2455625
FB33/84FB33_12.wav,FB33,2.491125,96000,1.2455625
FB33/84FB33_10.wav,FB33,2.5108958333333335,96000,1.2554479166666668
FB101/00FB101_01.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_08.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_07.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_10.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_02.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_14.wav,FB101,2.0183333333333335,96000,1.0091666666666668
FB101/00FB101_11.wav,FB101,1.9822916666666666,96000,0.9911458333333333
FB101/00FB101_04.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_18.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_06.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_16.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_13.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_09.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_05.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_12.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_03.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_20.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_17.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_15.wav,FB101,2.0003125,96000,1.00015625
FB101/00FB101_19.wav,FB101,2.0003125,96000,1.00015625
FB05/89FB5_14.wav,FB05,1.9953333333333334,96000,0.9976666666666667
FB05/89FB5_02.wav,FB05,1.9970416666666666,96000,0.9985208333333333
FB05/89FB5_01.wav,FB05,1.9982083333333334,96000,0.9991041666666667
FB05/89FB5_17.wav,FB05,1.9953333333333334,96000,0.9976666666666667
FB05/89FB5_19.wav,FB05,1.9953333333333334,96000,0.9976666666666667
FB05/89FB5_04.wav,FB05,1.9970416666666666,96000,0.9985208333333333
FB05/89FB5_10.wav,FB05,1.9953333333333334,96000,0.9976666666666667
FB05/89FB5_16.wav,FB05,1.9953333333333334,96000,0.9976666666666667
FB05/89FB5_06.wav,FB05,1.9970416666666666,96000,0.9985208333333333
FB05/89FB5_03.wav,FB05,1.9970416666666666,96000,0.9985208333333333
FB05/89FB5_15.wav,FB05,1.9953333333333334,96000,0.9976666666666667
FB05/89FB5_11.wav,FB05,1.9953333333333334,96000,0.9976666666666667
FB05/89FB5_12.wav,FB05,1.9953333333333334,96000,0.9976666666666667
FB05/89FB5_08.wav,FB05,1.9953333333333334,96000,0.9976666666666667
FB05/89FB5_07.wav,FB05,2.0213958333333335,96000,1.0106979166666668
FB05/89FB5_20.wav,FB05,1.9953333333333334,96000,0.9976666666666667
FB05/89FB5_18.wav,FB05,1.9953333333333334,96000,0.9976666666666667
FB05/89FB5_13.wav,FB05,1.9466666666666668,96000,0.9733333333333334
FB05/89FB5_09.wav,FB05,2.0196666666666667,96000,1.0098333333333334
FB05/89FB5_05.wav,FB05,1.9970416666666666,96000,0.9985208333333333
FB182/90FB182_02.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_04.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_03.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_11.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_17.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_08.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_16.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_19.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_13.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_06.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_07.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_10.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_15.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_01.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_18.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_20.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_12.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/F182-2005-SW-IND10622.wav,FB182,1.4600104166666668,96000,0.7300052083333334
FB182/90FB182_05.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB182/90FB182_09.wav,FB182,2.5000104166666666,96000,1.2500052083333333
FB122/94FB122_04.wav,FB122,2.0151041666666667,96000,1.0075520833333333
FB122/94FB122_13.wav,FB122,1.99609375,96000,0.998046875
FB122/94FB122_18.wav,FB122,2.001333333333333,96000,1.0006666666666666
FB122/94FB122_19.wav,FB122,2.001333333333333,96000,1.0006666666666666
FB122/94FB122_15.wav,FB122,2.0151041666666667,96000,1.0075520833333333
FB122/94FB122_08.wav,FB122,2.0151041666666667,96000,1.0075520833333333
FB122/94FB122_03.wav,FB122,2.0151041666666667,96000,1.0075520833333333
FB122/94FB122_02.wav,FB122,1.99609375,96000,0.998046875
FB122/94FB122_10.wav,FB122,2.0151041666666667,96000,1.0075520833333333
FB122/94FB122_11.wav,FB122,1.99609375,96000,0.998046875
FB122/94FB122_17.wav,FB122,2.0145,96000,1.00725
FB122/94FB122_06.wav,FB122,1.99609375,96000,0.998046875
FB122/94FB122_12.wav,FB122,2.0151041666666667,96000,1.0075520833333333
FB122/94FB122_05.wav,FB122,1.99609375,96000,0.998046875
FB122/94FB122_16.wav,FB122,2.0145,96000,1.00725
FB122/94FB122_07.wav,FB122,2.0151041666666667,96000,1.0075520833333333
FB122/94FB122_20.wav,FB122,2.001333333333333,96000,1.0006666666666666
FB122/94FB122_14.wav,FB122,1.99609375,96000,0.998046875
FB122/94FB122_01.wav,FB122,1.99609375,96000,0.998046875
FB122/94FB122_09.wav,FB122,2.0151041666666667,96000,1.0075520833333333
FB10/85FB10_05.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_06.wav,FB10,2.46603125,96000,1.233015625
FB10/85FB10_08.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_20.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_18.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_10.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_14.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_12.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_17.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_13.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_09.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_01.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_07.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_11.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_02.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_04.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_19.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_03.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_16.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB10/85FB10_15.wav,FB10,2.5017708333333335,96000,1.2508854166666667
FB11/87FB11_08.wav,FB11,2.0,96000,1.0
FB11/87FB11_17.wav,FB11,2.0,96000,1.0
FB11/87FB11_16.wav,FB11,2.013333333333333,96000,1.0066666666666666
FB11/87FB11_18.wav,FB11,2.0,96000,1.0
FB11/87FB11_05.wav,FB11,1.9976041666666666,96000,0.9988020833333333
FB11/87FB11_01.wav,FB11,2.0094583333333333,96000,1.0047291666666667
FB11/87FB11_11.wav,FB11,2.0,96000,1.0
FB11/87FB11_03.wav,FB11,2.013333333333333,96000,1.0066666666666666
FB11/87FB11_10.wav,FB11,2.0,96000,1.0
FB11/87FB11_19.wav,FB11,2.0,96000,1.0
FB11/87FB11_04.wav,FB11,1.9976041666666666,96000,0.9988020833333333
FB11/87FB11_07.wav,FB11,2.013333333333333,96000,1.0066666666666666
FB11/87FB11_09.wav,FB11,2.0,96000,1.0
FB11/87FB11_02.wav,FB11,1.9976041666666666,96000,0.9988020833333333
FB11/87FB11_13.wav,FB11,1.9866666666666666,96000,0.9933333333333333
FB11/87FB11_14.wav,FB11,2.013333333333333,96000,1.0066666666666666
FB11/87FB11_12.wav,FB11,2.0,96000,1.0
FB11/87FB11_20.wav,FB11,2.0,96000,1.0
FB11/87FB11_15.wav,FB11,2.0,96000,1.0
FB11/87FB11_06.wav,FB11,1.9976041666666666,96000,0.9988020833333333
FB131/92FB131_12.wav,FB131,2.4680045351473923,88200,1.2340022675736961
FB131/92FB131_19.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_03.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_09.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_07.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_11.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_20.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_16.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_05.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_13.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_02.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_10.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_04.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_15.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_18.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_01.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_08.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_14.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/92FB131_17.wav,FB131,2.5068707482993196,88200,1.2534353741496598
FB131/95FB131E.WAV,FB131,3.7029,80000,1.85145
FB15/91FB15_20.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_02.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_09.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_11.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_03.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_06.wav,FB15,1.9886458333333332,96000,0.9943229166666666
FB15/91FB15_07.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_05.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_12.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_19.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_10.wav,FB15,2.0354375,96000,1.01771875
FB15/91FB15_16.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_13.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_14.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_18.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_15.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_04.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_17.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_08.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB15/91FB15_01.wav,FB15,2.0120416666666667,96000,1.0060208333333334
FB71/90FB71_12.wav,FB71,2.003625,96000,1.0018125
FB71/90FB71_04.wav,FB71,2.00625,96000,1.003125
FB71/90FB71_20.wav,FB71,2.01825,96000,1.009125
FB71/90FB71_18.wav,FB71,2.003625,96000,1.0018125
FB71/90FB71_08.wav,FB71,2.0048958333333333,96000,1.0024479166666667
FB71/90FB71_13.wav,FB71,2.003625,96000,1.0018125
FB71/90FB71_02.wav,FB71,2.00625,96000,1.003125
FB71/90FB71_11.wav,FB71,2.0048958333333333,96000,1.0024479166666667
FB71/90FB71_06.wav,FB71,2.0048958333333333,96000,1.0024479166666667
FB71/90FB71_17.wav,FB71,2.003625,96000,1.0018125
FB71/90FB71_16.wav,FB71,2.003625,96000,1.0018125
FB71/FB71A.WAV,FB71,1.2203375,80000,0.61016875
FB71/90FB71_15.wav,FB71,2.003625,96000,1.0018125
FB71/90FB71_14.wav,FB71,2.003625,96000,1.0018125
FB71/90FB71_07.wav,FB71,2.0048958333333333,96000,1.0024479166666667
FB71/90FB71_19.wav,FB71,2.003625,96000,1.0018125
FB71/90FB71_05.wav,FB71,2.00625,96000,1.003125
FB71/90FB71_10.wav,FB71,2.0048958333333333,96000,1.0024479166666667
FB71/90FB71_09.wav,FB71,2.0048958333333333,96000,1.0024479166666667
FB71/90FB71_03.wav,FB71,2.00625,96000,1.003125
FB67/89FB67_12.wav,FB67,2.99975,96000,1.499875
FB67/89FB67_10.wav,FB67,3.0138333333333334,96000,1.5069166666666667
FB67/89FB67_20.wav,FB67,2.99975,96000,1.499875
FB67/89FB67_05.wav,FB67,2.99975,96000,1.499875
FB67/89FB67_17.wav,FB67,3.0138333333333334,96000,1.5069166666666667
FB67/89FB67_19.wav,FB67,2.99975,96000,1.499875
FB67/89FB67_08.wav,FB67,3.0138333333333334,96000,1.5069166666666667
FB67/89FB67_04.wav,FB67,3.0138333333333334,96000,1.5069166666666667
FB67/89FB67_14.wav,FB67,3.0138333333333334,96000,1.5069166666666667
FB67/89FB67_02.wav,FB67,2.9856666666666665,96000,1.4928333333333332
FB67/89FB67_01.wav,FB67,3.0138333333333334,96000,1.5069166666666667
FB67/89FB67_16.wav,FB67,2.99975,96000,1.499875
FB67/89FB67_18.wav,FB67,3.0138333333333334,96000,1.5069166666666667
FB67/89FB67_03.wav,FB67,2.99975,96000,1.499875
FB67/89FB67_07.wav,FB67,3.0138333333333334,96000,1.5069166666666667
FB67/89FB67_09.wav,FB67,3.0138333333333334,96000,1.5069166666666667
FB67/89FB67_11.wav,FB67,3.0138333333333334,96000,1.5069166666666667
FB67/89FB67_13.wav,FB67,3.0138333333333334,96000,1.5069166666666667
FB67/89FB67_06.wav,FB67,2.99975,96000,1.499875
FB67/89FB67_15.wav,FB67,3.0279166666666666,96000,1.5139583333333333
FB163/91FB163_20.wav,FB163,2.49903125,96000,1.249515625
FB163/91FB163_06.wav,FB163,2.510875,96000,1.2554375
FB163/FB163F_adjSR.wav,FB163,0.9566,80000,0.4783
FB163/FB163G_adjSR.wav,FB163,1.5,80000,0.75
FB163/91FB163_10.wav,FB163,2.49903125,96000,1.249515625
FB163/91FB163_07.wav,FB163,2.510875,96000,1.2554375
FB163/91FB163_01.wav,FB163,2.5108333333333333,96000,1.2554166666666666
FB163/91FB163_02.wav,FB163,2.489,96000,1.2445
FB163/91FB163_11.wav,FB163,2.510875,96000,1.2554375
FB163/91FB163_12.wav,FB163,2.49903125,96000,1.249515625
FB163/91FB163_13.wav,FB163,2.510875,96000,1.2554375
FB163/91FB163_15.wav,FB163,2.49903125,96000,1.249515625
FB163/91FB163_19.wav,FB163,2.49903125,96000,1.249515625
FB163/91FB163_09.wav,FB163,2.510875,96000,1.2554375
FB163/FB163K_adjSR.wav,FB163,1.3607875,80000,0.68039375
FB163/FB163D_adjSR.wav,FB163,1.8088125,80000,0.90440625
FB163/91FB163_04.wav,FB163,2.510875,96000,1.2554375
FB163/91FB163_03.wav,FB163,2.5108333333333333,96000,1.2554166666666666
FB163/91FB163_18.wav,FB163,2.510875,96000,1.2554375
FB163/91FB163_16.wav,FB163,2.49903125,96000,1.249515625
...@@ -4,4 +4,5 @@ california_thrashers ...@@ -4,4 +4,5 @@ california_thrashers
cassin_vireo cassin_vireo
black-headed_grosbeaks black-headed_grosbeaks
humpback humpback
zebra_finch dolphin
otter
humpback/humpback.csv 100644 → 100755
Source diff could not be displayed: it is too large. Options to address this: view the blob.
...@@ -41,12 +41,12 @@ meta = { ...@@ -41,12 +41,12 @@ meta = {
'sampleDur': 2 'sampleDur': 2
}, },
'humpback':{ 'humpback':{
'nfft': 512, 'nfft': 1024,
'sr': 11025, 'sr': 11025,
'sampleDur': 2 'sampleDur': 2
}, },
'humpback2':{ 'humpback2':{
'nfft': 512, 'nfft': 2048,
'sr': 11025, 'sr': 11025,
'sampleDur': 2 'sampleDur': 2
}, },
...@@ -57,12 +57,17 @@ meta = { ...@@ -57,12 +57,17 @@ meta = {
}, },
'otter':{ 'otter':{
'nfft':2048, 'nfft':2048,
'sr':48000, 'sr':96000,
'sampleDur':0.5 'sampleDur':1
},
'dolphin':{
'nfft':512,
'sr':96000,
'sampleDur':2
} }
} }
vgg16 = torchmodels.vgg16(pretrained=True) # weights=torchmodels.VGG16_Weights.DEFAULT) vgg16 = torchmodels.vgg16(weights=torchmodels.VGG16_Weights.DEFAULT)
vgg16 = vgg16.features[:13] vgg16 = vgg16.features[:13]
for nm, mod in vgg16.named_modules(): for nm, mod in vgg16.named_modules():
if isinstance(mod, nn.MaxPool2d): if isinstance(mod, nn.MaxPool2d):
...@@ -82,6 +87,13 @@ frontend = { ...@@ -82,6 +87,13 @@ frontend = {
nn.InstanceNorm2d(1), nn.InstanceNorm2d(1),
u.Croper2D(n_mel, 128) u.Croper2D(n_mel, 128)
), ),
'logMel_vggish': lambda sr, nfft, sampleDur, n_mel : nn.Sequential(
STFT(nfft, int((sampleDur*sr - nfft)/96)),
MelFilter(sr, nfft, n_mel, 0, sr//2),
Log1p(7, trainable=False),
nn.InstanceNorm2d(1),
u.Croper2D(n_mel, 128)
),
'logSTFT': lambda sr, nfft, sampleDur, n_mel : nn.Sequential( 'logSTFT': lambda sr, nfft, sampleDur, n_mel : nn.Sequential(
STFT(nfft, int((sampleDur*sr - nfft)/128)), STFT(nfft, int((sampleDur*sr - nfft)/128)),
Log1p(7, trainable=False), Log1p(7, trainable=False),
...@@ -117,8 +129,28 @@ sparrow_VQ_encoder = lambda nfeat, shape : nn.Sequential( ...@@ -117,8 +129,28 @@ sparrow_VQ_encoder = lambda nfeat, shape : nn.Sequential(
u.Reshape(nfeat * shape[0] * shape[1]) u.Reshape(nfeat * shape[0] * shape[1])
) )
sparrow_encoder_maxPool = lambda nfeat, shape : nn.Sequential(
nn.Conv2d(1, 32, 3, bias=False, padding=1),
nn.BatchNorm2d(32),
nn.ReLU(True),
nn.Conv2d(32, 32, 3, bias=False, padding=1),
nn.BatchNorm2d(32),
nn.MaxPool2d((4, 4)),
nn.ReLU(True),
nn.Conv2d(32, 32, 3, bias=False, padding=1),
nn.BatchNorm2d(32),
nn.ReLU(True),
nn.Conv2d(32, 32, 3, bias=False, padding=1),
nn.BatchNorm2d(32),
nn.ReLU(True),
nn.Conv2d(32, nfeat, 3, bias=False, padding=1),
nn.MaxPool2d((4, 4)),
u.Reshape(nfeat * shape[0] * shape[1])
)
sparrow_encoder = lambda nfeat, shape : nn.Sequential( sparrow_encoder = lambda nfeat, shape : nn.Sequential(
nn.Conv2d(1, 32, 3, stride=2, bias=False, padding=(1)), nn.Conv2d(1, 32, 3, stride=2, bias=False, padding=1),
nn.BatchNorm2d(32), nn.BatchNorm2d(32),
nn.ReLU(True), nn.ReLU(True),
nn.Conv2d(32, 64, 3, stride=2, bias=False, padding=1), nn.Conv2d(32, 64, 3, stride=2, bias=False, padding=1),
...@@ -177,7 +209,7 @@ sparrow_decoder = lambda nfeat, shape : nn.Sequential( ...@@ -177,7 +209,7 @@ sparrow_decoder = lambda nfeat, shape : nn.Sequential(
nn.Conv2d(32, 1, (3, 3), bias=False, padding=1), nn.Conv2d(32, 1, (3, 3), bias=False, padding=1),
nn.BatchNorm2d(1), nn.BatchNorm2d(1),
nn.ReLU(True), nn.ReLU(True),
nn.Conv2d(1, 1, (3, 3), bias=False, padding=1), nn.Conv2d(1, 1, (3, 3), bias=False, padding=1)
) )
sparrow_decoder_old = lambda nfeat, shape : nn.Sequential( sparrow_decoder_old = lambda nfeat, shape : nn.Sequential(
......
...@@ -2,6 +2,15 @@ import matplotlib.pyplot as plt ...@@ -2,6 +2,15 @@ import matplotlib.pyplot as plt
import pandas as pd, numpy as np import pandas as pd, numpy as np
species = np.loadtxt('good_species.txt', dtype=str) species = np.loadtxt('good_species.txt', dtype=str)
info = {
'bengalese_finch1': ['bengalese finch', 'nicholson2017bengalese', 'bird'],
'bengalese_finch2': ['bengalese finch', 'koumura2016birdsongrecognition', 'bird'],
'california_thrashers': ['california trashers', 'arriaga2015bird', 'bird'],
'cassin_vireo': ['cassin vireo', 'arriaga2015bird', 'bird'],
'black-headed_grosbeaks': ['black-headed grosbeaks', 'arriaga2015bird', 'bird'],
'zebra_finch': ['zebra finch', 'elie2018zebra', 'bird'],
'humpback': ['humpback whale', 'malige2021use', 'cetacean'],
}
fig, ax = plt.subplots(nrows=4, ncols=3, figsize=(10, 10)) fig, ax = plt.subplots(nrows=4, ncols=3, figsize=(10, 10))
for i, specie in enumerate(species): for i, specie in enumerate(species):
...@@ -12,10 +21,10 @@ plt.tight_layout() ...@@ -12,10 +21,10 @@ plt.tight_layout()
plt.savefig('annot_distrib.pdf') plt.savefig('annot_distrib.pdf')
a = "Specie & \# Classes & \# Annotated samples & \# Samples & Proportion of annotations\\\\ \hline \n" a = "Specie & \# Classes & \# Samples & Annotations \% \\\\ \hline \n"
for specie in species: for specie in species:
df = pd.read_csv(f'{specie}/{specie}.csv') df = pd.read_csv(f'{specie}/{specie}.csv')
a += f"{specie.replace('_',' ')} & {df.label.nunique()} & {(~df.label.isna()).sum()} & {len(df)} & {int(100*(~df.label.isna()).sum()/len(df))} \\\\ \hline \n" a += f"{info[specie][0]} \cite{{{info[specie][1]}}} & {df.label.nunique()} & {len(df)} & {int(100*(~df.label.isna()).sum()/len(df))} \\\\ \hline \n"
f = open('annot_distrib.tex', 'w') f = open('annot_distrib.tex', 'w')
f.write(a) f.write(a)
f.close() f.close()
...@@ -5,7 +5,7 @@ import numpy as np ...@@ -5,7 +5,7 @@ import numpy as np
from sklearn import metrics from sklearn import metrics
species = np.loadtxt('good_species.txt', dtype=str) species = np.loadtxt('good_species.txt', dtype=str)
frontends = ['16_pcenMel128', '16_logMel128', '16_logSTFT', '16_Mel128', '8_pcenMel64', '32_pcenMel128'] frontends = ['16_logMel128', '16_logSTFT', '16_Mel128', '16_pcenMel128', '8_pcenMel64', '32_pcenMel128', '64_pcenMel128']
plt.figure() plt.figure()
for specie in species: for specie in species:
df = pd.read_csv(f'{specie}/{specie}.csv') df = pd.read_csv(f'{specie}/{specie}.csv')
...@@ -30,3 +30,4 @@ plt.grid() ...@@ -30,3 +30,4 @@ plt.grid()
plt.tight_layout() plt.tight_layout()
plt.legend() plt.legend()
plt.savefig('NMIs_hdbscan.pdf') plt.savefig('NMIs_hdbscan.pdf')
plt.close()
...@@ -8,7 +8,7 @@ import torch ...@@ -8,7 +8,7 @@ import torch
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument("specie", type=str) parser.add_argument("specie", type=str)
parser.add_argument("-frontend", type=str, default='logMel') parser.add_argument("-frontend", type=str, default='logMel')
parser.add_argument("-nMel", type=int, default=64) parser.add_argument("-nMel", type=int, default=128)
args = parser.parse_args() args = parser.parse_args()
meta = models.meta[args.specie] meta = models.meta[args.specie]
......
...@@ -12,8 +12,8 @@ args = parser.parse_args() ...@@ -12,8 +12,8 @@ args = parser.parse_args()
df = pd.read_csv(f'{args.specie}/{args.specie}.csv') df = pd.read_csv(f'{args.specie}/{args.specie}.csv')
def norm(arr): norm = lambda arr: (arr - np.mean(arr) ) / np.std(arr)
return (arr - np.mean(arr) ) / np.std(arr)
meta = models.meta[args.specie] meta = models.meta[args.specie]
feats = ['fund', 'cvfund', 'maxfund', 'minfund', 'meansal', 'meanspect', 'stdspect', 'skewspect',\ feats = ['fund', 'cvfund', 'maxfund', 'minfund', 'meansal', 'meanspect', 'stdspect', 'skewspect',\
...@@ -32,7 +32,7 @@ def process(idx): ...@@ -32,7 +32,7 @@ def process(idx):
if fs != meta['sr']: if fs != meta['sr']:
sig = resample(sig, int(len(sig)/fs*meta['sr'])) sig = resample(sig, int(len(sig)/fs*meta['sr']))
sound = BioSound(soundWave=norm(sig), fs=fs) sound = BioSound(soundWave=norm(sig), fs=fs)
sound.spectroCalc(max_freq=meta['sr']//2) sound.spectroCalc(max_freq=meta['sr']//2, spec_sample_rate=128//meta['sampleDur'])
sound.rms = sound.sound.std() sound.rms = sound.sound.std()
sound.ampenv(cutoff_freq = 20, amp_sample_rate = 1000) sound.ampenv(cutoff_freq = 20, amp_sample_rate = 1000)
sound.spectrum(f_high=meta['sr']//2 - 1) sound.spectrum(f_high=meta['sr']//2 - 1)
...@@ -43,9 +43,9 @@ def process(idx): ...@@ -43,9 +43,9 @@ def process(idx):
return [sound.__dict__[f] for f in feats] return [sound.__dict__[f] for f in feats]
res = p_tqdm.p_map(process, df.index[:10]) res = p_tqdm.p_map(process, df.index[:100], num_cpus=16)
for i, mr in zip(df.index[:10], res): for i, mr in zip(df.index[:100], res):
for f, r in zip(feats, mr): for f, r in zip(feats, mr):
df.loc[i, f] = r df.loc[i, f] = r
......
# Embed one species' vocalisations with pretrained VGGish, project the
# embeddings to 2-D with UMAP, cluster with HDBSCAN, and compare the clusters
# against expert annotations (silhouette, NMI, homogeneity, completeness,
# V-measure, per-label best-cluster precision/recall).
from sklearn import metrics
import matplotlib.pyplot as plt
import umap, hdbscan
from tqdm import tqdm
import argparse, os
import models, utils as u
import pandas as pd, numpy as np, torch
# Avoid "too many open files" issues with multi-worker DataLoaders.
torch.multiprocessing.set_sharing_strategy('file_system')
parser = argparse.ArgumentParser()
parser.add_argument("specie", type=str)
args = parser.parse_args()
# Per-species table: one row per vocalisation file ('fn'); 'label' holds the
# expert annotation and is NaN for unannotated samples.
df = pd.read_csv(f'{args.specie}/{args.specie}.csv')
meta = models.meta[args.specie]
if not os.path.isfile(f'{args.specie}/encodings_vggish.npy'):
    # No cached embeddings yet: compute them with the pretrained VGGish model.
    gpu = torch.device('cuda')
    frontend = models.frontend['logMel_vggish'](meta['sr'], meta['nfft'], meta['sampleDur'], 64)
    vggish = torch.hub.load('harritaylor/torchvggish', 'vggish')
    # vggish.preprocess = False
    vggish.postprocess = False  # keep raw embeddings (skip VGGish's PCA/quantisation post-processing)
    # NOTE(review): `model` is built but the loop below calls `vggish` directly
    # on the raw waveform, so `frontend` is effectively unused — confirm intent.
    model = torch.nn.Sequential(frontend, vggish).to(gpu)
    model.eval()
    # Samples loaded at 16 kHz / 1 s windows (VGGish's expected input rate);
    # batch_size=1 because vggish() is fed one waveform at a time.
    loader = torch.utils.data.DataLoader(u.Dataset(df, f'{args.specie}/audio/', 16000, 1), batch_size=1, shuffle=True, num_workers=8, collate_fn=u.collate_fn)
    with torch.no_grad():
        encodings, idxs = [], []
        for x, idx in tqdm(loader, desc='test '+args.specie, leave=False):
            # encoding = model(x.to(gpu))
            # vggish() does its own preprocessing from the raw waveform.
            encoding = vggish(x.numpy().squeeze(0), fs=16000)
            idxs.extend(idx)
            encodings.extend(encoding.cpu().detach())
    idxs, encodings = np.array(idxs), np.stack(encodings)
    # 2-D UMAP projection, used both for clustering and for the scatter plots.
    X = umap.UMAP(n_jobs=-1).fit_transform(encodings)
    # Cache everything so later runs skip the (slow) embedding step.
    np.save(f'{args.specie}/encodings_vggish.npy', {'idxs':idxs, 'encodings':encodings, 'umap':X})
else:
    # Reuse cached embeddings / projection from a previous run.
    dic = np.load(f'{args.specie}/encodings_vggish.npy', allow_pickle=True).item()
    idxs, encodings, X = dic['idxs'], dic['encodings'], dic['umap']
# Cluster the 2-D projection; HDBSCAN marks noise points with cluster -1.
clusters = hdbscan.HDBSCAN(min_cluster_size=50, min_samples=5, cluster_selection_epsilon=0.05, core_dist_n_jobs=-1, cluster_selection_method='leaf').fit_predict(X)
df.loc[idxs, 'cluster'] = clusters.astype(int)
# Boolean mask (aligned with idxs) of samples that carry an expert annotation.
mask = ~df.loc[idxs].label.isna()
#print('Found clusters : \n', pd.Series(clusters).value_counts())
# Scatter 1: points coloured by cluster id (noise in grey).
plt.figure(figsize=(20, 10))
plt.scatter(X[clusters==-1,0], X[clusters==-1,1], s=2, alpha=.2, color='Grey')
plt.scatter(X[clusters!=-1,0], X[clusters!=-1,1], s=2, c=clusters[clusters!=-1], cmap='tab20')
plt.tight_layout()
plt.savefig(f'{args.specie}/vggish_projection_clusters.png')
# Scatter 2: points coloured by expert label (unannotated in grey).
plt.figure(figsize=(20, 10))
plt.scatter(X[~mask,0], X[~mask,1], s=2, alpha=.2, color='Grey')
for l, grp in df.groupby('label'):
    plt.scatter(X[df.loc[idxs].label==l, 0], X[df.loc[idxs].label==l, 1], s=4, label=l)
plt.legend()
plt.tight_layout()
plt.savefig(f'{args.specie}/vggish_projection_labels.png')
# Agreement metrics between clusters and annotations, restricted to the
# annotated subset.
clusters, labels = clusters[mask], df.loc[idxs[mask]].label
print('Silhouette', metrics.silhouette_score(encodings[mask], clusters))
print('NMI', metrics.normalized_mutual_info_score(labels, clusters))
print('Homogeneity', metrics.homogeneity_score(labels, clusters))
print('Completeness', metrics.completeness_score(labels, clusters))
print('V-Measure', metrics.v_measure_score(labels, clusters))
labelled = df[~df.label.isna()]
# For each label, report the cluster whose members are most dominated by that
# label (highest precision), with its size, precision and recall.
for l, grp in labelled.groupby('label'):
    best = (grp.groupby('cluster').fn.count() / labelled.groupby('cluster').fn.count()).idxmax()
    print(f'Best precision for {l} is for cluster {best} with {(df.cluster==best).sum()} points, \
with precision {((labelled.cluster==best)&(labelled.label==l)).sum()/(labelled.cluster==best).sum():.2f} and recall {((labelled.cluster==best)&(labelled.label==l)).sum()/(labelled.label==l).sum():.2f}')
...@@ -7,6 +7,7 @@ from sklearn import metrics ...@@ -7,6 +7,7 @@ from sklearn import metrics
import umap, hdbscan import umap, hdbscan
torch.multiprocessing.set_sharing_strategy('file_system') torch.multiprocessing.set_sharing_strategy('file_system')
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument("specie", type=str) parser.add_argument("specie", type=str)
parser.add_argument("-bottleneck", type=int, default=16) parser.add_argument("-bottleneck", type=int, default=16)
...@@ -21,8 +22,6 @@ df = pd.read_csv(f'{args.specie}/{args.specie}.csv') ...@@ -21,8 +22,6 @@ df = pd.read_csv(f'{args.specie}/{args.specie}.csv')
print(f'Tests for model {modelname}') print(f'Tests for model {modelname}')
print(f'{len(df)} available vocs') print(f'{len(df)} available vocs')
loader = torch.utils.data.DataLoader(u.Dataset(df, f'{args.specie}/audio/', meta['sr'], meta['sampleDur']), batch_size=64, shuffle=True, num_workers=8, collate_fn=u.collate_fn)
if os.path.isfile(f'{args.specie}/encodings_{modelname[:-4]}npy'): if os.path.isfile(f'{args.specie}/encodings_{modelname[:-4]}npy'):
dic = np.load(f'{args.specie}/encodings_{modelname[:-4]}npy', allow_pickle=True).item() dic = np.load(f'{args.specie}/encodings_{modelname[:-4]}npy', allow_pickle=True).item()
idxs, encodings, X = dic['idxs'], dic['encodings'], dic['umap'] idxs, encodings, X = dic['idxs'], dic['encodings'], dic['umap']
...@@ -34,6 +33,7 @@ else: ...@@ -34,6 +33,7 @@ else:
model = torch.nn.Sequential(frontend, encoder, decoder).to(gpu) model = torch.nn.Sequential(frontend, encoder, decoder).to(gpu)
model.load_state_dict(torch.load(f'{args.specie}/{modelname}')) model.load_state_dict(torch.load(f'{args.specie}/{modelname}'))
model.eval() model.eval()
loader = torch.utils.data.DataLoader(u.Dataset(df, f'{args.specie}/audio/', meta['sr'], meta['sampleDur']), batch_size=64, shuffle=True, num_workers=8, collate_fn=u.collate_fn)
with torch.no_grad(): with torch.no_grad():
encodings, idxs = [], [] encodings, idxs = [], []
for x, idx in tqdm(loader, desc='test '+args.specie, leave=False): for x, idx in tqdm(loader, desc='test '+args.specie, leave=False):
...@@ -45,7 +45,7 @@ else: ...@@ -45,7 +45,7 @@ else:
X = umap.UMAP(n_jobs=-1).fit_transform(encodings) X = umap.UMAP(n_jobs=-1).fit_transform(encodings)
np.save(f'{args.specie}/encodings_{modelname[:-4]}npy', {'idxs':idxs, 'encodings':encodings, 'umap':X}) np.save(f'{args.specie}/encodings_{modelname[:-4]}npy', {'idxs':idxs, 'encodings':encodings, 'umap':X})
clusters = hdbscan.HDBSCAN(min_cluster_size=10, min_samples=5, cluster_selection_epsilon=0.0, core_dist_n_jobs=-1, cluster_selection_method='eom').fit_predict(X) clusters = hdbscan.HDBSCAN(min_cluster_size=50, min_samples=5, cluster_selection_epsilon=0.05, core_dist_n_jobs=-1, cluster_selection_method='leaf').fit_predict(X)
df.loc[idxs, 'cluster'] = clusters.astype(int) df.loc[idxs, 'cluster'] = clusters.astype(int)
mask = ~df.loc[idxs].label.isna() mask = ~df.loc[idxs].label.isna()
...@@ -74,7 +74,15 @@ print('Completeness', metrics.completeness_score(labels, clusters)) ...@@ -74,7 +74,15 @@ print('Completeness', metrics.completeness_score(labels, clusters))
print('V-Measure', metrics.v_measure_score(labels, clusters)) print('V-Measure', metrics.v_measure_score(labels, clusters))
labelled = df[~df.label.isna()] labelled = df[~df.label.isna()]
goodClusters = []
for l, grp in labelled.groupby('label'): for l, grp in labelled.groupby('label'):
best = (grp.groupby('cluster').fn.count() / labelled.groupby('cluster').fn.count()).idxmax() precisions = grp.groupby('cluster').fn.count() / labelled.groupby('cluster').fn.count()
best = precisions.idxmax()
goodClusters.extend(precisions[precisions > 0.9].index)
print(f'Best precision for {l} is for cluster {best} with {(df.cluster==best).sum()} points, \ print(f'Best precision for {l} is for cluster {best} with {(df.cluster==best).sum()} points, \
with precision {((labelled.cluster==best)&(labelled.label==l)).sum()/(labelled.cluster==best).sum():.2f} and recall {((labelled.cluster==best)&(labelled.label==l)).sum()/(labelled.label==l).sum():.2f}') with precision {((labelled.cluster==best)&(labelled.label==l)).sum()/(labelled.cluster==best).sum():.2f}\
and recall {((labelled.cluster==best)&(labelled.label==l)).sum()/(labelled.label==l).sum():.2f}')
print(f'{len(goodClusters)} clusters would sort {df.cluster.isin(goodClusters).sum()/len(df)*100:.0f}% of samples')
print(f'{len(goodClusters)/df.label.nunique():.1f} cluster per label in avg)')
\ No newline at end of file
...@@ -67,8 +67,10 @@ for epoch in range(100_000//len(loader)): ...@@ -67,8 +67,10 @@ for epoch in range(100_000//len(loader)):
if len(loss) > 2000 and np.median(loss[-2000:-1000]) < np.median(loss[-1000:]): if len(loss) > 2000 and np.median(loss[-2000:-1000]) < np.median(loss[-1000:]):
print('Early stop') print('Early stop')
torch.save(model.state_dict(), f'{args.specie}/{modelname}')
exit() exit()
step += 1
continue
# TEST ROUTINE # TEST ROUTINE
if step % 500 == 0: if step % 500 == 0:
# Plot reconstructions # Plot reconstructions
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment