diff --git a/EMT/EMT_notebooks/Rebuild_Huddling_events.ipynb b/EMT/EMT_notebooks/Rebuild_Huddling_events.ipynb index e85cbaa3f844d86a6f975a3cdf9a9764d795ea36..9bc5997aaed664fa67a1f1943e8273851cb56989 100644 --- a/EMT/EMT_notebooks/Rebuild_Huddling_events.ipynb +++ b/EMT/EMT_notebooks/Rebuild_Huddling_events.ipynb @@ -39,7 +39,8 @@ "\n", "# set this to false if you want to set manual parameters.\n", "automaticSettings = True\n", - "oldRoundness=False\n", + "# Use 'old', 'new' or 'parallel'\n", + "mode = \"new\"\n", "# Manual parameters:\n", "\n", "''' minT and maxT to process the analysis (expressed in frame) '''\n", @@ -78,7 +79,7 @@ "from lmtanalysis.Animal import *\n", "from lmtanalysis.Event import *\n", "from lmtanalysis.Measure import *\n", - "from lmtanalysis import BuildEventHuddling, BuildDataBaseIndex\n", + "from lmtanalysis import BuildEventHuddling, BuildEventHuddling_oldv, BuildEventHuddling_parallel, BuildDataBaseIndex\n", "from lmtanalysis.FileUtil import getFilesToProcess\n", "from lmtanalysis.EventTimeLineCache import flushEventTimeLineCache, disableEventTimeLineCache\n", "from lmtanalysis.AnimalType import AnimalType\n", @@ -97,7 +98,7 @@ " chrono.printTimeInS()\n", "\n", "\n", - "def processTimeWindow(file, connection, currentMinT , currentMaxT, oldRoundness=False):\n", + "def processTimeWindow(file, connection, currentMinT , currentMaxT, mode=\"new\"):\n", "\n", " animalPool = None\n", " flushEventTimeLineCache()\n", @@ -110,15 +111,29 @@ " print(\"Caching load of animal detection done.\")\n", "\n", " chrono = Chronometer(str(BuildEventHuddling))\n", - " BuildEventHuddling.reBuildEvent(connection, file,\n", + " if \"old\" in mode.lower():\n", + " BuildEventHuddling_oldv.reBuildEvent(connection, file,\n", " tmin=currentMinT,\n", " tmax=currentMaxT,\n", " pool = animalPool,\n", - " animalType = AnimalType.MOUSE,\n", - " oldRoundness = oldRoundness)\n", + " animalType = AnimalType.MOUSE)\n", + " elif \"new\" in mode.lower():\n", + " 
BuildEventHuddling.reBuildEvent(connection, file,\n", + " tmin=currentMinT,\n", + " tmax=currentMaxT,\n", + " pool = animalPool,\n", + " animalType = AnimalType.MOUSE)\n", + " elif \"parallel\" in mode.lower():\n", + " BuildEventHuddling_parallel.reBuildEvent(connection, file,\n", + " tmin=currentMinT,\n", + " tmax=currentMaxT,\n", + " pool = animalPool,\n", + " animalType = AnimalType.MOUSE)\n", + " else:\n", + " raise NotImplementedError(\"This mode does not exist. Use 'old', 'new' or 'parallel'\")\n", " chrono.printTimeInS()\n", "\n", - "def process(file, oldRoundness= False):\n", + "def process(file, mode=\"new\"):\n", "\n", " print(file)\n", "\n", @@ -158,7 +173,7 @@ " currentMaxT = maxT\n", "\n", " chronoTimeWindowFile = Chronometer(\"File \"+ file+ \" currentMinT: \"+ str(currentMinT)+ \" currentMaxT: \" + str(currentMaxT));\n", - " processTimeWindow(file, connection, currentMinT, currentMaxT, oldRoundness= oldRoundness)\n", + " processTimeWindow(file, connection, currentMinT, currentMaxT, mode=mode)\n", " chronoTimeWindowFile.printTimeInS()\n", "\n", " currentT += windowT\n", @@ -197,7 +212,7 @@ " for file in files:\n", " try:\n", " print (\"Processing file\" , file)\n", - " process(file, oldRoundness = oldRoundness)\n", + " process(file, mode=mode)\n", " except FileProcessException:\n", " print (\"STOP PROCESSING FILE \" + file , file=sys.stderr)\n", "\n", diff --git a/EMT/EMT_notebooks/Rebuild_SAP_events.ipynb b/EMT/EMT_notebooks/Rebuild_SAP_events.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..edf086a03d64b439e297d35a935c49b7d8ff2681 --- /dev/null +++ b/EMT/EMT_notebooks/Rebuild_SAP_events.ipynb @@ -0,0 +1,237 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Rebuild SAP Events\n", + " This script builds/rebuilds SAP events of Live Mouse Tracker\n", + " Allowing for a computation of this event only when needed, once it's heavily time-consuming" + ] + }, + { + "cell_type": "markdown", + 
"metadata": {}, + "source": [ + "# Parameters:\n", + " This notebook will compute automatically the SAP events for your database, and adjust settings for you.\n", + " \n", + " If you remove the automatic settings, you can force parameters:\n", + " Set minT and maxT to process the database.\n", + " Set windowT to divide the computation load in segment of that duration. Default value is 1 day." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from lmtanalysis.Measure import *\n", + "\n", + "# set this to false if you want to set manual parameters.\n", + "automaticSettings = True\n", + "# Use 'old', 'new' or 'parallel'\n", + "mode = \"new\"\n", + "# Manual parameters:\n", + "\n", + "''' minT and maxT to process the analysis (expressed in frame) '''\n", + "minT = 0\n", + "maxT = 1*oneDay\n", + "''' time window to compute the events (in frame). '''\n", + "windowT = 1*oneDay\n", + "''' speed up process '''\n", + "USE_CACHE_LOAD_DETECTION_CACHE = True" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Run this section to compute your database\n", + " You will be prompt to provide a database" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "''' Created on 26 march 2019 @author: Fab\n", + " Adapted on 28 november 2023 @author: Raul '''\n", + "from psutil import virtual_memory\n", + "import sqlite3\n", + "import sys\n", + "import traceback\n", + "\n", + "from lmtanalysis.TaskLogger import TaskLogger\n", + "from lmtanalysis.Animal import *\n", + "from lmtanalysis.Event import *\n", + "from lmtanalysis import BuildEventSAP, BuildEventSAP_oldv, BuildEventSAP_parallel, BuildDataBaseIndex\n", + "from lmtanalysis.FileUtil import getFilesToProcess\n", + "from lmtanalysis.EventTimeLineCache import flushEventTimeLineCache, disableEventTimeLineCache\n", + "from lmtanalysis.AnimalType import AnimalType\n", + 
"from lmtanalysis.Util import getNumberOfFrames\n", + "\n", + "class FileProcessException(Exception):\n", + " pass\n", + "\n", + "\n", + "def flushEvents( connection ):\n", + "\n", + " print(\"Flushing events...\")\n", + "\n", + " chrono = Chronometer(\"Flushing event \" + str(BuildEventSAP))\n", + " BuildEventSAP.flush(connection);\n", + " chrono.printTimeInS()\n", + "\n", + "\n", + "def processTimeWindow(file, connection, currentMinT , currentMaxT, mode=\"new\"):\n", + "\n", + " animalPool = None\n", + " flushEventTimeLineCache()\n", + "\n", + " if (USE_CACHE_LOAD_DETECTION_CACHE):\n", + " print(\"Caching load of animal detection...\")\n", + " animalPool = AnimalPool()\n", + " animalPool.loadAnimals(connection)\n", + " animalPool.loadDetection(start = currentMinT, end = currentMaxT)\n", + " print(\"Caching load of animal detection done.\")\n", + "\n", + " chrono = Chronometer(str(BuildEventSAP))\n", + " if \"old\" in mode.lower():\n", + " BuildEventSAP_oldv.reBuildEvent(connection, file,\n", + " tmin=currentMinT,\n", + " tmax=currentMaxT,\n", + " pool = animalPool,\n", + " animalType = AnimalType.MOUSE)\n", + " elif \"new\" in mode.lower():\n", + " BuildEventSAP.reBuildEvent(connection, file,\n", + " tmin=currentMinT,\n", + " tmax=currentMaxT,\n", + " pool = animalPool,\n", + " animalType = AnimalType.MOUSE)\n", + " elif \"parallel\" in mode.lower():\n", + " BuildEventSAP_parallel.reBuildEvent(connection, file,\n", + " tmin=currentMinT,\n", + " tmax=currentMaxT,\n", + " pool = animalPool,\n", + " animalType = AnimalType.MOUSE)\n", + " else:\n", + " raise NotImplementedError(\"This mode does not exist. 
Use 'old', 'new' or 'parallel'\")\n", + " chrono.printTimeInS()\n", + "\n", + "def process(file, mode=\"new\"):\n", + "\n", + " print(file)\n", + "\n", + " if automaticSettings:\n", + " print(\"Automatic settings.\")\n", + " windowT = 1*oneDay\n", + " minT = 0\n", + " maxT = getNumberOfFrames(file)\n", + " print (\"Auto max set to\" , maxT , \"frames\")\n", + "\n", + "\n", + " chronoFullFile = Chronometer(\"File \" + file)\n", + "\n", + " connection = sqlite3.connect(file)\n", + "\n", + " # update missing fields\n", + " try:\n", + " c = connection.cursor()\n", + " query = \"ALTER TABLE EVENT ADD METADATA TEXT\";\n", + " c.execute(query)\n", + " connection.commit()\n", + " except:\n", + " print(\"METADATA field already exists\" , file)\n", + "\n", + " # Build Index\n", + " BuildDataBaseIndex.buildDataBaseIndex(connection, force=False)\n", + "\n", + " currentT = minT\n", + "\n", + " try:\n", + " flushEvents(connection)\n", + "\n", + " while currentT < maxT:\n", + " currentMinT = currentT\n", + " currentMaxT = currentT+ windowT\n", + " if (currentMaxT > maxT):\n", + " currentMaxT = maxT\n", + "\n", + " chronoTimeWindowFile = Chronometer(\"File \"+ file+ \" currentMinT: \"+ str(currentMinT)+ \" currentMaxT: \" + str(currentMaxT));\n", + " processTimeWindow(file, connection, currentMinT, currentMaxT, mode=mode)\n", + " chronoTimeWindowFile.printTimeInS()\n", + "\n", + " currentT += windowT\n", + "\n", + " print(\"Full file process time: \")\n", + " chronoFullFile.printTimeInS()\n", + " except:\n", + " exc_type, exc_value, exc_traceback = sys.exc_info()\n", + " lines = traceback.format_exception(exc_type, exc_value, exc_traceback)\n", + " error = ''.join('!! 
' + line for line in lines)\n", + "\n", + " t = TaskLogger(connection)\n", + " t.addLog(error)\n", + "\n", + " print(error, file=sys.stderr)\n", + "\n", + " raise FileProcessException()\n", + "\n", + "print(\"Code launched.\")\n", + "\n", + "mem = virtual_memory()\n", + "availableMemoryGB = mem.total / 1000000000\n", + "print(\"Total memory on computer: (GB)\", availableMemoryGB)\n", + "\n", + "if availableMemoryGB < 10:\n", + " print(\"Not enough memory to use cache load of events.\")\n", + " disableEventTimeLineCache()\n", + "\n", + "print(\"A window is popping (maybe hidden) asking for files to process...\")\n", + "files = getFilesToProcess()\n", + "\n", + "chronoFullBatch = Chronometer(\"Full batch\")\n", + "\n", + "if ( files != None ):\n", + " for file in files:\n", + " try:\n", + " print (\"Processing file\" , file)\n", + " process(file, mode=mode)\n", + " except FileProcessException:\n", + " print (\"STOP PROCESSING FILE \" + file , file=sys.stderr)\n", + "\n", + " flushEventTimeLineCache()\n", + "\n", + "chronoFullBatch.printTimeInS()\n", + "print(\"*** ALL JOBS DONE ***\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "LMT", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/EMT/EMT_notebooks/Rebuild_WallJump_events.ipynb b/EMT/EMT_notebooks/Rebuild_WallJump_events.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..f7482ea61838631fa099dd01e25a1b7d0ec9f288 --- /dev/null +++ b/EMT/EMT_notebooks/Rebuild_WallJump_events.ipynb @@ -0,0 +1,238 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Rebuild WallJump Events\n", + " This script builds/rebuilds WallJump events of Live 
Mouse Tracker\n", + " Allowing for a computation of this event only when needed, once it's heavily time-consuming" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Parameters:\n", + " This will compute automatically the WallJump events for your database, and adjust settings for you.\n", + " \n", + " If you remove the automatic settings, you can force parameters:\n", + " Set minT and maxT to process the database.\n", + " Set windowT to divide the computation load in segment of that duration. Default value is 1 day." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from lmtanalysis.Measure import *\n", + "\n", + "# set this to false if you want to set manual parameters.\n", + "automaticSettings = True\n", + "# Use 'old', 'new' or 'parallel'\n", + "mode = \"new\"\n", + "# Manual parameters:\n", + "\n", + "''' minT and maxT to process the analysis (expressed in frame) '''\n", + "minT = 0\n", + "maxT = 1*oneDay\n", + "''' time window to compute the events (in frame). 
'''\n", + "windowT = 1*oneDay\n", + "''' speed up process '''\n", + "USE_CACHE_LOAD_DETECTION_CACHE = True" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Run this section to compute your database\n", + " You will be prompt to provide a database" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "''' Created on 26 march 2019 @author: Fab\n", + " Adapted on 28 november 2023 @author: Raul '''\n", + "from psutil import virtual_memory\n", + "import sqlite3\n", + "import sys\n", + "import traceback\n", + "\n", + "from lmtanalysis.TaskLogger import TaskLogger\n", + "from lmtanalysis.Animal import *\n", + "from lmtanalysis.Event import *\n", + "from lmtanalysis.Measure import *\n", + "from lmtanalysis import BuildEventWallJump, BuildEventWallJump_oldv, BuildEventWallJump_parallel, BuildDataBaseIndex\n", + "from lmtanalysis.FileUtil import getFilesToProcess\n", + "from lmtanalysis.EventTimeLineCache import flushEventTimeLineCache, disableEventTimeLineCache\n", + "from lmtanalysis.AnimalType import AnimalType\n", + "from lmtanalysis.Util import getNumberOfFrames\n", + "\n", + "class FileProcessException(Exception):\n", + " pass\n", + "\n", + "\n", + "def flushEvents( connection ):\n", + "\n", + " print(\"Flushing events...\")\n", + "\n", + " chrono = Chronometer(\"Flushing event \" + str(BuildEventWallJump))\n", + " BuildEventWallJump.flush(connection);\n", + " chrono.printTimeInS()\n", + "\n", + "\n", + "def processTimeWindow(file, connection, currentMinT , currentMaxT, mode=\"new\"):\n", + "\n", + " animalPool = None\n", + " flushEventTimeLineCache()\n", + "\n", + " if (USE_CACHE_LOAD_DETECTION_CACHE):\n", + " print(\"Caching load of animal detection...\")\n", + " animalPool = AnimalPool()\n", + " animalPool.loadAnimals(connection)\n", + " animalPool.loadDetection(start = currentMinT, end = currentMaxT)\n", + " print(\"Caching load of animal detection 
done.\")\n", + "\n", + " chrono = Chronometer(str(BuildEventWallJump))\n", + " if \"old\" in mode.lower():\n", + " BuildEventWallJump_oldv.reBuildEvent(connection, file,\n", + " tmin=currentMinT,\n", + " tmax=currentMaxT,\n", + " pool = animalPool,\n", + " animalType = AnimalType.MOUSE)\n", + " elif \"new\" in mode.lower():\n", + " BuildEventWallJump.reBuildEvent(connection, file,\n", + " tmin=currentMinT,\n", + " tmax=currentMaxT,\n", + " pool = animalPool,\n", + " animalType = AnimalType.MOUSE)\n", + " elif \"parallel\" in mode.lower():\n", + " BuildEventWallJump_parallel.reBuildEvent(connection, file,\n", + " tmin=currentMinT,\n", + " tmax=currentMaxT,\n", + " pool = animalPool,\n", + " animalType = AnimalType.MOUSE)\n", + " else:\n", + " raise NotImplementedError(\"This mode does not exist. Use 'old', 'new' or 'parallel'\")\n", + " chrono.printTimeInS()\n", + "\n", + "def process(file, mode=\"new\"):\n", + "\n", + " print(file)\n", + "\n", + " if automaticSettings:\n", + " print(\"Automatic settings.\")\n", + " windowT = 1*oneDay\n", + " minT = 0\n", + " maxT = getNumberOfFrames(file)\n", + " print (\"Auto max set to\" , maxT , \"frames\")\n", + "\n", + "\n", + " chronoFullFile = Chronometer(\"File \" + file)\n", + "\n", + " connection = sqlite3.connect(file)\n", + "\n", + " # update missing fields\n", + " try:\n", + " c = connection.cursor()\n", + " query = \"ALTER TABLE EVENT ADD METADATA TEXT\";\n", + " c.execute(query)\n", + " connection.commit()\n", + " except:\n", + " print(\"METADATA field already exists\" , file)\n", + "\n", + " # Build Index\n", + " BuildDataBaseIndex.buildDataBaseIndex(connection, force=False)\n", + "\n", + " currentT = minT\n", + "\n", + " try:\n", + " flushEvents(connection)\n", + "\n", + " while currentT < maxT:\n", + " currentMinT = currentT\n", + " currentMaxT = currentT+ windowT\n", + " if (currentMaxT > maxT):\n", + " currentMaxT = maxT\n", + "\n", + " chronoTimeWindowFile = Chronometer(\"File \"+ file+ \" currentMinT: \"+ 
str(currentMinT)+ \" currentMaxT: \" + str(currentMaxT));\n", + " processTimeWindow(file, connection, currentMinT, currentMaxT, mode=mode)\n", + " chronoTimeWindowFile.printTimeInS()\n", + "\n", + " currentT += windowT\n", + "\n", + " print(\"Full file process time: \")\n", + " chronoFullFile.printTimeInS()\n", + " except:\n", + " exc_type, exc_value, exc_traceback = sys.exc_info()\n", + " lines = traceback.format_exception(exc_type, exc_value, exc_traceback)\n", + " error = ''.join('!! ' + line for line in lines)\n", + "\n", + " t = TaskLogger(connection)\n", + " t.addLog(error)\n", + "\n", + " print(error, file=sys.stderr)\n", + "\n", + " raise FileProcessException()\n", + "\n", + "print(\"Code launched.\")\n", + "\n", + "mem = virtual_memory()\n", + "availableMemoryGB = mem.total / 1000000000\n", + "print(\"Total memory on computer: (GB)\", availableMemoryGB)\n", + "\n", + "if availableMemoryGB < 10:\n", + " print(\"Not enough memory to use cache load of events.\")\n", + " disableEventTimeLineCache()\n", + "\n", + "print(\"A window is popping (maybe hidden) asking for files to process...\")\n", + "files = getFilesToProcess()\n", + "\n", + "chronoFullBatch = Chronometer(\"Full batch\")\n", + "\n", + "if ( files != None ):\n", + " for file in files:\n", + " try:\n", + " print (\"Processing file\" , file)\n", + " process(file, mode=mode)\n", + " except FileProcessException:\n", + " print (\"STOP PROCESSING FILE \" + file , file=sys.stderr)\n", + "\n", + " flushEventTimeLineCache()\n", + "\n", + "chronoFullBatch.printTimeInS()\n", + "print(\"*** ALL JOBS DONE ***\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "LMT", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.9" + } + }, + "nbformat": 4, + 
"nbformat_minor": 2 +} diff --git a/EMT/EMT_notebooks/make_gif.ipynb b/EMT/EMT_notebooks/make_gif.ipynb index 34ec63c140b961a1997bf624345f8e979b89c20a..b5ecdf1ae0ad947bada08132697ddb5395601551 100644 --- a/EMT/EMT_notebooks/make_gif.ipynb +++ b/EMT/EMT_notebooks/make_gif.ipynb @@ -33,20 +33,17 @@ "import numpy as np\n", "\n", "from lmtanalysis.FileUtil import getDirectoryToProcess\n", + "from lmtanalysis.Measure import *\n", "\n", - "one_second = 30\n", - "one_minute = 60 * one_second\n", - "one_hour = 60 * one_minute\n", - "one_day = 24 * one_hour\n", "\n", "def frame2video(frame):\n", - " video_len = 10 * one_minute\n", + " video_len = 10 * oneMinute\n", " video = frame // video_len * video_len\n", " frame_video = (frame % video_len) // 2 # /2 because videos are 15 fps and frame counting at 30 fps\n", " return video, frame_video\n", "\n", "def time2frame(video, min, sec):\n", - " frame = video + min * one_minute + sec * one_second\n", + " frame = video + min * oneMinute + sec * oneSecond\n", " return frame\n", "\n", "def findFile(directory, framenumber):\n", @@ -120,7 +117,7 @@ " output_folder = Path(dir) / f\"frames_{start}-{stop}\"\n", " output_folder.mkdir(exist_ok=True)\n", " for i, frame in enumerate(frames):\n", - " cv2.imwrite(str(output_folder / f\"frame_{start+2*i}.png\"), np.array(frame))\n", + " cv2.imwrite(str(output_folder / f\"frame_{start+2*i}.png\"), cv2.cvtColor(np.array(frame), cv2.COLOR_RGBA2BGR))\n", "\n" ] }, diff --git a/EMT/EMT_scripts/Rebuild_Huddling_events_script.py b/EMT/EMT_scripts/Rebuild_Huddling_events_script.py index b19ceb387d2d634d9ed3800963cfbc69f3d5813b..da59cecc80d58f35221c52b1ccdb9feb81259139 100644 --- a/EMT/EMT_scripts/Rebuild_Huddling_events_script.py +++ b/EMT/EMT_scripts/Rebuild_Huddling_events_script.py @@ -15,7 +15,7 @@ from lmtanalysis.Animal import * from lmtanalysis.Event import * from lmtanalysis.Measure import * from lmtanalysis.Util import getNumberOfFrames -from lmtanalysis import BuildEventHuddling, 
BuildDataBaseIndex +from lmtanalysis import BuildEventHuddling, BuildEventHuddling_oldv, BuildEventHuddling_parallel, BuildDataBaseIndex from lmtanalysis.TaskLogger import TaskLogger from lmtanalysis.EventTimeLineCache import flushEventTimeLineCache,\ disableEventTimeLineCache @@ -36,7 +36,7 @@ def flushEvents(connection): chrono.printTimeInS() -def processTimeWindow(file, connection, currentMinT , currentMaxT, oldRoundness=False): +def processTimeWindow(file, connection, currentMinT , currentMaxT, mode="new"): animalPool = None flushEventTimeLineCache() @@ -49,15 +49,29 @@ def processTimeWindow(file, connection, currentMinT , currentMaxT, oldRoundness= print("Caching load of animal detection done.") chrono = Chronometer(str(BuildEventHuddling)) - BuildEventHuddling.reBuildEvent(connection, file, + if "old" in mode.lower(): + BuildEventHuddling_oldv.reBuildEvent(connection, file, tmin=currentMinT, tmax=currentMaxT, pool = animalPool, - animalType = AnimalType.MOUSE, - oldRoundness = oldRoundness) + animalType = AnimalType.MOUSE) + elif "new" in mode.lower(): + BuildEventHuddling.reBuildEvent(connection, file, + tmin=currentMinT, + tmax=currentMaxT, + pool = animalPool, + animalType = AnimalType.MOUSE) + elif "parallel" in mode.lower(): + BuildEventHuddling_parallel.reBuildEvent(connection, file, + tmin=currentMinT, + tmax=currentMaxT, + pool = animalPool, + animalType = AnimalType.MOUSE) + else: + raise NotImplementedError("This mode does not exist. 
Use 'old', 'new' or 'parallel'") chrono.printTimeInS() -def process(file, oldRoundness= False, windowT=oneDay, minT=0, maxT=None): +def process(file, mode="new", windowT=oneDay, minT=0, maxT=None): if maxT == None: maxT = getNumberOfFrames(file) @@ -91,7 +105,7 @@ def process(file, oldRoundness= False, windowT=oneDay, minT=0, maxT=None): currentMaxT = maxT chronoTimeWindowFile = Chronometer("File "+ file+ " currentMinT: "+ str(currentMinT)+ " currentMaxT: " + str(currentMaxT)) - processTimeWindow(file, connection, currentMinT, currentMaxT, oldRoundness= oldRoundness) + processTimeWindow(file, connection, currentMinT, currentMaxT, mode=mode) chronoTimeWindowFile.printTimeInS() currentT += windowT @@ -117,8 +131,8 @@ if __name__ == "__main__": argparser.add_argument("-sq" , "--sqlite_path", required=True, help="Path to sqlite file to be rebuilt. Only one at a time. Use batch files \ to run over multiple videos") - argparser.add_argument("-old" , "--old_roundness", action="store_true", - help="Use old roundness formula") + argparser.add_argument("-m" , "--mode", choices=["old", "new", "parallel"], + help="Execution mode") argparser.add_argument("-w" , "--window", default=oneDay, help="Window size") argparser.add_argument("-min" , "--min", default=0, @@ -142,7 +156,7 @@ if __name__ == "__main__": try: print ("Processing file" , file) - process(file, oldRoundness = args.old_roundness, windowT=args.window, minT=args.min, maxT=args.max) + process(file, mode=args.mode, windowT=args.window, minT=args.min, maxT=args.max) except FileProcessException: print ("STOP PROCESSING FILE ", file , file=sys.stderr) diff --git a/EMT/EMT_scripts/Rebuild_SAP_events_script.py b/EMT/EMT_scripts/Rebuild_SAP_events_script.py index e057ce61bbb54559f48c28a1ac7d0a96999c6832..acedfca3651ba796d59853ce6ff5025779c96943 100644 --- a/EMT/EMT_scripts/Rebuild_SAP_events_script.py +++ b/EMT/EMT_scripts/Rebuild_SAP_events_script.py @@ -15,7 +15,7 @@ from lmtanalysis.Animal import * from lmtanalysis.Event 
import * from lmtanalysis.Measure import * from lmtanalysis.Util import getNumberOfFrames -from lmtanalysis import BuildEventSAP, BuildEventSAP_oldv, BuildDataBaseIndex +from lmtanalysis import BuildEventSAP, BuildEventSAP_oldv, BuildEventSAP_parallel, BuildDataBaseIndex from lmtanalysis.TaskLogger import TaskLogger from lmtanalysis.EventTimeLineCache import flushEventTimeLineCache,\ disableEventTimeLineCache @@ -36,7 +36,7 @@ def flushEvents(connection): chrono.printTimeInS() -def processTimeWindow(file, connection, currentMinT , currentMaxT, oldSAP=False): +def processTimeWindow(file, connection, currentMinT , currentMaxT, mode="new"): animalPool = None flushEventTimeLineCache() @@ -49,21 +49,29 @@ def processTimeWindow(file, connection, currentMinT , currentMaxT, oldSAP=False) print("Caching load of animal detection done.") chrono = Chronometer(str(BuildEventSAP)) - if oldSAP: + if "old" in mode.lower(): BuildEventSAP_oldv.reBuildEvent(connection, file, tmin=currentMinT, tmax=currentMaxT, pool = animalPool, animalType = AnimalType.MOUSE) - else: - BuildEventSAP.reBuildEvent(connection, file, + elif "new" in mode.lower(): + BuildEventSAP.reBuildEvent(connection, file, + tmin=currentMinT, + tmax=currentMaxT, + pool = animalPool, + animalType = AnimalType.MOUSE) + elif "parallel" in mode.lower(): + BuildEventSAP_parallel.reBuildEvent(connection, file, tmin=currentMinT, tmax=currentMaxT, pool = animalPool, animalType = AnimalType.MOUSE) + else: + raise NotImplementedError("This mode does not exist. 
Use 'old', 'new' or 'parallel'") chrono.printTimeInS() -def process(file, oldSAP=False, windowT=oneDay, minT=0, maxT=None): +def process(file, mode="new", windowT=oneDay, minT=0, maxT=None): if maxT == None: maxT = getNumberOfFrames(file) @@ -97,7 +105,7 @@ def process(file, oldSAP=False, windowT=oneDay, minT=0, maxT=None): currentMaxT = maxT chronoTimeWindowFile = Chronometer("File "+ file+ " currentMinT: "+ str(currentMinT)+ " currentMaxT: " + str(currentMaxT)) - processTimeWindow(file, connection, currentMinT, currentMaxT, oldSAP=oldSAP) + processTimeWindow(file, connection, currentMinT, currentMaxT, mode=mode) chronoTimeWindowFile.printTimeInS() currentT += windowT @@ -123,8 +131,8 @@ if __name__ == "__main__": argparser.add_argument("-sq" , "--sqlite_path", required=True, help="Path to sqlite file to be rebuilt. Only one at a time. Use batch files \ to run over multiple videos") - argparser.add_argument("-old" , "--old_roundness", action="store_true", - help="Use old roundness formula") + argparser.add_argument("-m" , "--mode", choices=["old", "new", "parallel"], + help="Execution mode") argparser.add_argument("-w" , "--window", default=oneDay, help="Window size") argparser.add_argument("-min" , "--min", default=0, @@ -148,7 +156,7 @@ if __name__ == "__main__": try: print ("Processing file" , file) - process(file, oldRoundness = args.old_roundness, windowT=args.window, minT=args.min, maxT=args.max) + process(file, mode=args.mode, windowT=args.window, minT=args.min, maxT=args.max) except FileProcessException: print ("STOP PROCESSING FILE ", file , file=sys.stderr) diff --git a/EMT/EMT_scripts/Rebuild_WallJump_events_script.py b/EMT/EMT_scripts/Rebuild_WallJump_events_script.py index 8b7566fdcd4cd61eb778cc2051a3a2d7e3ec0a09..e3b13be29cc0071040d8c83d98444f1a1ee8f44d 100644 --- a/EMT/EMT_scripts/Rebuild_WallJump_events_script.py +++ b/EMT/EMT_scripts/Rebuild_WallJump_events_script.py @@ -15,7 +15,7 @@ from lmtanalysis.Animal import * from lmtanalysis.Event import 
* from lmtanalysis.Measure import * from lmtanalysis.Util import getNumberOfFrames -from lmtanalysis import BuildEventWallJump, BuildEventWallJump_oldv, BuildDataBaseIndex +from lmtanalysis import BuildEventWallJump, BuildEventWallJump_oldv, BuildEventWallJump_parallel, BuildDataBaseIndex from lmtanalysis.TaskLogger import TaskLogger from lmtanalysis.EventTimeLineCache import flushEventTimeLineCache,\ disableEventTimeLineCache @@ -36,7 +36,7 @@ def flushEvents(connection): chrono.printTimeInS() -def processTimeWindow(file, connection, currentMinT , currentMaxT, oldWallJump=False): +def processTimeWindow(file, connection, currentMinT , currentMaxT, mode="new"): animalPool = None flushEventTimeLineCache() @@ -49,21 +49,29 @@ def processTimeWindow(file, connection, currentMinT , currentMaxT, oldWallJump=F print("Caching load of animal detection done.") chrono = Chronometer(str(BuildEventWallJump)) - if oldWallJump: + if "old" in mode.lower(): BuildEventWallJump_oldv.reBuildEvent(connection, file, tmin=currentMinT, tmax=currentMaxT, pool = animalPool, animalType = AnimalType.MOUSE) - else: + elif "new" in mode.lower(): BuildEventWallJump.reBuildEvent(connection, file, tmin=currentMinT, tmax=currentMaxT, pool = animalPool, animalType = AnimalType.MOUSE) + elif "parallel" in mode.lower(): + BuildEventWallJump_parallel.reBuildEvent(connection, file, + tmin=currentMinT, + tmax=currentMaxT, + pool = animalPool, + animalType = AnimalType.MOUSE) + else: + raise NotImplementedError("This mode does not exist. 
Use 'old', 'new' or 'parallel'") chrono.printTimeInS() -def process(file, oldWallJump=False, windowT=oneDay, minT=0, maxT=None): +def process(file, mode="new", windowT=oneDay, minT=0, maxT=None): if maxT == None: maxT = getNumberOfFrames(file) @@ -97,7 +105,7 @@ def process(file, oldWallJump=False, windowT=oneDay, minT=0, maxT=None): currentMaxT = maxT chronoTimeWindowFile = Chronometer("File "+ file+ " currentMinT: "+ str(currentMinT)+ " currentMaxT: " + str(currentMaxT)) - processTimeWindow(file, connection, currentMinT, currentMaxT, oldWallJump=oldWallJump) + processTimeWindow(file, connection, currentMinT, currentMaxT, mode=mode) chronoTimeWindowFile.printTimeInS() currentT += windowT @@ -123,8 +131,8 @@ if __name__ == "__main__": argparser.add_argument("-sq" , "--sqlite_path", required=True, help="Path to sqlite file to be rebuilt. Only one at a time. Use batch files \ to run over multiple videos") - argparser.add_argument("-old" , "--old_roundness", action="store_true", - help="Use old roundness formula") + argparser.add_argument("-m" , "--mode", choices=["old", "new", "parallel"], + help="Execution mode") argparser.add_argument("-w" , "--window", default=oneDay, help="Window size") argparser.add_argument("-min" , "--min", default=0, diff --git a/EMT/EMT_scripts/run_EMT_pipeline.py b/EMT/EMT_scripts/run_EMT_pipeline.py index 0203b595ac12773fbd3f954d2ebd5caa1437f19e..7b529d9203897f6645622a9d8fc5a5743c71aa2f 100644 --- a/EMT/EMT_scripts/run_EMT_pipeline.py +++ b/EMT/EMT_scripts/run_EMT_pipeline.py @@ -29,13 +29,13 @@ if __name__ == "__main__": rebuildLMT.process(args.sqlite_path) ### REBUILD HUDDLING ### - rebuildHuddling.process(args.sqlite_path, oldRoundness=False) + rebuildHuddling.process(args.sqlite_path, mode="parallel") ### REBUILD SAP ### - rebuildSAP.process(args.sqlite_path, oldSAP=False) + rebuildSAP.process(args.sqlite_path, mode="parallel") ### REBUILD WALLJUMP ### - rebuildWallJump.process(args.sqlite_path, oldWallJump=False) + 
rebuildWallJump.process(args.sqlite_path, mode="parallel") ### REBUILD CONTACTS FEEDERS/DRINKER ### feeders_file = Path(args.sqlite_path).parent / "feeders.txt" diff --git a/EMT/__init__.py b/EMT/__init__.py index 17484b23783311f1998e0424d67d3e41d235108c..d448f714c0d4eed74b8b0675f616cba73054d32c 100644 --- a/EMT/__init__.py +++ b/EMT/__init__.py @@ -1,5 +1,9 @@ ''' -Created on 13 aout 2018 +Created on 24 avril 2023 -@author: Fab +@author: Raul Silva + +Originally based on: +lmtanalysis - Created on 13 aout 2018 +@author: fab ''' diff --git a/EMT/lmtanalysis/Animal.py b/EMT/lmtanalysis/Animal.py index 574054572ab8fa55227ae6c4559f3ec9f34a93e6..888d145184e29ff7f301ade81dcab6cd2696ecc6 100644 --- a/EMT/lmtanalysis/Animal.py +++ b/EMT/lmtanalysis/Animal.py @@ -1258,11 +1258,14 @@ class AnimalPool(): return tDic - def loadDetection (self , start = None, end=None , lightLoad = False ): + def loadDetection (self , start = None, end=None , lightLoad = False, idAnimal=None ): self.detectionStartFrame = start self.detectionEndFrame = end - for animal in self.animalDictionary.keys(): - self.animalDictionary[animal].loadDetection( start = start, end = end , lightLoad=lightLoad ) + if idAnimal is None: + for animal in self.animalDictionary.keys(): + self.animalDictionary[animal].loadDetection(start = start, end = end , lightLoad=lightLoad) + else: + self.animalDictionary[idAnimal].loadDetection(start = start, end = end , lightLoad=lightLoad) def filterDetectionByInstantSpeed(self, minSpeed, maxSpeed): for animal in self.animalDictionary.keys(): diff --git a/EMT/lmtanalysis/BuildEventHuddling.py b/EMT/lmtanalysis/BuildEventHuddling.py index edbf0943177abf41fa21ffa1e7a4677fed445d47..1016094b5c103a5de3ffaa2ff04b7f10ea890393 100644 --- a/EMT/lmtanalysis/BuildEventHuddling.py +++ b/EMT/lmtanalysis/BuildEventHuddling.py @@ -23,7 +23,7 @@ def flush( connection ): ''' flush event in database ''' deleteEventTimeLineInBase(connection, "Huddling" ) -def reBuildEvent( connection, file, 
tmin=None, tmax=None, pool = None , animalType = None, showGraph = False, oldRoundness=False): +def reBuildEvent( connection, file, tmin=None, tmax=None, pool = None , animalType = None, showGraph = False): ''' use the pool provided or create it''' if ( pool == None ): @@ -57,31 +57,18 @@ def reBuildEvent( connection, file, tmin=None, tmax=None, pool = None , animalTy mask.getNbPoint() < ParametersMouse.MIN_MASK_AREA): continue - # As a matter of compatibility, we let the old roundness method, but default is the new one - if oldRoundness: - roundness = mask.getRoundness() - if roundness == None: - continue - if roundness < ParametersMouse.ROUNDNESS_THRESHOLD: # and roundness > 1: - result[t] = True - else: - roundness = mask.getNewRoundness() - if roundness == None: - continue - if roundness > ParametersMouse.ROUNDNESS_THRESHOLD: # and roundness < 1: - result[t] = True - + roundness = mask.getNewRoundness() + if (roundness is not None) and (roundness > ParametersMouse.ROUNDNESS_THRESHOLD): + result[t] = True - huddlingTimeLine.reBuildWithDictionary( result ) + huddlingTimeLine.reBuildWithDictionary(result) huddlingTimeLine.endRebuildEventTimeLine(connection) - # log process t = TaskLogger( connection ) - t.addLog( "Build Event Huddling" , tmin=tmin, tmax=tmax ) - + t.addLog("Build Event Huddling" , tmin=tmin, tmax=tmax) - print( "Rebuild event finished." ) + print("Rebuild event finished.") def filterEventTimeLine(connection, **kwargs): diff --git a/EMT/lmtanalysis/BuildEventHuddling_oldv.py b/EMT/lmtanalysis/BuildEventHuddling_oldv.py new file mode 100644 index 0000000000000000000000000000000000000000..c5bcf87e0aeb8e73fd0b982c90503eea2d531233 --- /dev/null +++ b/EMT/lmtanalysis/BuildEventHuddling_oldv.py @@ -0,0 +1,68 @@ +''' +Created on 6 sept. 
'''
Created on 6 sept. 2017

@author: Fab
'''
# --- EMT/lmtanalysis/BuildEventHuddling_oldv.py -------------------------------
# Legacy implementation kept for comparison runs: flags "Huddling" with the OLD
# roundness formula (roundness < 1.85). The parallel module below uses the new
# roundness metric and a per-animal worker process instead.
import sqlite3
from time import *
from lmtanalysis.Chronometer import Chronometer
from lmtanalysis.Animal import *
from lmtanalysis.Detection import *
from lmtanalysis.Measure import *  # fixed: was imported twice upstream; once is enough
import matplotlib.pyplot as plt
import numpy as np
from lmtanalysis.Event import *
from lmtanalysis.TaskLogger import TaskLogger


def flush(connection):
    '''Delete every "Huddling" event from the database.'''
    deleteEventTimeLineInBase(connection, "Huddling")


def reBuildEvent(connection, file, tmin=None, tmax=None, pool=None,
                 animalType=None, showGraph=False):
    '''Rebuild the per-animal "Huddling" timeline with the legacy roundness test.

    connection : open sqlite3 connection to the LMT database.
    file       : sqlite file path (unused here, kept for interface parity).
    tmin/tmax  : inclusive frame window; both must be provided — unlike the
                 parallel version this module does not default them.
    pool       : optional pre-loaded AnimalPool; created on demand.
    animalType / showGraph : unused, kept for interface compatibility.
    '''
    # Use the pool provided or create it.
    if pool is None:  # fixed: identity test instead of '== None'
        pool = AnimalPool()
        pool.loadAnimals(connection)
        # pool.loadDetection( start = tmin, end = tmax )

    for idAnimalA in pool.animalDictionary:
        animal = pool.animalDictionary[idAnimalA]
        print("Computing huddling for animal ", animal)

        huddlingTimeLine = EventTimeLine(connection, "Huddling", idAnimalA,
                                         minFrame=tmin, maxFrame=tmax,
                                         loadEvent=False)
        result = {}
        for t in range(tmin, tmax + 1):
            if t % 10000 == 0:
                print("current t", t)

            mask = animal.getBinaryDetectionMask(t)
            if mask is None:
                continue
            mask.unzip()
            roundness = mask.getRoundness()
            if roundness is None:
                continue
            # Legacy threshold: old roundness below 1.85 counts as huddling.
            if roundness < 1.85:
                result[t] = True

        huddlingTimeLine.reBuildWithDictionary(result)
        huddlingTimeLine.endRebuildEventTimeLine(connection)

    # Log the processing step in the database.
    t = TaskLogger(connection)
    t.addLog("Build Event Huddling", tmin=tmin, tmax=tmax)

    print("Rebuild event finished.")


# --- EMT/lmtanalysis/BuildEventHuddling_parallel.py ---------------------------
'''
Created on 6 sept. 2017

@author: Fab

Adapted on 18 sept. 2023
@author: Raul
'''
import sqlite3
from multiprocessing import Process

from lmtanalysis.Animal import AnimalPool
from lmtanalysis.Event import *
from lmtanalysis.TaskLogger import TaskLogger
from lmtanalysis.ParametersMouse import ParametersMouse
from lmtanalysis.Util import getNumberOfFrames


def flush(connection):
    '''Delete every "Huddling" event from the database.'''
    deleteEventTimeLineInBase(connection, "Huddling")


def computeHuddling(sqlite_file, idAnimalA, tmin=None, tmax=None):
    '''Worker: rebuild the "Huddling" timeline for ONE animal.

    Opens its own sqlite3 connection so it can run in a child process.
    tmin/tmax default to the whole recording. Returns the {frame: True}
    dictionary that was written to the timeline.
    '''
    tmin = 0 if tmin is None else tmin
    tmax = getNumberOfFrames(sqlite_file) if tmax is None else tmax

    connection = sqlite3.connect(sqlite_file)
    try:
        pool = AnimalPool()
        pool.loadAnimals(connection)
        animal = pool.animalDictionary[idAnimalA]

        huddlingTimeLine = EventTimeLine(connection, "Huddling", idAnimalA,
                                         minFrame=tmin, maxFrame=tmax,
                                         loadEvent=False)
        result = {}
        for t in range(tmin, tmax + 1):
            if t % 18000 == 0:
                # presumably 30 fps, so 1800 frames = 1 min — TODO confirm
                print(f"[ID {idAnimalA}] Current t: {t//1800} min")

            mask = animal.getBinaryDetectionMask(t)
            if mask is None:
                continue
            mask.unzip()

            # Filter out non-existing masks and clearly bad segmentations.
            if mask.width == 0 or mask.getNbPoint() < ParametersMouse.MIN_MASK_AREA:
                continue

            roundness = mask.getNewRoundness()
            if roundness is not None and roundness > ParametersMouse.ROUNDNESS_THRESHOLD:
                result[t] = True

        huddlingTimeLine.reBuildWithDictionary(result)
        huddlingTimeLine.endRebuildEventTimeLine(connection)
        return result
    finally:
        connection.close()  # fixed: the worker connection was never closed


def reBuildEvent(connection, file, tmin=None, tmax=None, pool=None,
                 animalType=None, showGraph=False):
    '''Rebuild "Huddling" for every animal, one child process per animal.'''
    # Use the pool provided or create it.
    if pool is None:
        pool = AnimalPool()
        pool.loadAnimals(connection)

    processes = []
    for idAnimalA in pool.animalDictionary:
        process = Process(target=computeHuddling, args=(file, idAnimalA, tmin, tmax))
        processes.append(process)
        process.start()

    # The join IS necessary: the TaskLogger entry below must only be written
    # once every per-animal timeline has been committed by its worker.
    for process in processes:
        process.join()

    # Log the processing step in the database.
    t = TaskLogger(connection)
    t.addLog("Build Event Huddling", tmin=tmin, tmax=tmax)


def filterEventTimeLine(connection, **kwargs):
    '''Load the "Huddling" timeline and clean it up.

    kwargs: idA..idD, t_min/t_max, minimum_event_particle_length,
    merge_events_length, minimum_event_length. Returns the filtered
    EventTimeLine.
    '''
    idA = kwargs.get("idA", None)
    idB = kwargs.get("idB", None)
    idC = kwargs.get("idC", None)
    idD = kwargs.get("idD", None)
    t_min = kwargs.get("t_min", 0)
    t_max = kwargs.get("t_max", oneHour) - 1
    # fixed typo: local name was "..._lenght" (the kwarg key is unchanged)
    minimum_event_particle_length = kwargs.get("minimum_event_particle_length", 0)
    dilation_factor = kwargs.get('merge_events_length', 5)
    minimum_event_length = kwargs.get("minimum_event_length", 30)

    event_name = "Huddling"

    eventTL = EventTimeLine(connection, event_name, idA=idA, idB=idB, idC=idC,
                            idD=idD, minFrame=t_min, maxFrame=t_max)
    eventTL.removeEventsBelowLength(minimum_event_particle_length)
    eventTL.closeEvents(dilation_factor)
    eventTL.removeEventsBelowLength(minimum_event_length)

    print("Filtering event finished.")
    return eventTL

# NOTE(review): the diff continues with the header of
# EMT/lmtanalysis/BuildEventSAP_parallel.py on the next source lines.
'''
Created on 6 sept. 2017

@author: Fab
'''
# --- EMT/lmtanalysis/BuildEventSAP_parallel.py --------------------------------
import sqlite3
import matplotlib.pyplot as plt
import numpy as np
from multiprocessing import Process
import skimage.morphology as skm

from lmtanalysis.Animal import AnimalPool
from lmtanalysis.Event import *
from lmtanalysis.TaskLogger import TaskLogger
from lmtanalysis.Util import getNumberOfFrames


def flush(connection):
    '''Delete every "SAP" event from the database.'''
    deleteEventTimeLineInBase(connection, "SAP")


def computeMoment(im, p, q):
    '''Raw image moment M_pq of *im*, with the origin at the image centre.'''
    h, w = im.shape[:2]
    y, x = np.mgrid[-h//2:h//2, -w//2:w//2]
    return np.sum(x**p * y**q * im, dtype=float)


def computeMetrics(image, r=9, debug=False):
    '''Return [I1, eccentricity, length] shape descriptors of a binary mask.

    I1 is the first Hu invariant moment; eccentricity and length come from the
    eigenvalues of the covariance matrix of the preprocessed mask. The mask is
    smoothed with directional openings (line structuring elements of size *r*)
    to remove segmentation spurs before the moments are computed.
    Returns [inf, inf, 0] when the filtered mask is empty.
    '''
    #############################################################
    # Preprocessing: pad, open along the 4 main directions, re-dilate, and keep
    # only the original pixels confirmed by the smoothed support.
    image = np.pad(image, ((5, 5), (5, 5)), 'constant')
    if debug:
        plt.figure()
        plt.imshow(image)
    h = r // 2
    strel_1 = np.zeros((r, r))
    strel_1[h, :] = 1                      # horizontal line
    strel_2 = np.zeros((r, r))
    strel_2[:, h] = 1                      # vertical line
    strel_3 = np.zeros((r, r))
    for j in range(r):
        strel_3[j, j] = 1                  # main diagonal
    strel_4 = np.zeros((r, r))
    for j in range(r):
        strel_4[j, r - j - 1] = 1          # anti-diagonal

    temp = skm.binary_opening(image, strel_1)
    temp = skm.binary_opening(temp, strel_2)
    temp = skm.binary_opening(temp, strel_3)
    temp = skm.binary_opening(temp, strel_4)
    temp = skm.binary_dilation(temp, np.ones((h + 1, h + 1)))
    filtered_image = image * temp
    #############################################################

    if debug:
        plt.figure()
        plt.imshow(filtered_image)

    M00 = computeMoment(filtered_image, 0, 0)
    if M00 == 0:
        # Empty mask: infinite Hu moment / eccentricity, zero length.
        return [np.inf, np.inf, 0]

    M01 = computeMoment(filtered_image, 0, 1)
    M10 = computeMoment(filtered_image, 1, 0)
    M11 = computeMoment(filtered_image, 1, 1)
    M02 = computeMoment(filtered_image, 0, 2)
    M20 = computeMoment(filtered_image, 2, 0)

    xc = M10 / M00
    yc = M01 / M00

    # Central moments, then normalized moments (exponent (p+q+2)/2 == 2 here).
    mu00 = M00
    mu11 = M11 - xc * M01
    mu02 = M02 - yc * M01
    mu20 = M20 - xc * M10
    n20 = mu20 / mu00**((2 + 2 + 0) / 2)
    n02 = mu02 / mu00**((2 + 0 + 2) / 2)

    I1 = n20 + n02                         # first Hu invariant moment

    # fixed: upstream swapped the names sigma02/sigma20 (behaviour was the same
    # only because the formulas below are symmetric in the two) — named
    # consistently here: sigma_pq = mu_pq / mu00.
    sigma20 = mu20 / mu00
    sigma02 = mu02 / mu00
    sigma11 = mu11 / mu00

    # Eigenvalues of [[sigma20, sigma11], [sigma11, sigma02]].
    root = (4 * sigma11**2 + (sigma20 - sigma02)**2) ** (1 / 2)
    lambda1 = ((sigma02 + sigma20) - root) / 2
    lambda2 = ((sigma02 + sigma20) + root) / 2

    eccentricity = np.sqrt(1 - (lambda1 / lambda2))
    length = 2 * np.sqrt(lambda2)

    return [I1, eccentricity, length]


def computeSAP(sqlite_file, idAnimalA, tmin=None, tmax=None):
    '''Worker: rebuild the "SAP" (stretched-attend posture) timeline for one animal.

    A frame counts as SAP when the animal is slow (< 1), elongated (length
    above mean + std of the session), its first Hu moment lies in [0.18, 0.3],
    its eccentricity matches the empirical quadratic model, and its centre of
    mass lies inside the arena bounds. Returns the {frame: True} dictionary.
    '''
    tmin = 0 if tmin is None else tmin
    tmax = getNumberOfFrames(sqlite_file) if tmax is None else tmax

    connection = sqlite3.connect(sqlite_file)
    try:
        pool = AnimalPool()
        pool.loadAnimals(connection)
        pool.loadDetection(start=tmin, end=tmax, lightLoad=True, idAnimal=idAnimalA)
        animal = pool.animalDictionary[idAnimalA]

        SAPTimeLine = EventTimeLine(connection, "SAP", animal.baseId,
                                    minFrame=tmin, maxFrame=tmax, loadEvent=False)
        result = {}

        # Columns: xc, yc, speed, I1 (Hu1), eccentricity, length.
        descriptors = np.zeros((tmax - tmin + 1, 6))
        for t in range(tmin, tmax):
            if t % 18000 == 0:
                # presumably 30 fps, so 1800 frames = 1 min — TODO confirm
                print(f"[ID {idAnimalA}] Current t: {t//1800} min")

            mask = animal.getBinaryDetectionMask(t)
            speed = animal.getSpeedSingleFrame(t)
            try:
                im_mask = mask.getImageMask()
            except AttributeError:
                # fixed: was a bare 'except:'; the only expected failure is a
                # missing detection (mask is None).
                continue

            metrics = computeMetrics(im_mask, r=9)
            descriptors[t - tmin] = [mask.xc, mask.yc, speed] + metrics

        lengths = descriptors[descriptors[:, -1] != 0][:, -1]
        if lengths.size:
            mu = lengths.mean()
            sigma = lengths.std()
        else:
            # fixed: mean/std of an empty selection produced nan warnings; with
            # no valid frame no SAP event can be emitted anyway.
            mu = sigma = np.inf

        for t in range(tmin, tmax):
            xc, yc, speed, hu1, ecc, length = descriptors[t - tmin]
            # Empirical quadratic fit of eccentricity as a function of Hu1.
            ecc_pred = -9.8908 * hu1**2 + 5.9121 * hu1 + 0.0654
            if ((speed < 1) and
                    (length > (mu + sigma)) and
                    (0.18 <= hu1 <= 0.3) and
                    (np.abs(ecc - ecc_pred) < 0.01) and
                    (125 < xc < 385) and
                    (75 < yc < 340)):
                result[t] = True

        SAPTimeLine.reBuildWithDictionary(result)
        SAPTimeLine.endRebuildEventTimeLine(connection)
        return result
    finally:
        connection.close()  # fixed: the worker connection was never closed


def reBuildEvent(connection, file, tmin=None, tmax=None, pool=None,
                 animalType=None, showGraph=False):
    '''Rebuild "SAP" for every animal, one child process per animal.'''
    # Use the pool provided or create it.
    if pool is None:
        pool = AnimalPool()
        pool.loadAnimals(connection)

    processes = []
    for idAnimalA in pool.animalDictionary:
        process = Process(target=computeSAP, args=(file, idAnimalA, tmin, tmax))
        processes.append(process)
        process.start()

    # The join IS necessary: the TaskLogger entry below must only be written
    # once every per-animal timeline has been committed by its worker.
    for process in processes:
        process.join()

    # Log the processing step in the database.
    t = TaskLogger(connection)
    t.addLog("Build Event SAP", tmin=tmin, tmax=tmax)

    print("Rebuild event finished.")


def filterEventTimeLine(connection, **kwargs):
    '''Load the "SAP" timeline and clean it up.

    kwargs: idA..idD, t_min/t_max, minimum_event_particle_length,
    merge_events_length, minimum_event_length. Returns the filtered
    EventTimeLine.
    '''
    idA = kwargs.get("idA", None)
    idB = kwargs.get("idB", None)
    idC = kwargs.get("idC", None)
    idD = kwargs.get("idD", None)
    t_min = kwargs.get("t_min", 0)
    t_max = kwargs.get("t_max", oneHour) - 1
    # fixed typo: local name was "..._lenght" (the kwarg key is unchanged)
    minimum_event_particle_length = kwargs.get("minimum_event_particle_length", 0)
    dilation_factor = kwargs.get('merge_events_length', 5)
    minimum_event_length = kwargs.get("minimum_event_length", 30)

    event_name = "SAP"

    eventTL = EventTimeLine(connection, event_name, idA=idA, idB=idB, idC=idC,
                            idD=idD, minFrame=t_min, maxFrame=t_max)
    eventTL.removeEventsBelowLength(minimum_event_particle_length)
    eventTL.closeEvents(dilation_factor)
    eventTL.removeEventsBelowLength(minimum_event_length)

    print("Filtering event finished.")
    return eventTL

# NOTE(review): the diff continues with the header of
# EMT/lmtanalysis/BuildEventWallJump_parallel.py on the next source lines.
'''
Created on 6 sept. 2017

@author: Fab
'''
# --- EMT/lmtanalysis/BuildEventWallJump_parallel.py ---------------------------
import numpy as np
import skimage.morphology as skm
from multiprocessing import Process

from lmtanalysis.Animal import AnimalPool
from lmtanalysis.Event import *
from lmtanalysis.TaskLogger import TaskLogger
from lmtanalysis.Util import getNumberOfFrames


def flush(connection):
    '''Delete every "WallJump" event from the database.'''
    deleteEventTimeLineInBase(connection, "WallJump")


def computeMoment(im, p, q):
    '''Raw image moment M_pq of *im*, with the origin at the image centre.'''
    h, w = im.shape[:2]
    y, x = np.mgrid[-h//2:h//2, -w//2:w//2]
    return np.sum(x**p * y**q * im, dtype=float)


def computeMetrics(image, r=9, debug=False):
    """
    Computes the first Hu invariant moment. For the Wall jump event, this is the
    only invariant moment necessary to filter out some sparse bad detections.
    The preprocessing may not be necessary, but it improves uniformity of the
    shape and avoids the decrease of the metric caused by border irregularity.
    Returns inf when the opened mask is empty.
    """
    #############################################################
    # Preprocessing: pad, then open along the 4 main directions and with a 3x3
    # square to smooth the mask support. ("Prepocessing" typo fixed.)
    image = np.pad(image, ((5, 5), (5, 5)), 'constant')

    h = r // 2
    strel_1 = np.zeros((r, r))
    strel_1[h, :] = 1                      # horizontal line
    strel_2 = np.zeros((r, r))
    strel_2[:, h] = 1                      # vertical line
    strel_3 = np.zeros((r, r))
    for j in range(r):
        strel_3[j, j] = 1                  # main diagonal
    strel_4 = np.zeros((r, r))
    for j in range(r):
        strel_4[j, r - j - 1] = 1          # anti-diagonal

    image = skm.binary_opening(image, strel_1)
    image = skm.binary_opening(image, strel_2)
    image = skm.binary_opening(image, strel_3)
    image = skm.binary_opening(image, strel_4)
    image = skm.binary_opening(image, np.ones((3, 3)))
    #############################################################

    image[image > 0] = 1

    M00 = computeMoment(image, 0, 0)
    if M00 == 0:
        return np.inf

    M01 = computeMoment(image, 0, 1)
    M10 = computeMoment(image, 1, 0)
    M02 = computeMoment(image, 0, 2)
    M20 = computeMoment(image, 2, 0)

    xc = M10 / M00
    yc = M01 / M00
    # Central moments, then normalized moments (exponent (p+q+2)/2 == 2 here).
    mu00 = M00
    mu02 = M02 - yc * M01
    mu20 = M20 - xc * M10
    n20 = mu20 / mu00**((2 + 2 + 0) / 2)
    n02 = mu02 / mu00**((2 + 0 + 2) / 2)

    I1 = n20 + n02                         # first Hu invariant moment
    return I1


def computeWallJump(sqlite_file, idAnimalA, tmin=None, tmax=None):
    '''Worker: rebuild the "WallJump" timeline for one animal.

    Detects short bouts where the animal touches an arena border (bounding box
    pinned at x=84 / y=33 / x+w=428 / y+h=383 for ~5 but not ~11 frames),
    decelerates from > 8 to < 3, rises (zc > 100), and has a compact mask
    (Hu1 <= 0.4). Returns the {frame: True} dictionary.
    '''
    tmin = 0 if tmin is None else tmin
    tmax = getNumberOfFrames(sqlite_file) if tmax is None else tmax

    connection = sqlite3.connect(sqlite_file)
    try:
        pool = AnimalPool()
        pool.loadAnimals(connection)
        pool.loadDetection(start=tmin, end=tmax, lightLoad=True, idAnimal=idAnimalA)
        animal = pool.animalDictionary[idAnimalA]
        JumpWallTimeLine = EventTimeLine(connection, "WallJump", animal.baseId,
                                         None, None, None, loadEvent=False)
        result = {}

        # Columns: speed, I1 (Hu1), zc, x, y, x+width, y+height.
        descriptors = np.zeros((tmax - tmin + 1, 7))
        for t in range(tmin, tmax + 1):
            if t % 18000 == 0:
                # presumably 30 fps, so 1800 frames = 1 min — TODO confirm
                print(f"[ID {idAnimalA}] Current t: {t//1800} min")
            speed = animal.getSpeedSingleFrame(t)
            mask = animal.getBinaryDetectionMask(t)
            try:
                im_mask = mask.getImageMask()
            except AttributeError:
                # fixed: was a bare 'except:'; the only expected failure is a
                # missing detection (mask is None).
                continue

            I1 = computeMetrics(im_mask, r=3)
            descriptors[t - tmin] = [speed, I1, mask.zc, mask.x, mask.y,
                                     mask.x + mask.width, mask.y + mask.height]

        for t in range(tmax - tmin + 1):
            # NOTE(review): for t < 5 the slices [t-5:t] wrap around (negative
            # start), yielding empty windows, so the first frames can never
            # trigger — kept as upstream behaviour.
            if ((np.all(descriptors[t:t+5, 3] == 84) or
                 np.all(descriptors[t:t+5, 4] == 33) or
                 np.all(descriptors[t:t+5, 5] == 428) or
                 np.all(descriptors[t:t+5, 6] == 383))
                and not
                (np.all(descriptors[t:t+11, 3] == 84) or
                 np.all(descriptors[t:t+11, 4] == 33) or
                 np.all(descriptors[t:t+11, 5] == 428) or
                 np.all(descriptors[t:t+11, 6] == 383))
                and
                np.any(descriptors[t-5:t, 0] > 8) and
                np.any(descriptors[t+1:t+6, 0] < 3)):

                dt = np.argmin(descriptors[t+1:t+6, 0])
                if (np.any(descriptors[t:t+dt+6, 2] > 100) and
                        np.all(descriptors[t-5:t, 0] < 25) and
                        (descriptors[t, 1] <= 0.4)):
                    result[t + tmin] = True

        JumpWallTimeLine.reBuildWithDictionary(result)
        JumpWallTimeLine.endRebuildEventTimeLine(connection)
        return result  # fixed: sibling workers return their result dict
    finally:
        connection.close()  # fixed: the worker connection was never closed


def reBuildEvent(connection, file, tmin=None, tmax=None, pool=None,
                 animalType=None, showGraph=False):
    '''Rebuild "WallJump" for every animal, one child process per animal.'''
    # fixed: upstream ignored the provided pool and always reloaded; honour it
    # like the sibling modules do (same behaviour when pool is None).
    if pool is None:
        pool = AnimalPool()
        pool.loadAnimals(connection)

    eventName = "WallJump"
    print(eventName)
    print("A is jumping against the wall")

    processes = []
    for idAnimalA in pool.animalDictionary:
        process = Process(target=computeWallJump, args=(file, idAnimalA, tmin, tmax))
        processes.append(process)
        process.start()

    # The join IS necessary: the TaskLogger entry below must only be written
    # once every per-animal timeline has been committed by its worker.
    for process in processes:
        process.join()

    # Log the processing step in the database.
    t = TaskLogger(connection)
    t.addLog("Build Event Wall Jump", tmin=tmin, tmax=tmax)

    print("Rebuild event finished.")


def filterEventTimeLine(connection, **kwargs):
    '''Load the "WallJump" timeline and clean it up.

    kwargs: idA..idD, t_min/t_max, minimum_event_particle_length,
    merge_events_length, minimum_event_length. Returns the filtered
    EventTimeLine.
    '''
    idA = kwargs.get("idA", None)
    idB = kwargs.get("idB", None)
    idC = kwargs.get("idC", None)
    idD = kwargs.get("idD", None)
    t_min = kwargs.get("t_min", 0)
    t_max = kwargs.get("t_max", oneHour) - 1
    # fixed typo: local name was "..._lenght" (the kwarg key is unchanged)
    minimum_event_particle_length = kwargs.get("minimum_event_particle_length", 0)
    dilation_factor = kwargs.get('merge_events_length', 5)
    minimum_event_length = kwargs.get("minimum_event_length", 30)

    event_name = "WallJump"

    eventTL = EventTimeLine(connection, event_name, idA=idA, idB=idB, idC=idC,
                            idD=idD, minFrame=t_min, maxFrame=t_max)
    eventTL.removeEventsBelowLength(minimum_event_particle_length)
    eventTL.closeEvents(dilation_factor)
    eventTL.removeEventsBelowLength(minimum_event_length)

    print("Filtering event finished.")
    return eventTL

# NOTE(review): the remaining diff residue on these source lines (the
# EMT/lmtanalysis/__init__.py header update and the start of the
# split_database.py ID-column fix) is only partially visible and is not
# reconstructed here.
def extractDetectionTable(database, new_database_name, start, length): # Pasting values from database to new_database for row in table.itertuples(): - cursor.execute("INSERT INTO " + table_name + " (ID, \ - FRAMENUMBER, \ + cursor.execute("INSERT INTO " + table_name + " (FRAMENUMBER, \ ANIMALID, \ MASS_X, \ MASS_Y, \ @@ -80,8 +79,8 @@ def extractDetectionTable(database, new_database_name, start, length): LOOK_UP, \ LOOK_DOWN, \ DATA) VALUES \ - (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - (row[1], row[2], row[3], row[4], row[5], row[6], + (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + (row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15], row[16])) @@ -119,8 +118,7 @@ def extractEventTable(database, new_database_name, start, length): METADATA TEXT)") # Pasting values from database to new_database for row in table.itertuples(): - cursor.execute("INSERT INTO " + table_name + " (ID, \ - NAME, \ + cursor.execute("INSERT INTO " + table_name + " (NAME, \ DESCRIPTION, \ STARTFRAME, \ ENDFRAME, \ @@ -129,8 +127,8 @@ def extractEventTable(database, new_database_name, start, length): IDANIMALC, \ IDANIMALD, \ METADATA) VALUES \ - (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - (row[1], row[2], row[3], row[4], + (?, ?, ?, ?, ?, ?, ?, ?, ?)", + (row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10])) new_database.commit() diff --git a/EMT/setup.py b/EMT/setup.py index 8d49c90014ac9c4b5ab711fc0835fcd84a12f259..afed539da1143266cfb761817182c7e90f074d9f 100644 --- a/EMT/setup.py +++ b/EMT/setup.py @@ -3,10 +3,10 @@ from setuptools import setup setup( name='lmtanalysis', - version='1.1.0', + version='1.2.0', description='A new LMT package under development', author='Raul Silva', - author_email='raul-alfredo.lis-lab.fr', + author_email='raul-alfredo.de-sousa-silva@lis-lab.fr', packages=['lmtanalysis'], install_requires=[ 'affine', @@ -27,6 +27,5 @@ setup( 'shapely', 'tabulate', 'umap-learn', - ], )