diff --git a/Stationary stimulus.xlsx b/Stationary stimulus.xlsx
new file mode 100644
index 0000000..24bbd03
Binary files /dev/null and b/Stationary stimulus.xlsx differ
diff --git a/camera_configuration_fish0.dat b/camera_configuration_fish0.dat
new file mode 100644
index 0000000..878868a
Binary files /dev/null and b/camera_configuration_fish0.dat differ
diff --git a/camera_configuration_fish1.dat b/camera_configuration_fish1.dat
new file mode 100644
index 0000000..42d0c4a
Binary files /dev/null and b/camera_configuration_fish1.dat differ
diff --git a/free_swimming_4fish_setup/alignment/__pycache__/camera_alignment_gui.cpython-37.pyc b/free_swimming_4fish_setup/alignment/__pycache__/camera_alignment_gui.cpython-37.pyc
new file mode 100644
index 0000000..16fd33e
Binary files /dev/null and b/free_swimming_4fish_setup/alignment/__pycache__/camera_alignment_gui.cpython-37.pyc differ
diff --git a/free_swimming_4fish_setup/alignment/camera_alignment.ui b/free_swimming_4fish_setup/alignment/camera_alignment.ui
new file mode 100644
index 0000000..3e0768b
--- /dev/null
+++ b/free_swimming_4fish_setup/alignment/camera_alignment.ui
@@ -0,0 +1,62 @@
+
+
+ Dialog
+
+
+ true
+
+
+
+ 0
+ 0
+ 1363
+ 970
+
+
+
+ Dialog
+
+
+
+
+ 20
+ 30
+ 901
+ 901
+
+
+
+ QFrame::Box
+
+
+ TextLabel
+
+
+
+
+
+ 940
+ 330
+ 411
+ 51
+
+
+
+ Info:
+
+
+
+
+
+ 930
+ 30
+ 421
+ 291
+
+
+
+
+
+
+
+
diff --git a/free_swimming_4fish_setup/alignment/camera_alignment_gui.py b/free_swimming_4fish_setup/alignment/camera_alignment_gui.py
new file mode 100644
index 0000000..7436337
--- /dev/null
+++ b/free_swimming_4fish_setup/alignment/camera_alignment_gui.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+import sys
+import numpy as np
+import os
+from multiprocessing import Process, Value
+import pickle
+import cv2
+from ctypes import *
+import pyqtgraph as pg
+import socket
+
+from PyQt5 import QtGui, QtWidgets, QtCore, Qt, uic
+import sys
+sys._excepthook = sys.excepthook
+def exception_hook(exctype, value, traceback):
+ sys._excepthook(exctype, value, traceback)
+ sys.exit(1)
+sys.excepthook = exception_hook
+
+class Fishreturn(Structure):
+ _fields_ = [("camera_framenum", c_uint),
+ ("camera_timestamp", c_double),
+ ("camera_fps", c_double),
+
+ ("mode_updated", c_bool),
+
+ ("errorcode", c_int),
+
+ ("fish_movie_framenum", c_uint),
+
+ ("fish_position_x", c_double),
+ ("fish_position_y", c_double),
+
+ ("fish_orientation", c_double),
+ ("fish_accumulated_orientation", c_double),
+ ("fish_accumulated_orientation_lowpass", c_double),
+ ("fish_accumulated_orientation_variance", c_double),
+
+ ("fish_accumulated_path", c_double),
+
+ ("bout_found", c_bool),
+ ("bout_timestamp_start", c_double),
+ ("bout_timestamp_end", c_double),
+ ("bout_heading_angle_change", c_double),
+ ("bout_distance_traveled_change", c_double),
+
+ ("fish_area", c_double),
+ ]
+
+
+class keyPressEvent(QtCore.QObject):
+ def __init__(self, parent):
+ super(keyPressEvent, self).__init__(parent)
+ self.parent = parent
+
+
+ def eventFilter(self, object, event):
+ if event.type() == QtCore.QEvent.KeyPress:
+ #print event.key()
+ if event.key() == 16777236:
+ self.parent.xoffset += 2
+
+ if event.key() == 16777234:
+ self.parent.xoffset -= 2
+
+ if event.key() == 16777235:
+ self.parent.yoffset += 2
+
+ if event.key() == 16777237:
+ self.parent.yoffset -= 2
+
+
+ if event.key() == ord('T'):
+ self.parent.radius += 16
+
+ if event.key() == ord('R'):
+                self.parent.radius -= 16 # the width has to be divisible by 32
+
+ if event.key() == ord('G'):
+ self.parent.gain -= 0.05
+
+ if event.key() == ord('H'):
+ self.parent.gain += 0.05
+
+ if event.key() == ord('S'):
+ self.parent.shutter -= 0.05
+
+ if event.key() == ord('D'):
+ self.parent.shutter += 0.05
+
+ if event.key() == 16777216: # ESC
+ self.parent.close()
+
+ return False # not complete
+
+class CameraAlignment_Dialog(QtWidgets.QDialog):
+ def __init__(self, fish_index, running, parent=None):
+ QtWidgets.QWidget.__init__(self, parent)
+ self.fish_index = fish_index
+ self.running = running
+
+ path = os.path.dirname(__file__)
+ uic.loadUi(os.path.join(path, "camera_alignment.ui"), self)
+
+ computer_name = socket.gethostname()
+
+ if computer_name == "NW152-beh-2":
+ self.setup_ID = 0
+
+ #if computer_name == "NW152-beh-2":
+ #self.setup_ID = 1
+
+
+ if self.setup_ID == 0:
+ self.root_path = r"C:\LS100\LS100_free_swimming_4fish_data"
+ self.lib = cdll.LoadLibrary(r"C:\Users\Max\Desktop\LS100\modules\fishcamera_C\x64\Release\fishcamera.dll")
+
+ #if self.setup_ID == 1:
+ #self.root_path = r"D:\Yasuko_free_swimming_4fish_data"
+ #self.lib = cdll.LoadLibrary(r"C:\Users\Yasuko Isoe\PyCharm_projects\fishsetup\modules\fishcamera_C\x64\Release\fishcamera.dll")
+
+ self.installEventFilter(keyPressEvent(self))
+ self.layout_histogram.installEventFilter(keyPressEvent(self))
+ self.label_view_camera.installEventFilter(keyPressEvent(self))
+
+ self.lib.get_fish_info.restype = Fishreturn
+ self.lib.get_gain.restype = c_double
+ self.lib.get_shutter.restype = c_double
+
+ [self.radius, self.xoffset, self.yoffset, self.gain, self.shutter] = 1024, 0, 0, 0.5, 0.5
+
+
+ if self.setup_ID == 0:
+ if self.fish_index == 0:
+ self.camera_serial = 16307759
+ elif self.fish_index == 1:
+ self.camera_serial = 19300526
+ # elif self.fish_index == 2:
+ # self.camera_serial = 19242806
+
+ # if self.setup_ID == 1:
+ #
+ # if self.fish_index == 0:
+ # self.camera_serial = 16307759
+ # elif self.fish_index == 1:
+ # self.camera_serial = 19300526
+ # elif self.fish_index == 2:
+ # self.camera_serial = 19242806
+
+ try:
+ [self.radius, self.xoffset, self.yoffset, self.gain, self.shutter] = pickle.load(open(os.path.join(self.root_path, "camera_configuration_fish%d.dat"%self.fish_index), 'rb'))
+ except:
+ print("???")
+ pass
+
+ self.lib.open_cam(self.camera_serial, 1024, 0, 0, c_float(self.gain), c_float(self.shutter), c_char_p("".encode()), c_char_p("".encode()), c_char_p("".encode()))
+
+ pg.setConfigOption('background', pg.mkColor(0.95))
+ pg.setConfigOption('foreground', 'k')
+
+ self.my_plot = pg.PlotWidget()
+ self.layout_histogram.addWidget(self.my_plot)
+
+ self.my_plot.setLabel('left', 'Mean Brightness')
+ self.my_plot.setLabel('bottom', 'Normalized radius')
+ self.my_plot.setXRange(0, 1)
+ self.my_plot.setYRange(0, 255)
+
+ curvePen = pg.mkPen(color=(255, 15, 10), width=4.5)
+
+ self.curve = pg.PlotCurveItem(pen = curvePen)
+
+ self.my_plot.addItem(self.curve)
+
+ self.fish_roi_buffer = create_string_buffer(2048*2048)
+
+ self.timer = QtCore.QTimer()
+ self.timer.timeout.connect(self.Refresh)
+ self.timer.start(40.)
+
+ def closeEvent(self, event):
+ print("Closing...")
+
+ self.lib.close_cam()
+ print("Saving configuration file")
+ pickle.dump([self.radius, self.xoffset, self.yoffset, self.gain, self.shutter], open(os.path.join(self.root_path, "camera_configuration_fish%d.dat"%self.fish_index), 'wb'))
+
+ # also tell the panda3d to close
+ self.running.value = 0
+
+ self.close()
+
+ def Refresh(self):
+
+ self.radius = int(self.radius/16)*16
+ self.xoffset = int(self.xoffset/2)*2
+ self.yoffset = int(self.yoffset/2)*2
+
+ self.lib.set_gain(c_double(self.gain))
+ self.lib.set_shutter(c_double(self.shutter))
+
+ rs = np.linspace(0, 500, 100)/500.
+ import datetime
+ t = datetime.datetime.now()
+ self.lib.get_image(self.fish_roi_buffer, 2048*2048)
+
+ roi_ar = np.fromstring(self.fish_roi_buffer, dtype=np.uint8).reshape((2048, 2048))
+ #roi_ar = (np.random.random((2048, 2048))*255).astype(np.uint8)
+ pic = cv2.resize(roi_ar, (500, 500)).flatten()
+
+ radial_histogram = create_string_buffer(100)
+ self.lib.get_radial_histogram(pic.tobytes(), radial_histogram)
+
+ radial_histogram = np.fromstring(radial_histogram, dtype=np.uint8)
+ self.curve.setData(rs, radial_histogram)
+
+ roi_ar = cv2.cvtColor(roi_ar, cv2.COLOR_GRAY2RGBA)
+
+ cv2.circle(roi_ar, (1024+self.xoffset, 1024-self.yoffset), radius = self.radius, color = (0, 255, 255, 255), thickness=3)
+
+ cv2.line(roi_ar, (1024-int(0.5*self.radius)+self.xoffset, 1024-self.yoffset), (1024+int(0.5*self.radius)+self.xoffset, 1024-self.yoffset), color = (0, 255, 255, 255), thickness=3)
+ cv2.line(roi_ar, (1024+self.xoffset, 1024-int(0.5*self.radius)-self.yoffset), (1024+self.xoffset, 1024+int(0.5*self.radius)-self.yoffset), color = (0, 255, 255, 255), thickness=3)
+
+ roi_ar = cv2.resize(roi_ar, (900, 900)) #1800, 1800
+ QI = QtGui.QImage(roi_ar, 900, 900, QtGui.QImage.Format_ARGB32) # 1800, 1800
+
+ self.label_view_camera.setPixmap(QtGui.QPixmap.fromImage(QI))
+
+ text = "Radius: %d; xoffset: %d; yoffset: %d; Gain set : %.2f; Shutter set: %.2f\n"%(self.radius, self.xoffset, self.yoffset, self.gain, self.shutter)
+ text += "Gain actual: %.2f; Shutter actual: %.2f" % (float(self.lib.get_gain()), float(self.lib.get_shutter()))
+ self.label_info.setText(text)
+
+ self.timer.start(40.)
+
+
+class GUI_Process(Process):
+ def __init__(self, fish_index):
+ Process.__init__(self)
+ self.fish_index = fish_index
+
+ self.running = Value("i", 1)
+
+ def run(self):
+ app = QtWidgets.QApplication(sys.argv)
+
+ main = CameraAlignment_Dialog(self.fish_index, self.running)
+
+ main.show()
+ app.exec_()
+
diff --git a/free_swimming_4fish_setup/alignment/clean.py b/free_swimming_4fish_setup/alignment/clean.py
new file mode 100644
index 0000000..a8f2f8f
--- /dev/null
+++ b/free_swimming_4fish_setup/alignment/clean.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+if __name__ == "__main__":
+ import sys
+
+ from direct.showbase.ShowBase import ShowBase
+ from pandac.PandaModules import *
+
+ class World(ShowBase):
+
+ def __init__(self):
+
+ loadPrcFileData("",
+ """fullscreen 0
+ win-origin 1920 0
+ undecorated 1
+ win-size 5760 1080
+ sync-video 0
+ load-display pandagl""")
+
+ # Init the scene
+ ShowBase.__init__(self)
+ self.disableMouse()
+
+ self.accept("escape", self.finish)
+
+ self.setBackgroundColor(1, 1, 1, 1)
+
+ def finish(self):
+
+ sys.exit()
+
+
+ world = World()
+ world.run()
diff --git a/free_swimming_4fish_setup/alignment/stimulus_alignment.py b/free_swimming_4fish_setup/alignment/stimulus_alignment.py
new file mode 100644
index 0000000..f037b1d
--- /dev/null
+++ b/free_swimming_4fish_setup/alignment/stimulus_alignment.py
@@ -0,0 +1,242 @@
+###########
+fish_index = 0
+user_prefix = 'LS100_' ##!!
+
+###########
+
+# -*- coding: utf-8 -*-
+if __name__ == "__main__":
+ import sys
+ import numpy as np
+
+ from multiprocessing import Value, Array, Queue
+ from panda3d.core import TrueClock
+
+ TrueClock.getGlobalPtr().setCpuAffinity(0xFFFFFFFF)
+
+ from camera_alignment_gui import GUI_Process
+
+ gui = GUI_Process(fish_index)
+ gui.start()
+
+ from direct.showbase.ShowBase import ShowBase
+ from panda3d.core import *
+ import numpy as np
+ import sys
+ import time
+ import os
+ import pickle
+ import socket
+
+
+ class World(ShowBase):
+
+ def __init__(self, fish_index):
+
+ loadPrcFileData("",
+ """fullscreen 0
+ win-origin 2692 205
+ undecorated 1
+ win-size 980 980
+ sync-video 0
+ load-display pandagl""")
+
+ # fish_index 0
+ # win - origin 2692 205
+ # win - size 980 980
+
+ # fish_index 1
+ # win - origin 3928 127
+ # win - size 1030 1030
+
+ # Init the scene
+ ShowBase.__init__(self)
+ self.disableMouse()
+
+ [self.k_x, self.k_y, self.k_z, self.k_r] = 0, 0, 1, 90
+
+ computer_name = socket.gethostname()
+ self.setup_ID = 0
+ if computer_name == "NW152-beh-2":
+ self.setup_ID = 0
+
+ # if computer_name == "NW152-beh-2":
+ # self.setup_ID = 1
+
+ if self.setup_ID == 0:
+ self.root_path = r"C:\LS100\{}free_swimming_4fish_data".format(user_prefix)
+
+ # if self.setup_ID == 1:
+ # self.root_path = r"C:\LS100\{}free_swimming_4fish_data".format(user_prefix)
+
+ self.fish_index = fish_index
+
+ try:
+ [self.k_x, self.k_y, self.k_z, self.k_r] = pickle.load(
+ open(os.path.join(self.root_path, "stimulus_configuration_fish%d.dat" % self.fish_index), 'rb'))
+ print(os.path.join(self.root_path, "stimulus_configuration_fish%d.dat" % self.fish_index))
+ except:
+ pass
+
+ self.accept("arrow_up", self.up)
+ self.accept("arrow_up-repeat", self.up)
+
+ self.accept("arrow_down", self.down)
+ self.accept("arrow_down-repeat", self.down)
+
+ self.accept("arrow_right", self.right)
+ self.accept("arrow_right-repeat", self.right)
+
+ self.accept("arrow_left", self.left)
+ self.accept("arrow_left-repeat", self.left)
+
+ self.accept("z", self.zoom_in)
+ self.accept("z-repeat", self.zoom_in)
+
+ self.accept("x", self.zoom_out)
+ self.accept("x-repeat", self.zoom_out)
+
+ self.accept("r", self.rotate_right)
+ self.accept("r-repeat", self.rotate_right)
+
+ self.accept("t", self.rotate_left)
+ self.accept("t-repeat", self.rotate_left)
+
+ self.lens1 = PerspectiveLens()
+ self.lens1.setFov(90, 90)
+ self.lens1.setNearFar(0.001, 1000)
+ self.lens1.setAspectRatio(500. / 550)
+ self.cam.node().setLens(self.lens1)
+
+ self.setBackgroundColor(0, 0, 0, 1)
+
+ self.fish_node = self.render.attachNewNode("fish_node")
+
+ self.background_circle1 = self.create_circles(1, edges=30)
+ self.background_circle1.reparentTo(self.fish_node)
+
+ self.background_circle2 = self.create_circles(1, edges=30)
+ self.background_circle2.reparentTo(self.fish_node)
+
+ # make a cross
+ cm = CardMaker('card')
+
+ card0 = self.fish_node.attachNewNode(cm.generate())
+ card0.setScale(0.01, 1, 1)
+ card0.setPos(-0.005, -0.002, -0.5)
+ card0.setColor(1, 1, 1)
+
+ card1 = self.fish_node.attachNewNode(cm.generate())
+ card1.setScale(1, 1, 0.01)
+ card1.setPos(-0.5, -0.002, -0.005)
+ card1.setColor(1, 1, 1)
+
+ card2 = self.fish_node.attachNewNode(cm.generate())
+ card2.setScale(0.1, 1, 0.1)
+ card2.setPos(0.1 - 0.05, -0.002, 0.1 - 0.05)
+ card2.setColor(1, 0, 0)
+
+
+ self.background_circle1.setScale(1.01, 1.01, 1.01) #1.01, 1.01, 1.01
+ self.background_circle1.setColor(1, 0, 1)
+ self.background_circle1.setPos(0.0, 0.0, 0)
+
+ self.background_circle2.setScale(0.99, 0.99, 0.99)
+ self.background_circle2.setColor(0.0, 0.0, 0.0)
+ self.background_circle2.setPos(0, -0.001, 0)
+
+ self.taskMgr.add(self.check_finish, "check_finish")
+
+ self.update()
+
+ def update(self):
+
+ self.fish_node.setPos(self.k_x, 1, self.k_y)
+ self.fish_node.setScale(self.k_z, 1, self.k_z)
+ self.fish_node.setHpr(0, 0, self.k_r)
+
+ print(self.k_x, self.k_y, self.k_z, self.k_r)
+
+ def rotate_right(self):
+ self.k_r += 0.05
+ self.update()
+
+ def rotate_left(self):
+ self.k_r -= 0.05
+ self.update()
+
+ def up(self):
+ self.k_x += 0.002
+ self.update()
+
+ def down(self):
+ self.k_x -= 0.002
+ self.update()
+
+ def left(self):
+ self.k_y += 0.002
+ self.update()
+
+ def right(self):
+ self.k_y -= 0.002
+ self.update()
+
+ def zoom_in(self):
+ self.k_z += 0.002
+
+ self.update()
+
+ def zoom_out(self):
+ self.k_z -= 0.002
+
+ self.update()
+
+ def check_finish(self, task):
+ if gui.running.value == 1:
+ return task.cont
+ else:
+ pickle.dump([self.k_x, self.k_y, self.k_z, self.k_r],
+ open(os.path.join(self.root_path, "stimulus_configuration_fish%d.dat" % self.fish_index),
+ 'wb'))
+
+ sys.exit()
+
+ return task.done
+
+ def create_circles(self, n, edges=20):
+
+ vdata = GeomVertexData('name', GeomVertexFormat.getV3t2(), Geom.UHStatic)
+
+ prim_wall = GeomTriangles(Geom.UHStatic)
+
+ vertex_writer = GeomVertexWriter(vdata, 'vertex')
+ texcoord_writer = GeomVertexWriter(vdata, 'texcoord')
+
+ angs = np.linspace(0, 360, edges)
+
+ for i in range(n):
+ for j in range(len(angs)):
+ ang0 = angs[j]
+ ang1 = angs[(j + 1) % edges]
+ vertex_writer.addData3f(0, i, 0) # stack them in distance
+ vertex_writer.addData3f(np.cos(ang0 * np.pi / 180), i, np.sin(ang0 * np.pi / 180))
+ vertex_writer.addData3f(np.cos(ang1 * np.pi / 180), i, np.sin(ang1 * np.pi / 180))
+
+ texcoord_writer.addData2f(i / float(n), 0)
+ texcoord_writer.addData2f(i / float(n), 0.5)
+ texcoord_writer.addData2f(i / float(n), 1)
+
+ prim_wall.addConsecutiveVertices(i * edges * 3, edges * 3)
+ prim_wall.closePrimitive()
+
+ geom_wall = Geom(vdata)
+ geom_wall.addPrimitive(prim_wall)
+
+ circles = GeomNode('card')
+ circles.addGeom(geom_wall)
+
+ return NodePath(circles)
+
+
+ world = World(fish_index)
+ world.run()
diff --git a/free_swimming_4fish_setup/phototaxis/last_fishinfo.dat b/free_swimming_4fish_setup/phototaxis/last_fishinfo.dat
new file mode 100644
index 0000000..8171c5b
Binary files /dev/null and b/free_swimming_4fish_setup/phototaxis/last_fishinfo.dat differ
diff --git a/free_swimming_4fish_setup/phototaxis/scene_error.txt b/free_swimming_4fish_setup/phototaxis/scene_error.txt
new file mode 100644
index 0000000..a3dbf60
--- /dev/null
+++ b/free_swimming_4fish_setup/phototaxis/scene_error.txt
@@ -0,0 +1,6 @@
+[2020-02-26 11:13:57.724875] : (, FileNotFoundError(2, 'No such file or directory'), )
+[2020-02-26 11:13:57.724875] : (, FileNotFoundError(2, 'No such file or directory'), )
+[2020-02-26 11:17:27.159850] : (, FileNotFoundError(2, 'No such file or directory'), )
+[2020-02-26 11:17:27.159850] : (, FileNotFoundError(2, 'No such file or directory'), )
+[2020-02-26 11:18:08.407966] : (, FileNotFoundError(2, 'No such file or directory'), )
+[2020-02-26 11:18:08.407966] : (, FileNotFoundError(2, 'No such file or directory'), )
diff --git a/free_swimming_4fish_setup/stimulus_configuration_fish0.dat b/free_swimming_4fish_setup/stimulus_configuration_fish0.dat
new file mode 100644
index 0000000..b244b6c
Binary files /dev/null and b/free_swimming_4fish_setup/stimulus_configuration_fish0.dat differ
diff --git a/modules/fishcamera.py b/modules/fishcamera.py
new file mode 100644
index 0000000..a365359
--- /dev/null
+++ b/modules/fishcamera.py
@@ -0,0 +1,221 @@
+# -*- coding: utf-8 -*-
+
+from multiprocessing import Process
+import sys
+import os
+from ctypes import *
+import numpy as np
+import pickle
+import cv2
+
+python_file_path = os.path.dirname(os.path.abspath(__file__))
+
+class Fishreturn(Structure):
+ _fields_ = [("camera_framenum", c_uint),
+ ("camera_timestamp", c_double),
+ ("camera_fps", c_double),
+
+ ("mode_updated", c_bool),
+
+ ("errorcode", c_int),
+
+ ("fish_movie_framenum", c_uint),
+
+ ("fish_position_x", c_double),
+ ("fish_position_y", c_double),
+
+ ("fish_orientation", c_double),
+ ("fish_accumulated_orientation", c_double),
+ ("fish_accumulated_orientation_lowpass", c_double),
+ ("fish_accumulated_orientation_variance", c_double),
+
+ ("fish_accumulated_path", c_double),
+
+ ("bout_found", c_bool),
+ ("bout_timestamp_start", c_double),
+ ("bout_timestamp_end", c_double),
+ ("bout_heading_angle_change", c_double),
+ ("bout_distance_traveled_change", c_double),
+
+ ("fish_area", c_double),
+ ]
+
+class Fishcamera(Process):
+ def __init__(self, shared, fish_index):
+ Process.__init__(self)
+
+ self.shared = shared
+ self.fish_index = fish_index
+ # update serial numbers
+ if self.shared.setup_ID == 0:
+
+ if self.fish_index == 0:
+ self.camera_serial = 16307759
+ elif self.fish_index == 1:
+ self.camera_serial = 19300526
+ #elif self.fish_index == 2:
+ #self.camera_serial = 19242806
+ # elif self.fish_index == 3:
+ # self.camera_serial = 17096220
+ #
+
+ # if self.shared.setup_ID == 1:
+ # if self.fish_index == 0:
+ # self.camera_serial = 16307759
+ # elif self.fish_index == 1:
+ # self.camera_serial = 19300526
+ #elif self.fish_index == 2:
+ #self.camera_serial = 19242806
+ # elif self.fish_index == 3:
+ # self.camera_serial = 17475983
+ #
+
+ self.camera_opened = False
+
+ def run(self):
+ try:
+ self.loop()
+ except:
+ self.shared.error("fishcamera_error.txt", sys.exc_info())
+
+ def loop(self):
+
+ self.lib = cdll.LoadLibrary(r"C:\Users\Max\Desktop\LS100\modules\fishcamera_C\x64\Release\fishcamera.dll")
+
+ self.lib.get_fish_info.restype = Fishreturn
+
+ try:
+
+ [self.shared.alignment_radius[self.fish_index].value, self.shared.alignment_xoffset[self.fish_index].value, self.shared.alignment_yoffset[self.fish_index].value, self.shared.alignment_gain[self.fish_index].value,self.shared.alignment_shutter[self.fish_index].value] = pickle.load(open(os.path.join(self.shared.root_path, "camera_configuration_fish%d.dat"%self.fish_index), 'rb'))
+ print(self.shared.root_path, self.shared.alignment_radius[self.fish_index].value)
+ except:
+ print("Fish %d not started. Either camera not configured or not found."%self.fish_index)
+ return
+
+
+
+ while self.shared.running.value == 1:
+
+ if self.shared.camera_running[self.fish_index].value == 1 and self.camera_opened == False:
+
+ self.fish_path = self.shared.get_fish_path(self.fish_index)
+
+                #self.lib.reset_cam(self.camera_serial) # needs to be done in a separate program still
+
+ print("Starting cam", self.fish_index, self.shared.alignment_gain[self.fish_index].value,
+ self.shared.alignment_shutter[self.fish_index].value)
+
+ if self.lib.open_cam(self.camera_serial,
+ self.shared.alignment_radius[self.fish_index].value,
+ self.shared.alignment_xoffset[self.fish_index].value,
+ self.shared.alignment_yoffset[self.fish_index].value,
+ c_float(self.shared.alignment_gain[self.fish_index].value),
+ c_float(self.shared.alignment_shutter[self.fish_index].value),
+ c_char_p(os.path.join(self.fish_path, "fish_roi.avi").encode()), # fish_roi_movie.avi;;; make this an empty string when no movie recording is required (saves a lot of space)
+ c_char_p(os.path.join(self.fish_path, "mode_movie.avi").encode()),
+ c_char_p(os.path.join(self.fish_path, "current_mode.png").encode()) ) == False:
+ print("Camera %d could not be opened."%self.fish_index)
+ else:
+
+ self.camera_opened = True
+
+ self.fish_roi_buffer = create_string_buffer(100*100)
+
+ # once the recording button was pushed
+ self.full_frame_buffer_list = []
+ self.full_frame_stimulus_information = []
+ self.full_frame_buffer = create_string_buffer(4*self.shared.alignment_radius[self.fish_index].value**2)
+
+ if self.shared.camera_running[self.fish_index].value == 0 and self.camera_opened == True:
+ self.lib.close_cam()
+ self.camera_opened = False
+
+ if self.camera_opened == True:
+
+ if self.shared.full_frame_recording_started[self.fish_index].value == 0: # not in the fullframe recording mode
+
+ data = self.lib.get_fish_info(self.fish_roi_buffer, c_bool(self.shared.ignore_fish[self.fish_index].value), c_bool(False), self.full_frame_buffer) # do not copy info to buffer
+
+ # if we want to record (for illustration purposes)
+ if self.shared.full_frame_recording_started[self.fish_index].value == 1:
+ data = self.lib.get_fish_info(self.fish_roi_buffer, c_bool(self.shared.ignore_fish[self.fish_index].value), c_bool(True), self.full_frame_buffer)
+
+ full_frame_buffer_numpy = np.fromstring(self.full_frame_buffer, dtype=np.uint8).reshape((self.shared.alignment_radius[self.fish_index].value*2, self.shared.alignment_radius[self.fish_index].value*2))
+
+ self.full_frame_buffer_list.append(full_frame_buffer_numpy)
+ self.full_frame_stimulus_information.append([self.shared.current_stimulus_time[self.fish_index].value,
+ self.shared.current_stimulus_index[self.fish_index].value,
+ self.shared.current_trial[self.fish_index].value,
+ data.camera_timestamp,
+ data.fish_position_x,
+ data.fish_position_y,
+ data.fish_orientation,
+ self.shared.current_info0[self.fish_index].value,
+ self.shared.current_info1[self.fish_index].value,
+ self.shared.current_info2[self.fish_index].value])
+
+                        # record 2500 frames... TODO: Make this flexible
+ if len(self.full_frame_buffer_list) > 2500:
+ for i in range(len(self.full_frame_buffer_list)):
+ filename = os.path.join(self.fish_path, "recorded_full_frames", "full_frame_%05d.png"%i)
+ cv2.imwrite(filename, self.full_frame_buffer_list[i])
+
+ with open(os.path.join(self.fish_path, "recorded_full_frames", "stimulus_fish_info.dat"), 'wb') as f:
+ pickle.dump(self.full_frame_stimulus_information, f)
+
+ # reset the lists
+ self.full_frame_buffer_list = []
+ self.full_frame_stimulus_information = []
+
+ self.shared.full_frame_recording_started[self.fish_index].value = 0
+
+ self.shared.current_camera_framenum[self.fish_index].value = data.camera_framenum
+ self.shared.current_camera_timestamp[self.fish_index].value = data.camera_timestamp
+ self.shared.current_camera_fps[self.fish_index].value = data.camera_fps
+
+ self.shared.current_errorcode[self.fish_index].value = data.errorcode
+
+ self.shared.current_fish_position_x[self.fish_index].value = data.fish_position_x
+ self.shared.current_fish_position_y[self.fish_index].value = data.fish_position_y
+
+ self.shared.current_fish_center_distance[self.fish_index].value = np.sqrt(data.fish_position_x**2 + data.fish_position_y**2)
+
+ self.shared.current_fish_orientation[self.fish_index].value = data.fish_orientation
+ self.shared.current_fish_accumulated_orientation[self.fish_index].value = data.fish_accumulated_orientation
+ self.shared.current_fish_accumulated_orientation_lowpass[self.fish_index].value = data.fish_accumulated_orientation_lowpass
+ self.shared.current_fish_accumulated_orientation_variance[self.fish_index].value = data.fish_accumulated_orientation_variance
+ self.shared.current_fish_accumulated_path[self.fish_index].value = data.fish_accumulated_path
+
+ self.shared.current_bout_found[self.fish_index].value = data.bout_found
+ self.shared.current_bout_timestamp_start[self.fish_index].value = data.bout_timestamp_start
+ self.shared.current_bout_timestamp_end[self.fish_index].value = data.bout_timestamp_end
+
+ self.shared.current_fish_area[self.fish_index].value = data.fish_area
+ self.shared.current_fish_roi_buffer[self.fish_index][:] = self.fish_roi_buffer
+
+ if data.mode_updated == True:
+ self.shared.mode_calculated[self.fish_index].value = 1
+ self.shared.updated_mode_image[self.fish_index].value = 1
+
+                # add the data to the recorder queue
+ if self.shared.stimulus_running[self.fish_index].value == 1:
+                    # send the current data to the recorder queue, also add the current info0 value. if info0 corresponds to some stimulus parameters (for example set when a bout is detected), info0 will be the setting at the last bout
+
+ self.shared.dataqueue.put([0, self.fish_index, data.camera_framenum, data.camera_timestamp, data.camera_fps,
+ data.errorcode, data.fish_movie_framenum, data.fish_position_x, data.fish_position_y,
+ data.fish_orientation, data.fish_accumulated_orientation, data.fish_accumulated_orientation_lowpass,
+ data.fish_accumulated_orientation_variance, data.fish_accumulated_path, data.fish_area,
+ self.shared.current_info0[self.fish_index].value, self.shared.current_info1[self.fish_index].value, self.shared.current_info2[self.fish_index].value])
+
+ if data.bout_found == True:
+ self.shared.dataqueue.put([4, self.fish_index, data.bout_timestamp_start, data.bout_timestamp_end])
+
+ if data.bout_found == True:
+                    self.shared.new_bout[self.fish_index].value = 1 # tell the scene that there is a new bout, this might update some stimulus parameters
+ self.shared.new_bout_heading_angle_change[self.fish_index].value = data.bout_heading_angle_change
+ self.shared.new_bout_distance_traveled_change[self.fish_index].value = data.bout_distance_traveled_change
+
+
+ if self.camera_opened == True:
+ self.lib.close_cam()
+ self.camera_opened = False
diff --git a/modules/fishcamera_C/fishcamera/fishcamera.cpp b/modules/fishcamera_C/fishcamera/fishcamera.cpp
new file mode 100644
index 0000000..3a78f37
--- /dev/null
+++ b/modules/fishcamera_C/fishcamera/fishcamera.cpp
@@ -0,0 +1,1006 @@
#include <iostream>    // NOTE(review): the angle-bracket header names were stripped in this copy; reconstructed best-guess — confirm against the original file
#include "fishcamera.h"
#include <algorithm>   // std::sort (used for contour sorting)
#include <cmath>       // fabs, sqrt, pow, atan2, fmod
+
+bool compare_contour_areas(std::vector contour1, std::vector contour2) {
+ double i = fabs(contourArea(cv::Mat(contour1)));
+ double j = fabs(contourArea(cv::Mat(contour2)));
+ return (i > j);
+}
+
+bool add_fish_roi_to_movie(cv::Mat fish_roi, unsigned int camera_framenum, int errorcode) {
+
+ if (fish_roi_video.isOpened() == true) {
+
+ cv::Mat fish_roi_copy;
+ fish_roi.copyTo(fish_roi_copy);
+
+ //cv::putText(fish_roi_copy, std::to_string(camera_framenum) + "," + std::to_string(errorcode), cv::Point(5, 10), cv::FONT_HERSHEY_SIMPLEX, 0.4, 255, 1, 8, false);
+
+ fish_roi_video.write(fish_roi_copy);
+ fish_movie_framenum += 1;
+
+ return true;
+ }
+
+ return false;
+}
+
// Per-pixel background subtraction. The fish is assumed to be darker than
// the background, so any pixel at or above the background level maps to 0;
// otherwise the (positive) darkness difference is returned.
unsigned char substract_background(unsigned char image, unsigned char background) {
	return (image < background) ? (unsigned char)(background - image) : 0;
}
+
+
// Perform a hardware reset of the camera identified by serial_number.
//
// Uses a private Spinnaker System/CameraList/CameraPtr triple (independent of
// the module-level system_gl/camList/pCam used by open_cam) so a reset can be
// issued without disturbing any already-open camera session.
// Returns true on success; on any Spinnaker exception logs the error and
// returns false. NOTE(review): on the error path the local camList_reset /
// system_gl_reset are not released — presumably acceptable for this
// reset-and-restart use case, but worth confirming.
DLLEXPORT bool reset_cam(unsigned int serial_number) {


	SystemPtr system_gl_reset;
	CameraList camList_reset;
	CameraPtr pCam_reset;

	// Find the camera and connect
	system_gl_reset = System::GetInstance();

	try {


		// Retrieve list of cameras from the system
		camList_reset = system_gl_reset->GetCameras();

		pCam_reset = camList_reset.GetBySerial(to_string(serial_number));
		pCam_reset->Init();

		// Trigger the on-device reset; the camera drops off the bus and re-enumerates.
		pCam_reset->DeviceReset();

		pCam_reset->DeInit();

		// Release system
		pCam_reset = NULL;

		camList_reset.Clear();

		system_gl_reset->ReleaseInstance();

		// Give the device time to re-enumerate before the caller reconnects.
		Sleep(500);

		cout << "All good";
	} catch (Spinnaker::Exception &e) {
		cout << "Error: " << e.what() << endl;
		return false;
	}


	return true;
}
+
// Open and fully configure the tracking camera, allocate the background-model
// buffers, open the optional output videos, and start acquisition.
//
// Parameters:
//   serial_number          - camera serial to select via Spinnaker.
//   radius                 - half the square capture ROI edge; frame is (2*radius)^2.
//   xoffset, yoffset       - ROI shift relative to the 1024-centered sensor position.
//   gain, shutter          - initial gain (dB) and exposure (ms), applied via
//                            set_gain()/set_shutter().
//   filename_fish_roi      - if non-empty, path for the 100x100 fish-ROI video.
//   filename_mode          - if non-empty, path for the background-mode video.
//   filename_modeimage     - path for the mode snapshot jpeg shown in the GUI.
//
// Returns true on success; on any Spinnaker exception logs and returns false.
// NOTE(review): on the failure paths, buffers/handles acquired earlier in the
// sequence are not released — caller is expected to treat failure as fatal.
DLLEXPORT bool open_cam(unsigned int serial_number, unsigned int radius, unsigned int xoffset, unsigned int yoffset, float gain, float shutter, const char* filename_fish_roi, const char* filename_mode, const char* filename_modeimage)
{
	picture_width = radius * 2;
	picture_height = radius * 2;
	fish_serial = serial_number;

	global_filename_modeimage = cv::String(filename_modeimage);

	// Acquire the Spinnaker system and connect to the requested camera.
	try {
		system_gl = System::GetInstance();
		camList = system_gl->GetCameras();
		cout << "Loading...." << camList.GetSize();
		pCam = camList.GetBySerial(to_string(serial_number));

		pCam->Init();
		cout << pCam->DeviceFirmwareVersion();
	} catch (Spinnaker::Exception &e) {
		cout << "Error: " << e.what() << endl;
		return false;
	}


	// Continuous acquisition + newest-only buffer handling (we always want the
	// freshest frame, dropped frames are acceptable).
	try {

		INodeMap & nodeMap = pCam->GetNodeMap();
		CEnumerationPtr ptrAcquisitionMode = nodeMap.GetNode("AcquisitionMode");
		CEnumEntryPtr ptrAcquisitionModeContinuous = ptrAcquisitionMode->GetEntryByName("Continuous");
		int64_t acquisitionModeContinuous = ptrAcquisitionModeContinuous->GetValue();
		ptrAcquisitionMode->SetIntValue(acquisitionModeContinuous);

		// setup the buffer properties
		INodeMap & nodeMapTLStream = pCam->GetTLStreamNodeMap();
		CEnumerationPtr StreamNode = nodeMapTLStream.GetNode("StreamBufferHandlingMode");
		StreamNode->SetIntValue(3); // newest, overwrite
	} catch (Spinnaker::Exception &e) {
		cout << "Error: " << e.what() << endl;
		return false;
	}

	// Enable per-frame chunk data so each image carries its own timestamp/offsets.
	try {
		pCam->ChunkSelector.SetValue(ChunkSelectorEnums::ChunkSelector_Timestamp);
		pCam->ChunkEnable.SetValue(true);

		pCam->ChunkSelector.SetValue(ChunkSelectorEnums::ChunkSelector_OffsetX);
		pCam->ChunkEnable.SetValue(true);

		pCam->ChunkSelector.SetValue(ChunkSelectorEnums::ChunkSelector_OffsetY);
		pCam->ChunkEnable.SetValue(true);

		// frame id chunk does not work

		pCam->ChunkModeActive.SetValue(true);
	} catch (Spinnaker::Exception &e) {
		cout << "Error: " << e.what() << endl;
		return false;
	}

	// set the roi on the camera (don't change anymore)
	// if this is incorrect, crashes, check this!
	try {
		pCam->OffsetX.SetValue(1024 - radius + xoffset, false);
		pCam->OffsetY.SetValue(1024 - radius - yoffset, false);
		pCam->Width.SetValue(picture_width, false);
		pCam->Height.SetValue(picture_height, false);
	} catch (Spinnaker::Exception &e) {
		cout << "Error: " << e.what() << endl;
		return false;
	}

	set_gain(gain);
	set_shutter(shutter);

	last_timestamp_s = 0;

	circular_counter = 0;
	fish_accumulated_path = -1;

	/// start off with the full frame (no fish roi yet found)
	roi_x0 = 0;
	roi_x1 = picture_width;
	roi_y0 = 0;
	roi_y1 = picture_height;

	// create a videowriter for the fish roi
	// Background-model buffers; live_mode_counter is a per-pixel brightness
	// histogram (256 bins per pixel), live_mode_max_counter tracks the current
	// per-pixel modal count.
	mode = new unsigned char[picture_width * picture_height];
	live_mode = new unsigned char[picture_width * picture_height]; // needs to be freed when closing the camera
	live_mode_counter = new unsigned char[256 * picture_width * picture_height];
	live_mode_max_counter = new unsigned char[picture_width * picture_height];

	// mode initializes at zeros (will be changed during run)
	memset(mode, 0, picture_width * picture_height * sizeof(unsigned char));
	mode_mat = cv::Mat::zeros(picture_width, picture_height, CV_8UC1);
	memset(live_mode_counter, 0, 256 * picture_width * picture_height * sizeof(unsigned char));
	memset(live_mode_max_counter, 0, picture_width * picture_height * sizeof(unsigned char));

	live_mode_number = -1; // this counter the number of the mode (for disply in the image)
	live_mode_frame_index = 0;
	last_mode_image_added_time = 0;

	// Dummy function to initalizwe opencv
	cv::Mat test = cv::Mat::zeros(100, 100, CV_8UC1);
	threshold(test, test, 15, 255, 0);

	// Open the video streams without comporession
	if (strlen(filename_fish_roi) > 0) {
		fish_roi_video.open(cv::String(filename_fish_roi), CV_FOURCC('D', 'I', 'V', 'X'), 60, cv::Size(100, 100), false);
		//fish_roi_video.open(cv::String(filename_fish_roi), 0, 60, cv::Size(100, 100), false);
		fish_movie_framenum = 0;

	}

	if (strlen(filename_mode) > 0) {
		mode_video.open(cv::String(filename_mode), CV_FOURCC('D', 'I', 'V', 'X'), 60, cv::Size(picture_width, picture_height), false);
	}

	// initialize standard variables for the global _last_good_return structure
	_last_good_return.camera_framenum = 0;
	_last_good_return.camera_timestamp = 0;
	_last_good_return.camera_fps = 0;

	_last_good_return.mode_updated = false;
	_last_good_return.errorcode = -1;
	_last_good_return.fish_movie_framenum = fish_movie_framenum;

	_last_good_return.fish_position_x = 0;
	_last_good_return.fish_position_y = 0;

	_last_good_return.fish_orientation = 0;
	_last_good_return.fish_accumulated_orientation = 0;
	_last_good_return.fish_accumulated_orientation_lowpass = 0;
	_last_good_return.fish_accumulated_orientation_variance = 0;

	_last_good_return.fish_accumulated_path = 0;

	_last_good_return.bout_found = false;
	_last_good_return.bout_timestamp_start = 0;
	_last_good_return.bout_timestamp_end = 0;
	_last_good_return.bout_heading_angle_change = 0;
	_last_good_return.bout_distance_traveled_change = 0;

	_last_good_return.fish_area = 0;


	// and start the image stream
	try {
		pCam->BeginAcquisition();
	} catch (Spinnaker::Exception &e) {
		cout << "Error: " << e.what() << endl;
		return false;
	}

	timestamp_seconds_start = -1; // reset the timestamp to zero, dont know how to do this on the camera

	return true;
}
+
+DLLEXPORT bool close_cam(void)
+{
+ delete[] mode;
+ delete[] live_mode;
+ delete[] live_mode_counter;
+ delete[] live_mode_max_counter;
+
+ if (fish_roi_video.isOpened() == true) fish_roi_video.release();
+ if (mode_video.isOpened() == true) mode_video.release();
+
+ pCam->EndAcquisition();
+ pCam->DeInit();
+
+ camList.Clear();
+
+ // Release system
+ pCam = NULL;
+
+ system_gl->ReleaseInstance();
+
+ return true;
+}
+
+DLLEXPORT bool get_image(unsigned char *image_data, size_t size) {
+ if (pCam == NULL) return false;
+
+ ImagePtr pResultImage = pCam->GetNextImage();
+
+ ChunkData chunkData = pResultImage->GetChunkData();
+
+ memcpy(image_data, pResultImage->GetData(), size);
+
+ pResultImage->Release();
+
+ return true;
+}
+
+DLLEXPORT bool get_radial_histogram(unsigned char *image, unsigned char *output_radial_mean) {
+
+ double r_n[100];
+ double r_sum[100];
+ int i, j, k;
+ double r;
+
+ for (i = 0; i < 100; i++) {
+ r_sum[i] = 0;
+ r_n[i] = 0;
+ }
+
+ for (i = 0; i < 500; i++) {
+ for (j = 0; j < 500; j++) {
+ r = sqrt(pow(i - 250, 2) + pow(j - 250, 2));
+ if (r < 250) {
+ k = static_cast(100 * r / 250);
+ r_sum[k] += image[i * 500 + j];
+ r_n[k] += 1;
+ }
+ }
+ }
+
+ for (i = 0; i < 100; i++) {
+ output_radial_mean[i] = (unsigned char)(r_sum[i] / r_n[i]);
+ }
+
+ return true;
+}
+
+
+DLLEXPORT double get_gain() {
+
+ if (pCam == NULL) return 0;
+
+ return pCam->Gain.GetValue();
+
+}
+
+DLLEXPORT bool set_gain(double value) {
+
+ if (pCam == NULL) return false;
+
+ pCam->GainAuto.SetValue(Spinnaker::GainAutoEnums::GainAuto_Off);
+ try {
+ pCam->Gain.SetValue(value);
+ } catch (Spinnaker::Exception &e) {
+ cout << "Error: " << e.what() << endl;
+ return false;
+ }
+
+ return true;
+}
+
// Brightness control is not implemented for this camera; the stub pair exists
// only to keep the exported DLL interface uniform with other camera modules.
DLLEXPORT double get_brightness() { return 0; }
DLLEXPORT bool set_brightness(double value) { return true; }
+
+DLLEXPORT double get_shutter() {
+
+ if (pCam == NULL) return 0;
+
+ return pCam->ExposureTime.GetValue() / 1000.; // make us in ms
+}
+
+DLLEXPORT bool set_shutter(double value) {
+
+ if (pCam == NULL) return false;
+
+ pCam->ExposureAuto.SetValue(Spinnaker::ExposureAutoEnums::ExposureAuto_Off);
+ pCam->ExposureMode.SetValue(Spinnaker::ExposureModeEnums::ExposureMode_Timed);
+
+ try {
+ pCam->ExposureTime.SetValue(value*1000); // in make ms in us
+ } catch (Spinnaker::Exception &e) {
+ cout << "Error: " << e.what() << endl;
+ return false;
+ }
+
+ return true;
+}
+
// Grab the next camera frame and run the full per-frame tracking pipeline:
// background-mode maintenance, fish localization, orientation estimation via
// convex-hull image moments, and online bout detection on the accumulated
// heading angle. Returns a FishReturn snapshot; on any error the fields are
// carried over from the last good frame with errorcode set (1 blob too dim,
// 2 too close to wall, 3 no valid contour, 4 flipping, 5 jumping, 7 camera
// error, 8 mode still being computed, 0 all good).
//
// Parameters:
//   fish_roi          - out: 100x100 background-subtracted ROI around the fish
//                       (zeroed on error paths).
//   ignore_fish       - when true, only timestamps/mode are updated and the
//                       fish-finding stages are skipped.
//   copy_full_frame   - NOTE(review): currently unused; the full frame is
//                       always copied into full_frame_buffer.
//   full_frame_buffer - out: raw full camera frame, also used as scratch input
//                       for the mode and ROI computations.
//
// NOTE(review): several template argument lists were stripped in this copy of
// the file (static_cast(...), frame.ptr(...), rotMat_inv.at(0, 0),
// vector >fish_hull) — preserved byte-identical here; the originals were
// presumably static_cast<int>/<float>, ptr<uchar>, at<double>,
// vector<vector<cv::Point> >. Confirm against the original source.
DLLEXPORT struct FishReturn const get_fish_info(unsigned char *fish_roi, bool ignore_fish, bool copy_full_frame, unsigned char *full_frame_buffer)
{
	int i, j;

	unsigned char* pixrow;
	int fish_position_x, fish_position_y;
	double timestamp_seconds, delta_timestamp;
	double alpha_lowpass;

	double min_value, max_value;
	double fish_position_x_scaled, fish_position_y_scaled, fish_orientation;
	double fish_delta_position_x, fish_delta_position_y;
	double fish_delta_orientation;
	double fish_delta_path;

	double fish_area;
	double picture_norm_factor;

	double bout_finder_t_now;
	double bout_finder_t_past;

	double bout_finder_mean_sum;
	double bout_finder_mean;
	double bout_finder_variance;
	int bout_finder_i0;
	int bout_finder_past_i;
	double timestamp_seconds_now;

	// if we get an error, we assume the following: fish is still at the same position where it was before, same orientation, etc.
	struct FishReturn _return = _last_good_return;
	_return.bout_found = false;
	_return.mode_updated = false;

	cv::Mat frame = cv::Mat(roi_x1 - roi_x0, roi_y1 - roi_y0, CV_8U);
	cv::Mat thresholded = cv::Mat(roi_x1 - roi_x0, roi_y1 - roi_y0, CV_8U);
	cv::Mat blurred = cv::Mat(roi_x1 - roi_x0, roi_y1 - roi_y0, CV_8U);

	// standard of fish roi buffer is empty
	for (i = 0; i < 100 * 100; i++) fish_roi[i] = 0;

	ImagePtr pResultImage;

	try {

		pResultImage = pCam->GetNextImage();

	} catch (Spinnaker::Exception &e) {

		cout << "Error: " << e.what() << endl;

		// camera failed: fall back to searching the full frame next time
		roi_x0 = 0;
		roi_x1 = picture_width;
		roi_y0 = 0;
		roi_y1 = picture_height;
		any_error_during_bout = true;

		_return.errorcode = 7; // Camera error

		return _return;
	}

	// if full_frame_buffer is availble, copy the entire frame there
	memcpy(full_frame_buffer, pResultImage->GetData(), picture_height*picture_width);

	ChunkData chunkData = pResultImage->GetChunkData();

	// dont know how to reset the camera time stamp... so first frame is zero
	timestamp_seconds_now = chunkData.GetTimestamp() / (1000 * 1000 * 1000.); //old flycapture: timestamp.cycleSeconds + ((timestamp.cycleCount + (timestamp.cycleOffset / 3072.0)) / 8000.0);
	if (timestamp_seconds_start == -1) timestamp_seconds_start = timestamp_seconds_now;
	timestamp_seconds = timestamp_seconds_now - timestamp_seconds_start;

	// The timestamp runs with rather high preceision from 0 to 128 (exluding 128), so we need to correct for this
	delta_timestamp = timestamp_seconds - last_timestamp_s;
	last_timestamp_s = timestamp_seconds;

	_return.camera_timestamp = timestamp_seconds;
	_return.camera_framenum = (unsigned int) pResultImage->GetFrameID();// chunkData.GetCounterValue(); does not work!
	_return.camera_fps = 1.0 / delta_timestamp;

	_return.fish_movie_framenum = fish_movie_framenum;

	pResultImage->Release();

	/////////////////////////////////////
	// Live mode calculation
	// The background model is the per-pixel mode (most frequent brightness).
	// Work is spread over many frames in 1/16-image slices to bound per-frame cost.
	unsigned char c;
	int i0, i1, j0, j1, binx, biny;

	if (live_mode_frame_index > 1200) {

		// for 16 frames stay in this loop
		// this is to reset the background image counters, but split this into 16 parts, to distribute computation among between different frames
		// this is 1/16 of the entire buffer length
		binx = 256 * picture_width * picture_height / 16;
		biny = picture_width * picture_height / 16; // both width and height should be dividable by 32

		memset(live_mode_counter + (live_mode_frame_index - 1200)*binx, 0, binx * sizeof(unsigned char));
		memset(live_mode_max_counter + (live_mode_frame_index - 1200)*biny, 0, biny * sizeof(unsigned char));

		live_mode_frame_index += 1;

		if (live_mode_frame_index == 1216) {
			live_mode_frame_index = 0;
		}

	} else if (_return.camera_timestamp - last_mode_image_added_time >= 0.05) {
		// every 50 ms update a 1/16 of the whole mode... we use a 1/16 to better distribute computation time in between frames

		// determine which region of the mode we are in (use the index as a counter between 0 to 15)
		binx = (live_mode_frame_index % 16) / 4;
		biny = (live_mode_frame_index % 4);

		i0 = biny * picture_height / 4;
		i1 = i0 + picture_height / 4;

		j0 = binx * picture_width / 4;
		j1 = j0 + picture_width / 4;

		for (i = i0; i < i1; i++) {
			for (j = j0; j < j1; j++) {
				c = full_frame_buffer[i * picture_width + j];

				// in the 3d matrix add one to the pixel brightness count
				live_mode_counter[c * picture_width * picture_height + i * picture_width + j] += 1;
				// values in that counter can only run up to 255!,
				// so pay attention that this does not happen.. with live_mode_frame_index < 1200, where each region is analyzed every 1/16, this means, now the max val is less than 100

				if (live_mode_max_counter[i * picture_width + j] < live_mode_counter[c * picture_width * picture_height + i * picture_width + j]) {
					live_mode_max_counter[i * picture_width + j] = live_mode_counter[c * picture_width * picture_height + i * picture_width + j];
					live_mode[i * picture_width + j] = c;
				}
			}
		}

		live_mode_frame_index += 1;
		last_mode_image_added_time = _return.camera_timestamp;

		if (live_mode_frame_index == 1200) { /// should happen after circa 70 seconds!

			live_mode_number += 1;

			// Copy this to the mode structure which is used for background substraction
			memcpy(mode, live_mode, picture_width * picture_height * sizeof(unsigned char));

			// Transform it to an opencv image
			for (i = 0; i < picture_height; i++) {
				pixrow = mode_mat.ptr(i);

				for (j = 0; j < picture_width; j++)
					pixrow[j] = mode[i * picture_width + j];
			}

			// Add the timestamp to the mode image
			cv::putText(mode_mat, "#" + std::to_string(live_mode_number) + ", " + std::to_string(_return.camera_timestamp) + "s", cv::Point(20, 30), cv::FONT_HERSHEY_SIMPLEX, 1, 255, 1, 8, false);

			// Add the mode image to the mode video
			if (mode_video.isOpened() == true) mode_video.write(mode_mat);

			// Also write this image as a jpeg file, so it can be displayed in the gui
			if (global_filename_modeimage.length() > 0) {
				cv::imwrite(global_filename_modeimage, mode_mat);
			}

			_return.mode_updated = true; // so that the gui knows it has to load the current mode jpeg file
		}
	}

	if (live_mode_number == -1) {

		// no background model yet: cannot track, keep searching the full frame
		roi_x0 = 0;
		roi_x1 = picture_width;
		roi_y0 = 0;
		roi_y1 = picture_height;
		any_error_during_bout = true;

		_return.errorcode = 8; // still calculating the mode

		return _return;
	}

	if (ignore_fish == true) { // for example when one wants to illustrate how many fish swim without implementing feedback

		return _return;
	}

	///////////////////////////////////////////////////
	/// Find the FISH!

	// just copy the region into the opencv frame, the ptr method is supposed to be more efficient than the at method
	// and at the same time substract the mode
	for (i = roi_y0; i < roi_y1; i++) {
		pixrow = frame.ptr(i - roi_y0);

		for (j = roi_x0; j < roi_x1; j++)
			pixrow[j - roi_x0] = substract_background(full_frame_buffer[i * picture_width + j], mode[i * picture_width + j]);
	}

	cv::threshold(frame, thresholded, 15, 255, 0);
	cv::GaussianBlur(thresholded, blurred, Size(25, 25), 0);

	// find the darkest pixel (should be approximatelty the middle of the fish)
	cv::minMaxLoc(blurred, &min_value, &max_value, &min_loc, &max_loc);

	if (max_value < 20) { // fish too dim (or not found)

		roi_x0 = 0;
		roi_x1 = picture_width;
		roi_y0 = 0;
		roi_y1 = picture_height;
		any_error_during_bout = true;

		_return.errorcode = 1; // Blob not bright enough

		// add an empty frame to the fish movie
		add_fish_roi_to_movie(cv::Mat::zeros(100, 100, CV_8UC1), _return.camera_framenum, _return.errorcode);


		return _return;
	}

	picture_norm_factor = 255 / max_value; // Use the max_value to normalize the next step

	fish_position_x = max_loc.x + roi_x0;
	fish_position_y = max_loc.y + roi_y0;

	// if the fish is too close to the walls, its probably not a fish
	if (fish_position_x < 50 || fish_position_y < 50 || fish_position_x >= picture_width - 50 || fish_position_y >= picture_height - 50) {

		// also reset the roi
		roi_x0 = 0;
		roi_x1 = picture_width;
		roi_y0 = 0;
		roi_y1 = picture_height;
		any_error_during_bout = true;

		_return.errorcode = 2; // Fish blob too close to the wall

		// add an empty frame to the fish movie
		add_fish_roi_to_movie(cv::Mat::zeros(100, 100, CV_8UC1), _return.camera_framenum, _return.errorcode);

		return _return;
	}

	// just use an area of +- 50 around the fish of the original image for determining its head position and orientation
	for (i = 0; i < 100; i++) {
		pixrow = frame100.ptr(i);

		for (j = 0; j < 100; j++) {
			pixrow[j] = static_cast(picture_norm_factor * substract_background(full_frame_buffer[(fish_position_y + i - 50) * picture_width + fish_position_x + j - 50], mode[(fish_position_y + i - 50) * picture_width + fish_position_x + j - 50]));
		}
	}

	// find the fish orientation and refine its position, for this blow the picture up, and smooth it a bit, then threshold
	cv::resize(frame100, frame100_bigger, cv::Size(200, 200));
	cv::GaussianBlur(frame100_bigger, frame100_bigger, Size(5, 5), 0); // week smoothing

	cv::minMaxLoc(frame100_bigger, &min_value, &max_value, &min_loc, &max_loc);

	double thresh;
	bool found = false;

	vector >fish_hull(1);

	// apply a variable threshold, start from 0.8*maxvalue and go slowly down
	for (thresh = 0.7; thresh > 0.2; thresh -= 0.05) {

		vector > fish_contours;
		vector fish_contour_points;

		cv::threshold(frame100_bigger, thresholded100_bigger, thresh*max_value, 255, 0); // the threshold here defines how much of the fish will be used... low thresholds can lead to much to include the detailed tail shape, which leads to tracking artifacts

		// calculate the fraction of how many pixels are white now
		fish_area = (double)cv::sum(thresholded100_bigger)[0] / (200 * 200 * 255);

		if (fish_area > 0.2) {
			continue; // something went wrong, there are too many white dots
		}

		cv::Mat thresholded100_dummy = thresholded100_bigger.clone();

		// find all contours in the tresholded blown up image
		cv::findContours(thresholded100_dummy, fish_contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE); // This crashes for images with a lot of noise!!! To be solved

		if (fish_contours.size() == 0 || fish_contours.size() > 7) {
			continue; // no or too many contours found, probaly some big dust particles, which we don't want!
		}

		// combine all contours which are large enough into one array of points
		// sort the contours, and add the three largest ones together
		std::sort(fish_contours.begin(), fish_contours.end(), compare_contour_areas);
		for (i = 0; i < 3 && i < fish_contours.size(); i++) {

			if (contourArea(fish_contours[i]) > 10) { // we dont want to pick up noise
				fish_contour_points.insert(fish_contour_points.end(), fish_contours[i].begin(), fish_contours[i].end());
			}
		}

		if (fish_contour_points.size() == 0) {
			continue; // no big enough contours found
		}

		// Calculate a convex hull around these points
		convexHull(cv::Mat(fish_contour_points), fish_hull[0]);

		fish_area = contourArea(fish_hull[0]);

		if (fish_area > 2500 || fish_area < 400) { // was 2000 for larval zebrafish, but changed it 2500 to track big Drosophila larvae (12/11/2017)
			continue; // the found hull is too big, or too small, so its probably not a fish

		}

		found = true;
		break; /// all is good
	}

	// set the fish area to the return vector
	_return.fish_area = fish_area;

	if (found == false) {
		// also reset the roi
		roi_x0 = 0;
		roi_x1 = picture_width;
		roi_y0 = 0;
		roi_y1 = picture_height;
		any_error_during_bout = true;

		_return.errorcode = 3; /// threshold searh did not find properly sized contours

		// add the current frame100 to the fish movie, this might not be the fish
		add_fish_roi_to_movie(frame100, _return.camera_framenum, _return.errorcode);

		return _return;
	}

	// calculate the orientation of the convex hull with image moments
	Moments m;
	m = moments(fish_hull[0], false);

	// first order moments (represent the center off mass)
	double x_ = m.m10 / m.m00;
	double y_ = m.m01 / m.m00;

	// second order moments (represent the covariances for x, y, xy)
	double mu20 = m.mu20 / m.m00;
	double mu02 = m.mu02 / m.m00;
	double mu11 = m.mu11 / m.m00;

	// the eigenvectors of the covariance matrix represent to direction of largest and smalles variances, out of this one can calculate the angle of the fish (equation from wikipedia)
	double angle = 0.5*atan2(2 * mu11, mu20 - mu02);

	// This angle does not allow to determine whether the fish is oriented right, or left, so we rotate the fish by that angle and calculate the point of its largest width, which should be the eyes

	// OpenCV allows the creaton of a rotation matrix around the center of mass with the angle of the largest variance
	Mat rotMat = getRotationMatrix2D(Point2f(static_cast(x_), static_cast(y_)), angle * 180 / 3.141592653589793, 1);
	Mat rotMat_inv;

	// And the inverted rotation matrix
	invertAffineTransform(rotMat, rotMat_inv);

	// Draw the hull into an empty image
	Mat dummy_image_original = cv::Mat::zeros(200, 200, CV_32F);
	drawContours(dummy_image_original, fish_hull, 0, 1, CV_FILLED, 8); // the image will be only 1s and 0s

	// Rotate that image around its center of mass and with the angle found before
	Mat dummy_image_rotated = cv::Mat::zeros(200, 200, CV_32F);
	cv::warpAffine(dummy_image_original, dummy_image_rotated, rotMat, cv::Size(200, 200));

	// Sum all the columns of the rotated image, so we can determine the maximal wisdth of the fish
	Mat dummy_image_reduced = cv::Mat::zeros(1, 200, CV_32F); // 1 row 200 columns
	cv::reduce(dummy_image_rotated, dummy_image_reduced, 0, CV_REDUCE_SUM);

	// Determine where the maximal value is in that image (corresponds to the widtgh of the fish)
	cv::minMaxLoc(dummy_image_reduced, &min_value, &max_value, &min_loc, &max_loc);

	// The location of the maximum along the x-axis is the point of the head
	Point2f point_head(static_cast(max_loc.x), static_cast(y_)); // as the image was reduced, the y position is the center of mass of the non-reduced image, as we rotated around that point, that is the center of mass of the original image

	// for precision purposes, make a very long virtual tail
	Point2f point_dummy_tail;
	if (max_loc.x < x_) point_dummy_tail = Point2f(+1000, static_cast(y_));
	else point_dummy_tail = Point2f(-1000, static_cast(y_));

	// Apply the inverse rotation matrix to that vector by hand (see warpAffine documetion how opencv arranges its rotation matrix, 3rd column if for the translation)
	Point2f point_head_rotatedback(static_cast(point_head.x*rotMat_inv.at(0, 0) + point_head.y*rotMat_inv.at(0, 1) + rotMat_inv.at(0, 2)),
		static_cast(point_head.x*rotMat_inv.at(1, 0) + point_head.y*rotMat_inv.at(1, 1) + rotMat_inv.at(1, 2)));

	Point2f point_dummy_tail_rotatedback(static_cast(point_dummy_tail.x*rotMat_inv.at(0, 0) + point_dummy_tail.y*rotMat_inv.at(0, 1) + rotMat_inv.at(0, 2)),
		static_cast(point_dummy_tail.x*rotMat_inv.at(1, 0) + point_dummy_tail.y*rotMat_inv.at(1, 1) + rotMat_inv.at(1, 2)));

	// calculate the final angle fround the infinitale tail position and head position
	fish_orientation = atan2(-(point_head_rotatedback.y - point_dummy_tail_rotatedback.y), point_head_rotatedback.x - point_dummy_tail_rotatedback.x);


	////////////////////////////////////
	//// All information are now availble

	// Update the fish position
	fish_position_x = static_cast(fish_position_x - 50 + point_head_rotatedback.x / 2); // devide by two because of the scaling from roi 100 to roi 200
	fish_position_y = static_cast(fish_position_y - 50 + point_head_rotatedback.y / 2);

	if (fish_position_x < 50 || fish_position_y < 50 || fish_position_x >= picture_width - 50 || fish_position_y >= picture_height - 50) {

		// also reset the roi
		roi_x0 = 0;
		roi_x1 = picture_width;
		roi_y0 = 0;
		roi_y1 = picture_height;
		any_error_during_bout = true;

		_return.errorcode = 2; // Fish is too close to the wall

		add_fish_roi_to_movie(frame100, _return.camera_framenum, _return.errorcode);

		return _return;
	}

	// Update the roi position for the next round (search in a 200x200 window around the last fish head position)
	roi_x0 = fish_position_x - 100;
	roi_x1 = fish_position_x + 100;
	roi_y0 = fish_position_y - 100;
	roi_y1 = fish_position_y + 100;

	// the roi might now be too close to the walls, make it still 200x200
	if (roi_x0 < 0) {
		roi_x0 = 0;
		roi_x1 = 200;
	}

	if (roi_x1 >= picture_width) {
		roi_x1 = picture_width - 1;
		roi_x0 = picture_width - 1 - 200;
	}

	if (roi_y0 < 0) {
		roi_y0 = 0;
		roi_y1 = 200;
	}

	if (roi_y1 >= picture_height) {
		roi_y1 = picture_height - 1;
		roi_y0 = picture_height - 1 - 200;
	}

	// Update the fish roi to its final position
	for (i = 0; i < 100; i++) {

		pixrow = frame100.ptr(i);

		for (j = 0; j < 100; j++) {

			fish_roi[i * 100 + j] = static_cast(picture_norm_factor * substract_background(full_frame_buffer[(fish_position_y + i - 50) * picture_width + fish_position_x + j - 50], mode[(fish_position_y + i - 50) * picture_width + fish_position_x + j - 50]));

			pixrow[j] = fish_roi[i * 100 + j];

		}
	}

	// For debugging
	/*
	Scalar color = Scalar(0, 255, 255);
	Scalar color2 = Scalar(255, 0, 255);
	Scalar color3 = Scalar(0, 0, 255);

	cv::Mat test = cv::Mat::zeros(200, 200, CV_8UC3);
	drawContours(test, fish_hull, 0, color, CV_FILLED, 8);
	cv::circle(test, point_head_rotatedback, 3, color3, -1);

	cv::imshow("Convex hull", test);*/

	//cv::waitKey(1);

	// Scale the value between -1 and 1
	fish_position_x_scaled = 2 * ((double)fish_position_x) / picture_width - 1;
	fish_position_y_scaled = - (2 * ((double)fish_position_y) / picture_height - 1);
	fish_orientation = fish_orientation * 180 / 3.141592653589793; /// Make it degree


	/////////////////////////
	/// Live bout finder
	// Detects swim bouts as episodes of high variance in the accumulated
	// heading angle over a sliding ~50 ms window stored in circular buffers.

	if (fish_accumulated_path == -1) { // the very first function call
		fish_accumulated_path = 0;
		fish_accumulated_orientation = fish_orientation;

		circular_counter = 0;

		circular_history_time[0] = timestamp_seconds;
		circular_history_fish_accumulated_orientation[0] = fish_accumulated_orientation;

		fish_accumulated_orientation_lowpass = fish_accumulated_orientation;
		inside_bout = false;
		variance_was_low = false;
		last_bout_timestamp_end = -1;

	} else {

		// determine the delta time between this return and the last one that was good
		delta_timestamp = _return.camera_timestamp - _last_good_return.camera_timestamp;

		// what is is the delta orientation
		// wrapped into (-180, 180] so 359 -> 1 counts as a +2 degree change
		fish_delta_orientation = fmod(fish_orientation - _last_good_return.fish_orientation + 180 + 360, 360) - 180;

		// is there some error (flipping)?
		// assuming the framerate is about 100 hz, maximal heading angle change during espaces can be up to 10000deg/s (Tim), but set the threshold for us here at 5000deg/s
		if (fabs(fish_delta_orientation / delta_timestamp) > 5000) {
			// also reset the roi
			roi_x0 = 0;
			roi_x1 = picture_width;
			roi_y0 = 0;
			roi_y1 = picture_height;
			any_error_during_bout = true;

			_return.errorcode = 4; // Fish is flipping

			add_fish_roi_to_movie(frame100, _return.camera_framenum, _return.errorcode);

			return _return;
		}

		fish_delta_position_x = fish_position_x_scaled - _last_good_return.fish_position_x;
		fish_delta_position_y = fish_position_y_scaled - _last_good_return.fish_position_y;

		fish_delta_path = sqrt(fish_delta_position_x*fish_delta_position_x + fish_delta_position_y*fish_delta_position_y);

		// is the fish jumping (because for example, some dust particle?
		// we assume that the maximal upper bound for a path change/s is one dish per second
		if (fish_delta_path / delta_timestamp > 2) {
			// also reset the roi
			roi_x0 = 0;
			roi_x1 = picture_width;
			roi_y0 = 0;
			roi_y1 = picture_height;
			any_error_during_bout = true;

			_return.errorcode = 5; // Fish is jumping

			add_fish_roi_to_movie(frame100, _return.camera_framenum, _return.errorcode);

			return _return;
		}

		fish_accumulated_path += fish_delta_path;
		fish_accumulated_orientation += fish_delta_orientation;

		// use the accumulated fish orientation for bout detection via analyzing the variance with a 100ms time bin
		circular_history_fish_accumulated_orientation[circular_counter] = fish_accumulated_orientation;
		circular_history_fish_accumulated_path[circular_counter] = fish_accumulated_path;

		circular_history_time[circular_counter] = timestamp_seconds;

		// apply a weak lowpass filter to the accumulated orientation
		alpha_lowpass = delta_timestamp / (0.05 + delta_timestamp);
		fish_accumulated_orientation_lowpass = alpha_lowpass*fish_accumulated_orientation + (1 - alpha_lowpass)*fish_accumulated_orientation_lowpass;

		// find how many indixes we have to go to the past for 50ms
		// find the indexes which mark the last 50ms (as time might be variable, search for the right index based on the timestamps
		bout_finder_t_now = circular_history_time[circular_counter];
		bout_finder_past_i = 0;

		/// TODO
		/// need to have the array filled before that loop, otherwise might run forever, check for that!

		while (true) {
			bout_finder_past_i++;

			bout_finder_t_past = circular_history_time[(circular_counter - bout_finder_past_i + 100) % 100];

			if (bout_finder_t_now - bout_finder_t_past >= 0.05) {
				break;
			}
		}

		// Calculate the mean
		bout_finder_mean_sum = 0;

		for (i = 0; i < bout_finder_past_i; i++) {

			bout_finder_i0 = (circular_counter - i + 100) % 100;
			bout_finder_mean_sum += circular_history_fish_accumulated_orientation[bout_finder_i0];
		}

		bout_finder_mean = bout_finder_mean_sum / bout_finder_past_i;

		// Calculate the variance
		bout_finder_variance = 0;
		for (i = 0; i < bout_finder_past_i; i++) {
			bout_finder_i0 = (circular_counter - i + 100) % 100;
			bout_finder_variance += pow(circular_history_fish_accumulated_orientation[bout_finder_i0] - bout_finder_mean, 2);
		}

		bout_finder_variance /= bout_finder_past_i;

		// Variance needs to be higher than a threshold, for the first time
		if (bout_finder_variance > 2 && inside_bout == false && variance_was_low == true) {
			inside_bout = true;
			variance_was_low = false; // whatever happens, the algorithm needs to find a variance smaller than 1 in order to find a new bout
			any_error_during_bout = false; /// keep track of poential errors, if any, this bout is not reliable and we'll ignore it

			bout_start_i = (circular_counter - bout_finder_past_i + 100) % 100;
		}

		if (bout_finder_variance < 1 && inside_bout == true) {
			inside_bout = false;

			// we ignore the whole thing, if any error happened
			if (any_error_during_bout == false) {

				bout_end_i = circular_counter;

				// was this really a bout? the starttime of the new bout should be at least 50ms away from the old bout (or first bout ever)
				if (circular_history_time[bout_start_i] - last_bout_timestamp_end > 0.05 || last_bout_timestamp_end == -1) {

					_return.bout_found = true;
					_return.bout_timestamp_start = circular_history_time[bout_start_i];
					_return.bout_timestamp_end = circular_history_time[bout_end_i];

					_return.bout_heading_angle_change = circular_history_fish_accumulated_orientation[bout_end_i] - circular_history_fish_accumulated_orientation[bout_start_i];
					_return.bout_distance_traveled_change = circular_history_fish_accumulated_path[bout_end_i] - circular_history_fish_accumulated_path[bout_start_i];

					last_bout_timestamp_end = circular_history_time[bout_end_i];
				}
			}
		}

		if (bout_finder_variance < 1) {
			variance_was_low = true;
		}
	}

	circular_counter++;
	if (circular_counter == 100) circular_counter = 0;

	_return.errorcode = 0; // Fish found, all good

	_return.fish_position_x = fish_position_x_scaled;
	_return.fish_position_y = fish_position_y_scaled;

	_return.fish_orientation = fish_orientation;
	_return.fish_accumulated_orientation = fish_accumulated_orientation;
	_return.fish_accumulated_orientation_lowpass = fish_accumulated_orientation_lowpass;
	_return.fish_accumulated_orientation_variance = bout_finder_variance;

	_return.fish_accumulated_path = fish_accumulated_path;

	_last_good_return = _return; // memorize this return structure, in case next round something goes wrong

	add_fish_roi_to_movie(frame100, _return.camera_framenum, _return.errorcode);

	return _return;
}
\ No newline at end of file
diff --git a/modules/fishcamera_C/fishcamera/fishcamera.h b/modules/fishcamera_C/fishcamera/fishcamera.h
new file mode 100644
index 0000000..1e3301f
--- /dev/null
+++ b/modules/fishcamera_C/fishcamera/fishcamera.h
@@ -0,0 +1,141 @@
+#pragma once
+#define DLLEXPORT extern "C" __declspec(dllexport)
+
+#include "Spinnaker.h"
+#include "SpinGenApi/SpinnakerGenApi.h"
+#include
+#include
+#include
+#include "windows.h"
+
+using namespace Spinnaker;
+using namespace Spinnaker::GenApi;
+using namespace Spinnaker::GenICam;
+using namespace std;
+using namespace cv;
+
// Result structure returned by get_fish_info() for every processed camera
// frame. On a tracking failure the previously memorized good return
// (_last_good_return) is handed out instead, so consumers should inspect
// errorcode before trusting the fields.
struct FishReturn
{
	unsigned int camera_framenum;   // frame counter reported by the camera
	double camera_timestamp;        // frame timestamp, seconds

	double camera_fps;              // current acquisition-rate estimate

	bool mode_updated;              // true when the background ("mode") image was refreshed
	int errorcode;                  // 0 = fish found, all good; non-zero = tracking problem

	int fish_movie_framenum;        // index of this frame within the saved fish-ROI movie

	double fish_position_x;         // fish centroid, scaled coordinates
	double fish_position_y;

	double fish_orientation;                      // instantaneous heading
	double fish_accumulated_orientation;          // unwrapped (cumulative) heading
	double fish_accumulated_orientation_lowpass;  // low-pass filtered cumulative heading
	double fish_accumulated_orientation_variance; // variance used by the bout detector

	double fish_accumulated_path;   // cumulative distance traveled

	// Bout (swim event) detection results; valid only when bout_found is true.
	bool bout_found;
	double bout_timestamp_start;
	double bout_timestamp_end;
	double bout_heading_angle_change;      // heading change from bout start to bout end
	double bout_distance_traveled_change;  // path-length change from bout start to bout end

	double fish_area;               // detected fish blob area (presumably pixels — TODO confirm)
};
+
// Camera stuff (Spinnaker handles for the single camera this DLL controls)
SystemPtr system_gl;     // Spinnaker system singleton
CameraList camList;      // cameras enumerated on the system
CameraPtr pCam;          // the camera opened by open_cam()
ImagePtr pResultImage;   // most recently grabbed frame

VideoWriter fish_roi_video;   // movie of the 100x100 fish region-of-interest
VideoWriter mode_video;       // movie of the background ("mode") images

cv::String global_filename_modeimage;  // path where the mode image is written

double last_timestamp_s;         // timestamp of the previous frame, seconds
double timestamp_seconds_start;  // timestamp of the first frame, seconds

int circular_counter;     // write index into the 100-entry circular histories (wraps at 100)
int fish_movie_framenum;  // frames written to the fish ROI movie so far

double fish_accumulated_orientation_lowpass;  // low-pass filtered cumulative heading

// Circular histories of the last 100 frames, indexed by circular_counter.
double circular_history_time[100];
double circular_history_fish_accumulated_orientation[100];
double circular_history_fish_accumulated_path[100];

double last_bout_timestamp_end;  // end time of the previous bout, seconds; -1 before the first bout

double fish_accumulated_path = -1, fish_accumulated_orientation;  // -1 marks "nothing accumulated yet"

int fish_serial;  // serial number of the camera/fish this DLL instance serves

int picture_height;  // full camera frame dimensions, pixels
int picture_width;

// Camera region-of-interest bounds, pixels.
int roi_x0;
int roi_x1;
int roi_y0;
int roi_y1;

// Bout-detector state machine flags.
bool inside_bout;            // currently inside a candidate bout
bool any_error_during_bout;  // a tracking error occurred during the bout -> discard it
bool variance_was_low;       // orientation variance dropped below threshold (< 1) since the last bout

int bout_start_i;  // circular-history indices delimiting the current bout
int bout_end_i;

int live_mode_number;               // which live background-mode slot is being filled
int live_mode_frame_index;          // frame index within the current mode accumulation
double last_mode_image_added_time;  // when a frame was last added to the mode estimate

// Background-model buffers; needs to be freed when closing the camera.
unsigned char* mode;                // current background ("mode") image
unsigned char* live_mode;           // background model being built from live frames
unsigned char *live_mode_counter;   // per-pixel counters for the live mode estimate
unsigned char *live_mode_max_counter;
cv::Mat mode_mat;                   // cv::Mat view of the mode image

// Preallocated 100x100 working images for the fish ROI pipeline.
cv::Mat frame100 = cv::Mat(100, 100, CV_8U);
cv::Mat frame100a = cv::Mat(100, 100, CV_8U);
cv::Mat frame100b = cv::Mat(100, 100, CV_8U);
cv::Mat frame100c = cv::Mat(100, 100, CV_8U);
cv::Mat frame100d = cv::Mat(100, 100, CV_8U);
cv::Mat thresholded100 = cv::Mat(100, 100, CV_8U);
cv::Mat blurred100 = cv::Mat(100, 100, CV_8U);

// 200x200 variants (presumably for upscaled, finer localization — TODO confirm).
cv::Mat frame100_bigger = cv::Mat(200, 200, CV_8U);
cv::Mat thresholded100_bigger = cv::Mat(200, 200, CV_8U);
cv::Mat blurred100_bigger = cv::Mat(200, 200, CV_8U);

cv::Point min_loc, max_loc;  // scratch outputs for cv::minMaxLoc

struct FishReturn _last_good_return;  // last successful return, reused when a frame fails

/// non-export internal functions

// NOTE(review): the std::vector template arguments appear to have been lost
// during extraction (likely std::vector<cv::Point>); restore before compiling.
bool compare_contour_areas(std::vector contour1, std::vector contour2);
bool add_fish_roi_to_movie(cv::Mat fish_roi, unsigned int camera_framenum, int errorcode);

// functions visible from the outside of the dll
DLLEXPORT bool reset_cam(unsigned int serial_number);
DLLEXPORT bool open_cam(unsigned int serial_number, unsigned int radius, unsigned int xoffset, unsigned int yoffset, float gain, float shutter, const char* filename_fish_roi, const char* filename_mode, const char* filename_modeimage);
DLLEXPORT bool close_cam(void);
DLLEXPORT bool get_image(unsigned char *image_data, size_t size);
DLLEXPORT bool get_radial_histogram(unsigned char *image, unsigned char *output_radial_mean);

DLLEXPORT double get_gain();
DLLEXPORT bool set_gain(double value);

DLLEXPORT double get_brightness();
DLLEXPORT bool set_brightness(double value);

DLLEXPORT double get_shutter();
DLLEXPORT bool set_shutter(double value);

DLLEXPORT struct FishReturn const get_fish_info(unsigned char *fish_roi, bool ignore_fish, bool copy_full_frame, unsigned char *full_frame_buffer);
+
diff --git a/modules/fishcamera_C/fishcamera/fishcamera.vcxproj b/modules/fishcamera_C/fishcamera/fishcamera.vcxproj
new file mode 100644
index 0000000..6ae1954
--- /dev/null
+++ b/modules/fishcamera_C/fishcamera/fishcamera.vcxproj
@@ -0,0 +1,168 @@
+
+
+
+
+ Debug
+ Win32
+
+
+ Release
+ Win32
+
+
+ Debug
+ x64
+
+
+ Release
+ x64
+
+
+
+ {0E03E6E2-B92B-44F4-8BF2-FFF20A6C377B}
+ Win32Proj
+ fishcamera
+ 10.0
+
+
+
+ DynamicLibrary
+ true
+ v142
+ Unicode
+
+
+ DynamicLibrary
+ false
+ v142
+ true
+ Unicode
+
+
+ DynamicLibrary
+ true
+ v142
+ Unicode
+
+
+ DynamicLibrary
+ false
+ v142
+ true
+ Unicode
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ true
+ C:\Program Files\Point Grey Research\FlyCapture2\include;C:\OpenCV\build\include;$(VC_IncludePath);$(WindowsSDK_IncludePath);
+ C:\OpenCV\build\x64\vc14\lib;C:\Program Files\Point Grey Research\FlyCapture2\lib64;$(VC_LibraryPath_x64);$(WindowsSDK_LibraryPath_x64);$(NETFXKitsDir)Lib\um\x64
+
+
+ false
+ C:\Program Files\Point Grey Research\FlyCapture2\include;C:\OpenCV\build\include;$(IncludePath)
+ C:\OpenCV\build\x64\vc12\lib;C:\Program Files\Point Grey Research\FlyCapture2\lib64;$(LibraryPath)
+
+
+ false
+ C:\Program Files\Point Grey Research\Spinnaker\include;C:\opencv3\opencv\build\include;$(VC_IncludePath);$(WindowsSDK_IncludePath);$(IncludePath)
+ C:\opencv3\opencv\build\x64\vc15\lib;C:\Program Files\Point Grey Research\Spinnaker\lib64\vs2015;$(VC_LibraryPath_x64);$(WindowsSDK_LibraryPath_x64);$(LibraryPath)
+
+
+
+
+
+ Level3
+ Disabled
+ WIN32;_DEBUG;_WINDOWS;_USRDLL;FISHCAMERA_EXPORTS;%(PreprocessorDefinitions)
+ true
+
+
+ Windows
+ true
+
+
+
+
+
+
+ Level3
+ Full
+ _DEBUG;_WINDOWS;_USRDLL;FISHCAMERA_EXPORTS;%(PreprocessorDefinitions)
+ true
+
+
+ Windows
+ true
+ kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;FlyCapture2d.lib;opencv_world310d.lib;%(AdditionalDependencies)
+
+
+
+
+ Level3
+
+
+ MaxSpeed
+ true
+ true
+ WIN32;NDEBUG;_WINDOWS;_USRDLL;FISHCAMERA_EXPORTS;%(PreprocessorDefinitions)
+ true
+
+
+ Windows
+ true
+ true
+ true
+ kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;FlyCapture2.lib;%(AdditionalDependencies)
+
+
+
+
+ Level3
+
+
+ MaxSpeed
+ true
+ true
+ NDEBUG;_WINDOWS;_USRDLL;FISHCAMERA_EXPORTS;%(PreprocessorDefinitions)
+ true
+ C:\opencv\build\include
+
+
+ Windows
+ true
+ true
+ true
+ kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;Spinnaker_v140.lib;opencv_world349.lib;%(AdditionalDependencies)
+ C:\opencv\build\x64\vc15\lib
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/modules/fishcamera_C/fishcamera/fishcamera.vcxproj.filters b/modules/fishcamera_C/fishcamera/fishcamera.vcxproj.filters
new file mode 100644
index 0000000..8ac6c94
--- /dev/null
+++ b/modules/fishcamera_C/fishcamera/fishcamera.vcxproj.filters
@@ -0,0 +1,27 @@
+
+
+
+
+ {4FC737F1-C7A5-4376-A066-2A32D752A2FF}
+ cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx
+
+
+ {93995380-89BD-4b04-88EB-625FBE52EBFB}
+ h;hh;hpp;hxx;hm;inl;inc;xsd
+
+
+ {67DA6AB6-F800-4c08-8B7A-83BB121AAD01}
+ rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms
+
+
+
+
+ Source Files
+
+
+
+
+ Header Files
+
+
+
\ No newline at end of file
diff --git a/modules/fishcamera_C/fishcamera/fishcamera.vcxproj.user b/modules/fishcamera_C/fishcamera/fishcamera.vcxproj.user
new file mode 100644
index 0000000..6fb136b
--- /dev/null
+++ b/modules/fishcamera_C/fishcamera/fishcamera.vcxproj.user
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.Build.CppClean.log b/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.Build.CppClean.log
new file mode 100644
index 0000000..e8286ee
--- /dev/null
+++ b/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.Build.CppClean.log
@@ -0,0 +1,2 @@
+c:\users\arminbahl\pycharmprojects\fishsetup\free_swimming_4fish_setup\modules\fishcamera_c\x64\debug\fishcamera.dll
+c:\users\arminbahl\pycharmprojects\fishsetup\free_swimming_4fish_setup\modules\fishcamera_c\x64\debug\fishcamera.ilk
diff --git a/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.log b/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.log
new file mode 100644
index 0000000..42dba5a
--- /dev/null
+++ b/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.log
@@ -0,0 +1 @@
+cl : Command line error D8016: '/Ox' and '/RTC1' command-line options are incompatible
diff --git a/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.tlog/CL.command.1.tlog b/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.tlog/CL.command.1.tlog
new file mode 100644
index 0000000..93db41f
Binary files /dev/null and b/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.tlog/CL.command.1.tlog differ
diff --git a/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.tlog/CL.read.1.tlog b/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.tlog/CL.read.1.tlog
new file mode 100644
index 0000000..c58e420
Binary files /dev/null and b/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.tlog/CL.read.1.tlog differ
diff --git a/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.tlog/fishcamera.lastbuildstate b/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.tlog/fishcamera.lastbuildstate
new file mode 100644
index 0000000..3b31725
--- /dev/null
+++ b/modules/fishcamera_C/fishcamera/x64/Debug/fishcamera.tlog/fishcamera.lastbuildstate
@@ -0,0 +1,2 @@
+#TargetFrameworkVersion=v4.0:PlatformToolSet=v141:EnableManagedIncrementalBuild=false:VCToolArchitecture=Native32Bit:WindowsTargetPlatformVersion=10.0.16299.0
+Debug|x64|C:\Users\arminbahl\PycharmProjects\fishsetup\free_swimming_4fish_setup\modules\fishcamera_C\|
diff --git a/stimuli/black.png b/stimuli/black.png
new file mode 100644
index 0000000..fa031ed
Binary files /dev/null and b/stimuli/black.png differ
diff --git a/stimuli/blackwhitepatch.png b/stimuli/blackwhitepatch.png
new file mode 100644
index 0000000..34b7a49
Binary files /dev/null and b/stimuli/blackwhitepatch.png differ
diff --git a/stimuli/circle2.png b/stimuli/circle2.png
new file mode 100644
index 0000000..5418580
Binary files /dev/null and b/stimuli/circle2.png differ
diff --git a/stimuli/circle3.png b/stimuli/circle3.png
new file mode 100644
index 0000000..867c4cd
Binary files /dev/null and b/stimuli/circle3.png differ
diff --git a/stimuli/circles.bam b/stimuli/circles.bam
new file mode 100644
index 0000000..5b5f5b9
Binary files /dev/null and b/stimuli/circles.bam differ
diff --git a/stimuli/dot_motion_coherenceedited.py b/stimuli/dot_motion_coherenceedited.py
new file mode 100644
index 0000000..f91d842
--- /dev/null
+++ b/stimuli/dot_motion_coherenceedited.py
@@ -0,0 +1,193 @@
+
# GLSL shader pair [vertex, fragment] for the random-dot motion stimulus.
# Dot data is supplied through a 10000x1 float texture (p3d_Texture0): the
# texel holds (color, y, x) for the dot whose index is encoded in the
# vertex's y coordinate.
dot_motion_shader = [
    # Vertex shader: looks up this dot's properties and positions it, or
    # collapses it to the origin when it lies outside the unit circle or
    # its index exceeds 1200.
    """ #version 140

    uniform sampler2D p3d_Texture0;
    uniform mat4 p3d_ModelViewProjectionMatrix;

    in vec4 p3d_Vertex;
    in vec2 p3d_MultiTexCoord0;

    out vec4 color;

    void main(void) {
        vec4 newvertex;
        float dot_i;
        float dot_scale = 0.01;
        float dot_x, dot_y, dot_color;
        float maxi = 10000.0;
        vec4 dot_properties;

        dot_i = float(p3d_Vertex[1]);
        dot_properties = texture2D(p3d_Texture0, vec2(dot_i/maxi, 0.0));

        dot_x = dot_properties[2];
        dot_y = dot_properties[1];
        dot_color = dot_properties[0];

        newvertex = p3d_Vertex;

        if (dot_x*dot_x + dot_y*dot_y > 1.0*1.0 || dot_i > 1200) { // only plot 1200 dots inside the circle
            newvertex[0] = 0.0;
            newvertex[1] = 0.0;
            newvertex[2] = 0.0;
        } else {
            newvertex[0] = p3d_Vertex[0]*dot_scale+dot_x;
            newvertex[1] = 0;
            newvertex[2] = p3d_Vertex[2]*dot_scale+dot_y;
        }

        color = vec4(dot_color, dot_color, dot_color, 1.0);

        gl_Position = p3d_ModelViewProjectionMatrix * newvertex;
    }
    """,

    # Fragment shader: passes the per-dot color through unchanged.
    """ #version 140
    in vec4 color;
    out vec4 gl_FragColor;

    void main() {
        gl_FragColor = color;
    }

    """
]
+
if __name__ == "__main__":
    import sys
    import numpy as np
    import os

    # Make the experiment-framework modules (shared, scene) importable.
    sys.path.append(r"C:\Users\Max\Desktop\LS100\modules")

    from shared import Shared
    import socket

    # Build the stimulus table: every (direction, coherence) combination,
    # each presented for 20 s.
    stimuli = []
    t_stimuli = []

    coherences = [0, 25, 50, 100]
    directions = [-1, 1]  # left, right
    number_fish = 2
    for direction in directions:
        for coherence in coherences:
            stimuli.append([direction, coherence])
            t_stimuli.append(20)  # 5s 0% coherence, 10s coherence level, 5s 0%

    # Shared experiment state (multiprocessing values, data logging threads).
    shared = Shared(r'C:\LS100\LS100_free_swimming_4fish_data', "dot_motion_coherence_new", stimuli=stimuli, t_stimuli=t_stimuli, shaders=[dot_motion_shader], python_file = __file__)

    shared.start_threads()

    from scene import Scene
    from panda3d.core import *
+
    class World(Scene):
        """Panda3D scene showing a per-fish random-dot motion stimulus.

        Each fish gets a disc of dots whose (color, y, x) values live in a
        10000x1 float texture read by dot_motion_shader; apparent motion is
        produced by rewriting that texture roughly every 25 ms.
        """

        def __init__(self):

            # Init the scene
            Scene.__init__(self, shared=shared)

            self.background_circle = dict({})  # black disc behind the dots, one per fish
            self.circles = dict({})            # dot geometry loaded from circles.bam
            self.dummytex = dict({})           # per-fish texture carrying the dot properties

            self.dots_position = dict({})      # (1, 10000, 3) float32 buffer uploaded to the texture
            self.dots_x = dict({})
            self.dots_y = dict({})

            # Per-fish time accumulated since the last dot-field update, seconds.
            self.t_ = [0, 0, 0, 0]

            for fish_index in range(number_fish):

                # Black background disc parented to this fish's node.
                self.background_circle[fish_index] = self.create_circles(1, edges=30)
                self.background_circle[fish_index].reparentTo(self.fish_nodes[fish_index])

                self.background_circle[fish_index].setScale(1, 0, 1)
                self.background_circle[fish_index].setColor(0,0, 0)
                self.background_circle[fish_index].setPos(0, 0.001, 0)

                # Dot geometry, one small circle per dot.
                filepath = os.path.join(os.path.split(__file__)[0], "circles.bam")
                self.circles[fish_index] = self.loader.loadModel(Filename.fromOsSpecific(filepath))
                self.circles[fish_index].reparentTo(self.fish_nodes[fish_index])

                # 10000x1 float texture read by the vertex shader to place/hide
                # each dot; nearest filtering so each dot samples its own texel.
                self.dummytex[fish_index] = Texture("dummy texture")
                self.dummytex[fish_index].setup2dTexture(10000, 1, Texture.T_float, Texture.FRgb32)
                self.dummytex[fish_index].setMagfilter(Texture.FTNearest)

                ts1 = TextureStage("part2")
                ts1.setSort(-100)

                self.circles[fish_index].setTexture(ts1, self.dummytex[fish_index])
                self.circles[fish_index].setShader(self.compiled_shaders[0])

                # Start all dots at uniform random positions in [-1, 1).
                self.dots_x[fish_index] = 2 * np.random.random(10000).astype(np.float32) - 1
                self.dots_y[fish_index] = 2 * np.random.random(10000).astype(np.float32) - 1

                self.dots_position[fish_index] = np.empty((1, 10000, 3)).astype(np.float32)
                self.dots_position[fish_index][0, :, 0] = self.dots_x[fish_index]
                self.dots_position[fish_index][0, :, 1] = self.dots_y[fish_index]
                self.dots_position[fish_index][0, :, 2] = np.random.randint(0, 3,10000).astype(np.float32) * 0 + 1 # 0 will be black, 1, will be white, 2 will be hidden

                # Upload the initial dot buffer into the texture.
                memoryview(self.dummytex[fish_index].modify_ram_image())[:] = self.dots_position[fish_index].tobytes()

        def stimulus(self, fish_index, i_stimulus, t_stimulus, frame, time, dt, trial):
            """Advance the dot field for one fish by one display frame."""

            direction, coherence = stimuli[i_stimulus]

            if self.t_[fish_index] >= 0.025: # update the dot field every 25 ms
                speed = 0.025
                # NOTE(review): the next two lines override the values taken from
                # the stimulus table with fixed debug settings — confirm whether
                # this is intentional before running a real experiment.
                coherence = 50
                direction = -1
                if fish_index == 3:
                    print(direction)  # NOTE(review): debug print; unreachable while number_fish == 2

                # Assign each dot to the coherent or random group for this update.
                random_vector = np.random.randint(100, size=10000)
                if 5 <= time < 15:
                    coherent_change_vector_ind = np.where(random_vector < coherence) # this fraction moves coherently
                    coherent_random_vector_ind = np.where(random_vector >= coherence) # this fraction moves randomly
                else:
                    # Outside the 5-15 s window: 0% coherence, every dot is random.
                    coherent_change_vector_ind = np.where(random_vector < 0) # this fraction moves coherently
                    coherent_random_vector_ind = np.where(random_vector >= 0) # this fraction moves randomly

                if direction == -1: # to right
                    ang = 90

                if direction == 1: # to left
                    ang = -90

                # Coherent motion direction, relative to the fish's low-pass heading.
                dx1 = np.cos((self.shared.current_fish_accumulated_orientation_lowpass[fish_index].value + ang) * np.pi / 180.)
                dy1 = np.sin((self.shared.current_fish_accumulated_orientation_lowpass[fish_index].value + ang) * np.pi / 180.)

                # NOTE(review): dx2/dy2 are computed but their use below is commented
                # out — random-group dots are re-seeded uniformly instead of walking.
                rand_ang = np.random.random(len(coherent_random_vector_ind[0]))*360
                dx2 = np.cos((self.shared.current_fish_accumulated_orientation_lowpass[fish_index].value + rand_ang) * np.pi / 180.)
                dy2 = np.sin((self.shared.current_fish_accumulated_orientation_lowpass[fish_index].value + rand_ang) * np.pi / 180.)

                self.dots_x[fish_index][coherent_change_vector_ind] -= dx1 * speed
                self.dots_y[fish_index][coherent_change_vector_ind] -= dy1 * speed

                #self.dots_x[fish_index][coherent_random_vector_ind] -= dx2 * speed
                #self.dots_y[fish_index][coherent_random_vector_ind] -= dy2 * speed

                self.dots_x[fish_index][coherent_random_vector_ind] = 2 * np.random.random(len(coherent_random_vector_ind[0])).astype(np.float32) - 1
                self.dots_y[fish_index][coherent_random_vector_ind] = 2 * np.random.random(len(coherent_random_vector_ind[0])).astype(np.float32) - 1

                # Wrap them
                self.dots_x[fish_index] = (self.dots_x[fish_index] + 1) % 2 - 1
                self.dots_y[fish_index] = (self.dots_y[fish_index] + 1) % 2 - 1

                self.dots_position[fish_index][0, :, 0] = self.dots_x[fish_index]
                self.dots_position[fish_index][0, :, 1] = self.dots_y[fish_index]

                self.t_[fish_index] = 0
            else:
                self.t_[fish_index] += dt

            # Re-upload the dot buffer every frame (even when unchanged).
            memoryview(self.dummytex[fish_index].modify_ram_image())[:] = self.dots_position[fish_index].tobytes()
+
+ try:
+ world = World()
+ world.run()
+ except:
+ shared.error("scene_error.txt", sys.exc_info())
diff --git a/stimuli/gray.png b/stimuli/gray.png
new file mode 100644
index 0000000..1a07222
Binary files /dev/null and b/stimuli/gray.png differ
diff --git a/stimuli/hanna_phototaxisedited.py b/stimuli/hanna_phototaxisedited.py
new file mode 100644
index 0000000..3d59fa1
--- /dev/null
+++ b/stimuli/hanna_phototaxisedited.py
@@ -0,0 +1,130 @@
# -*- coding: utf-8 -*-
if __name__ == "__main__":

    import sys
    import numpy as np
    import os
    # Make the experiment-framework modules (shared, scene) importable.
    sys.path.append(r"C:\Users\Max\Desktop\LS100\modules")

    from shared import Shared
    from scene import Scene
    from panda3d.core import *
    from direct.task import Task

    # default black bg, black circle
    # circle = True
    # moving = False
    # gray = False
    # Single 5-minute "stimulus" entry (the scene itself is static).
    stimuli = [0]
    t_stimuli = [5 * 60]

    number_fish = 2
    # Shared experiment state (multiprocessing values, data logging threads).
    shared = Shared(r'C:\LS100\LS100_free_swimming_4fish_data', "phototaxis", stimuli=stimuli, t_stimuli=t_stimuli,
                    shaders=[], python_file=__file__)
    # shared = Shared(r'D:\hanna_free_swimming_4fish_data', "hanna_phototaxis", stimuli=stimuli, t_stimuli=t_stimuli,
    #                 shaders=[dot_motion_shader, phototaxis_shader], python_file=__file__)
    shared.start_threads()
+
+
    class World(Scene):
        """Phototaxis scene: a textured card ("spot") attached to each fish node.

        The card shows blackwhitepatch.png on a black background; the
        commented-out branches switch between gray/black backgrounds and a
        moving stimulus.
        """

        def __init__(self):

            # Init the scene
            Scene.__init__(self, shared=shared)

            # Place cards on the fishnodes
            cm = CardMaker('card')

            dot_scale = 0.3        # edge length of the stimulus card, scene units
            self.dummynodes = []   # per-fish parent nodes (allow moving/rotating the card)
            self.cardnodes = []    # per-fish card geometry
            self.number_fish = 2
            self.background_circle = dict({})
            self.circle = True

            # for fish_index in range(self.number_fish):
            #     self.background_circle[fish_index] = self.create_circles(1, edges=30)
            #     self.background_circle[fish_index].reparentTo(self.fish_nodes[fish_index])
            #
            #     self.background_circle[fish_index].setScale(1, 0, 1)
            #     self.background_circle[fish_index].setColor(0, 0, 0)
            #     self.background_circle[fish_index].setPos(0, 0.001, 0)
            #
            #     filepath = os.path.join(os.path.split(__file__)[0], "circles.bam")
            #     self.circles[fish_index] = self.loader.loadModel(Filename.fromOsSpecific(filepath))
            #     self.circles[fish_index].reparentTo(self.fish_nodes[fish_index])
            #
            #     self.dummytex[fish_index] = Texture("dummy texture")
            #     self.dummytex[fish_index].setup2dTexture(10000, 1, Texture.T_float, Texture.FRgb32)
            #     self.dummytex[fish_index].setMagfilter(Texture.FTNearest)

            for i in range(self.number_fish):

                self.dummynodes.append(self.fish_nodes[i].attachNewNode("Dummy Node"))
                self.cardnodes.append(self.dummynodes[i].attachNewNode(cm.generate()))
                self.cardnodes[i].setScale(dot_scale, 1, dot_scale)
                # Center the card on its dummy node.
                self.cardnodes[i].setPos(-dot_scale / 2, 0, -dot_scale / 2)

            # if self.circle:
            pic = self.loader.loadTexture("blackwhitepatch.png")
            # elif gray:
            #     pic = self.loader.loadTexture("gray.png")
            # else:
            #     pic = self.loader.loadTexture("black.png")

            pic.setMagfilter(Texture.FTLinear)
            pic.setMinfilter(Texture.FTLinearMipmapLinear)
            pic.setAnisotropicDegree(16)

            for fish_index in range(self.number_fish):
                self.cardnodes[fish_index].setTexture(pic)
                self.cardnodes[fish_index].setTransparency(True)
                self.cardnodes[fish_index].setTexScale(TextureStage.getDefault(), 0.99, 0.99)

            c = 0 # same as 0.5 * 255 in illustrator, rgb, gray
            self.setBackgroundColor(c, c, c, 1)

        def stimulus(self, fish_index, i_stimulus, t_stimulus, frame, time, dt, trial, ang = 0):
            # To Do : Make the stimulus move and add stimulus to the second fish
            # NOTE(review): this loop shadows the fish_index parameter, the ang
            # parameter is unused, and the x/y values read below are never used —
            # the method currently has no visible effect on the scene; confirm
            # whether this is intended (work in progress per the To-Do above).
            for fish_index in range(self.number_fish):
                # default black for bg
                c = 0
                # all gray
                # if gray:
                #     c = 0.5
                # self.setBackgroundColor(c, c, c, 1)
                #
                radius_spot = 0.24   # radius of the (commented-out) trigger zone
                center_spot = 0.45   # x-offset of the trigger zone center

                # Current fish position from the tracker (shared across processes).
                x = self.shared.current_fish_position_x[fish_index].value
                y = self.shared.current_fish_position_y[fish_index].value


                #if (x-center_spot)**2 + y**2 <= radius_spot :
                #self.dummynodes[fish_index].setHpr(np.pi/2, 0, 0)
                #self.cardnodes[fish_index].setTexRotate(TextureStage.getDefault(), np.pi/2)
                #self.dummynodes[fish_index].setPos(x, 0.0001, y)

                #ang = np.arctan2(self.dummynodes[fish_index].getX(),
                #self.dummynodes[fish_index].getZ()) * 180 / np.pi - 90

                # moving stimulus
                # if moving:
                #     # positions = [[1,1],[0.3,0.3],[1,0.6],[0.7,0.3]]
                #     self.cardnodes[fish_index].setTexOffset(TextureStage.getDefault(), time, 0)

        def timedtask(self, task, fish_index):
            """Task callback: after 10 s, rotate the fish's dummy node once.

            NOTE(review): Panda3D setHpr expects degrees, but np.pi / 2
            (~1.57) is passed — probably 90 was intended; confirm.
            """
            if task.time < 10.0:
                return Task.cont

            self.dummynodes[fish_index].setHpr(np.pi / 2, 0,0)
            return Task.done
+
+
+ try:
+ world = World()
+ world.run()
+ except:
+ shared.error("scene_error.txt", sys.exc_info())