Merge pull request #263 from zhujun98/reset_moving_average_in_correlation_scan_mode

Reset moving average in correlation scan mode
zhujun98 authored Jul 20, 2020
2 parents 0a520b7 + 0b62a8c commit 140bccf
Showing 29 changed files with 691 additions and 376 deletions.
34 changes: 19 additions & 15 deletions docs/statistics.rst
@@ -137,21 +137,25 @@ various control data. Two plot types are supported in the correlation analysis:
One can change the resolution value on the fly without resetting the whole data history.


+----------------------------+--------------------------------------------------------------------+
| Input | Description |
+============================+====================================================================+
| ``Analysis type`` | See :ref:`Analysis type`. |
+----------------------------+--------------------------------------------------------------------+
| ``Category`` | Category of the slow data. |
+----------------------------+--------------------------------------------------------------------+
| ``Karabo device ID`` | ID of the Karabo device which produces the slow data. |
+----------------------------+--------------------------------------------------------------------+
| ``Property name`` | Property name in the Karabo device. |
+----------------------------+--------------------------------------------------------------------+
| ``Resolution`` | 0 for scatter plot and any positive value for statistics bar plot. |
+----------------------------+--------------------------------------------------------------------+
| ``Reset`` | Reset the correlation history. |
+----------------------------+--------------------------------------------------------------------+
+-------------------------------+--------------------------------------------------------------------+
| Input | Description |
+===============================+====================================================================+
| ``Analysis type`` | See :ref:`Analysis type`. |
+-------------------------------+--------------------------------------------------------------------+
| ``Category`` | Category of the slow data. |
+-------------------------------+--------------------------------------------------------------------+
| ``Karabo device ID`` | ID of the Karabo device which produces the slow data. |
+-------------------------------+--------------------------------------------------------------------+
| ``Property name`` | Property name in the Karabo device. |
+-------------------------------+--------------------------------------------------------------------+
| ``Resolution`` | 0 for scatter plot and any positive value for statistics bar plot. |
+-------------------------------+--------------------------------------------------------------------+
| ``Reset`` | Reset the correlation history. |
+-------------------------------+--------------------------------------------------------------------+
| ``Auto reset moving average`` | Check to automatically reset the moving average in the             |
|                               | "stop-and-scan" analysis when a new point starts. *Only applies to |
| | correlation 1.* |
+-------------------------------+--------------------------------------------------------------------+

One can also plot FOMs of ROI1 and ROI2 together when the *master-slave* mode is activated in
:ref:`ROI FOM setup`.
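
The ``Resolution`` and ``Auto reset moving average`` inputs in the table above appear to correspond to the ``OneWayAccuPairSequence`` changes further down in this commit. The following is only an editor's sketch of the resolution semantics, assuming nothing beyond the constructor, ``append`` and ``data()`` interface visible in the diff below; the numbers fed in are made up for illustration.

from extra_foam.algorithms.data_structures import OneWayAccuPairSequence

# With a positive resolution, readings whose slow-data (x) values lie within
# `resolution` of the current position are accumulated into one statistics
# point (mean, spread, count) instead of being plotted individually.
seq = OneWayAccuPairSequence(0.1, max_len=3000, min_count=2)

# Three FOM readings taken while the scanned value sits at (almost) the same
# position are merged; the merged point only shows up in data() once it has
# collected min_count readings.
for x, fom in [(1.00, 0.3), (1.02, 0.4), (1.05, 0.5)]:
    seq.append((x, fom))

# A reading farther away than the resolution opens a new accumulation
# position, which stays hidden until it also reaches min_count.
seq.append((1.31, 0.7))

x_avg, y_stats = seq.data()
print(x_avg)          # one visible position, x ~ 1.02
print(y_stats.avg)    # [0.4]
print(y_stats.count)  # [3]
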
33 changes: 25 additions & 8 deletions extra_foam/algorithms/data_structures.py
@@ -302,11 +302,10 @@ class OneWayAccuPairSequence(_AbstractSequence):
"""

def __init__(self, resolution, *,
max_len=3000, dtype=np.float64, min_count=2, epsilon=1.e-9):
max_len=3000, dtype=np.float64, min_count=2):
super().__init__(max_len=max_len)

self._min_count = min_count
self._epsilon = np.abs(epsilon)

if resolution <= 0:
raise ValueError("resolution must be positive!")
@@ -324,6 +323,8 @@ def __init__(self, resolution, *,
self._y_std = np.zeros(
self._OVER_CAPACITY * max_len, dtype=dtype)

self._last = 0

def __getitem__(self, index):
"""Override."""
s = slice(self._i0, self._i0 + self._len)
@@ -355,9 +356,9 @@ def append(self, item):
x, y = item

new_pt = False
if self._len > 0:
last = self._i0 + self._len - 1
if abs(x - self._x_avg[last]) - self._resolution < self._epsilon:
last = self._last
if self._len > 0 or self._count[0] > 0:
if abs(x - self._x_avg[last]) <= self._resolution:
self._count[last] += 1
self._x_avg[last] += (x - self._x_avg[last]) / self._count[last]
avg_prev = self._y_avg[last]
@@ -373,12 +374,15 @@ def append(self, item):
self._y_max[last] = self._y_avg[last] + 0.5*np.sqrt(
self._y_std[last]/self._count[last])

if self._count[last] == self._min_count:
new_pt = True

else:
                # If the number of data points at a location is less than
                # min_count, the data at this location will be discarded.
if self._count[last] >= self._min_count:
new_pt = True
last += 1
self._last += 1
last = self._last

self._x_avg[last] = x
self._count[last] = 1
@@ -394,7 +398,6 @@ def append(self, item):
self._y_min[0] = y
self._y_max[0] = y
self._y_std[0] = 0.0
new_pt = True

if new_pt:
max_len = self._max_len
@@ -403,13 +406,26 @@ def append(self, item):
else:
self._i0 += 1
if self._i0 == max_len:
self._i0 = 0
self._last -= max_len
self._x_avg[:max_len] = self._x_avg[max_len:]
self._count[:max_len] = self._count[max_len:]
self._y_avg[:max_len] = self._y_avg[max_len:]
self._y_min[:max_len] = self._y_min[max_len:]
self._y_max[:max_len] = self._y_max[max_len:]
self._y_std[:max_len] = self._y_std[max_len:]

def append_dry(self, x):
"""Return whether append the given item will start a new position."""
next_pos = False
if self._len > 0 or self._count[0] > 0:
if abs(x - self._x_avg[self._last]) > self._resolution:
next_pos = True
else:
next_pos = True

return next_pos

def extend(self, items):
"""Override."""
for item in items:
@@ -419,6 +435,7 @@ def reset(self):
"""Overload."""
self._i0 = 0
self._len = 0
self._last = 0
self._x_avg.fill(0)
self._count.fill(0)
self._y_avg.fill(0)
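
The new append_dry helper is what enables the commit's headline behaviour: a caller can check whether the next correlation point would open a new accumulation position and, if so, reset its moving average before the data are processed. The correlation processor that does this is among the 29 changed files but is not shown on this page, so the following is only a sketch of that pattern; MovingAverageFOM and process_point are hypothetical stand-ins, not extra_foam APIs.

from extra_foam.algorithms.data_structures import OneWayAccuPairSequence


class MovingAverageFOM:
    """Hypothetical moving average of a figure-of-merit (illustration only)."""

    def __init__(self):
        self._avg, self._n = 0.0, 0

    def reset(self):
        self._avg, self._n = 0.0, 0

    def update(self, value):
        # incremental mean, the same update rule used for _x_avg in append()
        self._n += 1
        self._avg += (value - self._avg) / self._n
        return self._avg


def process_point(corr_hist, ma, x, fom, auto_reset=True):
    """Sketch of 'Auto reset moving average' in a stop-and-scan measurement.

    append_dry(x) reports whether x would start a new scan position; if it
    would, the moving average accumulated at the previous position is
    discarded before the new point is folded into the correlation history.
    """
    if auto_reset and corr_hist.append_dry(x):
        ma.reset()
    corr_hist.append((x, ma.update(fom)))


corr_hist = OneWayAccuPairSequence(0.1, min_count=2)
ma = MovingAverageFOM()
for x, fom in [(0.00, 1.0), (0.01, 1.2), (0.50, 3.0), (0.51, 3.2)]:
    process_point(corr_hist, ma, x, fom)
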
204 changes: 143 additions & 61 deletions extra_foam/algorithms/tests/test_data_structures.py
@@ -115,6 +115,16 @@ def testSimpleSequence(self):
self.assertEqual(overflow, ax[0])
self.assertEqual(MAX_LENGTH + overflow - 1, ax[-1])

# ----------------------------
# test when capacity reached
# ----------------------------
for i in range(MAX_LENGTH):
hist.append(i)
ax = hist.data()
self.assertEqual(MAX_LENGTH, len(ax))
self.assertEqual(0, ax[0])
self.assertEqual(MAX_LENGTH - 1, ax[-1])

# ----------------------------
# test constructing from array
# ----------------------------
@@ -168,6 +178,16 @@ def testSimpleVectorSequence(self):
np.testing.assert_array_almost_equal([MAX_LENGTH + overflow - 1,
MAX_LENGTH + overflow - 1], ax[-1])

# ----------------------------
# test when capacity reached
# ----------------------------
for i in range(MAX_LENGTH):
hist.append([i, i])
ax = hist.data()
self.assertEqual(MAX_LENGTH, len(ax))
np.testing.assert_array_almost_equal([0, 0], ax[0])
np.testing.assert_array_almost_equal([MAX_LENGTH - 1, MAX_LENGTH - 1], ax[-1])

# ----------------------------
# test constructing from array
# ----------------------------
@@ -217,6 +237,19 @@ def testSimplePairSequence(self):
self.assertEqual(MAX_LENGTH + overflow - 1, ax[-1])
self.assertEqual(MAX_LENGTH + overflow - 1, ay[-1])

# ----------------------------
# test when capacity reached
# ----------------------------
for i in range(MAX_LENGTH):
hist.append((i, i))
ax, ay = hist.data()
self.assertEqual(MAX_LENGTH, len(ax))
self.assertEqual(MAX_LENGTH, len(ay))
self.assertEqual(0, ax[0])
self.assertEqual(0, ay[0])
self.assertEqual(MAX_LENGTH - 1, ax[-1])
self.assertEqual(MAX_LENGTH - 1, ay[-1])

# ----------------------------
# test constructing from array
# ----------------------------
@@ -243,76 +276,101 @@ def testOneWayAccuPairSequence(self):
hist = OneWayAccuPairSequence(0.1, max_len=MAX_LENGTH, min_count=2)
self.assertEqual(0, len(hist))

# distance between two adjacent data > resolution
hist.append((1, 0.3))
hist.append((2, 0.4))
ax, ay = hist.data()
np.testing.assert_array_equal([], ax)
np.testing.assert_array_equal([], ay.avg)
np.testing.assert_array_equal([], ay.min)
np.testing.assert_array_equal([], ay.max)
np.testing.assert_array_equal([], ay.count)
for _ in range(2):
# test reset
hist.reset()

# first data
self.assertTrue(hist.append_dry(1))
hist.append((1, 0.3))
self.assertEqual(0, len(hist))

# distance between two adjacent data > resolution
self.assertTrue(hist.append_dry(2))
hist.append((2, 0.4))
self.assertEqual(0, len(hist))
ax, ay = hist.data()
np.testing.assert_array_equal([], ax)
np.testing.assert_array_equal([], ay.avg)
np.testing.assert_array_equal([], ay.min)
np.testing.assert_array_equal([], ay.max)
np.testing.assert_array_equal([], ay.count)

# new data within resolution
self.assertFalse(hist.append_dry(2.02))
hist.append((2.02, 0.5))
self.assertEqual(1, len(hist))
ax, ay = hist.data()
np.testing.assert_array_equal([2.01], ax)
np.testing.assert_array_equal([0.45], ay.avg)
np.testing.assert_array_almost_equal([0.425], ay.min)
np.testing.assert_array_almost_equal([0.475], ay.max)
np.testing.assert_array_equal([2], ay.count)

# new data within resolution
self.assertFalse(hist.append_dry(2.10))
hist.append((2.10, 0.6))
self.assertEqual(1, len(hist))
ax, ay = hist.data()
np.testing.assert_array_equal([2.04], ax)
np.testing.assert_array_equal([0.5], ay.avg)
np.testing.assert_array_almost_equal([0.4591751709536137], ay.min)
np.testing.assert_array_almost_equal([0.5408248290463863], ay.max)
np.testing.assert_array_equal([3], ay.count)

# new point outside resolution
self.assertTrue(hist.append_dry(2.31))
hist.append((2.31, 1))
self.assertFalse(hist.append_dry(2.40))
hist.append((2.40, 2))
self.assertEqual(2, len(hist))
ax, ay = hist.data()
np.testing.assert_array_equal([0.5, 1.5], ay.avg)
np.testing.assert_array_almost_equal([0.4591751709536137, 1.25], ay.min)
np.testing.assert_array_almost_equal([0.5408248290463863, 1.75], ay.max)
np.testing.assert_array_equal([3, 2], ay.count)

# test Sequence protocol
x, y = hist[0]
self.assertAlmostEqual(2.04, x)
self.assertEqual(_StatDataItem(0.5, 0.4591751709536137, 0.5408248290463863, 3), y)
x, y = hist[-1]
self.assertAlmostEqual(2.355, x)
self.assertEqual(_StatDataItem(1.5, 1.25, 1.75, 2), y)
with self.assertRaises(IndexError):
hist[2]

hist.append((2.02, 0.5))
ax, ay = hist.data()
np.testing.assert_array_equal([2.01], ax)
np.testing.assert_array_equal([0.45], ay.avg)
np.testing.assert_array_almost_equal([0.425], ay.min)
np.testing.assert_array_almost_equal([0.475], ay.max)
np.testing.assert_array_equal([2], ay.count)

hist.append((2.10, 0.6))
ax, ay = hist.data()
np.testing.assert_array_equal([2.04], ax)
np.testing.assert_array_equal([0.5], ay.avg)
np.testing.assert_array_almost_equal([0.4591751709536137], ay.min)
np.testing.assert_array_almost_equal([0.5408248290463863], ay.max)
np.testing.assert_array_equal([3], ay.count)

# new point
hist.append((2.31, 1))
hist.append((2.41, 2))
ax, ay = hist.data()
np.testing.assert_array_equal([0.5, 1.5], ay.avg)
np.testing.assert_array_almost_equal([0.4591751709536137, 1.25], ay.min)
np.testing.assert_array_almost_equal([0.5408248290463863, 1.75], ay.max)
np.testing.assert_array_equal([3, 2], ay.count)

# test Sequence protocol
x, y = hist[0]
self.assertAlmostEqual(2.04, x)
self.assertEqual(_StatDataItem(0.5, 0.4591751709536137, 0.5408248290463863, 3), y)
x, y = hist[-1]
self.assertAlmostEqual(2.36, x)
self.assertEqual(_StatDataItem(1.5, 1.25, 1.75, 2), y)
with self.assertRaises(IndexError):
hist[2]

# test reset
# ----------------------------
# test when max length reached
# ----------------------------
hist.reset()
overflow = 5
for i in range(2 * MAX_LENGTH + 2 * overflow):
            # two adjacent data points will be grouped together since the resolution is 0.1
hist.append((0.09 * i, i))
ax, ay = hist.data()
np.testing.assert_array_equal([], ax)
np.testing.assert_array_equal([], ay.avg)
np.testing.assert_array_equal([], ay.min)
np.testing.assert_array_equal([], ay.max)
np.testing.assert_array_equal([], ay.count)
self.assertEqual(MAX_LENGTH, len(ax))
self.assertEqual(MAX_LENGTH, len(ay.count))
self.assertEqual(MAX_LENGTH, len(ay.avg))
self.assertAlmostEqual(0.18 * overflow + 0.09 * 0.5, ax[0])
self.assertAlmostEqual(2 * overflow + 0.5, ay.avg[0])
self.assertAlmostEqual(0.18 * (MAX_LENGTH + overflow - 1) + 0.09 * 0.5, ax[-1])
self.assertAlmostEqual(2 * (MAX_LENGTH + overflow - 1) + 0.5, ay.avg[-1])

# ----------------------------
# test when max length reached
# test when capacity reached
# ----------------------------

overflow = 10
for i in range(2 * MAX_LENGTH + 2 * overflow):
for i in range(2 * MAX_LENGTH):
            # two adjacent data points will be grouped together since the resolution is 0.1
hist.append((0.1 * i, i))
hist.append((0.09 * i, i))
ax, ay = hist.data()
self.assertEqual(MAX_LENGTH, len(ax))
self.assertEqual(MAX_LENGTH, len(ay.count))
self.assertEqual(MAX_LENGTH, len(ay.avg))
self.assertEqual(0.2 * overflow + 0.1 * 0.5, ax[0])
self.assertEqual(2 * overflow + 0.5, ay.avg[0])
self.assertEqual(0.2 * (MAX_LENGTH + overflow - 1) + 0.1 * 0.5, ax[-1])
self.assertEqual(2 * (MAX_LENGTH + overflow - 1) + 0.5, ay.avg[-1])
self.assertAlmostEqual(0.09 * 0.5, ax[0])
self.assertAlmostEqual(0.5, ay.avg[0])
self.assertAlmostEqual(0.18 * (MAX_LENGTH - 1) + 0.09 * 0.5, ax[-1])
self.assertAlmostEqual(2 * (MAX_LENGTH - 1) + 0.5, ay.avg[-1])

# ----------------------------
# test constructing from array
@@ -321,5 +379,29 @@ def testOneWayAccuPairSequence(self):
with self.assertRaises(ValueError):
OneWayAccuPairSequence.from_array([], [1, 2])

hist = OneWayAccuPairSequence.from_array([0, 1, 2], [1, 2, 3], resolution=1)
self.assertEqual(2, len(hist))
hist = OneWayAccuPairSequence.from_array([0, 0.9, 1.8], [1, 2, 3], resolution=1)
self.assertEqual(1, len(hist))

def testOneWayAccuPairSequence2(self):
MAX_LENGTH = 100
min_count = 20

hist = OneWayAccuPairSequence(0.1, max_len=MAX_LENGTH, min_count=min_count)

for i in range(min_count):
hist.append((0.001 * i, i))
if i == min_count - 1:
self.assertEqual(1, len(hist))
else:
self.assertEqual(0, len(hist))

for i in range(min_count - 1):
hist.append((1 + 0.001 * i, i))
self.assertEqual(1, len(hist))

for i in range(min_count):
hist.append((2 + 0.001 * i, i))
if i == min_count - 1:
self.assertEqual(2, len(hist))
else:
self.assertEqual(1, len(hist))
1 change: 1 addition & 0 deletions extra_foam/config.py
@@ -78,6 +78,7 @@ class AnalysisType(IntEnum):
ROI_PROJ = 21
AZIMUTHAL_INTEG = 41
PULSE = 2700
PUMP_PROBE_PULSE = 2701
ROI_FOM_PULSE = 2711
ROI_NORM_PULSE = 2712
ROI_PROJ_PULSE = 2721
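
For context on the one-line config.py change: the pulse-resolved members of AnalysisType visible in this hunk appear to be offset from their train-resolved counterparts by PULSE = 2700 (ROI_PROJ = 21 versus ROI_PROJ_PULSE = 2721), which would put the train-resolved PUMP_PROBE at 1. A minimal sketch of that assumed convention, using a local mirror of the handful of members shown above rather than the real enum:

from enum import IntEnum


class AnalysisType(IntEnum):
    # Local mirror of the members visible in the hunk above; PUMP_PROBE = 1
    # is an assumption inferred from the new PUMP_PROBE_PULSE = 2701 value.
    PUMP_PROBE = 1
    ROI_PROJ = 21
    AZIMUTHAL_INTEG = 41
    PULSE = 2700
    PUMP_PROBE_PULSE = 2701
    ROI_FOM_PULSE = 2711
    ROI_NORM_PULSE = 2712
    ROI_PROJ_PULSE = 2721


# Under the assumed convention, pulse-resolved = train-resolved + PULSE.
assert AnalysisType.ROI_PROJ_PULSE == AnalysisType.ROI_PROJ + AnalysisType.PULSE
assert AnalysisType.PUMP_PROBE_PULSE == AnalysisType.PUMP_PROBE + AnalysisType.PULSE
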