Merge branch 'fix_various_defects' into 'main'
Fix various defects

Closes #11, #10, #9, #8, #7, #6, and #5

See merge request zhangc/mcpevent2hist!17
KedoKudo committed Feb 10, 2023
2 parents 3c3a38f + 66049f3 commit f2b7f0a
Showing 9 changed files with 298 additions and 192 deletions.
7 changes: 6 additions & 1 deletion sophiread/README.md
@@ -55,7 +55,12 @@ The following steps have been tested under macOS and Linux, and it is in theory
- `sophiread-<version>-Linux.tar.Z`: a tarball for Linux with TZ compression
- `sophiread-<version>-Linux.sh`: an installer for Linux

> Mac users should use `environment_mac.yml` instead. Also install [MacTex](https://www.tug.org/mactex/) before building the documentation.
- For Mac users with M-series chips, please make the following adjustments:
  - Create the env with `CONDA_SUBDIR=osx-64 conda env create -f environment_mac.yml`
  - Currently `mlpack` does not have an `arm64` package from conda, so we need to fall back to x86-64 using Rosetta 2 under the hood.
- Install [MacTeX](https://www.tug.org/mactex/) before building the documentation.
- __DO NOT__ install `mlpack` from Homebrew.
  - `mlpack` from Homebrew can lead to linking errors when building the DBSCAN object.

Use the CLI
-----------
6 changes: 3 additions & 3 deletions sophiread/include/abs.h
@@ -33,8 +33,8 @@ class ABS : public ClusteringAlgorithm {
std::vector<int> clusterLabels_; // The cluster labels for each hit
std::vector<std::vector<int>> clusterIndices_; // The cluster indices for
// each cluster
const int numClusters_ = 128; // The number of clusters used at runtime
const int maxClusterSize_ = 10; // The maximum cluster size
const int spiderTimeRange_ = 75; // The spider time range (in ns)
const int numClusters_ = 128;     // The number of clusters used at runtime
const int maxClusterSize_ = 10;   // The maximum cluster size
const int spiderTimeRange_ = 75;  // The spider time range (in ns)
PeakFittingAlgorithm* peakFittingAlgorithm_; // The clustering algorithm
};
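
The constants above are the knobs visible in this hunk: a pool of `numClusters_` clusters, a cap of `maxClusterSize_` hits per cluster, and a `spiderTimeRange_` gate of 75 ns. The hunk does not show how ABS applies them, but as a rough standalone sketch of time-window gating with a size cap (an illustration only, not the project's actual algorithm):

```cpp
// Illustration only -- NOT the ABS implementation (its logic is not shown
// in this hunk). Groups sorted timestamps into clusters: a hit joins the
// current cluster unless it arrives more than `window_ns` after the
// cluster's first hit, or the cluster already holds `max_size` hits.
#include <algorithm>
#include <cstddef>
#include <vector>

std::vector<std::vector<double>> gateByTime(std::vector<double> times_ns,
                                            double window_ns = 75.0,
                                            std::size_t max_size = 10) {
  std::sort(times_ns.begin(), times_ns.end());
  std::vector<std::vector<double>> clusters;
  for (double t : times_ns) {
    if (clusters.empty() || t - clusters.back().front() > window_ns ||
        clusters.back().size() >= max_size) {
      clusters.emplace_back();  // start a new cluster
    }
    clusters.back().push_back(t);
  }
  return clusters;
}
```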
58 changes: 36 additions & 22 deletions sophiread/include/dbscan.h
@@ -3,41 +3,55 @@
#include "clustering.h"

class DBSCAN : public ClusteringAlgorithm {
public:
DBSCAN(double eps_time, size_t min_points_time, double eps_xy, size_t min_points_xy)
: m_eps_time(eps_time), m_min_points_time(min_points_time), m_eps_xy(eps_xy), m_min_points_xy(min_points_xy){};
public:
DBSCAN(double eps_time, size_t min_points_time, double eps_xy,
size_t min_points_xy)
: m_eps_time(eps_time),
m_min_points_time(min_points_time),
m_eps_xy(eps_xy),
m_min_points_xy(min_points_xy){};
~DBSCAN() = default;
public:

public:
void set_method(std::string method) { m_method = method; };
void reset();
std::vector<int> get_cluster_labels();
void fit(const std::vector<Hit>& hits);
std::vector<NeutronEvent> get_events(const std::vector<Hit>& hits);
private:
class TimeClusterInfo
{
public:
void fit(const std::vector<Hit> &hits);
std::vector<NeutronEvent> get_events(const std::vector<Hit> &hits);
bool verbose() const { return m_verbose; }
void set_verbose(bool verbose) { m_verbose = verbose; }

private:
class TimeClusterInfo {
public:
TimeClusterInfo();
TimeClusterInfo(const double time, const size_t xy_index);
~TimeClusterInfo() = default;
public:

public:
double m_time_mean;
double m_time_sum;
double m_time_min;
double m_time_max;
std::vector<size_t> m_time_cluster_xy_indexes; // wrt input hits vector
std::vector<size_t> m_time_cluster_xy_indexes; // wrt input hits vector
};
void fit1D(std::vector<double> &data, size_t &number_of_clusters, std::vector<size_t> &labels, std::vector<double> &centroids);
void fit2D(std::vector<std::pair<double,double>> &data, size_t &number_of_clusters, std::vector<size_t> &labels,
std::vector<std::pair<double,double>> &centroids);
void mergeTimeClusters1D(std::vector<TimeClusterInfo>& input_infos, std::vector<TimeClusterInfo>& merged_infos);
private:
void fit1D(std::vector<double> &data, size_t &number_of_clusters,
std::vector<size_t> &labels, std::vector<double> &centroids);
void fit2D(std::vector<std::pair<double, double>> &data,
size_t &number_of_clusters, std::vector<size_t> &labels,
std::vector<std::pair<double, double>> &centroids);
void mergeTimeClusters1D(std::vector<TimeClusterInfo> &input_infos,
std::vector<TimeClusterInfo> &merged_infos);

private:
std::string m_method{"centroid"}; // method for centroid
double m_eps_time; // The maximum distance between two time points
size_t m_min_points_time; // The minimum number of points in a time cluster
double m_eps_xy; // The maximum distance between two XY points
size_t m_min_points_xy; // The minimum number of points in an XY cluster
double m_eps_time;         // The maximum distance between two time points
size_t m_min_points_time;  // The minimum number of points in a time cluster
double m_eps_xy;           // The maximum distance between two XY points
size_t m_min_points_xy;    // The minimum number of points in an XY cluster
std::vector<NeutronEvent> m_events;
const size_t m_max_hit_chunk_size = 2e6;
std::vector<int>
clusterLabels_; // The cluster labels for each hit, not implemented yet
bool m_verbose = false;
};
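
Read as a whole, the header suggests a two-pass scheme: `fit1D` clusters hit times (with `TimeClusterInfo` bookkeeping and `mergeTimeClusters1D` to merge adjacent windows), then `fit2D` clusters XY positions within each time cluster. A minimal usage sketch against the public interface above; the `eps`/`min_points` values are illustrative placeholders, not project defaults:

```cpp
// Usage sketch for the DBSCAN interface above. The parameter values are
// assumptions chosen for illustration, not tuned defaults from the project.
#include <vector>

#include "dbscan.h"  // assumed to pull in Hit and NeutronEvent

std::vector<NeutronEvent> clusterHits(const std::vector<Hit> &hits) {
  DBSCAN dbscan(/*eps_time=*/75.0, /*min_points_time=*/1,
                /*eps_xy=*/5.0, /*min_points_xy=*/1);
  dbscan.set_method("centroid");  // the default shown in the header
  dbscan.set_verbose(false);
  dbscan.fit(hits);  // time pass first, then XY within each time cluster
  return dbscan.get_events(hits);
}
```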

8 changes: 5 additions & 3 deletions sophiread/src/centroid.cpp
@@ -1,7 +1,9 @@
#include "centroid.h"
#include "tpx3.h"

#include <iostream>

#include "tpx3.h"

/**
* @brief Perform centroid fitting on the hits.
*
@@ -20,8 +22,8 @@ NeutronEvent Centroid::fit(const std::vector<Hit>& data) {

if (weighted_by_tot) {
for (const auto& hit : data) {
x += DSCALE*hit.getX() * hit.getTOT();
y += DSCALE*hit.getY() * hit.getTOT();
x += DSCALE * hit.getX() * hit.getTOT();
y += DSCALE * hit.getY() * hit.getTOT();
tof += hit.getTOF();
tot += hit.getTOT();
}
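
The loop above is a TOT-weighted mean: each hit's position is scaled by `DSCALE` and weighted by its time-over-threshold, while TOF is accumulated unweighted. A self-contained sketch of the same arithmetic; the `SimpleHit` struct and the final normalization are assumptions for illustration, since the normalization happens outside the lines shown here:

```cpp
// Self-contained sketch of TOT-weighted centroiding as in the loop above.
// SimpleHit and the trailing normalization are illustrative assumptions;
// dscale stands in for DSCALE and is treated as a plain scale factor.
#include <vector>

struct SimpleHit { double x, y, tof, tot; };

void weightedCentroid(const std::vector<SimpleHit>& hits, double dscale,
                      double& x, double& y, double& tof) {
  double tot_sum = 0.0;
  x = y = tof = 0.0;
  for (const auto& h : hits) {
    x += dscale * h.x * h.tot;  // position weighted by time-over-threshold
    y += dscale * h.y * h.tot;
    tof += h.tof;
    tot_sum += h.tot;
  }
  if (tot_sum > 0.0) { x /= tot_sum; y /= tot_sum; }  // weighted mean
  if (!hits.empty()) tof /= static_cast<double>(hits.size());  // plain mean
}
```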