Merge pull request #87 from NSLS-II/updates-20240515-generic-function
modified the verbos status
mrakitin authored May 30, 2024
2 parents 21765b0 + 1a4d3f5 commit 9e445ac
Showing 4 changed files with 97 additions and 7 deletions.
5 changes: 5 additions & 0 deletions chx_packages_local.py
@@ -44,9 +44,13 @@
from chx_compress import (
MultifileBNLCustom,
combine_binary_files,
compress_eigerdata,
create_compress_header,
get_eigerImage_per_file,
init_compress_eigerdata,
para_compress_eigerdata,
para_segment_compress_eigerdata,
read_compressed_eigerdata,
segment_compress_eigerdata,
)

@@ -306,6 +310,7 @@
get_seg_from_ring_mask,
get_t_iq,
get_t_iqc,
get_t_iqc_imstack,
multi_uids_saxs_xpcs_analysis,
plot_circular_average,
plot_qIq_with_ROI,
77 changes: 77 additions & 0 deletions pyCHX/XPCS_SAXS.py
@@ -761,6 +761,83 @@ def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, sho
return qp, np.array(iqs), q


def get_t_iqc_imstack(
    imgs, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, show_progress=True, *argv, **kwargs
):
    """
    Get t-dependent Iq

    Variant of get_t_iqc that takes an image stack (e.g., a dask array), computes the
    average image for each frame region, and then does the radial integration.
    Variant by LW 05/16/2024

    Parameters
    ----------
    imgs: image stack, e.g., a dask array
    frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ]
    mask: an image mask
    nx : int, optional
        number of bins in x
        default is 1500 bins
    plot_: boolean; if True, plot the time~one-D curve with qp as x-axis

    Returns
    -------
    qp: q in pixel
    iq: intensity of circular average
    q: q in real units (A-1)
    """

    Nt = len(frame_edge)
    iqs = list(np.zeros(Nt))
    for i in range(Nt):
        t1, t2 = frame_edge[i]
        # print (t1,t2)
        avg_img = np.average(imgs[t1:t2, :, :], axis=0)
        qp, iqs[i], q = get_circular_average(avg_img, mask, pargs, nx=nx, plot_=False)

    if plot_:
        fig, ax = plt.subplots(figsize=(8, 6))
        for i in range(Nt):
            t1, t2 = frame_edge[i]
            ax.semilogy(q, iqs[i], label="frame: %s--%s" % (t1, t2))
            # ax.set_xlabel("q in pixel")
            ax.set_xlabel("Q " r"($\AA^{-1}$)")
            ax.set_ylabel("I(q)")

        if "xlim" in kwargs.keys():
            ax.set_xlim(kwargs["xlim"])
        if "ylim" in kwargs.keys():
            ax.set_ylim(kwargs["ylim"])

        ax.legend(
            loc="best",
        )

        uid = pargs["uid"]
        title = ax.set_title("uid= %s--t~I(q)" % uid)
        title.set_y(1.01)
        if save:
            # dt =datetime.now()
            # CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
            path = pargs["path"]
            uid = pargs["uid"]
            # fp = path + 'uid= %s--Iq~t-'%uid + CurTime + '.png'
            fp = path + "uid=%s--Iq-t-" % uid + ".png"
            fig.savefig(fp, dpi=fig.dpi)

            save_arrays(
                np.vstack([q, np.array(iqs)]).T,
                label=["q_A-1"] + ["Fram-%s-%s" % (t[0], t[1]) for t in frame_edge],
                filename="uid=%s-q-Iqt.csv" % uid,
                path=path,
            )

        # plt.show()

    return qp, np.array(iqs), q


def plot_t_iqc(q, iqs, frame_edge, pargs, save=True, return_fig=False, legend_size=None, *argv, **kwargs):
"""Plot t-dependent Iq
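For reference, a minimal usage sketch of the get_t_iqc_imstack helper added above. The array shape, chunking, mask, and the geometry keys in pargs (center, dpix, Ldet, lambda_) are illustrative assumptions; the function itself only reads pargs["uid"] and pargs["path"], and only when plotting or saving.

# Usage sketch only -- sizes, mask, and the geometry keys in pargs are assumptions;
# get_circular_average() is expected to read the detector geometry from pargs as
# elsewhere in pyCHX.
import dask.array as da
import numpy as np
from pyCHX.XPCS_SAXS import get_t_iqc_imstack

imgs = da.from_array(np.random.poisson(5, size=(400, 256, 256)), chunks=(100, 256, 256))
mask = np.ones((256, 256), dtype=int)  # hypothetical all-pass mask
pargs = {
    "uid": "demo",         # read only for plot titles / output filenames
    "path": "./",
    "center": [128, 128],  # assumed geometry keys consumed by get_circular_average
    "dpix": 0.075,
    "Ldet": 5000.0,
    "lambda_": 1.28,
}
frame_edge = [[0, 100], [200, 400]]  # ROI frame regions, as in the docstring
qp, iqt, q = get_t_iqc_imstack(imgs, frame_edge, mask, pargs, nx=1500, plot_=False)
print(iqt.shape)  # one I(q) curve per frame region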
17 changes: 10 additions & 7 deletions pyCHX/chx_generic_functions.py
@@ -3401,7 +3401,7 @@ def get_sid_filenames(hdr, verbose=False):
else:
if verbose:
print('Found detector filename from "data_path" in metadata!')
success = True
success = True

if not success: # looking at path in metadata, but taking the date from the run start document
data_path = start_doc["data path"][:-11] + strftime("%Y/%m/%d/", localtime(start_doc["time"]))
@@ -3416,7 +3416,7 @@ def get_sid_filenames(hdr, verbose=False):
else:
if verbose:
print("Found detector filename in %s" % data_path)
success = True
success = True

if (
not success
@@ -3433,7 +3433,7 @@ def get_sid_filenames(hdr, verbose=False):
else:
if verbose:
print("Found detector filename in %s" % data_path)
success = True
success = True
return ret
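For illustration, a minimal hedged sketch of the verbose path touched by the hunks above; the run reference is a placeholder, and db is assumed to be the same databroker handle that load_dask_data uses below.

# Sketch only: "<some-uid>" is a placeholder; db is assumed to be the databroker
# catalog also used via db[uid] in load_dask_data below.
hdr = db["<some-uid>"]
ret = get_sid_filenames(hdr, verbose=True)
# With verbose=True the function reports where the detector filename was found,
# e.g. 'Found detector filename from "data_path" in metadata!'
print(ret)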


@@ -3494,6 +3494,8 @@ def load_dask_data(uid, detector, mask_path_full, reverse=False, rot90=False):
returns detector_images(dask-array), image_md
LW 04/26/2024
"""
import json

import dask

hdr = db[uid]
@@ -3511,24 +3513,25 @@ def load_dask_data(uid, detector, mask_path_full, reverse=False, rot90=False):
img_md = {}
for k in list(img_md_dict.keys()):
img_md[k] = hdr.config_data(det)["primary"][0]["%s_%s" % (det, img_md_dict[k])]
if md["detector"] in ["eiger4m_single_image", "eiger1m_single_image", "eiger500K_single_image"]:
if detector in ["eiger4m_single_image", "eiger1m_single_image", "eiger500K_single_image"]:
img_md.update({"y_pixel_size": 7.5e-05, "x_pixel_size": 7.5e-05})
got_pixel_mask = True
else:
img_md.update({"y_pixel_size": None, "x_pixel_size": None})
got_pixel_mask = False
# load pixel mask from static location
if got_pixel_mask:
json_open = open(_mask_path_ + "pixel_masks/pixel_mask_compression_%s.json" % detector.split("_")[0])
# json_open = open(_mask_path_ + "pixel_masks/pixel_mask_compression_%s.json" % detector.split("_")[0])
json_open = open(mask_path_full + "pixel_mask_compression_%s.json" % detector.split("_")[0])
mask_dict = json.load(json_open)
img_md["pixel_mask"] = np.array(mask_dict["pixel_mask"])
img_md["binary_mask"] = np.array(mask_dict["binary_mask"])
del mask_dict

# load image data as dask-arry:
dimg = hdr.xarray_dask()[md["detector"]][0]
dimg = hdr.xarray_dask()[detector][0]
if reverse:
dimg = dask.array.flip(dimg, axis=(0, 1))
dimg = dask.array.flip(dimg, axis=(1, 1))
if rot90:
dimg = dask.array.rot90(dimg, axes=(1, 2))
return dimg, img_md
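For context, a hedged sketch of how the patched load_dask_data is presumably called; the uid, detector name, and mask directory are placeholder values, and the expected array layout is an assumption.

# Sketch only: uid, detector, and mask_path_full below are placeholders.
# mask_path_full should be the directory holding pixel_mask_compression_<prefix>.json,
# since the patched code now builds the JSON path from this argument rather than the
# hard-coded _mask_path_.
uid = "<some-uid>"
detector = "eiger4m_single_image"
mask_path_full = "/nsls2/data/chx/shared/pixel_masks/"  # hypothetical location

dimg, img_md = load_dask_data(uid, detector, mask_path_full, reverse=False, rot90=False)
print(dimg.shape)                  # lazy dask stack, assumed (frames, y, x)
print(img_md["pixel_mask"].shape)  # pixel mask loaded from the JSON above

# The dask stack can then feed the new helper from this commit, e.g.:
# qp, iqt, q = get_t_iqc_imstack(dimg, [[0, 100], [200, 400]], mask, pargs, plot_=False)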
5 changes: 5 additions & 0 deletions pyCHX/chx_packages.py
Expand Up @@ -34,9 +34,13 @@
from pyCHX.chx_compress import (
MultifileBNLCustom,
combine_binary_files,
compress_eigerdata,
create_compress_header,
get_eigerImage_per_file,
init_compress_eigerdata,
para_compress_eigerdata,
para_segment_compress_eigerdata,
read_compressed_eigerdata,
segment_compress_eigerdata,
)
from pyCHX.chx_compress_analysis import (
@@ -242,6 +246,7 @@
get_seg_from_ring_mask,
get_t_iq,
get_t_iqc,
get_t_iqc_imstack,
multi_uids_saxs_xpcs_analysis,
plot_circular_average,
plot_qIq_with_ROI,
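The import blocks in chx_packages_local.py and pyCHX/chx_packages.py have the same net effect: the chx_compress helpers and the new get_t_iqc_imstack become reachable from the package entry points. A hedged sketch:

# Sketch only: the names shown are the ones added to this commit's import lists.
from pyCHX.chx_packages import (
    get_t_iqc_imstack,        # new SAXS helper added in pyCHX/XPCS_SAXS.py
    para_compress_eigerdata,  # re-exported from pyCHX.chx_compress
)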
