From c32bd2ad50264ebc33009d29738fb69c72442dfb Mon Sep 17 00:00:00 2001 From: Thomas A Caswell Date: Thu, 25 Mar 2021 13:57:18 -0400 Subject: [PATCH 1/5] Accumulated uncommited changes from BL staff --- startup/00-base.py | 7 +- startup/11-temperature-controller.py | 2 + startup/12-motors.py | 3 +- startup/94-load.py | 1 - startup/96-dan_functions.py | 36 +++++- startup/97-MA_functions.py | 176 +++++++++++++++++++-------- 6 files changed, 170 insertions(+), 55 deletions(-) diff --git a/startup/00-base.py b/startup/00-base.py index a6ab453..4557e9c 100644 --- a/startup/00-base.py +++ b/startup/00-base.py @@ -1,7 +1,10 @@ # Make ophyd listen to pyepics. import logging +import matplotlib.pyplot import nslsii import time +import matplotlib.pyplot as plt + from bluesky.utils import ts_msg_hook # See docstring for nslsii.configure_base() for more details @@ -13,8 +16,8 @@ pbar=True, bec=True, magics=True, - mpl=True, - #publish_documents_to_kafka=True + mpl=False, + publish_documents_to_kafka=True ) from pathlib import Path diff --git a/startup/11-temperature-controller.py b/startup/11-temperature-controller.py index 5799b40..93541d1 100644 --- a/startup/11-temperature-controller.py +++ b/startup/11-temperature-controller.py @@ -348,3 +348,5 @@ class Lakeshore336(Device): #hotairblower=Eurotherm('XF:28ID1-ES:1{Env:03}T-I', # write_pv='XF:28ID1-ES:1{Env:03}T-SP', # tolerance=1,name='hotairblower') + +sorensen850_manual = EpicsSignal('XF:28ID1-ES{LS336:1-Out:3}Out:Man-RB', write_pv='XF:28ID1-ES{LS336:1-Out:3}Out:Man-SP', name='sorensen850_manual') diff --git a/startup/12-motors.py b/startup/12-motors.py index f415a5e..3e440ad 100644 --- a/startup/12-motors.py +++ b/startup/12-motors.py @@ -75,4 +75,5 @@ class FilterBankTwoButtonShutter(Device): #detector for ECS - DO and MA ECS_det1 = EpicsSignalRO( 'XF:28IDC-BI:1{IM:1}:C4_1' ,name='ECS_det1') - +#45-degree shifting motor on M6-grid, for use with hot air blower / cryostream with angled sample bracket +broadside45_shifter = EpicsMotor('XF:28ID1B-ES{Smpl:Array-Ax:Horiz}Mtr', name='broadside45_shifter') diff --git a/startup/94-load.py b/startup/94-load.py index 48cd0c3..7004c2d 100644 --- a/startup/94-load.py +++ b/startup/94-load.py @@ -17,7 +17,6 @@ from xpdacq.xpdacq_conf import (glbl_dict, configure_device, _reload_glbl, _set_glbl, _load_beamline_config) - # configure experiment device being used in current version if glbl_dict['is_simulation']: from xpdacq.simulation import (xpd_pe1c, db, cs700, shctl1, diff --git a/startup/96-dan_functions.py b/startup/96-dan_functions.py index 48a90b7..92c0c07 100644 --- a/startup/96-dan_functions.py +++ b/startup/96-dan_functions.py @@ -8,7 +8,7 @@ from bluesky.callbacks import LiveTable import uuid import numpy as np - +import matplotlib.pyplot as plt ############## slack_token = os.environ["SLACK_API_TOKEN"] @@ -337,6 +337,8 @@ def scan_shifter_pos( min_dist=5, peak_rad=1.5, use_det=True, + abs_data = False, + oset_data = 0.0 ): def yn_question(q): return input(q).lower().strip()[0] == "y" @@ -375,6 +377,12 @@ def yn_question(q): print("only a single point? I'm gonna quit!") return None + if oset_data != 0.0: + I_list = I_list - oset_data + + if abs_data: + I_list = abs(I_list) + print("") if not yn_question( "Move on to fitting? 
(if not, I'll return [pos_list, I_list]) [y/n] " @@ -554,6 +562,9 @@ def _motor_move_scan_shifter_pos(motor, xmin, xmax, numx): I_list = np.zeros(numx) dx = (xmax - xmin) / numx pos_list = np.linspace(xmin, xmax, numx) + print ('moving to starting postion') + RE(mv(motor,pos_list[0])) + print ('opening shutter') RE(mv(fs, "Open")) fig1, ax1 = plt.subplots() use_det = True @@ -661,3 +672,26 @@ def simple_ct(dets, exposure, *, md=None): plan = bp.count(dets, md=_md) plan = bpp.subs_wrapper(plan, LiveTable([])) return (yield from plan) + + +def save_history(histfile,LIMIT=5000): + ip = get_ipython() + """save the IPython history to a plaintext file""" + #histfile = os.path.join(ip.profile_dir.location, "history.txt") + print("Saving plaintext history to %s" % histfile) + lines = [] + # get previous lines + # this is only necessary because we truncate the history, + # otherwise we chould just open with mode='a' + if os.path.exists(histfile): + with open(histfile, 'r') as f: + lines = f.readlines() + + # add any new lines from this session + lines.extend(record[2] + '\n' for record in ip.history_manager.get_range()) + + with open(histfile, 'w') as f: + # limit to LIMIT entries + f.writelines(lines[-LIMIT:]) + + diff --git a/startup/97-MA_functions.py b/startup/97-MA_functions.py index 6a54026..abd633b 100644 --- a/startup/97-MA_functions.py +++ b/startup/97-MA_functions.py @@ -1,15 +1,15 @@ "Define Beamline Modes" def high_resolution(): print("Resetting white beam slits") - wb_slits.inboard.move(-9.6) - wb_slits.outboard.move(-3.542180) + wb_slits.inboard.move(-13.05) + wb_slits.outboard.move(-6.179438) print("Resetting Monochromator") - sbm.yaw.move(0.0) - sbm.roll.move(0.0) - sbm.pitch.move(-0.05799) - sbm.bend.move(1950) - sbm.twist.move(-30) + sbm.yaw.move(0.00013) + sbm.roll.move(0.00010) + sbm.pitch.move(-0.07137) + sbm.bend.move(2908.9875) + sbm.twist.move(0) print("Resetting Mirror") Mirror_VFM.y_upstream.move(-0.7) @@ -25,33 +25,33 @@ def high_resolution(): #bdm_slits.outboard.move(-4100.075) print("Resetting OCM Slits") - ocm_slits.top.move(-1065) - ocm_slits.bottom.move(1955.0) - ocm_slits.outboard.move(435.959) - ocm_slits.inboard.move(-294.037) + ocm_slits.top.move(3855) + ocm_slits.bottom.move(11105) + ocm_slits.outboard.move(2055.959) + ocm_slits.inboard.move(-1149.037) OCM_table.upstream_jack.move(4.14225) OCM_table.downstream_jack.move(-4.1700) OCM_table.X.move(-8.44701) print("Ready to go !") -def high_flux1(): +def high_flux1(): # 10 mm Beam on Mono Ni peak saturation print("Resetting white beam slits") - wb_slits.inboard.move(-14.05) - wb_slits.outboard.move(-7.125) + wb_slits.inboard.move(-9.05) + wb_slits.outboard.move(-2.125) print("Resetting Monochromator") - sbm.yaw.move(0.0) - sbm.roll.move(0.0) - sbm.pitch.move(-0.06569) - sbm.bend.move(2509) + sbm.yaw.move(0.00013) + sbm.roll.move(0.00010) + sbm.pitch.move(-0.13587) + sbm.bend.move(7708.9875) sbm.twist.move(0) print("Resetting Mirror") - Mirror_VFM.y_upstream.move(-0.7) - Mirror_VFM.y_downstream_inboard.move(-0.0202) - Mirror_VFM.y_downstream_outboard.move(0.3199) - Mirror_VFM.bend_upstream.move(60) - Mirror_VFM.bend_downstream.move(60) + Mirror_VFM.y_upstream.move(-0.6994) + Mirror_VFM.y_downstream_inboard.move(-0.019) + Mirror_VFM.y_downstream_outboard.move(0.3195) + Mirror_VFM.bend_upstream.move(50) + Mirror_VFM.bend_downstream.move(50) print("Resetting BDM Slits") #bdm_slits.top.move(999.957) @@ -60,34 +60,70 @@ def high_flux1(): #bdm_slits.outboard.move(-4100.075) print("Resetting OCM Slits") - 
ocm_slits.top.move(-1995) - ocm_slits.bottom.move(2005.0) - ocm_slits.outboard.move(250.959) - ocm_slits.inboard.move(50.963) - OCM_table.upstream_jack.move(4.14225) - OCM_table.downstream_jack.move(-4.1700) - OCM_table.X.move(-8.44701) + ocm_slits.top.move(3855) + ocm_slits.bottom.move(11105) + ocm_slits.outboard.move(2055.959) + ocm_slits.inboard.move(-1149.037) + OCM_table.upstream_jack.move(1.8665) + OCM_table.downstream_jack.move(-4.0372500) + #OCM_table.X.move(-8.44701) print("Ready to go !") +#8mm is similar to previous +def high_flux2(): # 8 mm Beam on Mono Ni peak 55000 cts + print("Resetting white beam slits") + wb_slits.inboard.move(-11.05) + wb_slits.outboard.move(-4.179438) + + print("Resetting Monochromator") + sbm.yaw.move(0.00013) + sbm.roll.move(0.00010) + sbm.pitch.move(-0.13587) + sbm.bend.move(7708.9875) + sbm.twist.move(0) + + print("Resetting Mirror") + Mirror_VFM.y_upstream.move(-0.6994) + Mirror_VFM.y_downstream_inboard.move(-0.019) + Mirror_VFM.y_downstream_outboard.move(0.3195) + Mirror_VFM.bend_upstream.move(50) + Mirror_VFM.bend_downstream.move(50) + + print("Resetting BDM Slits") + #bdm_slits.top.move(999.957) + #bdm_slits.bottom.move(-94363.970) + #bdm_slits.inboard.move(-7600.960) + #bdm_slits.outboard.move(-4100.075) + + print("Resetting OCM Slits") + ocm_slits.top.move(3855) + ocm_slits.bottom.move(11105) + ocm_slits.outboard.move(2055.959) + ocm_slits.inboard.move(-1149.037) + OCM_table.upstream_jack.move(1.8665) + OCM_table.downstream_jack.move(-4.0372500) + #OCM_table.X.move(-8.44701) + print("Ready to go !") -def high_flux2(): + +def high_flux3(): # 6 mm Beam on Mono Ni peak 40000 cts print("Resetting white beam slits") - wb_slits.inboard.move(-9.36) - wb_slits.outboard.move(-3.473156) + wb_slits.inboard.move(-12.05) + wb_slits.outboard.move(-5.179438) print("Resetting Monochromator") - sbm.yaw.move(0.0) - sbm.roll.move(0.0) - sbm.pitch.move(-0.07719) - sbm.bend.move(3000) - sbm.twist.move(0.0000) + sbm.yaw.move(0.00013) + sbm.roll.move(0.00010) + sbm.pitch.move(-0.13587) + sbm.bend.move(7708.9875) + sbm.twist.move(0) print("Resetting Mirror") - Mirror_VFM.y_upstream.move(-0.7) - Mirror_VFM.y_downstream_inboard.move(-0.02) - Mirror_VFM.y_downstream_outboard.move(0.32) - Mirror_VFM.bend_upstream.move(65) - Mirror_VFM.bend_downstream.move(65) + Mirror_VFM.y_upstream.move(-0.6994) + Mirror_VFM.y_downstream_inboard.move(-0.019) + Mirror_VFM.y_downstream_outboard.move(0.3195) + Mirror_VFM.bend_upstream.move(50) + Mirror_VFM.bend_downstream.move(50) print("Resetting BDM Slits") #bdm_slits.top.move(999.957) @@ -96,13 +132,48 @@ def high_flux2(): #bdm_slits.outboard.move(-4100.075) print("Resetting OCM Slits") - ocm_slits.top.move(-2215.0) - ocm_slits.bottom.move(2295.0) - ocm_slits.outboard.move(380.959) - ocm_slits.inboard.move(-49.037) - OCM_table.upstream_jack.move(4.14225) - OCM_table.downstream_jack.move(-4.1700) - OCM_table.X.move(-8.44701) + ocm_slits.top.move(3855) + ocm_slits.bottom.move(11105) + ocm_slits.outboard.move(2055.959) + ocm_slits.inboard.move(-1149.037) + OCM_table.upstream_jack.move(1.8665) + OCM_table.downstream_jack.move(-4.0372500) + #OCM_table.X.move(-8.44701) + print("Ready to go !") + +def high_flux4(): # 4 mm Beam on Mono Ni peak 25000 cts same focus as above + print("Resetting white beam slits") + wb_slits.inboard.move(-13.05) + wb_slits.outboard.move(-6.179438) + + print("Resetting Monochromator") + sbm.yaw.move(0.00013) + sbm.roll.move(0.00010) + sbm.pitch.move(-0.13587) + sbm.bend.move(7708.9875) + sbm.twist.move(0) + + 
print("Resetting Mirror") + Mirror_VFM.y_upstream.move(-0.6994) + Mirror_VFM.y_downstream_inboard.move(-0.019) + Mirror_VFM.y_downstream_outboard.move(0.3195) + Mirror_VFM.bend_upstream.move(50) + Mirror_VFM.bend_downstream.move(50) + + print("Resetting BDM Slits") + #bdm_slits.top.move(999.957) + #bdm_slits.bottom.move(-94363.970) + #bdm_slits.inboard.move(-7600.960) + #bdm_slits.outboard.move(-4100.075) + + print("Resetting OCM Slits") + ocm_slits.top.move(3855) + ocm_slits.bottom.move(11105) + ocm_slits.outboard.move(2055.959) + ocm_slits.inboard.move(-1149.037) + OCM_table.upstream_jack.move(1.8665) + OCM_table.downstream_jack.move(-4.0372500) + #OCM_table.X.move(-8.44701) print("Ready to go !") def saxs(): @@ -214,6 +285,10 @@ def turbo(): time.sleep(2) caput("XF:28ID1-ES:1{Env:01}Cmd-Cmd", 20) +# get direct beamcurrent +#def I0(): +# I0 = caget("SR:OPS-BI{DCCT:1}I:Real-I") + #---------------------------function to display the dark subtracted last image ---------------------------------- from tifffile import imread, imshow, imsave def lastimage(n): @@ -223,6 +298,7 @@ def lastimage(n): if data1 != None: light_img=data1['pe1c_image'] + dark_uid=hdr.start. get('sc_dk_field_uid') dk_hdrs=db(uid=dark_uid) for dk_hdr in dk_hdrs: From e5791c9ec3aaba77a48d1e0c98b8619d90487f89 Mon Sep 17 00:00:00 2001 From: PDF Operator Date: Thu, 25 Mar 2021 14:03:40 -0400 Subject: [PATCH 2/5] import jogging functionality from XPD --- startup/98-jog_scans.py | 162 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 162 insertions(+) create mode 100644 startup/98-jog_scans.py diff --git a/startup/98-jog_scans.py b/startup/98-jog_scans.py new file mode 100644 index 0000000..c6f8ff9 --- /dev/null +++ b/startup/98-jog_scans.py @@ -0,0 +1,162 @@ +from bluesky.utils import short_uid + +def future_count(detectors, num=1, delay=None, *, per_shot=None, md=None): + """ + Take one or more readings from detectors. + Parameters + ---------- + detectors : list + list of 'readable' objects + num : integer, optional + number of readings to take; default is 1 + If None, capture data until canceled + delay : iterable or scalar, optional + Time delay in seconds between successive readings; default is 0. + per_shot : callable, optional + hook for customizing action of inner loop (messages per step) + Expected signature :: + def f(detectors: Iterable[OphydObj]) -> Generator[Msg]: + ... + md : dict, optional + metadata + Notes + ----- + If ``delay`` is an iterable, it must have at least ``num - 1`` entries or + the plan will raise a ``ValueError`` during iteration. 
+ """ + if num is None: + num_intervals = None + else: + num_intervals = num - 1 + _md = { + "detectors": [det.name for det in detectors], + "num_points": num, + "num_intervals": num_intervals, + "plan_args": {"detectors": list(map(repr, detectors)), "num": num}, + "plan_name": "count", + "hints": {}, + } + _md.update(md or {}) + _md["hints"].setdefault("dimensions", [(("time",), "primary")]) + + if per_shot is None: + per_shot = bps.one_shot + + @bpp.stage_decorator(detectors) + @bpp.run_decorator(md=_md) + def inner_count(): + return ( + yield from bps.repeat(partial(per_shot, detectors), num=num, delay=delay) + ) + + return (yield from inner_count()) + + +def _xpd_pre_plan(dets, exposure): + """Handle detector exposure time + xpdan required metadata""" + + def configure_area_det(det, exposure): + '''Configure an area detector in "continuous mode"''' + + def _check_mini_expo(exposure, acq_time): + if exposure < acq_time: + raise ValueError( + "WARNING: total exposure time: {}s is shorter " + "than frame acquisition time {}s\n" + "you have two choices:\n" + "1) increase your exposure time to be at least" + "larger than frame acquisition time\n" + "2) increase the frame rate, if possible\n" + " - to increase exposure time, simply resubmit" + " the ScanPlan with a longer exposure time\n" + " - to increase frame-rate/decrease the" + " frame acquisition time, please use the" + " following command:\n" + " >>> {} \n then rerun your ScanPlan definition" + " or rerun the xrun.\n" + "Note: by default, xpdAcq recommends running" + "the detector at its fastest frame-rate\n" + "(currently with a frame-acquisition time of" + "0.1s)\n in which case you cannot set it to a" + "lower value.".format( + exposure, + acq_time, + ">>> glbl['frame_acq_time'] = 0.5 #set" " to 0.5s", + ) + ) + + # todo make + ret = yield from bps.read(det.cam.acquire_time) + if ret is None: + acq_time = 1 + else: + acq_time = ret[det.cam.acquire_time.name]["value"] + _check_mini_expo(exposure, acq_time) + if hasattr(det, "images_per_set"): + # compute number of frames + num_frame = np.ceil(exposure / acq_time) + yield from bps.mov(det.images_per_set, num_frame) + else: + # The dexela detector does not support `images_per_set` so we just + # use whatever the user asks for as the thing + # TODO: maybe put in warnings if the exposure is too long? 
+ num_frame = 1 + computed_exposure = num_frame * acq_time + + # print exposure time + print( + "INFO: requested exposure time = {} - > computed exposure time" + "= {}".format(exposure, computed_exposure) + ) + return num_frame, acq_time, computed_exposure + + # setting up area_detector + for ad in (d for d in dets if hasattr(d, "cam")): + (num_frame, acq_time, computed_exposure) = yield from configure_area_det( + ad, exposure + ) + else: + acq_time = 0 + computed_exposure = exposure + num_frame = 0 + + sp = { + "time_per_frame": acq_time, + "num_frames": num_frame, + "requested_exposure": exposure, + "computed_exposure": computed_exposure, + "type": "ct", + "uid": str(uuid.uuid4()), + "plan_name": "ct", + } + + # update md + _md = {"sp": sp, **{f"sp_{k}": v for k, v in sp.items()}} + + return _md + + +def rocking_ct(dets, exposure, motor, start, stop, *, num=1, md=None): + """Take a count while "rocking" the y-position""" + _md = md or {} + sp_md = yield from _xpd_pre_plan(dets, exposure) + _md.update(sp_md) + + @bpp.reset_positions_decorator([motor.velocity]) + def per_shot(dets): + nonlocal start, stop + yield from bps.mv(motor, start) # got to initial position + yield from bps.mv(motor.velocity, abs(stop - start) / exposure) # set velocity + gp = short_uid("rocker") + yield from bps.abs_set(motor, stop, group=gp) # set motor to move towards end + yield from bps.trigger_and_read(dets) # collect off detector + yield from bps.wait(group=gp) + start, stop = stop, start + + return (yield from future_count(dets, md=_md, per_shot=per_shot, num=num)) + +def jog(exposure_s, motor, start, stop): + """ pass total exposure time (in seconds), motor name (i.e. Grid_Y), start and stop positions for the motor.""" + yield from rocking_ct([pe1c], exposure_s, motor, start, stop) + + From 97eb903ede73e1ad67e2075d050b16c3a296e404 Mon Sep 17 00:00:00 2001 From: Thomas A Caswell Date: Thu, 25 Mar 2021 14:09:07 -0400 Subject: [PATCH 3/5] ENH: add sorenen power supply and plans around it --- startup/12-sorensen.py | 204 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 204 insertions(+) create mode 100644 startup/12-sorensen.py diff --git a/startup/12-sorensen.py b/startup/12-sorensen.py new file mode 100644 index 0000000..1c6d962 --- /dev/null +++ b/startup/12-sorensen.py @@ -0,0 +1,204 @@ +from ophyd import EpicsSignal +import numpy as np +import bluesky.plan_stubs as bps +import bluesky.preprocessors as bpp + +def gimme_voltage(): + return caget('XF:28ID1-ES{PSU:SRS}E-I') + +def gimme_current(): + return caget('XF:28ID1-ES{PSU:SRS}I-I') + +def gimme_T(): + return lakeshore336.read()['lakeshore336_temp_C_T']['value'] + + +def _paranoid_set_and_wait( + signal, val, poll_time=0.01, timeout=10, rtol=None, atol=None +): + """Set a signal to a value and wait until it reads correctly. + + For floating point values, it is strongly recommended to set a tolerance. + If tolerances are unset, the values will be compared exactly. 
+ + Parameters + ---------- + signal : EpicsSignal (or any object with `get` and `put`) + val : object + value to set signal to + poll_time : float, optional + how soon to check whether the value has been successfully set + timeout : float, optional + maximum time to wait for value to be successfully set + rtol : float, optional + allowed relative tolerance between the readback and setpoint values + atol : float, optional + allowed absolute tolerance between the readback and setpoint values + + Raises + ------ + TimeoutError if timeout is exceeded + """ + from bluesky.utils.epics_pvs import _compare_maybe_enum, logger + import time as ttime + + signal.put(val) + expiration_time = ttime.time() + timeout if timeout is not None else None + current_value = signal.get() + + if atol is None and hasattr(signal, "tolerance"): + atol = signal.tolerance + if rtol is None and hasattr(signal, "rtolerance"): + rtol = signal.rtolerance + + try: + enum_strings = signal.enum_strs + except AttributeError: + enum_strings = () + + if atol is not None: + within_str = ["within {!r}".format(atol)] + else: + within_str = [] + + if rtol is not None: + within_str.append("(relative tolerance of {!r})".format(rtol)) + + if within_str: + within_str = " ".join([""] + within_str) + else: + within_str = "" + + while current_value is None or not _compare_maybe_enum( + val, current_value, enum_strings, atol, rtol + ): + logger.debug( + "Waiting for %s to be set from %r to %r%s...", + signal.name, + current_value, + val, + within_str, + ) + ttime.sleep(poll_time) + if poll_time < 0.1: + poll_time *= 2 # logarithmic back-off + current_value = signal.get() + if expiration_time is not None and ttime.time() > expiration_time: + raise TimeoutError( + "Attempted to set %r to value %r and timed " + "out after %r seconds. Current value is %r." 
+ % (signal, val, timeout, current_value) + ) + + +class ParnoidEpicsSignal(EpicsSignal): + def _set_and_wait(self, val): + return _paranoid_set_and_wait( + self, value, timeout=timeout, atol=self.tolerance, rtol=self.rtolerance + ) + + def get(self): + ret = super().get() + for j in range(5): + if ret is not None: + return ret + ttime.sleep(0.1) + ret = super().get() + else: + raise RuntimeError("getting all nones") + + +sorensen850_manual = ParnoidEpicsSignal( + "XF:28ID1-ES{LS336:1-Out:3}Out:Man-RB", + write_pv="XF:28ID1-ES{LS336:1-Out:3}Out:Man-SP", + name="sorensen850_manual", + tolerance=0.1, +) +import uuid +import bluesky.plans as bp + +lakeshore336.read_attrs = ["temp", "temp.C", "temp.C.T"] +lakeshore336.temp.C.T.kind = "hinted" + + +def power_ramp(start, stop, steps, *, exposure, settle_time=0, n_per_hold=1, **kwargs): + ramp_uid = str(uuid.uuid4()) + for p in np.linspace(start, stop, steps): + yield from bps.mv(sorensen850_manual, p) + if settle_time > 0: + yield from bps.sleep(settle_time) + for j in range(n_per_hold): + yield from bpp.baseline_wrapper( + simple_ct( + [pe1c] + [sorensen850_manual, lakeshore336], + md={"ramp_uid": ramp_uid}, + **kwargs, + exposure=exposure, + ), + [lakeshore336, ring_current, sorensen850_manual], + ) + + +from pathlib import Path +import pandas as pd + + +def write_single_calibration_data_to_csv_and_make_tom_sad(uid, path=Path(".")): + h = db[uid] + tbl = h.table() + tbl["delta"] = (tbl.time - tbl.time.iloc[0]).dt.total_seconds() + tbl = tbl.set_index(tbl["delta"]) + + power = tbl["sorensen850_manual"].mean() + T_start = tbl["lakeshore336_temp_C_T"].iloc[0] + T_stop = tbl["lakeshore336_temp_C_T"].iloc[-1] + + out = path / f"power_{power:.2f}-Tstart_{T_start:.2f}-Tstop_{T_stop:.2f}.csv" + tbl[["lakeshore336_temp_C_T"]].to_csv(out) + + return tbl + + +def write_calibration_data_to_csv_and_make_tom_sad( + uid_list, path=Path("/tmp/sorensen_calibration.csv") +): + headers = [db[uid] for uid in uid_list] + headers = sorted(headers, key=lambda h: h.start["time"]) + + merged_table = pd.concat([h.table() for h in headers]) + merged_table["delta"] = ( + merged_table["time"] - merged_table["time"].iloc[0] + ).dt.total_seconds() + merged_table = merged_table.set_index(merged_table["delta"]) + + merged_table.to_csv(path) + return merged_table + + +def power_calibration_ramp(power_levels, *, hold_time, n_per_hold=10, path): + ramp_uid = str(uuid.uuid4()) + out_uids = [] + + def inner(): + for p in power_levels: + yield from bps.mv(sorensen850_manual, p) + try: + uid = yield from bp.count( + [lakeshore336, sorensen850_manual], + num=n_per_hold, + delay=hold_time / n_per_hold, + md={"ramp_uid": ramp_uid, "purpose": "sorensen calibration"}, + ) + out_uids.append(uid) + except Exception as e: + # We want to prioritize this not crashing over night + print(e) + continue + else: + write_calibration_data_to_csv_and_make_tom_sad(out_uids, path) + return out_uids + + def cleanup(): + yield from bps.mv(sorensen850_manual, 0) + + return (yield from bpp.finalize_wrapper(inner(), cleanup)) From bda2d9894251e4cfa1bd2022f7df8397c6909d02 Mon Sep 17 00:00:00 2001 From: Thomas A Caswell Date: Thu, 25 Mar 2021 14:11:34 -0400 Subject: [PATCH 4/5] Final version of sorensen code --- startup/12-sorensen.py | 738 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 721 insertions(+), 17 deletions(-) diff --git a/startup/12-sorensen.py b/startup/12-sorensen.py index 1c6d962..0022971 100644 --- a/startup/12-sorensen.py +++ b/startup/12-sorensen.py @@ -2,17 +2,10 @@ import numpy 
as np import bluesky.plan_stubs as bps import bluesky.preprocessors as bpp - -def gimme_voltage(): - return caget('XF:28ID1-ES{PSU:SRS}E-I') - -def gimme_current(): - return caget('XF:28ID1-ES{PSU:SRS}I-I') - -def gimme_T(): - return lakeshore336.read()['lakeshore336_temp_C_T']['value'] +import time as ttime +# This is fixed in ophyd 1.6.2 def _paranoid_set_and_wait( signal, val, poll_time=0.01, timeout=10, rtol=None, atol=None ): @@ -160,21 +153,27 @@ def write_single_calibration_data_to_csv_and_make_tom_sad(uid, path=Path(".")): def write_calibration_data_to_csv_and_make_tom_sad( - uid_list, path=Path("/tmp/sorensen_calibration.csv") + uid_list, *, fname=None, stream_name="primary" ): - headers = [db[uid] for uid in uid_list] + if len(uid_list) and isinstance(uid_list[0], str): + headers = [db[uid] for uid in uid_list] + else: + headers = uid_list headers = sorted(headers, key=lambda h: h.start["time"]) - merged_table = pd.concat([h.table() for h in headers]) - merged_table["delta"] = ( - merged_table["time"] - merged_table["time"].iloc[0] - ).dt.total_seconds() - merged_table = merged_table.set_index(merged_table["delta"]) + merged_table = pd.concat([h.table(stream_name=stream_name) for h in headers]) + dt = (merged_table["time"] - merged_table["time"].iloc[0]).dt.total_seconds() + dt.name = "delta_time" + merged_table = merged_table.set_index(dt) - merged_table.to_csv(path) + if fname is not None: + merged_table.to_csv(fname) return merged_table +from bluesky.utils import RunEngineControlException + + def power_calibration_ramp(power_levels, *, hold_time, n_per_hold=10, path): ramp_uid = str(uuid.uuid4()) out_uids = [] @@ -190,6 +189,8 @@ def inner(): md={"ramp_uid": ramp_uid, "purpose": "sorensen calibration"}, ) out_uids.append(uid) + except RunEngineControlException: + raise except Exception as e: # We want to prioritize this not crashing over night print(e) @@ -202,3 +203,706 @@ def cleanup(): yield from bps.mv(sorensen850_manual, 0) return (yield from bpp.finalize_wrapper(inner(), cleanup)) + + +class RampControl(Device): + delta = Cpt(EpicsSignal, "RampDelta") + done = Cpt(EpicsSignal, "RampDone-Cmd") + take_xrd = Cpt(EpicsSignal, "TakeXRD-Cmd") + + +ramp_control = RampControl("OvenRampControl:", name="ramp_control") + +try: + from bluesky.plan_stubs import rd +except ImportError: + + def rd(obj, *, default_value=0): + """Reads a single-value non-triggered object + + This is a helper plan to get the scalar value out of a Device + (such as an EpicsMotor or a single EpicsSignal). + + For devices that have more than one read key the following rules are used: + + - if exactly 1 field is hinted that value is used + - if no fields are hinted and there is exactly 1 value in the + reading that value is used + - if more than one field is hinted an Exception is raised + - if no fields are hinted and there is more than one key in the reading an + Exception is raised + + The devices is not triggered and this plan does not create any Events + + Parameters + ---------- + obj : Device + The device to be read + + default_value : Any + The value to return when not running in a "live" RunEngine. + This come ups when :: + + ret = yield Msg('read', obj) + assert ret is None + + the plan is passed to `list` or some other iterator that + repeatedly sends `None` into the plan to advance the + generator. 
+ + Returns + ------- + val : Any or None + The "single" value of the device + + """ + hints = getattr(obj, "hints", {}).get("fields", []) + if len(hints) > 1: + msg = ( + f"Your object {obj} ({obj.name}.{getattr(obj, 'dotted_name', '')}) " + f"has {len(hints)} items hinted ({hints}). We do not know how to " + "pick out a single value. Please adjust the hinting by setting the " + "kind of the components of this device or by rd ing one of it's components" + ) + raise ValueError(msg) + elif len(hints) == 0: + hint = None + if hasattr(obj, "read_attrs"): + if len(obj.read_attrs) != 1: + msg = ( + f"Your object {obj} ({obj.name}.{getattr(obj, 'dotted_name', '')}) " + f"and has {len(obj.read_attrs)} read attrs. We do not know how to " + "pick out a single value. Please adjust the hinting/read_attrs by " + "setting the kind of the components of this device or by rd ing one " + "of its components" + ) + + raise ValueError(msg) + # len(hints) == 1 + else: + (hint,) = hints + + ret = yield from read(obj) + + # list-ify mode + if ret is None: + return default_value + + if hint is not None: + return ret[hint]["value"] + + # handle the no hint 1 field case + try: + (data,) = ret.values() + except ValueError as er: + msg = ( + f"Your object {obj} ({obj.name}.{getattr(obj, 'dotted_name', '')}) " + f"and has {len(ret)} read values. We do not know how to pick out a " + "single value. Please adjust the hinting/read_attrs by setting the " + "kind of the components of this device or by rd ing one of its components" + ) + + raise ValueError(msg) from er + else: + return data["value"] + + +from dataclasses import dataclass + + +@dataclass(frozen=True) +class MotorPositions: + beamstop_x: float + beamstop_y: float + detector: float + + +near_positions = MotorPositions( + beamstop_x=-17.02152375, beamstop_y=0.717885, detector=3857.0 +) +far_positions = MotorPositions( + beamstop_x=-16.541525, beamstop_y=0.437885, detector=4973.0 +) + +from xpdacq.beamtime import close_shutter_stub + + +def power_ramp_controlled( + *, + min_power_pct: float = 0, + max_power_pct: float = 1, # max 100 + exposure: float, + n_per_step=1, + beamtime, + xrd_sample_name: str, + pdf_sample_name: str, + near_positions, + far_positions, + diagostic_T_file=None, + ramp_uid=None, +): + """ + Plan to take externally controlled temperature ramps. + + This plan consults two PVs to determine the current ramp rate (delta) and + if enough data has been collected and we should exit (more graceful than ctrl-C). + + At each hold point *n_per_point* sets of xrd and pdf will be taken. The + hold time per temperature will be approximately + + hold_time = (2*exposure + 70)*n_per_point + + Parameters + ---------- + min_power_pct : float + The minimum power (as a perentage) to give the heater + max_power_pct : float + The maxmimum power (as a percentage) to give the heater + exposure : float + Exposure time in seconds for each shot + n_per_step : int, optional + The number of exposures to take at each power step + beamtime : xpdacq.xpdacq.Beamtime + Used to get the sample meta-data + xrd_sample_name : str + Looked up in beamtime to get sample meta-data + pdf_sample_same : str + Looked up in beamtime to get sample meta-data + near_positions, far_positions : MotorPositions + The location of the beamstop and detector for "near" (PDF) and "far" (XRD) + measurements + diagsostic_T_file : Path + If you must. 
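+
+    Examples
+    --------
+    A hedged sketch; the sample names and the beamtime object ``bt`` are
+    illustrative, not guaranteed by this profile::
+
+        RE(power_ramp_controlled(
+            max_power_pct=40, exposure=5, n_per_step=2, beamtime=bt,
+            xrd_sample_name="Ni_xrd", pdf_sample_name="Ni_pdf",
+            near_positions=near_positions, far_positions=far_positions,
+        ))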
+ """ + if ramp_uid is None: + ramp_uid = str(uuid.uuid4()) + xrd_sample = beamtime.samples[xrd_sample_name] + pdf_sample = beamtime.samples[pdf_sample_name] + + detector_motor = Det_1_Z + beam_stop = BStop1 + + baseline_detectors = [ + lakeshore336, + ring_current, + beam_stop, + detector_motor, + Grid_X, + Grid_Y, + Grid_Z, + sorensen850_manual, + ] + main_detectors = [pe1c, sorensen850_manual] + + motor_snap_shot_for_dan = { + k: globals()[k].read() for k in ["Grid_X", "Grid_Y", "Grid_Z"] + } + + def collect_cycle(ramp_phase, delta=0): + # PDF measurement + print("/n/nmoving to PDF distance/n") + yield from bps.mv( + beam_stop.x, + near_positions.beamstop_x, + beam_stop.y, + near_positions.beamstop_y, + detector_motor, + near_positions.detector, + ) + pdf_uid = yield from bpp.baseline_wrapper( + simple_ct( + main_detectors, + md={ + "ramp_uid": ramp_uid, + "ramp_phase": ramp_phase, + **pdf_sample, + **motor_snap_shot_for_dan, + "delta": delta, + }, + exposure=exposure, + ), + baseline_detectors, + ) + yield from close_shutter_stub() + # XRD measurement + print("/n/nmoving to XRD position/n") + take_xrd = yield from rd(ramp_control.take_xrd) + if take_xrd: + yield from bps.mv( + beam_stop.x, + far_positions.beamstop_x, + beam_stop.y, + far_positions.beamstop_y, + detector_motor, + far_positions.detector, + ) + xrd_uid = yield from bpp.baseline_wrapper( + simple_ct( + main_detectors, + md={ + "ramp_uid": ramp_uid, + "ramp_phase": ramp_phase, + **xrd_sample, + **motor_snap_shot_for_dan, + }, + exposure=exposure, + ), + baseline_detectors, + ) + yield from close_shutter_stub() + return [] + + uids = [] + + yield from bps.mv(ramp_control.done, 0) + + p = yield from rd(sorensen850_manual, default_value=min_power_pct) + print(f"starting at power {p}") + yield from bps.mv(sorensen850_manual, p) + + # for reasons TAC does not understand this is returning [None, None] + # suspect it is due to one of the xpdacq wrappers not forwarding returs? + data_uids = yield from collect_cycle("initial") + uids.extend(data_uids) + + done = yield from rd(ramp_control.done, default_value=True) + while not done: + delta = yield from rd(ramp_control.delta) + if delta > 0: + ramp_phase = "rising" + elif delta < 0: + ramp_phase = "falling" + else: + ramp_phase = "holding" + + p = np.clip(p + delta, min_power_pct, max_power_pct) + print(f"\n\n moving to {p} with {delta} step") + + yield from bps.mv(sorensen850_manual, p) + + for j in range(n_per_step): + print( + "\n\ntemperature is currently " + + str(lakeshore336.read()["lakeshore336_temp_C_T"]["value"]) + ) + print("on step " + str(j) + " of " + str(n_per_step)) + data_uids = yield from collect_cycle(ramp_phase, delta) + uids.extend(data_uids) + if diagostic_T_file is not None: + + write_calibration_data_to_csv_and_make_tom_sad( + list(db(ramp_uid=ramp_uid)), + fname=diagostic_T_file, + stream_name="baseline", + ) + done = yield from rd(ramp_control.done, default_value=True) + + uids.append((yield from collect_cycle("final"))) + return uids + + +# TODO reuse the code from above, but copy-paste for not to be sure +# we do not introduce bugs while refactoring. +def power_ramp_sequence( + *, + power_pct_seq, + exposure: float, + n_per_step=1, + beamtime, + xrd_sample_name: str, + pdf_sample_name: str, + near_positions, + far_positions, + diagostic_T_file=None, +): + """ + Plan to take externally controlled temperature ramps. 
+ + This plan consults two PVs to determine the current ramp rate (delta) and + if enough data has been collected and we should exit (more graceful than ctrl-C). + + At each hold point *n_per_point* sets of xrd and pdf will be taken. The + hold time per temperature will be approximately + + hold_time = (2*exposure + 70)*n_per_point + + Parameters + ---------- + power_pct : Iterable[float] + Sequence of power precentages + exposure : float + Exposure time in seconds for each shot + n_per_step : int, optional + The number of exposures to take at each power step + beamtime : xpdacq.xpdacq.Beamtime + Used to get the sample meta-data + xrd_sample_name : str + Looked up in beamtime to get sample meta-data + pdf_sample_same : str + Looked up in beamtime to get sample meta-data + near_positions, far_positions : MotorPositions + The location of the beamstop and detector for "near" (PDF) and "far" (XRD) + measurements + diagsostic_T_file : Path + If you must. + """ + ramp_uid = str(uuid.uuid4()) + xrd_sample = beamtime.samples[xrd_sample_name] + pdf_sample = beamtime.samples[pdf_sample_name] + + detector_motor = Det_1_Z + beam_stop = BStop1 + + baseline_detectors = [ + lakeshore336, + ring_current, + beam_stop, + detector_motor, + Grid_X, + Grid_Y, + Grid_Z, + sorensen850_manual, + ] + main_detectors = [pe1c, sorensen850_manual] + + motor_snap_shot_for_dan = { + k: globals()[k].read() for k in ["Grid_X", "Grid_Y", "Grid_Z"] + } + + def collect_cycle(ramp_phase, delta=0): + # PDF measurement + print("\n\nmoving to PDF distance\n") + yield from bps.mv( + beam_stop.x, + near_positions.beamstop_x, + beam_stop.y, + near_positions.beamstop_y, + detector_motor, + near_positions.detector, + ) + pdf_uid = yield from bpp.baseline_wrapper( + simple_ct( + main_detectors, + md={ + "ramp_uid": ramp_uid, + "ramp_phase": ramp_phase, + **pdf_sample, + **motor_snap_shot_for_dan, + "delta": delta, + }, + exposure=exposure, + ), + baseline_detectors, + ) + yield from close_shutter_stub() + take_xrd = yield from rd(ramp_control.take_xrd) + if take_xrd: + # XRD measurement + print("\n\nmoving to XRD position\n") + yield from bps.mv( + beam_stop.x, + far_positions.beamstop_x, + beam_stop.y, + far_positions.beamstop_y, + detector_motor, + far_positions.detector, + ) + xrd_uid = yield from bpp.baseline_wrapper( + simple_ct( + main_detectors, + md={ + "ramp_uid": ramp_uid, + "ramp_phase": ramp_phase, + **xrd_sample, + **motor_snap_shot_for_dan, + }, + exposure=exposure, + ), + baseline_detectors, + ) + yield from close_shutter_stub() + return [] + + uids = [] + + first_power, power_seq_tail = power_pct_seq + + yield from bps.mv(sorensen850_manual, first_power) + + # for reasons TAC does not understand this is returning [None, None] + # suspect it is due to one of the xpdacq wrappers not forwarding returs? 
+ data_uids = yield from collect_cycle("initial") + uids.extend(data_uids) + + last_power = first_power + for p in power_seq_tail: + delta = p - last_power + last_power = p + if delta > 0: + ramp_phase = "rising" + elif delta < 0: + ramp_phase = "falling" + else: + ramp_phase = "holding" + + print(f"\n\n!!Moving to power {p} with delta {delta}") + yield from bps.mv(sorensen850_manual, p) + + for j in range(n_per_step): + print( + "/n/ntemperature is currently " + + str(lakeshore336.read()["lakeshore336_temp_C_T"]["value"]) + ) + print("on step " + str(j) + " of " + str(n_per_step)) + data_uids = yield from collect_cycle(ramp_phase, delta) + uids.extend(data_uids) + if diagostic_T_file is not None: + + write_calibration_data_to_csv_and_make_tom_sad( + list(db(ramp_uid=ramp_uid)), + fname=diagostic_T_file, + stream_name="baseline", + ) + + uids.extend((yield from collect_cycle("final"))) + return uids + + +# TODO reuse the code from above, but copy-paste for not to be sure +# we do not introduce bugs while refactoring. +def power_ramp_T_threshold( + *, + start_power_pct, + max_temperature, + delta_power, + max_power_pct, + exposure: float, + n_per_step=1, + beamtime, + xrd_sample_name: str, + pdf_sample_name: str, + near_positions, + far_positions, + diagostic_T_file=None, +): + """ + Plan to take externally controlled temperature ramps. + + This plan consults two PVs to determine the current ramp rate (delta) and + if enough data has been collected and we should exit (more graceful than ctrl-C). + + At each hold point *n_per_point* sets of xrd and pdf will be taken. The + hold time per temperature will be approximately + + hold_time = (2*exposure + 70)*n_per_point + + Parameters + ---------- + exposure : float + Exposure time in seconds for each shot + n_per_step : int, optional + The number of exposures to take at each power step + beamtime : xpdacq.xpdacq.Beamtime + Used to get the sample meta-data + xrd_sample_name : str + Looked up in beamtime to get sample meta-data + pdf_sample_same : str + Looked up in beamtime to get sample meta-data + near_positions, far_positions : MotorPositions + The location of the beamstop and detector for "near" (PDF) and "far" (XRD) + measurements + diagsostic_T_file : Path + If you must. 
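+
+    Examples
+    --------
+    A hedged sketch; the power limits, temperature threshold, and sample
+    names are illustrative, not part of this profile::
+
+        RE(power_ramp_T_threshold(
+            start_power_pct=5, max_temperature=600, delta_power=1,
+            max_power_pct=50, exposure=5, beamtime=bt,
+            xrd_sample_name="Ni_xrd", pdf_sample_name="Ni_pdf",
+            near_positions=near_positions, far_positions=far_positions,
+        ))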
+ """ + ramp_uid = str(uuid.uuid4()) + xrd_sample = beamtime.samples[xrd_sample_name] + pdf_sample = beamtime.samples[pdf_sample_name] + + detector_motor = Det_1_Z + beam_stop = BStop1 + + baseline_detectors = [ + lakeshore336, + ring_current, + beam_stop, + detector_motor, + Grid_X, + Grid_Y, + Grid_Z, + sorensen850_manual, + ] + main_detectors = [pe1c, sorensen850_manual] + + motor_snap_shot_for_dan = { + k: globals()[k].read() for k in ["Grid_X", "Grid_Y", "Grid_Z"] + } + + def collect_cycle(ramp_phase, delta=0): + # PDF measurement + print("\n\nmoving to PDF distance\n") + yield from bps.mv( + beam_stop.x, + near_positions.beamstop_x, + beam_stop.y, + near_positions.beamstop_y, + detector_motor, + near_positions.detector, + ) + pdf_uid = yield from bpp.baseline_wrapper( + simple_ct( + main_detectors, + md={ + "ramp_uid": ramp_uid, + "ramp_phase": ramp_phase, + **pdf_sample, + **motor_snap_shot_for_dan, + "delta": delta, + }, + exposure=exposure, + ), + baseline_detectors, + ) + yield from close_shutter_stub() + take_xrd = yield from rd(ramp_control.take_xrd) + if take_xrd: + # XRD measurement + print("\n\nmoving to XRD position\n") + yield from bps.mv( + beam_stop.x, + far_positions.beamstop_x, + beam_stop.y, + far_positions.beamstop_y, + detector_motor, + far_positions.detector, + ) + xrd_uid = yield from bpp.baseline_wrapper( + simple_ct( + main_detectors, + md={ + "ramp_uid": ramp_uid, + "ramp_phase": ramp_phase, + **xrd_sample, + **motor_snap_shot_for_dan, + }, + exposure=exposure, + ), + baseline_detectors, + ) + yield from close_shutter_stub() + return [] + + uids = [] + + p = start_power_pct + + yield from bps.mv(sorensen850_manual, p) + + # for reasons TAC does not understand this is returning [None, None] + # suspect it is due to one of the xpdacq wrappers not forwarding returs? 
+ data_uids = yield from collect_cycle("initial") + uids.extend(data_uids) + + reversed = False + + delta = delta_power + while True: + p = np.clip(p + delta, 0, max_power_pct) + T = yield from rd(lakeshore336) + if T > max_temperature and not reversed: + delta = -delta + if delta > 0: + ramp_phase = "rising" + elif delta < 0: + ramp_phase = "falling" + else: + ramp_phase = "holding" + + print(f"\n\n!!Moving to power {p} with delta {delta}") + yield from bps.mv(sorensen850_manual, p) + + for j in range(n_per_step): + print( + "\n\ntemperature is currently " + + str(lakeshore336.read()["lakeshore336_temp_C_T"]["value"]) + ) + print("on step " + str(j) + " of " + str(n_per_step)) + data_uids = yield from collect_cycle(ramp_phase, delta) + uids.extend(data_uids) + if diagostic_T_file is not None: + + write_calibration_data_to_csv_and_make_tom_sad( + list(db(ramp_uid=ramp_uid)), + fname=diagostic_T_file, + stream_name="baseline", + ) + if p <= 0: + break + + uids.extend((yield from collect_cycle("final"))) + return uids + + +def bring_to_temperature(power_supply, thermo, *, out_path): + first_time = None + + def writer_call_back(name, doc): + nonlocal first_time + + if name != "event": + return + if first_time is None: + first_time = doc["time"] + with open(out_path, "a+") as fout: + data = [ + str(doc["data"][k]) + for k in ["sorensen850_manual", "lakeshore336_temp_C_T"] + ] + data_str = ",".join(data) + fout.write(f'{doc["time"] - first_time},{data_str}\n') + + condition_time = 5 * 60 + condition_steps = 15 + sub_condition_time = condition_time / condition_steps + + condition_temperature_step = 50 + + def condition_loop(): + print(f"entering {condition_time}s hold") + for i in range(condition_steps): + print(f" stage {i} / {condition_steps}") + yield from bps.trigger_and_read([power_supply, thermo]) + yield from bps.sleep(sub_condition_time) + yield from bps.trigger_and_read([power_supply, thermo]) + + @bpp.subs_decorator(writer_call_back) + @bpp.run_decorator() + def inner(): + yield from bps.trigger_and_read([power_supply, thermo]) + for p in np.arange(2.5, 30.0001, 0.1): + yield from bps.mv(power_supply, p) + yield from bps.checkpoint() + yield from bps.sleep(5) + yield from bps.trigger_and_read([power_supply, thermo]) + + yield from condition_loop() + + T = yield from rd(thermo) + t_target = T + condition_temperature_step + p_cur = yield from rd(power_supply) + while t_target < 1000: + + while T < t_target: + p_cur += 0.1 + yield from bps.mv(power_supply, p_cur) + yield from bps.checkpoint() + yield from bps.sleep(5) + yield from bps.trigger_and_read([power_supply, thermo]) + T = yield from rd(thermo) + + t_target += condition_temperature_step + + yield from condition_loop() + print(f"new_target {t_target}") + + ret = yield from inner() + return ret From 84c382b9e14dc858d3f2f139015bdb3870cfa3d1 Mon Sep 17 00:00:00 2001 From: Thomas A Caswell Date: Thu, 25 Mar 2021 14:14:01 -0400 Subject: [PATCH 5/5] revive software flyscans for xrd mapping --- startup/{98-map_scans.bak => 98-map_scans.py} | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) rename startup/{98-map_scans.bak => 98-map_scans.py} (95%) diff --git a/startup/98-map_scans.bak b/startup/98-map_scans.py similarity index 95% rename from startup/98-map_scans.bak rename to startup/98-map_scans.py index 6b4c2df..e66ed6c 100644 --- a/startup/98-map_scans.bak +++ b/startup/98-map_scans.py @@ -113,7 +113,7 @@ def dp(det : Detector, shell : SnapshotShell): How far to move beyond the fly dimensions to get up to speed snake : 
bool - If we should "snake" or "typewriter" the fly axis + If we should "snake" or "comb" the fly axis """ # TODO input validation @@ -170,9 +170,17 @@ def dp(det : Detector, shell : SnapshotShell): speed = abs(fly_stop - fly_start) / (fly_pixels * computed_dwell_time) shell = SnapshotShell() + baseline_dets = [Grid_Z, Det_1_X, Det_1_Y, Det_1_Z] + + motor_snap_shot_for_dan = { + k: globals()[k].read() for k in ["Grid_Z", "Det_1_X", "Det_1_Y", "Det_1_Z"] + } + + _md.update(motor_snap_shot_for_dan) @bpp.reset_positions_decorator([fly_motor.velocity]) @bpp.set_run_key_decorator(f"xrd_map_{uuid.uuid4()}") + @bpp.baseline_decorator(baseline_dets) @bpp.stage_decorator(dets) @bpp.run_decorator(md=_md) def inner(): @@ -220,7 +228,7 @@ def inner(): yield from bps.mv(px_stop, stop_pos) # generate the event yield from bps.create("primary") - for obj in dets + [px_start, px_stop, step_motor]: + for obj in dets + [px_start, px_stop, step_motor, ring_current, fly_motor.velocity]: yield from bps.read(obj) yield from bps.save() yield from bps.checkpoint()
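
For reference, a minimal way to exercise the jogging plan imported in patch 2,
assuming the pe1c detector and Grid_Y motor defined in this profile; the
exposure and positions are illustrative only:

    # rock Grid_Y between 10.0 and 12.0 while exposing for 30 s total
    RE(jog(30, Grid_Y, 10.0, 12.0))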