diff --git a/chimerapy/chimera.py b/chimerapy/chimera.py
index 8d1a828..c3885cd 100644
--- a/chimerapy/chimera.py
+++ b/chimerapy/chimera.py
@@ -1,518 +1,256 @@
-""" """
-
-import glob
-import sys
+import copy
 
 import astropy.units as u
-import cv2
-import mahotas
-import matplotlib.pyplot as plt
 import numpy as np
-import scipy.interpolate
+import sunpy
 import sunpy.map
-from astropy import wcs
-from astropy.io import fits
-from astropy.modeling.models import Gaussian2D
-from skimage.util import img_as_ubyte
+from astropy.units import UnitsError
+from astropy.visualization import make_lupton_rgb
+from matplotlib import pyplot as plt
+from matplotlib.colors import LogNorm
+from matplotlib.path import Path
+from scipy.optimize import minimize
+from sunpy.map import Map, all_coordinates_from_map
+
+m171 = Map("https://jsoc1.stanford.edu/data/aia/synoptic/2016/10/31/H0200/AIA20161031_0232_0171.fits")
+m193 = Map("https://jsoc1.stanford.edu/data/aia/synoptic/2016/10/31/H0200/AIA20161031_0232_0193.fits")
+m211 = Map("https://jsoc1.stanford.edu/data/aia/synoptic/2016/10/31/H0200/AIA20161031_0232_0211.fits")
+
+# From old IDL code
+threshold_171v193 = 0.6357
+threshold_171v211 = 0.7
+threshold_193v211 = 1.5102
+
+"""I tried a new method of calculating the slopes in which I made a path out of the bin edges and a second path
+for the segmentation line to identify the bins that the line passed through. I then added up the total counts and
+made a function to vary and optimize the slope by minimizing the total counts. I don't think my optimization
+section works correctly, so maybe you could take a look?"""
+
+
+def find_edges(wave1: int, wave2: int):
+    map1 = Map(
+        "https://jsoc1.stanford.edu/data/aia/synoptic/2016/10/31/H0200/AIA20161031_0232_0"
+        + str(wave1)
+        + ".fits"
+    )
+    map2 = Map(
+        "https://jsoc1.stanford.edu/data/aia/synoptic/2016/10/31/H0200/AIA20161031_0232_0"
+        + str(wave2)
+        + ".fits"
+    )
+    # Since the data are taken at similar times, neglect any coordinate changes and just use the 171 map's coordinates
+    coords = all_coordinates_from_map(map1)
+    disk_mask = (coords.Tx**2 + coords.Ty**2) ** 0.5 < map1.rsun_obs
+
+    map1 = map1 / map1.exposure_time
+    map2 = map2 / map2.exposure_time
 
-def chimera_legacy():
-    file_path = "./"
 
-    im171 = glob.glob(file_path + "*171*.fts.gz")
-    im193 = glob.glob(file_path + "*193*.fts.gz")
-    im211 = glob.glob(file_path + "*211*.fts.gz")
-    imhmi = glob.glob(file_path + "*hmi*.fts.gz")
+    counts, xedge, yedge = np.histogram2d(
+        map1.data[disk_mask].flatten(),
+        map2.data[disk_mask].flatten(),
+        bins=250,
+        range=[[0, 300], [0, 300]],
+        density=True,
+    )
 
-    circ, data, datb, datc, dattoarc, hedb, iarr, props, rs, slate, center, xgrid, ygrid = chimera(
-        im171, im193, im211, imhmi
+    xedges = np.array(xedge)
+    yedges = np.array(yedge)
+    return counts, xedges, yedges
+
+
+cool_hist, cool_xedges, cool_yedges = find_edges(171, 193)
+warm_hist, warm_xedges, warm_yedges = find_edges(171, 211)
+cool_threshold = threshold_171v193
+warm_threshold = threshold_171v211
+
+
+def bins_intersected(xedges: np.ndarray, yedges: np.ndarray, slope: float, threshold: float, hist: np.ndarray):
+    x = np.linspace(0, 300, 500)
+    y = slope * (x**threshold)
+    line_path = Path(list(zip(x, y)))
+    counts = 0
+    for i in range(len(xedges) - 1):
+        for j in range(len(yedges) - 1):
+            bin_corners = [
+                (xedges[i], yedges[j]),
+                (xedges[i + 1], yedges[j]),
+                (xedges[i + 1], yedges[j + 1]),
+                (xedges[i], yedges[j + 1]),
+            ]
+            bin_path = Path(bin_corners)
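+            # Path.intersects_path tests whether the segmentation curve crosses
+            # this bin's rectangle; each crossed bin contributes its density to
+            # the total being minimized. (Review note: np.histogram2d returns
+            # counts indexed [x, y], so hist[i, j] may be what is intended below.)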
+            if line_path.intersects_path(bin_path):
+                counts += hist[j, i]
+    return counts
+
+
+def objective_function(slope: float, xedges: np.ndarray, yedges: np.ndarray, threshold: float, hist: np.ndarray):
+    return bins_intersected(xedges, yedges, slope, threshold, hist)
+
+
+initial_guess_cool = 2.25
+initial_guess_warm = 0.5
+bounds_cool = [(2, 2.5)]
+bounds_warm = [(0.3, 0.75)]
+cool_result = minimize(
+    objective_function,
+    x0=initial_guess_cool,
+    args=(cool_xedges, cool_yedges, cool_threshold, cool_hist),
+    method="Powell",
+    bounds=bounds_cool,
+)
+warm_result = minimize(
+    objective_function,
+    x0=initial_guess_warm,
+    args=(warm_xedges, warm_yedges, warm_threshold, warm_hist),
+    method="Powell",
+    bounds=bounds_warm,
+)
+
+
+cool_optimal_slope = cool_result.x[0]
+cool_minimized_counts = cool_result.fun
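+# Note: the two cool_* values above were originally read from warm_result,
+# which silently discarded the cool fit; cool_result is presumably what was
+# intended, and the warm fit can be kept as well (names below illustrative).
+warm_optimal_slope = warm_result.x[0]
+warm_minimized_counts = warm_result.fun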
heda["cdelt2"] / 4.0, - heda["crpix1"] * 4.0, - heda["crpix2"] * 4.0, - ) - hedb["cdelt1"], hedb["cdelt2"], hedb["crpix1"], hedb["crpix2"] = ( - hedb["cdelt1"] / 4.0, - hedb["cdelt2"] / 4.0, - hedb["crpix1"] * 4.0, - hedb["crpix2"] * 4.0, - ) - hedc["cdelt1"], hedc["cdelt2"], hedc["crpix1"], hedc["crpix2"] = ( - hedc["cdelt1"] / 4.0, - hedc["cdelt2"] / 4.0, - hedc["crpix1"] * 4.0, - hedc["crpix2"] * 4.0, - ) - dattoarc = heda["cdelt1"] - convermul = dattoarc / hedm["cdelt1"] - # =====Alternative coordinate systems===== - hdul = fits.open(im171[0]) - hdul[0].header["CUNIT1"] = "arcsec" - hdul[0].header["CUNIT2"] = "arcsec" - aia = sunpy.map.Map(hdul[0].data, hdul[0].header) - adj = 4096.0 / aia.dimensions[0].value - x, y = (np.meshgrid(*[np.arange(adj * v.value) for v in aia.dimensions]) * u.pixel) / adj - hpc = aia.pixel_to_world(x, y) - hg = hpc.transform_to(sunpy.coordinates.frames.HeliographicStonyhurst) - csys = wcs.WCS(hedb) - # =======setting up arrays to be used============ - ident = 1 - iarr = np.zeros((s[0], s[1]), dtype=np.byte) - offarr, slate = np.array(iarr), np.array(iarr) - bmcool = np.zeros((s[0], s[1]), dtype=np.float32) - cand, bmmix, bmhot = np.array(bmcool), np.array(bmcool), np.array(bmcool) - circ = np.zeros((s[0], s[1]), dtype=int) - # =======creation of a 2d gaussian for magnetic cut offs=========== - r = (s[1] / 2.0) - 450 - xgrid, ygrid = np.meshgrid(np.arange(s[0]), np.arange(s[1])) - center = [int(s[1] / 2.0), int(s[1] / 2.0)] - w = np.where((xgrid - center[0]) ** 2 + (ygrid - center[1]) ** 2 > r**2) - y, x = np.mgrid[0:4096, 0:4096] - garr = Gaussian2D(1, s[0] / 2, s[1] / 2, 2000 / 2.3548, 2000 / 2.3548)(x, y) - garr[w] = 1.0 - # ======creation of array for CH properties========== - props = np.zeros((26, 30), dtype="", - "", - "", - "BMAX", - "BMIN", - "TOT_B+", - "TOT_B-", - "", - "", - "", + # 171 v 193 + cool_counts, *cool_bins = axes["cool_hist"].hist2d( + m171.data[disk_mask].flatten(), + m193.data[disk_mask].flatten(), + bins=60, + range=[[0, 300], [0, 300]], + norm=LogNorm(), + density=True, ) - props[:, 1] = ( - "num", - '"', - '"', - "H deg", - '"', - '"', - '"', - '"', - '"', - '"', - '"', - '"', - "H deg", - "deg", - "Mm^2", - "%", - "G", - "G", - "G", - "G", - "G", - "G", - "G", - "Mx", - "Mx", - "Mx", + + axes["cool_hist"].set_facecolor("k") + axes["cool_hist"].plot(xx, scale_cold * (xx**threshold_171v193), "w") + + # 171 v 211 + warm_counts, *warm_bins = axes["warm_hist"].hist2d( + m171.data[disk_mask].flatten(), + m211.data[disk_mask].flatten(), + bins=250, + range=[[0, 300], [0, 300]], + norm=LogNorm(), + density=True, ) - # =====removes negative data values===== - data[np.where(data <= 0)] = 0 - datb[np.where(datb <= 0)] = 0 - datc[np.where(datc <= 0)] = 0 - # ============make a multi-wavelength image for contours================== - with np.errstate(divide="ignore"): - t0 = np.log10(datc) - t1 = np.log10(datb) - t2 = np.log10(data) - t0[np.where(t0 < 0.8)] = 0.8 - t0[np.where(t0 > 2.7)] = 2.7 - t1[np.where(t1 < 1.4)] = 1.4 - t1[np.where(t1 > 3.0)] = 3.0 - t2[np.where(t2 < 1.2)] = 1.2 - t2[np.where(t2 > 3.9)] = 3.9 - t0 = np.array(((t0 - 0.8) / (2.7 - 0.8)) * 255, dtype=np.float32) - t1 = np.array(((t1 - 1.4) / (3.0 - 1.4)) * 255, dtype=np.float32) - t2 = np.array(((t2 - 1.2) / (3.9 - 1.2)) * 255, dtype=np.float32) - # ====create 3 segmented bitmasks===== - with np.errstate(divide="ignore", invalid="ignore"): - bmmix[np.where(t2 / t0 >= ((np.mean(data) * 0.6357) / (np.mean(datc))))] = 1 - bmhot[np.where(t0 + t1 < (0.7 * (np.mean(datb) + 
-        bmcool[np.where(t2 / t1 >= ((np.mean(data) * 1.5102) / (np.mean(datb))))] = 1
-    # ====logical conjunction of 3 segmentations=======
-    cand = bmcool * bmmix * bmhot
-    # ====plot tricolour image with lon/lat conotours=======
-    # ======removes off detector mis-identifications==========
-    r = (s[1] / 2.0) - 100
-    w = np.where((xgrid - center[0]) ** 2 + (ygrid - center[1]) ** 2 <= r**2)
-    circ[w] = 1.0
-    cand = cand * circ
-    # =======Separates on-disk and off-limb CHs===============
-    circ[:] = 0
-    r = (rs / dattoarc) - 10
-    w = np.where((xgrid - center[0]) ** 2 + (ygrid - center[1]) ** 2 <= r**2)
-    circ[w] = 1.0
-    r = (rs / dattoarc) + 40
-    w = np.where((xgrid - center[0]) ** 2 + (ygrid - center[1]) ** 2 >= r**2)
-    circ[w] = 1.0
-    cand = cand * circ
-    # ====open file for property storage=====
-    # =====contours the identified datapoints=======
-    cand = np.array(cand, dtype=np.uint8)
-    cont, heir = cv2.findContours(cand, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
-    # ======sorts contours by size============
-    sizes = []
-    for i in range(len(cont)):
-        sizes = np.append(sizes, len(cont[i]))
-    reord = sizes.ravel().argsort()[::-1]
-    tmp = list(cont)
-    for i in range(len(cont)):
-        tmp[i] = cont[reord[i]]
-    cont = list(tmp)
-    # =====cycles through contours=========
-    for i in range(len(cont)):
-        x = np.append(x, len(cont[i]))
-
-        # =====only takes values of minimum surface length and calculates area======
-
-        if len(cont[i]) <= 100:
-            continue
-        area = 0.5 * np.abs(
-            np.dot(cont[i][:, 0, 0], np.roll(cont[i][:, 0, 1], 1))
-            - np.dot(cont[i][:, 0, 1], np.roll(cont[i][:, 0, 0], 1))
-        )
-        arcar = area * (dattoarc**2)
-        if arcar > 1000:
-            # =====finds centroid=======
-
-            # chpts = len(cont[i])
-            cent = [np.mean(cont[i][:, 0, 0]), np.mean(cont[i][:, 0, 1])]
-
-            # ===remove quiet sun regions encompassed by coronal holes======
-
-            if (
-                cand[
-                    np.max(cont[i][:, 0, 0]) + 1,
-                    cont[i][np.where(cont[i][:, 0, 0] == np.max(cont[i][:, 0, 0]))[0][0], 0, 1],
-                ]
-                > 0
-            ) and (
-                iarr[
-                    np.max(cont[i][:, 0, 0]) + 1,
-                    cont[i][np.where(cont[i][:, 0, 0] == np.max(cont[i][:, 0, 0]))[0][0], 0, 1],
-                ]
-                > 0
-            ):
-                mahotas.polygon.fill_polygon(np.array(list(zip(cont[i][:, 0, 1], cont[i][:, 0, 0]))), slate)
-                iarr[np.where(slate == 1)] = 0
-                slate[:] = 0
-
-            else:
-                # ====create a simple centre point======
-
-                arccent = csys.all_pix2world(cent[0], cent[1], 0)
-
-                # ====classifies off limb CH regions========
-
-                if (((arccent[0] ** 2) + (arccent[1] ** 2)) > (rs**2)) or (
-                    np.sum(np.array(csys.all_pix2world(cont[i][0, 0, 0], cont[i][0, 0, 1], 0)) ** 2) > (rs**2)
-                ):
-                    mahotas.polygon.fill_polygon(
-                        np.array(list(zip(cont[i][:, 0, 1], cont[i][:, 0, 0]))), offarr
-                    )
-                else:
-                    # =====classifies on disk coronal holes=======
-
-                    mahotas.polygon.fill_polygon(
-                        np.array(list(zip(cont[i][:, 0, 1], cont[i][:, 0, 0]))), slate
-                    )
-                    poslin = np.where(slate == 1)
-                    slate[:] = 0
-
-                    # ====create an array for magnetic polarity========
-
-                    pos = np.zeros((len(poslin[0]), 2), dtype=np.uint)
-                    pos[:, 0] = np.array((poslin[0] - (s[0] / 2)) * convermul + (s[1] / 2), dtype=np.uint)
-                    pos[:, 1] = np.array((poslin[1] - (s[0] / 2)) * convermul + (s[1] / 2), dtype=np.uint)
-                    npix = list(
-                        np.histogram(
-                            datm[pos[:, 0], pos[:, 1]],
-                            bins=np.arange(
-                                np.round(np.min(datm[pos[:, 0], pos[:, 1]])) - 0.5,
-                                np.round(np.max(datm[pos[:, 0], pos[:, 1]])) + 0.6,
-                                1,
-                            ),
-                        )
-                    )
-                    npix[0][np.where(npix[0] == 0)] = 1
-                    npix[1] = npix[1][:-1] + 0.5
-
-                    wh1 = np.where(npix[1] > 0)
-                    wh2 = np.where(npix[1] < 0)
-
-                    # =====magnetic cut offs dependent on area=========
-                    if (
-                        np.absolute((np.sum(npix[0][wh1]) - np.sum(npix[0][wh2])) / np.sqrt(np.sum(npix[0])))
-                        <= 10
-                        and arcar < 9000
-                    ):
-                        continue
-                    if (
-                        np.absolute(np.mean(datm[pos[:, 0], pos[:, 1]])) < garr[int(cent[0]), int(cent[1])]
-                        and arcar < 40000
-                    ):
-                        continue
-                    iarr[poslin] = ident
-
-                    # ====create an accurate center point=======
-
-                    ypos = np.sum((poslin[0]) * np.absolute(hg.lat[poslin])) / np.sum(
-                        np.absolute(hg.lat[poslin])
-                    )
-                    xpos = np.sum((poslin[1]) * np.absolute(hg.lon[poslin])) / np.sum(
-                        np.absolute(hg.lon[poslin])
-                    )
-
-                    arccent = csys.all_pix2world(xpos, ypos, 0)
-
-                    # ======calculate average angle coronal hole is subjected to======
-
-                    dist = np.sqrt((arccent[0] ** 2) + (arccent[1] ** 2))
-                    ang = np.arcsin(dist / rs)
-
-                    # =====calculate area of CH with minimal projection effects======
-
-                    trupixar = abs(area / np.cos(ang))
-                    truarcar = trupixar * (dattoarc**2)
-                    trummar = truarcar * ((6.96e08 / rs) ** 2)
-
-                    # ====find CH extent in latitude and longitude========
-
-                    # maxxlat = hg.lat[
-                    #     cont[i][np.where(cont[i][:, 0, 0] == np.max(cont[i][:, 0, 0]))[0][0], 0, 1],
-                    #     np.max(cont[i][:, 0, 0]),
-                    # ]
-                    maxxlon = hg.lon[
-                        cont[i][np.where(cont[i][:, 0, 0] == np.max(cont[i][:, 0, 0]))[0][0], 0, 1],
-                        np.max(cont[i][:, 0, 0]),
-                    ]
-                    # maxylat = hg.lat[
-                    #     np.max(cont[i][:, 0, 1]),
-                    #     cont[i][np.where(cont[i][:, 0, 1] == np.max(cont[i][:, 0, 1]))[0][0], 0, 0],
-                    # ]
-                    # maxylon = hg.lon[
-                    #     np.max(cont[i][:, 0, 1]),
-                    #     cont[i][np.where(cont[i][:, 0, 1] == np.max(cont[i][:, 0, 1]))[0][0], 0, 0],
-                    # ]
-                    # minxlat = hg.lat[
-                    #     cont[i][np.where(cont[i][:, 0, 0] == np.min(cont[i][:, 0, 0]))[0][0], 0, 1],
-                    #     np.min(cont[i][:, 0, 0]),
-                    # ]
-                    minxlon = hg.lon[
-                        cont[i][np.where(cont[i][:, 0, 0] == np.min(cont[i][:, 0, 0]))[0][0], 0, 1],
-                        np.min(cont[i][:, 0, 0]),
-                    ]
-                    # minylat = hg.lat[
-                    #     np.min(cont[i][:, 0, 1]),
-                    #     cont[i][np.where(cont[i][:, 0, 1] == np.min(cont[i][:, 0, 1]))[0][0], 0, 0],
-                    # ]
-                    # minylon = hg.lon[
-                    #     np.min(cont[i][:, 0, 1]),
-                    #     cont[i][np.where(cont[i][:, 0, 1] == np.min(cont[i][:, 0, 1]))[0][0], 0, 0],
-                    # ]
-
-                    # =====CH centroid in lat/lon=======
-
-                    centlat = hg.lat[int(ypos), int(xpos)]
-                    centlon = hg.lon[int(ypos), int(xpos)]
-
-                    # ====calculate the mean magnetic field=====
-
-                    mB = np.mean(datm[pos[:, 0], pos[:, 1]])
-                    mBpos = np.sum(npix[0][wh1] * npix[1][wh1]) / np.sum(npix[0][wh1])
-                    mBneg = np.sum(npix[0][wh2] * npix[1][wh2]) / np.sum(npix[0][wh2])
-
-                    # =====finds coordinates of CH boundaries=======
-
-                    Ywb, Xwb = csys.all_pix2world(
-                        cont[i][np.where(cont[i][:, 0, 0] == np.max(cont[i][:, 0, 0]))[0][0], 0, 1],
-                        np.max(cont[i][:, 0, 0]),
-                        0,
-                    )
-                    Yeb, Xeb = csys.all_pix2world(
-                        cont[i][np.where(cont[i][:, 0, 0] == np.min(cont[i][:, 0, 0]))[0][0], 0, 1],
-                        np.min(cont[i][:, 0, 0]),
-                        0,
-                    )
-                    Ynb, Xnb = csys.all_pix2world(
-                        np.max(cont[i][:, 0, 1]),
-                        cont[i][np.where(cont[i][:, 0, 1] == np.max(cont[i][:, 0, 1]))[0][0], 0, 0],
-                        0,
-                    )
-                    Ysb, Xsb = csys.all_pix2world(
-                        np.min(cont[i][:, 0, 1]),
-                        cont[i][np.where(cont[i][:, 0, 1] == np.min(cont[i][:, 0, 1]))[0][0], 0, 0],
-                        0,
-                    )
-
-                    width = round(maxxlon.value) - round(minxlon.value)
-
-                    if minxlon.value >= 0.0:
-                        eastl = "W" + str(int(np.round(minxlon.value)))
-                    else:
-                        eastl = "E" + str(np.absolute(int(np.round(minxlon.value))))
-                    if maxxlon.value >= 0.0:
-                        westl = "W" + str(int(np.round(maxxlon.value)))
-                    else:
-                        westl = "E" + str(np.absolute(int(np.round(maxxlon.value))))
-
-                    if centlat >= 0.0:
-                        centlat = "N" + str(int(np.round(centlat.value)))
-                    else:
-                        centlat = "S" + str(np.absolute(int(np.round(centlat.value))))
-                    if centlon >= 0.0:
-                        centlon = "W" + str(int(np.round(centlon.value)))
-                    else:
-                        centlon = "E" + str(np.absolute(int(np.round(centlon.value))))
-
-                    # ====insertions of CH properties into property array=====
-
-                    props[0, ident + 1] = str(ident)
-                    props[1, ident + 1] = str(np.round(arccent[0]))
-                    props[2, ident + 1] = str(np.round(arccent[1]))
-                    props[3, ident + 1] = str(centlon + centlat)
-                    props[4, ident + 1] = str(np.round(Xeb))
-                    props[5, ident + 1] = str(np.round(Yeb))
-                    props[6, ident + 1] = str(np.round(Xwb))
-                    props[7, ident + 1] = str(np.round(Ywb))
-                    props[8, ident + 1] = str(np.round(Xnb))
-                    props[9, ident + 1] = str(np.round(Ynb))
-                    props[10, ident + 1] = str(np.round(Xsb))
-                    props[11, ident + 1] = str(np.round(Ysb))
-                    props[12, ident + 1] = str(eastl + "-" + westl)
-                    props[13, ident + 1] = str(width)
-                    props[14, ident + 1] = f"{trummar / 1e+12:.1e}"
-                    props[15, ident + 1] = str(np.round((arcar * 100 / (np.pi * (rs**2))), 1))
-                    props[16, ident + 1] = str(np.round(mB, 1))
-                    props[17, ident + 1] = str(np.round(mBpos, 1))
-                    props[18, ident + 1] = str(np.round(mBneg, 1))
-                    props[19, ident + 1] = str(np.round(np.max(npix[1]), 1))
-                    props[20, ident + 1] = str(np.round(np.min(npix[1]), 1))
-                    tbpos = np.sum(datm[pos[:, 0], pos[:, 1]][np.where(datm[pos[:, 0], pos[:, 1]] > 0)])
-                    props[21, ident + 1] = f"{tbpos:.1e}"
-                    tbneg = np.sum(datm[pos[:, 0], pos[:, 1]][np.where(datm[pos[:, 0], pos[:, 1]] < 0)])
-                    props[22, ident + 1] = f"{tbneg:.1e}"
-                    props[23, ident + 1] = f"{mB * trummar * 1e+16:.1e}"
-                    props[24, ident + 1] = f"{mBpos * trummar * 1e+16:.1e}"
-                    props[25, ident + 1] = f"{mBneg * trummar * 1e+16:.1e}"
-
-                    # =====sets up code for next possible coronal hole=====
-
-                    ident = ident + 1
-    return circ, data, datb, datc, dattoarc, hedb, iarr, props, rs, slate, center, xgrid, ygrid
-
-
-def rescale01(arr, cmin=None, cmax=None, a=0, b=1):
-    if cmin or cmax:
-        arr = np.clip(arr, cmin, cmax)
-    return (b - a) * ((arr - np.min(arr)) / (np.max(arr) - np.min(arr))) + a
-
-
-def plot_tricolor(data, datb, datc, xgrid, ygrid, slate):
-    tricolorarray = np.zeros((4096, 4096, 3))
-
-    data_a = img_as_ubyte(rescale01(np.log10(data), cmin=1.2, cmax=3.9))
-    data_b = img_as_ubyte(rescale01(np.log10(datb), cmin=1.4, cmax=3.0))
-    data_c = img_as_ubyte(rescale01(np.log10(datc), cmin=0.8, cmax=2.7))
-
-    tricolorarray[..., 0] = data_c / np.max(data_c)
-    tricolorarray[..., 1] = data_b / np.max(data_b)
-    tricolorarray[..., 2] = data_a / np.max(data_a)
-
-    fig, ax = plt.subplots(figsize=(10, 10))
-
-    plt.imshow(tricolorarray, origin="lower")  # , extent = )
-    plt.contour(xgrid, ygrid, slate, colors="white", linewidths=0.5)
-    plt.savefig("tricolor.png")
-    plt.close()
-
-
-def plot_mask(slate, iarr, circ, rs, dattoarc, center, xgrid, ygrid, hedb):
-    chs = np.where(iarr > 0)
-    slate[chs] = 1
-    slate = np.array(slate, dtype=np.uint8)
-    # cont, heir = cv2.findContours(slate, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
-
-    circ[:] = 0
-    r = rs / dattoarc
-    w = np.where((xgrid - center[0]) ** 2 + (ygrid - center[1]) ** 2 <= r**2)
-    circ[w] = 1.0
-
-    plt.figure(figsize=(10, 10))
-    plt.xlim(143, 4014)
-    plt.ylim(143, 4014)
-    plt.scatter(chs[1], chs[0], marker="s", s=0.0205, c="black", cmap="viridis", edgecolor="none", alpha=0.2)
-    plt.gca().set_aspect("equal", adjustable="box")
-    plt.axis("off")
-    plt.contour(xgrid, ygrid, slate, colors="black", linewidths=0.5)
-    plt.contour(xgrid, ygrid, circ, colors="black", linewidths=1.0)
-
-    plt.savefig("CH_mask_" + hedb["DATE"] + ".png", transparent=True)
-    plt.close()
+    # Finding the indices of nonzero counts
+    non_zero_warm = np.where(warm_counts > 0)
+
+    # Finding the corresponding minimum y-index
+    min_y_index = np.min(non_zero_warm[1])
+
+    # Map the index to the actual y-value
+    min_y_warm = warm_bins[1][min_y_index]
+    axes["warm_hist"].set_ylim(0, 100)
+    axes["warm_hist"].set_facecolor("k")
+    axes["warm_hist"].plot(xx, (scale_warm * (xx**threshold_171v211)) + min_y_warm, "w")
+
+    # 193 v 211
+    hot_counts, *hot_bins = axes["hot_hist"].hist2d(
+        m193.data[disk_mask].flatten(),
+        m211.data[disk_mask].flatten(),
+        bins=250,
+        range=[[0, 300], [0, 300]],
+        norm=LogNorm(),
+        density=True,
+    )
+    # Finding the indices of nonzero counts
+    non_zero_hot = np.where(hot_counts > 0)
+
+    # Finding the corresponding minimum y-index
+    min_y_index = np.min(non_zero_hot[1])
+
+    # Map the index to the actual y-value
+    min_y_hot = hot_bins[1][min_y_index]
+
+    axes["hot_hist"].set_ylim(0, 100)
+    axes["hot_hist"].set_facecolor("k")
+    axes["hot_hist"].plot(xx, scale_hot * (xx**-threshold_193v211) + min_y_hot, "w")
+
+    plt.show()
+
+
+segmenting_plots(2.025, 0.5, 2600)
+
+
+def clip_scale(map1: sunpy.map.Map, clip1: float, clip2: float, clip3: float, scale: float):
+    map_clipped = np.clip(np.log10(map1.data), clip1, clip3)
+    map_clipped_scaled = ((map_clipped - clip1) / clip2) * scale
+    return map_clipped_scaled
+
+
+cs_171 = clip_scale(m171, 1.2, 2.7, 3.9, 255)
+cs_193 = clip_scale(m193, 1.4, 1.6, 3.0, 255)
+cs_211 = clip_scale(m211, 0.8, 1.9, 2.7, 255)
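+# Note: in each call above clip2 equals the clip window width (clip3 - clip1),
+# so every channel is mapped from [clip1, clip3] in log10 space onto [0, scale];
+# e.g. for 171: 3.9 - 1.2 = 2.7.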
+
+
+def create_mask(scale1: float, scale2: float, scale3: float):
+    mask1 = (cs_171 / cs_211) >= (np.mean(m171.data) * scale1) / np.mean(m211.data)
+    mask2 = (cs_211 + cs_193) < (scale2 * (np.mean(m193.data) + np.mean(m211.data)))
+    mask3 = (cs_171 / cs_193) >= ((np.mean(m171.data) * scale3) / np.mean(m193.data))
+    return mask1, mask2, mask3
+
+
+mask211_171, mask211_193, mask171_193 = create_mask(0.6357, 0.7, 1.5102)
+
+tri_color_img = make_lupton_rgb(cs_171, cs_193, cs_211, Q=10, stretch=50)
+comb_mask = mask211_171 * mask211_193 * mask171_193
+
+
+fig, axes = plt.subplot_mosaic([["tri", "comb_mask"]], layout="constrained", figsize=(6, 3))
+
+axes["tri"].imshow(tri_color_img, origin="lower")
+axes["comb_mask"].imshow(comb_mask, origin="lower")
+
+plt.show()
+
+
+def create_contours(mask1: np.ndarray, mask2: np.ndarray, mask3: np.ndarray):
+    mask_map = Map(((mask1 * mask2 * mask3).astype(int), m171.meta))
+    try:
+        contours = mask_map.contour(0.5 / u.s)
+    except UnitsError:
+        contours = mask_map.contour(50 * u.percent)
+
+    contours = sorted(contours, key=lambda x: x.size, reverse=True)
+
+    fig, axes = plt.subplot_mosaic(
+        [["seg"]], layout="constrained", figsize=(6, 3), subplot_kw={"projection": m171}
+    )
+    m171.plot(axes=axes["seg"])
+    axes["seg"].imshow(tri_color_img)
+
+    # For the moment just plot the top 5 contours based on "size" of the contour
+    for contour in contours[:5]:
+        axes["seg"].plot_coord(contour, color="w", linewidth=0.5)
+    plt.show()
+
+
+create_contours(mask211_171, mask211_193, mask171_193)
diff --git a/chimerapy/chimera_copy.py b/chimerapy/chimera_copy.py
new file mode 100644
index 0000000..c3885cd
--- /dev/null
+++ b/chimerapy/chimera_copy.py
@@ -0,0 +1,256 @@
+import copy
+
+import astropy.units as u
+import numpy as np
+import sunpy
+import sunpy.map
+from astropy.units import UnitsError
+from astropy.visualization import make_lupton_rgb
+from matplotlib import pyplot as plt
+from matplotlib.colors import LogNorm
+from matplotlib.path import Path
+from scipy.optimize import minimize
+from sunpy.map import Map, all_coordinates_from_map
+
+m171 = Map("https://jsoc1.stanford.edu/data/aia/synoptic/2016/10/31/H0200/AIA20161031_0232_0171.fits")
+m193 = Map("https://jsoc1.stanford.edu/data/aia/synoptic/2016/10/31/H0200/AIA20161031_0232_0193.fits")
+m211 = Map("https://jsoc1.stanford.edu/data/aia/synoptic/2016/10/31/H0200/AIA20161031_0232_0211.fits")
+
+# From old IDL code
+threshold_171v193 = 0.6357
+threshold_171v211 = 0.7
+threshold_193v211 = 1.5102
+
+"""I tried a new method of calculating the slopes in which I made a path out of the bin edges and a second path
+for the segmentation line to identify the bins that the line passed through. I then added up the total counts and
+made a function to vary and optimize the slope by minimizing the total counts. I don't think my optimization
+section works correctly, so maybe you could take a look?"""
+
+
+def find_edges(wave1: int, wave2: int):
+    map1 = Map(
+        "https://jsoc1.stanford.edu/data/aia/synoptic/2016/10/31/H0200/AIA20161031_0232_0"
+        + str(wave1)
+        + ".fits"
+    )
+    map2 = Map(
+        "https://jsoc1.stanford.edu/data/aia/synoptic/2016/10/31/H0200/AIA20161031_0232_0"
+        + str(wave2)
+        + ".fits"
+    )
+    # Since the data are taken at similar times, neglect any coordinate changes and just use the 171 map's coordinates
+    coords = all_coordinates_from_map(map1)
+    disk_mask = (coords.Tx**2 + coords.Ty**2) ** 0.5 < map1.rsun_obs
+
+    map1 = map1 / map1.exposure_time
+    map2 = map2 / map2.exposure_time
+
+    counts, xedge, yedge = np.histogram2d(
+        map1.data[disk_mask].flatten(),
+        map2.data[disk_mask].flatten(),
+        bins=250,
+        range=[[0, 300], [0, 300]],
+        density=True,
+    )
+
+    xedges = np.array(xedge)
+    yedges = np.array(yedge)
+    return counts, xedges, yedges
+
+
+cool_hist, cool_xedges, cool_yedges = find_edges(171, 193)
+warm_hist, warm_xedges, warm_yedges = find_edges(171, 211)
+cool_threshold = threshold_171v193
+warm_threshold = threshold_171v211
+
+
+def bins_intersected(xedges: np.ndarray, yedges: np.ndarray, slope: float, threshold: float, hist: np.ndarray):
+    x = np.linspace(0, 300, 500)
+    y = slope * (x**threshold)
+    line_path = Path(list(zip(x, y)))
+    counts = 0
+    for i in range(len(xedges) - 1):
+        for j in range(len(yedges) - 1):
+            bin_corners = [
+                (xedges[i], yedges[j]),
+                (xedges[i + 1], yedges[j]),
+                (xedges[i + 1], yedges[j + 1]),
+                (xedges[i], yedges[j + 1]),
+            ]
+            bin_path = Path(bin_corners)
+            if line_path.intersects_path(bin_path):
+                counts += hist[j, i]
+    return counts
+
+
+def objective_function(slope: float, xedges: np.ndarray, yedges: np.ndarray, threshold: float, hist: np.ndarray):
+    return bins_intersected(xedges, yedges, slope, threshold, hist)
+
+
+initial_guess_cool = 2.25
+initial_guess_warm = 0.5
+bounds_cool = [(2, 2.5)]
+bounds_warm = [(0.3, 0.75)]
+cool_result = minimize(
+    objective_function,
+    x0=initial_guess_cool,
+    args=(cool_xedges, cool_yedges, cool_threshold, cool_hist),
+    method="Powell",
+    bounds=bounds_cool,
+)
+warm_result = minimize(
+    objective_function,
+    x0=initial_guess_warm,
+    args=(warm_xedges, warm_yedges, warm_threshold, warm_hist),
+    method="Powell",
+    bounds=bounds_warm,
+)
+
+
+cool_optimal_slope = cool_result.x[0]
+cool_minimized_counts = cool_result.fun
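+# As in chimera.py above, these two assignments originally read warm_result;
+# cool_result is presumably what was intended, and the warm fit can be kept
+# too (illustrative names):
+warm_optimal_slope = warm_result.x[0]
+warm_minimized_counts = warm_result.fun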
Map("https://jsoc1.stanford.edu/data/aia/synoptic/2016/10/31/H0200/AIA20161031_0232_0193.fits") + m211 = Map("https://jsoc1.stanford.edu/data/aia/synoptic/2016/10/31/H0200/AIA20161031_0232_0211.fits") + + m171_orig = copy.deepcopy(m171) + m193_orig = copy.deepcopy(m193) + m211_orig = copy.deepcopy(m211) + + # Since the data are taken at similar times neglect any coordinate changes so just use 171 maps coordinates + coords = all_coordinates_from_map(m171) + disk_mask = (coords.Tx**2 + coords.Ty**2) ** 0.5 < m171.rsun_obs + + m171 = m171 / m171.exposure_time + m193 = m193 / m193.exposure_time + m211 = m211 / m211.exposure_time + + xx = np.linspace(0, 300, 500) + + fig, axes = plt.subplot_mosaic( + [["cool_hist"], ["warm_hist"], ["hot_hist"]], + layout="constrained", + figsize=(3, 5), + ) + + # 171 v 193 + cool_counts, *cool_bins = axes["cool_hist"].hist2d( + m171.data[disk_mask].flatten(), + m193.data[disk_mask].flatten(), + bins=60, + range=[[0, 300], [0, 300]], + norm=LogNorm(), + density=True, + ) + + axes["cool_hist"].set_facecolor("k") + axes["cool_hist"].plot(xx, scale_cold * (xx**threshold_171v193), "w") + + # 171 v 211 + warm_counts, *warm_bins = axes["warm_hist"].hist2d( + m171.data[disk_mask].flatten(), + m211.data[disk_mask].flatten(), + bins=250, + range=[[0, 300], [0, 300]], + norm=LogNorm(), + density=True, + ) + # Finding the indices of nonzero counts + non_zero_warm = np.where(warm_counts > 0) + + # Finding the corresponding minimum y-index + min_y_index = np.min(non_zero_warm[1]) + + # Map the index to the actual y-value + min_y_warm = warm_bins[1][min_y_index] + axes["warm_hist"].set_ylim(0, 100) + axes["warm_hist"].set_facecolor("k") + axes["warm_hist"].plot(xx, (scale_warm * (xx**threshold_171v211)) + min_y_warm, "w") + + # 193 v 311 + hot_counts, *hot_bins = axes["hot_hist"].hist2d( + m193.data[disk_mask].flatten(), + m211.data[disk_mask].flatten(), + bins=250, + range=[[0, 300], [0, 300]], + norm=LogNorm(), + density=True, + ) + # Finding the indices of nonzero counts + non_zero_hot = np.where(hot_counts > 0) + + # Finding the corresponding minimum y-index + min_y_index = np.min(non_zero_hot[1]) + + # Map the index to the actual y-value + min_y_hot = hot_bins[1][min_y_index] + + axes["hot_hist"].set_ylim(0, 100) + axes["hot_hist"].set_facecolor("k") + axes["hot_hist"].plot(xx, scale_hot * (xx**-threshold_193v211) + min_y_hot, "w") + + plt.show() + + +segmenting_plots(2.025, 0.5, 2600) + + +def clip_scale(map1: sunpy.map, clip1: float, clip2: float, clip3: float, scale: float): + map_clipped = np.clip(np.log10(map1.data), clip1, clip3) + map_clipped_scaled = ((map_clipped - clip1) / clip2) * scale + return map_clipped_scaled + + +cs_171 = clip_scale(m171, 1.2, 2.7, 3.9, 255) +cs_193 = clip_scale(m193, 1.4, 1.6, 3.0, 255) +cs_211 = clip_scale(m211, 0.8, 1.9, 2.7, 255) + + +def create_mask(scale1: float, scale2: float, scale3: float): + mask1 = (cs_171 / cs_211) >= (np.mean(m171.data) * scale1) / np.mean(m211.data) + mask2 = (cs_211 + cs_193) < (scale2 * (np.mean(m193.data) + np.mean(m211.data))) + mask3 = (cs_171 / cs_193) >= ((np.mean(m171.data) * scale3) / np.mean(m193.data)) + return mask1, mask2, mask3 + + +mask211_171, mask211_193, mask171_193 = create_mask(0.6357, 0.7, 1.5102) + +tri_color_img = make_lupton_rgb(cs_171, cs_193, cs_211, Q=10, stretch=50) +comb_mask = mask211_171 * mask211_193 * mask171_193 + + +fig, axes = plt.subplot_mosaic([["tri", "comb_mask"]], layout="constrained", figsize=(6, 3)) + +axes["tri"].imshow(tri_color_img, origin="lower") 
+axes["comb_mask"].imshow(comb_mask, origin="lower") + +plt.show() + + +def create_contours(mask1: np.ndarray, mask2: np.ndarray, mask3: np.ndarray): + mask_map = Map(((mask1 * mask2 * mask3).astype(int), m171.meta)) + try: + contours = mask_map.contour(0.5 / u.s) + except UnitsError: + contours = mask_map.contour(50 * u.percent) + + contours = sorted(contours, key=lambda x: x.size, reverse=True) + + fig, axes = plt.subplot_mosaic( + [["seg"]], layout="constrained", figsize=(6, 3), subplot_kw={"projection": m171} + ) + m171.plot(axes=axes["seg"]) + axes["seg"].imshow(tri_color_img) + + # For the moment just plot to top 5 contours based on "size" for contour + for contour in contours[:6]: + axes["seg"].plot_coord(contour, color="w", linewidth=0.5) + plt.show() + + +create_contours(mask211_171, mask211_193, mask171_193)