diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..40c3437
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,11 @@
+skysolve_setup.pdf
+skysolve_setup.odt
+static/radec.txt
+static/skysolve.log
+static/obs.log
+nohup.out
+debug/*
+static/cap*.*
+static/current.jpg
+none
+
diff --git a/README.md b/README.md
index 3218ffd..32fe643 100755
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@ See video for a quick introduction.  https://youtu.be/IewMli4AJLw
 stl files for the case and mount can be found on Thingiverse at https://www.thingiverse.com/thing:4920959 and https://www.thingiverse.com/thing:5594916
 Uses RaspberryPi and plate solving to take images of the night sky and identify the location of the image.
- It uses a Raspberry PI 4 with the RPI High Quality camera and can send the solved position of were the camera is looking to a computer running SkySafari. When mounted to a telescope and aligned to where the scope is pointing it can then be used to guide the manual pushing of the telesopce to the desired target without using any encoders on the telescope. It communicates with SkySafari over WIFI so that no hard wired connections are needed to the computer running SkySafari. It continually takes images and solves them about every 1 to 10 seconds so that Skysafari can always show where the scope is pointing.
+ It uses a Raspberry Pi 4B (4 GB) with the RPI High Quality camera and can send the solved position of where the camera is looking to a computer running SkySafari. When mounted to a telescope and aligned to where the scope is pointing, it can then be used to guide the manual pushing of the telescope to the desired target without using any encoders on the telescope. It communicates with SkySafari over WiFi so that no hard-wired connections are needed to the computer running SkySafari. It continually takes images and solves them about every 1 to 10 seconds so that SkySafari can always show where the scope is pointing.
 Below is a screen shot of the application's browser interface showing an image of Ursa Major in the lower left. The status field displays the names of stars it found in the image.
@@ -46,9 +46,35 @@ If the "show stars button is dimmed out" you can enable it by pressing the "Solv
 ## Install
 When you are ready, you can follow these steps to install it on the Raspberry pi:
-Note: Do not change the default username on the pi from pi to anything else. There is code that depends upon there being a using named lower case "pi".
+>[!NOTE]
+>Do not change the default username on the pi from pi to anything else. There is code that depends upon there being a user named lower case "pi".
+
+>[!IMPORTANT]
+>There are two versions of the code.
+>1. ***Legacy*** - Works only on RPI models 4 and earlier and only with the 32-bit legacy OS.
+>2. ***New camera library*** - Works with the newer 64-bit RPI operating systems and all RPI models.
+
+### Process to use the ***New camera library*** version
+1. First you need to get the Raspberry Pi software installed on an SD card. The easiest way to do this is to use the RaspberryPi imager
+   from [Raspberry Pi org](https://www.raspberrypi.org/software/). Select the RPI model you have and then select the most recent 64-bit version of the operating system, usually the first in the list.
+
+   Download this software onto a computer that can write SD cards. I use a Windows laptop.
+2. Follow steps 2 through 5 in the ***Legacy*** version below, but ignore the instruction to set the resolution to 1280 x 720.
+3. When running the setup script you will be asked to set up the hotspot. That is where you can change the WiFi network name that the hotspot will use. The default name is AccessPopup. You can change it to something like skypi004. Make the number different so it is not the same as others at a star party.
+
+4. Using either your command line connection to the RPI or VNC, make a directory for skysolve, then download the skysolve app from the GIT repo to your RPI. Open a Terminal window and type or copy and paste the following commands into it:
+```bash
+sudo mkdir skysolve
+sudo chmod 777 skysolve
+cd skysolve
+sudo wget https://github.com/githubdoe/skysolve/archive/newCamLib.tar.gz
+sudo tar -xzvf newCamLib.tar.gz --strip-components=1
+```
+5. Continue at step 7 of the ***Legacy*** instructions.
+
+
+### Process to use the older RPI ***legacy*** system (for new installations use the previous instructions instead)
 1. First you need to get the Raspberry Pi software installed on an SD card. The easiest way to do this is to use the RaspberryPi imager
    from [Raspberry Pi org](https://www.raspberrypi.org/software/). Note that the software to download is not the most recent version but an older 32-bit version. It can be found under Raspberry Pi OS (other) then Raspberry Pi OS (Legacy)
@@ -79,8 +105,12 @@ Note: Do not change the default username on the pi from pi to anything else. Th
 * Once logged into the RPI then type sudo raspi-config.
 * Select 3. Interface Options then P3 VNC and enable it. Then select yes and then OK. Now VNC should be enabled.
 * Choose display options and select the resolution to be 1280 x 720
- * Optionally enable the Raspberrypi camera now. From Interface Options select the enable camera option. Then reboot.
- Back on the windows PC start the VNC viewer and connect to the IP address of the Raspberry PI.
+ * Optionally enable the Raspberrypi camera now. From Interface Options select the enable camera option.
+ * UPDATE: November 2023:
+   There is another step needed while still in raspi-config: set all localization options to your locale.
+
+   Then reboot.
+   Back on the Windows PC start the VNC viewer and connect to the IP address of the Raspberry PI.
diff --git a/__pycache__/camera_pi.cpython-311.pyc b/__pycache__/camera_pi.cpython-311.pyc
new file mode 100644
index 0000000..f28465a
Binary files /dev/null and b/__pycache__/camera_pi.cpython-311.pyc differ
diff --git a/__pycache__/tetra3.cpython-311.pyc b/__pycache__/tetra3.cpython-311.pyc
new file mode 100644
index 0000000..5ea144e
Binary files /dev/null and b/__pycache__/tetra3.cpython-311.pyc differ
diff --git a/camera_pi.py b/camera_pi.py
index e49c976..832c9e9 100755
--- a/camera_pi.py
+++ b/camera_pi.py
@@ -1,18 +1,27 @@
-#pi camera to be used with web based GUI plate solver
+#pi camera to be used with web based GUI plate solver. For RPI5 and newer OSes, using the picamera2 library.
+"""This is the camera interface to the PI camera.
+Picamera2 is the library that talks to the camera, and this module sets up
+and uses that library. It configures the camera so that it is continuously
+taking images at the requested specs. When a client requests a frame it gets
+it from the camera, which has already buffered the last frame, so all that is
+needed is to request the current frame from the camera and signal the client
+that it is ready.
+""" +from picamera2 import Picamera2, Preview +from libcamera import ColorSpace +import time import io import time +import pprint from tkinter import EXCEPTION -import picamera + from fractions import Fraction import threading -try: - from greenlet import getcurrent as get_ident -except ImportError: - try: - from thread import get_ident - except ImportError: - from _thread import get_ident +from _thread import get_ident + +from picamera2 import Picamera2 + class imageEvent(object): """An Event-like class that signals all active clients when a new frame is @@ -71,7 +80,6 @@ class skyCamera(): camera = None event = imageEvent() thread = None # background thread that reads frames from camera - gainThread = None frame = None # current frame is stored here by background thread shutter = 1000000 ISO=800 @@ -80,20 +88,64 @@ class skyCamera(): runMode = True cameraStopped = True format = 'jpeg' - def __init__(self, skystatus, shutter=1000000, ISO=800, resolution=(2000,1500), format = 'jpeg'): - print("skystatus", skystatus) + def __init__(self, skystatus, shutter=.9, ISO=800, resolution=(2000,1500), format = 'jpeg'): + print("skystatus", skystatus, shutter) + self.skyStatus = skystatus - self.camera = picamera.PiCamera() - self.camera.resolution = (2000,1500) - self.camera.framerate = Fraction(1,6) - self.shutter=shutter + + self.camera = Picamera2() + + capture_config = self.camera.create_still_configuration(main={'size':resolution, 'format': 'RGB888'}) + #self.config = capture_config = self.camera.create_still_configuration(colour_space=ColorSpace.Sycc(),main={'size':resolution}) + self.camera.configure(capture_config) + #pprint.pprint(self.camera.sensor_modes) + #self.camera.set_controls({"ExposureTime": 100, "AnalogueGain": ISO/100, "AwbEnable": True}) + self.camera.controls.ExposureTime = int(1000000 * shutter) + self.camera.controls.AnalogueGain = ISO/100 + self.camera.exposure_mode = 'off' + self.camera.controls.AeEnable = False + try: + pass + #self.camera.controls.AwbEnable = False + except: + pass + self.camera.start() + time.sleep(1) + #self.camera.controls.ExposureTime = int(100000 * shutter) + + + + #print('config',self.config) + + # self.camera.preview_configuration.main.size = resolution + # self.camera.preview_configuration.main.align() + + #self.camera.awb_gains = (1.8,1.8) + + + + + #self.camera.controls.ExposureTime = int(100000 * shutter) + #self.camera.controls.AnalogueGain = ISO/100. + #print("shutter", self.camera.controls.ExposureTime, shutter) + # self.camera.configure(self.config) + + #print("\n\nproperities",self.camera.camera_properties) + + + #print('config after start', self.camera.stream_configuration("main")) + + + #self.camera.resolution = resolution + #self.camera.framerate = Fraction(1,6) + print('controlssssss',self.camera.camera_controls) self.ISO=ISO self.resolution=resolution self.format = format - self.setupGain() + #self.setupGain() self.count = 0 - self.thread = threading.Thread(target=self._thread) - self.thread.start() + #self.thread = threading.Thread(target=self._thread) + #self.thread.start() @@ -109,21 +161,21 @@ def resume(self): def setISO(self, iso): self.ISO = iso - self.setupGain() + self.setupGain(iso/100.) 
def status(self): return [self.ISO, self.camera.shutter_speed, self.resolution] def setResolution(self, size): - self.resolution = size - self.runMode = False - print ("waiting for camera to stop to set resolution",flush=True) - while not self.cameraStopped: - time.sleep(.2) - res = tuple(map(int, size.split('x'))) - print ("setting resolution", res, self.camera.resolution, flush=True) - self.camera.resolution = res - self.runMode = True + self.camera.stop() + res = [int(x) for x in size.split('x')] + capture_config = self.camera.create_still_configuration(main={'size':res, 'format': 'RGB888'}) + #self.config = capture_config = self.camera.create_still_configuration(colour_space=ColorSpace.Sycc(),main={'size':resolution}) + self.camera.configure(capture_config) + + self.camera.start() + time.sleep(1) + def setFormat(self,type): self.format = type @@ -133,101 +185,109 @@ def setFormat(self,type): self.runMode = True def setShutter(self, value): - self.camera.shutter_speed = value self.shutter = value - self.setupGain() + self.camera.controls.ExposureTime = int(1000000 * value) print ("new shutter speed", value, flush=True) - def setupGain(self): - #make sure previous instance gain thread is not already running - if self.gainThread: - self.gainThread.join() - self.gainThread = threading.Thread(target=self.setGainThread) - self.gainThread.start() - - def setGainThread(self): - if self.camera: - self.runMode = False - - print ("cam setup called",flush=True) - selfrunMode = False - self.skyStatus(4," stopping camera to apply changes") - while not self.cameraStopped: - pass + def setupGain(self, value): + with self.camera.controls as controls: + controls.AnalogueGain = value - self.camera.resolution = self.resolution - print ('camera framesize at init', self.resolution, flush=True) - self.camera.iso = self.ISO - self.camera.exposure_mode = 'auto' - self.camera.framerate = Fraction(1,6) - self.camera.shutter_speed = self.shutter - time.sleep(10) - self.camera.exposure_mode = 'off' - self.runMode = True - self.abortMOde = False + def get_frame(self): """Return the current camera frame.""" - # wait for a signal from the camera thread - if not self.event.wait(tt=40.): - print("camera image event wait timed out", flush=True) - return None - self.event.clear() - return self.frame - - # the thread that gets images - def _thread(self): - """Camera background thread.""" - while not self.runMode: - time.sleep(.5) - while True: - try: - self.count = 0 - print('camera thread. 
LOOP started', flush = True) - frames_iterator = self.getImage() - for frame in frames_iterator: - self.frame = frame - self.event.set() # send signal to clients - time.sleep(0) - except picamera.PiCameraError as e: - print("camera thread caught e",e, flush=True) - - self.thread = None - # import picamera.array - - # with picamera.PiCamera() as camera: - # with picamera.array.PiBayerArray(camera) as output: - # camera.capture(output, 'jpeg', bayer=True) - def getImage(self): - - stream = io.BytesIO() - print ("abortMOde", self.abortMode) - while not self.abortMode: - while not self.runMode: - if self.abortMode: - break - pass - if self.abortMode: + return self.camera.capture_array("main") + + + +if __name__ == '__main__': + + def delayedStatus(delay, status): + print("status",status) + from pprint import * + import cv2 + class dcam(): + def __init__(self,cam): + self.camera = cam + """ cam = dcam( Picamera2()) + ISO = 800 + + capture_config = cam.camera.create_still_configuration(main={'size':(600,600), 'format': 'RGB888'}) + cam.camera.configure(capture_config) + cam.camera.set_controls({ "AnalogueGain": ISO/100, "AwbEnable": True}) + + cam.camera.controls.AnalogueGain = 1.0 + cam.camera.start() + time.sleep(1) """ + + + #quit() + #shutter = 100000 + #cam.camera.controls.ExposureTime = shutter + #print("xxxx",cam.camera.controls.ExposureTime) + cam = skyCamera( delayedStatus, shutter=.1, ISO=800, resolution=(800,900), format = 'jpeg') + #print ("size size",cam.camera.stream_configuration("main")) + size = cam.camera.capture_metadata()['ScalerCrop'][2:] + print('meta',cam.camera.capture_metadata()) + full_res = cam.camera.camera_properties['PixelArraySize'] + offset = [0,0] + + while True: + im = cam.camera.capture_array('main') + + cv2.imshow("Camera", im) + ch = cv2.waitKey(1) + if ch != -1: + print ("ch", ch) + if ch == ord('q'): + print("Quitting") break - print ("picamera started", flush=True) - self.skyStatus(0," camera started") - try: - for _ in self.camera.capture_continuous(stream, self.format, - use_video_port=False): - - # return current frame - stream.seek(0) - yield stream.read() - if not self.runMode: - self.cameraStopped = True - print ("camera stopped",flush=True) - break - self.cameraStopped = False - - # reset stream for next frame - stream.seek(0) - stream.truncate() - except picamera.PiCameraRuntimeError as e: - print("Getimage caught e", e,flush=True) - raise e + elif ch == ord('z'): + size = [int(s * 0.95) for s in size] + offset = [(r - s) // 2 for r, s in zip(full_res, size)] + print ('zoomed',offset,size, offset+size) + cam.camera.set_controls({"ScalerCrop": offset + size}) + + elif ch == ord('x'): + size = [int(s * (1./0.95)) for s in size] + offset = [(r - s) // 2 for r, s in zip(full_res, size)] + cam.camera.set_controls({"ScalerCrop": offset + size}) + print ('zoomednnn',offset,size, offset+size) + elif ch == 81: #<-- + size = [int(s) for s in size] + offset[0] = offset[0] + int(size[0]/25) + cam.camera.set_controls({"ScalerCrop": offset + size}) + print("offset",offset) + elif ch == 83: # -> + + offset[0] = offset[0] - int(size[0]/25) + if offset[0] < 0: + offset[0] = 0 + cam.camera.set_controls({"ScalerCrop": offset + size}) + print("offset",offset) + elif ch == ord('a'): + cam.camera.set_controls({"ScalerCrop": offset + size}) + elif ch == ord('b'): #Brigher + shutter = int(shutter * 1.25) + print('shutter',shutter/100000) + cam.camera.controls.ExposureTime = shutter + + elif ch == ord('d'): #Brigher + shutter = int(shutter / 1.25) + 
cam.camera.controls.ExposureTime = shutter + + else: + + print("shape", im.shape) + print ("cam controls",cam.camera.camera_controls) + print("main config raw",cam.camera.camera_configuration()['raw']) + print("main config main",cam.camera.camera_configuration()['main']) + cam.camera.controls.ExposureTime = 10000 + print("xxxx",cam.camera.controls.ExposureTime) + + + #print("properities",picam.camera_properties) + #pprint(picam.camera_controls) + diff --git a/images/hand_1.2.1.jpg b/images/hand_1.2.1.jpg old mode 100644 new mode 100755 diff --git a/skyConfig.json b/skyConfig.json index ef1d73d..f81c6e1 100755 --- a/skyConfig.json +++ b/skyConfig.json @@ -1,194 +1,219 @@ { "camera": { - "shutter": ".9", + "shutter": ".1", "ISO": "800", - "frame": "800x600", + "frame": "1280x960", "format": "jpeg" }, "solver": { - "currentProfile": "25FL", - "startupSolveing": false + "currentProfile": "default", + "startupSolveing": true }, "observing": { "saveImages": "on", "showSolution": false, "savePosition": true, - "verbose": true, + "verbose": false, "obsDelta": 0.5 }, "solverProfiles": { "default": { "name": "default", "solver_type": "solverAstromet", - "maxTime": 30.0, - "solveSigma": 9, - "solveDepth": 10, + "maxTime": 10.0, + "solveSigma": 7, + "solveDepth": 20, "UselastDelta": false, - "FieldWidthMode": "FieldWidthModeOther", + "FieldWidthMode": "FieldWidthModeField", "FieldWidthModeaPP": "checked", "FieldWidthModeField": "", "FieldWidthModeOther": "", - "aPPLoValue": "64", - "aPPHiValue": "65", + "aPPLoValue": "40", + "aPPHiValue": "41", "fieldLoValue": "10", - "fieldHiValue": "25", - "searchRadius": 10.0, - "solveVerbose": true, + "fieldHiValue": "30", + "searchRadius": 20.0, + "solveVerbose": false, "showStars": false, "plots": true, - "additionalParms": "" + "additionalParms": "", + "verbose": false }, - "25FL": { - "name": "25FL", + "OrionShield": { + "name": "OrionShield", "solver_type": "solverAstromet", - "maxTime": 10.0, - "solveSigma": 5, + "maxTime": 30.0, + "solveSigma": 9, "solveDepth": 20, "UselastDelta": false, - "FieldWidthMode": "FieldWidthModeaPP", + "FieldWidthMode": "FieldWidthModeField", "FieldWidthModeaPP": "checked", "FieldWidthModeField": "", "FieldWidthModeOther": "", - "aPPLoValue": "25", - "aPPHiValue": "66", + "aPPLoValue": "27", + "aPPHiValue": "", "fieldLoValue": "13", - "fieldHiValue": "16", + "fieldHiValue": "15", "searchRadius": 10.0, "solveVerbose": false, "showStars": false, - "plots": false, - "additionalParms": "--tag-all", - "verbose": true + "plots": true, + "additionalParms": "--guess-scale", + "verbose": false }, - "16FL": { - "name": "16FL", + "aquarius": { + "name": "aquarius", "solver_type": "solverAstromet", - "maxTime": 300.0, - "solveSigma": 6, - "solveDepth": 20, + "maxTime": 10.0, + "solveSigma": 9, + "solveDepth": 10, "UselastDelta": false, - "FieldWidthMode": "FieldWidthModeaPP", + "FieldWidthMode": "FieldWidthModeField", "FieldWidthModeaPP": "checked", "FieldWidthModeField": "", "FieldWidthModeOther": "", "aPPLoValue": "40", - "aPPHiValue": "45", - "fieldLoValue": "", - "fieldHiValue": "", - "searchRadius": 0.0, - "solveVerbose": true, + "aPPHiValue": "42", + "fieldLoValue": "14", + "fieldHiValue": "15", + "searchRadius": 10.0, + "solveVerbose": false, "showStars": false, "plots": true, - "additionalParms": "--guess-scale" + "additionalParms": "", + "verbose": false }, - "Tetra3": { - "name": "Tetra3", - "solver_type": "solverTetra3", + "coronaB": { + "name": "coronaB", + "solver_type": "solverAstromet", "maxTime": 10.0, "solveSigma": 9, - 
"solveDepth": 20, + "solveDepth": 9, "UselastDelta": false, - "FieldWidthMode": "FieldWidthModeOther", - "FieldWidthModeaPP": "", + "FieldWidthMode": "FieldWidthModeField", + "FieldWidthModeaPP": "checked", "FieldWidthModeField": "", "FieldWidthModeOther": "", - "aPPLoValue": "", - "aPPHiValue": "", - "fieldLoValue": "14", - "fieldHiValue": "15", - "searchRadius": 0.0, + "aPPLoValue": "30", + "aPPHiValue": "80", + "fieldLoValue": "20", + "fieldHiValue": "23", + "searchRadius": 10.0, "solveVerbose": false, "showStars": false, "plots": true, "additionalParms": "", - "verbose": true + "verbose": false }, - "OrionShield": { - "name": "OrionShield", + "IMX 25 fl": { + "name": "IMX 25 fl", "solver_type": "solverAstromet", "maxTime": 30.0, - "solveSigma": 9, + "solveSigma": 5, + "solveDepth": 20, + "UselastDelta": false, + "FieldWidthMode": "FieldWidthModeField", + "FieldWidthModeaPP": "checked", + "FieldWidthModeField": "", + "FieldWidthModeOther": "", + "aPPLoValue": "30", + "aPPHiValue": "40", + "fieldLoValue": "12", + "fieldHiValue": "18", + "searchRadius": 45.0, + "solveVerbose": false, + "showStars": false, + "plots": true, + "additionalParms": "--tag-all ", + "verbose": false + }, + "50-60 app": { + "name": "50-60 app", + "solver_type": "solverAstromet", + "maxTime": 3.0, + "solveSigma": 5, "solveDepth": 20, "UselastDelta": false, "FieldWidthMode": "FieldWidthModeaPP", "FieldWidthModeaPP": "checked", "FieldWidthModeField": "", "FieldWidthModeOther": "", - "aPPLoValue": "27", - "aPPHiValue": "", - "fieldLoValue": "0", - "fieldHiValue": "0", - "searchRadius": 10.0, - "solveVerbose": true, + "aPPLoValue": "50", + "aPPHiValue": "65", + "fieldLoValue": "10", + "fieldHiValue": "25", + "searchRadius": 45.0, + "solveVerbose": false, "showStars": false, "plots": true, - "additionalParms": "--guess-scale" + "additionalParms": "", + "verbose": false }, - "aquarius": { - "name": "aquarius", + "62 arcsec per pixel": { + "name": "62 arcsec per pixel", "solver_type": "solverAstromet", - "maxTime": 10.0, - "solveSigma": 9, - "solveDepth": 10, + "maxTime": 5.0, + "solveSigma": 7, + "solveDepth": 20, "UselastDelta": false, - "FieldWidthMode": "FieldWidthModeField", + "FieldWidthMode": "FieldWidthModeaPP", "FieldWidthModeaPP": "checked", "FieldWidthModeField": "", "FieldWidthModeOther": "", - "aPPLoValue": "40", - "aPPHiValue": "42", - "fieldLoValue": "14", - "fieldHiValue": "15", - "searchRadius": 10.0, + "aPPLoValue": "60", + "aPPHiValue": "63", + "fieldLoValue": "10", + "fieldHiValue": "25", + "searchRadius": 45.0, "solveVerbose": false, "showStars": false, "plots": true, "additionalParms": "", - "verbose": true + "verbose": false }, - "bigdipper": { - "name": "bigdipper", + "FOV 17": { + "name": "FOV 17", "solver_type": "solverAstromet", "maxTime": 10.0, - "solveSigma": 9, - "solveDepth": 10, + "solveSigma": 7, + "solveDepth": 20, "UselastDelta": false, "FieldWidthMode": "FieldWidthModeField", "FieldWidthModeaPP": "checked", "FieldWidthModeField": "", "FieldWidthModeOther": "", - "aPPLoValue": "40", - "aPPHiValue": "41", - "fieldLoValue": "22", - "fieldHiValue": "23", - "searchRadius": 10.0, + "aPPLoValue": "25", + "aPPHiValue": "82", + "fieldLoValue": "16", + "fieldHiValue": "18", + "searchRadius": 45.0, "solveVerbose": false, "showStars": false, "plots": true, "additionalParms": "", - "verbose": true + "verbose": false }, - "coronaB": { - "name": "coronaB", + "Software Guess the FOV": { + "name": "Software Guess the FOV", "solver_type": "solverAstromet", - "maxTime": 10.0, - "solveSigma": 9, - 
"solveDepth": 9, + "maxTime": 20.0, + "solveSigma": 7, + "solveDepth": 20, "UselastDelta": false, - "FieldWidthMode": "FieldWidthModeField", + "FieldWidthMode": "FieldWidthModeOther", "FieldWidthModeaPP": "checked", "FieldWidthModeField": "", "FieldWidthModeOther": "", - "aPPLoValue": "30", - "aPPHiValue": "80", - "fieldLoValue": "22", - "fieldHiValue": "23", + "aPPLoValue": "40", + "aPPHiValue": "41", + "fieldLoValue": "10", + "fieldHiValue": "30", "searchRadius": 10.0, "solveVerbose": false, "showStars": false, "plots": true, "additionalParms": "", - "verbose": true + "verbose": false } } } \ No newline at end of file diff --git a/skysolve.py b/skysolve.py index 3d94925..4c58a5c 100755 --- a/skysolve.py +++ b/skysolve.py @@ -1,7 +1,9 @@ from subprocess import call from zipfile import ZipFile -from attr import get_run_validators from flask import Flask, render_template, request, Response, send_file, send_from_directory +from flask import stream_with_context + + import time from flask.wrappers import Request import pprint @@ -23,21 +25,23 @@ import getpass import copy import sys -from tetra3 import Tetra3 -import RPi.GPIO as GPIO # Import Raspberry Pi GPIO library +import re +import traceback +print('PYTHONPath is',sys.path) +#import cedar_detect_client +#import cedar_detect_pb2, cedar_detect_pb2_grpc +import RPi.GPIO as GPIO # Import Raspberry Pi GPIO library +import cv2 import glob - -import matplotlib.pyplot as plt -from scipy import stats import logging - +from collections import deque +from time import perf_counter as precision_timestamp debugLastState = 'startup' -solveReadyForImage = False solveImage = None -GUIReadyForImage = False + GUIImage = None print("argssss", sys.argv, len(sys.argv)) @@ -45,18 +49,20 @@ GPIO.setmode(GPIO.BOARD) GPIO.setup(7, GPIO.IN, pull_up_down=GPIO.PUD_UP) initGPIO7 = GPIO.input(7) +if ( not initGPIO7): + try: + os.system('sudo accesspopup -a') + except Exception as e: + print('could not switch to access point') + print("gpio", initGPIO7) try: os.system('systemctl restart encodertoSkySafari.service ') -except BaseException as e: +except Exception as e: print("did not start encoder", e ) usedIndexes = {} -# Create instance and load default_database (built with max_fov=12 and the rest as default) -t3 = None -if t3 == None: - t3 = Tetra3('default_database') class Mode(Enum): @@ -70,33 +76,16 @@ class Mode(Enum): AUTOPLAYBACK = auto() -class LimitedLengthList(list): - def __init__(self, seq=(), length=math.inf): - self.length = length - - if len(seq) > length: - raise ValueError("Argument seq has too many items") - - super(LimitedLengthList, self).__init__(seq) - - def append(self, item): - if len(self) < self.length: - super(LimitedLengthList, self).append(item) - else: - super(LimitedLengthList, self).__init__( - super(LimitedLengthList, self)[self.length/2:]) - super(LimitedLengthList, self).append(item) app = 30 solving = False maxTime = 50 searchEnable = False -searchRadius = 90 -solveLog = LimitedLengthList(length=1000) -ra = 0 -dec = 0 +solveLog = deque(maxlen=1000) +ra = 0 #last solved +dec = 0 #last solved solveStatus = '' computedPPa = '' @@ -114,8 +103,10 @@ def append(self, item): solve_path = os.path.join(root_path, 'static') +capPath = os.path.join(solve_path, 'cap.jpeg') +guiPath = os.path.join(solve_path, 'gui.jpeg') history_path = os.path.join(solve_path, 'history') -doDebug = True +doDebug = False logging.basicConfig(filename=os.path.join(solve_path,'skysolve.log'),\ format='%(levelname)s %(asctime)s 
%(message)s',filemode='w',level=logging.WARNING)
 
 if not os.path.exists(history_path):
@@ -124,8 +115,6 @@ def append(self, item):
 test_path = '/home/pi/pyPlateSolve/data'
 solveThisImage = ''
-solveCurrent = False
-triggerSolutionDisplay = False
 
 saveObs = False
 obsList = []
@@ -155,20 +144,44 @@ def append(self, item):
 import builtins
 class ListStream:
     def write(self, s):
-        global doDebug
-        if doDebug:
-            logging.warning(s)
+        # debug output is simply discarded now
+        pass
 
     def flush(self):
         pass
+
+# turn the power LED off; it is used below to signal solve results
+os.system("echo '0' | sudo tee /sys/class/leds/PWR/brightness >>/dev/null")
+flashrequest = 0
+def flashled(t):
+    os.system("echo '1' | sudo tee /sys/class/leds/PWR/brightness >>/dev/null")
+    time.sleep(t)
+    os.system("echo '0' | sudo tee /sys/class/leds/PWR/brightness >>/dev/null")
+
+for i in range(10):
+    flashled(.1)
+    time.sleep(.1)
+
+def flasher():
+    global flashrequest
+    while True:
+        if flashrequest == 1:    # single blink: solved
+            flashled(.1)
+            flashrequest = 0
+        elif flashrequest == 2:  # double blink: solve failed
+            flashled(.1)
+            flashrequest = 0
+            time.sleep(.2)
+            flashled(.1)
+        time.sleep(.1)           # idle briefly so this thread does not spin at 100% CPU
+flasherThread = threading.Thread(target = flasher)
+flasherThread.start()
 
 def saveConfig():
     with open('skyConfig.json', 'w') as f:
         json.dump(skyConfig, f, indent=4)
-
 # saveConfig()
 print('cwd', os.getcwd())
@@ -176,17 +189,15 @@ def saveConfig():
 with open(os.path.join(root_path, 'skyConfig.json')) as f:
     skyConfig = json.load(f)
-print(json.dumps(skyConfig['solverProfiles']['25FL'], indent=4))
-print(json.dumps(skyConfig['observing'], indent=4))
-print(json.dumps(skyConfig['camera'], indent=4) )
-imageName = 'cap.'+skyConfig['camera']['format']
+imageName = 'cap.'+skyConfig['camera']['format']
+capPath = os.path.join(solve_path, imageName)
 #print (skyConfig)
 skyCam = None
 skyStatusText = ''
 verboseSolveText = ''
-
+enableSoldisplay = False
 
 def delayedStatus(delay, status):
     global skyStatusText
@@ -197,12 +208,12 @@ def delayedStatus(delay, status):
 def setupCamera():
     global skyCam, cameraNotPresent, state, skyStatusText
     if not skyCam:
-        print('creating cam')
+
         try:
            skyCam = skyCamera(delayedStatus, shutter=
                float(skyConfig['camera']['shutter']),
                format='RGB888',
                resolution= [int(x) for x in skyConfig['camera']['frame'].split('x')])
            cameraNotPresent = False
            if skyConfig['solver']['startupSolveing']:
                print("startup in solving")
@@ -217,21 +228,15 @@ def setupCamera():
         cameraNotPresent = True
         skyStatusText = 'camera not connected or enabled. Demo mode and replay mode will still work however.'
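
A side note on a change made in this patch: solveLog becomes a collections.deque(maxlen=1000) instead of the old LimitedLengthList. A small sketch of the two deque behaviors the code depends on, automatic eviction of the oldest entries and draining with popleft() as the /solveLog route does:

```python
from collections import deque

log = deque(maxlen=3)            # bounded log; oldest entries are evicted automatically
for line in ('a\n', 'b\n', 'c\n', 'd\n'):
    log.append(line)
print(list(log))                 # ['b\n', 'c\n', 'd\n'], 'a\n' fell off the front

text = ''
while log:                       # drain pattern used by the /solveLog route
    text += log.popleft()
print(text, end='')              # prints b, c, d on separate lines
```
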
-def frameGrabberThread(): - global solveReadyForImage, GUIReadyForImage, solveImage, GUIImage - while(True): - frame = skyCam.get_frame() - if solveReadyForImage: - print('getting solve image') - solveImage = copy.deepcopy(frame) - solveReadyForImage = False - if GUIReadyForImage: - print('getting gui image') - GUIImage = copy.deepcopy(frame) - GUIReadyForImage = False +def getImage(): + global skyCam + frame = skyCam.camera.capture_array('main') + return frame + + setupCamera() -frameGrabberT = threading.Thread(target=frameGrabberThread) -frameGrabberT.start() +#frameGrabberT = threading.Thread(target=frameGrabberThread) +#frameGrabberT.start() framecnt = 0 @@ -252,32 +257,6 @@ def delayedStatus(delay,status): -def solveWatchDog(): - global lastsolveTime, state, skyStatusText, justStarted, framecnt, camera_Died,\ - doDebug, debugLastState,lastpictureTime - lastmode = -1 - if doDebug: - logging.warning("watch dog started %s", doDebug) - logging.warning("state %s",state) - while (True): - time.sleep(5) - if doDebug: - if lastmode != state: - if doDebug: - logging.warning("watchdog last state %s",state) - logging.warning("debug last state, %s",debugLastState) - lastmode = state - if state is Mode.SOLVING: - now = datetime.now() - duration = now - lastpictureTime - if duration > 20 and doDebug: - logging.warning("watchdog says not solving within " + str(duration) + "seconds.") - if camera_Died: - state = Mode.PAUSED - skyStatusText = "camera seens to have stopped. Restarting" - os.system('./restartsky.sh') - time.sleep(10) - break def shutThread(): time.sleep(3) @@ -308,26 +287,23 @@ def switchWatcher(): # # this is responsible for getting images from the camera even in align mod def solveThread(): - global skyStatusText, focusStd, solveCurrent, state, skyCam, testNdx, camera_Died,\ + global skyStatusText, focusStd, state, skyCam, testNdx, camera_Died,\ solveLog, solveCompleted, debugLastState, lastpictureTime,\ - solveReadyForImage,solveImage,solveThisImage + solveImage,solveThisImage - # save the image to be solved in the file system and on the stack for the gen() routine to give to the client browser + # save the image to be solved in the file system for the gen() routine to give to the client browser def saveImage(frame): global doDebug + #print("saving image using frame", solve_path) + try: + cv2.imwrite(os.path.join(solve_path, 'cap.jpg'),frame) + return True - with open(os.path.join(solve_path, imageName), "wb") as f: - try: - f.write(frame) - - return True - except Exception as e: - - if doDebug: - logging.error(str(e)) - print(e) - solveLog.append(str(e) + '\n') - return False + except Exception as e: + + print(str(e)) + solveLog.append(str(e) + '\n') + return False def makeDeadImage(text): img = Image.new('RGB', (600, 200), color=(0, 0, 0)) @@ -354,28 +330,14 @@ def makeDeadImage(text): print('solving skyStatus', skyStatusText, solveThisImage) copyfile(solveThisImage, os.path.join(solve_path, imageName)) skyStatusText = 'Solving' - print("solving", solveThisImage) - if skyConfig['solverProfiles'][skyConfig['solver']['currentProfile']]['solver_type'] == 'solverTetra3': - verboseSolveText = "" - s = tetraSolve(os.path.join(solve_path, imageName)) - if s['RA'] != None: - ra = s['RA'] - dec = s['Dec'] - fov = s['FOV'] - dur = (s['T_solve']+s['T_extract'])/1000 - result = "RA:%6.3lf Dec:%6.3lf FOV:%6.3lf %6.3lf secs" % ( - ra/15, dec, fov, dur) - skyStatusText = result - else: - skyStatusText = str(s) - else: + #print("solving", solveThisImage) - if not solve(os.path.join(solve_path, 
imageName)) and skyConfig['solverProfiles'][skyConfig['solver']['currentProfile']]['searchRadius'] > 0: - skyStatusText = 'Failed. Retrying with no position hint.' - if doDebug: - logging.warning("did not solve first try") - # try again but this time since the previous failed it will not use a starting guess possition - solve(os.path.join(solve_path, imageName)) + if not solve(os.path.join(solve_path, imageName)) and skyConfig['solverProfiles'][skyConfig['solver']['currentProfile']]['searchRadius'] > 0: + skyStatusText = 'Failed. Retrying with no position hint.' + if doDebug: + logging.warning("did not solve first try") + # try again but this time since the previous failed it will not use a starting guess possition + solve(os.path.join(solve_path, imageName)) state = Mode.PLAYBACK solveCompleted = True @@ -383,25 +345,22 @@ def makeDeadImage(text): continue else: # live solving loop path - #print("getting image in solve" ) + if doDebug: + print("getting image in solve" ) if cameraNotPresent: continue try: debugLastState = "waiting for image" if doDebug: logging.warning("waiting for image frame") - solveReadyForImage = True - print("solvesetting ready") - while solveReadyForImage: - print('solve waiting for image') - time.sleep(1) - print('got image') - frame = solveImage + frame = getImage() + + except Exception as e: cameraTry += 1 - if doDebug: - logging.error("no image after timeout retry count %s",cameraTry) + + print("no image after timeout retry count %s",cameraTry) if cameraTry > 10: debugLastState = "no image after timeout" @@ -419,7 +378,7 @@ def makeDeadImage(text): if doDebug: logging.error("empty frame after 10 retries") debugLastState = "frame was none after 10 retries" - saveImage(makeDeadImage("camera died. Restarting")) + # saveImage(makeDeadImage("camera died. Restarting")) print("camera died\nRestarting" ) camera_Died = True continue @@ -429,9 +388,10 @@ def makeDeadImage(text): # if solving history one after the other in auto playback if (state is Mode.AUTOPLAYBACK): if testNdx == len(testFiles): - state = Mode.PLAYBACK - skyStatusText = "Complete." - continue + testNdx = 0 + #state = Mode.PLAYBACK + #skyStatusText = "Complete." 
+ #continue fn = testFiles[testNdx] solveThisImage = fn @@ -439,54 +399,24 @@ def makeDeadImage(text): solveLog.append(skyStatusText + '\n') testNdx += 1 - #print ("image", testNdx, fn) + print ("image in auto playback ", testNdx, fn) + + copyfile(fn, os.path.join(solve_path, imageName)) - with open(fn, 'rb') as infile: - frame = infile.read() - - saveImage(frame) #debug to fake camera image with a real star image #copyfile("static/history/11_01_22_23_47_15.jpeg", os.path.join(solve_path, imageName)) if state is Mode.SOLVING or state is Mode.AUTOPLAYBACK : - if skyConfig['solverProfiles'][skyConfig['solver']['currentProfile']]['solver_type'] == 'solverTetra3': - s = tetraSolve(os.path.join(solve_path, imageName)) - # print(str(s)) - if s['RA'] != None: - ra = s['RA'] - dec = s['Dec'] - fov = s['FOV'] - dur = (s['T_solve']+s['T_extract'])/1000 - result = "RA:%6.3lf Dec:%6.3lf FOV:%6.3lf %6.3lf secs" % ( - ra/15, dec, fov, dur) - skyStatusText = result - else: - skyStatusText = str(s) - else: - if state is Mode.SOLVING: - skyStatusText = "" - f = solve(os.path.join(solve_path, imageName)) - if f == False: - state = Mode.PLAYBACK - solveCompleted = True - - continue - - # else measaure contrast for focus bar - try: - img = Image.open(os.path.join(solve_path, imageName)).convert("L") - imgarr = np.array(img) - avg = imgarr.mean() - imax = imgarr.max() - if avg == 0: - avg = 1 - - contrast = (imax - avg)/avg + imagePath = os.path.join(solve_path, imageName) - focusStd = f"{contrast:.2f}" - - except Exception as e: - print(e) + if state is Mode.SOLVING: + skyStatusText = "" + cv2.imwrite(imagePath, frame) + f = solve(imagePath) + if f == False: + state = Mode.PLAYBACK + solveCompleted = True + continue solveT = None # print("config",skyConfig['solver']) @@ -495,20 +425,23 @@ def makeDeadImage(text): solveT = threading.Thread(target=solveThread) solveT.start() -#solveWatchDogTh = threading.Thread(target = solveWatchDog) -#solveWatchDogTh.start() - +solveAvg = 0. 
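
The rewritten solve() below pulls RA, Dec, and the pixel scale out of solve-field's output with re.split over capturing groups. Here is a runnable sketch of just that parsing step; the sample line is shaped the way the regex expects and is illustrative, not a verbatim solve-field log:

```python
import re

# Illustrative input in the shape the regex expects (not a verbatim solve-field log).
line = 'Field center: (RA,Dec) = (201.276335,-11.161532) deg; pixel scale 40.12 arcsec/pix\n'

pattern = re.compile(r'(-*[0-9]+\.*[0-9]*),(-*[0-9]+\.*[0-9]*).*pixel scale ([0-9]+\.[0-9]*)\s')
# re.split with capturing groups returns [prefix, ra, dec, scale, suffix]
numbers = pattern.split(line)[1:]
ra, dec, ppa = float(numbers[0]), float(numbers[1]), float(numbers[2])
print(ra, dec, ppa)  # 201.276335 -11.161532 40.12
```
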
+solveCnt = 0 def solve(fn, parms=[]): - global doDebug, debugLastState - print("solving" ) + global doDebug, debugLastState, skyStatusText, enableSoldisplay, solveAvg, solveCnt,\ + app, solving, maxTime, solveLog, ra, dec, searchEnable, solveStatus,\ + skyStatusText, lastObs, verboseSolveText, flashrequest + found = '' + if doDebug: + print("solving" ) logging.warning("solving function") debugLastState = 'solving' - global app, solving, maxTime, searchRaius, solveLog, ra, dec, searchEnable, solveStatus,\ - triggerSolutionDisplay, skyStatusText, lastObs, verboseSolveText + startTime = datetime.now() solving = True solved = '' + wroteWcsFile = False profile = skyConfig['solverProfiles'][skyConfig['solver'] ['currentProfile']] fieldwidthParm = '' @@ -541,18 +474,21 @@ def solve(fn, parms=[]): parms = parms + ['--ra', str(ra), '--dec', str(dec), '--radius', str(profile['searchRadius'])] #print('show stars', profile['showStars']) - if not profile['showStars']: + if enableSoldisplay == False: parms = parms + ['-p'] parms = parms + ["--uniformize", "0", "--no-remove-lines", "--new-fits", "none", "--pnm", "none", "--rdls", "none"] cmd = ["solve-field", fn, "--depth", str(profile['solveDepth']), "--sigma", str(profile['solveSigma']), '--overwrite'] + parms + #if doDebug: + #solveLog.append(' '.join(cmd)) + #print("\n\nsolving ", cmd) if skyConfig['observing']['verbose']: solveLog.append(' '.join(cmd) + '\n') p = subprocess.Popen(cmd, stdout=subprocess.PIPE) - solveLog.clear() + ppa = '' starNames = {} duration = None @@ -560,57 +496,72 @@ def solve(fn, parms=[]): ra = 0 dec = 0 solveLog.append("solving:\n") - skyStatusText = skyStatusText + " solving" + #skyStatusText = skyStatusText + " solving" lastmessage = '' + verbose = skyConfig['observing']['verbose'] while not p.poll(): stdoutdata = p.stdout.readline().decode(encoding='UTF-8') if stdoutdata: + if doDebug: + print(stdoutdata) if stdoutdata == lastmessage: continue + if verbose: + solveLog.append(stdoutdata) lastmessage = stdoutdata if 'simplexy: found' in stdoutdata: + if not verbose: + found = stdoutdata + solveLog.append(stdoutdata) + skyStatusText = stdoutdata - print("stdoutdata", stdoutdata) - elif stdoutdata.startswith('Field center: (RA,Dec) = ('): + #print("stdoutdata", stdoutdata) + elif 'RA,Dec =' in stdoutdata: + try: + + import re + pattern = re.compile(r'(-*[0-9]+\.*[0-9]*),(-*[0-9]+\.*[0-9]*).*pixel scale ([0-9]+\.[0-9]*)\s') + numbers = pattern.split(stdoutdata)[1:] + + + #print ('f',fields) + ra = float(numbers[0]) + dec = float(numbers[1]) + ppa = float(numbers[2]) + + radec = "%s %6.6lf %6.6lf \n" % ( + time.strftime('%H:%M:%S'), ra, dec) + file1 = open(os.path.join( + solve_path, "radec.txt"), "w") # write mode + file1.write(radec) + file1.close() + if not verbose: + solveLog.append(radec) + solveLog.append('pixel scale %6.2lf arcsec/pix\n'%(ppa)) + + + except BaseException as e: + solveLog.append(traceback.format_exc()) + + + pass + if 'Field size:' in stdoutdata and not verbose: + solveLog.append(stdoutdata) + if 'solved with' in stdoutdata: + if not verbose: + solveLog.append(stdoutdata) solved = stdoutdata - fields = solved.split()[-3:-1] - #print ('f',fields) - ra = fields[0][1:-1] - dec = fields[1][0:-1] - ra = float(fields[0][1:-1]) - dec = float(fields[1][0:-1]) - radec = "%s %6.6lf %6.6lf \n" % ( - time.strftime('%H:%M:%S'), ra, dec) - file1 = open(os.path.join( - solve_path, "radec.txt"), "w") # write mode - file1.write(radec) - file1.close() - if doDebug: - logging.warning('here is RAdec') - 
logging.warning(radec) stopTime = datetime.now() duration = stopTime - startTime + solveAvg += duration.total_seconds() + solveCnt += 1 + solveLog.append('average solved time %6.2lf\n'%( solveAvg/solveCnt)) #print ('duration', duration) - skyStatusText = skyStatusText + " solved "+str(duration)+'secs' - if stdoutdata and skyConfig['observing']['verbose']: - solveLog.append(stdoutdata) - print("stdout", str(stdoutdata)) - skyStatusText = skyStatusText + '.' - if stdoutdata.startswith("Field 1: solved with"): - ndx = stdoutdata.split("index-")[-1].strip() - print("index", ndx, stdoutdata ) - usedIndexes[ndx] = usedIndexes.get(ndx, 0)+1 - pp = pprint.pformat(usedIndexes) - print("Used indexes", pp ) - solveLog.append('used indexes ' + pp + '\n') - - elif stdoutdata.startswith('Field size'): - print("Field size", stdoutdata ) - solveLog.append(stdoutdata) - solveStatus += (". " + stdoutdata.split(":")[1].rstrip()) - elif stdoutdata.find('pixel scale') > 0: - computedPPa = stdoutdata.split("scale ")[1].rstrip() - elif 'The star' in stdoutdata: + solved = " solved. "+str(duration.total_seconds())+'secs\n' + solveLog.append(solved) + skyStatusText = solved + + if 'The star' in stdoutdata: stdoutdata = stdoutdata.replace(')', '') con = stdoutdata[-4:-1] if con not in starNames: @@ -619,89 +570,77 @@ def solve(fn, parms=[]): else: break - solveStatus += ". scale " + ppa + if solved: + skyStatusText = solved + flashrequest = 1 # create solved plot - if solved and (state is Mode.PLAYBACK or skyConfig['observing']['verbose']): - - # Write-Overwrites - file1 = open(os.path.join(solve_path, "radec.txt"), "w") # write mode - file1.write(radec) - file1.close() - cmdPlot = ['/usr/bin/plot-constellations', '-v', '-w', os.path.join(solve_path, 'cap.wcs'), - '-B', '-C', '-N', '-o', os.path.join(solve_path, 'cap-ngc.png')] - p2 = subprocess.Popen(cmdPlot, stdout=subprocess.PIPE) - ndx = 0 - stars = [] - while not p2.poll(): - if ndx > 1000: - break - ndx += 1 - stdoutdata = p2.stdout.readline().decode(encoding='UTF-8') - if stdoutdata: - stars.append(stdoutdata) - print(stdoutdata) - - if 'The star' in stdoutdata: - stdoutdata = stdoutdata.replace(')', '') - con = stdoutdata[-4:-1] - if con not in starNames: - starNames[con] = 1 - foundStars = ', '.join(stars).replace("\n", "") - foundStars = foundStars.replace("The star", "") - #solveLog.append(foundStars + "\n") - constellations = ', '.join(starNames.keys()) - # copy the index over the ngc file so stars will display with the index triangle - if profile['showStars']: - os.remove(os.path.join(solve_path, 'cap-objs.png')) - copyfile(os.path.join(solve_path, 'cap-indx.png'), - os.path.join(solve_path, 'cap-objs.png')) - - if skyConfig['observing']['savePosition'] and state is Mode.SOLVING: - obsfile = open(os.path.join(solve_path, "obs.log"), "a+") - obsfile.write(radec.rstrip() + " " + constellations + '\n') - obsfile.close() - #print ("wrote obs log") + if solved: + if enableSoldisplay: + try: + # put a plus in the center of the solution image + solution = cv2.imread(os.path.join(solve_path, 'cap-ngc.png'), cv2.IMREAD_UNCHANGED) + h,w,s = solution.shape + w = int(w/2) + h = int(h/2) + dl = 10 + c = (0x0,0x0,0xff,255) + cv2.line(solution,(w-dl,h), (w -3 * dl, h),c,3) + cv2.line(solution,(w + dl, h),(w + 3 * dl, h),c,3) + cv2.line(solution,(w,h-dl), (w, h-3 * dl),c,3) + cv2.line(solution,(w, h+dl),(w, h+3 * dl),c,3) + + cv2.imwrite(os.path.join(solve_path, 'cap-ngcc.png'), solution) + #time.sleep(1) + solveLog.append('annotation ready\n') + + # if 
skyConfig['observing']['savePosition'] and state is Mode.SOLVING: + # obsfile = open(os.path.join(solve_path, "obs.log"), "a+") + # obsfile.write(radec.rstrip() + " " + constellations + '\n') + # obsfile.close() + # #print ("wrote obs log") + except: + pass if skyConfig['observing']['saveImages']: saveimage = False if skyConfig['observing']['obsDelta'] > 0: #print ("checking delta") if lastObs: - old = lastObs.split(" ") - ra1 = math.radians(float(old[1])) - dec1 = math.radians(float(old[2])) - ra2 = math.radians(float(ra)) - dec2 = math.radians(float(dec)) - - delta = math.degrees(math.acos(math.sin( - dec1) * math.sin(dec2) + math.cos(dec1) * math.cos(dec2)*math.cos(ra1 - ra2))) - #print("image delta", delta) - if delta > skyConfig['observing']['obsDelta']: - saveimage = True + try: + old = lastObs.split(" ") + ra1 = math.radians(float(old[1])) + dec1 = math.radians(float(old[2])) + ra2 = math.radians(float(ra)) + dec2 = math.radians(float(dec)) + + delta = math.degrees(math.acos(math.sin( + dec1) * math.sin(dec2) + math.cos(dec1) * math.cos(dec2)*math.cos(ra1 - ra2))) + #print("image delta", delta) + if delta > skyConfig['observing']['obsDelta']: + saveimage = True + except Exception as e: + solveLog.append(traceback.format_exc()) else: saveimage = True if state is Mode.SOLVING and saveimage: lastObs = radec - fn = datetime.now().strftime("%m_%d_%y_%H_%M_%S.") + \ - skyConfig['camera']['format'] + fn = datetime.now().strftime("%m_%d_%y_%H_%M_%S.jpg") copyfile(os.path.join(solve_path, imageName), os.path.join(solve_path, 'history', fn)) - if skyConfig['observing']['showSolution']: - triggerSolutionDisplay = True - stopTime = datetime.now() - duration = stopTime - startTime - skyStatusText = "solved "+str(duration) + ' secs' - verboseSolveText = foundStars + #verboseSolveText = foundStars + #solveLog.append(foundStars) if doDebug: logging.warning(skyStatusText) logging.warning(verboseSolveText) logging.warning(radec) if not solved: - skyStatusText = skyStatusText + " Failed" + flashrequest = 2 + skyStatusText = skyStatusText + " Failed " ra = 0 solveLog.append("Failed\n") + solving = False solvestatestr = 'solved' if not solved: @@ -711,35 +650,14 @@ def solve(fn, parms=[]): return solved -def tetraSolve(imageName): - global skyStatusText, solveLog, t3 - - solveLog.append("solving " + imageName + '\n') - img = Image.open(os.path.join(solve_path, imageName)) - #print('solving', imageName) - profile = skyConfig['solverProfiles'][skyConfig['solver'] - ['currentProfile']] - - solved = t3.solve_from_image( - img, fov_estimate=float(profile['fieldLoValue'])) - # print(str(solved),profile['fieldLoValue'],flush=True) - if solved['RA'] == None: - return solved - radec = "%s %6.6lf %6.6lf \n" % (time.strftime( - '%H:%M:%S'), solved['RA'], solved['Dec']) - solveLog.append(str(solved) + '\n') - file1 = open(os.path.join(solve_path, "radec.txt"), "w") # write mode - file1.write(radec) - file1.close() - skyStatusText = str(solved['RA']) - return solved app = Flask(__name__) -doDebug = False + skyStatusText = 'Initilizing Camera' + @app.route("/StarHistory", methods=['GET','POST']) def showStarHistoryPage(): @@ -752,10 +670,15 @@ def index(): verboseSolveText = "" shutterValues = ['.001', '.002', '.005', '.01', '.02', '.05', '.1', '.15', '.2', '.5', '.7', '.9', '1', '2.', '3', '4', '5', '10'] + + currentShutter = skyConfig['camera']['shutter'] skyFrameValues = ['400x300', '640x480', '800x600', '1024x768', '1280x960', '1920x1440', '2000x1000', '2000x1500'] - isoValues = ['100', '200', '400', '800'] - 
formatValues = ['jpeg', 'png']
+    isoValues = ['10', '50', '100', '200', '400', '800', '1000', '2000','4000','8000','16000']
+    formatValues = ['jpg', 'png']
+    currentISO = skyConfig['camera']['ISO']
+    currentFrameValue = skyConfig['camera']['frame']
+    print('ISO',currentISO)
     solveParams = {'PPA': 27, 'FieldWidth': 14, 'Timeout': 34,
                    'Sigma': 9, 'Depth': 20, 'SearchRadius': 10}
     if cameraNotPresent:
@@ -766,25 +689,9 @@ def index():
         solveT = threading.Thread(target=solveThread)
         solveT.start()
     return render_template('template.html', shutterData=shutterValues, skyFrameData=skyFrameValues, skyFormatData=formatValues,
-                           skyIsoData=isoValues, profiles=skyConfig['solverProfiles'], startup=skyConfig['solver']['startupSolveing'], solveP=skyConfig['solver']['currentProfile'], obsParms=skyConfig['observing'], cameraParms=skyConfig['camera'])
-
-#
-
-
-@app.route("/solveLog")
-def streamSolveLog():
-    global solveLog
-
-    def generate():
-        global solveLog
-        while True:
-            #print (len(solveLog))
-            if len(solveLog) > 0:
-                str = solveLog.pop(0)
-                #print ("log", str)
-                yield str
-
-    return app.response_class(generate(), mimetype="text/plain")
+                           skyIsoData=isoValues, profiles=skyConfig['solverProfiles'],
+                           shutterValue = currentShutter, ISOValue = currentISO, frameValue = currentFrameValue,
+                           startup=skyConfig['solver']['startupSolveing'], solveP=skyConfig['solver']['currentProfile'], obsParms=skyConfig['observing'], cameraParms=skyConfig['camera'])
 
 
 @app.route('/pause', methods=['POST'])
@@ -807,8 +714,38 @@ def Align():
     state = Mode.ALIGN
     skyStatusText = 'Align Mode'
     return Response(skyStatusText)
+from subprocess import PIPE, run
+@app.route('/update',methods=['POST'])
+def updateskysolve():
+    global solveLog
+    solveLog.append(' update started')
+    try:
+        resp = os.remove(os.path.join(root_path, 'newCamLib.tar.gz'))
+        print('remove tar',resp)
+    except:
+        pass
+
+    command = ['wget', '-nv','https://github.com/githubdoe/skysolve/archive/newCamLib.tar.gz']
+    result = run(command, stdout=PIPE, stderr=PIPE, universal_newlines=True)
+    solveLog.append(result.stderr)
+    if result.returncode != 0:
+        # only report a failure when wget actually failed
+        solveLog.append('update failed\n')
+        return Response(' update failed')
+    solveLog.append(result.stdout)
+    command = ['tar',
+               '--exclude', 'skysolve-newCamLib/history',
+               '--exclude','skysolve-newCamLib/.skyConfig.json',
+               '-zxvf',
+               'newCamLib.tar.gz',
+               '--strip-components','1']
+    result = run(command, stdout=PIPE, stderr=PIPE, universal_newlines=True)
+    solveLog.append(result.stderr)
+    solveLog.append(result.stdout)
+    solveLog.append('Reboot to install these updates\n')
+    return Response(' update complete')
+
 
 @app.route('/saveCurrent', methods=['POST'])
 def saveCurrent():
     global skyStatusText, imageName
@@ -839,29 +776,14 @@ def Solving():
     return Response(skyStatusText)
 
 
-@app.route('/setISO', methods=['POST'])
-def setISOx():
-    print("setISO FORM", request.form.values)
-    global solveLog, skyStatusText, isoglobal
-    value = request.form.get('setISO')
-    solveLog.append("ISO changing will take 10 seconds to stabilize gain.\n")
-    delayedStatus(2, "changing to ISO " + value)
-    isoglobal = value
-    skyCam.setISO(int(value))
-    skyConfig['camera']['ISO'] = value
-    saveConfig()
-    return Response(status=204)
 
-@app.route('/setISO/<value>', methods=['POST', 'GET'])
+@app.route('/setISO/<value>', methods=['POST'])
 def setISO(value):
     print("setting iso", value)
-    global solveLog, skyStatusText, isoglobal
-    solveLog.append("ISO changing will take 10 seconds to stabilize gain.\n")
-    delayedStatus(2, "changing to ISO " + value)
-    isoglobal = value
+
     skyCam.setISO(int(value))
 
     skyConfig['camera']['ISO'] = value
@@ -890,28 +812,16 @@ def setFormat(value):
     return Response(status=204)
 
 
-@app.route('/setShutter', methods=['POST'])
-def setShutterx():
-    global skyStatusText
-    value = request.form.get('setShutter')
-    print("shutter value", value)
-    skyCam.setShutter(int(1000000 * float(value)))
-    skyConfig['camera']['shutter'] = value
-    delayedStatus(2, "Setting shutter to "+str(value) +
-                  " may take about 10 seconds.")
-    saveConfig()
-    return Response(status=204)
 
 @app.route('/setShutter/<value>', methods=['POST'])
 def setShutter(value):
     global skyStatusText
-    print("shutter value", value)
-    skyCam.setShutter(int(1000000 * float(value)))
+
+    skyCam.setShutter(float(value))
     skyConfig['camera']['shutter'] = value
-    delayedStatus(2, "Setting shutter to "+str(value) +
-                  " may take about 10 seconds.")
+
     saveConfig()
     return Response(status=204)
@@ -959,12 +869,33 @@ def clearObsLog():
 
 setTimeDate = True
 
+@app.route('/enablesolutionDisplay', methods=['post'])
+def enablesolutionDisplay():
+    global enableSoldisplay
+    t = request.form['checked']
+    enableSoldisplay = t == 'true'
+    return Response('ok')
 
 @app.route('/lastVerboseSolve', methods=['post'])
 def verboseSolveUpdate():
     global verboseSolveText
     return Response(verboseSolveText)
 
+@app.route("/solveLog")
+def streamSolveLog():
+    global solveLog
+    text = ''
+    while solveLog:
+        try:
+            text += solveLog.popleft()
+        except Exception as e:
+            print(e)
+            break
+
+    return Response(text)
 
 @app.route('/skyStatus', methods=['post'])
 def skyStatus():
@@ -972,12 +903,19 @@ def skyStatus():
     if setTimeDate:
         setTimeDate = False
         t = float(request.form['time'])/1000
-        time.clock_settime(time.CLOCK_REALTIME, t)
+        try:
+            time.clock_settime(time.CLOCK_REALTIME, t)
+        except Exception as e:
+            print("could not set clock", e )
+
     resp = copy.deepcopy(skyStatusText)
-    skyStatusText = ''
-    return Response(resp)
+
+    #skyStatusText = ''
+    return Response(resp)
 
 
 @app.route('/Focus', methods=['post'])
 def Focus():
     global focusStd
@@ -985,11 +923,11 @@ def Focus():
 
 lastmode = state
 
 def gen():
-    global skyStatusText, solveT, testNdx, triggerSolutionDisplay,\
-        doDebug, state, solveCurrent, framecnt, lastDisplayedFile,lastmode,\
-        GUIReadyForImage, GUIImage
+    global skyStatusText, solveT, testNdx,\
+        doDebug, state, framecnt, lastDisplayedFile,lastmode,\
+        GUIImage, imageName
     # Video streaming generator function.
-
+    print("gen called")
     lastImageTime = datetime.now()
     yield (b'\r\n--framex\r\n'
            b'Content-Type: image/jpeg\r\n\r\n')
     while True:
@@ -1002,31 +940,29 @@ def gen():
 
             if lastDisplayedFile == solveThisImage:
                 continue
+            print("history", lastDisplayedFile, solveThisImage)
             lastDisplayedFile = solveThisImage
             with open(solveThisImage, 'rb') as infile:
                 frame = infile.read()
-        else:
-            GUIReadyForImage = True
-            while(GUIReadyForImage):
-                time.sleep(.1)
-            frame = GUIImage
+        else:
+            frame = cv2.imencode('.jpeg', getImage())[1].tobytes()
+
         if doDebug:
-            logging.warning("frame sent to GUI %d", framecnt)
-        if state is Mode.SOLVING:
+            print("frame sent to GUI ", framecnt)
+        if state is Mode.SOLVING:   # count solve frames too when debugging
            framecnt = framecnt + 1
         if state is Mode.ALIGN:
            framecnt = framecnt + 1
            skyStatusText = "frame %d" % (framecnt)
+
+        # this sends the image and also the header for the next image
         yield (frame + b'\r\n--framex\r\n'
               b'Content-Type: image/jpeg\r\n\r\n')
-        if state is Mode.SOLVING:
-            solveCurrent = True
-
 
 @app.route('/deleteProfile/<value>', methods=['POST'])
 def deleteProfile(value):
@@ -1058,21 +994,21 @@ def apply():
     profile['FieldWidthMode'] = mode
 
     profile['name'] = cur
-    profile['fieldLoValue'] = req.get("fieldLoValue")
-    profile['fieldHiValue'] = req.get("fieldHiValue")
-    profile['aPPLoValue'] = req.get("aPPLowValue")
-    profile['aPPHiValue'] = req.get("aPPHiValue")
+    profile['fieldLoValue'] = req.get("fieldLoValue", default = 10)
+    profile['fieldHiValue'] = req.get("fieldHiValue", default = 30)
+    profile['aPPLoValue'] = req.get("aPPLowValue", default = 1)
+    profile['aPPHiValue'] = req.get("aPPHiValue", default = 100)
     if req.get("searchRadius") != "":
-        profile['searchRadius'] = float(req.get("searchRadius"))
+        profile['searchRadius'] = float(req.get("searchRadius", default = 10))
     else:
         profile['searchRadius'] = 0
-    profile['solveSigma'] = int(req.get("solveSigma"))
-    profile['solveDepth'] = int(req.get("solveDepth"))
+    profile['solveSigma'] = int(req.get("solveSigma", default = 7))
+    profile['solveDepth'] = int(req.get("solveDepth", default = 20))
     profile['solveVerbose'] = bool(req.get("solveVerbose"))
     profile['showStars'] = bool(req.get("showStars"))
     profile['verbose'] = bool(req.get("verbose"))
     profile['maxTime'] = float(req.get('CPUTimeout'))
-    profile['additionalParms'] = req.get('additionalParms')
+    profile['additionalParms'] = req.get('additionalParms', default = '')
     #print("curprofile", profile)
     saveConfig()
     print('\n\n\nskyconfig', json.dumps(
@@ -1099,7 +1035,7 @@ def demoMode():
 
     setupImageFromFile()
 
-    solveLog = [x + '\n' for x in testFiles]
+
     return Response(skyStatusText)
 
@@ -1122,7 +1058,7 @@ def findHistoryFiles():
 
     print("test files len", len(testFiles))
 
-    solveLog = [x + '\n' for x in testFiles]
+
     while datetime.now() < x:
         time.sleep(1)
     print("gather done")
@@ -1174,7 +1110,7 @@ def restartThread():
 @app.route('/restartc', methods=['POST'])
 def restartc():
     global skyStatusText, skyCam, state
-
+    os.system("echo '1' | sudo tee /sys/class/leds/PWR/brightness >>/dev/null")
     state = Mode.PAUSED
     th = threading.Thread(target=restartThread)
     th.start()
@@ -1198,7 +1134,7 @@ def toggletestMode():
 measureStackLock = threading.Lock()
 allDone = False
 # measure transparency of current image
-import traceback
+
 readytoSend = False
 
@@ -1213,22 +1149,7 @@ def sendStatus(msg, col=1):
     message = ' '.join(statusCols)
     readytoSend = True
 
-@app.route('/skyQstatus', methods=['POST','GET'])
-def skyQStatus():
-    global readytoSend,message,allDone
-    print("skyall called")
-    sendStatus("Ready")
-
def inner(): - global allDone,readytoSend,message - while True: - #measureStackLock.acquire() - if readytoSend: - yield 'data: '+message+ '\n\n' - readytoSend = False - - return Response(inner(), mimetype='text/event-stream') @app.route('/showImage') @@ -1243,30 +1164,6 @@ def processImageX(): return(html) - - - -def returnImage(): - global testNdx, solveThisImage - setupImageFromFile() - filename = 'quality'+datetime.now().strftime("%m_%d_%y_%H_%M_%S.jpeg") - copyfile(solveThisImage, os.path.join(solve_path, filename)) - out = ''%('./static/'+filename) - print("file name",out) - from PIL import Image, ExifTags - - img = Image.open('./static/' + filename) - img_exif = img._getexif() - - for key, val in img_exif.items(): - if key in ExifTags.TAGS: - if 'ExposureTime' in ExifTags.TAGS[key]: - print('EXP ',f'{ExifTags.TAGS[key]}:{val[0]/val[1]}') - break - - sendStatus(out) - return Response('%d %s'%(testNdx, solveThisImage)) - @app.route('/sqDelete', methods=['POST']) def deletecurrentHistoryImage(): global testNdx, testFiles, state @@ -1274,27 +1171,13 @@ def deletecurrentHistoryImage(): return('%d %s'%('Replay Images must be selected first', ' ')) fn = testFiles[testNdx] testFiles.remove(fn) + print("deleting history file- ", testNdx, fn) os.remove(fn) - if testNdx > len(testFiles): - testNdx -= 1 - - return returnImage() -@app.route('/sqPrevious', methods=['POST']) -def sqPrevious(): - global testNdx - testNdx -= 1 - if testNdx < 0: - testNdx = len(testFiles)-1 - return returnImage() + setupImageFromFile() + + return Response("deleted"); -@app.route('/sqNext', methods=['POST']) -def sqNext(): - global testNdx - testNdx += 1 - if testNdx >= len(testFiles): - testNdx = 0 - return returnImage() @app.route('/nextImage', methods=['POST']) def nextImagex(): @@ -1337,6 +1220,8 @@ def historyNdx(): if state is Mode.PLAYBACK: req = request.form testNdx = int(req.get("hNdx")) + if testNdx > len(testFiles): + testndx = len(testFiles) -1 setupImageFromFile() @@ -1345,11 +1230,13 @@ def historyNdx(): @app.route('/Debug', methods= ['POST']) def debugcommands(): global doDebug - doDebug = request.form.get("enableDebug") == "on" - if (doDebug): - sys.stdout = x = ListStream() - else: - sys.stdout = sys.__stdout__ + print('do debug called', request.form) + if request.form.get("enableDebug"): + doDebug = True + else : + doDebug = False + + print("debug",doDebug, request.form.get("enableDebug")) return Response(status=205) @@ -1404,8 +1291,8 @@ def prevObs(): @app.route('/solveThis', methods=['POSt']) def solveThis(): - global solveCurrent, state - solveCurrent = True + global state + state = Mode.SOLVETHIS skyStatusText = "Solving" return Response(skyStatusText) @@ -1447,16 +1334,16 @@ def downloadImage(): global solveThisImage if (state != Mode.ALIGN) and (state != Mode.SOLVING): + print("solve this image was", solveThisImage) + return send_file(solveThisImage, as_attachment=True) - with open(solveThisImage, 'rb') as infile: - frame = infile.read() else: frame = skyCam.get_frame() - fn = os.path.join(solve_path, "current.jpg") - with open(fn, "wb") as f: - f.write(frame) - return send_file(fn, as_attachment=True) + fn = os.path.join(solve_path, "current.jpg") + cv2.imwrite(fn, frame) + + return send_file(fn, as_attachment=True) @app.route('/zip', methods=['GET']) def zipImages(): diff --git a/skysolveSetup.sh b/skysolveSetup.sh index 594b3ff..d8bfb46 100755 --- a/skysolveSetup.sh +++ b/skysolveSetup.sh @@ -2,11 +2,13 @@ # # Installs files needed for skySolve program Script based in part on # 
AstroRaspbianPi Raspberry Pi Raspbian KStars/INDI Configuration Script -# Copyright (C) 2018 Robert Lancaster +# Copyright (C) 2018 Robert Lancaster # This script is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public # License as published by the Free Software Foundation; either # version 2 of the License, or (at your option) any later version. +#Text Format + if [ "$(whoami)" != "root" ]; then echo "Please run this script with sudo due to the fact that it must do a number of sudo tasks. Exiting now." @@ -19,6 +21,11 @@ else fi DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +YEL='\e[38;2;255;255;0m' +GRE='\e[38;0;255;0;0m' +DEF='\e[m' +BOL='\e[1m' + function display { @@ -47,10 +54,11 @@ function checkForConnection fi } -display "Welcome to the SKySolve Configuration Script." -display "This will update, install and configure your Raspberry Pi 4 to work with SkySolve. Be sure to read the script first to see what it does and to customize it." +display "This will update, install and configure your Raspberry Pi to work with SkySolve. Be sure to read the script first to see what it does and to customize it." +echo -e $YEL"Welcome to the SKySolve Configuration Script."$DEF +echo It can update the operating system, install the Field hot spot, and the plate solve code. read -p "Are you ready to proceed (y/n)? " proceed if [ "$proceed" != "y" ] @@ -133,7 +141,7 @@ then sed -i "s/#autologin-user-timeout=0/autologin-user-timeout=0/g" /etc/lightdm/lightdm.conf fi -display "Setting HDMI settings in /boot/config.txt." + # This pretends an HDMI display is connected at all times, otherwise, the pi might shut off HDMI # So that when you go to plug in an HDMI connector to diagnose a problem, it doesn't work @@ -176,103 +184,81 @@ if [ -z "$(grep 'xserver-command=X -s 0 dpms' '/etc/lightdm/lightdm.conf')" ] then sed -i "/\[Seat:\*\]/ a xserver-command=X -s 0 dpms" /etc/lightdm/lightdm.conf fi -display "setup networking hotpot" -# This will install the autohotspot files so that the pi can connect to local wifi or be a hotspot. -./Autohotspot/autohotspot-setup.sh 1 -display "Making Utilities Folder with script shortcuts for the Desktop" +display "Setting up hot spot" -# This will make a folder on the desktop for the launchers if it doesn't exist already -if [ ! -d "$USERHOME/Desktop/utilities" ] +if [ ! -f AccessPopup.tar.gz ] then - mkdir -p $USERHOME/Desktop/utilities - sudo chown $SUDO_USER:$SUDO_USER $USERHOME/Desktop/utilities -fi - + display "setup networking hotpot" + curl "https://www.raspberryconnect.com/images/scripts/AccessPopup.tar.gz" -o AccessPopup.tar.gz + tar -xvf ./AccessPopup.tar.gz - - -# This will create a shortcut on the desktop in the utilities folder for Installing Astrometry Index Files. -################## -sudo cat > $USERHOME/Desktop/utilities/InstallAstrometryIndexFiles.desktop <<- EOF -#!/usr/bin/env xdg-open -[Desktop Entry] -Version=1.0 -Type=Application -Terminal=true -Icon[en_US]=mate-preferences-desktop-display -Exec=sudo $(echo $DIR)/astrometryIndexInstaller.sh -Name[en_US]=Install Astrometry Index Files -Name=Install Astrometry Index Files -Icon=$(echo $DIR)/icons/mate-preferences-desktop-display.svg -EOF -################## -sudo chmod +x $USERHOME/Desktop/utilities/InstallAstrometryIndexFiles.desktop -sudo chown $SUDO_USER:$SUDO_USER $USERHOME/Desktop/utilities/InstallAstrometryIndexFiles.desktop - -# This will create a shortcut on the desktop folder for Updating the System. 
-
-##################
-sudo cat > $USERHOME/Desktop/utilities/systemUpdater.desktop <<- EOF
-#!/usr/bin/env xdg-open
-[Desktop Entry]
-Version=1.0
-Type=Application
-Terminal=true
-Icon[en_US]=system-software-update
-Exec=sudo $(echo $DIR)/systemUpdater.sh
-Name[en_US]=Software Update
-Name=Software Update
-Icon=$(echo $DIR)/icons/system-software-update.svg
-EOF
-##################
-sudo chmod +x $USERHOME/Desktop/utilities/systemUpdater.desktop
-sudo chown $SUDO_USER:$SUDO_USER $USERHOME/Desktop/utilities/systemUpdater.desktop
-
-read -p "Install samba file sharing(y/n)? " proceed
-
-if [ "$proceed" == "y" ]
-then
-
-
-#########################################################
-############# File Sharing Configuration
-
-display "Setting up File Sharing"
-
-# Installs samba so that you can share files to your other computer(s).
-sudo apt -y install samba samba-common-bin
-sudo touch /etc/libuser.conf
-
-if [ ! -f /etc/samba/smb.conf ]
-then
-    sudo mkdir -p /etc/samba/
-##################
-sudo cat > /etc/samba/smb.conf <<- EOF
-[global]
-   workgroup = ASTROGROUP
-   server string = Samba Server
-   server role = standalone server
-   log file = /var/log/samba/log.%m
-   max log size = 50
-   dns proxy = no
-[homes]
-   comment = Home Directories
-   browseable = no
-   read only = no
-   writable = yes
-   valid users = $SUDO_USER
-EOF
-##################
-fi
-
-# Adds yourself to the user group of who can use samba, but checks first if you are already in the list
-if [ -z "$(sudo pdbedit -L | grep $SUDO_USER)" ]
-then
-    sudo smbpasswd -a $SUDO_USER
-    sudo adduser $SUDO_USER sambashare
-fi
 fi
-
+# allow changing the hot spot AP SSID name and password
+ap_ssid_change()
+{ script_path="/usr/bin/"
+    scriptname="accesspopup"
+    if [ -f "$script_path$scriptname" ] >/dev/null 2>&1; then
+        echo -e "The current ssid and password for the Field hot spot access point are:"
+        ss="$( grep -F 'ap_ssid=' $script_path$scriptname )"
+        echo "SSID:${ss:8}"
+        pw="$( grep -F 'ap_pw=' $script_path$scriptname )"
+        echo "Password:${pw:6}"
+        prof="$( grep -F 'ap_profile_name=' $script_path$scriptname )"
+        echo -e $YEL"Enter the new SSID"$DEF
+        echo "Press enter to keep the existing SSID"
+        read newss
+        if [ ! -z "$newss" ]; then
+            sed -i "s/ap_ssid=.*/ap_ssid='$newss'/" "$script_path$scriptname"
+        fi
+        echo -e $YEL"Enter the new Password"$DEF
+        echo "The password must be at least 8 characters"
+        echo "Press enter to keep the existing Password"
+        read newpw
+        if [ ! -z "$newpw" ] && [ ${#newpw} -ge 8 ]; then
+            sed -i "s/ap_pw=.*/ap_pw='$newpw'/" "$script_path$scriptname"
+        fi
+        echo "The Access Point's SSID and Password are:"
+        ss="$( grep -F 'ap_ssid=' $script_path$scriptname )"
+        pw="$( grep -F 'ap_pw=' $script_path$scriptname )"
+        echo "SSID:${ss:8}"
+        echo "Password: ${pw:6}"
+        #remove AP profile
+        pro="$(nmcli -t -f NAME con show | grep ${prof:17:-1} )"
+        if [ ! -z "$pro" ]; then
+            nmcli con delete "$pro" >/dev/null 2>&1
+            nmcli con reload
+        fi
+    else
+        echo "$scriptname is not available."
+        echo "Please install the script first"
+    fi
+}
+# This will install the autohotspot files so that the pi can connect to local wifi or be a hotspot.
+echo -e $YEL"This will install the Field hotspot wifi into the PI"$DEF
+echo "It will ask you for options."
+echo "The two option keys to press are 1 - install the hot spot, and then 9 - exit"
+echo "You can also set up your local/home wifi by pressing option 5 before you exit with 9."
+echo
+echo -e $YEL"Press any key to start the setup."$DEF
+read
+
+cd AccessPopup
+sudo ./installconfig.sh
+ap_ssid_change
+cd ..
+
+display "setting up python libraries"
+#install python scipy
+echo "installing extra python modules"
+sudo apt --fix-broken install
+sudo apt install python3-scipy
+sudo apt install python3-matplotlib
+echo "This next step may take 10 to 15 minutes."
+sudo apt install python3-numpy
+echo "installing opencv"
+sudo apt install python3-opencv
+sudo pip install --break-system-packages flask-sock
 
 # Installs the Astrometry.net package for supporting offline plate solves.
 display "Installing Astrometry.net"
@@ -287,7 +273,7 @@
 sudo apt -y install astrometry.net
 #!/bin/bash
 # AstroPi3 Astrometry Index File Installer
-# Copyright (C) 2018 Robert Lancaster
+# Copyright (C) 2018 Robert Lancaster
 # This script is free software; you can redistribute it and/or
 # modify it under the terms of the GNU General Public
 # License as published by the Free Software Foundation; either
@@ -389,6 +375,7 @@
 if [ "$answer" == "y" ]
 	sudo rm *.deb
 fi
 
+display "Enabling skysolve and Skysafari interface"
 #setup auto run of encoder and skysolve at boot.
@@ -401,11 +388,15 @@
 fi
 sudo systemctl enable skysolve.service
 
-#install python scipy
-sudo pip3 install scipy
-sudo apt install python3-matplotlib
-sudo pip3 install fitsio
-echo "This next step may take 10 to 15 minutes."
-sudo pip3 install -U numpy
-echo "Your requested installations are complete."
-display "Script Execution Complete. Your Raspberry should now be ready to use for SkySolve. You should restart your Pi."
\ No newline at end of file
+read -p "Is this an RPI 5 or later (y/n)? " pi5
+
+if [ "$pi5" == "y" ]
+then
+    sudo apt install python3-rpi-lgpio
+    sudo apt --fix-broken install
+
+
+fi
+
+echo -e $YEL"Your requested installations are complete. You should restart your Pi by typing: sudo reboot"$DEF
+display "Script Execution Complete. Your Raspberry Pi should now be ready to use for SkySolve."
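The setup script above installs the Python dependencies through apt (scipy, matplotlib, numpy, opencv) plus flask-sock through pip. A quick way to confirm the installs succeeded before launching skysolve is a short import check on the Pi; this snippet is illustrative only and is not part of the repository:

```python
# Illustrative sanity check (not part of the repo): confirm the Python
# dependencies installed by skysolveSetup.sh can be imported on the Pi.
import importlib

# Import names for the packages installed above (python3-scipy,
# python3-matplotlib, python3-numpy, python3-opencv, flask-sock).
for module in ("scipy", "matplotlib", "numpy", "cv2", "flask_sock"):
    try:
        importlib.import_module(module)
        print(module, "OK")
    except ImportError as err:
        print(module, "MISSING:", err)
```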
diff --git a/skysolve_setup.odt b/skysolve_setup newCamLib.odt similarity index 61% rename from skysolve_setup.odt rename to skysolve_setup newCamLib.odt index e48a634..d49f643 100644 Binary files a/skysolve_setup.odt and b/skysolve_setup newCamLib.odt differ diff --git a/skysolve_setup newCamLib.pdf b/skysolve_setup newCamLib.pdf new file mode 100644 index 0000000..5645fca Binary files /dev/null and b/skysolve_setup newCamLib.pdf differ diff --git a/skysolve_setup.pdf b/skysolve_setup.pdf deleted file mode 100644 index 10ff6da..0000000 Binary files a/skysolve_setup.pdf and /dev/null differ diff --git a/skysove starup.odt b/skysove starup.odt deleted file mode 100644 index 3ca2055..0000000 Binary files a/skysove starup.odt and /dev/null differ diff --git a/skysove starup.pdf b/skysove starup.pdf deleted file mode 100644 index e44f60c..0000000 Binary files a/skysove starup.pdf and /dev/null differ diff --git a/static/cap.jpg b/static/cap.jpg new file mode 100644 index 0000000..c8db5f9 Binary files /dev/null and b/static/cap.jpg differ diff --git a/static/demo/bigdipper.jpg b/static/demo/bigdipper.jpg deleted file mode 100755 index dc3156f..0000000 Binary files a/static/demo/bigdipper.jpg and /dev/null differ diff --git a/static/js/skySolve.js b/static/js/skySolve.js index dfab487..6f9dc4c 100755 --- a/static/js/skySolve.js +++ b/static/js/skySolve.js @@ -105,7 +105,7 @@ sks.consts = { showSolution: 0, profiles : {"a": 1, "b":2}, currentProfile: '' -}; +} function setSolverParams( cur){ @@ -113,7 +113,7 @@ function setSolverParams( cur){ console.log('cur profile',cur) // console.log('profiles', sks.consts.profiles) console.log("const.profiles",cur, JSON.stringify(sks.consts.profiles[cur])); - sks.consts.showStars()[0].checked =sks.consts.profiles[cur]["showStars"]; + //sks.consts.showStars()[0].checked =sks.consts.profiles[cur]['showStars'] sks.consts.SolveDepth().val(sks.consts.profiles[cur]['solveDepth']) sks.consts.SolveSigma().val(sks.consts.profiles[cur]['solveSigma']) sks.consts.FieldWidthaPPLow().val(sks.consts.profiles[cur]['aPPLoValue']) @@ -140,14 +140,12 @@ function setProfiles(profiles){ function setIniShutter( shutVal){ console.log("shutter value", shutVal) sks.consts.shutter().val(shutVal); - var shutr = document.getElementById('setShutter'); - shutr.value = shutVal; + } function setIniISO( ISOVal){ console.log("ISO val", ISOVal) sks.consts.ISO().val(ISOVal); - var iso2 = document.getElementById('setISO'); - iso2.value = ISOVal; + } function setIniFrame( frameVal){ sks.consts.frame().val(frameVal); @@ -174,7 +172,7 @@ $(document).ready(function(){ }); var checkBox = document.getElementById("autoStatusCB"); - console.log("checkBox", checkBox) + if (checkBox.checked == true) { setTimeout(updateStatusField, 1000); } @@ -191,8 +189,7 @@ $(document).ready(function(){ $.post("/setShutter/" + x, data = { suggest: x }, function(result) {}); - var numberVal = document.getElementById('setShutter'); - numberVal.value = x; + }); sks.consts.ISO().change( @@ -204,8 +201,7 @@ $(document).ready(function(){ suggest: x }, function(result) {}); - var numberVal = document.getElementById('setISO'); - numberVal.value = x; + }); sks.consts.frame().change( @@ -244,7 +240,7 @@ $(document).ready(function(){ } $('#solveProfile').change( function() { - console.log("this", this.value) + console.log("the profiles list", this, this.value) sks.consts.currentProfile = this.value; setSolverParams(this.value); } @@ -349,6 +345,11 @@ $(document).ready(function(){ ajax_get_Status('/shutdown') } ) + 
$('#update').click( + function(){ + ajax_get_Status('/update') + } + ) $('#restartc').click( function(){ @@ -576,5 +577,17 @@ $(document).ready(function(){ }); +function confirmDeleteFile() { + let text = "Delete this file?"; + if (confirm(text) == true) { + $.ajax({ + url: "/sqDelete", + method: 'POST', + success: function(result) { + document.getElementById("statusField").innerHTML = result; + } + }); + } + } diff --git a/static/radec.txt b/static/radec.txt index 175de55..bac7fc9 100755 --- a/static/radec.txt +++ b/static/radec.txt @@ -1 +1 @@ -02:18:46 165.628789 55.770433 +13:47:43 74.796000 9.351400 diff --git a/templates/template.html b/templates/template.html index 528987f..b804cb6 100755 --- a/templates/template.html +++ b/templates/template.html @@ -252,8 +252,6 @@ - -
@@ -270,25 +268,18 @@ + type="button" class="btn nightColor" data-toggle="collapse" data-target="#historyNdx" id="historyIndex"># + + - - - - - - ...
@@ -313,6 +304,8 @@ + +
@@ -330,7 +323,7 @@
  • The current image is displayed below the status display.
    - The status display updates every second or at the end of various commands.
    + The log is displayed below the image and shows data from the solve operation as well as from other commands.

  • @@ -453,14 +446,13 @@
    Demo
    -
    -
    -
    Status:
    -
    + +
    Status: 
    +
    @@ -470,7 +462,7 @@
    Demo
    -
    +
    @@ -492,9 +484,10 @@
    Demo
    @@ -502,7 +495,7 @@
    Demo
    @@ -510,14 +503,7 @@
    Demo
    - - @@ -525,8 +511,8 @@
    Demo
    - - + + @@ -554,17 +540,13 @@

    Plate Solver Parameters

    - - - - -
    + - >
    +
    -
    +

    These default values are good for the RPI High Quality Camera with a 25mm lens.
    For more info about the solver, see astrometry.net/doc/readme.html under the "Solving" heading.
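For context, the parameters named here (field width range, arcseconds per pixel, sigma, depth, CPU timeout) are the same values the apply() route stores in the profile, and they correspond to standard astrometry.net solve-field options. Below is a minimal sketch of that mapping, assuming field-width mode and an example image name; skysolve assembles its actual command elsewhere in the source:

```python
# Minimal sketch, assuming typical defaults: how profile values such as those
# saved by apply() map onto standard astrometry.net solve-field options.
# skysolve builds its real command elsewhere; "capture.jpg" is hypothetical.
import subprocess

profile = {"fieldLoValue": 10, "fieldHiValue": 30, "solveSigma": 7,
           "solveDepth": 20, "maxTime": 10.0}

cmd = ["solve-field", "capture.jpg", "--overwrite", "--no-plots",
       "--scale-units", "degwidth",                   # field-width mode
       "--scale-low", str(profile["fieldLoValue"]),   # minimum field width, degrees
       "--scale-high", str(profile["fieldHiValue"]),  # maximum field width, degrees
       "--sigma", str(profile["solveSigma"]),         # noise level for star detection
       "--depth", str(profile["solveDepth"]),         # number of field objects to try
       "--cpulimit", str(profile["maxTime"])]         # give up after this many seconds
subprocess.run(cmd, check=False)
```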

    @@ -573,7 +555,7 @@

    Plate Solver Parameters

    @@ -661,7 +643,7 @@

    Plate Solver Parameters

    Debug
    - + @@ -689,7 +671,7 @@

    Plate Solver Parameters

    @@ -754,10 +736,20 @@

    Plate Solver Parameters

    +
    +
    + + +
    - - + + +

    Log:
    @@ -767,7 +759,11 @@

    Plate Solver Parameters

    - @@ -1034,32 +1096,9 @@

    Plate Solver Parameters

    @@ -1075,11 +1114,14 @@

    Plate Solver Parameters

         var sky = document.getElementById("skyImage");
         var solve = document.getElementById('solu');
+
         sky.style.width = (100 * zoom) + "%";
         sky.style.height = (100 * zoom) + "%";
         solve.style.width = (100 * zoom) + "%";
         solve.style.height = (100 * zoom) + "%";
+
+
     }
     function brightNormal() {
         var sky = document.getElementById("skyImage");
@@ -1106,11 +1148,12 @@

    Plate Solver Parameters

         var zoom = skyimageMag
         var sky = document.getElementById("skyImage");
         var solve = document.getElementById('solu');
+
         sky.style.width = (100 * zoom) + "%";
         sky.style.height = (100 * zoom) + "%";
         solve.style.width = (100 * zoom) + "%";
         solve.style.height = (100 * zoom) + "%";
-
+
     }
diff --git a/tetra3.py b/tetra3.py
old mode 100755
new mode 100644
index c2a6b97..4cca9cb
--- a/tetra3.py
+++ b/tetra3.py
@@ -4,8 +4,13 @@
 Use it to identify stars in images and get the corresponding direction (i.e. right ascension and
 declination) in the sky which the camera points to. The only thing tetra3 needs to know is the
-approximate field of view of your camera. tetra3 also includes a versatile function to find spot
-centroids and statistics.
+approximate field of view of your camera.
+
+tetra3 also includes a versatile function to find spot centroids and statistics.
+Alternately, you can also use another star detection/centroiding library in conjunction
+with tetra3 plate solving. Cedar Detect (https://github.com/smroid/cedar-detect) is a high
+performance solution for this; see cedar_detect_client.py for a way to use tetra3 with
+Cedar Detect.
 
 Included in the package:
 
@@ -13,29 +18,34 @@
     - :meth:`tetra3.get_centroids_from_image`: Extract spot centroids from an image.
     - :meth:`tetra3.crop_and_downsample_image`: Crop and/or downsample an image.
 
-A default database (named `default_database`) is included in the repo, it is built for a maximum
-field of view of 12 degrees and and the default settings.
+The class :class:`tetra3.Tetra3` has three main methods for solving images:
+
+    - :meth:`Tetra3.solve_from_image`: Solve the camera pointing direction of an image.
+    - :meth:`Tetra3.solve_from_centroids`: As above, but from a list of star centroids.
+    - :meth:`Tetra3.generate_database`: Create a new database for your application.
 
-It is critical to set up the centroid extraction parameters (see :meth:`get_centroids_from_image`
-to reliably return star centroids from a given image. After this is done, pass the same keyword
-arguments to :meth:`Tetra3.solve_from_image` to use them when solving your images.
+A default database (named `default_database`) is included in the repo; it is built for a field of
+view range of 10 to 30 degrees with stars up to magnitude 8.
 
 Note:
     If you wish to build your own database (typically for a different field-of-view) you must
    download a star catalogue. tetra3 supports three options:
-
+
     * The 285KB Yale Bright Star Catalog 'BSC5' containing 9,110 stars. This is complete
-      to about magnitude seven and is sufficient for >10 deg field-of-view setups.
+      to about magnitude seven and is sufficient for >20 deg field-of-view setups.
     * The 51MB Hipparcos Catalogue 'hip_main' containing 118,218 stars. This contains about
-      three stars per square degree and is sufficient down to about >3 deg field-of-view.
+      three stars per square degree and is sufficient down to about >10 deg field-of-view.
     * The 355MB Tycho Catalogue 'tyc_main' (also from the Hipparcos satellite mission)
-      containing 1,058,332 stars. This is complete to magnitude 10 and is sufficient for all tetra3 databases.
+      containing 1,058,332 stars, around 25 per square degree. This is complete to
+      magnitude 10 and is sufficient down to about >3 deg field-of-view.
+
     The 'BSC5' data is available from <http://tdc-www.harvard.edu/catalogs/bsc5.html> (use
     byte format file) and 'hip_main' and 'tyc_main' are available from
     <https://cdsarc.u-strasbg.fr/ftp/cats/I/239/> (save the appropriate .dat file). The
-    downloaded catalogue must be placed in the tetra3 directory.
+ downloaded catalogue must be placed in the tetra3/tetra3 directory. -This is Free and Open-Source Software based on `Tetra` rewritten by Gustav Pettersson at ESA. +Cedar Solve is Free and Open-Source Software based on `Tetra` rewritten by Gustav +Pettersson at ESA, with further improvements by Steven Rosenthal. The original software is due to: J. Brown, K. Stubis, and K. Cahoy, "TETRA: Star Identification with Hash Tables", @@ -43,6 +53,22 @@ +Cedar Solve license: + Copyright 2023 Steven Rosenthal smr@dt3.org + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + tetra3 license: Copyright 2019 the European Space Agency @@ -58,6 +84,7 @@ See the License for the specific language governing permissions and limitations under the License. + Original Tetra license notice: Copyright (c) 2016 brownj4 @@ -78,145 +105,267 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + """ # Standard imports: from pathlib import Path import csv import logging +import math import itertools from time import perf_counter as precision_timestamp from datetime import datetime +from numbers import Number +from collections import OrderedDict # External imports: import numpy as np -from numpy.linalg import norm +from numpy.linalg import norm, lstsq import scipy.ndimage import scipy.optimize import scipy.stats - -_MAGIC_RAND = 2654435761 +import scipy +from scipy.spatial import KDTree +from scipy.spatial.distance import pdist, cdist + +from PIL import Image, ImageDraw + +# Local imports. +from breadth_first_combinations import breadth_first_combinations +from fov_util import fibonacci_sphere_lattice, num_fields_for_sky, separation_for_density +# Status codes returned by solve_from_image() and solve_from_centroids() +MATCH_FOUND = 1 +NO_MATCH = 2 +TIMEOUT = 3 +CANCELLED = 4 +TOO_FEW = 5 + +_MAGIC_RAND = np.uint64(2654435761) _supported_databases = ('bsc5', 'hip_main', 'tyc_main') - - -def _insert_at_index(item, index, table): - """Inserts to table with quadratic probing.""" - max_ind = table.shape[0] +_lib_root = Path(__file__).parent + +def _insert_at_index(pattern, hash_index, table): + """Inserts to table with quadratic probing. 
Returns table index where pattern was inserted + and bool indicating whether a collision occurred.""" + max_ind = np.uint64(table.shape[0]) + hash_index = np.uint64(hash_index) + collision = False for c in itertools.count(): - i = (index + c**2) % max_ind + c = np.uint64(c) + i = (hash_index + c*c) % max_ind if all(table[i, :] == 0): - table[i, :] = item - return - - -def _get_at_index(index, table): - """Gets from table with quadratic probing, returns list of all matches.""" - max_ind = table.shape[0] + table[i, :] = pattern + return (i, collision) + collision = True + +def _get_table_indices_from_hash(hash_index, table): + """Gets from table with quadratic probing, returns list of all possibly matching indices.""" + max_ind = np.uint64(table.shape[0]) + hash_index = np.uint64(hash_index) found = [] for c in itertools.count(): - i = (index + c**2) % max_ind + c = np.uint64(c) + i = (hash_index + c*c) % max_ind if all(table[i, :] == 0): - return found + return np.array(found) else: - found.append(table[i, :].squeeze()) - - -def _key_to_index(key, bin_factor, max_index): - """Get hash index for a given key.""" - # Get key as a single integer - index = sum(int(val) * int(bin_factor)**i for (i, val) in enumerate(key)) - # Randomise by magic constant and modulo to maximum index - return (index * _MAGIC_RAND) % max_index - - -def _generate_patterns_from_centroids(star_centroids, pattern_size): - """Iterate over centroids in order of brightness.""" - # break if there aren't enough centroids to make even one pattern - if len(star_centroids) < pattern_size: - return - star_centroids = np.array(star_centroids) - # create a list of the pattern's centroid indices - # add the lower and upper index bounds as the first - # and last elements, respectively - pattern_indices = [-1] + list(range(pattern_size)) + [len(star_centroids)] - # output the very brightest centroids before doing anything else - yield star_centroids[pattern_indices[1:-1]] - # iterate until the very dimmest centroids have been output - # which occurs when the first pattern index has reached its maximum value - while pattern_indices[1] < len(star_centroids) - pattern_size: - # increment the pattern indices in order - for index_to_change in range(1, pattern_size + 1): - pattern_indices[index_to_change] += 1 - # if the current set of pattern indices is valid, use them - if pattern_indices[index_to_change] < pattern_indices[index_to_change + 1]: - break - # otherwise, incrementing caused a conflict with the next pattern index - # resolve the conflict by resetting the current pattern index and moving on - else: - pattern_indices[index_to_change] = pattern_indices[index_to_change - 1] + 1 - # output the centroids corresponding to the current set of pattern indices - yield star_centroids[pattern_indices[1:-1]] + found.append(i) + +def _pattern_hash_to_index(pattern_hash, bin_factor, max_index): + """Get hash index for a given pattern_hash (tuple of ordered binned edge ratios). + Can be length p list or n by p array.""" + pattern_hash = np.uint64(pattern_hash) + bin_factor = np.uint64(bin_factor) + max_index = np.uint64(max_index) + # Combine pattern_hash components to a single large number. + # If p is the length of the pattern_hash (default 5) and B is the number of bins (default 50, + # calculated from max error), this will first give each pattern_hash a unique index from + # 0 to B^p-1, then multiply by large number and modulo to max index to randomise. 
+ if pattern_hash.ndim == 1: + combined = np.sum(pattern_hash*bin_factor**np.arange(len(pattern_hash), dtype=np.uint64), + dtype=np.uint64) + else: + combined = np.sum(pattern_hash*bin_factor**np.arange(pattern_hash.shape[1], dtype=np.uint64)[None, :], + axis=1, dtype=np.uint64) + with np.errstate(over='ignore'): + combined = (combined*_MAGIC_RAND) % max_index + return combined + +def _compute_vectors(centroids, size, fov): + """Get unit vectors from star centroids (pinhole camera).""" + # compute list of (i,j,k) vectors given list of (y,x) star centroids and + # an estimate of the image's field-of-view in the x dimension + # by applying the pinhole camera equations + centroids = np.array(centroids, dtype=np.float32) + (height, width) = size[:2] + scale_factor = np.tan(fov/2)/width*2 + star_vectors = np.ones((len(centroids), 3)) + # Pixel centre of image + img_center = [height/2, width/2] + # Calculate normal vectors + star_vectors[:, 2:0:-1] = (img_center - centroids) * scale_factor + star_vectors = star_vectors / norm(star_vectors, axis=1)[:, None] + return star_vectors + +def _compute_centroids(vectors, size, fov): + """Get (undistorted) centroids from a set of (derotated) unit vectors + vectors: Nx3 of (i,j,k) where i is boresight, j is x (horizontal) + size: (height, width) in pixels. + fov: horizontal field of view in radians. + We return all centroids plus a list of centroids indices that are within + the field of view. + """ + (height, width) = size[:2] + scale_factor = -width/2/np.tan(fov/2) + centroids = scale_factor*vectors[:, 2:0:-1]/vectors[:, [0]] + centroids += [height/2, width/2] + keep = np.flatnonzero(np.logical_and( + np.all(centroids > [0, 0], axis=1), + np.all(centroids < [height, width], axis=1))) + return (centroids, keep) + +def _undistort_centroids(centroids, size, k): + """Apply r_u = r_d(1 - k'*r_d^2)/(1 - k) undistortion, where k'=k*(2/width)^2, + i.e. k is the distortion that applies width/2 away from the centre. + centroids: Nx2 pixel coordinates (y, x), (0.5, 0.5) top left pixel centre. + size: (height, width) in pixels. + k: distortion, negative is barrel, positive is pincushion + """ + centroids = np.array(centroids, dtype=np.float32) + (height, width) = size[:2] + # Centre + centroids -= [height/2, width/2] + r_dist = norm(centroids, axis=1)/width*2 + # Scale + scale = (1 - k*r_dist**2)/(1 - k) + centroids *= scale[:, None] + # Decentre + centroids += [height/2, width/2] + return centroids + +def _distort_centroids(centroids, size, k, tol=1e-6, maxiter=30): + """Distort centroids corresponding to r_u = r_d(1 - k'*r_d^2)/(1 - k), + where k'=k*(2/width)^2 i.e. k is the distortion that applies + width/2 away from the centre. + + Iterates with Newton-Raphson until the step is smaller than tol + or maxiter iterations have been exhausted. + """ + centroids = np.array(centroids, dtype=np.float32) + (height, width) = size[:2] + # Centre + centroids -= [height/2, width/2] + r_undist = norm(centroids, axis=1)/width*2 + # Initial guess, distorted are the same position + r_dist = r_undist.copy() + for i in range(maxiter): + r_undist_est = r_dist*(1 - k*r_dist**2)/(1 - k) + dru_drd = (1 - 3*k*r_dist**2)/(1 - k) + error = r_undist - r_undist_est + r_dist += error/dru_drd + + if np.all(np.abs(error) < tol): + break + + centroids *= (r_dist/r_undist)[:, None] + centroids += [height/2, width/2] + return centroids + +def _find_rotation_matrix(image_vectors, catalog_vectors): + """Calculate the least squares best rotation matrix between the two sets of vectors. 
+ image_vectors and catalog_vectors both Nx3. Must be ordered as matching pairs. + """ + # find the covariance matrix H between the image and catalog vectors + H = np.dot(image_vectors.T, catalog_vectors) + # use singular value decomposition to find the rotation matrix + (U, S, V) = np.linalg.svd(H) + return np.dot(U, V) + +def _find_centroid_matches(image_centroids, catalog_centroids, r): + """Find matching pairs, unique and within radius r. + image_centroids: Nx2 (y, x) in pixels + catalog_centroids: Mx2 (y, x) in pixels + r: radius in pixels + + returns Kx2 list of matches, first column is index in image_centroids, + second column is index in catalog_centroids + """ + dists = cdist(image_centroids, catalog_centroids) + matches = np.argwhere(dists < r) + # Make sure we only have unique 1-1 matches + matches = matches[np.unique(matches[:, 0], return_index=True)[1], :] + matches = matches[np.unique(matches[:, 1], return_index=True)[1], :] + return matches + +def _angle_from_distance(dist): + """Given a euclidean distance between two points on the unit sphere, + return the center angle (in radians) between the two points. + """ + return 2.0 * np.arcsin(0.5 * dist) + +def _distance_from_angle(angle): + """Return the euclidean distance between two points on the unit sphere with the + given center angle (in radians). + """ + return 2.0 * np.sin(angle / 2.0) class Tetra3(): """Solve star patterns and manage databases. - To find the direction in the sky an image is showing this class calculates a "fingerprint" of - the stars seen in the image and looks for matching fingerprints in a pattern catalogue loaded - into memory. Subsequently, all stars that should be visible in the image (based on the - fingerprint's location) are looked for and the match is confirmed or rejected based on the + To find the direction in the sky an image is showing, this class calculates the geometric hashes + of star patterns seen in the image and looks for matching hashes in a pattern database loaded + into memory. Subsequently, all stars that should be visible in the image (based on the database + pattern's location) are looked for and the match is confirmed or rejected based on the probability that the found number of matches happens by chance. - Each pattern is made up of four stars, and the fingerprint is created by calculating the - distances between every pair of stars in the pattern and normalising by the longest to create - a set of five numbers between zero and one. This information, and the desired tolerance, is - used to find the indices in the database where the match may reside by a hashing function. + Each pattern is made up of four stars, and the pattern hash is created by calculating the + distances between every pair of stars in the pattern and normalising by the longest to create a + set of five numbers between zero and one. This information, and the desired tolerance, is used + to find the indices in the database where the match may reside by a table index hashing + function. See the description of :meth:`generate_database` for more detail. - A database needs to be generated with patterns which are of appropriate scale for the field - of view (FOV) of your camera. Therefore, generate a database using :meth:`generate_database` - with a `max_fov` which is the FOV of your camera (or slightly larger). A database with - `max_fov=12` (degrees) is included as `default_database.npz`. 
+ A database needs to be generated with patterns which are of appropriate scale for the horizontal + field of view (FOV) of your camera. Therefore, generate a database using + :meth:`generate_database` with a `max_fov` which is the FOV of your camera (or slightly larger). + A database with `max_fov=30` (degrees) is included as `default_database.npz`. Star locations (centroids) are found using :meth:`tetra3.get_centroids_from_image`, use one of your images to find settings which work well for your images. Then pass those settings as - keyword arguments to :meth:`solve_from_image`. + keyword arguments to :meth:`solve_from_image`. Alternately, you can use Cedar Detect for + detecting and centroiding stars in your images. Example 1: Load database and solve image :: import tetra3 - # Create instance + # Create instance, automatically loads the default database t3 = tetra3.Tetra3() - # Load a database - t3.load_database('default_database') - # Create dictionary with desired extraction settings - extract_dict = {'min_sum': 250, 'max_axis_ratio': 1.5} - # Solve for image, optionally passing known FOV estimate and error range - result = t3.solve_from_image(image, fov_estimate=11, fov_max_error=.5, **extract_dict) + # Solve for image (PIL.Image), with some optional arguments + result = t3.solve_from_image(image, fov_estimate=11, fov_max_error=.5, max_area=300) Example 2: Generate and save database :: import tetra3 - # Create instance - t3 = tetra3.Tetra3() + # Create instance without loading any database + t3 = tetra3.Tetra3(load_database=None) # Generate and save database t3.generate_database(max_fov=20, save_as='my_database_name') Args: load_database (str or pathlib.Path, optional): Database to load. Will call - :meth:`load_database` with the provided argument after creating instance. + :meth:`load_database` with the provided argument after creating instance. Defaults to + 'default_database'. Can set to None to create Tetra3 object without loaded database. debug_folder (pathlib.Path, optional): The folder for debug logging. If None (the default) - the folder tetra3/debug will be used/created. + debug logging will be disabled unless handlers have been added to the `tetra3.Tetra3` + logger before creating the insance. 
""" - def __init__(self, load_database=None, debug_folder=None): + def __init__(self, load_database='default_database', debug_folder=None): # Logger setup self._debug_folder = None - if debug_folder is None: - self.debug_folder = Path(__file__).parent / 'debug' - else: - self.debug_folder = debug_folder self._logger = logging.getLogger('tetra3.Tetra3') if not self._logger.hasHandlers(): # Add new handlers to the logger if there are none @@ -224,24 +373,36 @@ def __init__(self, load_database=None, debug_folder=None): # Console handler at INFO level ch = logging.StreamHandler() ch.setLevel(logging.INFO) - # File handler at DEBUG level - fh = logging.FileHandler(self.debug_folder / 'tetra3.txt') - fh.setLevel(logging.DEBUG) # Format and add formatter = logging.Formatter('%(asctime)s:%(name)s-%(levelname)s: %(message)s') - fh.setFormatter(formatter) ch.setFormatter(formatter) - self._logger.addHandler(fh) self._logger.addHandler(ch) + if debug_folder is not None: + self.debug_folder = debug_folder + # File handler at DEBUG level + fh = logging.FileHandler(self.debug_folder / 'tetra3.txt') + fh.setLevel(logging.DEBUG) + fh.setFormatter(formatter) + self._logger.addHandler(fh) self._logger.debug('Tetra3 Constructor called with load_database=' + str(load_database)) self._star_table = None + self._star_kd_tree = None + self._star_catalog_IDs = None self._pattern_catalog = None + self._num_patterns = None + self._pattern_largest_edge = None self._verification_catalog = None + self._cancelled = False + self._db_props = {'pattern_mode': None, 'pattern_size': None, 'pattern_bins': None, - 'pattern_max_error': None, 'max_fov': None, - 'pattern_stars_per_fov': None, 'verification_stars_per_fov': None, - 'star_max_magnitude': None, 'star_min_separation': None} + 'pattern_max_error': None, 'max_fov': None, 'min_fov': None, + 'star_catalog': None, 'epoch_equinox': None, 'epoch_proper_motion': None, + 'lattice_field_oversampling': None, 'patterns_per_lattice_field': None, + 'verification_stars_per_fov': None, 'star_max_magnitude': None, + 'range_ra': None, 'range_dec': None, 'presort_patterns': None, + 'num_patterns': None} + if load_database is not None: self._logger.debug('Trying to load database') self.load_database(load_database) @@ -281,27 +442,75 @@ def star_table(self): """ return self._star_table + @property + def star_kd_tree(self): + """KDTree: KD tree of stars in the database. + """ + return self._star_kd_tree + @property def pattern_catalog(self): """numpy.ndarray: Catalog of patterns in the database.""" return self._pattern_catalog + @property + def num_patterns(self): + """numpy.uint32: Number of patterns in the database.""" + return self._num_patterns + + @property + def pattern_largest_edge(self): + """numpy.ndarray: Catalog of largest edges for each pattern in milliradian.""" + return self._pattern_largest_edge + + @property + def star_catalog_IDs(self): + """numpy.ndarray: Table of catalogue IDs for each entry in the star table. + + The table takes different format depending on the source catalogue used + to build the database. See the `star_catalog` key of + :meth:`database_properties` to find the source catalogue. + - bsc5: A numpy array of size (N,) with datatype uint16. Stores the 'BSC' number. + - hip_main: A numpy array of size (N,) with datatype uint32. Stores the 'HIP' number. + - tyc_main: A numpy array of size (N, 3) with datatype uint16. Stores the + (TYC1, TYC2, TYC3) numbers. + + Is None if no database is loaded or an older database without IDs stored. 
+ """ + return self._star_catalog_IDs + @property def database_properties(self): """dict: Dictionary of database properties. Keys: - - 'pattern_mode': Method used to identify star patterns. + - 'pattern_mode': Method used to identify star patterns. Is always 'edge_ratio'. - 'pattern_size': Number of stars in each pattern. - 'pattern_bins': Number of bins per dimension in pattern catalog. - - 'pattern_max_error' Maximum difference allowed in pattern for a match. - - 'max_fov': Maximum angle between stars in the same pattern (Field of View; degrees). - - 'pattern_stars_per_fov': Number of stars used for patterns in each region of size - 'max_fov'. - - 'verification_stars_per_fov': Number of stars in catalog in each region of size 'max_fov'. + - 'pattern_max_error': Maximum difference allowed in pattern for a match. + - 'max_fov': Maximum camera horizontal field of view (in degrees) the database is built for. + This will also be the angular extent of the largest pattern. + - 'min_fov': Minimum camera horizontal field of view (in degrees) the database is built for. + This drives the density of stars in the database, patterns may be smaller than this. + - 'lattice_field_oversampling': When uniformly distributing pattern generation fields over + the celestial sphere, this determines the overlap factor. + Also stored as 'pattern_stars_per_fov' for compatibility with earlier versions. + - 'patterns_per_lattice_field': Number of patterns generated for each lattice field. + Also stored as 'pattern_stars_per_anchor_star' for compatibility with earlier versions. + - 'verification_stars_per_fov': Number of stars in solve-time FOV to retain. - 'star_max_magnitude': Dimmest apparent magnitude of stars in database. - - 'star_min_separation': Smallest separation allowed between stars (to remove doubles; - degrees). + - 'star_catalog': Name of the star catalog (e.g. bcs5, hip_main, tyc_main) the database was + built from. Returns 'unknown' for old databases where this data was not saved. + - 'epoch_equinox': Epoch of the 'star_catalog' celestial coordinate system. Usually 2000, + but could be 1950 for old Bright Star Catalog versions. + - 'epoch_proper_motion': year to which stellar proper motions have been propagated. + - 'presort_patterns': Indicates if the pattern indices are sorted by distance to the centroid. + - 'range_ra': The portion of the sky in right ascension (min, max) that is in the database + (degrees 0 to 360). If None, the whole sky is included. + - 'range_dec': The portion of the sky in declination (min, max) that is in the database + (degrees -90 to 90). If None, the whole sky is included. + - 'num_patterns': The number of patterns in the database. If None, this is one + half of the pattern table size. """ return self._db_props @@ -310,7 +519,7 @@ def load_database(self, path='default_database'): Args: path (str or pathlib.Path): The file to load. If given a str, the file will be looked - for in the tetra3 directory. If given a pathlib.Path, this path will be used + for in the tetra3/data directory. If given a pathlib.Path, this path will be used unmodified. The suffix .npz will be added. 
""" self._logger.debug('Got load database with: ' + str(path)) @@ -325,8 +534,24 @@ def load_database(self, path='default_database'): with np.load(path) as data: self._logger.debug('Loaded database, unpack files') self._pattern_catalog = data['pattern_catalog'] + self._star_table = data['star_table'] + # Insert all stars in a KD-tree for fast neighbour lookup + all_star_vectors = self._star_table[:, 2:5] + self._star_kd_tree = KDTree(all_star_vectors) + props_packed = data['props_packed'] + try: + self._pattern_largest_edge = data['pattern_largest_edge'] + except KeyError: + self._logger.debug('Database does not have largest edge stored, set to None.') + self._pattern_largest_edge = None + try: + self._star_catalog_IDs = data['star_catalog_IDs'] + except KeyError: + self._logger.debug('Database does not have catalogue IDs stored, set to None.') + self._star_catalog_IDs = None + self._logger.debug('Unpacking properties') for key in self._db_props.keys(): try: @@ -341,308 +566,846 @@ def load_database(self, path='default_database'): self._db_props[key] = props_packed['star_min_magnitude'][()] self._logger.debug('Unpacked star_min_magnitude to: ' \ + str(self._db_props[key])) + elif key == 'presort_patterns': + self._db_props[key] = False + self._logger.debug('No presort_patterns key, set to False') + elif key == 'star_catalog': + self._db_props[key] = 'unknown' + self._logger.debug('No star_catalog key, set to unknown') + elif key == 'num_patterns': + self._db_props[key] = self.pattern_catalog.shape[0] // 2 + self._logger.debug('No num_patterns key, set to half of pattern_catalog size') else: - raise + self._db_props[key] = None + self._logger.warning('Missing key in database (likely version difference): ' + str(key)) + if self._db_props['min_fov'] is None: + self._logger.debug('No min_fov key, copy from max_fov') + self._db_props['min_fov'] = self._db_props['max_fov'] + self._num_patterns = self._db_props['num_patterns'] + self._logger.debug('Database properties %s' % self._db_props) + def save_database(self, path): """Save database to file. Args: path (str or pathlib.Path): The file to save to. If given a str, the file will be saved - in the tetra3 directory. If given a pathlib.Path, this path will be used + in the tetra3/data directory. If given a pathlib.Path, this path will be used unmodified. The suffix .npz will be added. 
""" assert self.has_database, 'No database' self._logger.debug('Got save database with: ' + str(path)) if isinstance(path, str): self._logger.debug('String given, append to tetra3 directory') - path = (Path(__file__).parent / path).with_suffix('.npz') + path = (Path(__file__).parent / 'data' / path).with_suffix('.npz') else: self._logger.debug('Not a string, use as path directly') path = Path(path).with_suffix('.npz') - + self._logger.info('Saving database to: ' + str(path)) + # Pack properties as numpy structured array props_packed = np.array((self._db_props['pattern_mode'], self._db_props['pattern_size'], self._db_props['pattern_bins'], self._db_props['pattern_max_error'], self._db_props['max_fov'], - self._db_props['pattern_stars_per_fov'], + self._db_props['min_fov'], + self._db_props['star_catalog'], + self._db_props['epoch_equinox'], + self._db_props['epoch_proper_motion'], + self._db_props['lattice_field_oversampling'], + self._db_props['anchor_stars_per_fov'], # legacy + self._db_props['pattern_stars_per_fov'], # legacy + self._db_props['patterns_per_lattice_field'], + self._db_props['patterns_per_anchor_star'], # legacy self._db_props['verification_stars_per_fov'], self._db_props['star_max_magnitude'], - self._db_props['star_min_separation']), + self._db_props['simplify_pattern'], # legacy + self._db_props['range_ra'], + self._db_props['range_dec'], + self._db_props['presort_patterns'], + self._db_props['num_patterns']), dtype=[('pattern_mode', 'U64'), ('pattern_size', np.uint16), ('pattern_bins', np.uint16), ('pattern_max_error', np.float32), ('max_fov', np.float32), + ('min_fov', np.float32), + ('star_catalog', 'U64'), + ('epoch_equinox', np.uint16), + ('epoch_proper_motion', np.float32), + ('lattice_field_oversampling', np.uint16), + ('anchor_stars_per_fov', np.uint16), ('pattern_stars_per_fov', np.uint16), + ('patterns_per_lattice_field', np.uint16), + ('patterns_per_anchor_star', np.uint16), ('verification_stars_per_fov', np.uint16), ('star_max_magnitude', np.float32), - ('star_min_separation', np.float32)]) + ('simplify_pattern', bool), + ('range_ra', np.float32, (2,)), + ('range_dec', np.float32, (2,)), + ('presort_patterns', bool), + ('num_patterns', np.uint32)]) + self._logger.debug('Packed properties into: ' + str(props_packed)) self._logger.debug('Saving as compressed numpy archive') - np.savez_compressed(path, star_table=self.star_table, - pattern_catalog=self.pattern_catalog, props_packed=props_packed) - def generate_database(self, max_fov, save_as=None, star_catalog='bsc5', pattern_stars_per_fov=10, - verification_stars_per_fov=20, star_max_magnitude=7, - star_min_separation=.05, pattern_max_error=.005): - """Create a database and optionally save to file. Typically takes 5 to 30 minutes. + to_save = {'star_table': self.star_table, + 'pattern_catalog': self.pattern_catalog, + 'props_packed': props_packed} + if self.pattern_largest_edge is not None: + to_save['pattern_largest_edge'] = self.pattern_largest_edge + if self.star_catalog_IDs is not None: + to_save['star_catalog_IDs'] = self.star_catalog_IDs + + np.savez_compressed(path, **to_save) + + @staticmethod + def _load_catalog(star_catalog, catalog_file_full_pathname, range_ra, range_dec, epoch_proper_motion, logger): + """Loads the star catalog and returns at tuple of: + star_table: an array of [ra, dec, 0, 0, 0, mag] + star_catID: array of catalog IDs for the entries in star_table + epoch_equinox: the epoch of the star catalog's celestial coordinate system. 
+ """ + + # Calculate number of star catalog entries: + if star_catalog == 'bsc5': + # See http://tdc-www.harvard.edu/catalogs/catalogsb.html + bsc5_header_type = [('STAR0', np.int32), ('STAR1', np.int32), + ('STARN', np.int32), ('STNUM', np.int32), + ('MPROP', np.int32), ('NMAG', np.int32), + ('NBENT', np.int32)] + reader = np.fromfile(catalog_file_full_pathname, dtype=bsc5_header_type, count=1) + entry = reader[0] + num_entries = entry[2] + header_length = reader.itemsize + if num_entries > 0: + epoch_equinox = 1950 + pm_origin = 1950 # this is an assumption, not specified in bsc5 docs + else: + num_entries = -num_entries + epoch_equinox = 2000 + pm_origin = 2000 # this is an assumption, not specified in bsc5 docs + # Check that the catalogue version has the data we need + stnum = entry[3] + if stnum != 1: + logger.warning('Catalogue %s has unexpected "stnum" header value: %s' % + (star_catalog, stnum)) + mprop = entry[4] + if mprop != 1: + logger.warning('Catalogue %s has unexpected "mprop" header value: %s' % + (star_catalog, mprop)) + nmag = entry[5] + if nmag != 1: + logger.warning('Catalogue %s has unexpected "nmag" header value: %s' % + (star_catalog, nmag)) + nbent = entry[6] + if nbent != 32: + logger.warning('Catalogue %s has unexpected "nbent" header value: %s' % + (star_catalog, nbent)) + elif star_catalog in ('hip_main', 'tyc_main'): + num_entries = sum(1 for _ in open(catalog_file_full_pathname)) + epoch_equinox = 2000 + pm_origin = 1991.25 + + logger.info('Loading catalogue %s with %s star entries.' % + (star_catalog, num_entries)) + + if epoch_proper_motion is None: + # If pm propagation was disabled, set end date to origin + epoch_proper_motion = pm_origin + logger.info('Using catalog RA/Dec %s epoch; not propagating proper motions from %s.' % + (epoch_equinox, pm_origin)) + else: + logger.info('Using catalog RA/Dec %s epoch; propagating proper motions from %s to %s.' % + (epoch_equinox, pm_origin, epoch_proper_motion)) + + # Preallocate star table: elements are [ra, dec, x, y, z, mag]. + star_table = np.zeros((num_entries, 6), dtype=np.float32) + # Preallocate ID table + if star_catalog == 'bsc5': + star_catID = np.zeros(num_entries, dtype=np.uint16) + elif star_catalog == 'hip_main': + star_catID = np.zeros(num_entries, dtype=np.uint32) + else: # is tyc_main + star_catID = np.zeros((num_entries, 3), dtype=np.uint16) + + # Read magnitude, RA, and Dec from star catalog: + if star_catalog == 'bsc5': + bsc5_data_type = [('ID', np.float32), ('RA', np.float64), + ('Dec', np.float64), ('type', np.int16), + ('mag', np.int16), ('RA_pm', np.float32), ('Dec_PM', np.float32)] + with open(catalog_file_full_pathname, 'rb') as star_catalog_file: + star_catalog_file.seek(header_length) # skip header + reader = np.fromfile(star_catalog_file, dtype=bsc5_data_type, count=num_entries) + for (i, entry) in enumerate(reader): + mag = entry[4]/100 + # RA/Dec in radians at epoch proper motion start. + alpha = float(entry[1]) + delta = float(entry[2]) + cos_delta = np.cos(delta) + + # Pick up proper motion terms. See notes for hip_main and tyc_main below. + # Radians per year. + mu_alpha_cos_delta = float(entry[5]) + mu_delta = float(entry[6]) + + # See notes below. 
+ if cos_delta > 0.1: + mu_alpha = mu_alpha_cos_delta / cos_delta + else: + mu_alpha = 0 + mu_delta = 0 + + ra = alpha + mu_alpha * (epoch_proper_motion - pm_origin) + dec = delta + mu_delta * (epoch_proper_motion - pm_origin) + star_table[i,:] = ([ra, dec, 0, 0, 0, mag]) + star_catID[i] = np.uint16(entry[0]) + elif star_catalog in ('hip_main', 'tyc_main'): + # The Hipparcos and Tycho catalogs uses International Celestial + # Reference System (ICRS) which is essentially J2000. See + # https://cdsarc.u-strasbg.fr/ftp/cats/I/239/version_cd/docs/vol1/sect1_02.pdf + # section 1.2.1 for details. + with open(catalog_file_full_pathname, 'r') as star_catalog_file: + reader = csv.reader(star_catalog_file, delimiter='|') + incomplete_entries = 0 + for (i, entry) in enumerate(reader): + # Skip this entry if mag, ra, or dec are empty. + if entry[5].isspace() or entry[8].isspace() or entry[9].isspace(): + incomplete_entries += 1 + continue + # If propagating, skip if proper motions are empty. + if epoch_proper_motion != pm_origin \ + and (entry[12].isspace() or entry[13].isspace()): + incomplete_entries += 1 + continue + mag = float(entry[5]) + # RA/Dec in degrees at 1991.25 proper motion start. + alpha = float(entry[8]) + delta = float(entry[9]) + cos_delta = np.cos(np.deg2rad(delta)) + + mu_alpha = 0 + mu_delta = 0 + if epoch_proper_motion != pm_origin: + # Pick up proper motion terms. Note that the pmRA field is + # "proper motion in right ascension"; see + # https://en.wikipedia.org/wiki/Proper_motion; see also section + # 1.2.5 in the cdsarc.u-strasbg document cited above. + + # The 1000/60/60 term converts milliarcseconds per year to + # degrees per year. + mu_alpha_cos_delta = float(entry[12])/1000/60/60 + mu_delta = float(entry[13])/1000/60/60 + + # Divide the pmRA field by cos_delta to recover the RA proper + # motion rate. Note however that near the poles (delta near plus + # or minus 90 degrees) the cos_delta term goes to zero so dividing + # by cos_delta is problematic there. + # Section 1.2.9 of the cdsarc.u-strasbg document cited above + # outlines a change of coordinate system that can overcome + # this problem; we simply punt on proper motion near the poles. + if cos_delta > 0.1: + mu_alpha = mu_alpha_cos_delta / cos_delta + else: + # abs(dec) > ~84 degrees. Ignore proper motion. + mu_alpha = 0 + mu_delta = 0 + + ra = np.deg2rad(alpha + mu_alpha * (epoch_proper_motion - pm_origin)) + dec = np.deg2rad(delta + mu_delta * (epoch_proper_motion - pm_origin)) + star_table[i,:] = ([ra, dec, 0, 0, 0, mag]) + # Find ID, depends on the database + if star_catalog == 'hip_main': + star_catID[i] = np.uint32(entry[1]) + else: # is tyc_main + star_catID[i, :] = [np.uint16(x) for x in entry[1].split()] + + if incomplete_entries: + logger.info('Skipped %i incomplete entries.' % incomplete_entries) + + # Remove entries in which RA and Dec are both zero + # (i.e. 
keep entries in which either RA or Dec is non-zero)
+        kept = np.logical_or(star_table[:, 0] != 0, star_table[:, 1] != 0)
+        star_table = star_table[kept, :]
+        brightness_ii = np.argsort(star_table[:, 5])
+        star_table = star_table[brightness_ii, :]  # Sort by brightness
+        num_entries = star_table.shape[0]
+        # Trim and order catalogue ID array to match
+        if star_catalog in ('bsc5', 'hip_main'):
+            star_catID = star_catID[kept][brightness_ii]
+        else:
+            star_catID = star_catID[kept, :][brightness_ii, :]
+
+        logger.info('Loaded %d stars' % num_entries)
+
+        # If desired, clip out only a specific range of RA and/or Dec for a partial coverage database
+        if range_ra is not None:
+            range_ra = np.deg2rad(range_ra)
+            if range_ra[0] < range_ra[1]:  # Range does not cross 360deg discontinuity
+                kept = np.logical_and(star_table[:, 0] > range_ra[0], star_table[:, 0] < range_ra[1])
+            else:
+                kept = np.logical_or(star_table[:, 0] > range_ra[0], star_table[:, 0] < range_ra[1])
+            star_table = star_table[kept, :]
+            num_entries = star_table.shape[0]
+            # Trim down catalogue ID to match
+            if star_catalog in ('bsc5', 'hip_main'):
+                star_catID = star_catID[kept]
+            else:
+                star_catID = star_catID[kept, :]
+            logger.info('Limited to RA range %s, keeping %s stars.' %
+                        (np.rad2deg(range_ra), num_entries))
+        if range_dec is not None:
+            range_dec = np.deg2rad(range_dec)
+            if range_dec[0] < range_dec[1]:  # Range does not cross +/-90deg discontinuity
+                kept = np.logical_and(star_table[:, 1] > range_dec[0], star_table[:, 1] < range_dec[1])
+            else:
+                kept = np.logical_or(star_table[:, 1] > range_dec[0], star_table[:, 1] < range_dec[1])
+            star_table = star_table[kept, :]
+            num_entries = star_table.shape[0]
+            # Trim down catalogue ID to match
+            if star_catalog in ('bsc5', 'hip_main'):
+                star_catID = star_catID[kept]
+            else:
+                star_catID = star_catID[kept, :]
+            logger.info('Limited to DEC range %s, keeping %s stars.' %
+                        (np.rad2deg(range_dec), num_entries))
+
+        return (star_table, star_catID, epoch_equinox)
+
+    def generate_database(self, max_fov, min_fov=None, save_as=None,
+                          star_catalog='hip_main',
+                          lattice_field_oversampling=100, patterns_per_lattice_field=50,
+                          verification_stars_per_fov=150, star_max_magnitude=None,
+                          pattern_max_error=.001, range_ra=None, range_dec=None,
+                          presort_patterns=True, save_largest_edge=True,
+                          multiscale_step=1.5, epoch_proper_motion='now',
+                          pattern_stars_per_fov=None, simplify_pattern=None):
+        """Create a database and optionally save it to file.
+
+        Takes a few minutes for a small (large FOV) database, can take many hours for a large
+        (small FOV) database. The primary knowledge necessary is the FOV you want the database
+        to work for and the highest magnitude of stars you want to include.
+
+        For a single application, set max_fov equal to your known FOV. Alternatively, set
+        max_fov and min_fov to the range of FOVs you want the database to be built for. For
+        a large difference between max_fov and min_fov, a multiscale database will be built,
+        where patterns of several different sizes on the sky will be included.

        Note:
-            If you wish to build you own database (typically for a different field-of-view) you must
-            download a star catalogue. tetra3 supports three options:
-
+            If you wish to build your own database you must download a star catalogue. tetra3
+            supports three options, where 'hip_main' is the default and recommended
+            database to use:

            * The 285KB Yale Bright Star Catalog 'BSC5' containing 9,110 stars. This is complete to
              about magnitude seven and is sufficient for >10 deg field-of-view setups.
            * The 51MB Hipparcos Catalogue 'hip_main' containing 118,218 stars. This contains
              about three stars per square degree and is sufficient down to about >3 deg
              field-of-view.
            * The 355MB Tycho Catalogue 'tyc_main' (also from the Hipparcos satellite mission)
-             containing 1,058,332 stars. This is complete to magnitude 10 and is sufficient for all tetra3 databases.
+             containing 1,058,332 stars. This is complete to magnitude 10 and is sufficient
+             for all tetra3 databases.

            The 'BSC5' data is available from (use byte format file) and 'hip_main' and
            'tyc_main' are available from (save the appropriate .dat file). The
-           downloaded catalogue must be placed in the tetra3 directory.
+           downloaded catalogue must be placed in the tetra3/tetra3 directory.

-        Args:
-            max_fov (float): Maximum angle (in degrees) between stars in the same pattern.
-            save_as (str or pathlib.Path, optional): Save catalog here when finished. Calls
-                :meth:`save_database`.
-            star_catalog (string, optional): Abbreviated name of star catalog, one of 'bsc5',
-                'hip_main', or 'tyc_main'.
-            pattern_stars_per_fov (int, optional): Number of stars used for pattern matching in each
-                region of size 'max_fov'.
-            verification_stars_per_fov (int, optional): Number of stars used for verification of the
-                solution in each region of size 'max_fov'.
-            star_max_magnitude (float, optional): Dimmest apparent magnitude of stars in database.
-            star_min_separation (float, optional): Smallest separation (in degrees) allowed between
-                stars (to remove doubles).
-            pattern_max_error (float, optional): Maximum difference allowed in pattern for a match.
-
-        Example:
+        Example, the default database was generated with:
            ::

                # Create instance
                t3 = tetra3.Tetra3()
                # Generate and save database
-               t3.generate_database(max_fov=20, save_as='my_database_name')
+               t3.generate_database(max_fov=30, min_fov=10, save_as='default_database')
+
+        If you know your FOV, set max_fov to this value and leave min_fov as None. The example above
+        takes less than 7 minutes to build on RPi4.
+
+        Note on celestial coordinates: The RA/Dec values incorporated into the database are expressed in the
+        same celestial coordinate system as the input catalog. For hip_main and tyc_main this is J2000; for
+        bsc5 this is also J2000 (but could be B1950 for older Bright Star Catalogs). The solve_from_image()
+        function returns its solution's RA/Dec values along with the equinox epoch of the database's catalog.
+
+        Notes on proper motion: star catalogs include stellar proper motion data. This means they give each
+        star's position as of a specified year (1991.25 for hip_main and tyc_main; 2000(?) for bsc5). In
+        addition, for each star, the annual rate of motion in RA/Dec is also given. This allows
+        generate_database() to output a database with stellar positions propagated to the year in which the
+        database was generated (by default; see below). Some stars don't have proper motions in the catalogue
+        and will therefore be excluded from the database; however, you can set epoch_proper_motion=None to
+        disable this propagation and all stars will be included. The field 'epoch_proper_motion' of the
+        database properties identifies the epoch for which the star positions are valid.
+
+        Theoretically, when passing an image to solve_from_image(), the database's epoch_proper_motion should
+        be the same as the time at which the image was taken. In practice, this is generally unimportant
+        because most stars' proper motion is very small. One exception: for very small fields of view (high
+        magnification), even small proper motions can be significant. Another exception: when solving
+        historical images. In both cases, you should arrange to use a database built with an
+        epoch_proper_motion similar to the image's vintage.
+
+        About patterns, pattern hashes, and collisions:
+
+        Tetra3 refers to a grouping of four stars as a "pattern", and assigns each pattern a
+        pattern hash as follows (a short sketch of this computation appears at the end of this
+        docstring):
+
+        1. Calculate the six edge distances between each pair of stars in the pattern.
+        2. Normalise by the longest edge to create a set of five numbers each between zero and
+           one.
+        3. Order the five edge ratios.
+        4. Quantize each edge ratio into a designated number of bins.
+        5. Concatenate the five ordered and quantized edge ratios to form the hash value for the
+           pattern.
+
+        When solving an image, tetra3 forms patterns from 4-groups of stars in the image, computes
+        each pattern's hash in the same manner, and uses these pattern hashes to look up the
+        corresponding database pattern (or patterns, see next). The locations of stars in the
+        database pattern and other nearby catalog stars are used to validate the match in the image.
+
+        Note that it is possible for multiple distinct patterns to share the same hash; this happens
+        more frequently as the number of quantization bins in step 4 is reduced. When multiple
+        patterns share the same hash we call this a "pattern hash collision". When solving an image,
+        pattern hash collisions increase the number of database patterns to be validated as a match
+        against the image's star patterns.
+
+        In theory, a python dict could be used to map from pattern hash value to the list of
+        patterns with that hash value. However, catalog databases can easily contain millions of
+        patterns, so in practice such a pattern dict would occupy an uncomfortably large amount of
+        memory.
+
+        Tetra3 instead uses an efficient array representation of its patterns, with each pattern
+        hash value being hashed (*) to form an index into the pattern array. Mapping the large space
+        of possible pattern hash values to the modest range of pattern array indices induces further
+        collisions, as does the open addressing hash algorithm used to manage the pattern array.
+        Because the pattern array is allocated to twice the number of patterns, the additional
+        hashing and table collisions induced are modest.
+
+        * We have two hashing concepts in play. The first is "geometric hashing" from the field of
+          object recognition and pattern matching (https://en.wikipedia.org/wiki/Geometric_hashing),
+          where a 4-star pattern is distilled to our pattern hash, a 5-tuple of quantized edge ratios.
+          The second is a "hash table" (https://en.wikipedia.org/wiki/Hash_table) where the pattern
+          hash is hashed to index into a compact table of all of the star patterns.
+
+        Args:
+            max_fov (float): Maximum angle (in degrees) between stars in the same pattern.
+            min_fov (float, optional): Minimum FOV considered when the catalogue density is trimmed to size.
+                If None (the default), min_fov will be set to max_fov, i.e. a catalogue for a single
+                application is generated (this is the most efficient in both size and speed).
+            save_as (str or pathlib.Path, optional): Save catalogue here when finished. Calls
+                :meth:`save_database`.
+            star_catalog (string, optional): Abbreviated name of star catalog, one of 'bsc5',
+                'hip_main', or 'tyc_main'. Default 'hip_main'.
+            lattice_field_oversampling (int, optional): When uniformly distributing pattern
+                generation fields over the celestial sphere, this determines the overlap factor.
+                Default is 100.
+            patterns_per_lattice_field (int, optional): The number of patterns generated for each
+                lattice field. Typical values are 20 to 100; default is 50.
+            verification_stars_per_fov (int, optional): Target number of stars used for generating
+                patterns in each FOV region. Also used to limit the number of stars considered for
+                matching when solving images. Typical values are large; default is 150.
+            star_max_magnitude (float, optional): Dimmest apparent magnitude of stars retained
+                from star catalog. None (default) causes the limiting magnitude to be computed
+                based on `min_fov` and `verification_stars_per_fov`.
+            pattern_max_error (float, optional): This value determines the number of bins into which
+                a pattern hash's edge ratios are each quantized:
+                    pattern_bins = 0.25 / pattern_max_error
+                Default .001, corresponding to pattern_bins=250. For a database with limiting magnitude
+                7, this yields a reasonable pattern hash collision rate.
+            range_ra (tuple, optional): Tuple with the range (min_ra, max_ra) in degrees (0 to 360).
+                If set, only stars within the given right ascension range will be kept in the database.
+            range_dec (tuple, optional): Tuple with the range (min_dec, max_dec) in degrees (-90 to 90).
+                If set, only stars within the given declination range will be kept in the database.
+            presort_patterns (bool, optional): If True (the default), all star patterns will be
+                sorted during database generation to avoid doing it when solving. Makes database
+                generation slower but the solver faster.
+            save_largest_edge (bool, optional): If True (default), the absolute size of each
+                pattern is stored (via its largest edge angle) in a separate array. This makes the
+                database larger but the solver faster.
+            multiscale_step (float, optional): Determines the largest ratio between subsequent FOVs
+                that is allowed when generating a multiscale database. Defaults to 1.5. If the ratio
+                max_fov/min_fov is less than sqrt(multiscale_step) a single scale database is built.
+            epoch_proper_motion (string or float, optional): Determines the end year to which
+                stellar proper motions are propagated. If 'now' (default), the current year is used.
+                If 'none' or None, star motions are not propagated and this allows catalogue entries
+                without proper motions to be used in the database.
+            pattern_stars_per_fov (int, optional): Deprecated. If given, is used instead of
+                `lattice_field_oversampling`, which has similar values.
+            simplify_pattern: No longer meaningful, ignored.
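+
+        Example, forming a pattern hash for four star unit vectors (an illustrative
+        sketch only; `vectors` stands for any list of four 3-element unit vectors and
+        `pattern_bins` is the bin count described under `pattern_max_error` above):
+        ::
+
+            import itertools
+            import math
+
+            # Six pairwise edge angles between the four stars, smallest first.
+            edges = sorted(2.0 * math.asin(0.5 * math.dist(a, b))
+                           for (a, b) in itertools.combinations(vectors, 2))
+            # Normalise the five shortest edges by the largest, then quantize.
+            edge_ratios = [e / edges[-1] for e in edges[:-1]]
+            pattern_hash = [int(ratio * pattern_bins) for ratio in edge_ratios]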
""" self._logger.debug('Got generate pattern catalogue with input: ' - + str((max_fov, save_as, star_catalog, pattern_stars_per_fov, - verification_stars_per_fov, star_max_magnitude, - star_min_separation, pattern_max_error))) + + str((max_fov, min_fov, save_as, star_catalog, lattice_field_oversampling, + patterns_per_lattice_field, verification_stars_per_fov, + star_max_magnitude, pattern_max_error, + range_ra, range_dec, presort_patterns, save_largest_edge, + multiscale_step, epoch_proper_motion))) + if pattern_stars_per_fov is not None and pattern_stars_per_fov != lattice_field_oversampling: + self._logger.warning( + 'pattern_stars_per_fov value %s is overriding lattice_field_oversampling value %s' % + (pattern_stars_per_fov, lattice_field_oversampling)) + lattice_field_oversampling = pattern_stars_per_fov + + # If True, measures and logs collisions (pattern hash, hash index and pattern table). + EVALUATE_COLLISIONS = False + + star_catalog, catalog_file_full_pathname = self._build_catalog_path(star_catalog) - assert star_catalog in _supported_databases, 'Star catalogue name must be one of: ' \ - + str(_supported_databases) max_fov = np.deg2rad(float(max_fov)) - pattern_stars_per_fov = int(pattern_stars_per_fov) - verification_stars_per_fov = int(verification_stars_per_fov) - star_max_magnitude = float(star_max_magnitude) - star_min_separation = float(star_min_separation) - pattern_size = 4 - pattern_bins = 25 - current_year = datetime.utcnow().year - - catalog_file_full_pathname = Path(__file__).parent / star_catalog - # Add .dat suffix for hip and tyc if not present - if star_catalog in ('hip_main', 'tyc_main') and not catalog_file_full_pathname.suffix: - catalog_file_full_pathname = catalog_file_full_pathname.with_suffix('.dat') - - assert catalog_file_full_pathname.exists(), 'No star catalogue found at ' +str( catalog_file_full_pathname) - - # Calculate number of star catalog entries: - if star_catalog == 'bsc5': - header_length = 28 - num_entries = 9110 - elif star_catalog in ('hip_main', 'tyc_main'): - header_length = 0 - num_entries = sum(1 for _ in open(catalog_file_full_pathname)) + if min_fov is None: + min_fov = max_fov + else: + min_fov = np.deg2rad(float(min_fov)) - self._logger.info('Loading catalogue ' + str(star_catalog) + ' with ' + str(num_entries) \ - + ' star entries.') + # Making lattice_field_oversampling larger yields more patterns, with diminishing + # returns. 
+ # value fraction of patterns found compared to lattice_field_oversampling=100000 + # 100 0.61 + # 1000 0.86 + # 10000 0.96 + lattice_field_oversampling = int(lattice_field_oversampling) - # Preallocate star table: - star_table = np.zeros((num_entries, 6), dtype=np.float32) - - # Read magnitude, RA, and Dec from star catalog: - if star_catalog == 'bsc5': - bsc5_data_type = [('ID', np.float32), ('RA1950', np.float64), - ('Dec1950', np.float64), ('type', np.int16), - ('mag', np.int16), ('RA_pm', np.float32), ('Dec_PM', np.float32)] - with open(catalog_file_full_pathname, 'rb') as star_catalog_file: - star_catalog_file.seek(header_length) # skip header - reader = np.fromfile(star_catalog_file, dtype=bsc5_data_type, count=num_entries) - for (i, entry) in enumerate(reader): # star_num in range(num_entries): - mag = entry[4]/100 - if mag <= star_max_magnitude: - ra = entry[1] + entry[5] * (current_year - 1950) - dec = entry[2] + entry[6] * (current_year - 1950) - star_table[i,:] = ([ra, dec, 0, 0, 0, mag]) - elif star_catalog in ('hip_main', 'tyc_main'): - incomplete_entries = 0 - with open(catalog_file_full_pathname, 'r') as star_catalog_file: - reader = csv.reader(star_catalog_file, delimiter='|') - for (i, entry) in enumerate(reader): # star_num in range(num_entries): - # skip this entry if any of the required fields are empty: - if entry[5].isspace() or entry[8].isspace() or entry[9].isspace() or \ - entry[12].isspace() or entry[13].isspace(): - incomplete_entries +=1 - continue - mag = float(entry[5]) - if mag is not None and mag <= star_max_magnitude: - pmRA = np.float(entry[12])/1000/60/60 # convert milliarcseconds per year to degrees per year - ra = np.deg2rad(np.float(entry[8]) + pmRA * (current_year - 1991.25)) - pmDec = np.float(entry[13])/1000/60/60 # convert milliarcseconds per year to degrees per year - dec = np.deg2rad(np.float(entry[9]) + pmDec * (current_year - 1991.25)) - star_table[i,:] = ([ra, dec, 0, 0, 0, mag]) - if incomplete_entries: - self._logger.info('Skipped %i incomplete entries.' % incomplete_entries) + patterns_per_lattice_field = int(patterns_per_lattice_field) + verification_stars_per_fov = int(verification_stars_per_fov) + if star_max_magnitude is not None: + star_max_magnitude = float(star_max_magnitude) + PATTERN_SIZE = 4 + pattern_bins = round(1/4/pattern_max_error) + presort_patterns = bool(presort_patterns) + save_largest_edge = bool(save_largest_edge) + if epoch_proper_motion is None or str(epoch_proper_motion).lower() == 'none': + epoch_proper_motion = None + self._logger.debug('Proper motions will not be considered') + elif isinstance(epoch_proper_motion, Number): + self._logger.debug('Use proper motion epoch as given') + elif str(epoch_proper_motion).lower() == 'now': + epoch_proper_motion = datetime.utcnow().year + self._logger.debug('Proper motion epoch set to now: ' + str(epoch_proper_motion)) + else: + raise ValueError('epoch_proper_motion value %s is forbidden' % epoch_proper_motion) + + star_table, star_catID, epoch_equinox = Tetra3._load_catalog( + star_catalog, + catalog_file_full_pathname, + range_ra, + range_dec, + epoch_proper_motion, + self._logger, + ) + + if star_max_magnitude is None: + # Compute the catalog magnitude cutoff based on the required star density. + + # First, characterize the catalog star brightness distribution. 
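+            # (The histogram peak is a reasonable proxy for the catalog's completeness
+            # limit: star counts per magnitude bin rise towards fainter magnitudes until
+            # the catalog becomes incomplete and the counts fall off again.)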
+ mag_histo_values, mag_histo_edges = np.histogram(star_table[:, 5], bins=100) + index_of_peak = np.argmax(mag_histo_values) + catalog_mag_limit = mag_histo_edges[index_of_peak] + catalog_mag_max = mag_histo_edges[-1] + self._logger.debug('Catalog star counts peak: mag=%.1f' % catalog_mag_limit) + + # How many FOVs are in the entire sky? + num_fovs = num_fields_for_sky(min_fov) + + # The total number of stars needed. + total_stars_needed = num_fovs * verification_stars_per_fov + + # Empirically determined fudge factor. With this, the star_max_magnitude is + # about 0.5 magnitude fainter than the dimmest pattern star. + total_stars_needed *= 0.7 + + cumulative = np.cumsum(mag_histo_values) + mag_index = np.where(cumulative > total_stars_needed)[0] + if mag_index.size == 0: + star_max_magnitude = catalog_mag_max + else: + star_max_magnitude = mag_histo_edges[mag_index[0]] + if star_max_magnitude > catalog_mag_limit: + self._logger.warning('Catalog magnitude limit %.1f is too low to provide %d stars' % + (catalog_mag_limit, total_stars_needed)) - # Remove entries in which RA and Dec are both zero - # (i.e. keep entries in which either RA or Dec is non-zero) - kept = np.logical_or(star_table[:, 0]!=0, star_table[:, 1]!=0) + kept = star_table[:, 5] <= star_max_magnitude star_table = star_table[kept, :] - star_table = star_table[np.argsort(star_table[:, -1]), :] # Sort by brightness + if star_catalog in ('bsc5', 'hip_main'): + star_catID = star_catID[kept] + else: + star_catID = star_catID[kept, :] + num_entries = star_table.shape[0] - self._logger.info('Loaded ' + str(num_entries) + ' stars with magnitude below ' \ - + str(star_max_magnitude) + '.') + self._logger.info('Kept %d stars brighter than magnitude %.1f.' % + (num_entries, star_max_magnitude)) - # Calculate star direction vectors: + # Calculate star direction vectors. 
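+        # (The loop below applies the standard spherical-to-Cartesian conversion: a star
+        # at right ascension ra and declination dec maps to the unit vector
+        # [cos(ra)*cos(dec), sin(ra)*cos(dec), sin(dec)].)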
for i in range(0, num_entries): - vector = np.array([np.cos(star_table[i,0])*np.cos(star_table[i,1]), - np.sin(star_table[i,0])*np.cos(star_table[i,1]), - np.sin(star_table[i,1])]) - star_table[i,2:5] = vector - - # Filter for maximum number of stars in FOV and doubles - keep_for_patterns = [False] * star_table.shape[0] - keep_for_verifying = [False] * star_table.shape[0] - all_star_vectors = star_table[:, 2:5].transpose() - # Keep the first one and skip index 0 in loop - keep_for_patterns[0] = True - keep_for_verifying[0] = True - for star_ind in range(1, star_table.shape[0]): - vector = star_table[star_ind, 2:5] - # Angle to all stars we have kept - angs_patterns = np.dot(vector, all_star_vectors[:, keep_for_patterns]) - angs_verifying = np.dot(vector, all_star_vectors[:, keep_for_verifying]) - # Check double star limit as well as stars-per-fov limit - if star_min_separation is None \ - or all(angs_patterns < np.cos(np.deg2rad(star_min_separation))): - num_stars_in_fov = sum(angs_patterns > np.cos(max_fov/2)) - if num_stars_in_fov < pattern_stars_per_fov: - # Only keep if not too many close by already - keep_for_patterns[star_ind] = True - keep_for_verifying[star_ind] = True - # Secondary stars-per-fov check, if we fail this we will not keep the star at all - if star_min_separation is None \ - or all(angs_verifying < np.cos(np.deg2rad(star_min_separation))): - num_stars_in_fov = sum(angs_verifying > np.cos(max_fov/2)) - if num_stars_in_fov < verification_stars_per_fov: - # Only keep if not too many close by already - keep_for_verifying[star_ind] = True - # Trim down star table and update indexing for pattern stars - star_table = star_table[keep_for_verifying, :] - pattern_stars = (np.cumsum(keep_for_verifying)-1)[keep_for_patterns] - - self._logger.info('For pattern matching with at most ' + str(pattern_stars_per_fov) - + ' stars per FOV and no doubles: ' + str(len(pattern_stars)) + '.') - self._logger.info('For verification with at most ' + str(verification_stars_per_fov) - + ' stars per FOV and no doubles: ' + str(star_table.shape[0]) + '.') - - self._logger.debug('Building temporary hash table for finding pattern neighbours') - temp_coarse_sky_map = {} - temp_bins = 4 - # insert the stars into the hash table - for star_id in pattern_stars: - vector = star_table[star_id, 2:5] - # find which partition the star occupies in the hash table - hash_code = tuple(((vector+1)*temp_bins).astype(np.int)) - # if the partition is empty, create a new list to hold the star - # if the partition already contains stars, add the star to the list - temp_coarse_sky_map[hash_code] = temp_coarse_sky_map.pop(hash_code, []) + [star_id] - - def temp_get_nearby_stars(vector, radius): - """Get nearby from temporary hash table.""" - # create list of nearby stars - nearby_star_ids = [] - # given error of at most radius in each dimension, compute the space of hash codes - hash_code_space = [range(max(low, 0), min(high+1, 2*temp_bins)) for (low, high) - in zip(((vector + 1 - radius) * temp_bins).astype(np.int), - ((vector + 1 + radius) * temp_bins).astype(np.int))] - # iterate over hash code space - for hash_code in itertools.product(*hash_code_space): - # iterate over the stars in the given partition, adding them to - # the nearby stars list if they're within range of the vector - for star_id in temp_coarse_sky_map.get(hash_code, []): - if np.dot(vector, star_table[star_id, 2:5]) > np.cos(radius): - nearby_star_ids.append(star_id) - return nearby_star_ids - - # generate pattern catalog - 
self._logger.info('Generating all possible patterns.') - pattern_list = [] - # initialize pattern, which will contain pattern_size star ids - pattern = [None] * pattern_size - for pattern[0] in pattern_stars: # star_ids_filtered: - vector = star_table[pattern[0], 2:5] - # find which partition the star occupies in the sky hash table - hash_code = tuple(((vector+1)*temp_bins).astype(np.int)) - # remove the star from the sky hash table - temp_coarse_sky_map[hash_code].remove(pattern[0]) - # iterate over all possible patterns containing the removed star - for pattern[1:] in itertools.combinations(temp_get_nearby_stars(vector, max_fov), - pattern_size-1): - # retrieve the vectors of the stars in the pattern - vectors = star_table[pattern, 2:5] - # verify that the pattern fits within the maximum field-of-view - # by checking the distances between every pair of stars in the pattern - if all(np.dot(*star_pair) > np.cos(max_fov) - for star_pair in itertools.combinations(vectors, 2)): - pattern_list.append(pattern.copy()) - - self._logger.info('Found ' + str(len(pattern_list)) + ' patterns. Building catalogue.') - catalog_length = 2 * len(pattern_list) - pattern_catalog = np.zeros((catalog_length, pattern_size), dtype=np.uint16) - for pattern in pattern_list: + vector = np.array([np.cos(star_table[i, 0])*np.cos(star_table[i, 1]), + np.sin(star_table[i, 0])*np.cos(star_table[i, 1]), + np.sin(star_table[i, 1])]) + star_table[i, 2:5] = vector + + # Insert all stars in a KD-tree for fast neighbour lookup + all_star_vectors = star_table[:, 2:5] + vector_kd_tree = KDTree(all_star_vectors) + + # Calculate set of FOV scales to create patterns at + fov_ratio = max_fov/min_fov + def logk(x, k): + return np.log(x) / np.log(k) + fov_divisions = np.ceil(logk(fov_ratio, multiscale_step)).astype(int) + 1 + if fov_ratio < np.sqrt(multiscale_step): + pattern_fovs = [max_fov] + else: + pattern_fovs = np.exp2(np.linspace(np.log2(min_fov), np.log2(max_fov), fov_divisions)) + self._logger.info('Generating patterns at FOV scales: ' + str(np.rad2deg(pattern_fovs))) + + # Theory of operation: + # + # We want our 4-star patterns to satisfy three criteria: be well distributed over the + # sky; favor bright stars; and have size commensurate with the FOV. + # + # Well distributed: we establish this by creating a set of FOV-sized "lattice fields" + # uniformly distributed over the celestial sphere. Within each lattice field we generate a + # fixed number (`patterns_per_lattice_field`, typically 50) of patterns, thus ensuring that + # all parts of the sky have the same density of database patterns. Because a solve-time + # field of view might not line up with a lattice field, a `lattice_field_oversampling` + # parameter (typically 100) is used to increase the number of lattice fields, overlapping + # them. + # + # Favor bright stars: within each lattice field, nearly all (see below) sky catalog stars in + # the lattice field are considered. Working with the brightest stars first, we form the + # desired number (`patterns_per_lattice_field`) of 4-star subsets within the field. + # + # Sized for FOV: in each lattice field, we form patterns using stars within FOV/2 radius of + # the field's center, thus ensuring that the resulting patterns will not be too large for + # the FOV. Because we work with the brightest stars first, most of the time patterns won't + # be too small for the FOV because brighter stars occur less frequently and are thus further + # apart on average. 
+        #
+        # In the previous paragraph we appeal to the power law spatial distribution of stars by
+        # brightness, so usually if we choose the brightest stars in a lattice field, the resulting
+        # patterns won't be tiny (because bright stars are spaced apart on average). However,
+        # clusters of bright stars do occur and if we aren't careful we could end up using up most
+        # of the `patterns_per_lattice_field` budget generating tiny patterns among the brightest
+        # cluster stars.
+        #
+        # Consider M45 (Pleiades). Within its roughly one degree diameter core, it has more than ten
+        # bright stars. 10 choose 4 is 210, so if patterns_per_lattice_field=50, we will generate
+        # all the patterns from the brightest Pleiades stars. If the FOV is 10 degrees, these patterns
+        # will be of limited utility for plate solving because they are all very small relative to
+        # the FOV.
+        #
+        # We address this problem by applying a `pattern_stars_separation` constraint to the sky
+        # catalog stars before choosing a lattice field's pattern stars. In our 10 degree FOV
+        # example, a pattern_stars_separation of 1/2 degree creates an "exclusion zone" around each
+        # of the Pleiades' brightest stars, leaving us with only the 5 or 6 most separated bright
+        # Pleiades stars. 6 choose 4 is just 15, so if `patterns_per_lattice_field` is larger than
+        # this (50 is typical), we'll generate plenty of patterns that include stars other than only
+        # the Pleiades members.
+        #
+        # A similar "cluster buster" step is performed at solve time, eliminating centroids that
+        # are too closely spaced.
+
+        # Set of deduped patterns found, to be populated across all FOVs.
+        pattern_list = set()
+        for pattern_fov in reversed(pattern_fovs):
+            keep_for_patterns_at_fov = np.full(num_entries, False)
+            if fov_divisions == 1:
+                # Single scale database, trim to min_fov, make patterns up to max_fov
+                pattern_stars_separation = separation_for_density(
+                    min_fov, verification_stars_per_fov)
+            else:
+                # Multiscale database, trim and make patterns iteratively at smaller FOVs
+                pattern_stars_separation = separation_for_density(
+                    pattern_fov, verification_stars_per_fov)
+            self._logger.info('At FOV %s separate pattern stars by %.2f deg.' %
+                              (round(np.rad2deg(pattern_fov), 5),
+                               np.rad2deg(pattern_stars_separation)))
+            pattern_stars_dist = _distance_from_angle(pattern_stars_separation)
+
+            # Loop through all stars in database, gather pattern stars for this FOV.
+            for star_ind in range(num_entries):
+                vector = all_star_vectors[star_ind, :]
+                # Check if any kept pattern stars are within the separation.
+                within_pattern_separation = vector_kd_tree.query_ball_point(
+                    vector, pattern_stars_dist)
+                occupied_for_pattern = np.any(keep_for_patterns_at_fov[within_pattern_separation])
+                # If there isn't a pattern star too close, add this to the pattern table.
+                if not occupied_for_pattern:
+                    keep_for_patterns_at_fov[star_ind] = True
+            self._logger.info('Pattern stars at this FOV: %s.' % np.sum(keep_for_patterns_at_fov))
+
+            # Clip out tables of the kept stars.
+            pattern_star_table = star_table[keep_for_patterns_at_fov, :]
+
+            # Insert pattern stars into KD tree for lattice field lookup.
+            pattern_kd_tree = KDTree(pattern_star_table[:, 2:5])
+
+            # Index conversion from pattern star_table to main star_table
+            pattern_index = np.nonzero(keep_for_patterns_at_fov)[0].tolist()
+
+            # To ensure good coverage of patterns for the largest FOV of interest, you can just
+            # specify a somewhat larger `max_fov`.
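+            # (fibonacci_sphere_lattice(n), used below, places n points nearly uniformly
+            # over the unit sphere; each point becomes the centre of one lattice field.
+            # With lattice_field_oversampling ~100, neighbouring fields overlap heavily,
+            # so any image field of view is covered by several lattice fields.)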
+            fov_angle = pattern_fov / 2
+            fov_dist = _distance_from_angle(fov_angle)
+
+            # Enumerate all lattice fields over the celestial sphere.
+            total_field_pattern_stars = 0
+            total_added_patterns = 0
+            total_pattern_avg_mag = 0
+            max_pattern_mag = -1
+            min_stars_per_lattice_field = len(pattern_star_table)  # Exceeds any possible value.
+            num_lattice_fields = 0
+            n = num_fields_for_sky(pattern_fov) * lattice_field_oversampling
+            for lattice_field_center_vector in fibonacci_sphere_lattice(n):
+                # Find all pattern stars within lattice field.
+                field_pattern_stars = pattern_kd_tree.query_ball_point(lattice_field_center_vector, fov_dist)
+                if (range_ra is not None or range_dec is not None) and \
+                        len(field_pattern_stars) < verification_stars_per_fov / 4:
+                    continue  # Partial field due to range_ra and/or range_dec.
+                min_stars_per_lattice_field = min(len(field_pattern_stars), min_stars_per_lattice_field)
+                total_field_pattern_stars += len(field_pattern_stars)
+                num_lattice_fields += 1
+                # Change to main star_table indices.
+                field_pattern_stars = [pattern_index[n] for n in field_pattern_stars]
+                field_pattern_stars.sort()  # Brightness order.
+
+                # Check all possible patterns in overall brightness order until we've accepted
+                # 'patterns_per_lattice_field' patterns.
+                patterns_this_lattice_field = 0
+                for pattern in breadth_first_combinations(field_pattern_stars, PATTERN_SIZE):
+                    len_before = len(pattern_list)
+                    pattern_list.add(tuple(pattern))
+                    if len(pattern_list) > len_before:
+                        total_added_patterns += 1
+                        total_mag = sum(star_table[p, 5] for p in pattern)
+                        total_pattern_avg_mag += total_mag / PATTERN_SIZE
+                        max_pattern_mag = max(star_table[pattern[-1], 5], max_pattern_mag)
+                        if len(pattern_list) % 100000 == 0:
+                            self._logger.info('Generated %s patterns so far.' % len(pattern_list))
+
+                    patterns_this_lattice_field += 1
+                    if patterns_this_lattice_field >= patterns_per_lattice_field:
+                        break
+
+            self._logger.info(
+                'avg/min pattern stars per lattice field %.2f/%d; avg/max pattern mag %.2f/%.2f' %
+                (total_field_pattern_stars / num_lattice_fields,
+                 min_stars_per_lattice_field,
+                 total_pattern_avg_mag / total_added_patterns,
+                 max_pattern_mag))
+
+        pattern_list = list(pattern_list)
+        self._logger.info('Found %s patterns in total.' % len(pattern_list))
+
+        # Don't need this anymore.
+        del pattern_kd_tree
+
+        # Create all pattern hashes by calculating, sorting, and binning edge ratios; then compute
+        # a table index hash from the pattern hash, and store the table index -> pattern mapping.
+        self._logger.info('Start building catalogue.')
+        catalog_length = 3 * len(pattern_list)
+        # Determine type to make sure the biggest index will fit, create pattern catalogue
+        max_index = np.max(np.array(pattern_list))
+        if max_index <= np.iinfo('uint8').max:
+            pattern_catalog = np.zeros((catalog_length, PATTERN_SIZE), dtype=np.uint8)
+        elif max_index <= np.iinfo('uint16').max:
+            pattern_catalog = np.zeros((catalog_length, PATTERN_SIZE), dtype=np.uint16)
+        else:
+            pattern_catalog = np.zeros((catalog_length, PATTERN_SIZE), dtype=np.uint32)
+        self._logger.info('Catalog size %s and type %s.' %
+                          (pattern_catalog.shape, pattern_catalog.dtype))
+
+        if save_largest_edge:
+            pattern_largest_edge = np.zeros(catalog_length, dtype=np.float16)
+            self._logger.info('Storing largest edges as type %s' % pattern_largest_edge.dtype)
+
+        # Gather collision information.
+ pattern_hashes_seen = set() + pattern_hash_collisions = 0 + hash_indices_seen = set() + hash_index_collisions = 0 + table_collisions = 0 + + # Go through each pattern and insert to the catalogue + for (pat_index, pattern) in enumerate(pattern_list): + if pat_index % 100000 == 0 and pat_index > 0: + self._logger.info('Inserting pattern number: ' + str(pat_index)) + # retrieve the vectors of the stars in the pattern - vectors = star_table[pattern, 2:5] - # calculate and sort the edges of the star pattern - edges = np.sort([np.sqrt((np.subtract(*star_pair)**2).sum()) - for star_pair in itertools.combinations(vectors, 2)]) - # extract the largest edge - largest_edge = edges[-1] - # divide the edges by the largest edge to create dimensionless ratios - edge_ratios = edges[:-1] / largest_edge - # convert edge ratio float to hash code by binning - hash_code = tuple((edge_ratios * pattern_bins).astype(np.int)) - hash_index = _key_to_index(hash_code, pattern_bins, pattern_catalog.shape[0]) - # use quadratic probing to find an open space in the pattern catalog to insert - for index in ((hash_index + offset ** 2) % pattern_catalog.shape[0] - for offset in itertools.count()): - # if the current slot is empty, add the pattern - if not pattern_catalog[index][0]: - pattern_catalog[index] = pattern - break + vectors = [star_table[p, 2:5].tolist() for p in pattern] + + edge_angles = [2.0 * math.asin(0.5 * math.dist(vectors[i], vectors[j])) + for i, j in itertools.combinations(range(4), 2)] + edge_angles_sorted = sorted(edge_angles) + largest_angle = edge_angles_sorted[-1] + edge_ratios = [angle / largest_angle for angle in edge_angles_sorted[:-1]] + + # convert edge ratio float to pattern hash by binning + pattern_hash = [int(ratio * pattern_bins) for ratio in edge_ratios] + hash_index = _pattern_hash_to_index(pattern_hash, pattern_bins, catalog_length) + + is_novel_index = False + if EVALUATE_COLLISIONS: + prev_len = len(pattern_hashes_seen) + pattern_hashes_seen.add(tuple(pattern_hash)) + if prev_len == len(pattern_hashes_seen): + pattern_hash_collisions += 1 + else: + prev_len = len(hash_indices_seen) + hash_indices_seen.add(hash_index) + if prev_len == len(hash_indices_seen): + hash_index_collisions += 1 + else: + is_novel_index = True + + if presort_patterns: + # find the centroid, or average position, of the star pattern + pattern_centroid = list(map(lambda a : sum(a) / len(a), zip(*vectors))) + + # calculate each star's radius, or Euclidean distance from the centroid + + # Elements: (distance, index in pattern) + centroid_distances = [ + (sum((x1 - x2) * (x1 - x2) for (x1, x2) in zip(v, pattern_centroid)), index) + for index, v in enumerate(vectors)] + centroid_distances.sort() + # use the radii to uniquely order the pattern, used for future matching + pattern = [pattern[i] for (_, i) in centroid_distances] + + (index, collision) = _insert_at_index(pattern, hash_index, pattern_catalog) + if save_largest_edge: + # Store as milliradian to better use float16 range + pattern_largest_edge[index] = largest_angle*1000 + if is_novel_index and collision: + table_collisions += 1 + self._logger.info('Finished generating database.') self._logger.info('Size of uncompressed star table: %i Bytes.' %star_table.nbytes) self._logger.info('Size of uncompressed pattern catalog: %i Bytes.' 
                          %pattern_catalog.nbytes)
-
+        if EVALUATE_COLLISIONS:
+            self._logger.info('Collisions: pattern hash %s, index %s, table %s' %
+                              (pattern_hash_collisions, hash_index_collisions, table_collisions))
         self._star_table = star_table
+        self._star_kd_tree = vector_kd_tree
+        self._star_catalog_IDs = star_catID
         self._pattern_catalog = pattern_catalog
+        if save_largest_edge:
+            self._pattern_largest_edge = pattern_largest_edge
         self._db_props['pattern_mode'] = 'edge_ratio'
-        self._db_props['pattern_size'] = pattern_size
+        self._db_props['pattern_size'] = PATTERN_SIZE
         self._db_props['pattern_bins'] = pattern_bins
         self._db_props['pattern_max_error'] = pattern_max_error
         self._db_props['max_fov'] = np.rad2deg(max_fov)
-        self._db_props['pattern_stars_per_fov'] = pattern_stars_per_fov
+        self._db_props['min_fov'] = np.rad2deg(min_fov)
+        self._db_props['star_catalog'] = star_catalog
+        self._db_props['epoch_equinox'] = epoch_equinox
+        self._db_props['epoch_proper_motion'] = epoch_proper_motion
+        self._db_props['lattice_field_oversampling'] = lattice_field_oversampling
+        self._db_props['anchor_stars_per_fov'] = lattice_field_oversampling  # legacy
+        self._db_props['pattern_stars_per_fov'] = lattice_field_oversampling  # legacy
+        self._db_props['patterns_per_lattice_field'] = patterns_per_lattice_field
+        self._db_props['patterns_per_anchor_star'] = patterns_per_lattice_field  # legacy
         self._db_props['verification_stars_per_fov'] = verification_stars_per_fov
         self._db_props['star_max_magnitude'] = star_max_magnitude
-        self._db_props['star_min_separation'] = star_min_separation
+        self._db_props['simplify_pattern'] = True  # legacy
+        self._db_props['range_ra'] = range_ra
+        self._db_props['range_dec'] = range_dec
+        self._db_props['presort_patterns'] = presort_patterns
+        self._db_props['num_patterns'] = len(pattern_list)
         self._logger.debug(self._db_props)

         if save_as is not None:
@@ -652,14 +1415,16 @@ def temp_get_nearby_stars(vector, radius):
             self._logger.info('Skipping database file generation.')

     def solve_from_image(self, image, fov_estimate=None, fov_max_error=None,
-                         pattern_checking_stars=6, match_radius=.01, match_threshold=1e-9,
-                         **kwargs):
+                         match_radius=.01, match_threshold=1e-4,
+                         solve_timeout=5000, target_pixel=None, target_sky_coord=None, distortion=0,
+                         return_matches=False, return_visual=False, match_max_error=.002,
+                         pattern_checking_stars=None, **kwargs):
         """Solve for the sky location of an image.

         Star locations (centroids) are found using :meth:`tetra3.get_centroids_from_image` and
-        keyword arguments are passed along to this method. Every combination of the
-        `pattern_checking_stars` (default 6) brightest stars found is checked against the database
-        before giving up.
+        keyword arguments are passed along to this method. Every 4-star combination of the
+        stars found is checked against the database before giving up (or until the `solve_timeout`
+        is reached).

         Example:
         ::

@@ -670,16 +1435,33 @@ def solve_from_image(self, image, fov_estimate=None, fov_max_error=None,
             result = t3.solve_from_image(image, **extract_dict)

         Args:
-            image (numpy.ndarray): The image to solve for, must be convertible to numpy array.
-            fov_estimate (float, optional): Estimated field of view of the image in degrees.
+            image (PIL.Image): The image to solve for, must be convertible to numpy array.
+            fov_estimate (float, optional): Estimated horizontal field of view of the image in
+                degrees.
             fov_max_error (float, optional): Maximum difference in field of view from the estimate
                 allowed for a match in degrees.
-            pattern_checking_stars (int, optional): Number of stars used to create possible
-                patterns to look up in database.
             match_radius (float, optional): Maximum distance to a star to be considered a match
                 as a fraction of the image field of view.
-            match_threshold (float, optional): Maximum allowed mismatch probability to consider
-                a tested pattern a valid match.
+            match_threshold (float, optional): Maximum allowed false-positive probability to accept
+                a tested pattern as a valid match. Default 1e-4. NEW: Corrected for the database size.
+            solve_timeout (float, optional): Timeout in milliseconds after which the solver will
+                give up on matching patterns. Defaults to 5000 (5 seconds).
+            target_pixel (numpy.ndarray, optional): Pixel coordinates to return RA/Dec for in
+                addition to the default (the centre of the image). Size (N,2) where each row is the
+                (y, x) coordinate measured from top left corner of the image. Defaults to None.
+            target_sky_coord (numpy.ndarray, optional): Sky coordinates to return image (y, x) for.
+                Size (N,2) where each row is the (RA, Dec) in degrees. Defaults to None.
+            distortion (float, optional): Set the estimated distortion of the image.
+                Negative distortion is barrel, positive is pincushion. Given as amount of distortion
+                at width/2 from centre. Can set to None to disable distortion calculation entirely.
+                Default 0.
+            return_matches (bool, optional): If set to True, the catalogue entries of the matched
+                stars and their pixel coordinates in the image are returned.
+            return_visual (bool, optional): If set to True, an image is returned that visualises
+                the solution.
+            match_max_error (float, optional): Maximum difference allowed in pattern for a match.
+                If None, uses the 'pattern_max_error' value from the database.
+            pattern_checking_stars: No longer meaningful, ignored.
             **kwargs (optional): Other keyword arguments passed to
                 :meth:`tetra3.get_centroids_from_image`.

@@ -687,234 +1469,857 @@ def solve_from_image(self, image, fov_estimate=None, fov_max_error=None,
             dict: A dictionary with the following keys is returned:
                 - 'RA': Right ascension of centre of image in degrees.
                 - 'Dec': Declination of centre of image in degrees.
-                - 'Roll': Rotation of image relative to north celestial pole.
-                - 'FOV': Calculated field of view of the provided image.
+                - 'Roll': Rotation in degrees of celestial north relative to the image's "up"
+                  direction (towards y=0). Zero when north and up coincide; a positive
+                  roll angle means north is counter-clockwise from image "up".
+                - 'FOV': Calculated horizontal field of view of the provided image.
+                - 'distortion': Calculated distortion of the provided image. Omitted if
+                  the caller's distortion estimate is None.
                 - 'RMSE': RMS residual of matched stars in arcseconds.
+                - 'P90E': 90 percentile matched star residual in arcseconds.
+                - 'MAXE': Maximum matched star residual in arcseconds.
                 - 'Matches': Number of stars in the image matched to the database.
-                - 'Prob': Probability that the solution is a mismatch.
+                - 'Prob': Probability that the solution is a false-positive.
+                - 'epoch_equinox': The celestial RA/Dec equinox reference epoch.
+                - 'epoch_proper_motion': The epoch the database proper motions were propagated to.
                 - 'T_solve': Time spent searching for a match in milliseconds.
                 - 'T_extract': Time spent extracting star centroids in milliseconds.
+                - 'RA_target': Right ascension in degrees of the pixel positions passed in
+                  target_pixel. Not included if target_pixel=None (the default).
+ - 'Dec_target': Declination in degrees of the pixel positions in target_pixel. + Not included if target_pixel=None (the default). + - 'x_target': image x coordinates for the sky positions passed in target_sky_coord. + If a sky position is outside of the field of view, the corresponding x_target + entry will be None. Not included if target_sky_coord=None (the default). + - 'y_target': image y coordinates for the sky positions passed in target_sky_coord. + If a sky position is outside of the field of view, the corresponding y_target + entry will be None. Not included if target_sky_coord=None (the default). + - 'matched_stars': An Mx3 list with the (RA, Dec, magnitude) of the M matched stars + that were used in the solution. RA/Dec in degrees. Not included if + return_matches=False (the default). + - 'matched_centroids': An Mx2 list with the (y, x) pixel coordinates in the image + corresponding to each matched star. Not included if return_matches=False. + - 'matched_catID': The catalogue ID corresponding to each matched star. See + Tetra3.star_catalog_IDs for information on the format. Not included if + return_matches=False. + - 'pattern_centroids': similar to matched_centroids, except just for the pattern + stars. Not included if return_matches=False. + - 'visual': A PIL image with spots for the given centroids in white, the coarse + FOV and distortion estimates in orange, the final FOV and distortion + estimates in green. Also has circles for the catalogue stars in green or + red for successful/unsuccessful match. Not included if return_visual=False. + - 'status': One of: + MATCH_FOUND: solution was obtained + NO_MATCH: no match was found after exhausting all possibilities + TIMEOUT: the 'solve_timeout' was reached before a match could be found + CANCELLED: the solve operation was cancelled before a match could be found + TOO_FEW: the 'image' has too few detected stars to attempt a pattern match + + If unsuccessful in finding a match, None is returned for all keys of the + dictionary except 'T_solve' and 'status', and the optional return keys are missing. - If unsuccsessful in finding a match, None is returned for all keys of the - dictionary except 'T_solve' and 'T_exctract'. 
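+
+        Example, mapping a pixel of interest to sky coordinates (an illustrative sketch;
+        assumes a loaded database, a PIL image of the night sky, and the MATCH_FOUND
+        status constant described above):
+        ::
+
+            result = t3.solve_from_image(image, fov_estimate=12,
+                                         target_pixel=[[480, 640]])
+            if result['status'] == MATCH_FOUND:
+                print(result['RA_target'], result['Dec_target'])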
""" assert self.has_database, 'No database loaded' - image = np.asarray(image) + self._logger.debug('Got solve from image with input: ' + str( + (image, fov_estimate, fov_max_error, match_radius, + match_threshold, solve_timeout, target_pixel, target_sky_coord, distortion, + return_matches, return_visual, match_max_error, kwargs))) + (width, height) = image.size[:2] + self._logger.debug('Image (height, width): ' + str((height, width))) + + # Run star extraction, passing kwargs along + t0_extract = precision_timestamp() + centr_data = get_centroids_from_image(image, **kwargs) + t_extract = (precision_timestamp() - t0_extract)*1000 + # If we get a tuple, need to use only first element and then reassemble at return + if isinstance(centr_data, tuple): + centroids = centr_data[0] + else: + centroids = centr_data + self._logger.debug('Found this many centroids, in time: ' + str((len(centroids), t_extract))) + # Run centroid solver, passing arguments along (could clean up with kwargs handler) + solution = self.solve_from_centroids( + centroids, (height, width), fov_estimate=fov_estimate, fov_max_error=fov_max_error, + match_radius=match_radius, match_threshold=match_threshold, + solve_timeout=solve_timeout, target_pixel=target_pixel, + target_sky_coord=target_sky_coord, distortion=distortion, + return_matches=return_matches, return_visual=return_visual, + match_max_error=match_max_error) + # Add extraction time to results and return + solution['T_extract'] = t_extract + if isinstance(centr_data, tuple): + return (solution,) + centr_data[1:] + else: + return solution + + def solve_from_centroids(self, star_centroids, size, fov_estimate=None, fov_max_error=None, + match_radius=.01, match_threshold=1e-4, + solve_timeout=5000, target_pixel=None, target_sky_coord=None, distortion=0, + return_matches=False, return_visual=False, return_rotation_matrix=False, + match_max_error=.002, pattern_checking_stars=None): + """Solve for the sky location using a list of centroids. + + Use :meth:`tetra3.get_centroids_from_image` or your own centroiding algorithm to + find an array of all the stars in your image and pass this result along with the + resolution of the image to this method. + + Every 4-star combination of the `star_centroids` found is checked against the + database before giving up (or the `solve_timeout` is reached). Since patterns + contain four stars, there will be N choose 4 (potentially a very large number!) + patterns tested against the database, so it is important to specify a meaningful + `solve_timeout`. + + Passing an estimated FOV and error bounds yields solutions much faster that letting tetra3 + figure it out. + + Example: + :: + + # Get centroids from image with custom parameters + centroids = get_centroids_from_image(image, simga=2, filtsize=30) + # Solve from centroids + result = t3.solve_from_centroids(centroids, size=image.size, fov_estimate=13) + + Args: + star_centroids (numpy.ndarray): (N,2) list of centroids, ordered by brightest first. + Each row is the (y, x) position of the star measured from the top left corner. + size (tuple of floats): (height, width) of the centroid coordinate system (i.e. + image resolution). + fov_estimate (float, optional): Estimated horizontal field of view of the image in + degrees. Default None. + fov_max_error (float, optional): Maximum difference in field of view from the estimate + allowed for a match in degrees. Default None. 
+ match_radius (float, optional): Maximum distance to a star to be considered a match + as a fraction of the image field of view. Default 0.01. + match_threshold (float, optional): Maximum allowed false-positive probability to accept + a tested pattern a valid match. Default 1e-4. NEW: Corrected for the database size. + solve_timeout (float, optional): Timeout in milliseconds after which the solver will + give up on matching patterns. Defaults to 5000 (5 seconds). + target_pixel (numpy.ndarray, optional): Pixel coordinates to return RA/Dec for in + addition to the default (the centre of the image). Size (N,2) where each row is the + (y, x) coordinate measured from top left corner of the image. Defaults to None. + target_sky_coord (numpy.ndarray, optional): Sky coordinates to return image (y, x) for. + Size (N,2) where each row is the (RA, Dec) in degrees. Defaults to None. + distortion (float, optional): Set the estimated distortion of the image. + Negative distortion is barrel, positive is pincushion. Given as amount of distortion + at width/2 from centre. Can set to None to disable distortion calculation entirely. + Default 0. + return_matches (bool, optional): If set to True, the catalogue entries of the mached + stars and their pixel coordinates in the image is returned. + return_visual (bool, optional): If set to True, an image is returned that visualises + the solution. + return_rotation_matrix (bool, optional): If True, the 3x3 rotation matrix is returned. + match_max_error (float, optional): Maximum difference allowed in pattern for a match. + If None, uses the 'pattern_max_error' value from the database. + pattern_checking_stars: No longer meaningful, ignored. + + Returns: + dict: A dictionary with the following keys is returned: + - 'RA': Right ascension of centre of image in degrees. + - 'Dec': Declination of centre of image in degrees. + - 'Roll': Rotation in degrees of celestial north relative to image's "up" + direction (towards y=0). Zero when north and up coincide; a positive + roll angle means north is counter-clockwise from image "up". + - 'FOV': Calculated horizontal field of view of the provided image. + - 'distortion': Calculated distortion of the provided image. Omitted if + the caller's distortion estimate is None. + - 'RMSE': RMS residual of matched stars in arcseconds. + - 'P90E': 90 percentile matched star residual in arcseconds. + - 'MAXE': Maximum matched star residual in arcseconds. + - 'Matches': Number of stars in the image matched to the database. + - 'Prob': Probability that the solution is a false-positive. + - 'epoch_equinox': The celestial RA/Dec equinox reference epoch. + - 'epoch_proper_motion': The epoch the database proper motions were propageted to. + - 'T_solve': Time spent searching for a match in milliseconds. + - 'RA_target': Right ascension in degrees of the pixel positions passed in + target_pixel. Not included if target_pixel=None (the default). If a Kx2 array + of target_pixel was passed, this will be a length K list. + - 'Dec_target': Declination in degrees of the pixel positions in target_pixel. + Not included if target_pixel=None (the default). If a Kx2 array + of target_pixel was passed, this will be a length K list. + - 'x_target': image x coordinates for the sky positions passed in target_sky_coord. + If a sky position is outside of the field of view, the corresponding x_target + entry will be None. Not included if target_sky_coord=None (the default). + - 'y_target': image y coordinates for the sky positions passed in target_sky_coord. 
+ If a sky position is outside of the field of view, the corresponding y_target + entry will be None. Not included if target_sky_coord=None (the default). + - 'matched_stars': An Mx3 list with the (RA, Dec, magnitude) of the M matched stars + that were used in the solution. RA/Dec in degrees. Not included if + return_matches=False (the default). + - 'matched_centroids': An Mx2 list with the (y, x) pixel coordinates in the image + corresponding to each matched star. Not included if return_matches=False. + - 'matched_catID': The catalogue ID corresponding to each matched star. See + Tetra3.star_catalog_IDs for information on the format. Not included if + return_matches=False. + - 'pattern_centroids': similar to matched_centroids, except just for the pattern + stars. Not included if return_matches=False. + - 'visual': A PIL image with spots for the given centroids in white, the coarse + FOV and distortion estimates in orange, the final FOV and distortion + estimates in green. Also has circles for the catalogue stars in green or + red for successful/unsuccessful match. Not included if return_visual=False. + - 'rotation_matrix' 3x3 rotation matrix. Not included if + return_rotation_matrix=False. + - 'status': One of: + MATCH_FOUND: solution was obtained + NO_MATCH: no match was found after exhausting all possibilities + TIMEOUT: the 'solve_timeout' was reached before a match could be found + CANCELLED: the solve operation was cancelled before a match could be found + TOO_FEW: the 'image' has too few detected stars to attempt a pattern match + + If unsuccessful in finding a match, None is returned for all keys of the + dictionary except 'T_solve' and 'status', and the optional return keys are missing. + + """ + assert self.has_database, 'No database loaded' + t0_solve = precision_timestamp() + self._logger.debug('Got solve from centroids with input: ' + + str((len(star_centroids), size, fov_estimate, fov_max_error, + match_radius, match_threshold, + solve_timeout, target_pixel, target_sky_coord, distortion, + return_matches, return_visual, match_max_error))) if fov_estimate is None: - fov_estimate = np.deg2rad(self._db_props['max_fov']) + # If no FOV given at all, guess middle of the range for a start + fov_initial = np.deg2rad((self._db_props['max_fov'] + self._db_props['min_fov'])/2) else: fov_estimate = np.deg2rad(float(fov_estimate)) + fov_initial = fov_estimate if fov_max_error is not None: fov_max_error = np.deg2rad(float(fov_max_error)) match_radius = float(match_radius) - match_threshold = float(match_threshold) - pattern_checking_stars = int(pattern_checking_stars) + match_threshold = float(match_threshold) / self.num_patterns + self._logger.debug('Set threshold to: ' + str(match_threshold) + ', have ' + + str(self.num_patterns) + ' patterns.') + if solve_timeout is not None: + # Convert to seconds to match timestamp + solve_timeout = float(solve_timeout) / 1000 + if target_pixel is not None: + target_pixel = np.array(target_pixel) + if target_pixel.ndim == 1: + # Make shape (2,) array to (1,2), to match (N,2) pattern + target_pixel = target_pixel[None, :] + if target_sky_coord is not None: + target_sky_coord = np.array(target_sky_coord) + if target_sky_coord.ndim == 1: + # Make shape (2,) array to (1,2), to match (N,2) pattern + target_sky_coord = target_sky_coord[None, :] + return_matches = bool(return_matches) # extract height (y) and width (x) of image - height, width = image.shape[0:2] + (height, width) = size[:2] # Extract relevant database properties - num_stars = 
self._db_props['verification_stars_per_fov'] + verification_stars_per_fov = self._db_props['verification_stars_per_fov'] p_size = self._db_props['pattern_size'] p_bins = self._db_props['pattern_bins'] - p_max_err = self._db_props['pattern_max_error'] - # Run star extraction, passing kwargs along - t0_extract = precision_timestamp() - star_centroids = get_centroids_from_image(image, max_returned=num_stars, **kwargs) - t_extract = (precision_timestamp() - t0_extract)*1000 - - def compute_vectors(star_centroids, fov): - """Get unit vectors from star centroids (pinhole camera).""" - # compute list of (i,j,k) vectors given list of (y,x) star centroids and - # an estimate of the image's field-of-view in the x dimension - # by applying the pinhole camera equations - center_x = width / 2. - center_y = height / 2. - scale_factor = np.tan(fov / 2) / center_x - star_vectors = [] - for (star_y, star_x) in star_centroids: - j_over_i = (center_x - star_x) * scale_factor - k_over_i = (center_y - star_y) * scale_factor - i = 1. / np.sqrt(1 + j_over_i**2 + k_over_i**2) - j = j_over_i * i - k = k_over_i * i - vec = np.array([i, j, k]) - star_vectors.append(vec) - return star_vectors + if match_max_error is None or match_max_error < self._db_props['pattern_max_error']: + match_max_error = self._db_props['pattern_max_error'] + p_max_err = match_max_error + presorted = self._db_props['presort_patterns'] + # Indices to extract from dot product matrix (above diagonal) + upper_tri_index = np.triu_indices(p_size, 1) + + num_centroids = len(star_centroids) + image_centroids = np.asarray(star_centroids) + if num_centroids < p_size: + return {'RA': None, 'Dec': None, 'Roll': None, 'FOV': None, 'distortion': None, + 'RMSE': None, 'P90E': None, 'MAXE': None, 'Matches': None, 'Prob': None, + 'epoch_equinox': None, 'epoch_proper_motion': None, 'T_solve': 0, + 'status': TOO_FEW} + + # Apply the same "cluster buster" thinning strategy as is used in database + # construction. + pattern_stars_separation_pixels = width * separation_for_density( + fov_initial, verification_stars_per_fov) / fov_initial + keep_for_patterns = np.full(num_centroids, False) + centroids_kd_tree = KDTree(image_centroids) + for ind in range(num_centroids): + centroid = image_centroids[ind, :] + within_separation = centroids_kd_tree.query_ball_point( + centroid, pattern_stars_separation_pixels) + occupied = np.any(keep_for_patterns[within_separation]) + # If there isn't a pattern star too close, add this to the pattern table. 
+ if not occupied: + keep_for_patterns[ind] = True + pattern_centroids_inds = np.nonzero(keep_for_patterns)[0] + num_pattern_centroids = len(pattern_centroids_inds) + if num_pattern_centroids < num_centroids: + self._logger.debug('Trimmed %d pattern centroids to %d' % + (num_centroids, num_pattern_centroids)) + + if num_centroids > verification_stars_per_fov: + image_centroids = image_centroids[:verification_stars_per_fov, :] + self._logger.debug('Trimmed %d match centroids to %d' % (num_centroids, len(image_centroids))) + num_centroids = len(image_centroids) + + if isinstance(distortion, (list, tuple)): + self._logger.warning('Tuple distortion %s no longer supported, ignoring' % distortion) + distortion = None + elif distortion is not None and not isinstance(distortion, Number): + self._logger.warning('Non-numeric distortion %s given, ignoring' % distortion) + distortion = None + + if distortion is None: + image_centroids_undist = image_centroids + else: + # If caller-estimated distortion, undistort centroids, then proceed as normal + image_centroids_undist = _undistort_centroids( + image_centroids, (height, width), k=distortion) + self._logger.debug('Undistorted centroids with k=%d' % distortion) + + # Compute star vectors using an estimate for the field-of-view in the x dimension + image_centroids_vectors = _compute_vectors( + image_centroids_undist, (height, width), fov_initial) + + # Find the possible range of edge ratio patterns these four image centroids + # could correspond to. + pattlen = int(np.math.factorial(p_size) / 2 / np.math.factorial(p_size - 2) - 1) + image_pattern_edge_ratio_min = np.ones(pattlen) + image_pattern_edge_ratio_max = np.zeros(pattlen) + + catalog_lookup_count = 0 + catalog_eval_count = 0 + image_patterns_evaluated = 0 + search_space_explored = 0 + + # Try all `p_size` star combinations chosen from the image centroids, brightest first. + self._logger.debug('Checking up to %d image patterns from %d pattern centroids.' % + (math.comb(num_pattern_centroids, p_size), num_pattern_centroids)) + status = NO_MATCH + for image_pattern_indices in breadth_first_combinations(pattern_centroids_inds, p_size): + # Check if timeout has elapsed, then we must give up + if solve_timeout is not None: + elapsed_time = precision_timestamp() - t0_solve + if elapsed_time > solve_timeout: + self._logger.debug('Timeout reached after: %.2f sec.' % elapsed_time) + status = TIMEOUT + break + if self._cancelled: + elapsed_time = precision_timestamp() - t0_solve + self._logger.debug('Cancelled after: %.3f sec.' 
% elapsed_time) + status = CANCELLED + self._cancelled = False + break - t0_solve = precision_timestamp() - for image_centroids in _generate_patterns_from_centroids( - star_centroids[:pattern_checking_stars], p_size): - # compute star vectors using an estimate for the field-of-view in the x dimension - pattern_star_vectors = compute_vectors(image_centroids, fov_estimate) - # calculate and sort the edges of the star pattern - pattern_edges = np.sort([norm(np.subtract( - *star_pair)) for star_pair in itertools.combinations(pattern_star_vectors, 2)]) - # extract the largest edge - pattern_largest_edge = pattern_edges[-1] - # divide the pattern's edges by the largest edge to create dimensionless ratios - pattern_edge_ratios = pattern_edges[:-1] / pattern_largest_edge - # Pssible hash codes to look up - hash_code_space = [range(max(low, 0), min(high+1, p_bins)) for (low, high) - in zip(((pattern_edge_ratios - p_max_err) * p_bins).astype(np.int), - ((pattern_edge_ratios + p_max_err) * p_bins).astype(np.int))] - # iterate over hash code space, only looking up non-duplicate codes - for hash_code in set(tuple(sorted(code)) - for code in itertools.product(*hash_code_space)): - hash_code = tuple(hash_code) - hash_index = _key_to_index(hash_code, p_bins, self.pattern_catalog.shape[0]) - matches = _get_at_index(hash_index, self.pattern_catalog) - if len(matches) == 0: + # Set largest distance to None, this is cached to avoid recalculating in future FOV estimation. + pattern_largest_distance = None + + image_pattern_vectors = image_centroids_vectors[image_pattern_indices, :] + # Calculate what the edge ratios are and broaden by p_max_err tolerance + edge_angles_sorted = np.sort(_angle_from_distance(pdist(image_pattern_vectors))) + image_pattern_largest_edge = edge_angles_sorted[-1] + image_pattern = edge_angles_sorted[:-1] / image_pattern_largest_edge + image_pattern_edge_ratio_min = image_pattern - p_max_err + image_pattern_edge_ratio_max = image_pattern + p_max_err + image_pattern_hash = (image_pattern*p_bins).astype(int) + + image_patterns_evaluated += 1 + + # Possible range of pattern hashes we need to look up + pattern_hash_space_min = np.maximum(0, image_pattern_edge_ratio_min*p_bins).astype(int) + pattern_hash_space_max = np.minimum(p_bins, image_pattern_edge_ratio_max*p_bins).astype(int) + # Make a list of the low/high values in each binned edge ratio position. + pattern_hash_range = list(range(low, high + 1) for (low, high) in zip(pattern_hash_space_min, + pattern_hash_space_max)) + def dist(pattern_hash): + return sum((a-b)*(a-b) for (a, b) in zip(pattern_hash, image_pattern_hash)) + + # Make a list of all pattern hash values to explore; tag each with its distance from + # 'image_pattern_hash' for sorting, so the first pattern hash values we try are the + # ones closest to what we measured in the image to be solved. + pattern_hash_list = list((dist(code), code) for code in itertools.product(*pattern_hash_range)) + pattern_hash_list.sort() + + # Iterate over pattern hash values, starting from 'image_pattern_hash' and working + # our way outward. + for (_, pattern_hash) in pattern_hash_list: + search_space_explored += 1 + # Calculate corresponding hash index. 
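An aside before the hash-index lookup that follows: the nearest-first ordering built above (candidate pattern hashes sorted by squared distance from the measured hash) is easy to reproduce on its own. A sketch under the same binning scheme; `neighbour_hashes` is a hypothetical name, not the patch's API:

```python
import itertools
import numpy as np

def neighbour_hashes(edge_ratios, p_bins, max_err):
    """Enumerate candidate pattern hashes, nearest to the measured hash first.
    edge_ratios are the dimensionless sorted edge ratios in [0, 1]."""
    ratios = np.asarray(edge_ratios)
    measured = (ratios * p_bins).astype(int)
    lo = np.maximum(0, (ratios - max_err) * p_bins).astype(int)
    hi = np.minimum(p_bins, (ratios + max_err) * p_bins).astype(int)
    ranges = [range(l, h + 1) for (l, h) in zip(lo, hi)]
    # Sort by squared distance so the most plausible bins are probed first.
    def dist(code):
        return sum((a - b) ** 2 for (a, b) in zip(code, measured))
    return sorted(itertools.product(*ranges), key=dist)

print(neighbour_hashes([0.31, 0.62, 0.93], p_bins=50, max_err=0.02)[:3])
# The measured hash (15, 31, 46) comes first, then its nearest neighbours.
```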
+ hash_index = _pattern_hash_to_index(pattern_hash, p_bins, self.pattern_catalog.shape[0]) + + (catalog_pattern_edges, all_catalog_pattern_vectors) = \ + self._get_all_patterns_for_index( + hash_index, upper_tri_index, image_pattern_largest_edge, fov_estimate, fov_max_error) + if catalog_pattern_edges is None: continue - - for match_row in matches: - # retrieve the vectors of the stars in the catalog pattern - catalog_vectors = self.star_table[match_row, 2:5] - # calculate and sort the edges of the star pattern - catalog_edges = np.sort([norm(np.subtract(*star_pair)) for star_pair - in itertools.combinations(catalog_vectors, 2)]) - # extract the largest edge - catalog_largest_edge = catalog_edges[-1] - # divide the edges by the largest edge to create dimensionless ratios - catalog_edge_ratios = catalog_edges[:-1] / catalog_largest_edge - # check if match is within the given maximum allowable error - # note that this also filters out star patterns from colliding bins - if any([abs(val) > p_max_err for val in (catalog_edge_ratios - - pattern_edge_ratios)]): - continue - # compute the actual field-of-view using least squares optimization - # compute the catalog pattern's edges for error estimation - catalog_edges = np.append(catalog_edge_ratios * catalog_largest_edge, - catalog_largest_edge) - # helper function that calculates a list of errors in pattern edge lengths - # with the catalog edge lengths for a given fov - - def fov_to_error(fov): - # recalculate the pattern's star vectors given the new fov - pattern_star_vectors = compute_vectors( - image_centroids, fov) - # recalculate the pattern's edge lengths - pattern_edges = np.sort([norm(np.subtract(*star_pair)) for star_pair - in itertools.combinations(pattern_star_vectors, - 2)]) - # return a list of errors, one for each edge - return catalog_edges - pattern_edges - # find the fov that minimizes the squared error, starting with the estimate - fov = scipy.optimize.leastsq(fov_to_error, fov_estimate)[0][0] + catalog_lookup_count += len(catalog_pattern_edges) + + all_catalog_largest_edges = catalog_pattern_edges[:, -1] + all_catalog_edge_ratios = catalog_pattern_edges[:, :-1] / all_catalog_largest_edges[:, None] + + # Compare catalogue edge ratios to the min/max range from the image pattern. 
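The windowing test announced by the comment above is a fully vectorised comparison: a catalogue pattern survives only if every one of its edge ratios falls inside the broadened window measured from the image. A toy version with invented numbers; the hunk resumes below with the real `np.argwhere` form:

```python
import numpy as np

def patterns_in_window(catalog_edge_ratios, ratio_min, ratio_max):
    """Indices of catalogue patterns whose edge ratios all lie strictly
    inside the per-ratio (min, max) window measured from the image."""
    inside = np.all((catalog_edge_ratios > ratio_min) &
                    (catalog_edge_ratios < ratio_max), axis=1)
    return np.flatnonzero(inside)

cat = np.array([[0.30, 0.61, 0.92],
                [0.35, 0.66, 0.97],
                [0.31, 0.62, 0.93]])
print(patterns_in_window(cat,
                         np.array([0.29, 0.60, 0.91]),
                         np.array([0.33, 0.64, 0.95])))  # -> [0 2]
```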
+ valid_patterns = np.argwhere(np.all(np.logical_and( + image_pattern_edge_ratio_min < all_catalog_edge_ratios, + image_pattern_edge_ratio_max > all_catalog_edge_ratios), axis=1)).flatten() + + # Go through each matching pattern and calculate further + for index in valid_patterns: + catalog_eval_count += 1 + + # Estimate coarse FOV from the pattern + catalog_largest_edge = all_catalog_largest_edges[index] + if fov_estimate is not None: + # Can quickly correct FOV by scaling given estimate + fov = catalog_largest_edge / image_pattern_largest_edge * fov_initial + else: + # Use camera projection to calculate coarse fov + # The FOV estimate will be the same for each attempt with this pattern + # so we can cache the value by checking if we have already set it + if pattern_largest_distance is None: + pattern_largest_distance = np.max( + pdist(image_centroids_undist[image_pattern_indices, :])) + f = pattern_largest_distance / 2 / np.tan(catalog_largest_edge/2) + fov = 2*np.arctan(width/2/f) # If the FOV is incorrect we can skip this immediately - if fov_max_error is not None and abs(fov - fov_estimate) > fov_max_error: + if fov_estimate is not None and fov_max_error is not None \ + and abs(fov - fov_estimate) > fov_max_error: continue # Recalculate vectors and uniquely sort them by distance from centroid - pattern_star_vectors = compute_vectors(image_centroids, fov) + image_pattern_vectors = _compute_vectors( + image_centroids_undist[image_pattern_indices, :], (height, width), fov) # find the centroid, or average position, of the star pattern - pattern_centroid = np.mean(pattern_star_vectors, axis=0) + pattern_centroid = np.mean(image_pattern_vectors, axis=0) # calculate each star's radius, or Euclidean distance from the centroid - pattern_radii = [norm(star_vector - pattern_centroid) - for star_vector in pattern_star_vectors] + pattern_radii = cdist(image_pattern_vectors, pattern_centroid[None, :]).flatten() # use the radii to uniquely order the pattern's star vectors so they can be # matched with the catalog vectors - pattern_sorted_vectors = np.array(pattern_star_vectors)[ - np.argsort(pattern_radii)] - # find the centroid, or average position, of the star pattern - catalog_centroid = np.mean(catalog_vectors, axis=0) - # calculate each star's radius, or Euclidean distance from the centroid - catalog_radii = [norm(vector - catalog_centroid) for vector in catalog_vectors] - # use the radii to uniquely order the catalog vectors - catalog_sorted_vectors = catalog_vectors[np.argsort(catalog_radii)] - - # calculate the least-squares rotation matrix from catalog to image frame - def find_rotation_matrix(image_vectors, catalog_vectors): - # find the covariance matrix H between the image and catalog vectors - H = np.sum([np.dot(image_vectors[i].reshape((3, 1)), - catalog_vectors[i].reshape((1, 3))) - for i in range(len(image_vectors))], axis=0) - # use singular value decomposition to find the rotation matrix - (U, S, V) = np.linalg.svd(H) - rotation_matrix = np.dot(U, V) - # correct reflection matrix if determinant is -1 instead of 1 - # by flipping the sign of the third column of the rotation matrix - rotation_matrix[:, 2] *= np.linalg.det(rotation_matrix) - return rotation_matrix + image_pattern_vectors = np.array(image_pattern_vectors)[np.argsort(pattern_radii)] + + # Now get pattern vectors from catalogue, and sort if necessary + catalog_pattern_vectors = all_catalog_pattern_vectors[index, :] + if not presorted: + # find the centroid, or average position, of the star pattern + catalog_centroid = 
np.mean(catalog_pattern_vectors, axis=0)
+ # calculate each star's radius, or Euclidean distance from the centroid
+ catalog_radii = cdist(catalog_pattern_vectors, catalog_centroid[None, :]).flatten()
+ # use the radii to uniquely order the catalog vectors
+ catalog_pattern_vectors = catalog_pattern_vectors[np.argsort(catalog_radii)]
# Use the pattern match to find an estimate for the image's rotation matrix
- rotation_matrix = find_rotation_matrix(pattern_sorted_vectors,
- catalog_sorted_vectors)
- # calculate all star vectors using the new field-of-view
- all_star_vectors = compute_vectors(star_centroids, fov)
- rotated_star_vectors = np.array([np.dot(rotation_matrix.T, star_vector)
- for star_vector in all_star_vectors])
- # Find all star vectors inside the (diagonal) field of view for matching
+ rotation_matrix = _find_rotation_matrix(image_pattern_vectors,
+ catalog_pattern_vectors)
+
+ # Find all star vectors inside the (diagonal) field of view for matching, in
+ # catalog brightness order.
image_center_vector = rotation_matrix[0, :]
fov_diagonal_rad = fov * np.sqrt(width**2 + height**2) / width
- nearby_star_vectors = self.star_table[
- self._get_nearby_stars(image_center_vector, fov_diagonal_rad/2), 2:5]
- # Match the nearby star vectors to the proposed measured star vectors
- match_tuples = []
- for ind, measured_vec in enumerate(rotated_star_vectors):
- within_match_radius = (np.dot(measured_vec.reshape((1, 3)),
- nearby_star_vectors.transpose())
- > np.cos(match_radius * fov)).flatten()
- if sum(within_match_radius) == 1: # If exactly one matching star:
- match_ind = within_match_radius.nonzero()[0][0]
- match_tuples.append((all_star_vectors[ind],
- nearby_star_vectors[match_ind]))
- # Statistical reasoning for probability that current match is incorrect:
- num_extracted_stars = len(all_star_vectors)
- num_nearby_catalog_stars = len(nearby_star_vectors)
- num_star_matches = len(match_tuples)
- # Probability that a single star is a mismatch
- prob_single_star_mismatch = \
- 1 - (1 - num_nearby_catalog_stars * match_radius**2)
- # Two matches can always be made using the degrees of freedom of the pattern
- prob_mismatch = scipy.stats.binom.cdf(num_extracted_stars -
- (num_star_matches - 2),
+ nearby_star_inds = self._get_nearby_stars(image_center_vector, fov_diagonal_rad/2)
+ nearby_star_vectors = self.star_table[nearby_star_inds, 2:5]
+
+ # Derotate nearby stars and get their (undistorted) centroids using coarse fov
+ nearby_star_vectors_derot = np.dot(rotation_matrix, nearby_star_vectors.T).T
+ (nearby_star_centroids, kept) = _compute_centroids(nearby_star_vectors_derot,
+ (height, width), fov)
+ nearby_star_centroids = nearby_star_centroids[kept, :]
+ nearby_star_vectors = nearby_star_vectors[kept, :]
+ nearby_star_inds = nearby_star_inds[kept]
+ # Only keep as many nearby stars as the image centroids. The 2x "fudge factor"
+ # is because image centroid brightness rankings might not match the nearby star
+ # catalog brightness rankings, so keeping some extra nearby stars helps ensure
+ # more matches.
+ nearby_star_centroids = nearby_star_centroids[:2*num_centroids]
+ nearby_star_vectors = nearby_star_vectors[:2*num_centroids]
+ nearby_star_inds = nearby_star_inds[:2*num_centroids]
+
+ # Match the image centroids to the nearby star centroids. 
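The `_find_rotation_matrix` call above replaces the inline `find_rotation_matrix` helper that this patch deletes; both are the standard Kabsch/SVD solution for the least-squares rotation between matched sets of unit vectors. Before the centroid matching continues below, here is that helper restated standalone with a round-trip check (the test rotation is invented):

```python
import numpy as np

def find_rotation_matrix(image_vectors, catalog_vectors):
    """Least-squares rotation R with image_i ~= R @ catalog_i (Kabsch/SVD)."""
    H = image_vectors.T @ catalog_vectors   # 3x3 covariance of the matched pairs
    U, _, Vt = np.linalg.svd(H)
    R = U @ Vt
    R[:, 2] *= np.linalg.det(R)             # flip a column if R is a reflection
    return R

rng = np.random.default_rng(1)
v = rng.normal(size=(8, 3))
v /= np.linalg.norm(v, axis=1, keepdims=True)   # "catalog" unit vectors
a = np.deg2rad(20)
Q = np.array([[np.cos(a), -np.sin(a), 0.],
              [np.sin(a),  np.cos(a), 0.],
              [0., 0., 1.]])
w = v @ Q.T                                     # "image" vectors: rotated copies
print(np.allclose(find_rotation_matrix(w, v), Q))  # True
```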
+ matched_stars = _find_centroid_matches(
+ image_centroids_undist, nearby_star_centroids, width*match_radius)
+ num_extracted_stars = num_centroids
+ num_nearby_catalog_stars = len(nearby_star_centroids)
+ num_star_matches = len(matched_stars)
+ self._logger.debug("Number of nearby stars: %d, total matched: %d" \
+ % (num_nearby_catalog_stars, num_star_matches))
+
+ # Probability that a single star is a mismatch (fraction of FOV area
+ # that is stars)
+ prob_single_star_mismatch = num_nearby_catalog_stars * match_radius**2
+ # Probability that this rotation matrix's set of matches happens randomly;
+ # we subtract two degrees of freedom
+ prob_mismatch = scipy.stats.binom.cdf(num_extracted_stars - (num_star_matches - 2),
num_extracted_stars,
1 - prob_single_star_mismatch)
- if prob_mismatch < match_threshold:
- # Solved in this time
- t_solve = (precision_timestamp() - t0_solve)*1000
- # diplay mismatch probability in scientific notation
- self._logger.debug("NEW P: %.4g" % prob_mismatch)
- # if a match has been found, recompute rotation with all matched vectors
- rotation_matrix = find_rotation_matrix(*zip(*match_tuples))
- # Residuals calculation
- measured_vs_catalog = [(np.dot(rotation_matrix.T, pair[0]), pair[1])
- for pair in match_tuples]
- angles = np.arcsin([norm(np.cross(m, c)) / norm(m) / norm(c)
- for (m, c) in measured_vs_catalog])
- residual = np.rad2deg(np.sqrt(np.mean(angles**2))) * 3600
- # extract right ascension, declination, and roll from rotation matrix
- ra = np.rad2deg(np.arctan2(rotation_matrix[0, 1],
- rotation_matrix[0, 0])) % 360
- dec = np.rad2deg(np.arctan2(rotation_matrix[0, 2],
- norm(rotation_matrix[1:3, 2])))
- roll = np.rad2deg(np.arctan2(rotation_matrix[1, 2],
- rotation_matrix[2, 2])) % 360
- self._logger.debug("RA: %03.8f" % ra + ' deg')
- self._logger.debug("DEC: %03.8f" % dec + ' deg')
- self._logger.debug("ROLL: %03.8f" % roll + ' deg')
- self._logger.debug("FOV: %03.8f" % np.rad2deg(fov) + ' deg')
- self._logger.debug('MATCH: %i' % len(match_tuples) + ' stars')
- self._logger.debug('SOLVE: %.2f' % round(t_solve, 2) + ' ms')
- self._logger.debug('RESID: %.2f' % residual + ' asec')
- return {'RA': ra, 'Dec': dec, 'Roll': roll, 'FOV': np.rad2deg(fov),
- 'RMSE': residual, 'Matches': len(match_tuples),
- 'Prob': prob_mismatch, 'T_solve': t_solve, 'T_extract': t_extract}
+ self._logger.debug("Mismatch probability = %.2e, at FOV = %.5fdeg" \
+ % (prob_mismatch, np.rad2deg(fov)))
+ if prob_mismatch >= match_threshold:
+ continue
+
+ # display mismatch probability in scientific notation
+ self._logger.debug("MATCH ACCEPTED")
+ self._logger.debug("Prob: %.4g, corr: %.4g"
+ % (prob_mismatch, prob_mismatch*self.num_patterns))
+
+ # Get the vectors for all matches in the image using coarse fov
+ matched_image_centroids_undist = image_centroids_undist[matched_stars[:, 0], :]
+ matched_image_vectors = _compute_vectors(matched_image_centroids_undist,
+ (height, width), fov)
+ matched_catalog_vectors = nearby_star_vectors[matched_stars[:, 1], :]
+ # Recompute rotation matrix for more accuracy
+ rotation_matrix = _find_rotation_matrix(matched_image_vectors, matched_catalog_vectors)
+ # extract right ascension, declination, and roll from rotation matrix
+ ra = np.rad2deg(np.arctan2(rotation_matrix[0, 1],
+ rotation_matrix[0, 0])) % 360
+ dec = np.rad2deg(np.arctan2(rotation_matrix[0, 2],
+ norm(rotation_matrix[1:3, 2])))
+ roll = np.rad2deg(np.arctan2(rotation_matrix[1, 2],
+ rotation_matrix[2, 2])) % 360
+
+ if distortion is None:
+ # Compare mutual angles in 
catalogue to those with current + # FOV estimate in order to scale accurately for fine FOV + angles_camera = _angle_from_distance(pdist(matched_image_vectors)) + angles_catalogue = _angle_from_distance(pdist(matched_catalog_vectors)) + fov *= np.mean(angles_catalogue / angles_camera) + k = None + else: + # Accurately calculate the FOV and distortion by looking at the angle from boresight + # on all matched catalogue vectors and all matched image centroids + matched_catalog_vectors_derot = np.dot(rotation_matrix, matched_catalog_vectors.T).T + tangent_matched_catalog_vectors = norm( + matched_catalog_vectors_derot[:, 1:], axis=1) \ + /matched_catalog_vectors_derot[:, 0] + # Get the (distorted) pixel distance from image centre for all matches + # (scaled relative to width/2) + radius_matched_image_centroids = norm(matched_image_centroids_undist + - [height/2, width/2], axis=1)/width*2 + # Solve system of equations in RMS sense for focal length f and distortion k + # where f is focal length in units of image width/2 + # and k is distortion at width/2 (negative is barrel) + # undistorted = distorted*(1 - k*(distorted*2/width)^2) + A = np.hstack((tangent_matched_catalog_vectors[:, None], + radius_matched_image_centroids[:, None]**3)) + b = radius_matched_image_centroids[:, None] + (f, k) = lstsq(A, b, rcond=None)[0].flatten() + # Correct focal length to be at horizontal FOV + f = f/(1 - k) + self._logger.debug('Calculated focal length to %.2f and distortion to %.3f' % (f, k)) + # Calculate (horizontal) true field of view + fov = 2*np.arctan(1/f) + # Re-undistort centroids for final calculations + image_centroids_undist = _undistort_centroids(image_centroids, (height, width), k) + matched_image_centroids_undist = image_centroids_undist[matched_stars[:, 0], :] + + # Get vectors + final_match_vectors = _compute_vectors( + matched_image_centroids_undist, (height, width), fov) + # Rotate to the sky + final_match_vectors = np.dot(rotation_matrix.T, final_match_vectors.T).T + + # Calculate residual angles with more accurate formula + distance = norm(final_match_vectors - matched_catalog_vectors, axis=1) + distance.sort() + p90_index = int(0.9 * (len(distance)-1)) + p90_err_angle = np.rad2deg(_angle_from_distance(distance[p90_index])) * 3600 + max_err_angle = np.rad2deg(_angle_from_distance(distance[-1])) * 3600 + angle = _angle_from_distance(distance) + rms_err_angle = np.rad2deg(np.sqrt(np.mean(angle**2))) * 3600 + + # Solved in this time + t_solve = (precision_timestamp() - t0_solve)*1000 + solution_dict = {'RA': ra, 'Dec': dec, + 'Roll': roll, + 'FOV': np.rad2deg(fov), + 'distortion': k, + 'RMSE': rms_err_angle, + 'P90E': p90_err_angle, + 'MAXE': max_err_angle, + 'Matches': num_star_matches, + 'Prob': prob_mismatch*self.num_patterns, + 'epoch_equinox': self._db_props['epoch_equinox'], + 'epoch_proper_motion': self._db_props['epoch_proper_motion'], + 'T_solve': t_solve, + 'status': MATCH_FOUND} + + # If we were given target pixel(s), calculate their ra/dec + if target_pixel is not None: + self._logger.debug('Calculate RA/Dec for targets: %s' % target_pixel) + # Calculate the vector in the sky of the target pixel(s) + if k is not None: + target_pixel = _undistort_centroids(target_pixel, (height, width), k) + target_vector = _compute_vectors( + target_pixel, (height, width), fov) + rotated_target_vector = np.dot(rotation_matrix.T, target_vector.T).T + # Calculate and add RA/Dec to solution + target_ra = np.rad2deg(np.arctan2(rotated_target_vector[:, 1], + rotated_target_vector[:, 0])) % 360 + 
target_dec = 90 - np.rad2deg(
+ np.arccos(rotated_target_vector[:,2]))
+
+ if target_ra.shape[0] > 1:
+ solution_dict['RA_target'] = target_ra.tolist()
+ solution_dict['Dec_target'] = target_dec.tolist()
+ else:
+ solution_dict['RA_target'] = target_ra[0]
+ solution_dict['Dec_target'] = target_dec[0]
+
+
+ # If we were given target sky coord(s), calculate their image x/y if
+ # within FOV.
+ if target_sky_coord is not None:
+ self._logger.debug('Calculate y/x for sky targets: %s' % target_sky_coord)
+ target_sky_vectors = []
+ for tsc in target_sky_coord:
+ ra = np.deg2rad(tsc[0])
+ dec = np.deg2rad(tsc[1])
+ target_sky_vectors.append([np.cos(ra) * np.cos(dec),
+ np.sin(ra) * np.cos(dec),
+ np.sin(dec)])
+ target_sky_vectors = np.array(target_sky_vectors)
+ target_sky_vectors_derot = np.dot(rotation_matrix, target_sky_vectors.T).T
+ (target_centroids, kept) = _compute_centroids(target_sky_vectors_derot,
+ (height, width), fov)
+ if k is not None:
+ target_centroids = _distort_centroids(target_centroids, (height, width), k)
+ target_y = []
+ target_x = []
+ for i in range(target_centroids.shape[0]):
+ if i in kept:
+ target_y.append(target_centroids[i][0])
+ target_x.append(target_centroids[i][1])
+ else:
+ target_y.append(None)
+ target_x.append(None)
+ if target_sky_coord.shape[0] > 1:
+ solution_dict['y_target'] = target_y
+ solution_dict['x_target'] = target_x
+ else:
+ solution_dict['y_target'] = target_y[0]
+ solution_dict['x_target'] = target_x[0]
+
+ # If requested to return data about matches, append to dict
+ if return_matches:
+ match_data = self._get_matched_star_data(
+ image_centroids[matched_stars[:, 0]], nearby_star_inds[matched_stars[:, 1]])
+ solution_dict.update(match_data)
+
+ pattern_centroids = []
+ for img_pat_ind in image_pattern_indices:
+ pattern_centroids.append(image_centroids[img_pat_ind])
+ solution_dict.update({'pattern_centroids': pattern_centroids})
+
+ # If requested to create a visualisation, do so and append
+ if return_visual:
+ self._logger.debug('Generating visualisation')
+ img = Image.new('RGB', (width, height))
+ img_draw = ImageDraw.Draw(img)
+ # Make list of matched and not from catalogue
+ matched = matched_stars[:, 1]
+ not_matched = np.array([True]*len(nearby_star_centroids))
+ not_matched[matched] = False
+ not_matched = np.flatnonzero(not_matched)
+
+ def draw_circle(centre, radius, **kwargs):
+ bbox = [centre[1] - radius,
+ centre[0] - radius,
+ centre[1] + radius,
+ centre[0] + radius]
+ img_draw.ellipse(bbox, **kwargs)
+
+ for cent in image_centroids:
+ # Centroids with no/given distortion
+ draw_circle(cent, 2, fill='white')
+ for cent in image_centroids_undist:
+ # Image centroids with coarse distortion for matching
+ draw_circle(cent, 1, fill='darkorange')
+ for cent in image_centroids_undist[image_pattern_indices, :]:
+ # Make the pattern ones larger
+ draw_circle(cent, 3, outline='darkorange')
+ for cent in matched_image_centroids_undist:
+ # Centroid position with solution distortion
+ draw_circle(cent, 1, fill='green')
+ for match in matched:
+ # Green circle for successful match
+ draw_circle(nearby_star_centroids[match],
+ width*match_radius, outline='green')
+ for match in not_matched:
+ # Red circle for failed match
+ draw_circle(nearby_star_centroids[match],
+ width*match_radius, outline='red')
+
+ solution_dict['visual'] = img
+
+ if return_rotation_matrix:
+ solution_dict['rotation_matrix'] = rotation_matrix.tolist()
+
+ self._logger.debug(solution_dict)
+ self._logger.debug(
+ 'For %d centroids, evaluated %s image patterns; 
searched %s pattern hashes' % + (num_centroids, + image_patterns_evaluated, + search_space_explored)) + self._logger.debug( + 'Looked up/evaluated %s/%s catalog patterns' % + (catalog_lookup_count, catalog_eval_count)) + return solution_dict + # Close of image_pattern_indices loop + + # Failed to solve (or timeout or cancel), get time and return None t_solve = (precision_timestamp() - t0_solve) * 1000 self._logger.debug('FAIL: Did not find a match to the stars! It took ' + str(round(t_solve)) + ' ms.') - return {'RA': None, 'Dec': None, 'Roll': None, 'FOV': None, 'RMSE': None, 'Matches': None, - 'Prob': None, 'T_solve': t_solve, 'T_extract': t_extract} + self._logger.debug( + 'FAIL: For %d centroids, evaluated %s image patterns; searched %s pattern hashes' % + (num_centroids, + image_patterns_evaluated, + search_space_explored)) + self._logger.debug( + 'FAIL: Looked up/evaluated %s/%s catalog patterns' % + (catalog_lookup_count, catalog_eval_count)) + return {'RA': None, 'Dec': None, 'Roll': None, 'FOV': None, 'distortion': None, + 'RMSE': None, 'P90E': None, 'MAXE': None, 'Matches': None, 'Prob': None, + 'epoch_equinox': None, 'epoch_proper_motion': None, 'T_solve': t_solve, + 'status': status} + + def cancel_solve(self): + """Signal that a currently running solve_from_image() or solve_from_centroids() should + terminate immediately. + If no solve_from_{image,centroids} is running, this call affects the next solve attempt. + """ + self._logger.debug('cancelling') + self._cancelled = True + + def _get_all_patterns_for_index( + self, hash_index, upper_tri_index, image_pattern_largest_edge, fov_estimate, fov_max_error): + """Returns (edges, vectors) for all pattern table entries for `hash_index`.""" + + # Iterate over table hash indices. + hash_match_inds = _get_table_indices_from_hash(hash_index, self.pattern_catalog) + if len(hash_match_inds) == 0: + return (None, None) + + if self.pattern_largest_edge is not None \ + and fov_estimate is not None \ + and fov_max_error is not None: + # Can immediately compare FOV to patterns to remove mismatches + largest_edge = self.pattern_largest_edge[hash_match_inds].astype(np.float32) + fov2 = largest_edge / image_pattern_largest_edge * fov_estimate / 1000 + keep = abs(fov2 - fov_estimate) < fov_max_error + hash_match_inds = hash_match_inds[keep] + if len(hash_match_inds) == 0: + return (None, None) + catalog_matches = self.pattern_catalog[hash_match_inds, :] + + # Get star vectors for all matching hashes + catalog_pattern_vectors = self.star_table[catalog_matches, 2:5] + # Calculate pattern by angles between vectors + # implement more accurate angle calculation + # this is a bit manual, I could not see a faster way + arr1 = np.take(catalog_pattern_vectors, upper_tri_index[0], axis=1) + arr2 = np.take(catalog_pattern_vectors, upper_tri_index[1], axis=1) + catalog_pattern_edges = np.sort(_angle_from_distance(norm(arr1 - arr2, axis=-1))) + + return (catalog_pattern_edges, catalog_pattern_vectors) def _get_nearby_stars(self, vector, radius): - """Get stars within radius radians of the vector.""" - return np.where(np.dot(np.asarray(vector), self.star_table[:, 2:5].T) > np.cos(radius))[0] + """Get star indices within radius radians of the vector. Sorted brightest first.""" + max_dist = _distance_from_angle(radius) + nearby = self._star_kd_tree.query_ball_point(vector, max_dist) + return np.sort(nearby) + + def _get_matched_star_data(self, centroid_data, star_indices): + """Get dictionary of matched star data to return. 
+ + centroid_data: ndarray of centroid data Nx2, each row (y, x) + star_indices: ndarray of matching star indices len N + + return dict with keys: + - matched_centroids: Nx2 (y, x) in pixel coordinates, sorted by brightness + - matched_stars: Nx3 (ra (deg), dec (deg), magnitude) + - matched_catID: (N,) or (N, 3) with catalogue ID + """ + output = {} + output['matched_centroids'] = centroid_data.tolist() + stars = self.star_table[star_indices, :][:, [0, 1, 5]] + stars[:,:2] = np.rad2deg(stars[:,:2]) + output['matched_stars'] = stars.tolist() + if self.star_catalog_IDs is None: + output['matched_catID'] = None + elif len(self.star_catalog_IDs.shape) > 1: + # Have 2D array, pick rows + output['matched_catID'] = self.star_catalog_IDs[star_indices, :].tolist() + else: + # Have 1D array, pick indices + output['matched_catID'] = self.star_catalog_IDs[star_indices].tolist() + return output + + @staticmethod + def _build_catalog_path(star_catalog): + """ build the path to the star catalog and parse the catalog name + Args: + star_catalog (str or pathlib.Path, optional): the name or path to the star catalog file + Returns: + (tuple[str, pathlib.Path]): return the pure catalog name and the file path + """ + if star_catalog in _supported_databases: + # only name supplied, assume file is adjacent to this code file + catalog_file_full_pathname = _lib_root / star_catalog + else: + # a path string or path object supplied, parse out the pure name + catalog_file_full_pathname = Path(star_catalog).expanduser() + star_catalog = catalog_file_full_pathname.name.rstrip(catalog_file_full_pathname.suffix) + if star_catalog not in _supported_databases: + raise ValueError(f"star_catalog name must be one of {_supported_databases}, got: {star_catalog}") -def get_centroids_from_image(image, sigma=3, image_th=None, crop=None, downsample=None, + # Add .dat suffix for hip and tyc if not present + if star_catalog in ('hip_main', 'tyc_main') and not catalog_file_full_pathname.suffix: + catalog_file_full_pathname = catalog_file_full_pathname.with_suffix('.dat') + + if not catalog_file_full_pathname.exists(): + raise ValueError(f'No star catalogue found at {str(catalog_file_full_pathname)}') + + return star_catalog, catalog_file_full_pathname + + # celestial_coords: [[ra, dec], ...] in degrees + # returns: [[y, x], ...] + def transform_to_image_coords( + self, celestial_coords, width, height, fov, rotation_matrix, distortion): + rotation_matrix = np.array(rotation_matrix) + + celestial_vectors = [] + for cc in celestial_coords: + ra = np.deg2rad(cc[0]) + dec = np.deg2rad(cc[1]) + celestial_vectors.append([np.cos(ra) * np.cos(dec), + np.sin(ra) * np.cos(dec), + np.sin(dec)]) + celestial_vectors = np.array(celestial_vectors) + celestial_vectors_derot = np.dot(rotation_matrix, celestial_vectors.T).T + + (image_coords, kept) = _compute_centroids( + celestial_vectors_derot, (height, width), fov) + image_coords = _distort_centroids(image_coords, (height, width), distortion) + result = [] + for i in range(image_coords.shape[0]): + if i in kept: + result.append(image_coords[i]) + return result + + # image_coords: [[y, x], ...] + # returns: [[ra, dec], ...] 
in degrees + def transform_to_celestial_coords( + self, image_coords, width, height, fov, rotation_matrix, distortion): + rotation_matrix = np.array(rotation_matrix) + + image_coords = np.array(image_coords) + image_coords = _undistort_centroids(image_coords, (height, width), distortion) + image_vectors = _compute_vectors(image_coords, (height, width), fov) + rotated_image_vectors = np.dot(rotation_matrix.T, image_vectors.T).T + + # Calculate and add RA/Dec to solution + ra = np.rad2deg(np.arctan2(rotated_image_vectors[:, 1], + rotated_image_vectors[:, 0])) % 360 + dec = 90 - np.rad2deg(np.arccos(rotated_image_vectors[:,2])) + + celestial_vectors = [] + for i in range(len(ra)): + celestial_vectors.append((ra[i], dec[i])) + + return celestial_vectors + + +def get_centroids_from_image(image, sigma=2, image_th=None, crop=None, downsample=None, filtsize=25, bg_sub_mode='local_mean', sigma_mode='global_root_square', - binary_open=True, centroid_window=None, max_area=None, min_area=None, + binary_open=True, centroid_window=None, max_area=100, min_area=5, max_sum=None, min_sum=None, max_axis_ratio=None, max_returned=None, return_moments=False, return_images=False): """Extract spot centroids from an image and calculate statistics. @@ -927,16 +2332,17 @@ def get_centroids_from_image(image, sigma=3, image_th=None, crop=None, downsampl To aid in finding optimal settings pass `return_images=True` to get back a dictionary with partial extraction results and tweak the parameters accordingly. The dictionary entry - 'binary_mask' is the result of the process which identifies stars and is most useful for this. - - In general, the best extraction is attained with `bg_sub_mode='local_median'` and - `sigma_mode='local_median_abs'` with a reasonable (e.g. 7 to 15) size filter. However, this may - be slow (especially for larger filter sizes) and requires that the camera readout bit-depth is - sufficient to accurately capture the camera noise. A recommendable and much faster alternative - is `bg_sub_mode='local_mean'` and `sigma_mode='global_root_square'` with a large (e.g. 15 to 25) - sized filter, which is the default. You may elect to do background subtraction and image - thresholding by your own methods, then pass `bg_sub_mode=None` and your threshold as `image_th` - to bypass these extraction steps. + `binary_mask` shows the result of the raw star detection and `final_centroids` labels the + centroids in the original image (green for accepted, red for rejected). + + Technically, the best extraction is attained with `bg_sub_mode='local_median'` and + `sigma_mode='local_median_abs'` with a reasonable (e.g. 15) size filter and a very sharp image. + However, this may be slow (especially for larger filter sizes) and requires that the camera + readout bit-depth is sufficient to accurately capture the camera noise. A recommendable and + much faster alternative is `bg_sub_mode='local_mean'` and `sigma_mode='global_root_square'` + with a larger (e.g. 25 or more) sized filter, which is the default. You may elect to do + background subtraction and image thresholding by your own methods, then pass `bg_sub_mode=None` + and your threshold as `image_th` to bypass these extraction steps. The algorithm proceeds as follows: 1. Convert image to 2D numpy.ndarray with type float32. @@ -946,14 +2352,14 @@ def get_centroids_from_image(image, sigma=3, image_th=None, crop=None, downsampl - 'local_median': Create the background image using a median filter of size `filtsize` and subtract pixelwise. 
- - 'local_mean' (the default): Create the background image using a mean filter of size `filtsize` and - subtract pixelwise. + - 'local_mean' (the default): Create the background image using a mean filter of size + `filtsize` and subtract pixelwise. - 'global_median': Subtract the median value of all pixels from each pixel. - 'global_mean': Subtract the mean value of all pixels from each pixel. 4. Calculate the image threshold if image_th is None. If image_th is defined this value will be used to threshold the image. The threshold is determined by calculating the - noise standard deviation with the metod selected as `sigma_mode` and then scaling it by + noise standard deviation with the method selected as `sigma_mode` and then scaling it by `sigma` (default 3). The available methods are: - 'local_median_abs': For each pixel, calculate the standard deviation as @@ -979,9 +2385,9 @@ def get_centroids_from_image(image, sigma=3, image_th=None, crop=None, downsampl positions to correspond to pixels in the original image. Args: - image (numpy.ndarray): Image to find centroids in. + image (PIL.Image): Image to find centroids in. sigma (float, optional): The number of noise standard deviations to threshold at. - Default 3. + Default 2. image_th (float, optional): The value to threshold the image at. If supplied `sigma` and `simga_mode` will have no effect. crop (tuple, optional): Cropping to apply, see :meth:`tetra3.crop_and_downsample_image`. @@ -998,13 +2404,14 @@ def get_centroids_from_image(image, sigma=3, image_th=None, crop=None, downsampl to thresholded binary mask. centroid_window (int, optional): If supplied, recalculate statistics using a square window of the supplied size. - max_area (int, optional): Reject spots larger than this. - min_area (int, optional): Reject spots smaller than this. - max_sum (float, optional): Reject spots with a sum larger than this. - min_sum (float, optional): Reject spots with a sum smaller than this. + max_area (int, optional): Reject spots larger than this. Defaults to 100 pixels. + min_area (int, optional): Reject spots smaller than this. Defaults to 5 pixels. + max_sum (float, optional): Reject spots with a sum larger than this. Defaults to None. + min_sum (float, optional): Reject spots with a sum smaller than this. Defaults to None. max_axis_ratio (float, optional): Reject spots with a ratio of major over minor axis larger - than this. - max_returned (int, optional): Return at most this many spots. + than this. Defaults to None. + max_returned (int, optional): Return at most this many spots. Defaults to None, which + returns all spots. Will return in order of brightness (spot sum). return_moments (bool, optional): If set to True, return the calculated statistics (e.g. higher order moments, sum, area) together with the spot positions. return_images (bool, optional): If set to True, return a dictionary with partial results @@ -1012,44 +2419,21 @@ def get_centroids_from_image(image, sigma=3, image_th=None, crop=None, downsampl Returns: numpy.ndarray or tuple: If `return_moments=False` and `return_images=False` (the defaults) - an array of shape (N,2) is returned with centroid positions (y down, x right) of the - found spots in order of brightness. If `return_moments=True` a tuple of numpy arrays - is returned with: (N,2) centroid positions, N sum, N area, (N,3) xx yy and xy second - moments, N major over minor axis ratio. 
If `return_images=True` a tuple is returned - with the results as defined previously and a dictionary with images and data of partial - results. + an array of shape (N,2) is returned with centroid positions (y down, x right) of the + found spots in order of brightness. If `return_moments=True` a tuple of numpy arrays + is returned with: (N,2) centroid positions, N sum, N area, (N,3) xx yy and xy second + moments, N major over minor axis ratio. If `return_images=True` a tuple is returned + with the results as defined previously and a dictionary with images and data of partial + results. The keys are: `converted_input`: The input after conversion to a mono float + numpy array. `cropped_and_downsampled`: The image after cropping and downsampling. + `removed_background`: The image after background subtraction. `binary_mask`: The + thresholded image where raw stars are detected (after binary opening). + `final_centroids`: The original image annotated with green circles for the extracted + centroids, and red circles for any centroids that were rejected. """ - # bg_sub_mode and sigma_mode: - # local_median, global_median, global_mean - - # Versatile spot extractor for images, used in tetra3 for wide fields and - # in satellite closed-loop tracking. - # PROCESS: - # 0. Convert to numpy single precision greyscale (32-bit float) - # 1. Crop by factor 'crop' if not None (centered crop) - # 2. Downsample by factor 'downsample' if not None (sums values) - # 3. Subtract background by median filter of 'filtsize' width (odd) - # [Set filtsize=None to do single value background subtraction] - # 4. If local_sigma False: - # Find RMS or 1.48*MAD for image as global standard deviation - # If local_sigma True: - # Find RMS or 1.48*MAD for local areas of 'filtsize' width to use - # as a pixel-by-pixel estimate of the local standard deviation - # 5. Threshold by sigma*[local/global] standard deviation if image_th None, else use image_th - # 6. Find area and moments for each region, apply thresholds - # 7. Sort by sum, keep at most 'max_returned' - # 8. Correct for effects of crop and downsample - # RETURNS: - # Default: Numpy array size Nx2 with y,x centroid positions (y down, x right) - # return_moments=True: 5-tuple with Numpy arrays: - # 0: size Nx2 with y,x centroid positions - # 1: size N with sum (zeroth moment) - # 2: size N with area (pixels) - # 3: size Nx3 with xx,yy,xy variances (second moment) - # 4: size N with ratio of major/minor axis - # 1. 
Ensure image is float np array and 2D:
+ raw_image = image.copy()
image = np.asarray(image, dtype=np.float32)
if image.ndim == 3:
assert image.shape[2] in (1, 3), 'Colour image must have 1 or 3 colour channels'
@@ -1082,8 +2466,8 @@ def get_centroids_from_image(image, sigma=3, image_th=None, crop=None, downsampl
assert filtsize is not None, \
'Must define filter size for local median background subtraction'
assert filtsize % 2 == 1, 'Filter size must be odd'
- image = image - scipy.ndimage.filters.uniform_filter(image, size=filtsize)
-
+ image = image - scipy.ndimage.filters.uniform_filter(image, size=filtsize,
+ output=image.dtype)
elif bg_sub_mode.lower() == 'global_median':
image = image - np.median(image)
elif bg_sub_mode.lower() == 'global_mean':
@@ -1117,8 +2501,8 @@ def get_centroids_from_image(image, sigma=3, image_th=None, crop=None, downsampl
raise AssertionError('sigma_mode must be string: local_median_abs, local_root_square,'
+ ' global_median_abs, or global_root_square')
image_th = img_std * sigma
- if return_images:
- images_dict['image_threshold'] = image_th
+ #if return_images:
+ # images_dict['image_threshold'] = image_th
# 5. Threshold to find binary mask
bin_mask = image > image_th
if binary_open:
@@ -1128,8 +2512,8 @@
# 6. Label each region in the binary mask
(labels, num_labels) = scipy.ndimage.label(bin_mask)
index = np.arange(1, num_labels + 1)
- if return_images:
- images_dict['labelled_regions'] = labels
+ #if return_images:
+ # images_dict['labelled_regions'] = labels
if num_labels < 1:
# Found nothing in binary image, return empty.
if return_moments and return_images:
@@ -1151,38 +2535,83 @@ def calc_stats(a, p):
- Variance xx, yy, xy (second moment)
- Area (pixels)
- Major axis/minor axis ratio
+ First variable will be NaN if any of the checks failed
"""
(y, x) = (np.unravel_index(p, (height, width)))
area = len(a)
+ centroid = np.sum([a, x*a, y*a], axis=-1)
+ m0 = centroid[0]
+ centroid[1:] = centroid[1:] / m0
+ m1_x = centroid[1]
+ m1_y = centroid[2]
+ # Check basic filtering
if min_area and area < min_area:
- return (np.nan,)*8
+ return (np.nan, m1_y+.5, m1_x+.5, np.nan, np.nan, np.nan, np.nan, np.nan)
if max_area and area > max_area:
- return (np.nan,)*8
- m0 = np.sum(a)
+ return (np.nan, m1_y+.5, m1_x+.5, np.nan, np.nan, np.nan, np.nan, np.nan)
if min_sum and m0 < min_sum:
- return (np.nan,)*8
+ return (np.nan, m1_y+.5, m1_x+.5, np.nan, np.nan, np.nan, np.nan, np.nan)
if max_sum and m0 > max_sum:
- return (np.nan,)*8
- m1_x = np.sum(x * a) / m0
- m1_y = np.sum(y * a) / m0
- m2_xx = max(0, np.sum((x - m1_x)**2 * a) / m0)
- m2_yy = max(0, np.sum((y - m1_y)**2 * a) / m0)
- m2_xy = np.sum((x - m1_x) * (y - m1_y) * a) / m0
- major = np.sqrt(2 * (m2_xx + m2_yy + np.sqrt((m2_xx - m2_yy)**2 + 4 * m2_xy**2)))
- minor = np.sqrt(2 * max(0, m2_xx + m2_yy - np.sqrt((m2_xx - m2_yy)**2 + 4 * m2_xy**2)))
- if max_axis_ratio and minor <= 0:
- return (np.nan,)*8
- axis_ratio = major / max(minor, .000000001)
- if max_axis_ratio and axis_ratio > max_axis_ratio:
- return (np.nan,)*8
- return (m0, m1_y+.5, m1_x+.5, m2_xx, m2_yy, m2_xy, area, axis_ratio)
+ return (np.nan, m1_y+.5, m1_x+.5, np.nan, np.nan, np.nan, np.nan, np.nan)
+ # If higher order data is requested or used for filtering, calculate.
+ if return_moments or max_axis_ratio is not None:
+ # Need to calculate second order data about the regions, firstly the moments
+ # then use that to get major/minor axes. 
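The hunk resumes below with exactly the second-moment code this comment announces. For reference, the moment formulas `calc_stats` relies on (zeroth moment = flux sum, first moments = weighted centroid, second central moments feeding the major/minor axes) condense to a few lines; a sketch for a single labelled region, with illustrative names and data:

```python
import numpy as np

def spot_stats(a, ys, xs):
    """Moment statistics for one spot: a = pixel values, (ys, xs) = positions.
    Returns (sum, (y, x) centroid in pixel convention, major/minor axis ratio)."""
    m0 = a.sum()                                  # zeroth moment: total flux
    m1_y = (ys * a).sum() / m0                    # first moments: centroid
    m1_x = (xs * a).sum() / m0
    m2_yy = ((ys - m1_y)**2 * a).sum() / m0       # second (central) moments
    m2_xx = ((xs - m1_x)**2 * a).sum() / m0
    m2_xy = ((ys - m1_y) * (xs - m1_x) * a).sum() / m0
    common = np.sqrt((m2_xx - m2_yy)**2 + 4 * m2_xy**2)
    major = np.sqrt(2 * (m2_xx + m2_yy + common))
    minor = np.sqrt(2 * max(0.0, m2_xx + m2_yy - common))
    return m0, (m1_y + .5, m1_x + .5), major / max(minor, 1e-9)

# A symmetric 2x2 spot: the centroid lands at its centre, axis ratio is 1.
vals = np.ones(4)
ys = np.array([0., 0., 1., 1.])
xs = np.array([0., 1., 0., 1.])
print(spot_stats(vals, ys, xs))  # (4.0, (1.0, 1.0), ~1.0)
```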
+ m2_xx = max(0, np.sum((x - m1_x)**2 * a) / m0) + m2_yy = max(0, np.sum((y - m1_y)**2 * a) / m0) + m2_xy = np.sum((x - m1_x) * (y - m1_y) * a) / m0 + major = np.sqrt(2 * (m2_xx + m2_yy + np.sqrt((m2_xx - m2_yy)**2 + 4 * m2_xy**2))) + minor = np.sqrt(2 * max(0, m2_xx + m2_yy - np.sqrt((m2_xx - m2_yy)**2 + 4 * m2_xy**2))) + if max_axis_ratio and minor <= 0: + return (np.nan, m1_y+.5, m1_x+.5, np.nan, np.nan, np.nan, np.nan, np.nan) + axis_ratio = major / max(minor, .000000001) + if max_axis_ratio and axis_ratio > max_axis_ratio: + return (np.nan, m1_y+.5, m1_x+.5, np.nan, np.nan, np.nan, np.nan, np.nan) + return (m0, m1_y+.5, m1_x+.5, m2_xx, m2_yy, m2_xy, area, axis_ratio) + else: + return (m0, m1_y+.5, m1_x+.5, np.nan, np.nan, np.nan, area, np.nan) tmp = scipy.ndimage.labeled_comprehension(image, labels, index, calc_stats, '8f', None, pass_positions=True) - valid = np.all(~np.isnan(tmp), axis=1) + valid = ~np.isnan(tmp[:, 0]) extracted = tmp[valid, :] + rejected = tmp[~valid, :] if return_images: - images_dict['label_statistics'] = bin_mask.copy() + # Convert 16-bit to 8-bit: + if raw_image.mode == 'I;16': + tmp = np.array(raw_image, dtype=np.uint16) + tmp //= 256 + tmp = tmp.astype(np.uint8) + raw_image = Image.fromarray(tmp) + # Convert mono to RGB + if raw_image.mode != 'RGB': + raw_image = raw_image.convert('RGB') + # Draw green circles for kept centroids, red for rejected + img_draw = ImageDraw.Draw(raw_image) + def draw_circle(centre, radius, **kwargs): + bbox = [centre[1] - radius, + centre[0] - radius, + centre[1] + radius, + centre[0] + radius] + img_draw.ellipse(bbox, **kwargs) + for entry in extracted: + pos = entry[1:3].copy() + size = .01*width + if downsample is not None: + pos *= downsample + pos += [offs_h, offs_w] + size *= downsample + draw_circle(pos, size, outline='green') + for entry in rejected: + pos = entry[1:3].copy() + size = .01*width + if downsample is not None: + pos *= downsample + pos += [offs_h, offs_w] + size *= downsample + draw_circle(pos, size, outline='red') + images_dict['final_centroids'] = raw_image + # 8. Sort order = (-extracted[:, 0]).argsort() if max_returned: @@ -1217,17 +2646,17 @@ def calc_stats(a, p): extracted[:, 1:3] = extracted[:, 1:3] * downsample # Scale centroid if crop: extracted[:, 1:3] = extracted[:, 1:3] + np.array([offs_h, offs_w]) # Offset centroid - # Return results - if return_moments and return_images: - return ((extracted[:, 1:3], extracted[:, 0], extracted[:, 6], extracted[:, 3:6], - extracted[:, 7]), images_dict) - elif return_moments: - return (extracted[:, 1:3], extracted[:, 0], extracted[:, 6], extracted[:, 3:6], - extracted[:, 7]) - elif return_images: - return (extracted[:, 1:3], images_dict) - else: + # Return results, default just the centroids + if not any((return_moments, return_images)): return extracted[:, 1:3] + # Otherwise, build list of requested returned items + result = [extracted[:, 1:3]] + if return_moments: + result.append([extracted[:, 0], extracted[:, 6], extracted[:, 3:6], + extracted[:, 7]]) + if return_images: + result.append(images_dict) + return tuple(result) def crop_and_downsample_image(image, crop=None, downsample=None, sum_when_downsample=True, @@ -1247,7 +2676,8 @@ def crop_and_downsample_image(image, crop=None, downsample=None, sum_when_downsa regions into one. The image width and height must be divisible by this factor. sum_when_downsample (bool, optional): If True (the default) downsampled pixels are calculated by summing the original pixel values. If False the mean is used. 
- return_offsets (bool, optional): If True, return the applied cropping offset. + return_offsets (bool, optional): If set to True, the applied cropping offset from the top + left corner is returned. Returns: numpy.ndarray or tuple: If `return_offsets=False` (the default) a 2D array with the cropped and dowsampled image is returned. If `return_offsets=True` is passed a tuple containing
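Finally, the documented contract of `crop_and_downsample_image` (centred crop, block-summed downsample, optional crop offsets) can be mimicked in a few lines of numpy, which is handy for checking the centroid rescaling described earlier by hand. This is an illustrative re-implementation of the documented behaviour under those assumptions, not the function itself:

```python
import numpy as np

def crop_and_downsample(image, crop=None, downsample=None):
    """Centred crop, then block-sum downsample; returns image and crop offsets."""
    offs_h = offs_w = 0
    if crop is not None:
        (ch, cw) = (crop, crop) if np.isscalar(crop) else crop
        offs_h = (image.shape[0] - ch) // 2
        offs_w = (image.shape[1] - cw) // 2
        image = image[offs_h:offs_h + ch, offs_w:offs_w + cw]
    if downsample is not None:
        (h, w) = image.shape
        assert h % downsample == 0 and w % downsample == 0, \
            'Image size must be divisible by downsample factor'
        image = image.reshape(h // downsample, downsample,
                              w // downsample, downsample).sum(axis=(1, 3))
    return image, (offs_h, offs_w)

img = np.arange(16.).reshape(4, 4)
small, offsets = crop_and_downsample(img, downsample=2)
print(small)    # [[10. 18.] [42. 50.]] -- each output pixel sums a 2x2 block
print(offsets)  # (0, 0)
```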