From 230632014dcbcd5da61b6f9c81a71b695852f999 Mon Sep 17 00:00:00 2001 From: Andrej Prsa Date: Tue, 8 Oct 2024 12:33:57 -0400 Subject: [PATCH] Squashed commit of the following: commit 34281b8a2771addc8d4d878294d44f46df554841 Merge: 1584f6c4 22fa0dbe Author: Kyle Conroy Date: Mon Oct 7 15:16:16 2024 -0400 Merge 2.4.15 into release-2.5 commit 22fa0dbe7ea06d31147a9f1c170a4d023e35455d Author: Kyle Conroy Date: Mon Oct 7 11:20:56 2024 -0400 modified publish workflow (#952) * modified publish workflow using same as https://github.com/aprsa/ndpolator/blob/main/.github/workflows/on_release.yaml * address build wheel warnings * bump ubuntu runner to 24.04 * upgrade cibuildwheel * skip failing builds commit fbde243a433783b83f4d0543a5fd3da0a4a4c3a4 Author: Kyle Conroy Date: Thu Oct 3 15:48:13 2024 -0400 2.4.15 bugfix release (#907) * Fix handling of include_times for RVs with compute_times/phases. [#889] * GPs on models computed in phase-space will be properly computed based on residuals in time space. [#899] * Fix units of requivfrac. [#894] * Fix adopting mask_phases from lc_geometry. [#896] * Fix population of wavelength array in load function for passbands. [#914] * Temporarily cap numpy dependency < 2.0. [#930] * Fix installation of phoebe-server CLI script to launch from UI. [#929] * Fix passing compute to export_solver with features attached. [#922] * sigmas_lnf: change handling of noise-nuisance parameter for RVs to no longer depend on the RV amplitude. [#901] * Remove duplicated phoebe-server code. [#940] * Fix python 3.12+ support by updating invalid escape sequences. [#948] * Improved precision in calculation of constraints. 
[#945] --------- Co-authored-by: Kelly Hambleton (Prsa) Co-authored-by: David Jones Co-authored-by: Andrej Prsa Co-authored-by: Matthias Fabry Co-authored-by: Matthias Fabry Co-authored-by: Miroslav Broz commit bf850e1e2f4893035e79d15309ff530fd5de6fb8 Author: Kyle Conroy Date: Thu Oct 3 11:02:38 2024 -0400 release GH actions workflow (#949) --- .github/workflows/on_pr.yml | 2 +- .github/workflows/publish.yml | 59 + README.md | 15 + phoebe-server/phoebe-server | 1172 ----------------- phoebe/__init__.py | 2 +- phoebe/atmospheres/passbands.py | 8 +- phoebe/backend/backends.py | 26 +- phoebe/dependencies/crimpl/common.py | 2 +- phoebe/dependencies/distl/distl.py | 14 +- .../dependencies/ligeor/models/twogaussian.py | 8 +- phoebe/frontend/bundle.py | 15 +- .../default_bundles/default_binary.bundle | 54 +- .../default_contact_binary.bundle | 68 +- .../default_bundles/default_star.bundle | 20 +- phoebe/parameters/constraint.py | 2 +- phoebe/parameters/parameters.py | 21 +- pyproject.toml | 15 +- tests/tests/test_gp/test_gp.py | 80 ++ tests/tests/test_mesh/test_mesh_times.py | 15 + 19 files changed, 301 insertions(+), 1297 deletions(-) create mode 100644 .github/workflows/publish.yml delete mode 100644 phoebe-server/phoebe-server create mode 100644 tests/tests/test_gp/test_gp.py create mode 100644 tests/tests/test_mesh/test_mesh_times.py diff --git a/.github/workflows/on_pr.yml b/.github/workflows/on_pr.yml index dff012328..115cf1bbf 100644 --- a/.github/workflows/on_pr.yml +++ b/.github/workflows/on_pr.yml @@ -41,7 +41,7 @@ jobs: - name: Install phoebe optional modules run: | - python -m pip install rebound + python -m pip install rebound celerite2 emcee - name: Install PHOEBE from source run: | diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 000000000..e95524960 --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,59 @@ +name: Release + +on: + workflow_dispatch: + pull_request: + release: + types: [created] + 
+jobs: + build-sdist: + name: Package source distribution + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Build sdist tarball + run: | + pipx run build --sdist + - uses: actions/upload-artifact@v4 + with: + name: cibw-sdist + path: dist/*.tar.gz + + build-wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-24.04, macos-13, macos-14] + steps: + - name: Checkout the sources + uses: actions/checkout@v4 + - name: Build wheels + uses: pypa/cibuildwheel@v2.21.2 + env: + CIBW_SKIP: pp37-* pp38-* pp39-* pp31*-macosx* + - uses: actions/upload-artifact@v4 + with: + name: cibw-wheels-${{ matrix.os }}-${{ strategy.job-index }} + path: ./wheelhouse/*.whl + + publish-to-pypi: + if: github.event_name != 'pull_request' + needs: [build-sdist, build-wheels] + name: Publish release to PyPI + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/phoebe + permissions: + id-token: write + steps: + - name: Gather sdist tarball and wheels + uses: actions/download-artifact@v4 + with: + pattern: cibw-* + path: dist + merge-multiple: true + - name: Publish package distributions to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 \ No newline at end of file diff --git a/README.md b/README.md index 3c0433016..5f58c0179 100644 --- a/README.md +++ b/README.md @@ -85,6 +85,21 @@ To understand how to use PHOEBE, please consult the [tutorials, scripts and manu CHANGELOG ---------- +### 2.4.15 + +* Fix handling of include_times for RVs with compute_times/phases. [#889] +* GPs on models computed in phase-space will be properly computed based on residuals in time space. [#899] +* Fix units of requivfrac. [#894] +* Fix adopting mask_phases from lc_geometry. [#896] +* Fix population of wavelength array in load function for passbands. [#914] +* Temporarily cap numpy dependency < 2.0. [#930] +* Fix installation of phoebe-server CLI script to launch from UI. 
[#929] +* Fix passing compute to export_solver with features attached. [#922] +* sigmas_lnf: change handling of noise-nuissance parameter for RVs to no longer depend on the RV amplitude. [#901] +* Remove duplicated phoebe-server code. [#940] +* Fix python 3.12+ support by updating invalid escape sequences. [#948] +* Improved precision in calculation of constraints. [#945] + ### 2.4.14 * Fix MPI off to not broadcast if never enabled diff --git a/phoebe-server/phoebe-server b/phoebe-server/phoebe-server deleted file mode 100644 index f5f1f378a..000000000 --- a/phoebe-server/phoebe-server +++ /dev/null @@ -1,1172 +0,0 @@ -#!/usr/bin/python - -""" -pip install flask -pip install flask-socketio -pip install gevent-websocket - -to launch with MPI enabled: -PHOEBE_ENABLE_MPI=TRUE PHOEBE_MPI_NP=8 phoebe-server [port] - -to set a maximum number of allowed max_computations -PHOEBE_SERVER_MAX_COMPUTATIONS=100 phoebe-server [port] -""" - -try: - from flask import Flask, jsonify, request, redirect, Response, make_response, send_from_directory, send_file - from flask_socketio import SocketIO, emit, join_room, leave_room - from flask_cors import CORS -except ImportError: - raise ImportError("dependencies not met: pip install flask flask-cors flask-socketio gevent-websocket") - -### NOTE: tested to work with eventlet, not sure about gevent - - -################################ SERVER/APP SETUP ############################## - -app = Flask(__name__) -CORS(app) -app._bundles = {} -app._clients = [] -app._clients_per_bundle = {} -app._last_access_per_bundle = {} -app._verbose = True -app._debug = False -app._killable = False - -# we'll disable sorting the responses by keys so that we can control the sorting -# by qualifier instead of uniqueid. This will sacrifice caching ability in the -# browser unless we set the order of all keys to be consistent. 
-app.config['JSON_SORT_KEYS'] = False - -# Create the Flask-SQLAlchemy object and an SQLite database -# app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///phoebe.db' -# db = flask.ext.sqlalchemy.SQLAlchemy(app) - -# Configure socket.io -app.config['SECRET_KEY'] = 'phoebesecret' -socketio = SocketIO(app) - -def _uniqueid(N=16): - """ - :parameter int N: number of character in the uniqueid - :return: the uniqueid - :rtype: str - """ - return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.ascii_lowercase) for _ in range(N)) - -def _new_bundleid(uniqueid=None, N=6): - """ - will have 52**N uniqueids available. But we'll check for duplicates just to - make sure. - """ - if uniqueid is None: - uniqueid = _uniqueid(N=N) - - if uniqueid not in app._bundles.keys(): - return uniqueid - else: - # you should really enter the lottery, unless N is <= 3 - return _new_bundleid(uniqueid=None, N=N) - -################################## ADDITIONAL IMPORTS ########################## - -import matplotlib.pyplot as plt -plt.switch_backend('Agg') - -import phoebe -import numpy as np -import json -import random -import string -import os -import sys -import tempfile -import traceback -import urllib2 -import StringIO -import inspect -from time import sleep -from collections import OrderedDict -from datetime import datetime - -from phoebe.parameters.unit_choices import unit_choices as _unit_choices - -phoebe.devel_on() # currently needed for client mode, remove for actual release -phoebe.interactive_off() -phoebe.parameters._is_server = True - -_max_computations = os.getenv('PHOEBE_SERVER_MAX_COMPUTATIONS', None) -if _max_computations is not None: - _max_computations = int(_max_computations) - -_dir_tmpimages = os.path.join(tempfile.gettempdir(), 'phoebe-server-tmpimages') - -if not os.path.exists(_dir_tmpimages): - os.makedirs(_dir_tmpimages) - -def bundle_memory_cleanup(stale_limit_seconds=600): - # TODO: its possible to get an entry in _clients_per_bundle that isn't 
- # available here. The error message is raised in the UI and redirects - # out... but the entry is still made here and never cleared - - now = datetime.now() - for bundleid, last_access in app._last_access_per_bundle.items(): - stale_for = (now-last_access).total_seconds() - clients = app._clients_per_bundle.get(bundleid, []) - active_clients = [c for c in clients if c in app._clients] - print("bundle_memory_cleanup: {} stale for {}/{} seconds with {} active clients and {} total clients".format(bundleid, stale_for, stale_limit_seconds, len(active_clients), len(clients))) - # we'll delete if any of the following - # * no active clients and past the stale limit - # * no clients at all and stale for 30 seconds (in the case of closing where the client sent a deregister signal) - # * stale for more than an 1 day from the webclient (in the case where the client was closed but couldn't send a disconnect signal) - if (len(active_clients)==0 and stale_for > stale_limit_seconds) or (len(clients)==0 and stale_for > 30) or (stale_for > 24*60*60 and np.all([c.split('-')[0]=='web' for c in _client_types_for_bundle(bundleid)])): - if app._verbose: - print("bundle_memory_cleanup: deleting {}".format(bundleid)) - if bundleid in app._bundles.keys(): - del app._bundles[bundleid] - if bundleid in app._clients_per_bundle.keys(): - del app._clients_per_bundle[bundleid] - if bundleid in app._last_access_per_bundle.keys(): - del app._last_access_per_bundle[bundleid] - -_available_kinds = {'component': phoebe.list_available_components(), - 'feature': phoebe.list_available_features(), - 'dataset': phoebe.list_available_datasets(), - 'figure': phoebe.list_available_figures(), - 'compute': phoebe.list_available_computes()} - -# logger = phoebe.logger('INFO') -_dir_tmpimages = os.path.join(tempfile.gettempdir(), 'phoebe-server-tmpimages') - -if not os.path.exists(_dir_tmpimages): - os.makedirs(_dir_tmpimages) - - -# TODO: can we also process and emit logger signals 
(https://docs.python.org/2/library/logging.handlers.html#sockethandler)? Or at the least we could call b.run_checks after each command manually and broadcast those messages - -############################################################################### -# We need to tell clients that its ok to accept API information from an external -# server since this will almost always be running from a different URL/port -# than the client. -# The following code that accomplishes this is taken (borrowed) almost entirely -# from http://flask.pocoo.org/snippets/56/ -from datetime import timedelta -from flask import make_response, request, current_app -from functools import update_wrapper - - -def crossdomain(origin=None, methods=None, headers=None, - max_age=21600, attach_to_all=True, - automatic_options=True): - if methods is not None: - methods = ', '.join(sorted(x.upper() for x in methods)) - if headers is not None and not isinstance(headers, basestring): - headers = ', '.join(x.upper() for x in headers) - if not isinstance(origin, basestring): - origin = ', '.join(origin) - if isinstance(max_age, timedelta): - max_age = max_age.total_seconds() - - def get_methods(): - if methods is not None: - return methods - - options_resp = current_app.make_default_options_response() - return options_resp.headers['allow'] - - def decorator(f): - def wrapped_function(*args, **kwargs): - if automatic_options and request.method == 'OPTIONS': - resp = current_app.make_default_options_response() - else: - resp = make_response(f(*args, **kwargs)) - if not attach_to_all and request.method != 'OPTIONS': - return resp - - h = resp.headers - - h['Access-Control-Allow-Origin'] = origin - h['Access-Control-Allow-Methods'] = get_methods() - h['Access-Control-Max-Age'] = str(max_age) - if headers is not None: - h['Access-Control-Allow-Headers'] = headers - return resp - - f.provide_automatic_options = False - return update_wrapper(wrapped_function, f) - return decorator - 
-############################# CLIENT MANAGEMENT ################################ - -def _client_types_for_bundle(bundleid): - return [c.split('-')[0] for c in app._clients_per_bundle.get(bundleid, [])] - - -############################ BUNDLE MANIPULATION ############################### - - -def _get_bundle_json(bundleid, do_jsonify=True): - b = app._bundles.get(bundleid) - app._last_access_per_bundle[bundleid] = datetime.now() - - data = b.to_json(incl_uniqueid=True) - - if do_jsonify: - return jsonify(data) - else: - return data - -def _value_string(param): - param_type = param.__class__.__name__ - - if param_type in ['StringParameter', 'ChoiceParameter', 'HierarchyParameter']: - return param.get_value() - elif param_type in ['ConstraintParameter']: - return "f({})".format(",".join([p.qualifier for p in param.vars.to_list() if p != param.constrained_parameter])) - elif param_type in ['SelectParameter']: - v = param.get_value() - ev = param.expand_value() - if len(v) == 0: - return "(empty)" - elif np.any(["*" in vi or "?" in vi for vi in v]): - return "[{} ({} {})]".format(",".join(v), len(ev), "match" if len(ev)==1 else "matches") - else: - return "[{}]".format(",".join(v)) - elif param_type in ['JobParameter']: - return param._value - elif param_type in ['UnitParameter']: - return str(param.get_value().to_string()) - elif param_type in ['IntParameter', 'DictParameter', 'BoolParameter']: - return str(param.get_value()) - elif param_type in ['FloatParameter']: - return str(param.get_value()) - elif param_type in ['FloatArrayParameter']: - if isinstance(param._value, phoebe.dependencies.nparray.nparray.ArrayWrapper): - return param._value.__str__() - else: - arr = param.get_value() - # unit = str(param.get_default_unit()) - if len(arr): - return "[{} ... 
{} ({})]".format(arr[0], arr[-1], len(arr)) - else: - return "[ ] (empty)" - else: - return '({})'.format(param_type) - -def _choices(parameter): - if hasattr(parameter, 'choices'): - return parameter.choices - elif parameter.__class__.__name__ == 'BoolParameter': - return ['True', 'False'] - # elif parameter.__class__.__name__ == 'UnitParameter': - # return _unit_choices(parameter.get_value()) - else: - return None - -def _param_json_overview(param): - p = {'uniqueid': param.uniqueid, - 'class': param.__class__.__name__, - 'valuestr': _value_string(param), - 'len': len(param.get_value()) if param.__class__.__name__ in ['SelectParameter', 'FloatArrayParameter'] else None, - 'unitstr': param.default_unit.to_string() if hasattr(param, 'default_unit') else '', - 'readonly': param.context in ['model'] or param.qualifier in ['phoebe_version'] or (hasattr(param, 'is_constraint') and param.is_constraint is not None), - } - - advanced_filter = [] - if not param.is_visible: - advanced_filter.append('not_visible') - if '_default' in [param.component, param.dataset, param.feature]: - advanced_filter.append('is_default') - if param.advanced: - advanced_filter.append('is_advanced') - if param.__class__.__name__ in ['ChoiceParameter'] and len(param.choices) <= 1: - # NOTE: we do not want to set is_single for SelectParameters as those - # allow setting 0 options - advanced_filter.append('is_single') - p['readonly'] = True - if param.context=='constraint': - advanced_filter.append('is_constraint') - - p['advanced_filter'] = advanced_filter - - - for k,v in param.meta.items(): - if k in ['history', 'fitting', 'feedback', 'plugin']: - continue - p[k] = v - - return p - -def _param_json_detailed(param): - p = {'description': param.description} - - if param.__class__.__name__ == 'ConstraintParameter': - p['related_to'] = {p.uniqueid: p.twig for p in param.vars.to_list()} - p['constraint'] = {} - p['constrains'] = {p.uniqueid: p.twig for p in [param.constrained_parameter]} - else: - 
p['related_to'] = {p.uniqueid: p.twig for p in param.related_to} if hasattr(param, 'related_to') else {} - p['constraint'] = {p.uniqueid: p.twig for p in [param.is_constraint]} if hasattr(param, 'is_constraint') and param.is_constraint is not None else {} - p['constrains'] = {p.uniqueid: p.twig for p in param.constrains} if hasattr(param, 'constrains') else {} - - if hasattr(param, 'limits'): - if hasattr(param, 'default_unit'): - p['limits'] = [l.to(param.default_unit).value if l is not None else None for l in param.limits] + [param.default_unit.to_string()] - else: - p['limits'] = param.limits + [None] - # else: - # p['limits'] = None - - if param.__class__.__name__ in ['SelectParameter']: - p['value'] = param.get_value() - elif param.__class__.__name__ in ['FloatArrayParameter']: - value = param.to_json()['value'] - if isinstance(value, list): - value = ",".join(str(vi) for vi in value) - p['value'] = value - elif param.__class__.__name__ in ['ConstraintParameter']: - p['value'] = param.expr - - if hasattr(param, 'choices') or param.__class__.__name__ in ['BoolParameter']: - p['choices'] = _choices(param) - - if hasattr(param, 'default_unit'): - p['unit_choices'] = _unit_choices(param.default_unit) - # else: - # p['unit_choices'] = None - - return p - -def _sort_tags(group, tags): - if group=='contexts': - # try to order contexts in same order as shown in UI.. 
then fill in with the rest - lst = [k for k in ['constraint', 'component', 'feature', 'dataset', 'figure', 'compute', 'model', 'time'] if k in tags] - for k in tags: - if k not in lst: - lst.append(k) - return lst - else: - return sorted(tags) - -def _get_failed_constraints(b): - affected_params = b._failed_constraints[:] - for constraint_id in b._failed_constraints: - cp = b.get_constraint(uniqueid=constraint_id, check_visible=False).constrained_parameter - affected_params += [cp.uniqueid] + [cpc.uniqueid for cpc in cp.constrains_indirect] - return affected_params - - -############################ HTTP ROUTES ###################################### -def _get_response(data, status_code=200, api=False, **metawargs): - d = {} - d['data'] = data - d['meta'] = metawargs - if api: - resp = jsonify(d) - resp.status_code = status_code - return resp - else: - return d - -@app.route("/info", methods=['GET']) -@crossdomain(origin='*') -def info(): - if app._verbose: - print("info", phoebe.__version__, app._parent) - - bundle_memory_cleanup() - - return _get_response({'success': True, 'phoebe_version': phoebe.__version__, 'parentid': app._parent, - 'nclients': len(app._clients), 'clients': app._clients, - 'nbundles': len(app._bundles.keys()), 'clients_per_bundle': app._clients_per_bundle, 'last_access_per_bundle': app._last_access_per_bundle, - 'available_kinds': _available_kinds, - 'max_computations': _max_computations - }, - api=True) - -@app.route('/new_bundle/', methods=['GET']) -@crossdomain(origin='*') -def new_bundle(type): - """ - Initiate a new bundle object, store it to local memory, and return the bundleid. - The client is then responsible for making an additional call to access parameters, etc. 
- - type: 'binary:detached' - """ - if app._verbose: - print("new_bundle(type={})".format(type)) - - def _new_bundle(constructor, **kwargs): - try: - b = getattr(phoebe, constructor)(**kwargs) - except Exception as err: - return _get_response({'success': False, 'error': str(err)}, api=True) - else: - b.set_value(qualifier='auto_add_figure', context='setting', value=True) - bundleid = _new_bundleid() - app._bundles[bundleid] = b - return _get_response({'success': True, 'bundleid': bundleid}, api=True) - - if type == 'single': - return _new_bundle('default_star') - elif type == 'binary:detached': - return _new_bundle('default_binary') - elif type == 'binary:semidetached:primary': - return _new_bundle('default_binary', semidetached='primary') - elif type == 'binary:semidetached:secondary': - return _new_bundle('default_binary', semidetached='secondary') - elif type == 'binary:contact': - return _new_bundle('default_binary', contact_binary=True) - else: - return _get_response({'success': False, 'error': 'bundle with type "{}" not implemented'.format(type)}, api=True) - -@app.route('/open_bundle/', methods=['POST']) -@crossdomain(origin='*') -def open_bundle(type): - """ - """ - if app._verbose: - print("open_bundle") - - try: - data = json.loads(request.data) - except ValueError: - data = {} - - if type == 'load:phoebe2': - if 'file' in request.files: - if app._verbose: print("opening bundle from file") - file = request.files['file'] - try: - bundle_data = json.load(file) - except: - return _get_response({'success': False, 'error': "could not read bundle json data from file. 
If the file is a PHOEBE 1/legacy file, try importing instead."}, api=True) - - else: - if app._verbose: print("opening bundle from json data") - try: - bundle_data = data['json'] - except: - return _get_response({'success': False, 'error': "could not read json data"}, api=True) - - try: - b = phoebe.Bundle(bundle_data) - except Exception as err: - return _get_response({'success': False, 'error': "failed to load bundle with error: "+str(err)}, api=True) - - elif type == 'load:legacy': - try: - b = phoebe.from_legacy(request.files['file']) - except Exception as err: - return _get_response({'success': False, 'error': "file not recognized as bundle or legacy phoebe file. Error: {}".format(str(err))}, api=True) - - else: - return _get_response({'success': False, 'error': "import with type={} not supported".format(type)}, api=True) - - - bundleid = data.get('bundleid', None) - if app._verbose: - print("trying bundleid={}".format(bundleid)) - bundleid = _new_bundleid(bundleid) - app._bundles[bundleid] = b - app._last_access_per_bundle[bundleid] = datetime.now() - - return _get_response({'success': True, 'bundleid': bundleid}, api=True) - -@app.route('/json_bundle/', methods=['GET']) -@crossdomain(origin='*') -def json_bundle(bundleid): - """ - """ - if app._verbose: - print("json_bundle(bundleid={})".format(bundleid)) - - if bundleid not in app._bundles.keys(): - print("json_bundle error: bundleid={}, app._bundles.keys()={}".format(bundleid, app._bundles.keys())) - return _get_response({'success': False, 'error': 'bundle not found with bundleid=\'{}\''.format(bundleid)}, api=True) - - bjson = _get_bundle_json(bundleid, do_jsonify=False) - - return _get_response({'success': True, 'bundle': bjson, 'bundleid': bundleid}, bundleid=bundleid, api=True) - -@app.route('/save_bundle/', methods=['GET']) -@crossdomain(origin='*') -def save_bundle(bundleid): - """ - """ - if app._verbose: - print("save_bundle(bundleid={})".format(bundleid)) - - - if bundleid not in 
app._bundles.keys(): - return _get_response({'success': False, 'error': 'bundle not found with bundleid={}'}, api=True) - - resp = _get_bundle_json(bundleid, do_jsonify=True) - - resp.headers.set('Content-Type', 'text/json') - resp.headers.set('Content-Disposition', 'attachment', filename='{}.bundle'.format(bundleid)) - - return resp - -@app.route('/export_compute//', defaults={'model': None}, methods=['GET']) -@app.route('/export_compute///', methods=['GET']) -@crossdomain(origin='*') -def export_compute(bundleid, compute, model=None): - """ - """ - if app._verbose: - print("export_compute(bundleid={}, compute={})".format(bundleid, compute)) - - - if bundleid not in app._bundles.keys(): - return _get_response({'success': False, 'error': 'bundle not found with bundleid={}'}, api=True) - - b = app._bundles.get(bundleid) - app._last_access_per_bundle[bundleid] = datetime.now() - - ef = tempfile.NamedTemporaryFile(prefix="export_compute", suffix=".py") - - script_fname=ef.name - b.export_compute(script_fname, out_fname=None, compute=compute, model=model) - - return send_file(ef.name, as_attachment=True, attachment_filename='{}_run_compute_{}.py'.format(bundleid,compute)) - -@app.route('/export_arrays//', methods=['GET']) -@crossdomain(origin='*') -def export_params(bundleid, params): - """ - """ - if app._verbose: - print("export_arrays(bundleid={}, params={})".format(bundleid, params)) - - - if bundleid not in app._bundles.keys(): - return _get_response({'success': False, 'error': 'bundle not found with bundleid={}'}, api=True) - - b = app._bundles.get(bundleid) - app._last_access_per_bundle[bundleid] = datetime.now() - - ef = tempfile.NamedTemporaryFile(prefix="export_params", suffix=".csv") - - b.export_arrays(ef.name, delimiter=',', uniqueid=params.split(",")) - - return send_file(ef.name, as_attachment=True, attachment_filename='{}_export_arrays.csv'.format(bundleid)) - - - -@app.route('/bundle/', methods=['GET']) -@crossdomain(origin='*') -def bundle(bundleid): 
- """ - """ - if app._verbose: - print("bundle(bundleid={})".format(bundleid)) - - - if bundleid not in app._bundles.keys(): - return _get_response({'success': False, 'error': 'bundle not found with bundleid={}'.format(bundleid)}, api=True) - - b = app._bundles.get(bundleid) - app._last_access_per_bundle[bundleid] = datetime.now() - - param_list = sorted([_param_json_overview(param) for param in b.to_list()], key=lambda p: p['qualifier']) - param_dict = OrderedDict((p.pop('uniqueid'), p) for p in param_list) - - tags = {k: _sort_tags(k, v) for k,v in b.tags.items()} - - # failed_constraints = _get_failed_constraints(b) - info = _run_checks(b, bundleid, do_emit=False) - info['success'] = True - info['parameters'] = param_dict - info['tags'] = tags - - return _get_response(info, api=True) - -@app.route('/parameter//', methods=['GET']) -@crossdomain(origin='*') -def parameter(bundleid, uniqueid): - """ - """ - if app._verbose: - print("parameter(bundleid={}, uniqueid={})".format(bundleid, uniqueid)) - - if bundleid not in app._bundles.keys(): - return _get_response({'success': False, 'error': 'bundle not found with bundleid={}'.format(bundleid)}, api=True) - - b = app._bundles.get(bundleid) - app._last_access_per_bundle[bundleid] = datetime.now() - - try: - param = b.get_parameter(uniqueid=str(uniqueid), check_visible=False, check_advanced=False, check_default=False) - except: - return _get_response({'success': False, 'error': 'could not find parameter with uniqueid={}'.format(uniqueid)}, api=True) - - data = _param_json_detailed(param) - - return _get_response({'success': True, 'parameter': data}, api=True) - -@app.route('/nparray/', methods=['GET']) -@crossdomain(origin='*') -def nparray(input): - if app._verbose: - print("nparray(input={}))".format(input)) - # input is a json-string representation of an array or nparray helper dictionary - - # first let's load the string - try: - if '{' not in input: - # then assume this is a comma-separate list to be converted to 
an array - npa = phoebe.dependencies.nparray.array([float(v) for v in input.replace('"', '').split(',') if len(v)]) - is_array = True - else: - npa = phoebe.dependencies.nparray.from_json(input) - is_array = False - except Exception as err: - return _get_response({'success': False, 'error': 'could not convert to valid nparray object with err: {}'.format(str(err))}, api=True) - - empty_arange = {'nparray': 'arange', 'start': '', 'stop': '', 'step': ''} - empty_linspace = {'nparray': 'linspace', 'start': '', 'stop': '', 'num': '', 'endpoint': True} - - if is_array: - # now we want to return all valid conversions - data = {'array': npa.to_array().to_dict(), - 'arraystr': ",".join([str(v) for v in npa.to_array().tolist()]), - 'linspace': empty_linspace, - 'arange': empty_arange} - else: - # now we want to return all valid conversions - data = {'array': npa.to_array().to_dict(), - 'arraystr': ",".join([str(v) for v in npa.to_array().tolist()]), - 'linspace': npa.to_dict() if npa.__class__.__name__ == 'Linspace' else npa.to_linspace().to_dict() if hasattr(npa, 'to_linspace') else empty_linspace, - 'arange': npa.to_dict() if npa.__class__.__name__ == 'Arange' else npa.to_arange().to_dict() if hasattr(npa, 'to_arange') else empty_arange} - - return _get_response({'success': True, 'response': data}, api=True) - -@app.route("//figure/", methods=['GET']) -def serve_figure(bundleid, figure): - fname = '{}_{}.png'.format(bundleid, figure) - if app._verbose: - print("serve_figure", fname) - return send_from_directory(_dir_tmpimages, fname) - -@app.route("//figure_afig/", methods=['GET']) -def serve_figure_afig(bundleid, figure): - fname = '{}_{}.afig'.format(bundleid, figure) - if app._verbose: - print("serve_figure_afig", fname) - return send_from_directory(_dir_tmpimages, fname) - - -############################# WEBSOCKET ROUTES ################################ - -########## SOCKET ERRORS -@socketio.on_error() -def error_handler(err): - print("websocket error:", err) - - if 
app._verbose: - ex_type, ex, tb = sys.exc_info() - print traceback.print_tb(tb) - - emit('msg', {'success': False, 'id': None, 'level': 'error', 'msg': 'websocket: '+err.message}, broadcast=False) - - - -########## CLIENT MANAGEMENT -@socketio.on('connect') -def connect(): - if app._verbose: - print('Client connected') - - # emit('connect', {'success': True, 'data': {'clients': app._clients, 'parentid': app._parent}}) - -@socketio.on('disconnect') -def disconnect(): - if app._verbose: - print('Client disconnected') - - # emit('disconnect', {'success': True, 'data': {'clients': app._clients, 'parentid': app._parent}}) - - -@socketio.on('register client') -def register_client(msg): - clientid = msg.get('clientid', None) - bundleid = msg.get('bundleid', None) - if bundleid is not None and bundleid not in app._bundles.keys(): - err = 'bundle not found with bundleid={}'.format(bundleid) - if app._verbose: - print("register client {} error: {}".format(msg, err)) - - emit('{}:errors:react'.format(bundleid), {'success': False, 'error': err}, broadcast=False) - return - - if app._verbose: - print("register_client(clientid={}, bundleid={})".format(clientid, bundleid)) - - if clientid is not None and clientid not in app._clients: - app._clients.append(clientid) - - if bundleid is not None: - if bundleid not in app._clients_per_bundle.keys(): - app._clients_per_bundle[bundleid] = [clientid] - elif clientid not in app._clients_per_bundle.get(bundleid, []): - app._clients_per_bundle[bundleid].append(clientid) - - bundle_memory_cleanup() - -@socketio.on('deregister client') -def deregister_client(msg): - clientid = msg.get('clientid', None) - bundleid = msg.get('bundleid', None) - if app._verbose: - print("deregister_client(clientid={}, bundleid={})".format(clientid, bundleid)) - - if bundleid is not None: - app._clients_per_bundle[bundleid] = [c for c in app._clients_per_bundle.get(bundleid, []) if c!=clientid] - - elif clientid is not None and clientid in app._clients: - # 
note: we'll leave the clientid in app._clients_per_bundle. Those bundles - # will become stale and eventually deleted by timeout in bundle_memory_cleanup. - app._clients.remove(clientid) - - # now cleanup from memory any bundle with NO cients - bundle_memory_cleanup() - -########## BUNDLE METHODS -def _run_checks(b, bundleid, do_emit=True): - report = b.run_checks() - - if do_emit: - emit('{}:checks:react'.format(bundleid), {'success': True, 'checks_status': report.status, 'checks_report': [item.to_dict() for item in report.items]}, broadcast=True) - - try: - b.run_failed_constraints() - except Exception as err: - emit('{}:errors:react'.format(bundleid), {'success': True, 'level': 'warning', 'error': err.message}, broadcast=False) - # if len(b._failed_constraints): - # msg = 'Constraints for the following parameters failed to run: {}. Affected values will not be updated until the constraints can succeed.'.format(', '.join([b.get_parameter(uniqueid=c, check_visible=False).constrained_parameter.uniquetwig for c in b._failed_constraints])) - # emit('{}:errors:react'.format(bundleid), {'success': True, 'level': 'warning', 'error': msg}, broadcast=False) - - failed_constraints = _get_failed_constraints(b) - if do_emit: - emit('{}:failed_constraints:react'.format(bundleid), {'failed_constraints': failed_constraints}, broadcast=True) - - return {'checks_status': report.status, 'checks_report': [item.to_dict() for item in report.items], 'failed_constraints': failed_constraints} - - -def _update_figures(b, bundleid, affected_ps=None): - # we need to update any figures in which: - # * a parameter tagged with that filter has been changed - # * a parameter tagged with a dataset selected in a given figure - # * a parameter tagged with a model selected in a given figure - if app._verbose: - print("_update_figures: ", bundleid) - - - if affected_ps is None: - figures = b.figures - - else: - if len(affected_ps.filter(context='figure', figure=[None])): - # then we changed something 
like color@primary@figure. Its not obvious - # how to estimate which figures need to be updated in this case without - # looking through all *_mode for component (in this case), so we'll - # just update all figures - figures = b.figures - else: - figures = affected_ps.figures - datasets = affected_ps.datasets - models = affected_ps.models - for figure in b.figures: - if figure in figures: - continue - figure_datasets = b.get_value(qualifier='datasets', figure=figure, check_visible=False, check_default=False, expand=True) - figure_models = b.get_value(qualifier='models', figure=figure, check_visible=False, check_default=False, expand=True) - if np.any([ds in figure_datasets for ds in datasets]) or np.any([ml in figure_models for ml in models]): - figures.append(figure) - - if len(affected_ps.filter(qualifier=['default_time_source', 'default_time'], check_visible=False)): - # then we need to add any figures which have time_source == 'default' - for figure in b.figures: - if figure in figures: - continue - if b.get_value(qualifier='time_source', figure=figure, context='figure', check_visible=False) == 'default': - figures.append(figure) - - - current_time = str(datetime.now()) - figure_update_times = {} - for figure in figures: - if app._verbose: - print("_update_figures: calling run_figure on figure: {}".format(figure)) - try: - # if True: - afig, mplfig = b.run_figure(figure=figure, save=os.path.join(_dir_tmpimages, '{}_{}.png'.format(bundleid, figure))) - render_kwargs = {'render': 'draw'} - # TODO: we need to keep all things sent to draw - # i=time, - # draw_sidebars=draw_sidebars, - # draw_title=draw_title, - # tight_layout=tight_layout, - # subplot_grid=subplot_grid, - afig.save(os.path.join(_dir_tmpimages, '{}_{}.afig'.format(bundleid, figure)), renders=[render_kwargs]) - except Exception as err: - if app._verbose: - print("_update_figures error: {}".format(str(err))) - # notify the client that the figure is now failing (and probably shouldn't be shown) - 
figure_update_times[figure] = 'failed' - # remove any existing cached file so that loading won't work - try: - os.remove(os.path.join(_dir_tmpimages, '{}_{}.png'.format(bundleid, figure))) - os.remove(os.path.join(_dir_tmpimages, '{}_{}.afig'.format(bundleid, figure))) - except: - pass - else: - figure_update_times[figure] = current_time - - if app._verbose: - print("_update_figures: emitting figures_updated {}".format(figure_update_times)) - emit('{}:figures_updated:react'.format(bundleid), {'figure_update_times': figure_update_times}, broadcast=True) - - - -@socketio.on('set_value') -def set_value(msg): - if app._verbose: - print("set_value: ", msg) - - bundleid = msg.pop('bundleid') - - if bundleid not in app._bundles.keys(): - err = 'bundle not found with bundleid={}'.format(bundleid) - if app._verbose: - print("set_value {} error: {}".format(msg, err)) - emit('{}:errors:react'.format(bundleid), {'success': False, 'error': err}, broadcast=False) - return - - - b = app._bundles[bundleid] - app._last_access_per_bundle[bundleid] = datetime.now() - - msg.setdefault('check_visible', False) - msg.setdefault('check_default', False) - msg.setdefault('check_advanced', False) - - client_types = _client_types_for_bundle(bundleid) - if 'web' in client_types or 'desktop' in client_types: - is_visible_before = {p.uniqueid: p.is_visible for p in b.to_list(check_visible=False, check_default=False, check_advanced=False)} - - try: - # TODO: handle getting nparray objects (probably as json strings/unicodes) - b.set_value_all(**msg) - ps_constraints = b.run_delayed_constraints() - except Exception as err: - if app._verbose: - print("set_value {} error: {}".format(msg, err.message)) - emit('{}:errors:react'.format(bundleid), {'success': False, 'error': err.message}, broadcast=False) - return - - try: - ps_list = ps_constraints + b.filter(**{k:v for k,v in msg.items() if k not in ['value']}).to_list() - except Exception as err: - if app._verbose: - print("set_value {} error on 
filter: {}".format(msg, err.message)) - emit('{}:errors:react'.format(bundleid), {'success': False, 'error': err.message}, broadcast=False) - return - else: - if 'web' in client_types or 'desktop' in client_types: - - # we need to also include parameters in which the visibility has changed - is_visible_changed = {p.uniqueid: p.is_visible for p in b.to_list(check_visible=False, check_default=False, check_advanced=False) if p.is_visible!=is_visible_before.get(p.uniqueid, None)} - # TODO: need to figure out what should be shown in the client if a new items has become visible but not within the filter - # TODO: tag visibility in the client needs to change based on the change in parameter visibilities - - ps_list += b.filter(uniqueid=is_visible_changed.keys(), check_visible=False, check_advanced=False, check_default=False).to_list() - - param_list = sorted([_param_json_overview(param) for param in ps_list], key=lambda p: p['qualifier']) - param_dict = OrderedDict((p.pop('uniqueid'), p) for p in param_list) - - if app._verbose: - print("set_value success, broadcasting changes:react: {}".format(param_dict)) - - emit('{}:changes:react'.format(bundleid), {'success': True, 'parameters': param_dict}, broadcast=True) - - # flush so the changes goes through before running checks and updating figures - socketio.sleep(0) - - _run_checks(b, bundleid) - _update_figures(b, bundleid, phoebe.parameters.ParameterSet(ps_list)) - - if 'python' in client_types: - ps_dict = {p.uniqueid: {'value': p.to_json()['value']} for p in ps_list} - - if app._verbose: - print("set_value success, broadcasting changes:python: {}".format(ps_dict)) - - emit('{}:changes:python'.format(bundleid), {'success': True, 'parameters': ps_dict}, broadcast=True) - - -# TODO: now that set_default_unit_all returns a PS, we could use bundle_method -# instead? 
- need to see what needs to be done from the python-client side -@socketio.on('set_default_unit') -def set_default_unit(msg): - if app._verbose: - print("set_default_unit: ", msg) - - bundleid = msg.pop('bundleid') - - if bundleid not in app._bundles.keys(): - err = 'bundle not found with bundleid={}'.format(bundleid) - if app._verbose: - print("set_default_unit {} error: {}".format(msg, err)) - emit('{}:errors:react'.format(bundleid), {'success': False, 'error': err}, broadcast=False) - return - - - b = app._bundles[bundleid] - app._last_access_per_bundle[bundleid] = datetime.now() - - msg.setdefault('check_visible', False) - msg.setdefault('check_default', False) - msg.setdefault('check_advanced', False) - - client_types = _client_types_for_bundle(bundleid) - - try: - # TODO: handle getting nparray objects (probably as json strings/unicodes) - b.set_default_unit_all(**msg) - except Exception as err: - if app._verbose: - print("set_default_unit {} error: {}".format(msg, err.message)) - emit('{}:errors:react'.format(bundleid), {'success': False, 'error': err.message}, broadcast=False) - return - - try: - ps_list = b.filter(**{k:v for k,v in msg.items() if k not in ['unit']}).to_list() - except Exception as err: - if app._verbose: - print("set_default_unit {} error on filter: {}".format(msg, err.message)) - emit('{}:errors:react'.format(bundleid), {'success': False, 'error': err.message}, broadcast=False) - return - else: - if 'web' in client_types or 'desktop' in client_types: - param_list = sorted([_param_json_overview(param) for param in ps_list], key=lambda p: p['qualifier']) - param_dict = OrderedDict((p.pop('uniqueid'), p) for p in param_list) - - if app._verbose: - print("set_default_unit success, broadcasting changes:react: {}".format(param_dict)) - - emit('{}:changes:react'.format(bundleid), {'success': True, 'parameters': param_dict}, broadcast=True) - - _update_figures(b, bundleid, phoebe.parameters.ParameterSet(ps_list)) - - if 'python' in client_types: 
- ps_dict = {p.uniqueid: {'default_unit': p.get_default_unit()} for p in ps_list} - - if app._verbose: - print("set_default_unit success, broadcasting changes:python: {}".format(ps_dict)) - - emit('{}:changes:python'.format(bundleid), {'success': True, 'parameters': ps_dict}, broadcast=True) - -@socketio.on('bundle_method') -def bundle_method(msg): - if app._verbose: - print("bundle_method: ", msg) - - bundleid = msg.pop('bundleid', None) - - for k,v in msg.items(): - if isinstance(v, unicode): - msg[k] = str(v) - - if bundleid is None: - emit('errors', {'success': False, 'error': "must provide bundleid"}, broadcast=False) - return - - if bundleid not in app._bundles.keys(): - err = 'bundle not found with bundleid={}'.format(bundleid) - if app._verbose: - print("bundle_method {} error: {}".format(msg, err)) - emit('{}:errors:react'.format(bundleid), {'success': False, 'error': err}, broadcast=False) - return - - - b = app._bundles[bundleid] - app._last_access_per_bundle[bundleid] = datetime.now() - - # msg.setdefault('check_visible', False) - # msg.setdefault('check_default', False) - # msg.setdefault('check_advanced', False) - - client_types = _client_types_for_bundle(bundleid) - - method = msg.pop('method') - - if method in ['run_compute']: - # TODO: have this be a environment variable or flag at the top-level? 
- # forbid expensive computations on this server - msg['max_computations'] = _max_computations - msg['detach'] = True - elif method in ['attach_job']: - msg['wait'] = False - # msg['cleanup'] = False - - # make sure to return parameters removed during overwrite so that we can - # catch that and emit the necessary changes to the client(s) - if method.split('_')[0] in ['add', 'run']: - msg['return_overwrite'] = True - msg.setdefault('overwrite', True) - - try: - ps = getattr(b, method)(**msg) - ps_list = ps.to_list() if hasattr(ps, 'to_list') else [ps] if isinstance(ps, phoebe.parameters.Parameter) else [] - except Exception as err: - if app._verbose: - print("bundle_method ERROR ({}): {}".format(msg, err.message)) - if app._debug: - raise - - if method=='attach_job' and 'Expecting object' in err.message: - # then its likely the object just hasn't been completely written to - # disk yet, this error is expected. - # TODO: catch this within PHOEBE instead and return a reasonable status - pass - else: - emit('{}:errors:react'.format(bundleid), {'success': False, 'error': err.message}, broadcast=False) - - - if method=='attach_job' and ('web' in client_types or 'desktop' in client_types): - # then we still need to emit the change the the status of the job parameter so the client stops polling - # param_list = [_param_json_overview(b.get_parameter(uniqueid=msg.get('uniqueid'), check_visible=False, check_default=False))] - # param_dict = OrderedDict((p.pop('uniqueid'), p) for p in param_list) - # jp = - pjo = _param_json_overview(b.get_parameter(uniqueid=msg.get('uniqueid'), check_visible=False, check_default=False)) - # pjo['valuestr'] = 'failed' - param_dict = {pjo.pop('uniqueid'): pjo} - packet = {'success': True, 'parameters': param_dict} - - emit('{}:changes:react'.format(bundleid), packet, broadcast=True) - - return - - - if method in ['flip_constraint']: - param = b.get_parameter(**{k:v for k,v in msg.items() if k!='solve_for'}) - ps_list += param.vars.to_list() - - 
# TODO: these should now already be handled in ps_list for rename_* - # we could also include these in the output to all methods from PHOEBE - # and then could remove all the logic here? - if method not in ['run_compute', 'attach_job']: - ps_list += b._handle_pblum_defaults(return_changes=True) - ps_list += b._handle_dataset_selectparams(return_changes=True) - ps_list += b._handle_compute_selectparams(return_changes=True) - ps_list += b._handle_component_selectparams(return_changes=True) - - if method in ['attach_job'] and b.get_value(uniqueid=msg['uniqueid'], check_visible=False) == 'loaded': - # we want to wait to do this until attach_job, if we do it after run_compute - # then overwriting an existing model will cause issues - if app._verbose: - print("bundle_method attach_job calling _handle_model_selectparams and _handle_meshcolor_choiceparams now that loaded") - ps_list += b._handle_model_selectparams(return_changes=True) - ps_list += b._handle_meshcolor_choiceparams(return_changes=True) - ps_list += b._handle_figure_time_source_params(return_changes=True) - - - if 'web' in client_types or 'desktop' in client_types: - # handle any deleted parameters - removed_params_list = [param.uniqueid for param in ps_list if param._bundle is None] - - # since some params have been removed, we'll skip any that have param._bundle is None - param_list = sorted([_param_json_overview(param) for param in ps_list if param._bundle is not None], key=lambda p: p['qualifier']) - param_dict = OrderedDict((p.pop('uniqueid'), p) for p in param_list) - - if app._verbose: - print("bundle_method success, broadcasting changes:react: {}".format(param_dict)) - - packet = {'success': True, 'parameters': param_dict, 'removed_parameters': removed_params_list} - - if method.split('_')[0] not in []: - # if we added new parameters, then the tags likely have changed - packet['tags'] = {k: _sort_tags(k, v) for k,v in b.tags.items()} - - if method.split('_')[0] == 'add': - context = 
method.split('_')[1] - packet['add_filter'] = {context: getattr(ps, context)} - elif method.split('_')[0] == 'run': - new_context = {'compute': 'model'}[method.split('_')[1]] - packet['add_filter'] = {new_context: getattr(ps, new_context)} - elif method == 'import_model': - new_context = 'model' - packet['add_filter'] = {'model': ps.model} - - emit('{}:changes:react'.format(bundleid), packet, broadcast=True) - # flush so the changes goes through before running checks and updating figures - socketio.sleep(0) - _run_checks(b, bundleid) - _update_figures(b, bundleid, phoebe.parameters.ParameterSet(ps_list)) - - if 'python' in client_types: - # TODO: this probably isn't sufficient for all methods. What information - # do we need to pass to the python clients for things like remove_*, run_*? - ps_dict = {p.uniqueid: {'default_unit': p.get_default_unit()} for p in ps_list} - - if app._verbose: - print("bundle_method success, broadcasting changes:python: {}".format(ps_dict)) - - emit('{}:changes:python'.format(bundleid), {'success': True, 'parameters': ps_dict}, broadcast=True) - - # app._bundles[bundleid] = b - - - -@socketio.on('rerun_all_figures') -def rerun_all_figures(msg): - if app._verbose: - print("bundle_method: ", msg) - - bundleid = msg.pop('bundleid', None) - - if bundleid is None: - emit('errors', {'success': False, 'error': "must provide bundleid"}, broadcast=False) - return - - if bundleid not in app._bundles.keys(): - err = 'bundle not found with bundleid={}'.format(bundleid) - if app._verbose: - print("bundle_method {} error: {}".format(msg, err)) - emit('{}:errors:react'.format(bundleid), {'success': False, 'error': err}, broadcast=False) - return - - - b = app._bundles[bundleid] - app._last_access_per_bundle[bundleid] = datetime.now() - - client_types = _client_types_for_bundle(bundleid) - _update_figures(b, bundleid, None) - - -if __name__ == "__main__": - #phoebe_server.py port, parent, host - if len(sys.argv) >= 2: - port = int(float(sys.argv[1])) - 
else: - port = 5555 - - if len(sys.argv) >= 3: - parent = sys.argv[2] - else: - parent = 'notprovided' - - if len(sys.argv) >=4: - host = sys.argv[3] - else: - host = '127.0.0.1' - - app._parent = parent - - if app._verbose: - print("*** SERVER READY at {}:{} ***".format(host, port)) - - socketio.run(app, host=host, port=port) diff --git a/phoebe/__init__.py b/phoebe/__init__.py index a4e746d3f..d4742c090 100644 --- a/phoebe/__init__.py +++ b/phoebe/__init__.py @@ -17,7 +17,7 @@ """ -__version__ = '2.4.14.dev+feature-blending' +__version__ = '2.4.15.dev+feature-blending' import os as _os import sys as _sys diff --git a/phoebe/atmospheres/passbands.py b/phoebe/atmospheres/passbands.py index 66586c1e1..d782e5e34 100644 --- a/phoebe/atmospheres/passbands.py +++ b/phoebe/atmospheres/passbands.py @@ -662,7 +662,7 @@ def _planck_spi(self, lam, Teff): return hclkt * expterm/(expterm-1) def compute_blackbody_intensities(self, teffs=None, include_extinction=False, rvs=None, ebvs=None, verbose=False): - """ + r""" Computes blackbody intensity interpolation functions/tables. Intensities are computed across the passed range of effective @@ -1128,7 +1128,7 @@ def export_legacy_ldcoeffs(self, models, atm='ck2004', filename=None, intens_wei f.close() def compute_ldints(self, ldatm): - """ + r""" Computes integrated limb darkening profiles for the passed `ldatm`. These are used for intensity-to-flux transformations. The evaluated @@ -1433,7 +1433,7 @@ def _log10_Inorm(self, query_pts, atm, intens_weighting='photon', atm_extrapolat return (log10_Inorm, nanmask) if return_nanmask else log10_Inorm def Inorm(self, query_pts, atm='ck2004', ldatm='ck2004', ldint=None, ld_func='interp', ld_coeffs=None, intens_weighting='photon', atm_extrapolation_method='none', ld_extrapolation_method='none', blending_method='none', return_nanmask=False): - """ + r""" Computes normal emergent passband intensity. 
Possible atm/ldatm/ld_func/ld_coeffs combinations: @@ -2729,7 +2729,7 @@ def get_passband(passband, content=None, reload=False, update_if_necessary=False return _pbtable[passband]['pb'] def Inorm_bol_bb(Teff=5772., logg=4.43, abun=0.0, atm='blackbody', intens_weighting='photon'): - """ + r""" Computes normal bolometric intensity using the Stefan-Boltzmann law, Inorm_bol_bb = 1/\pi \sigma T^4. If photon-weighted intensity is requested, Inorm_bol_bb is multiplied by a conversion factor that diff --git a/phoebe/backend/backends.py b/phoebe/backend/backends.py index 5b2581636..c0569ff32 100644 --- a/phoebe/backend/backends.py +++ b/phoebe/backend/backends.py @@ -120,28 +120,30 @@ def _timequalifier_by_kind(kind): def _expand_mesh_times(b, dataset_ps, component): def get_times(b, include_times_entry): if include_times_entry in b.datasets: - add_ps = b.filter(dataset=include_times_entry, context='dataset') + add_ps = b.filter(dataset=include_times_entry, context='dataset', **_skip_filter_checks) add_timequalifier = _timequalifier_by_kind(add_ps.kind) - add_ps_components = add_ps.filter(qualifier=add_timequalifier).components - # print "*** add_ps_components", add_dataset, add_ps_components + add_ps_compute_times_components = add_ps.filter(qualifier='compute_times', **_skip_filter_checks).components if len(add_ps.times): add_times = np.array([float(t) for t in add_ps.times]) - elif len(add_ps_components): + elif len(add_ps_compute_times_components): # then we need to concatenate over all components_ # (times@rv@primary and times@rv@secondary are not necessarily # identical) - add_times = np.unique(np.append(*[add_ps.get_value(qualifier='compute_times', component=c) for c in add_ps_components])) - if not len(add_times): - add_times = np.unique(np.append(*[add_ps.get_value(qualifier=add_timequalifier, component=c) for c in add_ps_components])) + add_times = np.unique(np.append(*[add_ps.get_value(qualifier='compute_times', component=c, **_skip_filter_checks) for c in 
add_ps_compute_times_components])) else: # then we're adding from some dataset at the system-level (like lcs) # that have component=None - add_times = add_ps.get_value(qualifier='compute_times', component=None, unit=u.d) - if not len(add_times): - add_times = add_ps.get_value(qualifier=add_timequalifier, component=None, unit=u.d) + add_times = add_ps.get_value(qualifier='compute_times', component=None, unit=u.d, **_skip_filter_checks) + + if not len(add_times): + add_ps_components = add_ps.filter(qualifier=add_timequalifier, **_skip_filter_checks).components + if len(add_ps_components): + add_times = np.unique(np.append(*[add_ps.get_value(qualifier=add_timequalifier, component=c, **_skip_filter_checks) for c in add_ps_components])) + else: + add_times = add_ps.get_value(qualifier=add_timequalifier, component=None, unit=u.d, **_skip_filter_checks) else: # then some sort of t0 from context='component' or 'system' - add_times = [b.get_value(include_times_entry, context=['component', 'system'])] + add_times = [b.get_value(include_times_entry, context=['component', 'system'], **_skip_filter_checks)] return add_times @@ -152,7 +154,7 @@ def get_times(b, include_times_entry): # we're first going to access the compute_times@mesh... 
this should not have a component tag this_times = dataset_ps.get_value(qualifier='compute_times', component=None, unit=u.d) this_times = np.unique(np.append(this_times, - [get_times(b, include_times_entry) for include_times_entry in dataset_ps.get_value(qualifier='include_times', expand=True)] + [get_times(b, include_times_entry) for include_times_entry in dataset_ps.get_value(qualifier='include_times', expand=True, **_skip_filter_checks)] ) ) diff --git a/phoebe/dependencies/crimpl/common.py b/phoebe/dependencies/crimpl/common.py index 12e75e5de..ab21cb199 100644 --- a/phoebe/dependencies/crimpl/common.py +++ b/phoebe/dependencies/crimpl/common.py @@ -394,7 +394,7 @@ def _slurm_kwarg_to_prefix(k): logenv_cmd = self.ssh_cmd.format("echo \'{}\' > {}".format(conda_env, _os.path.join(directory, "crimpl-conda-environment"))) # TODO: use job subdirectory for server_path - scp_cmd = self.scp_cmd_to.format(local_path=" ".join([script_fname]+[_os.path.normpath(f).replace(' ', '\ ') for f in files]), server_path=directory+"/") + scp_cmd = self.scp_cmd_to.format(local_path=" ".join([script_fname]+[_os.path.normpath(f).replace(' ', r'\ ') for f in files]), server_path=directory+"/") if use_scheduler: if use_scheduler == 'slurm': diff --git a/phoebe/dependencies/distl/distl.py b/phoebe/dependencies/distl/distl.py index 032e062fa..1281efd76 100644 --- a/phoebe/dependencies/distl/distl.py +++ b/phoebe/dependencies/distl/distl.py @@ -239,7 +239,7 @@ def __add__(self, other): @property def as_latex(self): - return r"\begin{align} "+self._stex+" \end{align}" + return r"\begin{align} " + self._stex + r" \end{align}" @property def as_latex_list(self): @@ -319,7 +319,7 @@ def _format_uncertainties_asymmetric(labels, labels_latex, units, qs_per_dim): unitstr = " "+unit.to_string() if unit is not None else "" unittex_spacer = "" if unit is None or unit in [_units.deg, _units.dimensionless_unscaled] else "~" unittex = unittex_spacer + unit._repr_latex_().replace('$', '') if unit is not 
None else '' - stex += "\mathrm{{ {} }} &= {}^{{ +{} }}_{{ -{} }} {} \\\\ ".format(label_latex.replace("$", ""), _np.round(qs[1], ndigits), _np.round(qs[2]-qs[1], ndigits), _np.round(qs[1]-qs[0], ndigits), unittex) + stex += r"\mathrm{{ {} }} &= {}^{{ +{} }}_{{ -{} }} {} \\\\ ".format(label_latex.replace("$", ""), _np.round(qs[1], ndigits), _np.round(qs[2]-qs[1], ndigits), _np.round(qs[1]-qs[0], ndigits), unittex) s += "{} = {} +{} -{} {}\n".format(label, _np.round(qs[1], ndigits), _np.round(qs[2]-qs[1], ndigits), _np.round(qs[1]-qs[0], ndigits), unitstr) return Latex(s, stex) @@ -350,7 +350,7 @@ def _format_uncertainties_symmetric(labels, labels_latex, units, values_per_dim, unitstr = " "+unit.to_string() if unit is not None else "" unittex_spacer = "" if unit is None or unit in [_units.deg, _units.dimensionless_unscaled] else "~" unittex = unittex_spacer + unit._repr_latex_().replace('$', '') if unit is not None else '' - stex += "\mathrm{{ {} }} &= {}\pm{{ {} }} {} \\\\ ".format(label_latex.replace("$", ""), _np.round(value, ndigits), _np.round(sigma, ndigits), unittex) + stex += "\\mathrm{{ {} }} &= {} \\pm {{ {} }} {} \\\\ ".format(label_latex.replace("$", ""), _np.round(value, ndigits), _np.round(sigma, ndigits), unittex) s += "{} = {} +/- {} {}\n".format(label, _np.round(value, ndigits), _np.round(sigma, ndigits), unitstr) return Latex(s, stex) @@ -4427,9 +4427,9 @@ def plot_func(self, func, x, N=1000, func_kwargs={}, show=False): bounds = _np.percentile(models, 100 * _norm.cdf([-2, -1, 1, 2]), axis=0) ret1 = _plt.fill_between(x, bounds[0, :], bounds[-1, :], - label="95\% uncertainty", facecolor="#03A9F4", alpha=0.4) + label=r"95\% uncertainty", facecolor="#03A9F4", alpha=0.4) ret2 = _plt.fill_between(x, bounds[1, :], bounds[-2, :], - label="68\% uncertainty", facecolor="#0288D1", alpha=0.4) + label=r"68\% uncertainty", facecolor="#0288D1", alpha=0.4) if show: _plt.show() @@ -4481,7 +4481,7 @@ class Composite(BaseUnivariateDistribution): Limitations and 
treatment "under-the-hood": * &: the pdfs of the two underlying distributions are sampled over their - 99.99\% intervals and multiplied to create a new pdf. A spline is then + 99.99% intervals and multiplied to create a new pdf. A spline is then fit to the pdf and integrated to create the cdf (which is inverted to create the ppf function). Each of these are then linearly interpolated to create the underlying scipy.stats object. This object is then used @@ -4490,7 +4490,7 @@ class Composite(BaseUnivariateDistribution): retaining covariances at all. * |: the pdfs and cdfs of the two underlying distributions are sampled over their - 99.9\% intervals and added to create the new pdfs and cdfs, respectively + 99.9% intervals and added to create the new pdfs and cdfs, respectively (and the cdf inverted to create the ppf function). Each of these are then linearly interpolated to create the underlying scipy.stats object. This object is then used for any call to the underlying call EXCEPT for sampling. diff --git a/phoebe/dependencies/ligeor/models/twogaussian.py b/phoebe/dependencies/ligeor/models/twogaussian.py index 47dfaaaee..b484a64e0 100644 --- a/phoebe/dependencies/ligeor/models/twogaussian.py +++ b/phoebe/dependencies/ligeor/models/twogaussian.py @@ -156,7 +156,7 @@ def save_model(self, nbins=1000, func='', param_values = [], save_file=''): @staticmethod def ellipsoidal(phi, Aell, phi0): - ''' + r''' Ellipsoidal model, defined as $y = (1/2) A_{ell} \cos (4 \pi (\phi - \phi_0))$ Parameters @@ -178,7 +178,7 @@ def ellipsoidal(phi, Aell, phi0): @staticmethod def gaussian(phi, mu, d, sigma): - ''' + r''' Gaussian model, defined as $y = d \exp(-(\phi-\mu)^2/(2\sigma^2))$ Parameters @@ -431,7 +431,7 @@ def cg12e2(phi, C, mu1, d1, sigma1, mu2, d2, sigma2, Aell): @staticmethod def lnlike(y, yerr, ymodel): - ''' + r''' Computes the loglikelihood of a model. 
$\log\mathrm{like} = \sum_i \log(\sqrt{2\pi} \sigma_i) + (y_i - model_i)^2/(2\sigma_i^2) @@ -442,7 +442,7 @@ def lnlike(y, yerr, ymodel): return -np.sum((y-ymodel)**2) def bic(self, ymodel, nparams): - ''' + r''' Computes the Bayesian Information Criterion (BIC) value of a model. BIC = 2 lnlike - n_params \log(n_data) diff --git a/phoebe/frontend/bundle.py b/phoebe/frontend/bundle.py index 46c08acdb..861d5b6d3 100644 --- a/phoebe/frontend/bundle.py +++ b/phoebe/frontend/bundle.py @@ -4483,7 +4483,7 @@ def run_checks_solver(self, solver=None, compute=None, solution=None, figure=Non **{k:v for k,v in kwargs.items() if k not in ['server', 'use_server']}) # test to make sure solver_times will cover the full dataset for time-dependent systems - if self.hierarchy.is_time_dependent(consider_gaussian_process=True): + if self.hierarchy.is_time_dependent(consider_gaussian_process=False): for dataset in self.filter(qualifier='enabled', compute=compute, context='compute', value=True, **_skip_filter_checks).datasets: solver_times = self.get_value(qualifier='solver_times', dataset=dataset, context='dataset', **_skip_filter_checks) if solver_times == 'times': @@ -4561,6 +4561,13 @@ def run_checks_solver(self, solver=None, compute=None, solution=None, figure=Non +addl_parameters, True, 'run_solver') + # this check can/should be removed in PHOEBE 2.5 + for param in self.filter(qualifier='sigmas_lnf', dataset=rv_datasets, context='dataset', **_skip_filter_checks).to_list(): + if np.isfinite(param.get_value()): + report.add_item(self, + "behavior of sigmas_lnf for RVs was changed (fixed) in PHOEBE 2.4.15 to be independent of the RV value. 
See https://github.com/phoebe-project/phoebe2/pull/901", + [param]+addl_parameters, + False, 'run_solver') if 'lc_datasets' in solver_ps.qualifiers: @@ -13013,7 +13020,7 @@ def _write_export_solver_script(self, script_fname, out_fname, solver, solution, exclude_solvers = [s for s in self.solvers if s!=solver] solver_ps = self.get_solver(solver=solver, **_skip_filter_checks) if 'compute' in solver_ps.qualifiers: - compute = solver_ps.get_value(qualifier='compute', compute=kwargs.get('compute', None), default=[], **_skip_filter_checks) + compute = kwargs.get('compute', solver_ps.get_value(qualifier='compute', **_skip_filter_checks)) exclude_features = [feature for feature in self.features if not self.get_value(qualifier='enabled', feature=feature, compute=compute, **_skip_filter_checks)] else: exclude_features = [] @@ -13879,11 +13886,11 @@ def adopt_solution(self, solution=None, t0_supconj_ind = adopt_qualifiers.index('t0_supconj') t0_supconj_old = self.get_value(uniqueid=adopt_uniqueids[t0_supconj_ind], unit=u.d, **_skip_filter_checks) - t0_supconj_new = fitted_values[t0_supconj_ind] + t0_supconj_new = fitted_values[adopt_inds[t0_supconj_ind]] phase_shift = self.to_phase(t0_supconj_new) - self.to_phase(t0_supconj_old) - fitted_values[mask_phases_ind] = [ph-phase_shift for ph in [ecl_ph for ecl_ph in fitted_values[mask_phases_ind]]] + fitted_values[adopt_inds[mask_phases_ind]] = [ph-phase_shift for ph in [ecl_ph for ecl_ph in fitted_values[adopt_inds[mask_phases_ind]]]] for uniqueid, value, unit in zip(adopt_uniqueids, fitted_values[adopt_inds], fitted_units[adopt_inds]): uniqueid, index = _extract_index_from_string(uniqueid) diff --git a/phoebe/frontend/default_bundles/default_binary.bundle b/phoebe/frontend/default_bundles/default_binary.bundle index 7e3953d89..4b34830bd 100644 --- a/phoebe/frontend/default_bundles/default_binary.bundle +++ b/phoebe/frontend/default_bundles/default_binary.bundle @@ -144,7 +144,7 @@ null "kind": "star", "context": "component", 
"description": "Critical (maximum) value of the equivalent radius for the given morphology", -"value": 2.013275176537638, +"value": 2.0132751765376384, "default_unit": "solRad", "limits": [ 0.0, @@ -209,7 +209,7 @@ null "kind": "star", "context": "component", "description": "logg at requiv", -"value": 4.437551877570185, +"value": 4.437551868254479, "default_unit": "", "limits": [ null, @@ -259,7 +259,7 @@ null "kind": "star", "context": "component", "description": "Rotation frequency (wrt the sky)", -"value": 6.283185, +"value": 6.283185307179586, "default_unit": "rad / d", "limits": [ 0.0, @@ -461,7 +461,7 @@ null "kind": "star", "context": "component", "description": "Mass", -"value": 0.9988131358058301, +"value": 0.9988131257959815, "default_unit": "solMass", "limits": [ 1e-12, @@ -493,7 +493,7 @@ null "kind": "star", "context": "component", "description": "Critical (maximum) value of the equivalent radius for the given morphology", -"value": 2.013275176537638, +"value": 2.0132751765376384, "default_unit": "solRad", "limits": [ 0.0, @@ -558,7 +558,7 @@ null "kind": "star", "context": "component", "description": "logg at requiv", -"value": 4.437551877570185, +"value": 4.437551868254479, "default_unit": "", "limits": [ null, @@ -608,7 +608,7 @@ null "kind": "star", "context": "component", "description": "Rotation frequency (wrt the sky)", -"value": 6.283185, +"value": 6.283185307179586, "default_unit": "rad / d", "limits": [ 0.0, @@ -810,7 +810,7 @@ null "kind": "star", "context": "component", "description": "Mass", -"value": 0.9988131358058301, +"value": 0.9988131257959815, "default_unit": "solMass", "limits": [ 1e-12, @@ -859,7 +859,7 @@ null "kind": "orbit", "context": "component", "description": "Orbital frequency (sidereal)", -"value": 6.283185, +"value": 6.283185307179586, "default_unit": "rad / d", "limits": [ null, @@ -990,7 +990,7 @@ null "kind": "orbit", "context": "component", "description": "Mean anomaly at t0@system", -"value": 89.99999559997653, 
+"value": 90.0, "default_unit": "deg", "limits": [ null, @@ -1247,7 +1247,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "6.283185 / {period@primary@component}", +"value": "6.28318530717958623e+00 / {period@primary@component}", "default_unit": "rad / d", "constraint_func": "freq", "constraint_kwargs": { @@ -1263,7 +1263,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "log10((({mass@primary@component} / ({requiv@primary@component} ** 2.000000)) * 2942.206218) * 9.319541)", +"value": "log10((({mass@primary@component} / ({requiv@primary@component} ** 2.000000)) * 2.94220621750441933e+03) * 9.31954089506172778e+00)", "default_unit": "", "constraint_func": "logg", "constraint_kwargs": { @@ -1279,7 +1279,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "1.000000 - {irrad_frac_refl_bol@primary@component}", +"value": "1.00000000000000000e+00 - {irrad_frac_refl_bol@primary@component}", "default_unit": "", "constraint_func": "irrad_frac", "constraint_kwargs": { @@ -1295,7 +1295,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "6.283185 / {period@secondary@component}", +"value": "6.28318530717958623e+00 / {period@secondary@component}", "default_unit": "rad / d", "constraint_func": "freq", "constraint_kwargs": { @@ -1311,7 +1311,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "log10((({mass@secondary@component} / ({requiv@secondary@component} ** 2.000000)) * 2942.206218) * 9.319541)", +"value": "log10((({mass@secondary@component} / ({requiv@secondary@component} ** 2.000000)) * 2.94220621750441933e+03) * 9.31954089506172778e+00)", "default_unit": "", "constraint_func": "logg", "constraint_kwargs": { @@ -1327,7 +1327,7 @@ 
null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "1.000000 - {irrad_frac_refl_bol@secondary@component}", +"value": "1.00000000000000000e+00 - {irrad_frac_refl_bol@secondary@component}", "default_unit": "", "constraint_func": "irrad_frac", "constraint_kwargs": { @@ -1391,7 +1391,7 @@ null "kind": "orbit", "context": "constraint", "description": "expression that determines the constraint", -"value": "{period@binary@component} / ((((-1.000000 * {period@binary@component}) * {dperdt@binary@component}) / 6.283185307179586231995926937088) + 1.000000000000000000000000000000)", +"value": "{period@binary@component} / ((((-1.00000000000000000e+00 * {period@binary@component}) * {dperdt@binary@component}) / 6.28318530717958623e+00) + 1.00000000000000000e+00)", "default_unit": "d", "constraint_func": "period_anom", "constraint_kwargs": { @@ -1407,7 +1407,7 @@ null "kind": "orbit", "context": "constraint", "description": "expression that determines the constraint", -"value": "(6.283185 * ({t0@system} - {t0_perpass@binary@component})) / {period@binary@component}", +"value": "(6.28318530717958623e+00 * ({t0@system} - {t0_perpass@binary@component})) / {period@binary@component}", "default_unit": "deg", "constraint_func": "mean_anom", "constraint_kwargs": { @@ -1465,7 +1465,7 @@ null "kind": "orbit", "context": "constraint", "description": "expression that determines the constraint", -"value": "6.283185 / {period@binary@component}", +"value": "6.28318530717958623e+00 / {period@binary@component}", "default_unit": "rad / d", "constraint_func": "freq", "constraint_kwargs": { @@ -1541,7 +1541,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "(39.478418 * ({sma@binary@component} ** 3.000000)) / ((({period@binary@component} ** 2.000000) * ({q@binary@component} + 1.000000)) * 2942.206217504419328179210424423218)", +"value": "(3.94784176043574320e+01 * 
({sma@binary@component} ** 3.000000)) / ((({period@binary@component} ** 2.000000) * ({q@binary@component} + 1.000000)) * 2.94220621750441933e+03)", "default_unit": "solMass", "constraint_func": "mass", "constraint_kwargs": { @@ -1563,7 +1563,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "{sma@binary@component} / ((1.000000 / {q@binary@component}) + 1.000000)", +"value": "{sma@binary@component} / ((1.00000000000000000e+00 / {q@binary@component}) + 1.00000000000000000e+00)", "default_unit": "solRad", "constraint_func": "comp_sma", "constraint_kwargs": { @@ -1579,7 +1579,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "({sma@binary@component} * (sin({incl@binary@component}))) / ((1.000000 / {q@binary@component}) + 1.000000)", +"value": "({sma@binary@component} * (sin({incl@binary@component}))) / ((1.00000000000000000e+00 / {q@binary@component}) + 1.00000000000000000e+00)", "default_unit": "solRad", "constraint_func": "comp_asini", "constraint_kwargs": { @@ -1659,7 +1659,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "(39.478418 * ({sma@binary@component} ** 3.000000)) / ((({period@binary@component} ** 2.000000) * ((1.000000 / {q@binary@component}) + 1.000000)) * 2942.206217504419328179210424423218)", +"value": "(3.94784176043574320e+01 * ({sma@binary@component} ** 3.000000)) / ((({period@binary@component} ** 2.000000) * ((1.00000000000000000e+00 / {q@binary@component}) + 1.00000000000000000e+00)) * 2.94220621750441933e+03)", "default_unit": "solMass", "constraint_func": "mass", "constraint_kwargs": { @@ -2087,8 +2087,6 @@ null "context": "compute", "description": "Atmosphere table", "choices": [ -"ck2004", -"extern_planckint", "extern_atmx", "extern_planckint", "blackbody", @@ -2338,8 +2336,6 @@ null "context": "compute", "description": "Atmosphere 
table", "choices": [ -"ck2004", -"extern_planckint", "extern_atmx", "extern_planckint", "blackbody", @@ -2358,8 +2354,6 @@ null "context": "compute", "description": "Atmosphere table", "choices": [ -"ck2004", -"extern_planckint", "extern_atmx", "extern_planckint", "blackbody", @@ -2628,7 +2622,7 @@ null "qualifier": "phoebe_version", "context": "setting", "description": "Version of PHOEBE", -"value": "2.4.13.dev+feature-blending", +"value": "2.4.15", "copy_for": false, "readonly": true, "advanced": true, diff --git a/phoebe/frontend/default_bundles/default_contact_binary.bundle b/phoebe/frontend/default_bundles/default_contact_binary.bundle index ccf9135d5..ac04511bd 100644 --- a/phoebe/frontend/default_bundles/default_contact_binary.bundle +++ b/phoebe/frontend/default_bundles/default_contact_binary.bundle @@ -144,7 +144,7 @@ null "kind": "star", "context": "component", "description": "Critical (maximum) value of the equivalent radius for the given morphology", -"value": 1.6724563972838384, +"value": 1.6724563972838287, "default_unit": "solRad", "limits": [ 0.0, @@ -160,7 +160,7 @@ null "kind": "star", "context": "component", "description": "Critical (minimum) value of the equivalent radius for the given morphology", -"value": 1.2725418568681297, +"value": 1.27254185686813, "default_unit": "solRad", "limits": [ 0.0, @@ -209,7 +209,7 @@ null "kind": "star", "context": "component", "description": "logg at requiv", -"value": 4.089736163094955, +"value": 4.089736153779247, "default_unit": "", "limits": [ null, @@ -259,7 +259,7 @@ null "kind": "star", "context": "component", "description": "Rotation frequency (wrt the sky)", -"value": 12.56637, +"value": 12.566370614359172, "default_unit": "rad / d", "limits": [ 0.0, @@ -461,7 +461,7 @@ null "kind": "star", "context": "component", "description": "Mass", -"value": 1.0089067994531355, +"value": 1.0089067893421308, "default_unit": "solMass", "limits": [ 1e-12, @@ -477,7 +477,7 @@ null "kind": "star", "context": 
"component", "description": "Equivalent radius", -"value": 1.4999999999999996, +"value": 1.5000000000000122, "default_unit": "solRad", "limits": [ 1e-06, @@ -493,7 +493,7 @@ null "kind": "star", "context": "component", "description": "Critical (maximum) value of the equivalent radius for the given morphology", -"value": 1.6724563972838378, +"value": 1.672456397283698, "default_unit": "solRad", "limits": [ 0.0, @@ -509,7 +509,7 @@ null "kind": "star", "context": "component", "description": "Critical (minimum) value of the equivalent radius for the given morphology", -"value": 1.2725418568681297, +"value": 1.27254185686813, "default_unit": "solRad", "limits": [ 0.0, @@ -558,7 +558,7 @@ null "kind": "star", "context": "component", "description": "logg at requiv", -"value": 4.089736163094955, +"value": 4.08973615377924, "default_unit": "", "limits": [ null, @@ -608,7 +608,7 @@ null "kind": "star", "context": "component", "description": "Rotation frequency (wrt the sky)", -"value": 12.56637, +"value": 12.566370614359172, "default_unit": "rad / d", "limits": [ 0.0, @@ -810,7 +810,7 @@ null "kind": "star", "context": "component", "description": "Mass", -"value": 1.0089067994531355, +"value": 1.0089067893421308, "default_unit": "solMass", "limits": [ 1e-12, @@ -859,7 +859,7 @@ null "kind": "orbit", "context": "component", "description": "Orbital frequency (sidereal)", -"value": 12.56637, +"value": 12.566370614359172, "default_unit": "rad / d", "limits": [ null, @@ -990,7 +990,7 @@ null "kind": "orbit", "context": "component", "description": "Mean anomaly at t0@system", -"value": 89.99999559997653, +"value": 90.0, "default_unit": "deg", "limits": [ null, @@ -1133,7 +1133,7 @@ null "kind": "envelope", "context": "component", "description": "Fillout-factor of the envelope", -"value": 0.6417897080770951, +"value": 0.6417897080770861, "default_unit": "", "limits": [ 0.0, @@ -1149,7 +1149,7 @@ null "kind": "envelope", "context": "component", "description": "Potential of the 
envelope (from the primary component's reference)", -"value": 3.4013774072298766, +"value": 3.4013774072298815, "default_unit": "", "limits": [ 0.0, @@ -1213,7 +1213,7 @@ null "kind": "orbit", "context": "component", "description": "ratio between equivalent radii of children stars", -"value": 0.9999999999999997, +"value": 1.0000000000000082, "default_unit": "", "limits": [ 0.0, @@ -1229,7 +1229,7 @@ null "kind": "orbit", "context": "component", "description": "sum of fractional equivalent radii of children stars", -"value": 0.8955223880597013, +"value": 0.8955223880597052, "default_unit": "", "limits": [ 0.0, @@ -1326,7 +1326,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "6.283185 / {period@primary@component}", +"value": "6.28318530717958623e+00 / {period@primary@component}", "default_unit": "rad / d", "constraint_func": "freq", "constraint_kwargs": { @@ -1342,7 +1342,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "log10((({mass@primary@component} / ({requiv@primary@component} ** 2.000000)) * 2942.206218) * 9.319541)", +"value": "log10((({mass@primary@component} / ({requiv@primary@component} ** 2.000000)) * 2.94220621750441933e+03) * 9.31954089506172778e+00)", "default_unit": "", "constraint_func": "logg", "constraint_kwargs": { @@ -1358,7 +1358,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "1.000000 - {irrad_frac_refl_bol@primary@component}", +"value": "1.00000000000000000e+00 - {irrad_frac_refl_bol@primary@component}", "default_unit": "", "constraint_func": "irrad_frac", "constraint_kwargs": { @@ -1374,7 +1374,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "6.283185 / {period@secondary@component}", +"value": "6.28318530717958623e+00 / {period@secondary@component}", 
"default_unit": "rad / d", "constraint_func": "freq", "constraint_kwargs": { @@ -1390,7 +1390,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "log10((({mass@secondary@component} / ({requiv@secondary@component} ** 2.000000)) * 2942.206218) * 9.319541)", +"value": "log10((({mass@secondary@component} / ({requiv@secondary@component} ** 2.000000)) * 2.94220621750441933e+03) * 9.31954089506172778e+00)", "default_unit": "", "constraint_func": "logg", "constraint_kwargs": { @@ -1406,7 +1406,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "1.000000 - {irrad_frac_refl_bol@secondary@component}", +"value": "1.00000000000000000e+00 - {irrad_frac_refl_bol@secondary@component}", "default_unit": "", "constraint_func": "irrad_frac", "constraint_kwargs": { @@ -1470,7 +1470,7 @@ null "kind": "orbit", "context": "constraint", "description": "expression that determines the constraint", -"value": "{period@binary@component} / ((((-1.000000 * {period@binary@component}) * {dperdt@binary@component}) / 6.283185307179586231995926937088) + 1.000000000000000000000000000000)", +"value": "{period@binary@component} / ((((-1.00000000000000000e+00 * {period@binary@component}) * {dperdt@binary@component}) / 6.28318530717958623e+00) + 1.00000000000000000e+00)", "default_unit": "d", "constraint_func": "period_anom", "constraint_kwargs": { @@ -1486,7 +1486,7 @@ null "kind": "orbit", "context": "constraint", "description": "expression that determines the constraint", -"value": "(6.283185 * ({t0@system} - {t0_perpass@binary@component})) / {period@binary@component}", +"value": "(6.28318530717958623e+00 * ({t0@system} - {t0_perpass@binary@component})) / {period@binary@component}", "default_unit": "deg", "constraint_func": "mean_anom", "constraint_kwargs": { @@ -1544,7 +1544,7 @@ null "kind": "orbit", "context": "constraint", "description": "expression that 
determines the constraint", -"value": "6.283185 / {period@binary@component}", +"value": "6.28318530717958623e+00 / {period@binary@component}", "default_unit": "rad / d", "constraint_func": "freq", "constraint_kwargs": { @@ -1700,7 +1700,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "(39.478418 * ({sma@binary@component} ** 3.000000)) / ((({period@binary@component} ** 2.000000) * ({q@binary@component} + 1.000000)) * 2942.206217504419328179210424423218)", +"value": "(3.94784176043574320e+01 * ({sma@binary@component} ** 3.000000)) / ((({period@binary@component} ** 2.000000) * ({q@binary@component} + 1.000000)) * 2.94220621750441933e+03)", "default_unit": "solMass", "constraint_func": "mass", "constraint_kwargs": { @@ -1722,7 +1722,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "{sma@binary@component} / ((1.000000 / {q@binary@component}) + 1.000000)", +"value": "{sma@binary@component} / ((1.00000000000000000e+00 / {q@binary@component}) + 1.00000000000000000e+00)", "default_unit": "solRad", "constraint_func": "comp_sma", "constraint_kwargs": { @@ -1738,7 +1738,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "({sma@binary@component} * (sin({incl@binary@component}))) / ((1.000000 / {q@binary@component}) + 1.000000)", +"value": "({sma@binary@component} * (sin({incl@binary@component}))) / ((1.00000000000000000e+00 / {q@binary@component}) + 1.00000000000000000e+00)", "default_unit": "solRad", "constraint_func": "comp_asini", "constraint_kwargs": { @@ -1834,7 +1834,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "(39.478418 * ({sma@binary@component} ** 3.000000)) / ((({period@binary@component} ** 2.000000) * ((1.000000 / {q@binary@component}) + 1.000000)) * 
2942.206217504419328179210424423218)", +"value": "(3.94784176043574320e+01 * ({sma@binary@component} ** 3.000000)) / ((({period@binary@component} ** 2.000000) * ((1.00000000000000000e+00 / {q@binary@component}) + 1.00000000000000000e+00)) * 2.94220621750441933e+03)", "default_unit": "solMass", "constraint_func": "mass", "constraint_kwargs": { @@ -2278,8 +2278,6 @@ null "context": "compute", "description": "Atmosphere table", "choices": [ -"ck2004", -"extern_planckint", "extern_atmx", "extern_planckint", "blackbody", @@ -2560,8 +2558,6 @@ null "context": "compute", "description": "Atmosphere table", "choices": [ -"ck2004", -"extern_planckint", "extern_atmx", "extern_planckint", "blackbody", @@ -2580,8 +2576,6 @@ null "context": "compute", "description": "Atmosphere table", "choices": [ -"ck2004", -"extern_planckint", "extern_atmx", "extern_planckint", "blackbody", @@ -2860,7 +2854,7 @@ null "qualifier": "phoebe_version", "context": "setting", "description": "Version of PHOEBE", -"value": "2.4.13.dev+feature-blending", +"value": "2.4.15", "copy_for": false, "readonly": true, "advanced": true, diff --git a/phoebe/frontend/default_bundles/default_star.bundle b/phoebe/frontend/default_bundles/default_star.bundle index 0c0a502b7..030b856df 100644 --- a/phoebe/frontend/default_bundles/default_star.bundle +++ b/phoebe/frontend/default_bundles/default_star.bundle @@ -144,7 +144,7 @@ null "kind": "star", "context": "component", "description": "Critical (maximum) value of the equivalent radius for the given morphology", -"value": 3.4292622920441116, +"value": 3.429265859647371, "default_unit": "solRad", "limits": [ 0.0, @@ -209,7 +209,7 @@ null "kind": "star", "context": "component", "description": "logg at requiv", -"value": 4.438067632266453, +"value": 4.438067627303133, "default_unit": "", "limits": [ null, @@ -259,7 +259,7 @@ null "kind": "star", "context": "component", "description": "Rotation frequency (wrt the sky)", -"value": 6.283185, +"value": 6.283185307179586, 
"default_unit": "rad / d", "limits": [ 0.0, @@ -490,7 +490,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "6.283185 / {period@starA@component}", +"value": "6.28318530717958623e+00 / {period@starA@component}", "default_unit": "rad / d", "constraint_func": "freq", "constraint_kwargs": { @@ -506,7 +506,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "log10((({mass@starA@component} / ({requiv@starA@component} ** 2.000000)) * 2942.206218) * 9.319541)", +"value": "log10((({mass@starA@component} / ({requiv@starA@component} ** 2.000000)) * 2.94220621750441933e+03) * 9.31954089506172778e+00)", "default_unit": "", "constraint_func": "logg", "constraint_kwargs": { @@ -522,7 +522,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "1.000000 - {irrad_frac_refl_bol@starA@component}", +"value": "1.00000000000000000e+00 - {irrad_frac_refl_bol@starA@component}", "default_unit": "", "constraint_func": "irrad_frac", "constraint_kwargs": { @@ -538,7 +538,7 @@ null "kind": "star", "context": "constraint", "description": "expression that determines the constraint", -"value": "0.814886 * (((2942.206218 * {mass@starA@component}) * (({period@starA@component} / 6.283185) ** 2.000000)) ** 0.333333)", +"value": "8.14885676770049971e-01 * (((2.94220621750441933e+03 * {mass@starA@component}) * (({period@starA@component} / 6.283185) ** 2.00000000000000000e+00)) ** 3.33333333333333315e-01)", "default_unit": "solRad", "constraint_func": "requiv_single_max", "constraint_kwargs": { @@ -864,8 +864,6 @@ null "context": "compute", "description": "Atmosphere table", "choices": [ -"ck2004", -"extern_planckint", "extern_atmx", "extern_planckint", "blackbody", @@ -1066,8 +1064,6 @@ null "context": "compute", "description": "Atmosphere table", "choices": [ -"ck2004", -"extern_planckint", 
"extern_atmx", "extern_planckint", "blackbody", @@ -1217,7 +1213,7 @@ null "qualifier": "phoebe_version", "context": "setting", "description": "Version of PHOEBE", -"value": "2.4.13.dev+feature-blending", +"value": "2.4.15", "copy_for": false, "readonly": true, "advanced": true, diff --git a/phoebe/parameters/constraint.py b/phoebe/parameters/constraint.py index ac97fb869..7706c7f3d 100644 --- a/phoebe/parameters/constraint.py +++ b/phoebe/parameters/constraint.py @@ -1851,7 +1851,7 @@ def requivfrac(b, component, solve_for=None, **kwargs): metawargs = component_ps.meta metawargs.pop('qualifier') - requivfrac_def = FloatParameter(qualifier='requivfrac', latexfmt=r'R_\mathrm{{ {component} }} / a_\mathrm{{ {parent} }}', value=1.0, default_unit=u.solRad, advanced=True, description='Fractional equivalent radius') + requivfrac_def = FloatParameter(qualifier='requivfrac', latexfmt=r'R_\mathrm{{ {component} }} / a_\mathrm{{ {parent} }}', value=1.0, default_unit=u.dimensionless_unscaled, advanced=True, description='Fractional equivalent radius') requivfrac, created = b.get_or_create('requivfrac', requivfrac_def, **metawargs) requiv = component_ps.get_parameter(qualifier='requiv', **_skip_filter_checks) diff --git a/phoebe/parameters/parameters.py b/phoebe/parameters/parameters.py index d34601aa6..da2a1131e 100644 --- a/phoebe/parameters/parameters.py +++ b/phoebe/parameters/parameters.py @@ -3853,11 +3853,16 @@ def _calculate_cf(self, model=None, dataset=None, component=None, sigmas = sigmas[inds] sigmas_lnf = ds_ps.get_value(qualifier='sigmas_lnf', component=ds_comp, default=-np.inf, **_skip_filter_checks) + dataset_kind = ds_ps.kind if len(sigmas): sigmas2 = sigmas**2 + if cf == 'lnf' and sigmas_lnf != -np.inf: - sigmas2 += model_interp.value**2 * np.exp(2 * sigmas_lnf) + if dataset_kind == 'rv': + sigmas2 += np.exp(2 * sigmas_lnf) + else: + sigmas2 += model_interp.value**2 * np.exp(2 * sigmas_lnf) if cf == 'lnf': ret += np.sum((residuals.value**2 / sigmas2) + 
np.log(2*np.pi*sigmas2)) @@ -7632,7 +7637,7 @@ def __math__(self, other, symbol, mathfunc): default_unit = getattr(self_quantity, mathfunc)(other_quantity).unit return ConstraintParameter(self._bundle, "{%s} %s {%s}" % (self.uniquetwig, symbol, other.uniquetwig), default_unit=default_unit) elif isinstance(other, u.Quantity): - return ConstraintParameter(self._bundle, "{%s} %s %0.30f" % (self.uniquetwig, symbol, _value_for_constraint(other)), default_unit=(getattr(self.quantity, mathfunc)(other).unit)) + return ConstraintParameter(self._bundle, "{%s} %s %0.17e" % (self.uniquetwig, symbol, _value_for_constraint(other)), default_unit=(getattr(self.quantity, mathfunc)(other).unit)) elif isinstance(other, float) or isinstance(other, int): if symbol in ['+', '-'] and hasattr(self, 'default_unit'): # assume same units as self (NOTE: NOT NECESSARILY SI) if addition or subtraction @@ -7657,7 +7662,7 @@ def __rmath__(self, other, symbol, mathfunc): elif isinstance(other, Parameter): return ConstraintParameter(self._bundle, "{%s} %s {%s}" % (other.uniquetwig, symbol, self.uniquetwig), default_unit=(getattr(self.quantity, mathfunc)(other.quantity).unit)) elif isinstance(other, u.Quantity): - return ConstraintParameter(self._bundle, "%0.30f %s {%s}" % (_value_for_constraint(other), symbol, self.uniquetwig), default_unit=(getattr(self.quantity, mathfunc)(other).unit)) + return ConstraintParameter(self._bundle, "%0.17e %s {%s}" % (_value_for_constraint(other), symbol, self.uniquetwig), default_unit=(getattr(self.quantity, mathfunc)(other).unit)) elif isinstance(other, float) or isinstance(other, int): if symbol in ['+', '-'] and hasattr(self, 'default_unit'): # assume same units as self if addition or subtraction @@ -7665,7 +7670,7 @@ def __rmath__(self, other, symbol, mathfunc): else: # assume dimensionless other = float(other)*u.dimensionless_unscaled - return ConstraintParameter(self._bundle, "%f %s {%s}" % (_value_for_constraint(other), symbol, self.uniquetwig), 
default_unit=(getattr(self.quantity, mathfunc)(other).unit)) + return ConstraintParameter(self._bundle, "%0.17e %s {%s}" % (_value_for_constraint(other), symbol, self.uniquetwig), default_unit=(getattr(self.quantity, mathfunc)(other).unit)) elif isinstance(other, u.Unit) and mathfunc=='__mul__': return self.quantity*other else: @@ -11787,7 +11792,7 @@ def __math__(self, other, symbol, mathfunc): return ConstraintParameter(self._bundle, "(%s) %s {%s}" % (self.expr, symbol, other.uniquetwig), default_unit=(getattr(self.result, mathfunc)(other.quantity).unit)) elif isinstance(other, u.Quantity): #print "***", other, type(other), isinstance(other, ConstraintParameter) - return ConstraintParameter(self._bundle, "(%s) %s %0.30f" % (self.expr, symbol, _value_for_constraint(other, self)), default_unit=(getattr(self.result, mathfunc)(other).unit)) + return ConstraintParameter(self._bundle, "(%s) %s %0.17e" % (self.expr, symbol, _value_for_constraint(other, self)), default_unit=(getattr(self.result, mathfunc)(other).unit)) elif isinstance(other, float) or isinstance(other, int): if symbol in ['+', '-']: # assume same units as self (NOTE: NOT NECESSARILY SI) if addition or subtraction @@ -11795,7 +11800,7 @@ def __math__(self, other, symbol, mathfunc): else: # assume dimensionless other = float(other)*u.dimensionless_unscaled - return ConstraintParameter(self._bundle, "(%s) %s %f" % (self.expr, symbol, _value_for_constraint(other, self)), default_unit=(getattr(self.result, mathfunc)(other).unit)) + return ConstraintParameter(self._bundle, "(%s) %s %0.17e" % (self.expr, symbol, _value_for_constraint(other, self)), default_unit=(getattr(self.result, mathfunc)(other).unit)) elif isinstance(other, str): return ConstraintParameter(self._bundle, "(%s) %s %s" % (self.expr, symbol, other), default_unit=(getattr(self.result, mathfunc)(eval(other)).unit)) elif _is_unit(other) and mathfunc=='__mul__': @@ -11813,7 +11818,7 @@ def __rmath__(self, other, symbol, mathfunc): return 
ConstraintParameter(self._bundle, "{%s} %s (%s)" % (other.uniquetwig, symbol, self.expr), default_unit=(getattr(self.result, mathfunc)(other.quantity).unit)) elif isinstance(other, u.Quantity): #~ print "*** rmath", other, type(other) - return ConstraintParameter(self._bundle, "%0.30f %s (%s)" % (_value_for_constraint(other, self), symbol, self.expr), default_unit=(getattr(self.result, mathfunc)(other).unit)) + return ConstraintParameter(self._bundle, "%0.17e %s (%s)" % (_value_for_constraint(other, self), symbol, self.expr), default_unit=(getattr(self.result, mathfunc)(other).unit)) elif isinstance(other, float) or isinstance(other, int): if symbol in ['+', '-']: # assume same units as self if addition or subtraction @@ -11821,7 +11826,7 @@ def __rmath__(self, other, symbol, mathfunc): else: # assume dimensionless other = float(other)*u.dimensionless_unscaled - return ConstraintParameter(self._bundle, "%f %s (%s)" % (_value_for_constraint(other, self), symbol, self.expr), default_unit=(getattr(self.result, mathfunc)(other).unit)) + return ConstraintParameter(self._bundle, "%0.17e %s (%s)" % (_value_for_constraint(other, self), symbol, self.expr), default_unit=(getattr(self.result, mathfunc)(other).unit)) elif isinstance(other, str): return ConstraintParameter(self._bundle, "%s %s (%s)" % (other, symbol, self.expr), default_unit=(getattr(self.result, mathfunc)(eval(other)).unit)) elif _is_unit(other) and mathfunc=='__mul__': diff --git a/pyproject.toml b/pyproject.toml index 2400ff34e..22022d7a2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ [project] name = "phoebe" -version = "2.4.14.dev+release-2.5" +version = "2.4.15.dev+release-2.5" description = "PHOEBE: modeling and analysis of eclipsing binary stars" readme = "README.md" requires-python = ">=3.7" @@ -58,7 +58,7 @@ classifiers = [ "Topic :: Software Development :: User Interfaces" ] dependencies = [ - "numpy", + "numpy < 2.0.0", "scipy", "astropy", "pytest", @@ -80,7 +80,7 @@ repository = 
"https://github.com/phoebe-project/phoebe2" documentation = "http://phoebe-project.org/docs" [build-system] -requires = ["setuptools", "numpy", "wheel"] +requires = ["setuptools", "numpy < 2.0.0", "wheel"] build-backend = "setuptools.build_meta" [tool.setuptools] @@ -90,11 +90,16 @@ packages = [ "phoebe.parameters.solver", "phoebe.parameters.figure", "phoebe.frontend", + "phoebe.frontend.default_bundles", "phoebe.constraints", "phoebe.dynamics", "phoebe.distortions", "phoebe.algorithms", "phoebe.atmospheres", + "phoebe.atmospheres.tables.extinction", + "phoebe.atmospheres.tables.passbands", + "phoebe.atmospheres.tables.wd", + "phoebe.lib", "phoebe.backend", "phoebe.solverbackends", "phoebe.solverbackends.ebai", @@ -115,3 +120,7 @@ packages = [ "phoebe.dependencies.ligeor.models", "phoebe.dependencies.ligeor.utils", ] +script-files = [ + "client-server/phoebe-server", + "client-server/phoebe-autofig", +] diff --git a/tests/tests/test_gp/test_gp.py b/tests/tests/test_gp/test_gp.py new file mode 100644 index 000000000..779b34121 --- /dev/null +++ b/tests/tests/test_gp/test_gp.py @@ -0,0 +1,80 @@ +""" +""" +import phoebe +import numpy as np + + +def test_gps(verbose=False, plot=False): + # Make fake data + b = phoebe.default_binary() + + b.add_dataset('lc', compute_times=phoebe.linspace(0,5.,301)) + + #Make a set of fast compute options + b.add_compute(compute='fast_compute') + b.set_value_all('ld_mode', value='manual') + b.set_value('irrad_method', compute='fast_compute', value='none') + b.set_value_all('distortion_method', compute='fast_compute', value='sphere') + b.set_value_all('atm', value='ck2004') + + + b.run_compute(compute='fast_compute') + + times = b.get_value(qualifier='times', context='model') + fluxes = b.get_value(qualifier='fluxes', context='model') + np.random.normal(size=times.shape) * 0.07 + 0.2*np.sin(times) + sigmas = np.ones_like(fluxes) * 0.05 + + #Upload the fake data to PHOEBE + + b.add_dataset('lc', dataset='lc01', times=times, fluxes=fluxes, 
sigmas=sigmas, overwrite=True) + b.set_value_all('ld_mode', value='manual') + + b.run_compute(model='withoutGPs', compute='fast_compute') + + #Make a model with GPs + b.add_gaussian_process('celerite2', dataset='lc01', kernel='sho') + + if verbose: + print("initial values of rho, tau and sigma = ", b['rho@gp_celerite201'],b['tau@gp_celerite201'],b['sigma@gp_celerite201']) + + # Compute model in phase space + b.flip_constraint('compute_phases', solve_for='compute_times') + b.set_value('compute_phases', phoebe.linspace(-0.5,0.5,101)) + + b.run_compute(model='withGPs', compute='fast_compute') + + if plot: + b.plot(kind='lc', c={'withoutGPs': 'red', 'withGPs': 'green'}, + ls={'withoutGPs': 'dashed', 'withGPs': 'solid'}, + s={'model': 0.03}, + show=True) + + b.add_distribution('rho@gp_celerite201', phoebe.uniform_around(0.5), distribution='init_sample') + b.add_distribution('tau@gp_celerite201', phoebe.uniform_around(0.5), distribution='init_sample') + b.add_distribution('sigma@gp_celerite201', phoebe.uniform_around(0.1), distribution='init_sample') + + b.add_distribution('rho@gp_celerite201', phoebe.uniform(0.01,2.), distribution='mypriors') + b.add_distribution('tau@gp_celerite201', phoebe.uniform(0.01,2.), distribution='mypriors') + b.add_distribution('sigma@gp_celerite201', phoebe.uniform(0.5,1.5), distribution='mypriors') + + b.add_solver('sampler.emcee', solver='mcmc_gps', + init_from='init_sample', + priors='mypriors', + compute='fast_compute', nwalkers=7, niters=10) + + b.run_solver(solver='mcmc_gps', solution = 'mcmc_gps_sol') + b.adopt_solution('mcmc_gps_sol') + + b.run_compute(model='GPsol', compute='fast_compute') + if verbose: + print("fitted values of rho, tau and sigma = ", b['rho@gp_celerite201'],b['tau@gp_celerite201'],b['sigma@gp_celerite201']) + print ("compute with GPs solved") + + if plot: + b.plot(kind='lc', c={'GPsol': 'red', 'withGPs': 'green'}, + ls={'withGPs': 'dashed', 'GPsol': 'solid'}, + s={'model': 0.03},show=True) + +if __name__ == 
'__main__': + logger = phoebe.logger(clevel='INFO') + test_gps(verbose=True, plot=True) diff --git a/tests/tests/test_mesh/test_mesh_times.py b/tests/tests/test_mesh/test_mesh_times.py new file mode 100644 index 000000000..ce2499c9b --- /dev/null +++ b/tests/tests/test_mesh/test_mesh_times.py @@ -0,0 +1,15 @@ +import phoebe +import numpy as np + + + +def test_rv_compute_times(): + b = phoebe.default_binary() + b.add_dataset('rv', compute_phases=phoebe.linspace(0,1,4), dataset='rv01') + b.add_dataset('mesh', include_times='rv01', dataset='mesh01') + b.run_compute() + + assert len(b.filter(context='model', kind='mesh').times) == 4 + +if __name__ == '__main__': + test_rv_compute_times()