Merge pull request #63 from StingraySoftware/Sprint_Week_31
Sprint Week 31
pbalm authored Sep 18, 2017
2 parents a9af9c0 + 239fe91 commit c352d21
Showing 38 changed files with 724 additions and 526 deletions.
2 changes: 1 addition & 1 deletion setup/config/deply_darwin_config.js
@@ -9,6 +9,6 @@ module.exports = {
url : 'http://localhost:5000'
},
logDebugMode : 'true',
logsPath : '$HOME/Dave_work/flaskserver.log',
logsPath : '$HOME/.dave/flaskserver.log',
splash_path : '/../../../dave/resources/templates/splash_page.html'
};
2 changes: 1 addition & 1 deletion setup/config/deply_linux_config.js
@@ -9,6 +9,6 @@ module.exports = {
url : 'http://localhost:5000'
},
logDebugMode : 'true',
logsPath : '$HOME/Dave_work/flaskserver.log',
logsPath : '$HOME/.dave/flaskserver.log',
splash_path : '/../resources/templates/splash_page.html'
};
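Both deploy configs move the Flask server log from $HOME/Dave_work to the hidden $HOME/.dave directory. For reference, a shell-style path like this can be expanded on the Python side with os.path.expandvars; whether the server resolves it exactly this way is an assumption.

import os

# Hypothetical check of where the relocated log file would land on this machine.
logs_path = os.path.expandvars("$HOME/.dave/flaskserver.log")
print(logs_path)  # e.g. /home/alice/.dave/flaskserver.log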
50 changes: 25 additions & 25 deletions setup/setup.bash
@@ -144,7 +144,7 @@ else
if [[ retVal -ne 0 ]] ; then
echo "Failed to create virtual Python environment."
return 1

# We could try to fix this by deleting the pip cache, but in the cases seen so far deleting the pip cache doesn't solve it.
# echo "Failed to create virtual Python environment. Deleting pip cache and try again."
# if [[ "$OSTYPE" == "linux-gnu" ]]; then
@@ -161,16 +161,16 @@ else
# return 1
# fi
fi

fi
source activate dave

#Installing Stingray and Astropy Helpers
STINGRAY_FOLDER=$DIR/stingray
STINGRAY_URL=https://github.com/StingraySoftware/stingray.git
# Sets the specific commit to checkout:
# Sep 7th, 2017 -> https://github.com/StingraySoftware/stingray/commit/e833a5c4090641c84f16df64439b27af8356bbb2
STINGRAY_COMMIT_HASH=e833a5c4090641c84f16df64439b27af8356bbb2
# Sep 10th, 2017 -> https://github.com/StingraySoftware/stingray/commit/97094d49a8ff0a4e8392fde509116ba9f366a9f2
STINGRAY_COMMIT_HASH=97094d49a8ff0a4e8392fde509116ba9f366a9f2
LINUX_COMPILATION=lib.linux-x86_64-3.5
DARWIN_COMPILATION=lib.macosx-10.5-x86_64-3.5

@@ -189,7 +189,7 @@ if [ ! -e $STINGRAY_FOLDER ]; then
#Install stingray libraries
echo statsmodels >> requirements.txt
pip install -r requirements.txt

retVal=$?
if [[ retVal -ne 0 ]] ; then
echo "Failed to install Stingray dependencies"
@@ -237,8 +237,8 @@ fi
HENDRICS_FOLDER=$DIR/hendrics
HENDRICS_URL=https://github.com/StingraySoftware/HENDRICS.git
# Sets the specific commit to checkout:
# Sep 4, 2017 -> https://github.com/StingraySoftware/HENDRICS/commit/a1757c0b21bd3aeb55bec22bc23d3c5440f7440c
HENDRICS_COMMIT_HASH=a1757c0b21bd3aeb55bec22bc23d3c5440f7440c
# Sep 9th, 2017 -> https://github.com/StingraySoftware/HENDRICS/commit/a5b7b7389b832b1eeaa87e6e470c659e454f490f
HENDRICS_COMMIT_HASH=a5b7b7389b832b1eeaa87e6e470c659e454f490f

if [ ! -e $HENDRICS_FOLDER ]; then

@@ -280,24 +280,24 @@ fi


if [[ "$OSTYPE" == "darwin"* ]]; then
# Mac OSX
#This is for MagicFile but only applies to macosx
if [ ! -f /usr/local/bin/brew ]; then
if hash /opt/local/bin/port 2>/dev/null; then
echo "Installing LibMagic with MacPorts"
sudo /opt/local/bin/port install file
else
echo "Please install HomeBrew or MacPorts before continue."
echo "Run this HomeBrew installation command on a terminal and relanch DAVE:"
echo '/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"'
echo "Or install MacPorts with this guide:"
echo 'https://www.macports.org/install.php'
exit 1
fi
else
echo "Installing LibMagic with HomeBrew"
/usr/local/bin/brew install libmagic
fi
# Mac OSX
# This is for MagicFile but only applies to macosx
if [ ! -f /usr/local/bin/brew ]; then
if hash /opt/local/bin/port 2>/dev/null; then
echo "Installing LibMagic with MacPorts"
yes | sudo /opt/local/bin/port install libmagic
else
echo "Please install HomeBrew or MacPorts before continue."
echo "Run this HomeBrew installation command on a terminal and relanch DAVE:"
echo '/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"'
echo "Or install MacPorts with this guide:"
echo 'https://www.macports.org/install.php'
exit 1
fi
else
echo "Installing LibMagic with HomeBrew"
/usr/local/bin/brew install libmagic
fi
fi
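The libmagic installed above is presumably consumed through Python's magic bindings (the "MagicFile" comment suggests file-type detection). A minimal sketch, assuming the python-magic package is what DAVE relies on, to verify the native library is reachable after the Homebrew or MacPorts install:

import magic  # python-magic bindings; assumed dependency

# Identify an arbitrary file to confirm that libmagic loads correctly.
print(magic.from_file("/etc/hosts"))  # e.g. "ASCII text"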


3 changes: 3 additions & 0 deletions src/main/python/config.py
@@ -10,17 +10,20 @@ class CONFIG:
LOG_LEVEL = -1 # ALL = -1, DEBUG = 0, INFO = 1, WARN = 2, ERROR = 3, NONE = 4
USE_JAVASCRIPT_CACHE = False #If true, DAVE GUI will try to get js files from browser cache. Use False for development environments
PYTHON_CACHE_SIZE = 32 # The maximum number of items to store in the LRU cache
MAX_PLOT_POINTS = 1000 # The maximum number of elements to return in a JSON NDARRAY

def set_config(config):

CONFIG.IS_LOCAL_SERVER = config['IS_LOCAL_SERVER']
CONFIG.LOG_TO_SERVER_ENABLED = config['LOG_TO_SERVER_ENABLED']
CONFIG.LOG_LEVEL = int(config['LOG_LEVEL'])
CONFIG.MAX_PLOT_POINTS = int(config['MAX_PLOT_POINTS'])

return "IS_LOCAL_SERVER: " + str(CONFIG.IS_LOCAL_SERVER) \
+ ", DEBUG_MODE: " + str(CONFIG.DEBUG_MODE) \
+ ", LOG_TO_SERVER_ENABLED: " + str(CONFIG.LOG_TO_SERVER_ENABLED) \
+ ", LOG_LEVEL: " + str(CONFIG.LOG_LEVEL) \
+ ", BIG_NUMBER: " + str(CONFIG.BIG_NUMBER) \
+ ", MAX_PLOT_POINTS: " + str(CONFIG.MAX_PLOT_POINTS) \
+ ", PRECISSION: " + str(CONFIG.PRECISSION) \
+ ", USE_JAVASCRIPT_CACHE: " + str(CONFIG.USE_JAVASCRIPT_CACHE)
2 changes: 1 addition & 1 deletion src/main/python/utils/dataset_helper.py
@@ -376,10 +376,10 @@ def update_dataset_filtering_by_gti(hdu_table, gti_table, ev_list, ev_list_err,
if ad_column in ds_columns_errors and len(ds_columns_errors[ad_column]) > end_event_idx:
error_values=np.nan_to_num(ds_columns_errors[ad_column][start_event_idx:end_event_idx])
hdu_table.columns[ad_column].add_values(values, error_values)

else:
logging.info("No data point in GTI # %s: GTI (from, to)=(%f, %f); event list (from, to)=(%d, %d)" % (gti_index, start, end, start_event_idx, end_event_idx))


# Returns a tuple with the counts and the key values
# of applying the histogram to an array
#
24 changes: 12 additions & 12 deletions src/main/python/utils/dave_endpoint.py
@@ -196,7 +196,7 @@ def get_lightcurve(src_filename, bck_filename, gti_filename, target, filters, ax
logging.debug("get_lightcurve gti: %s" % gti_filename)
logging.debug("get_lightcurve: filters %s" % filters)
logging.debug("get_lightcurve: axis %s" % axis)
logging.debug("get_lightcurve: dt %f" % dt)
logging.debug("get_lightcurve: dt %s" % dt)
logging.debug("get_lightcurve: baseline_opts %s" % baseline_opts)
logging.debug("get_lightcurve: variance_opts %s" % variance_opts)

@@ -236,7 +236,7 @@ def get_joined_lightcurves(lc0_filename, lc1_filename, lc0_bck_filename, lc1_bck
logging.debug("get_joined_lightcurves lc1_bck: %s" % lc1_bck_filename)
logging.debug("get_joined_lightcurves: filters %s" % filters)
logging.debug("get_joined_lightcurves: axis %s" % axis)
logging.debug("get_joined_lightcurves: dt %f" % dt)
logging.debug("get_joined_lightcurves: dt %s" % dt)

data = DaveEngine.get_joined_lightcurves(lc0_destination, lc1_destination,
lc0_bck_destination, lc1_bck_destination,
@@ -269,7 +269,7 @@ def get_divided_lightcurves_from_colors(src_filename, bck_filename, gti_filename
logging.debug("get_divided_lightcurves_from_colors gti: %s" % gti_filename)
logging.debug("get_divided_lightcurves_from_colors: filters %s" % filters)
logging.debug("get_divided_lightcurves_from_colors: axis %s" % axis)
logging.debug("get_divided_lightcurves_from_colors: dt %f" % dt)
logging.debug("get_divided_lightcurves_from_colors: dt %s" % dt)

data = DaveEngine.get_divided_lightcurves_from_colors(src_destination, bck_destination, gti_destination, filters, axis, dt)

@@ -335,7 +335,7 @@ def get_power_density_spectrum(src_filename, bck_filename, gti_filename, target,
logging.debug("get_power_density_spectrum gti: %s" % gti_filename)
logging.debug("get_power_density_spectrum: filters %s" % filters)
logging.debug("get_power_density_spectrum: axis %s" % axis)
logging.debug("get_power_density_spectrum: dt %f" % dt)
logging.debug("get_power_density_spectrum: dt %s" % dt)
logging.debug("get_power_density_spectrum: nsegm %f" % nsegm)
logging.debug("get_power_density_spectrum: segm_size %f" % segm_size)
logging.debug("get_power_density_spectrum: norm %s" % norm)
@@ -372,7 +372,7 @@ def get_dynamical_spectrum(src_filename, bck_filename, gti_filename, target,
logging.debug("get_dynamical_spectrum gti: %s" % gti_filename)
logging.debug("get_dynamical_spectrum: filters %s" % filters)
logging.debug("get_dynamical_spectrum: axis %s" % axis)
logging.debug("get_dynamical_spectrum: dt %f" % dt)
logging.debug("get_dynamical_spectrum: dt %s" % dt)
logging.debug("get_dynamical_spectrum: nsegm %f" % nsegm)
logging.debug("get_dynamical_spectrum: segm_size %f" % segm_size)
logging.debug("get_dynamical_spectrum: norm %s" % norm)
@@ -506,7 +506,7 @@ def get_phase_lag_spectrum(src_filename, bck_filename, gti_filename, target,
logging.debug("get_phase_lag_spectrum gti: %s" % gti_filename)
logging.debug("get_phase_lag_spectrum: filters %s" % filters)
logging.debug("get_phase_lag_spectrum: axis %s" % axis)
logging.debug("get_phase_lag_spectrum: dt %f" % dt)
logging.debug("get_phase_lag_spectrum: dt %s" % dt)
logging.debug("get_phase_lag_spectrum: nsegm %f" % nsegm)
logging.debug("get_phase_lag_spectrum: segm_size %f" % segm_size)
logging.debug("get_phase_lag_spectrum: norm %s" % norm)
@@ -548,7 +548,7 @@ def get_rms_spectrum(src_filename, bck_filename, gti_filename, target,
logging.debug("get_rms_spectrum gti: %s" % gti_filename)
logging.debug("get_rms_spectrum: filters %s" % filters)
logging.debug("get_rms_spectrum: axis %s" % axis)
logging.debug("get_rms_spectrum: dt %f" % dt)
logging.debug("get_rms_spectrum: dt %s" % dt)
logging.debug("get_rms_spectrum: nsegm %f" % nsegm)
logging.debug("get_rms_spectrum: segm_size %f" % segm_size)
logging.debug("get_rms_spectrum: norm %s" % norm)
@@ -602,7 +602,7 @@ def get_fit_powerspectrum_result(src_filename, bck_filename, gti_filename, targe
logging.debug("get_fit_powerspectrum_result gti: %s" % gti_filename)
logging.debug("get_fit_powerspectrum_result: filters %s" % filters)
logging.debug("get_fit_powerspectrum_result: axis %s" % axis)
logging.debug("get_fit_powerspectrum_result: dt %f" % dt)
logging.debug("get_fit_powerspectrum_result: dt %s" % dt)
logging.debug("get_fit_powerspectrum_result: nsegm %f" % nsegm)
logging.debug("get_fit_powerspectrum_result: segm_size %f" % segm_size)
logging.debug("get_fit_powerspectrum_result: norm %s" % norm)
@@ -644,7 +644,7 @@ def get_bootstrap_results(src_filename, bck_filename, gti_filename, target,
logging.debug("get_bootstrap_results gti: %s" % gti_filename)
logging.debug("get_bootstrap_results: filters %s" % filters)
logging.debug("get_bootstrap_results: axis %s" % axis)
logging.debug("get_bootstrap_results: dt %f" % dt)
logging.debug("get_bootstrap_results: dt %s" % dt)
logging.debug("get_bootstrap_results: nsegm %f" % nsegm)
logging.debug("get_bootstrap_results: segm_size %f" % segm_size)
logging.debug("get_bootstrap_results: norm %s" % norm)
@@ -717,7 +717,7 @@ def get_lomb_scargle(src_filename, bck_filename, gti_filename, target,
logging.debug("get_lomb_scargle gti: %s" % gti_filename)
logging.debug("get_lomb_scargle: filters %s" % filters)
logging.debug("get_lomb_scargle: axis %s" % axis)
logging.debug("get_lomb_scargle: dt %f" % dt)
logging.debug("get_lomb_scargle: dt %s" % dt)
logging.debug("get_lomb_scargle: freq_range %s" % freq_range)
logging.debug("get_lomb_scargle: nyquist_factor %s" % nyquist_factor)
logging.debug("get_lomb_scargle: ls_norm %s" % ls_norm)
@@ -757,7 +757,7 @@ def get_pulse_search(src_filename, bck_filename, gti_filename, target,
logging.debug("get_pulse_search gti: %s" % gti_filename)
logging.debug("get_pulse_search: filters %s" % filters)
logging.debug("get_pulse_search: axis %s" % axis)
logging.debug("get_pulse_search: dt %f" % dt)
logging.debug("get_pulse_search: dt %s" % dt)
logging.debug("get_pulse_search: freq_range %s" % freq_range)
logging.debug("get_pulse_search: mode %s" % mode)
logging.debug("get_pulse_search: oversampling %s" % oversampling)
@@ -798,7 +798,7 @@ def get_phaseogram(src_filename, bck_filename, gti_filename, target,
logging.debug("get_phaseogram gti: %s" % gti_filename)
logging.debug("get_phaseogram: filters %s" % filters)
logging.debug("get_phaseogram: axis %s" % axis)
logging.debug("get_phaseogram: dt %f" % dt)
logging.debug("get_phaseogram: dt %s" % dt)
logging.debug("get_phaseogram: f %s" % f)
logging.debug("get_phaseogram: nph %s" % nph)
logging.debug("get_phaseogram: nt %s" % nt)
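The repeated switch from "dt %f" to "dt %s" across these endpoints presumably guards against dt arriving as something other than a number (for example None, or a string taken straight from the request): %f raises a TypeError in that case, while %s logs any value. A small standalone illustration:

import logging

logging.basicConfig(level=logging.DEBUG)
dt = None  # dt may reach the endpoint unset

try:
    logging.debug("get_lightcurve: dt %f" % dt)   # old format string
except TypeError as err:
    print("%f rejects non-numbers:", err)         # must be real number, not NoneType

logging.debug("get_lightcurve: dt %s" % dt)       # new format: logs "dt None" safely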
58 changes: 34 additions & 24 deletions src/main/python/utils/dave_engine.py
@@ -169,7 +169,7 @@ def apply_rmf_file_to_dataset(destination, rmf_destination):
# @param: filters: array with the filters to apply
# [{ table = "txt_table", column = "Time", from=0, to=10 }, ... ]
# @param: styles: dictionary with the plot style info
# { type = "2d", labels=["Time", "Rate Count"]}
# { type = "2d", ... }
# @param: axis: array with the column names to use in ploting
# [{ table = "txt_table", column = "Time" },
# { table = "txt_table", column = "Rate" } ... ]
@@ -186,14 +186,6 @@ def get_plot_data(src_destination, bck_destination, gti_destination, filters, st
logging.warn("No plot type specified on styles")
return None

if "labels" not in styles:
logging.warn("No plot labels specified on styles")
return None

if len(styles["labels"]) < 2:
logging.warn("Wrong number of labels specified on styles")
return None

if len(axis) < 2:
logging.warn("Wrong number of axis")
return None
@@ -203,15 +195,6 @@ def get_plot_data(src_destination, bck_destination, gti_destination, filters, st
return Plotter.get_plotdiv_xy(filtered_ds, axis)

elif styles["type"] == "3d":

if len(styles["labels"]) < 3:
logging.warn("Wrong number of labels specified on styles")
return None

if len(axis) < 3:
logging.warn("Wrong number of axis")
return None

return Plotter.get_plotdiv_xyz(filtered_ds, axis)

elif styles["type"] == "scatter":
@@ -664,6 +647,9 @@ def get_dynamical_spectrum(src_destination, bck_destination, gti_destination,
pds = AveragedPowerspectrum(lc=lc, segment_size=segm_size, norm=norm, gti=gti)

if pds:

#pds = rebin_spectrum_if_necessary(pds)

freq = pds.freq

pds_array, nphots_all = pds._make_segment_spectrum(lc, segm_size)
@@ -1166,6 +1152,8 @@ def get_rms_spectrum(src_destination, bck_destination, gti_destination,

if pds:

#pds = rebin_spectrum_if_necessary(pds)

if freq_range[0] < 0:
freq_low = min(pds.freq)
else:
@@ -1486,12 +1474,17 @@ def get_bootstrap_results(src_destination, bck_destination, gti_destination,
else:
sim_pds = AveragedPowerspectrum(lc=sim_lc, segment_size=segm_size, norm=norm, gti=gti)

parest, res = fit_powerspectrum(sim_pds, fit_model, starting_pars,
max_post=False, priors=None, fitmethod="L-BFGS-B")
if sim_pds:
#sim_pds = rebin_spectrum_if_necessary(sim_pds)

parest, res = fit_powerspectrum(sim_pds, fit_model, starting_pars,
max_post=False, priors=None, fitmethod="L-BFGS-B")

models_params.append(res.p_opt)
powers.append(sim_pds.power)
models_params.append(res.p_opt)
powers.append(sim_pds.power)

else:
logging.warn(ExHelper.getException('get_bootstrap_results: cant create powerspectrum for i: ' + str(i)))
except:
logging.error(ExHelper.getException('get_bootstrap_results for i: ' + str(i)))
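For context, a hedged, self-contained sketch of the per-iteration fit that the new if sim_pds: guard protects. The fit_powerspectrum call mirrors the code above; the import path, the toy light curve, and the model choice are assumptions:

import numpy as np
from astropy.modeling import models
from stingray import Lightcurve, AveragedPowerspectrum
from stingray.modeling import fit_powerspectrum  # assumed import path

# Toy data standing in for one simulated light curve of the bootstrap loop.
time = np.arange(0, 256, 1 / 32)
sim_lc = Lightcurve(time, np.random.poisson(100, time.size))
sim_pds = AveragedPowerspectrum(lc=sim_lc, segment_size=32, norm="leahy")

fit_model = models.Lorentz1D() + models.Const1D()   # assumed model
starting_pars = [2.0, 1.0, 1.0, 2.0]                # amplitude, x_0, fwhm, const

if sim_pds:
    parest, res = fit_powerspectrum(sim_pds, fit_model, starting_pars,
                                    max_post=False, priors=None,
                                    fitmethod="L-BFGS-B")
    print(res.p_opt)  # optimized parameters, as collected into models_params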

@@ -2008,9 +2001,26 @@ def create_power_density_spectrum(src_destination, bck_destination, gti_destinat
logging.debug("Create power density spectrum")

if pds_type == 'Sng':
return Powerspectrum(lc, norm=norm, gti=gti), lc, gti
pds = Powerspectrum(lc, norm=norm, gti=gti)
else:
return AveragedPowerspectrum(lc=lc, segment_size=segm_size, norm=norm, gti=gti), lc, gti
pds = AveragedPowerspectrum(lc=lc, segment_size=segm_size, norm=norm, gti=gti)

#if pds:
# pds = rebin_spectrum_if_necessary(pds)
#else:
# logging.warn("Can't create power spectrum")

return pds, lc, gti


# Rebins the pds down to MAX_PLOT_POINTS frequency bins to improve plotting performance
def rebin_spectrum_if_necessary (pds):
freq_size = len(pds.freq)
if freq_size > CONFIG.MAX_PLOT_POINTS:
df = (max(pds.freq) - min(pds.freq)) / CONFIG.MAX_PLOT_POINTS
logging.warn("Spectrum rebined to " + str(CONFIG.MAX_PLOT_POINTS) + " points, from " + str(freq_size) + " points, with df: " + str(df))
pds = pds.rebin(df=df)
return pds


def get_countrate_from_lc_ds (lc_destination, bck_destination, lc_name, bck_name):
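rebin_spectrum_if_necessary is added here (its call sites above are still commented out) to cap the number of frequency bins serialized to the GUI. A hedged usage sketch, assuming the helper is importable from utils.dave_engine and that CONFIG.MAX_PLOT_POINTS is set as in config.py:

import numpy as np
from stingray import Lightcurve, Powerspectrum
from utils.dave_engine import rebin_spectrum_if_necessary  # assumed import path

# A finely binned light curve that yields far more than MAX_PLOT_POINTS frequencies.
time = np.arange(0, 100, 0.001)
lc = Lightcurve(time, np.random.poisson(20, time.size))
pds = Powerspectrum(lc, norm="leahy")

pds = rebin_spectrum_if_necessary(pds)  # rebinned only when len(pds.freq) > CONFIG.MAX_PLOT_POINTS
print(len(pds.freq))                    # now on the order of MAX_PLOT_POINTS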
6 changes: 6 additions & 0 deletions src/main/python/utils/dave_reader.py
@@ -197,6 +197,12 @@ def get_events_fits_dataset_with_stingray(destination, hdulist, dsId='FITS',

event_list, events_start_time = substract_tstart_from_events(fits_data, time_offset)

# Gets the PI column data from the event list if required and neither PI nor PHA is present in additional_data
if "PI" in additional_columns \
and "PI" not in fits_data.additional_data \
and "PHA" not in fits_data.additional_data:
fits_data.additional_data["PI"] = event_list.pi

dataset = DataSet.get_dataset_applying_gtis(dsId, header, header_comments,
fits_data.additional_data, [],
event_list.time, [],
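The PI fallback above relies on the Stingray EventList exposing the per-event PI channel as its .pi attribute. A small hedged illustration with synthetic events (all values made up):

import numpy as np
from stingray.events import EventList

ev = EventList(time=np.sort(np.random.uniform(0, 100, 1000)),
               pi=np.random.randint(0, 256, 1000),
               gti=[[0, 100]])
print(ev.pi[:10])  # this is what dave_reader now copies into additional_data["PI"]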
