From 762e9771cdc72f71b0fc62abce980bba2efaf8bb Mon Sep 17 00:00:00 2001 From: "Kacper Kowalik (Xarthisius)" Date: Wed, 13 Nov 2024 07:58:53 -0600 Subject: [PATCH 1/2] Move code into new structure --- .gitignore | 166 ++++++++++++++++++ LICENSE.md => LICENSE | 0 pyproject.toml | 37 ++++ requirements.txt | 9 - alpss_main.py => src/alpss/__init__.py | 27 +-- .../alpss/alpss_auto_run.py | 0 alpss_run.py => src/alpss/alpss_run.py | 0 7 files changed, 212 insertions(+), 27 deletions(-) rename LICENSE.md => LICENSE (100%) create mode 100644 pyproject.toml delete mode 100644 requirements.txt rename alpss_main.py => src/alpss/__init__.py (99%) rename alpss_auto_run.py => src/alpss/alpss_auto_run.py (100%) rename alpss_run.py => src/alpss/alpss_run.py (100%) diff --git a/.gitignore b/.gitignore index f208e21..0dbf4f1 100644 --- a/.gitignore +++ b/.gitignore @@ -36,3 +36,169 @@ .Trashes ehthumbs.db Thumbs.db + + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +tmp*/ diff --git a/LICENSE.md b/LICENSE similarity index 100% rename from LICENSE.md rename to LICENSE diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..472e66d --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,37 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "ALPSS" +authors = [ + {name = "Jake Diamond", email = "jdiamo15@jhu.edu", github = "Jake-Diamond-9"}, + {name = "K. T. 
Ramesh", email = "ramesh@jhu.edu"}, +] +maintainers = [ + {name = "Jake Diamond", email = "jdiamo15@jhu.edu", github = "Jake-Diamond-9"}, + {name = "Kacper Kowalik", email = "xarthisius.kk@gmail.com", github = "xarthisius"}, + {name = "Ali Rachidi", email = "arachid1@jhu.edu", github = "arachid1"}, + {name = "All the contributors"}, +] +description = "Automated analysis of photonic Doppler velocimetry spall signals with uncertainty" +keywords = ["material science", "spall"] +readme = "README.md" +license = {text = "GPL-3"} +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Operating System :: OS Independent", + "Intended Audience :: Science/Research", + "Development Status :: 3 - Alpha", + "Topic :: Scientific/Engineering :: Materials Science", + "Natural Language :: English", +] +dependencies = [ + "numpy", + "pandas", + "matplotlib", + "scipy", + "watchdog", + "opencv-python", +] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index caab53f..0000000 --- a/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -numpy -pandas -matplotlib -scipy -watchdog -opencv-python -findiff -ipython -ipykernel \ No newline at end of file diff --git a/alpss_main.py b/src/alpss/__init__.py similarity index 99% rename from alpss_main.py rename to src/alpss/__init__.py index 8d701a3..3e3b15d 100644 --- a/alpss_main.py +++ b/src/alpss/__init__.py @@ -1,29 +1,27 @@ -from datetime import datetime +import os import traceback -from IPython.display import display +from datetime import datetime + +import cv2 as cv +import findiff import matplotlib.pyplot as plt -from matplotlib.patches import Rectangle -import pandas as pd import numpy as np -import os -from scipy.fft import fft, ifft, fftfreq +import pandas as pd +from matplotlib.patches import Rectangle +from scipy import signal +from scipy.fft import fft, fftfreq, ifft from scipy.fftpack import fftshift from scipy.optimize import curve_fit 
-from scipy import signal -import findiff -import cv2 as cv from scipy.signal import ShortTimeFFT # main function to link together all the sub-functions def alpss_main(**inputs): - # get the current working directory cwd = os.getcwd() # attempt to run the program in full try: - # begin the program timer start_time = datetime.now() @@ -88,13 +86,11 @@ def alpss_main(**inputs): # in case the program throws an error except Exception: - # print the traceback for the error print(traceback.format_exc()) # attempt to plot the voltage signal from the imported data try: - # import the desired data. Convert the time to skip and turn into number of rows t_step = 1 / inputs["sample_rate"] rows_to_skip = ( @@ -1111,7 +1107,6 @@ def smoothing( def spall_analysis(vc_out, iua_out, **inputs): # if user wants to pull out the spall points if inputs["spall_calculation"] == "yes": - # unpack dictionary values in to individual variables time_f = vc_out["time_f"] velocity_f_smooth = vc_out["velocity_f_smooth"] @@ -1135,7 +1130,6 @@ def spall_analysis(vc_out, iua_out, **inputs): # attempt to get the fist local minimum after the peak velocity to get the pullback # velocity. 'order' is the number of points on each side to compare to. 
try: - # get all the indices for relative minima in the domain, order them, and take the first one that occurs # after the peak velocity rel_min_idx = signal.argrelmin(velocity_f_smooth, order=pb_neighbors)[0] @@ -1302,7 +1296,6 @@ def spall_doi_finder(**inputs): # if not using a user input value for the signal start time if inputs["start_time_user"] == "none": - # Find the position/row of the top of the binary spectrogram for each time/column col_len = th3.shape[1] # number of columns row_len = th3.shape[0] # number of columns @@ -1313,7 +1306,6 @@ def spall_doi_finder(**inputs): for col_idx in range(col_len): # loop over every column for row_idx in range(row_len): # loop over every row - # moving from the top down, if the pixel is 255 then store the index and break to move to the next column idx_top = row_len - row_idx - 1 @@ -1358,7 +1350,6 @@ def spall_doi_finder(**inputs): # if using a user input for the signal start time else: - # these params become nan because they are only needed if the program # is finding the signal start time automatically f_doi_top_line_clean = np.nan diff --git a/alpss_auto_run.py b/src/alpss/alpss_auto_run.py similarity index 100% rename from alpss_auto_run.py rename to src/alpss/alpss_auto_run.py diff --git a/alpss_run.py b/src/alpss/alpss_run.py similarity index 100% rename from alpss_run.py rename to src/alpss/alpss_run.py From 4a5e394f78ce581a2fc7e1ddb245d806cb38a2c3 Mon Sep 17 00:00:00 2001 From: "Kacper Kowalik (Xarthisius)" Date: Wed, 13 Nov 2024 08:59:21 -0600 Subject: [PATCH 2/2] Finalize move --- preprint.pdf => paper/preprint.pdf | Bin pyproject.toml | 10 +- src/alpss/__init__.py | 207 ++++++++++++++++-- src/alpss/alpss_auto_run.py | 106 --------- src/alpss/alpss_run.py | 124 ----------- src/alpss/commands.py | 67 ++++++ .../input_data}/example_file.csv | 0 .../output_data}/example_file--inputs.csv | 0 .../example_file--noise--frac.csv | 0 .../output_data}/example_file--plots.png | Bin 
.../output_data}/example_file--results.csv | 0 .../example_file--vel--uncert.csv | 0 .../example_file--velocity--smooth.csv | 0 .../output_data}/example_file--velocity.csv | 0 .../output_data}/example_file--voltage.csv | 0 15 files changed, 260 insertions(+), 254 deletions(-) rename preprint.pdf => paper/preprint.pdf (100%) delete mode 100644 src/alpss/alpss_auto_run.py delete mode 100644 src/alpss/alpss_run.py create mode 100644 src/alpss/commands.py rename {input_data => tests/input_data}/example_file.csv (100%) rename {output_data => tests/output_data}/example_file--inputs.csv (100%) rename {output_data => tests/output_data}/example_file--noise--frac.csv (100%) rename {output_data => tests/output_data}/example_file--plots.png (100%) rename {output_data => tests/output_data}/example_file--results.csv (100%) rename {output_data => tests/output_data}/example_file--vel--uncert.csv (100%) rename {output_data => tests/output_data}/example_file--velocity--smooth.csv (100%) rename {output_data => tests/output_data}/example_file--velocity.csv (100%) rename {output_data => tests/output_data}/example_file--voltage.csv (100%) diff --git a/preprint.pdf b/paper/preprint.pdf similarity index 100% rename from preprint.pdf rename to paper/preprint.pdf diff --git a/pyproject.toml b/pyproject.toml index 472e66d..6463a83 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,8 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -name = "ALPSS" +name = "alpss" +version = "1.3.0" authors = [ {name = "Jake Diamond", email = "jdiamo15@jhu.edu", github = "Jake-Diamond-9"}, {name = "K. T. 
Ramesh", email = "ramesh@jhu.edu"}, @@ -24,7 +25,7 @@ classifiers = [ "Operating System :: OS Independent", "Intended Audience :: Science/Research", "Development Status :: 3 - Alpha", - "Topic :: Scientific/Engineering :: Materials Science", + "Topic :: Scientific/Engineering", "Natural Language :: English", ] dependencies = [ @@ -34,4 +35,9 @@ dependencies = [ "scipy", "watchdog", "opencv-python", + "findiff", ] + +[project.scripts] +alpss = "alpss.commands:run_alpss" +alpss-watch = "alpss.commands:start_watcher" diff --git a/src/alpss/__init__.py b/src/alpss/__init__.py index 3e3b15d..5af26f4 100644 --- a/src/alpss/__init__.py +++ b/src/alpss/__init__.py @@ -16,7 +16,166 @@ # main function to link together all the sub-functions -def alpss_main(**inputs): +def alpss_main( + filename=None, + save_data="yes", + start_time_user="none", + header_lines=1, + time_to_skip=2.3e-6, + time_to_take=1.5e-6, + t_before=5e-9, + t_after=50e-9, + start_time_correction=0e-9, + freq_min=1.5e9, + freq_max=4e9, + smoothing_window=601, + smoothing_wid=3, + smoothing_amp=1, + smoothing_sigma=1, + smoothing_mu=0, + pb_neighbors=400, + pb_idx_correction=0, + rc_neighbors=400, + rc_idx_correction=0, + sample_rate=80e9, + nperseg=512, + noverlap=435, + nfft=5120, + window="hann", + blur_kernel=(5, 5), + blur_sigx=0, + blur_sigy=0, + carrier_band_time=250e-9, + cmap="viridis", + uncert_mult=100, + order=6, + wid=5e7, + lam=1547.461e-9, + C0=4540, + density=1730, + delta_rho=9, + delta_C0=23, + delta_lam=8e-18, + theta=0, + delta_theta=5, + exp_data_dir=(os.getcwd() + "/input_data"), + out_files_dir=(os.getcwd() + "/output_data"), + display_plots="yes", + spall_calculation="yes", + plot_figsize=(30, 10), + plot_dpi=300, +): + """Main alpss function to run the full program + + Parameters + ---------- + filename : :obj:`str` + filename for the data to run + save_data : str + 'yes' or 'no' to save output data + start_time_user : :obj:`str` or float + if 'none' the program will attempt to find 
the signal start time automatically. + if float then the program will use that as the signal start time + header_lines : int + number of header lines to skip in the data file + time_to_skip : float + the amount of time to skip in the full data file before beginning to read in data + time_to_take : float + the amount of time to take in the data file after skipping time_to_skip + t_before : float + amount of time before the signal start time to include in the velocity calculation + t_after : float + amount of time after the signal start time to include in the velocity calculation + start_time_correction : float + amount of time to adjust the signal start time by + freq_min : float + minimum frequency for the region of interest + freq_max : float + maximum frequency for the region of interest + smoothing_window : int + number of points to use for the smoothing window. must be an odd number + smoothing_wid : float + half the width of the normal distribution used to calculate + the smoothing weights (recommend 3) + smoothing_amp : float + amplitude of the normal distribution used to calculate the smoothing weights (recommend 1) + smoothing_sigma : float + standard deviation of the normal distribution used to calculate the smoothing weights + (recommend 1) + smoothing_mu : float + mean of the normal distribution used to calculate the smoothing weights (recommend 0) + pb_neighbors : int + number of neighbors to compare to when searching for the pullback local minimum + pb_idx_correction : int + number of local minima to adjust by if the program grabs the wrong one + rc_neighbors : int + number of neighbors to compare to when searching for the recompression local maximum + rc_idx_correction : int + number of local maxima to adjust by if the program grabs the wrong one + sample_rate : float + sample rate of the oscilloscope used in the experiment + nperseg : int + number of points to use per segment of the stft + noverlap : int + number of points to overlap per segment of 
the stft + nfft : int + number of points to zero pad per segment of the stft + window : str or tuple or array_like + window function to use for the stft (recommend 'hann') + blur_kernel : tuple + kernel size for gaussian blur smoothing (recommend (5, 5)) + blur_sigx : float + standard deviation of the gaussian blur kernel in the x direction (recommend 0) + blur_sigy : float + standard deviation of the gaussian blur kernel in the y direction (recommend 0) + carrier_band_time : float + length of time from the beginning of the imported data window to average + the frequency of the top of the carrier band in the thresholded spectrogram + cmap : str + colormap for the spectrograms (recommend 'viridis') + uncert_mult : float + factor to multiply the velocity uncertainty by when plotting - allows for easier + visualization when uncertainties are small + order : int + order for the gaussian notch filter used to remove the carrier band (recommend 6) + wid : float + width of the gaussian notch filter used to remove the carrier band (recommend 1e8) + lam : float + wavelength of the target laser + C0 : float + bulk wavespeed of the sample + density : float + density of the sample + delta_rho : float + uncertainty in density of the sample + delta_C0 : float + uncertainty in the bulk wavespeed of the sample + delta_lam : float + uncertainty in the wavelength of the target laser + theta : float + angle of incidence of the PDV probe + delta_theta : float + uncertainty in the angle of incidence of the PDV probe + exp_data_dir : str + directory from which to read the experimental data file + out_files_dir : str + directory to save output data to + display_plots : str + 'yes' to display the final plots and 'no' to not display them. 
if save_data='yes' + and display_plots='no' the plots will be saved but not displayed + spall_calculation : str + 'yes' to run the calculations for the spall analysis and 'no' to extract the velocity + without doing the spall analysis + plot_figsize : tuple + figure size for the final plots + plot_dpi : float + dpi for the final plots + + Returns + ------- + None + """ + inputs = locals() # get the current working directory cwd = os.getcwd() @@ -98,10 +257,10 @@ def alpss_main(**inputs): ) # skip the header lines too nrows = inputs["time_to_take"] / t_step - # change directory to where the data is stored - os.chdir(inputs["exp_data_dir"]) data = pd.read_csv( - inputs["filename"], skiprows=int(rows_to_skip), nrows=int(nrows) + os.path.join(inputs["exp_data_dir"], inputs["filename"]), + skiprows=int(rows_to_skip), + nrows=int(nrows) ) # rename the columns of the data @@ -122,7 +281,7 @@ def alpss_main(**inputs): mag = np.abs(Zxx) # plotting - fig, (ax1, ax2) = plt.subplots(1, 2, num=2, figsize=(11, 4), dpi=300) + fig, (ax1, ax2) = plt.subplots(1, 2, num=2, figsize=(11, 4), dpi=300, clear=True) ax1.plot(time / 1e-9, voltage / 1e-3) ax1.set_xlabel("Time (ns)") ax1.set_ylabel("Voltage (mV)") @@ -139,15 +298,17 @@ def alpss_main(**inputs): fig.suptitle("ERROR: Program Failed", c="r", fontsize=16) plt.tight_layout() - plt.show() + if inputs["display_plots"] == "yes": + plt.show() + if inputs["save_data"] == "yes": + fig.savefig( + os.path.join(inputs["out_files_dir"], inputs["filename"][0:-4]) + "_error.png" + ) # if that also fails then print the traceback and stop running the program except Exception: print(traceback.format_exc()) - # move back to the original working directory - os.chdir(cwd) - # function to filter out the carrier frequency def carrier_filter(sdf_out, cen, **inputs): @@ -548,7 +709,7 @@ def plotting( **inputs, ): # create the figure and axes - fig = plt.figure(num=1, figsize=inputs["plot_figsize"], dpi=inputs["plot_dpi"]) + fig = plt.figure(num=1, 
figsize=inputs["plot_figsize"], dpi=inputs["plot_dpi"], clear=True) ax1 = plt.subplot2grid((3, 5), (0, 0)) # voltage data ax2 = plt.subplot2grid((3, 5), (0, 1)) # noise distribution histogram ax3 = plt.subplot2grid((3, 5), (1, 0)) # imported voltage spectrogram @@ -939,11 +1100,11 @@ def saving( sdf_out, cen, vc_out, sa_out, iua_out, fua_out, start_time, end_time, fig, **inputs ): # change to the output files directory - os.chdir(inputs["out_files_dir"]) + fname = os.path.join(inputs["out_files_dir"], inputs["filename"][0:-4]) # save the plots fig.savefig( - fname=(inputs["filename"][0:-4] + "--plots.png"), + fname=fname + "--plots.png", dpi="figure", format="png", facecolor="w", @@ -952,13 +1113,13 @@ def saving( # save the function inputs used for this run inputs_df = pd.DataFrame.from_dict(inputs, orient="index", columns=["Input"]) inputs_df.to_csv( - inputs["filename"][0:-4] + "--inputs" + ".csv", index=True, header=False + fname + "--inputs.csv", index=True, header=False ) # save the noisy velocity trace velocity_data = np.stack((vc_out["time_f"], vc_out["velocity_f"]), axis=1) np.savetxt( - inputs["filename"][0:-4] + "--velocity" + ".csv", velocity_data, delimiter="," + fname + "--velocity.csv", velocity_data, delimiter="," ) # save the smoothed velocity trace @@ -966,7 +1127,7 @@ def saving( (vc_out["time_f"], vc_out["velocity_f_smooth"]), axis=1 ) np.savetxt( - inputs["filename"][0:-4] + "--velocity--smooth" + ".csv", + fname + "--velocity--smooth.csv", velocity_data_smooth, delimiter=",", ) @@ -981,19 +1142,19 @@ def saving( axis=1, ) np.savetxt( - inputs["filename"][0:-4] + "--voltage" + ".csv", voltage_data, delimiter="," + fname + "--voltage.csv", voltage_data, delimiter="," ) # save the noise fraction noise_data = np.stack((vc_out["time_f"], iua_out["inst_noise"]), axis=1) np.savetxt( - inputs["filename"][0:-4] + "--noise--frac" + ".csv", noise_data, delimiter="," + fname + "--noise--frac.csv", noise_data, delimiter="," ) # save the velocity 
uncertainty vel_uncert_data = np.stack((vc_out["time_f"], iua_out["vel_uncert"]), axis=1) np.savetxt( - inputs["filename"][0:-4] + "--vel--uncert" + ".csv", + fname + "--vel--uncert.csv", vel_uncert_data, delimiter=",", ) @@ -1049,7 +1210,7 @@ def saving( } results_df = pd.DataFrame(data=results_to_save) results_df.to_csv( - inputs["filename"][0:-4] + "--results" + ".csv", index=False, header=False + fname + "--results.csv", index=False, header=False ) """ @@ -1242,9 +1403,11 @@ def spall_doi_finder(**inputs): ) # skip the 5 header lines too nrows = inputs["time_to_take"] / t_step - # change directory to where the data is stored - os.chdir(inputs["exp_data_dir"]) - data = pd.read_csv(inputs["filename"], skiprows=int(rows_to_skip), nrows=int(nrows)) + data = pd.read_csv( + os.path.join(inputs["exp_data_dir"], inputs["filename"]), + skiprows=int(rows_to_skip), + nrows=int(nrows) + ) # rename the columns of the data data.columns = ["Time", "Ampl"] diff --git a/src/alpss/alpss_auto_run.py b/src/alpss/alpss_auto_run.py deleted file mode 100644 index 375c089..0000000 --- a/src/alpss/alpss_auto_run.py +++ /dev/null @@ -1,106 +0,0 @@ -""" -Credit to Michael Cho -https://michaelcho.me/article/using-pythons-watchdog-to-monitor-changes-to-a-directory -""" - -# %% - -import time -from watchdog.observers import Observer -from watchdog.events import FileSystemEventHandler -from alpss_main import * -import os - - -class Watcher: - - # this is the directory where you will add the files to - DIRECTORY_TO_WATCH = os.getcwd() + "/input_data" - - def __init__(self): - self.observer = Observer() - - def run(self): - event_handler = Handler() - self.observer.schedule(event_handler, self.DIRECTORY_TO_WATCH, recursive=True) - self.observer.start() - try: - while True: - time.sleep(5) - except: - self.observer.stop() - print("Error") - - self.observer.join() - - -class Handler(FileSystemEventHandler): - - @staticmethod - def on_any_event(event): - if event.is_directory: - return None - - 
elif event.event_type == "created": - - # Take any action here when a file is first created. - print("Received created event - %s." % event.src_path) - - fname = os.path.split(event.src_path)[1] - print(f"File Created: {fname}") - - # use these function inputs the same as for the non-automated function alpss_run.py - alpss_main( - filename=fname, - save_data="yes", - start_time_user="none", - header_lines=1, - time_to_skip=2.3e-6, - time_to_take=1.5e-6, - t_before=5e-9, - t_after=50e-9, - start_time_correction=0e-9, - freq_min=1.5e9, - freq_max=4e9, - smoothing_window=601, - smoothing_wid=3, - smoothing_amp=1, - smoothing_sigma=1, - smoothing_mu=0, - pb_neighbors=400, - pb_idx_correction=0, - rc_neighbors=400, - rc_idx_correction=0, - sample_rate=80e9, - nperseg=512, - noverlap=435, - nfft=5120, - window="hann", - blur_kernel=(5, 5), - blur_sigx=0, - blur_sigy=0, - carrier_band_time=250e-9, - cmap="viridis", - uncert_mult=100, - order=6, - wid=5e7, - lam=1547.461e-9, - C0=4540, - density=1730, - delta_rho=9, - delta_C0=23, - delta_lam=8e-18, - theta=0, - delta_theta=5, - exp_data_dir=(os.getcwd() + "/input_data"), - out_files_dir=(os.getcwd() + "/output_data"), - display_plots="yes", - spall_calculation="yes", - plot_figsize=(30, 10), - plot_dpi=300, - ) - - -if __name__ == "__main__": - w = Watcher() - w.run() diff --git a/src/alpss/alpss_run.py b/src/alpss/alpss_run.py deleted file mode 100644 index 8c786dd..0000000 --- a/src/alpss/alpss_run.py +++ /dev/null @@ -1,124 +0,0 @@ -""" -ALPSS -Jake Diamond (2024) -Johns Hopkins University -Hopkins Extreme Materials Institute (HEMI) -Please report any bugs or comments to jdiamo15@jhu.edu - - -Key for input variables: -filename: str; filename for the data to run -save_data: str; 'yes' or 'no' to save output data -start_time_user: str or float; if 'none' the program will attempt to find the - signal start time automatically. 
if float then - the program will use that as the signal start time -header_lines: int; number of header lines to skip in the data file -time_to_skip: float; the amount of time to skip in the full data file before beginning to read in data -time_to_take: float; the amount of time to take in the data file after skipping time_to_skip -t_before: float; amount of time before the signal start time to include in the velocity calculation -t_after: float; amount of time after the signal start time to include in the velocity calculation -start_time_correction: float; amount of time to adjust the signal start time by -freq_min: float; minimum frequency for the region of interest -freq_max: float; maximum frequency for the region of interest -smoothing_window: int; number of points to use for the smoothing window. must be an odd number -smoothing_wid: float; half the width of the normal distribution used - to calculate the smoothing weights (recommend 3) -smoothing_amp: float; amplitude of the normal distribution used to calculate - the smoothing weights (recommend 1) -smoothing_sigma: float; standard deviation of the normal distribution used - to calculate the smoothing weights (recommend 1) -smoothing_mu: float; mean of the normal distribution used to calculate - the smoothing weights (recommend 0) -pb_neighbors: int; number of neighbors to compare to when searching - for the pullback local minimum -pb_idx_correction: int; number of local minima to adjust by if the program grabs the wrong one -rc_neighbors: int; number of neighbors to compare to when searching - for the recompression local maximum -rc_idx_correction: int; number of local maxima to adjust by if the program grabs the wrong one -sample_rate: float; sample rate of the oscilloscope used in the experiment -nperseg: int; number of points to use per segment of the stft -noverlap: int; number of points to overlap per segment of the stft -nfft: int; number of points to zero pad per segment of the stft -window: str or 
tuple or array_like; window function to use for the stft (recommend 'hann') -blur_kernel: tuple; kernel size for gaussian blur smoothing (recommend (5, 5)) -blur_sigx: float; standard deviation of the gaussian blur kernel in the x direction (recommend 0) -blur_sigy: float; standard deviation of the gaussian blur kernel in the y direction (recommend 0) -carrier_band_time: float; length of time from the beginning of the imported data window to average - the frequency of the top of the carrier band in the thresholded spectrogram -cmap: str; colormap for the spectrograms (recommend 'viridis') -uncert_mult: float; factor to multiply the velocity uncertainty by when plotting - allows for easier - visulaization when uncertainties are small -order: int; order for the gaussian notch filter used to remove the carrier band (recommend 6) -wid: float; width of the gaussian notch filter used to remove the carrier band (recommend 1e8) -lam: float; wavelength of the target laser -C0: float; bulk wavespeed of the sample -density: float; density of the sample -delta_rho: float; uncertainty in density of the sample -delta_C0: float; uncertainty in the bulk wavespeed of the sample -delta_lam: float; uncertainty in the wavelength of the target laser -theta: float; angle of incidence of the PDV probe -delta_theta: float; uncertainty in the angle of incidence of the PDV probe -exp_data_dir: str; directory from which to read the experimental data file -out_files_dir: str; directory to save output data to -display_plots: str; 'yes' to display the final plots and 'no' to not display them. 
if save_data='yes' - and and display_plots='no' the plots will be saved but not displayed -spall_calculation: str; 'yes' to run the calculations for the spall analysis and 'no' to extract the velocity - without doing the spall analysis -plot_figsize: tuple; figure size for the final plots -plot_dpi: float; dpi for the final plots -""" - -# %% -from alpss_main import * -import os - - -alpss_main( - filename="example_file.csv", - save_data="yes", - start_time_user="none", - header_lines=1, - time_to_skip=2.3e-6, - time_to_take=1.5e-6, - t_before=5e-9, - t_after=50e-9, - start_time_correction=0e-9, - freq_min=1.5e9, - freq_max=4e9, - smoothing_window=601, - smoothing_wid=3, - smoothing_amp=1, - smoothing_sigma=1, - smoothing_mu=0, - pb_neighbors=400, - pb_idx_correction=0, - rc_neighbors=400, - rc_idx_correction=0, - sample_rate=80e9, - nperseg=512, - noverlap=435, - nfft=5120, - window="hann", - blur_kernel=(5, 5), - blur_sigx=0, - blur_sigy=0, - carrier_band_time=250e-9, - cmap="viridis", - uncert_mult=100, - order=6, - wid=5e7, - lam=1547.461e-9, - C0=4540, - density=1730, - delta_rho=9, - delta_C0=23, - delta_lam=8e-18, - theta=0, - delta_theta=5, - exp_data_dir=(os.getcwd() + "/input_data"), - out_files_dir=(os.getcwd() + "/output_data"), - display_plots="yes", - spall_calculation="yes", - plot_figsize=(30, 10), - plot_dpi=300, -) diff --git a/src/alpss/commands.py b/src/alpss/commands.py new file mode 100644 index 0000000..8757b3c --- /dev/null +++ b/src/alpss/commands.py @@ -0,0 +1,67 @@ +import argparse +import os +import time + +from watchdog.events import FileSystemEventHandler +from watchdog.observers import Observer + +from . 
import alpss_main + +""" +Credit to Michael Cho +https://michaelcho.me/article/using-pythons-watchdog-to-monitor-changes-to-a-directory +""" + + +class Watcher: + # this is the directory where you will add the files to + DIRECTORY_TO_WATCH = os.getcwd() + "/input_data" + + def __init__(self): + self.observer = Observer() + + def run(self): + event_handler = Handler() + self.observer.schedule(event_handler, self.DIRECTORY_TO_WATCH, recursive=True) + self.observer.start() + try: + while True: + time.sleep(5) + except Exception as e: + self.observer.stop() + print("Error in Watcher.run: ", e) + + self.observer.join() + + +class Handler(FileSystemEventHandler): + @staticmethod + def on_any_event(event): + if event.is_directory: + return None + + elif event.event_type == "created": + # Take any action here when a file is first created. + print("Received created event - %s." % event.src_path) + + fname = os.path.split(event.src_path)[1] + print(f"File Created: {fname}") + + # use these function inputs the same as for the non-automated function alpss_run.py + alpss_main(filename=fname) + + +def start_watcher(): + w = Watcher() + w.run() + + +def run_alpss(): + parser = argparse.ArgumentParser(description="Run ALPSS on a file") + parser.add_argument( + "filename", + type=str, + help="The name of the file to run ALPSS on", + ) + args = parser.parse_args() + alpss_main(filename=args.filename) diff --git a/input_data/example_file.csv b/tests/input_data/example_file.csv similarity index 100% rename from input_data/example_file.csv rename to tests/input_data/example_file.csv diff --git a/output_data/example_file--inputs.csv b/tests/output_data/example_file--inputs.csv similarity index 100% rename from output_data/example_file--inputs.csv rename to tests/output_data/example_file--inputs.csv diff --git a/output_data/example_file--noise--frac.csv b/tests/output_data/example_file--noise--frac.csv similarity index 100% rename from output_data/example_file--noise--frac.csv rename to 
tests/output_data/example_file--noise--frac.csv diff --git a/output_data/example_file--plots.png b/tests/output_data/example_file--plots.png similarity index 100% rename from output_data/example_file--plots.png rename to tests/output_data/example_file--plots.png diff --git a/output_data/example_file--results.csv b/tests/output_data/example_file--results.csv similarity index 100% rename from output_data/example_file--results.csv rename to tests/output_data/example_file--results.csv diff --git a/output_data/example_file--vel--uncert.csv b/tests/output_data/example_file--vel--uncert.csv similarity index 100% rename from output_data/example_file--vel--uncert.csv rename to tests/output_data/example_file--vel--uncert.csv diff --git a/output_data/example_file--velocity--smooth.csv b/tests/output_data/example_file--velocity--smooth.csv similarity index 100% rename from output_data/example_file--velocity--smooth.csv rename to tests/output_data/example_file--velocity--smooth.csv diff --git a/output_data/example_file--velocity.csv b/tests/output_data/example_file--velocity.csv similarity index 100% rename from output_data/example_file--velocity.csv rename to tests/output_data/example_file--velocity.csv diff --git a/output_data/example_file--voltage.csv b/tests/output_data/example_file--voltage.csv similarity index 100% rename from output_data/example_file--voltage.csv rename to tests/output_data/example_file--voltage.csv