Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 5 additions & 4 deletions .github/workflows/testing.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,16 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
uses: actions/setup-python@v5

with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
sudo apt-get update
python -m pip install --upgrade pip
python -m pip install pytest
python setup.py install
python3 -m pip install --upgrade pip
python3 -m pip install -e ".[dev]"


- name: test
run: |
Expand Down
57 changes: 57 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "study-lyte"
version = "0.7.2"
description = "Analysis software for the Lyte probe, a digital penetrometer for studying snow"
# NOTE: fixed typo "snow densiy" -> "snow density"
keywords = ["snow penetrometer", "smart probe", "digital penetrometer", "lyte probe", "avalanches", "snow density"]
readme = "README.rst"
requires-python = ">=3.8"
classifiers = [
    'Development Status :: 2 - Pre-Alpha',
    'Intended Audience :: Developers',
    'Natural Language :: English',
    'Programming Language :: Python :: 3.8',
    'Programming Language :: Python :: 3.9',
    'Programming Language :: Python :: 3.10'
]
# pandas bounds merged into a single PEP 508 specifier instead of two
# duplicate "pandas" entries (equivalent constraint, conventional form).
dependencies = [
    "pandas>2.0.0,<3.0.0",
    "scipy",
    "shapely"
]

[project.optional-dependencies]
dev = [
    "pytest",
    "pytest-cov",
    "matplotlib",
    "jupyterlab",
    "twine"
]

docs = [
    "nbsphinx>=0.8.12",
    "sphinx-gallery>=0.9.0",
    "nbconvert>=7.2.9",
    "Sphinx>=5.0.0,<6.0.0",
    "pandoc>=1.0.2",
    "sphinxcontrib-apidoc>=0.3.0",
    "ipython>=7.23.1"
]

all = ["study_lyte[dev,docs]"]

[project.license]
file = "LICENSE"

[project.urls]
Homepage = "https://adventuredata.com/"
Documentation = "https://study-lyte.readthedocs.io"
Repository = "https://github.com/AdventureData/study_lyte"
Issues = "https://github.com/AdventureData/study_lyte/issues"

[tool.setuptools]
include-package-data = false

[tool.setuptools.packages.find]
include = ["study_lyte*"]
exclude = ["docs*", "tests*"]
16 changes: 0 additions & 16 deletions requirements_dev.txt

This file was deleted.

43 changes: 0 additions & 43 deletions setup.py

This file was deleted.

37 changes: 26 additions & 11 deletions study_lyte/adjustments.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,8 @@ def get_neutral_bias_at_border(series: pd.Series, fractional_basis: float = 0.00
Returns:
bias_adj: bias adjusted data to near zero
"""
bias = get_directional_mean(series.values, fractional_basis=fractional_basis, direction=direction)
arr = series.values if hasattr(series,'values') else series
bias = get_directional_mean(arr, fractional_basis=fractional_basis, direction=direction)
bias_adj = series - bias
return bias_adj

Expand Down Expand Up @@ -108,7 +109,7 @@ def merge_on_to_time(df_list, final_time):
if i == 0:
result = data
else:
result = pd.merge_ordered(result, data, on='time', fill_method='cubic')
result = pd.merge_ordered(result, data, on='time')

# interpolate the nan's
result = result.interpolate(method='nearest', limit_direction='both')
Expand Down Expand Up @@ -136,32 +137,47 @@ def merge_time_series(df_list):
if i == 0:
result = df.copy()
else:
result = pd.merge_ordered(result, df, on='time', fill_method='cubic')
result = pd.merge_ordered(result, df, on='time')

# interpolate the nan's
result = result.interpolate(method='index')
return result


def remove_ambient(active, ambient, min_ambient_range=50, direction='forward'):
def remove_ambient(active, ambient, min_ambient_range=100, direction='forward'):
"""
Attempts to remove the ambient signal from the active signal
"""
amb_max = ambient.max()
amb_min = ambient.min()
if abs(amb_max - amb_min) > min_ambient_range:
# Only adjust up to the dropdown
tol = 0.05
n = get_points_from_fraction(len(ambient), 0.01)
amb = ambient.rolling(window=n, center=True, closed='both', min_periods=1).mean()
amb_back = get_directional_mean(amb, direction='backward', fractional_basis=0.1)
active_forward = get_directional_mean(active, direction='forward', fractional_basis=0.1)

ind = amb < (amb_back * (1 + tol))
decayed_idx = np.argwhere(ind.values)
if decayed_idx.any():
decayed_idx = decayed_idx[0][0]
else:
decayed_idx = 0

norm_ambient = get_normalized_at_border(amb, direction=direction)
norm_active = get_normalized_at_border(active, direction=direction)
basis = get_directional_mean(active, direction=direction)
clean = (norm_active - norm_ambient) * basis
if clean.min() < 0 and 'backward' not in direction:
clean = clean - clean.min()
norm_ambient[decayed_idx:] = 0
norm_diff = norm_active - norm_ambient
norm_diff[ norm_diff <= 0] = 0 #np.nan
norm_diff = norm_diff.interpolate(method='cubic')
clean = active_forward * norm_diff
clean[:int(decayed_idx*(0.5))] = 1
# Zero cant work here
clean[clean < 1] = 1

else:
clean = active

return clean


Expand All @@ -175,7 +191,6 @@ def apply_calibration(series, coefficients, minimum=None, maximum=None):
result[result > maximum] = maximum
if minimum is not None:
result[result < minimum] = minimum

return result


Expand Down Expand Up @@ -312,4 +327,4 @@ def zfilter(series, fraction):
zi = np.zeros(filter_coefficients.shape[0]-1) #lfilter_zi(filter_coefficients, 1)
filtered, zf = lfilter(filter_coefficients, 1, series, zi=zi)
filtered = lfilter(filter_coefficients, 1, filtered[::-1], zi=zf)[0][::-1]
return filtered
return filtered
6 changes: 3 additions & 3 deletions study_lyte/depth.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import pandas as pd
from scipy.integrate import cumtrapz
from scipy.integrate import cumulative_trapezoid
import numpy as np
from types import SimpleNamespace

Expand Down Expand Up @@ -33,9 +33,9 @@ def get_depth_from_acceleration(acceleration_df: pd.DataFrame) -> pd.DataFrame:
position_vec = {}
for i, axis in enumerate(acceleration_columns):
# Integrate acceleration to velocity
v = cumtrapz(acc[axis].values, acc.index, initial=0)
v = cumulative_trapezoid(acc[axis].values, acc.index, initial=0)
# Integrate velocity to position
position_vec[axis] = cumtrapz(v, acc.index, initial=0)
position_vec[axis] = cumulative_trapezoid(v, acc.index, initial=0)

position_df = pd.DataFrame.from_dict(position_vec)
position_df['time'] = acc.index
Expand Down
48 changes: 29 additions & 19 deletions study_lyte/detect.py
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ def get_acceleration_stop(acceleration, threshold=-0.2, max_threshold=0.1):
return acceleration_stop


def get_nir_surface(clean_active, threshold=-1, max_threshold=0.25):
def get_nir_surface(clean_active, threshold=30, max_threshold=None):
"""
Using the cleaned active, estimate the index at when the probe was in the snow.

Expand All @@ -190,22 +190,22 @@ def get_nir_surface(clean_active, threshold=-1, max_threshold=0.25):
Return:
surface: Integer index of the estimated snow surface
"""
n = get_points_from_fraction(len(clean_active), 0.01)
# n = get_points_from_fraction(len(clean_active), 0.01)
# Normalize by data unaffected by ambient
clean_norm = clean_active / clean_active[n:].mean()
neutral = get_neutral_bias_at_border(clean_norm)
neutral = get_neutral_bias_at_border(clean_active)

# Retrieve a likely candidate under challenging ambient conditions

max_idx = np.argwhere((neutral == neutral.min()).values)[0][0]
window = get_points_from_fraction(len(neutral), 0.01)
diff = neutral.rolling(window=window).std().values

# Detect likely candidate normal ambient conditions
surface = get_signal_event(neutral, search_direction='forward', threshold=threshold,
max_threshold=max_threshold, n_points=n)
surface = get_signal_event(diff, search_direction='backward', threshold=threshold,
max_threshold=max_threshold, n_points=1)
# No surface found and all values met criteria
if surface == len(neutral)-1:
if surface == len(neutral)-1 or surface is None:
surface = 0

# from .plotting import plot_nir_surface
# plot_nir_surface(neutral, diff, surface)
return surface


Expand Down Expand Up @@ -250,28 +250,38 @@ def get_ground_strike(signal, stop_idx):
"""
The probe sometimes hits the ground before we detect stop.
"""
buffer = get_points_from_fraction(len(signal), 0.05)
buffer = get_points_from_fraction(len(signal), 0.12)
start = stop_idx - buffer
start = start if start > 0 else 0
end = stop_idx + buffer
end = end if end < len(signal) else len(signal)-1
rel_stop = stop_idx - start

sig_arr = signal[start:end]
norm1 = get_neutral_bias_at_index(sig_arr, rel_stop + buffer)
window = get_points_from_fraction(len(sig_arr), 0.01)
diff = sig_arr.rolling(window=window).std().values
diff = get_neutral_bias_at_border(diff, direction='backward')

# norm1 = get_neutral_bias_at_border(signal[start:end], 0.1, 'backward')
diff = zfilter(norm1.diff(), 0.001) # Large change in signal
impact = get_signal_event(diff, threshold=-1000, max_threshold=-70, n_points=1, search_direction='forward')
# Large change in signal
impact = get_signal_event(diff, threshold=150, max_threshold=1000, n_points=1, search_direction='forward')

# Large chunk of data that's the same near the stop
norm1 = get_neutral_bias_at_index(sig_arr, rel_stop+buffer).values
n_points = get_points_from_fraction(len(norm1), 0.1)
long_press = get_signal_event(norm1, threshold=-150, max_threshold=150, n_points=n_points, search_direction='backward')
tol = get_points_from_fraction(len(norm1), 0.2)
long_press = get_signal_event(norm1, threshold=-10000, max_threshold=150, n_points=n_points, search_direction='backward')
tol = get_points_from_fraction(len(norm1), 0.1)

ground = None
if impact is not None and long_press is not None:
if impact is not None:
impact += start
if long_press is not None:
long_press += start

if long_press is not None and impact is not None:
if (long_press-tol) <= impact <= (long_press+tol):
ground = impact + start
ground = impact

# from .plotting import plot_ground_strike, plot_ts
# plot_ground_strike(signal, diff, norm1, start, stop_idx, impact, long_press,ground)

return ground
2 changes: 1 addition & 1 deletion study_lyte/logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ def setup_log(debug=False):
format=default,
level=level, handlers=handlers)
# Set all ignored modules to be quiet.
ignore_modules = ['matplotlib']
ignore_modules = ['matplotlib', 'pyngui']
for name in logging.Logger.manager.loggerDict.keys():
if any([m in name for m in ignore_modules]):
logger = logging.getLogger(name)
Expand Down
Loading
Loading