before photonics west
cudmore committed Jan 26, 2024
1 parent ab4b2dd commit 3be451a
Showing 20 changed files with 834 additions and 421 deletions.
32 changes: 32 additions & 0 deletions CHANGELOG.md
@@ -1,6 +1,38 @@

SanPy Documentation is available at [https://cudmore.github.io/SanPy/](https://cudmore.github.io/SanPy/)

## 20240126

- Added a file/folder opening window. This is shown at first run and allows users to open new and previously opened files and folders.
- Limiting analysis to the visible part of the recording
- Greatly improved panning and zooming of all raw data
    - Retained click+drag to pan the x-axis and mouse-wheel to zoom the x-axis
    - New feature: hold the keyboard shift key to click+drag the y-axis and mouse-wheel to zoom the y-axis (see the sketch below)
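
A minimal sketch of how this kind of axis-specific pan/zoom can be wired up, assuming a PyQt/pyqtgraph plot like the ones SanPy uses; the `PanZoomViewBox` class below is illustrative, not SanPy's actual implementation:

```python
import pyqtgraph as pg
from PyQt5 import QtCore


class PanZoomViewBox(pg.ViewBox):
    """Plain drag/wheel acts on the x-axis; holding shift acts on the y-axis."""

    def wheelEvent(self, ev, axis=None):
        # route the wheel to the y-axis when shift is held, otherwise to x
        if ev.modifiers() & QtCore.Qt.ShiftModifier:
            super().wheelEvent(ev, axis=pg.ViewBox.YAxis)
        else:
            super().wheelEvent(ev, axis=pg.ViewBox.XAxis)

    def mouseDragEvent(self, ev, axis=None):
        # click+drag pans the x-axis by default, the y-axis when shift is held
        if ev.modifiers() & QtCore.Qt.ShiftModifier:
            super().mouseDragEvent(ev, axis=pg.ViewBox.YAxis)
        else:
            super().mouseDragEvent(ev, axis=pg.ViewBox.XAxis)


# usage: pass the custom view box when building the plot
# plotWidget = pg.PlotWidget(viewBox=PanZoomViewBox())
```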

- Detailed fixes and additions
    - Fixed a bug where setting the sweep lost the current zoom
    - Roll over SanPy.log so it does not get too big
    - Fixed spin boxes in the plot recording plugin
    - Fixed a bug where the `unknown` recording mode was not allowing data to be displayed (problem in initializing the filtered recording).
    - Now include the build date and time in each pip install

- Known bugs
    - (seems to be fixed) If the user opens a plugin and then closes the main raw data file, SanPy crashes

### Next release to do

- Add documentation for Plot Tool plugins

## 20240117

### New features
- Now provides opening of a single raw data file, either via the menu `File - Open` or drag and drop
- Added horizontal and vertical cursors to the Vm and derivative plots. These can do a lot, including selecting spikes within the cursors and setting some detection parameters like the dV/dt and mV spike thresholds, as well as some windows like the refractory period and the window to detect half-widths (see the sketch below).
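
A minimal sketch of what cursor-based spike selection can look like; every name here (`spike_times_sec`, the cursor positions, `detection_params`) is a hypothetical placeholder, not SanPy's actual API:

```python
import numpy as np

spike_times_sec = np.array([0.8, 1.4, 2.1, 2.9, 3.6])  # detected spike times (s)
cursor_x1, cursor_x2 = 1.0, 3.0   # vertical cursor positions (s)
cursor_y_mv = -40.0               # horizontal cursor position (mV)

# select the spikes that fall between the two vertical cursors
mask = (spike_times_sec >= cursor_x1) & (spike_times_sec <= cursor_x2)
selected_spike_indices = np.flatnonzero(mask)
print(selected_spike_indices)  # -> [1 2 3]

# use the horizontal cursor value as an mV spike-threshold detection parameter
detection_params = {'mvThreshold': cursor_y_mv}
```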

### Bug fixes
- Fixed lots of bugs in (and extended the interface of) the plot recording plugin.
- Fixed lots of bugs in plotting with `Plot Tool` and `Plot Tool (pool)` plugins.

## 20231201

### New Features
16 changes: 8 additions & 8 deletions docs/docs/download.md
@@ -10,8 +10,8 @@ If you have an older Mac with an Intel/x86 chip then download the **`x86`** version

If you're not sure which type of Mac you have, then download the **`x86`** version.

[Download macOS x86 zip](https://github.com/cudmore/SanPy/releases/download/v0.2.3/SanPy-macOS-x86.zip){ .md-button .md-button--primary}
[Download macOS arm zip](https://github.com/cudmore/SanPy/releases/download/v0.2.3/SanPy-macOS-arm.zip){ .md-button .md-button--primary}
[Download macOS x86 zip](https://github.com/cudmore/SanPy/releases/download/v0.2.4/SanPy-macOS-x86.zip){ .md-button .md-button--primary}
[Download macOS arm zip](https://github.com/cudmore/SanPy/releases/download/v0.2.4/SanPy-macOS-arm.zip){ .md-button .md-button--primary}

### Mac download and run recipe

@@ -24,20 +24,20 @@ If you're not sure which type of Mac you have, then download the **`x86`** version

## Windows download

[Download Windows exe](https://github.com/cudmore/SanPy/releases/download/v0.2.3/SanPy.exe
){ .md-button .md-button--primary}
[Download Windows exe](https://github.com/cudmore/SanPy/releases/download/v0.2.4/SanPyWindowsExe.zip){ .md-button .md-button--primary}

### Windows download and run recipe

- Download the `exe` file
- Download the `zip` file
- Uncompress the zip file to `SanPy.exe`
- Once downloaded, double-click on `SanPy.exe` to run SanPy

**Troubleshooting:** If you run into security issues, please see [Security on Windows](#security-on-windows).

<!--

## Current Version
- Version 0.1.25, Released on May 12, 2023
-->
- Version v0.2.4, Released on Jan 17, 2024


## Current and previous releases (on GitHub)

59 changes: 29 additions & 30 deletions sanpy/analysisDir.py
@@ -4,7 +4,6 @@

import os
import time
import sys
import copy # For copy.deepcopy() of bAnalysis
# import uuid # to generate unique key on bAnalysis spike detect
import pathlib # need to use this (introduced in Python 3.4) to manage paths on Windows, stop using os.path
@@ -468,8 +467,7 @@ class analysisDir:

def __init__(
self,
path: str = None,
filePath : str = None,
path : str = None,
sanPyWindow : "sanpy.interface.SanPyWindow" = None,
fileLoaderDict : dict = None,
autoLoad: bool = False,
@@ -484,9 +482,7 @@ def __init__(
Parameters
----------
path (str):
Path to folder
filePath (str):
Path to one file
Path to file or folder
sanPyWindow (sanpy.interface.SanPyWindow)
PyQt, used to signal progress on loading
fileLoaderDict (dict):
@@ -503,14 +499,16 @@
- 202312 adding filepath to load just one file
"""

self._filePath = filePath
if filePath is not None and os.path.isfile(filePath):
self._filePath = filePath
path = os.path.split(filePath)[0]
self._filePath = None
if os.path.isfile(path):
self._filePath = path
folderPath = os.path.split(path)[0]
else:
folderPath = path

logger.info(f'{path} self._filePath:{self._filePath}')
logger.info(f'{path}')

self.path: str = path
self.path: str = folderPath # path to folder

self._sanPyWindow = sanPyWindow
# used to signal on building initial db
@@ -548,10 +546,10 @@ def __init__(
self.syncDfWithPath()

# if we have a filePath and not in df then add it
if self._filePath is not None:
# if self._filePath is not None:

logger.info('self._df')
print(self._df)
# logger.info('self._df')
# print(self._df)

# self._df = self.loadFolder(loadData=autoLoad)

@@ -970,13 +968,13 @@ def loadFolder(self, path=None, loadData=False) -> pd.DataFrame:

return df

def _checkColumns(self):
def _checkColumns(self, verbose = False):
"""Check columns in loaded vs sanpyColumns (and vica versa).
"""
if self._df is None:
return

verbose = True
# verbose = True
loadedColumns = self._df.columns
for col in loadedColumns:
if col not in self.sanpyColumns.keys():
@@ -986,11 +984,12 @@ def _checkColumns(self):
f'did not find loaded col: "{col}" in sanpyColumns.keys() ... ignore it'
)
for col in self.sanpyColumns.keys():
if not col in loadedColumns:
if col not in loadedColumns:
# loaded is missing expected, add it
logger.info(
f'did not find sanpyColumns.keys() col: "{col}" in loadedColumns ... adding col'
)
if verbose:
logger.info(
f'did not find sanpyColumns.keys() col: "{col}" in loadedColumns ... adding col'
)
self._df[col] = ""

def _updateLoadedAnalyzed(self, theRowIdx=None):
@@ -1225,8 +1224,8 @@ def _setColumnType(self, df):
# print('columns are:', df.columns)
for col in df.columns:
# when loading from csv, 'col' may not be in sanpyColumns
if not col in self.sanpyColumns:
logger.warning(f'Column "{col}" is not in sanpyColumns -->> ignoring')
if col not in self.sanpyColumns:
# logger.warning(f'Column "{col}" is not in sanpyColumns -->> ignoring')
continue
colType = self.sanpyColumns[col]["type"]
# print(f' _setColumnType() for "{col}" is type "{colType}"')
@@ -1501,10 +1500,10 @@ def appendRow(self, rowDict=None, ba=None):
# logger.warning(f"need to replace append with concat")
#df = df.append(rowSeries, ignore_index=True)

logger.info('concat this rowSeries')
print(rowSeries)
print('to this df')
print(df)
# logger.info('concat this rowSeries')
# print(rowSeries)
# print('to this df')
# print(df)

df = pd.concat([df, rowSeries], axis=0, ignore_index=True)

@@ -1514,9 +1513,9 @@
if ba is not None:
df.loc[newRowIdx, "_ba"] = ba

print('')
logger.info('=== after concat')
print(df)
# print('')
# logger.info('=== after concat')
# print(df)

#
self._df = df
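
A minimal usage sketch of the single-argument constructor documented in the hunk above ("Path to file or folder"); the data paths are placeholders, and it assumes `analysisDir` is importable from the top-level `sanpy` package like the other classes used in this commit:

```python
import sanpy

# pass a folder to build a file table for every recording inside it
folderDir = sanpy.analysisDir('/path/to/data/folder')

# or pass a single file; the parent folder is inferred from the file path
fileDir = sanpy.analysisDir('/path/to/data/folder/recording.abf')
```
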
25 changes: 14 additions & 11 deletions sanpy/bAnalysis_.py
@@ -855,17 +855,20 @@ def _rebuildFiltered(self):
logger.warning("not getting derivative ... sweepX was none?")
return

if (
self.fileLoader.recordingMode == recordingModes.iclamp
or self.fileLoader.recordingMode == recordingModes.kymograph
):
self.fileLoader._getDerivative()
elif self.fileLoader.recordingMode == recordingModes.vclamp:
self.fileLoader._getDerivative()
else:
logger.warning(
f'Did not take derivative, unknown recording mode "{self.fileLoader.recordingMode}"'
)
self.fileLoader._getDerivative()

# if (
# self.fileLoader.recordingMode == recordingModes.iclamp
# or self.fileLoader.recordingMode == recordingModes.kymograph
# ):
# self.fileLoader._getDerivative()
# elif self.fileLoader.recordingMode == recordingModes.vclamp:
# self.fileLoader._getDerivative()
# else:
# logger.warning(
# f'Did not take derivative, unknown recording mode "{self.fileLoader.recordingMode}"'
# )
# self.fileLoader._getDerivative()

def _getFilteredRecording(self):
"""
50 changes: 41 additions & 9 deletions sanpy/fileloaders/epochTable.py
@@ -60,7 +60,7 @@ def getSweepEpoch(self, sweep):
return self._epochList[sweep]

def _builFromAbf(self, abf: pyabf.ABF):
dataPointsPerMs = abf.dataPointsPerMs
# dataPointsPerMs = abf.dataPointsPerMs
"""To convert point to seconds"""

try:
Expand All @@ -69,18 +69,30 @@ def _builFromAbf(self, abf: pyabf.ABF):
logger.error(e)
return

# abf.sweepEpoch is
# pyabf.waveform.EpochSweepWaveform
# logger.info(f' abf.sweepEpochs is {type(abf.sweepEpochs)}')
# print(abf.sweepEpochs)

# print(pyabf.waveform.epochTable())

# sweepEpochs is type "pyabf.waveform.EpochSweepWaveform"
for epochIdx, p1 in enumerate(abf.sweepEpochs.p1s):
p2 = abf.sweepEpochs.p2s[epochIdx] # stop point of each pulse
epochLevel = abf.sweepEpochs.levels[epochIdx]
epochType = abf.sweepEpochs.types[epochIdx]
pulseWidth = abf.sweepEpochs.pulseWidths[epochIdx]
pulsePeriod = abf.sweepEpochs.pulsePeriods[epochIdx]
digitalState = abf.sweepEpochs.pulsePeriods[epochIdx]
digitalState = abf.sweepEpochs.digitalStates[epochIdx] # 20240117 owl-analysis
##print(f"epoch index {epochIdx}: at point {p1} there is a {epochType} to level {epochLevel}")

p1_sec = p1 / abf.dataPointsPerMs / 1000
p2_sec = p2 / abf.dataPointsPerMs / 1000
try:
p1_sec = p1 / abf.dataPointsPerMs / 1000
p2_sec = p2 / abf.dataPointsPerMs / 1000
except (ZeroDivisionError) as e:
logger.error(f'ZeroDivisionError: {e}')
p1_sec = float('nan')
p2_sec = float('nan')

epochDict = {
"sweepNumber": abf.sweepNumber,
@@ -160,19 +172,39 @@ def numEpochs(self):


if __name__ == "__main__":
import matplotlib.pyplot as plt
import sanpy

# path = '/Users/cudmore/Sites/SanPy/data/19114000.abf'
path = "/Users/cudmore/data/theanne-griffith/07.28.21/2021_07_28_0001.abf"

path = '/Users/cudmore/Dropbox/data/sanpy-users/porter/data/2022_08_15_0022.abf'

ba = sanpy.bAnalysis(path)
print(ba)

sweep = 13

et = ba.fileLoader.getEpochTable(sweep)
df = et.getEpochList(asDataFrame=True)
pprint(df)
_abf = pyabf.ABF(path)
# _epochTable = pyabf.waveform.EpochTable(_abf, 1)
# print(' _epochTable')
# print(_epochTable)

sweeps = [9, 10, 11]
for sweep in sweeps:
ba.setSweep(sweep)
et = ba.fileLoader.getEpochTable(sweep)
df = et.getEpochList(asDataFrame=True)
print(f' sweep:{sweep}')
pprint(df)

# owl
#<bound method ABF.sweepD of ABF (v2.9) with 1 channel (pA), sampled at 10.0 kHz, containing 18 sweeps, having no tags, with a total length of 6.33 minutes, recorded with protocol "PPR_v-clamp_owl". path=/Users/cudmore/Dropbox/data/sanpy-users/porter/2022_08_15_0022.abf>
# print('self._abf.sweepD')
# for _i in range(8):
# print(_i)
# print(self._abf.sweepD(_i))
_abf.setSweep(sweep)
for _i in range(8):
_sweepD = _abf.sweepD(_i)

testPnt = 1280
epochIndex = et.findEpoch(testPnt)
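
For reference, a standalone sketch of the point-to-seconds conversion and the ZeroDivisionError guard added above, using only the pyabf calls that already appear in this file; the file path is a placeholder:

```python
import pyabf

abf = pyabf.ABF('/path/to/recording.abf')  # placeholder path
abf.setSweep(0)

for epochIdx, p1 in enumerate(abf.sweepEpochs.p1s):
    p2 = abf.sweepEpochs.p2s[epochIdx]  # stop point of each epoch
    try:
        # convert points -> ms -> seconds
        p1_sec = p1 / abf.dataPointsPerMs / 1000
        p2_sec = p2 / abf.dataPointsPerMs / 1000
    except ZeroDivisionError:
        # guard against files that report 0 points per ms
        p1_sec = p2_sec = float('nan')
    print(epochIdx,
          abf.sweepEpochs.types[epochIdx],
          abf.sweepEpochs.levels[epochIdx],
          p1_sec, p2_sec)
```
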
24 changes: 20 additions & 4 deletions sanpy/fileloaders/fileLoader_abf.py
@@ -77,6 +77,15 @@ def _loadAbf(

# on load, sweep is 0
if loadData:

# owl
#<bound method ABF.sweepD of ABF (v2.9) with 1 channel (pA), sampled at 10.0 kHz, containing 18 sweeps, having no tags, with a total length of 6.33 minutes, recorded with protocol "PPR_v-clamp_owl". path=/Users/cudmore/Dropbox/data/sanpy-users/porter/2022_08_15_0022.abf>
# print('self._abf.sweepD')
# for _i in range(8):
# print(_i)
# print(self._abf.sweepD(_i))


_numRows = self._abf.sweepX.shape[0]
numSweeps = len(self._sweepList)
self._sweepX = np.zeros((_numRows, 1))
@@ -118,8 +127,10 @@

acqDate = abfDateTime.strftime("%Y-%m-%d")
acqTime = abfDateTime.strftime("%H:%M:%S")
logger.info(f'acqDate:"{acqDate}')
logger.info(f'acqTime:"{acqTime}')

# logger.info(f'acqDate:"{acqDate}')
# logger.info(f'acqTime:"{acqTime}')

# self._acqDate = abfDateTime.strftime("%Y-%m-%d")
# self._acqTime = abfDateTime.strftime("%H:%M:%S")

@@ -143,15 +154,20 @@

# self._sweepLabelX = self._abf.sweepLabelX
# self._sweepLabelY = self._abf.sweepLabelY
if self._sweepLabelY in ["pA", "nA"]:
if self._sweepLabelY in ["A", "pA", "nA", "uA"]:
self._recordingMode = recordingModes.vclamp # 'V-Clamp'
# self._sweepY_label = self._abf.sweepUnitsY
elif self._sweepLabelY in ["mV"]:
elif self._sweepLabelY in ["mV", "uv", "V"]:
self._recordingMode = recordingModes.iclamp #'I-Clamp'
# self._sweepY_label = self._abf.sweepUnitsY
else:
logger.warning(f'did not understand adcUnit "{adcUnits}"')

# 20240125 ownanalysis
self._userList = self._abf.userList
if self._userList is not None:
logger.info(f'_userList:{self._userList}')

#
self.myFileType = "abf"

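A small self-contained sketch of the broadened unit matching in the hunk above; the `RecordingMode` enum and function name here are illustrative (SanPy's actual `recordingModes` lives in its file loaders), and "uV" is assumed for the micro-volt unit string:

```python
from enum import Enum


class RecordingMode(Enum):
    vclamp = 'V-Clamp'
    iclamp = 'I-Clamp'
    unknown = 'unknown'


def modeFromUnits(sweepUnitsY: str) -> RecordingMode:
    """Infer the clamp mode from the ADC units reported by the file."""
    if sweepUnitsY in ('A', 'pA', 'nA', 'uA'):
        return RecordingMode.vclamp   # current units -> voltage clamp
    if sweepUnitsY in ('mV', 'uV', 'V'):
        return RecordingMode.iclamp   # voltage units -> current clamp
    return RecordingMode.unknown


print(modeFromUnits('pA'))  # RecordingMode.vclamp
```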