Implement reader for PicoQuant cor files (#199)
* Update numpy to >v1, add PicoQuant cor format

* Add more descriptive file type wildcard to gui

* Update docstring for accuracy
tsbischof authored Jan 11, 2023
1 parent 39db2b6 commit 0901d75
Showing 10 changed files with 151 additions and 51 deletions.
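Note on the recurring NumPy change in the diffs below: the scalar aliases np.float and np.int were deprecated in NumPy 1.20 and removed in NumPy 1.24, so the code now spells out np.float64 and np.int64. A minimal standalone sketch of the migration pattern (not PyCorrFit code; assumes NumPy >= 1.24):

import numpy as np

# np.zeros(5, dtype=np.int) and np.float("1.5e-7") fail on NumPy >= 1.24
coords = np.zeros(5, dtype=np.int64)   # explicit 64-bit integer dtype
value = np.float64("1.5e-7")           # explicit 64-bit float scalar
print(coords.dtype, value)             # int64 1.5e-07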
4 changes: 2 additions & 2 deletions pycorrfit/gui/frontend.py
@@ -1168,8 +1168,8 @@ def OnLoadBatch(self, e=None, dataname=None):
# the Nth occurence of the keys in Types correspond to the
# matching curves.
# Also make sure that number starts at one for each selected file.
- coords = np.zeros(len(keys), dtype=np.int)
- Run = np.zeros(len(Curveid), dtype=np.int)
+ coords = np.zeros(len(keys), dtype=np.int64)
+ Run = np.zeros(len(Curveid), dtype=np.int64)
WorkType = 1*Type
for fname in np.unique(Filename):
# unique returns sorted file names.
2 changes: 1 addition & 1 deletion pycorrfit/gui/page.py
@@ -87,7 +87,7 @@ def __init__(self, parent, counter, modelid, active_parms, tau=None):
# This is necessary to prevent "Unsplit" of the SplitterWindow:
self.spcanvas.SetMinimumPaneSize(1)
# y difference in pixels between Auocorrelation and Residuals
- cupsizey = size[1]*4/5
+ cupsizey = (size[1]*4)//5
# Calculate initial data
self.calculate_corr()
# Draw the settings section
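The / to // change above is a Python 3 division issue rather than a NumPy one: true division returns a float, while the splitter geometry presumably needs an integer pixel count. A quick standalone illustration (hypothetical height value):

size_y = 533                  # hypothetical panel height in pixels
print(size_y * 4 / 5)         # 426.4 -- float under Python 3 true division
print((size_y * 4) // 5)      # 426   -- integer, usable as a pixel size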
2 changes: 1 addition & 1 deletion pycorrfit/gui/tools/statistics.py
@@ -348,7 +348,7 @@ def OnChooseValues(self, event=None):
#headlen = len(head)
# We will sort the checkboxes in more than one column if there
# are more than *maxitemsincolumn*
- maxitemsincolumn = np.float(19)
+ maxitemsincolumn = np.float64(19)
Sizernumber = int(np.ceil(len(Info)/maxitemsincolumn))
self.boxsizerlist = list()
for i in np.arange(Sizernumber):
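For context, the surrounding code spreads the checkboxes over columns of at most 19 items, so the column count is a ceiling division, as in this small sketch (hypothetical item count, not PyCorrFit code):

import numpy as np

maxitemsincolumn = np.float64(19)
n_items = 47                                        # hypothetical number of checkboxes
Sizernumber = int(np.ceil(n_items / maxitemsincolumn))
print(Sizernumber)                                  # 3 columns of up to 19 items each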
6 changes: 3 additions & 3 deletions pycorrfit/openfile.py
@@ -226,10 +226,10 @@ def LoadSessionData(sessionfile, parameters_only=False):
for row in bgtraceread:
# Exclude commentaries
if (str(row[0])[0:1] != '#'):
- bgtrace.append((np.float(row[0]), np.float(row[1])))
+ bgtrace.append((np.float64(row[0]), np.float64(row[1])))
bgtrace = np.array(bgtrace)
newbackground = Trace(trace=bgtrace, name=str(
-     bgrow[1]), countrate=np.float(bgrow[0]))
+     bgrow[1]), countrate=np.float64(bgrow[0]))
Infodict["Backgrounds"].append(newbackground)
i = i + 1
bgfile.close()
@@ -260,7 +260,7 @@ def LoadSessionData(sessionfile, parameters_only=False):
for row in Wdatareader:
# Exclude commentaries
if (str(row[0])[0:1] != '#'):
- Wdata.append(np.float(row[0]))
+ Wdata.append(np.float64(row[0]))
Weightsdict[pageid][Nkey] = np.array(Wdata)
Infodict["External Weights"] = Weightsdict
# Preferences
3 changes: 3 additions & 0 deletions pycorrfit/readfiles/__init__.py
@@ -14,6 +14,7 @@
# To add a filetype add it here and in the
# dictionaries at the end of this file.
from .read_ASC_ALV import openASC
+ from .read_cor_PicoQuant import openCOR
from .read_CSV_PyCorrFit import openCSV
from .read_SIN_correlator_com import openSIN
from .read_FCS_Confocor3 import openFCS
@@ -242,6 +243,7 @@ def openZIP(path, filename=None):
# The wildcards point to the appropriate functions.
filetypes_dict = {"Correlator.com (*.SIN)|*.SIN;*.sin": openSIN,
"ALV (*.ASC)|*.ASC;*.asc": openASC,
"PicoQuant cor (*.cor)|*.cor": openCOR,
"PyCorrFit (*.csv)|*.csv": openCSV,
"Matlab 'Ries (*.mat)|*.mat": openMAT,
"PicoQuant (*.pt3)|*.pt3": openPT3,
@@ -255,6 +257,7 @@ def openZIP(path, filename=None):
# Dictionary with filetypes we can open that have intensity traces in them.
filetypes_bg_dict = {"Correlator.com (*.SIN)|*.SIN;*.sin": openSIN,
"ALV (*.ASC)|*.ASC": openASC,
"PicoQuant cor (*.cor)|*.cor": openCOR,
"PyCorrFit (*.csv)|*.csv": openCSV,
"PicoQuant (*.pt3)|*.pt3": openPT3,
"Zeiss ConfoCor3 (*.fcs)|*.fcs": openFCS,
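The wildcard strings above double as wx file-dialog filters and as keys that map to reader functions; a minimal sketch of how such a mapping could dispatch on file extension (hypothetical open_any helper, not part of PyCorrFit):

import pathlib

def open_any(path, filetypes_dict):
    """Pick the reader whose wildcard matches the file suffix (hypothetical helper)."""
    suffix = pathlib.Path(path).suffix.lower()
    for wildcard, reader in filetypes_dict.items():
        # A wildcard looks like "PicoQuant cor (*.cor)|*.cor"
        extensions = [e.strip("*").lower() for e in wildcard.split("|")[1].split(";")]
        if suffix in extensions:
            return reader(path)
    raise ValueError(f"No reader registered for {suffix!r}")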
14 changes: 7 additions & 7 deletions pycorrfit/readfiles/read_ASC_ALV.py
@@ -181,7 +181,7 @@ def openASC_old(path):
for row in readdata:
for i in np.arange(len(curvelist)):
if len(row) > 0:
- data[i].append((np.float(row[0]), np.float(row[i+1])))
+ data[i].append((np.float64(row[0]), np.float64(row[i+1])))
# Trace
# Trace is stored in two columns
# 1st column: time [s]
@@ -196,25 +196,25 @@
for row in readtrace:
# time in ms, countrate
trace.append(list())
- trace[0].append((np.float(row[0])*timefactor,
-                  np.float(row[1])))
+ trace[0].append((np.float64(row[0])*timefactor,
+                  np.float64(row[1])))
# Only trace[0] contains the trace!
for i in np.arange(len(curvelist)-1):
trace.append(list())
- trace[i+1].append((np.float(row[0])*timefactor, 0))
+ trace[i+1].append((np.float64(row[0])*timefactor, 0))
if not single:
k = len(curvelist)/2
if int(k) != k:
print("Problem with ALV data. Single mode not recognized.")
# presumably dual mode. There is a second trace
# time in ms, countrate
trace2.append(list())
- trace2[0].append((np.float(row[0])*timefactor,
-                   np.float(row[2])))
+ trace2[0].append((np.float64(row[0])*timefactor,
+                   np.float64(row[2])))
# Only trace2[0] contains the trace!
for i in np.arange(len(curvelist)-1):
trace2.append(list())
- trace2[i+1].append((np.float(row[0])*timefactor, 0))
+ trace2[i+1].append((np.float64(row[0])*timefactor, 0))

# group the resulting curves
corrlist = list()
6 changes: 3 additions & 3 deletions pycorrfit/readfiles/read_CSV_PyCorrFit.py
@@ -123,11 +123,11 @@ def openCSV(path, filename=None):
# separated values as well
if len(row) == 1:
row = row[0].split()
- data.append((np.float(row[0].strip())*timefactor,
-              np.float(row[1].strip())))
+ data.append((np.float64(row[0].strip())*timefactor,
+              np.float64(row[1].strip())))
if len(row) == 5:
# this has to be correlation with weights
- weights.append(np.float(row[4].strip()))
+ weights.append(np.float64(row[4].strip()))
if weightname == "external":
try:
weightname = "ext. " + \
12 changes: 6 additions & 6 deletions pycorrfit/readfiles/read_FCS_Confocor3.py
@@ -144,8 +144,8 @@ def openFCS_Multiple(path):
for row in readtrace:
# tau in ms, trace in kHz
# So we need to put some factors here
- trace.append((np.float(row[3])*1000,
-               np.float(row[4])/1000))
+ trace.append((np.float64(row[3])*1000,
+               np.float64(row[4])/1000))
trace = np.array(trace)
# If the trace is too big. Wee need to bin it.
newtrace = util.downsample_trace(trace)
@@ -172,8 +172,8 @@
corr = []
for row in readcorr:
# tau in ms, corr-function
- corr.append((np.float(row[3])*1000,
-              np.float(row[4])-1))
+ corr.append((np.float64(row[3])*1000,
+              np.float64(row[4])-1))
if FoundType[:2] == "AC":
ac_correlations.append(np.array(corr))
elif FoundType[:2] == "CC":
@@ -360,7 +360,7 @@ def openFCS_Single(path):
for row in readtrace:
# tau in ms, trace in kHz
# So we need to put some factors here
- trace.append((np.float(row[0])*1000, np.float(row[1])))
+ trace.append((np.float64(row[0])*1000, np.float64(row[1])))
trace = np.array(trace)
# If the trace is too big. Wee need to bin it.
newtrace = util.downsample_trace(trace)
@@ -378,7 +378,7 @@
corr = []
for row in readcorr:
# tau in ms, corr-function
- corr.append((np.float(row[0]), np.float(row[1])-1))
+ corr.append((np.float64(row[0]), np.float64(row[1])-1))
corr = np.array(corr)
fcscurve = False

56 changes: 28 additions & 28 deletions pycorrfit/readfiles/read_SIN_correlator_com.py
@@ -274,29 +274,29 @@ def openSIN_old(path):
corrdata = []
for row in readcorr:
# tau in ms, corr-function minus "1"
- corrdata.append((np.float(row[0])*timefactor, np.float(row[1])-1))
+ corrdata.append((np.float64(row[0])*timefactor, np.float64(row[1])-1))
correlations.append(np.array(corrdata))
trace = []
for row in readtrace:
# tau in ms, corr-function minus "1"
- trace.append((np.float(row[0])*timefactor,
-               np.float(row[1])/timedivfac))
+ trace.append((np.float64(row[0])*timefactor,
+               np.float64(row[1])/timedivfac))
traces.append(np.array(trace))
elif Mode == "Single Cross":
curvelist.append("CC")
corrdata = []
for row in readcorr:
# tau in ms, corr-function minus "1"
- corrdata.append((np.float(row[0])*timefactor, np.float(row[1])-1))
+ corrdata.append((np.float64(row[0])*timefactor, np.float64(row[1])-1))
correlations.append(np.array(corrdata))
trace1 = []
trace2 = []
for row in readtrace:
# tau in ms, corr-function minus "1"
- trace1.append((np.float(row[0])*timefactor,
-                np.float(row[1])/timedivfac))
- trace2.append((np.float(row[0])*timefactor,
-                np.float(row[2])/timedivfac))
+ trace1.append((np.float64(row[0])*timefactor,
+                np.float64(row[1])/timedivfac))
+ trace2.append((np.float64(row[0])*timefactor,
+                np.float64(row[2])/timedivfac))
traces.append([np.array(trace1), np.array(trace2)])
elif Mode == "Dual Auto":
curvelist.append("AC1")
@@ -305,18 +305,18 @@
corrdata2 = []
for row in readcorr:
# tau in ms, corr-function minus "1"
- corrdata1.append((np.float(row[0])*timefactor, np.float(row[1])-1))
- corrdata2.append((np.float(row[0])*timefactor, np.float(row[2])-1))
+ corrdata1.append((np.float64(row[0])*timefactor, np.float64(row[1])-1))
+ corrdata2.append((np.float64(row[0])*timefactor, np.float64(row[2])-1))
correlations.append(np.array(corrdata1))
correlations.append(np.array(corrdata2))
trace1 = []
trace2 = []
for row in readtrace:
# tau in ms, corr-function minus "1"
- trace1.append((np.float(row[0])*timefactor,
-                np.float(row[1])/timedivfac))
- trace2.append((np.float(row[0])*timefactor,
-                np.float(row[2])/timedivfac))
+ trace1.append((np.float64(row[0])*timefactor,
+                np.float64(row[1])/timedivfac))
+ trace2.append((np.float64(row[0])*timefactor,
+                np.float64(row[2])/timedivfac))
traces.append(np.array(trace1))
traces.append(np.array(trace2))
elif Mode == "Dual Cross":
@@ -326,18 +326,18 @@
corrdata2 = []
for row in readcorr:
# tau in ms, corr-function minus "1"
- corrdata1.append((np.float(row[0])*timefactor, np.float(row[1])-1))
- corrdata2.append((np.float(row[0])*timefactor, np.float(row[2])-1))
+ corrdata1.append((np.float64(row[0])*timefactor, np.float64(row[1])-1))
+ corrdata2.append((np.float64(row[0])*timefactor, np.float64(row[2])-1))
correlations.append(np.array(corrdata1))
correlations.append(np.array(corrdata2))
trace1 = []
trace2 = []
for row in readtrace:
# tau in ms, corr-function minus "1"
- trace1.append((np.float(row[0])*timefactor,
-                np.float(row[1])/timedivfac))
- trace2.append((np.float(row[0])*timefactor,
-                np.float(row[2])/timedivfac))
+ trace1.append((np.float64(row[0])*timefactor,
+                np.float64(row[1])/timedivfac))
+ trace2.append((np.float64(row[0])*timefactor,
+                np.float64(row[2])/timedivfac))
traces.append([np.array(trace1), np.array(trace2)])
traces.append([np.array(trace1), np.array(trace2)])
elif Mode == "Quad":
@@ -351,12 +351,12 @@
corrdata21 = []
for row in readcorr:
# tau in ms, corr-function minus "1"
- corrdata1.append((np.float(row[0])*timefactor, np.float(row[1])-1))
- corrdata2.append((np.float(row[0])*timefactor, np.float(row[2])-1))
+ corrdata1.append((np.float64(row[0])*timefactor, np.float64(row[1])-1))
+ corrdata2.append((np.float64(row[0])*timefactor, np.float64(row[2])-1))
corrdata12.append(
-     (np.float(row[0])*timefactor, np.float(row[3])-1))
+     (np.float64(row[0])*timefactor, np.float64(row[3])-1))
corrdata21.append(
-     (np.float(row[0])*timefactor, np.float(row[4])-1))
+     (np.float64(row[0])*timefactor, np.float64(row[4])-1))
correlations.append(np.array(corrdata1))
correlations.append(np.array(corrdata2))
correlations.append(np.array(corrdata12))
@@ -365,10 +365,10 @@
trace2 = []
for row in readtrace:
# tau in ms, corr-function minus "1"
- trace1.append((np.float(row[0])*timefactor,
-                np.float(row[1])/timedivfac))
- trace2.append((np.float(row[0])*timefactor,
-                np.float(row[2])/timedivfac))
+ trace1.append((np.float64(row[0])*timefactor,
+                np.float64(row[1])/timedivfac))
+ trace2.append((np.float64(row[0])*timefactor,
+                np.float64(row[2])/timedivfac))
traces.append(np.array(trace1))
traces.append(np.array(trace2))
traces.append([np.array(trace1), np.array(trace2)])
97 changes: 97 additions & 0 deletions pycorrfit/readfiles/read_cor_PicoQuant.py
@@ -0,0 +1,97 @@
"""PicoQuant .cor files"""
import csv
import pathlib
import warnings

import numpy as np

class LoadCORError(BaseException):
pass

def get_header_index(L, elem):
try:
index = L.index(elem)
except ValueError:
raise ValueError(f'Expected {elem} in the data header, but found only {L}')

return index

def openCOR(path, filename=None):
"""
Read data from a PicoQuant .cor file.
This format consists of a metadata header, a blank line, then a
whitespace-delimited data header followed by a data array.
------------------------------------------------------------
TTTR Correlator Export
PicoHarp Software version 3.0.0.3 format version 3.0
Raw data: c:\\users\\baker lab 432-a\\desktop\\grant fcs\\default_013.ptu
Recorded: 16/12/22 17:40:13
Mode: T2
Routing Mask A: 0 1 0 0 0
Routing Mask B: 1 0 0 0 0
Start time [s]: 0.000000
Time span [s]: 7.545534
Counts A: 382989
Counts B: 523316
Tau resolution [s]: 0.00000002500000
taustep tau/s G(A,A) G(B,B) G(A,B)
6 0.0000001500 0.8375 0.3556 0.1708
7 0.0000001750 0.6503 0.2652 0.1445
9 0.0000002250 0.5680 0.2173 0.1264
11 0.0000002750 0.4836 0.1594 0.1614
13 0.0000003250 0.3241 0.1528 0.1486
15 0.0000003750 0.2819 0.1236 0.1641
17 0.0000004250 0.3591 0.1467 0.1102
[...]
------------------------------------------------------------
Returns:
A dictionary containing:
Trace: list of 2d np.array containing two columns:
1st: tau in ms
2nd: corresponding correlation signal
Type: list of strings indictating the type of each correlation
Filename: the basename for each file loaded
Trace: blank list, since cor does not store intensity traces
"""
path = pathlib.Path(path)
if filename is not None:
warnings.warn("Using `filename` is deprecated.", DeprecationWarning)
path = path / filename
filename = path.name

s_to_ms = 1000 # cor provides seconds, we want ms

corfile = path.open('r', encoding='utf-8')
header = list()
for line in corfile:
header.append(line.strip())
if header[-1] == '':
break

if header[0] != 'TTTR Correlator Export':
raise ValueError(f'Error while reading {path.name}. Expected the first line to be "TTTR Correlator Export"')

data_header = next(corfile).split()
data = np.loadtxt(corfile)

tau = data[:, get_header_index(data_header, 'tau/s')]*s_to_ms
correlation_names = ['G(A,A)', 'G(A,B)', 'G(B,B)']
correlation_types = ['AC', 'CC', 'AC' ]
correlations = list()

for correlation_name in correlation_names:
correlation = data[:, get_header_index(data_header, correlation_name)]
cor = np.zeros((len(correlation), 2), dtype=correlation.dtype)
cor[:, 0] = tau
cor[:, 1] = correlation
correlations.append(cor)

dictionary = dict()
dictionary['Correlation'] = correlations
dictionary['Trace'] = [[] for _ in dictionary['Correlation']]
dictionary['Type'] = correlation_types
dictionary['Filename'] = [path.name for _ in dictionary['Correlation']]
return dictionary

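A short usage sketch for the new reader, assuming a PicoQuant export named default_013.cor in the format shown in the docstring above (hypothetical file name):

from pycorrfit.readfiles import openCOR   # re-exported via readfiles/__init__.py

data = openCOR("default_013.cor")          # hypothetical example file

# Curves come back in the order G(A,A), G(A,B), G(B,B), typed AC, CC, AC.
for kind, curve in zip(data["Type"], data["Correlation"]):
    tau_ms = curve[:, 0]    # lag times, converted from seconds to ms
    g = curve[:, 1]         # correlation values as stored in the file
    print(kind, len(tau_ms), tau_ms[0], g[0])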