
Source code for sloth.mean_square_analyser

# -*- coding: utf-8 -*-
"""
Created on Thu Feb  9 13:40:05 2012

@author: monika kauer
"""
import os
import csv
import math
import numpy as np
from numpy import sqrt
from scipy import stats, optimize
import matplotlib.pylab as pylab

#==============================================================================
#  Mean-square  methods
#==============================================================================

def msq_make_increments(length):
    """Returns the successive differences length[i+1] - length[i]."""
    inc = []
    for i in range(0, len(length) - 1):
        inc.append(length[i + 1] - length[i])
    return inc
        
def msq_increments(inc, sampling):
    """Block-averages drift-corrected increments over consecutive windows of `sampling` points."""
    msq = list()
    for i in range(0, len(inc), sampling):
        msq.append(np.average(inc[i:i + sampling]))
    return msq
def msq_drift_sub(increments, v, dt):
    """Subtracts the drift contribution v*dt from every increment."""
    msq = list()
    for i in increments:
        msq.append(i - v * dt)
    return msq

def msq_noise(increments):
    """Mean product of neighbouring increments (localisation-noise estimator)."""
    o = []
    for i in range(len(increments) - 1):
        o.append(increments[i] * increments[i + 1])
    return np.average(o)

def msq_D(increments):
    """Mean squared increment."""
    D = []
    for i in range(len(increments)):
        D.append(increments[i] ** 2)
    return np.average(D)

def msq_varD(D, o, dt, N):
    """Variance of the diffusion-coefficient estimator."""
    a = D * dt
    b = -1 / 3. * D * dt + o
    return (6 * a ** 2 + 4 * a * b + 2 * b ** 2) / N / dt ** 2 + (4 * (a + b) ** 2) / N ** 2 / dt ** 2

def msq_varO(D, o, dt, N):
    """Variance of the noise estimator."""
    a = D * dt
    b = o
    return (4 * (N + 1) * a ** 2 + 8 * a * b * (N + 1) + b ** 2 * (7 * N + 5)) / N * 2
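# Note (editorial comment, not part of the original module): msq_D and msq_noise
# are the two covariance estimators that msq_from_data combines below, i.e.
#     D = <dx^2>/(2*dt) + <dx_n * dx_(n+1)>/dt
#     o = (<dx^2>/2 - 2*<dx_n * dx_(n+1)>)/3
# where dx are the drift-corrected increments; msq_varD and msq_varO give the
# corresponding estimator variances.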
def msq_from_data(length, dt, sampling):
    """Calculates mean-square displacement parameters for a series of lengths
    using the Vestergaard method of covariance estimators."""
    # calculate displacement increments
    increments = msq_make_increments(length)
    increments = msq_increments(increments, sampling)
    N = len(increments)  # correct N after reducing increments
    v = (length[-1] - length[0]) / dt / N  # superseded by the estimate below
    v = np.average(increments) / dt * sampling
    increments = msq_drift_sub(increments, v, dt)
    D = msq_D(increments) / 2. / dt + msq_noise(increments) / dt
    o = (msq_D(increments) / 2. - 2 * msq_noise(increments)) / 3.
    # calculate errors
    dv = np.std(increments, ddof=1) / dt * sampling / N ** 0.5
    dD = msq_varD(D, o, dt, N) ** 0.5 / N ** 0.5
    do = msq_varO(D, o, dt, N) ** 0.5 / N ** 0.5
    return (o, do, D, dD, v, dv)
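# Example usage (illustrative sketch, not part of the original module; the
# names and numbers below are invented for the illustration):
#
#     dt = 0.1                                              # frame interval
#     trace = np.cumsum(np.random.normal(0.0, 0.05, 2000))  # synthetic 1-D trace
#     o, do, D, dD, v, dv = msq_from_data(trace, dt, sampling=1)
#     print("D = %g +/- %g, noise = %g +/- %g, drift = %g +/- %g"
#           % (D, dD, o, do, v, dv))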
def welch_test(x, y, semx, semy, dofx, dofy):
    """Returns the t statistic and degrees of freedom for Welch's unpaired t-test."""
    t = abs(x - y) / sqrt(semx ** 2 + semy ** 2)
    dof = (semx ** 2 + semy ** 2) ** 2 / (semx ** 4 / dofx + semy ** 4 / dofy)
    return t, dof
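# Example usage (illustrative sketch; D1, D2, dD1, dD2, N1, N2 are hypothetical
# estimates, standard errors and sample sizes from two experiments):
#
#     t, dof = welch_test(D1, D2, dD1, dD2, N1 - 1, N2 - 1)
#     p_value = 2 * stats.t.sf(t, dof)   # two-sided p-value via scipy.stats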
def tracker_data_2(workingDir):
    """Reads all tracker files in a folder and averages the bead coordinates."""
    bead_pos = []
    for datei in os.listdir(workingDir):
        raw_data = []
        with open(workingDir + "/" + datei) as fh:
            a = csv.reader(fh, delimiter=" ")
            # next(a)  # skip header
            for row in a:
                raw_data.append([float(row[0]), float(row[1])])
        bead_pos.append(raw_data)
    bead_pos = np.transpose(bead_pos)
    f = open(workingDir + "/../%s_drift" % (datei.split("-")[0]), 'w')
    x = []
    y = []
    for j in range(len(bead_pos[0])):
        x.append(np.average(bead_pos[0][j]))
        y.append(np.average(bead_pos[1][j]))
        f.write('%f %f \n' % (x[-1], y[-1]))
    f.close()
    return [x, y]
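# Note (editorial comment, not part of the original module): tracker_data_2
# assumes every file in workingDir is a space-delimited text file whose first
# two columns are x and y, with the same number of rows in each file; the
# averaged drift trace is written one level up as "<prefix>_drift", where the
# prefix is taken from the last file name before its first "-".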
def msq_from_data_lsq(length, dt, v):
    """Calculates mean-square displacement values for a series of lengths.
    Time intervals are taken as powers of 2 to avoid correlations."""
    # calculate mean-square displacement at logarithmically spaced lags
    tau = []
    i = 1
    # log-spaced lags to reduce correlations
    nmax = np.log2(len(length) / 2.)
    for a in range(0, int(np.floor(nmax))):
        tau.append(i * 2)
        i = i * 2
    avg = []
    weights = []
    print(tau)
    for t in tau:
        msq = []
        for i in range(0, len(length) - t, t + 1):
            msq.append((length[i] - length[i + t]) ** 2)
        msq = [k for k in msq if not np.isnan(k)]
        if len(msq) > 1:
            avg.append(np.average(msq))
            weights.append(np.std(msq, ddof=1) / sqrt(len(msq)))
    tau = [x * dt for x in tau]
    p, fit, pcov, info = lq_diffusion_fit_constrained(tau, avg, weights, [10.0, 10.0, v])
    s2 = sum(info['fvec'] * info['fvec'])  # chi-square from the fit residuals
    o = p[2]
    D = p[1] / 2
    v = p[0]
    do = math.sqrt(pcov[2][2]) * math.sqrt(s2 / (len(fit) - 3))
    dD = math.sqrt(pcov[1][1]) * math.sqrt(s2 / (len(fit) - 3)) / 2.
    dv = math.sqrt(pcov[0][0]) * math.sqrt(s2 / (len(fit) - 3))
    return (o, do, D, dD, v, dv)
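# Example usage (illustrative sketch; trace and dt as in the earlier sketch for
# msq_from_data, v0 is an invented initial drift guess):
#
#     v0 = 0.0
#     o, do, D, dD, v, dv = msq_from_data_lsq(trace, dt, v0)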
def lq_diffusion_fit_constrained(times, msq_data, errors, p0):
    '''Takes three arrays (times, msq values and errors, SD or SEM) and fits a
    diffusion model with three parameters:
        y = (p[0]*x)**2 + p[1]*x + p[2]
    Return values: parameters (a, b, c), the fit at the x values, the covariance
    matrix and the full optimizer output.
    '''
    msq_data = np.array(msq_data)
    times = np.array(times)
    errors = np.array(errors)
    fitfunc = lambda p, x: (p[0] * x) ** 2 + p[1] * x + p[2]  # target function

    def errfunc(p, times, msq_data, errors):
        # penalise negative parameters to constrain the fit to positive values
        if p[0] < 0 or p[1] < 0 or p[2] < 0:
            return 1e10 * np.ones_like(msq_data)
        return (msq_data - fitfunc(p, times)) / errors

    # weighted least-squares fit
    p1, pcov, info, mesg, ier = optimize.leastsq(errfunc, p0,
                                                 args=(times, msq_data, errors),
                                                 full_output=True)
    fit = fitfunc(p1, times)
    return p1, fit, pcov, info
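# The block below is an editorial addition, not part of the original module: a
# minimal self-contained demo of the covariance-estimator route on a synthetic
# random walk. All parameter values are arbitrary illustrations.
if __name__ == "__main__":
    np.random.seed(0)
    dt = 0.1                                              # frame interval
    trace = np.cumsum(np.random.normal(0.0, 0.05, 2000))  # drift-free 1-D walk
    o, do, D, dD, v, dv = msq_from_data(trace, dt, 1)
    print("noise = %g +/- %g" % (o, do))
    print("D     = %g +/- %g" % (D, dD))
    print("drift = %g +/- %g" % (v, dv))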