# coding: utf-8
from __future__ import division, print_function, unicode_literals

from sympy.functions.special.gamma_functions import lowergamma, gamma
from sympy.functions.special.error_functions import erf
from uncertainties.core import UFloat
from uncertainties import ufloat, wrap, correlated_values
from numpy.polynomial.polynomial import polyfit

import collections
import types
import numpy as np
import string


##
## Variables
##

class sample(np.ndarray):
    """
    Sample type (ndarray subclass)
    Given a data sample, computes several statistical properties:
    n: number of measures
    min: minimum value
    max: maximum value
    mean: sample mean
    med: median value
    var: sample variance
    std: sample standard deviation
    stdm: standard deviation of the mean
    """
    __array_priority__ = 2

    def __new__(cls, *sample):
        if isinstance(sample[0], types.GeneratorType):
            sample = list(sample[0])
        s = np.asarray(sample).view(cls)
        return s

    def __str__(self):
        return format_measure(self.mean, self.stdm)

    def __array_finalize__(self, obj):
        if obj is None:
            return
        obj = np.asarray(obj)
        self.n = len(obj)
        self.min = np.min(obj)
        self.max = np.max(obj)
        self.mean = np.mean(obj)
        self.med = np.median(obj)
        self.var = np.var(obj, ddof=1)
        self.std = np.sqrt(self.var)
        self.stdm = self.std / np.sqrt(self.n)

    def to_ufloat(self):
        return ufloat(self.mean, self.stdm)
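
# A minimal usage sketch for `sample` (values are made up):
#
#     s = sample(4.1, 3.9, 4.0, 4.2)   # or sample(x for x in data)
#     s.n, s.mean, s.std, s.stdm       # statistics computed on creation
#     print(s)                         # rendered as mean±stdm via format_measure
#     s.to_ufloat()                    # ufloat(s.mean, s.stdm) for error propagation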

##
## Normal distribution
##


def phi(x, mu, sigma):
    """
    Normal CDF
    computes the probability P(X<x) given X is a normally distributed
    random variable with mean μ and standard deviation σ.
    """
    return float(1 + erf((x-mu) / (sigma*np.sqrt(2))))/2


def p(mu, sigma, a, b=None):
    """
    Normal CDF shorthand
    given a normally distributed random variable X, with mean μ
    and standard deviation σ, computes the probability P(a<X<b),
    or P(X<a) if b is not given.
    """
    if b is not None:
        return phi(b, mu, sigma) - phi(a, mu, sigma)  # P(a<X<b)
    return phi(a, mu, sigma)  # P(X<a)
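
# Sanity-check examples for the normal CDF helpers (standard normal, so the
# numbers below are the familiar textbook values):
#
#     phi(1, 0, 1)     # ≈ 0.84, P(X<1)
#     p(0, 1, -1, 1)   # ≈ 0.68, P(-1<X<1), i.e. the 1σ coverage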

def chi(x, d):
    """
    χ² CDF
    given a χ² distribution with d degrees of freedom
    computes the probability P(χ²<x)
    """
    return float(lowergamma(d/2, x/2)/gamma(d/2))


def q(x, d):
    """
    χ² 1-CDF
    given a χ² distribution with d degrees of freedom
    computes the probability P(χ²>x)
    """
    return 1 - chi(x, d)
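
# Hedged check: for 1 degree of freedom the 95% critical value of the χ²
# distribution is about 3.84, so q should return a p-value near 0.05
# (illustrative, not part of the original module):
#
#     q(3.84, 1)      # ≈ 0.05
#     chi(3.84, 1)    # ≈ 0.95, since chi is the CDF and q = 1 - chi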

def chi_squared(X, O, B, s=2):
    """
    χ² test for a histogram
    given
        X: a normally distributed variable
        O: ndarray of normalized absolute frequencies
        B: ndarray of bin delimiters
        s: number of constraints on X
    computes the probability P(χ²>χ₀²)
    """
    N, M = X.n, len(O)
    d = M-1-s
    delta = (X.max - X.min)/M

    E = [N*p(X.mean, X.std, B[k], B[k+1]) for k in range(M)]
    Chi = sum((O[k]/delta - E[k])**2/E[k] for k in range(M))

    return q(Chi, d)
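
# Rough usage sketch for the histogram test. It assumes O is scaled so that
# O[k]/delta recovers the observed count in bin k, as the code above implies;
# the data below are invented:
#
#     X = sample(x for x in np.random.normal(10, 2, 500))
#     counts, B = np.histogram(X, bins=8)
#     delta = (X.max - X.min)/8
#     alpha = chi_squared(X, counts*delta, B)   # p-value for the normality hypothesis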

def chi_squared_fit(X, Y, f, sigma=None, s=2):
    """
    χ² test for fitted data
    given
        X: independent variable
        Y: dependent variable
        f: best fit function
        sigma: uncertainty on Y, number or ndarray (optional)
        s: number of constraints on the data (optional)
    """
    if sigma is None:
        sigma_Y = np.mean([i.std for i in Y])
    else:
        sigma_Y = np.asarray(sigma).mean()
    Chi = sum(((Y - f(X))/sigma_Y)**2)
    return q(Chi, Y.size-s)
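
# Illustrative combination with the regression helpers defined below: fit a
# line, then test the residuals (X, Y, sigma_Y are placeholders):
#
#     A, B = linear(X, Y, sigma_Y)
#     f = lambda x: A.n + B.n*x
#     alpha = chi_squared_fit(X, Y, f, sigma=sigma_Y)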

##
## Regressions
##


def simple_linear(X, Y, sigma_Y):
    """
    Linear regression of line Y=kX
    """
    sigma = np.asarray(sigma_Y).mean()
    k = sum(X*Y) / sum(X**2)
    sigma_k = sigma / np.sqrt(sum(X**2))
    return ufloat(k, sigma_k)


def linear(X, Y, sigma_Y):
    """
    Linear regression of line Y=A+BX
    """
    sigma = np.asarray(sigma_Y).mean()

    N = len(Y)
    D = N*sum(X**2) - sum(X)**2
    A = (sum(X**2)*sum(Y) - sum(X)*sum(X*Y))/D
    B = (N*sum(X*Y) - sum(X)*sum(Y))/D

    sigma_A = sigma * np.sqrt(sum(X**2)/D)
    sigma_B = sigma * np.sqrt(N/D)

    return (ufloat(A, sigma_A),
            ufloat(B, sigma_B))
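
# Quick sketch of the regression helpers (synthetic data, for illustration):
#
#     X = np.array([0., 1., 2., 3., 4.])
#     Y = np.array([1.1, 2.9, 5.2, 6.8, 9.1])
#     A, B = linear(X, Y, 0.1)       # intercept and slope as ufloats
#     k = simple_linear(X, Y, 0.1)   # slope of Y=kX through the origin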

def polynomial(X, Y, d=2, sigma_Y=None):
    """
    d-th degree polynomial fit
    """
    weights = None
    if sigma_Y is not None:
        if isinstance(sigma_Y, collections.Iterable):
            weights = 1/np.asarray(sigma_Y)
        else:
            weights = np.repeat(1/sigma_Y, len(X))
    coeff, cov = np.polyfit(X, Y, d, w=weights,
                            cov=True, full=False)
    print(cov)
    return correlated_values(coeff, cov)
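
# Hedged example (invented data; note that np.polyfit needs more than d+2
# points to return a covariance matrix):
#
#     X = np.array([0., 1., 2., 3., 4., 5.])
#     Y = np.array([0.9, 2.1, 5.2, 9.8, 17.1, 26.2])
#     a, b, c = polynomial(X, Y, d=2, sigma_Y=0.2)   # coefficients, highest degree first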

##
## Misc
##


def check_measures(X1, X2):
    """
    Checks whether the results of two sets of measures
    are compatible with each other. Gives the α-value
    α=1-P(X within tσ) where t is the weighted difference
    of X₁ and X₂
    """
    t = np.abs(X1.n - X2.n)/np.sqrt(X1.s**2 + X2.s**2)
    print(t)
    return float(1 - erf(t/np.sqrt(2)))


def combine_measures(*variables):
    """
    Combines different (compatible) measures of the same
    quantity, producing a new value with a smaller uncertainty
    than the individually taken ones
    """
    W = np.array([1/i.s**2 for i in variables])
    X = np.array([i.n for i in variables])

    best = sum(W*X)/sum(W)
    sigma = 1/np.sqrt(sum(W))

    return ufloat(best, sigma)
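
# A small, hedged example of comparing and pooling two measurements of the
# same quantity (numbers are invented):
#
#     g1 = ufloat(9.81, 0.03)
#     g2 = ufloat(9.87, 0.05)
#     alpha = check_measures(g1, g2)   # large α ⇒ the two values are compatible
#     g = combine_measures(g1, g2)     # inverse-variance weighted mean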

def format_measure(mean, sigma):
    """
    Formats a measure in the standard mean±sigma notation
    """
    prec = int(np.log10(abs(sigma)))
    limit = 2-prec if prec < 0 else 2
    sigma = round(sigma, limit)
    mean = round(mean, limit)

    return "{}±{}".format(mean, sigma)
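
# For instance (rounding is driven by the magnitude of sigma):
#
#     format_measure(9.816, 0.147)   # → '9.82±0.15'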

# sine of an angle in degrees, wrapped so it also accepts values with uncertainties
sin = wrap(lambda x: np.sin(np.radians(x)))

epsilon = 0.05