# Source code for pysteg.features.farid

##  $Id$
## -*- coding: utf-8 -*-

# UNDER REVIEW  ---  NOT IN USE

"""
The original feature vector of Lyu and Farid (2002).
36 simple statistics + 36 statistics of prediction errors.
Each 36-set has 12 features per wavelet decomposition level (levels 1,2,3).
Each 12-set has 4 features per direction (horizontal, vertical, diagonal).
Each 4-set is the mean, var, skew(ness), and kurtosis.

:Module:    pysteg.features.farid
:Date:      $Date$
:Revision:  $Revision$
:Copyright: © 2012: Hans Georg Schaathun <georg@schaathun.net>
"""

# Announce the module revision on import (RCS keyword expansion).
print("[pysteg.features.farid] $Id$")

# *********************
# pysteg.features.farid
# *********************
#
# .. automodule:: pysteg.features.farid
#
# ::

from ..wtools import getWavelet
from scipy.stats import skew as skewness, kurtosis
from numpy import mean, var
from scipy import log2
import numpy as np
import pywt

def reshape(A):
    """
    Return the elements of A as a single column vector, reading the
    transposed array in row-major (C) order, i.e. A column by column.
    """
    return np.transpose(A).reshape((A.size, 1))

def stats(E):
    """
    Return the four basic statistics -- mean, variance, skewness
    and kurtosis -- of the values in the array E.
    (Auxiliary function for the Lyu-Farid feature vector.)
    """
    values = E.flatten()
    return [f(values) for f in (mean, var, skewness, kurtosis)]

def predictionFeatures(L, verbosity=0):
    """
    Compute the linear prediction and its log-error statistics
    from a series of matrices.  (Auxiliary function for pred1().)

    The first array in L is the target subband; the remaining arrays
    are the predictor neighbourhoods.  A least-squares linear
    predictor is fitted, and the mean, variance, skewness and
    kurtosis of the log2 prediction error are returned.

    :param L: sequence of equally-shaped arrays; L[0] is the target.
    :param verbosity: print debug information when positive.
    :returns: list of four statistics of the log2 prediction error.
    :raises ValueError: if a zero occurs in a logarithm, or if the
        resulting statistics contain NaN or infinity.
    """
    S = [np.abs(A).flatten() for A in L]
    # Keep only coefficients whose *target* magnitude is at least 1;
    # near-zero magnitudes make the log-error statistics unstable.
    # (The original code compared the whole list S to 1, which does
    # not produce an element-wise mask.)
    B = (S[0] >= 1)
    S = [A[B][:, None] for A in S]
    T = S[0]                 # target column vector
    Q = np.hstack(S[1:])     # predictor matrix, one column per neighbour
    if verbosity > 0:
        print("T", type(T), T.shape, T.dtype)
        print("Q", type(Q), Q.shape, Q.dtype)

    # W -- regression weights: ordinary least squares,
    # W = (Q^T Q)^{-1} Q^T T.
    QT = Q.transpose()
    W = np.dot(np.dot(np.linalg.inv(np.dot(QT, Q)), QT), T)

    # P -- prediction of the target from the neighbours.
    P = np.dot(Q, W)

    # E -- log2 errors.  Guard the logarithms explicitly.
    if (T == 0).any() or (P == 0).any():
        print("T:", (T == 0).any())
        print("P:", (P == 0).any())
        raise ValueError("Zero occurs in logarithm")
    E = (np.log2(T) - np.log2(np.abs(P))).flatten()
    if verbosity > 2:
        print(E.dtype, T.dtype, P.dtype, Q.dtype, W.dtype)
        print(np.min(T.flatten()), np.min(P.flatten()))
    # The four error statistics (same quantities as stats()).
    R = [mean(E), var(E), skewness(E), kurtosis(E)]
    if any(bool(np.isinf(r) or np.isnan(r)) for r in R):
        print(R)
        raise ValueError("NaN or inf detected in Farid feature.")
    return R

# Creating the prediction image
#
# ::

def pred1(hv, verbosity=0, *a, **kw):
    """
    Compute the linear prediction error statistics from one
    level of the wavelet decomposition.

    :param hv: list of detail-coefficient triples (H,V,D), coarsest
        first; hv[-1] is the current level, hv[-2] its parent.
    :param verbosity: print progress information when positive.
    :returns: list of 12 features (4 statistics per direction).
    """
    (H, V, D) = hv[-1]
    (H1, V1, D1) = hv[-2]
    R = []
    (X, Y) = H.shape
    # Parent indices: coefficient (i,j) at this scale corresponds to
    # (i//2, j//2) at the coarser scale.  Integer (floor) division is
    # required here; the original Python 2 `i/2` relied on that.
    hx = [i // 2 for i in range(1, X - 1)]
    hy = [i // 2 for i in range(1, Y - 1)]
    D1 = D1[hx, :][:, hy]
    H1 = H1[hx, :][:, hy]
    V1 = V1[hx, :][:, hy]
    if verbosity > 0:
        print("H", end=" ")
    R += predictionFeatures([
        H[1:-1, 1:-1], H[0:-2, 1:-1], H[1:-1, 0:-2],
        H1, D[1:-1, 1:-1], D1,
        H[2:, 1:-1], H[1:-1, 2:]], verbosity=verbosity)
    if verbosity > 0:
        print("V", end=" ")
    R += predictionFeatures([
        V[1:-1, 1:-1], V[0:-2, 1:-1], V[1:-1, 0:-2],
        V1, D[1:-1, 1:-1], D1, V[2:, 1:-1],
        V[1:-1, 2:]], verbosity=verbosity)
    if verbosity > 0:
        print("D", end=" ")
    R += predictionFeatures([
        D[1:-1, 1:-1], D[0:-2, 1:-1], D[1:-1, 0:-2], D1,
        H[1:-1, 1:-1], V[1:-1, 1:-1], D[2:, 1:-1], D[1:-1, 2:]],
        verbosity=verbosity)
    return R

def pred(H, *a, **kw):
    """
    Given a tuple H containing a wavelet decomposition of an image,
    calculate the Lyu-Farid features based on the linear predictor.

    Each prefix of H with at least three entries contributes one
    level of prediction-error features, coarsest prefix first.
    Fewer than three entries yield no features.
    """
    R = []
    for k in range(3, len(H) + 1):
        R += pred1(H[:k], *a, **kw)
    return R

def fxbase(H, *a, **kw):
    """
    Return the basic coefficient statistics (mean, variance, skewness,
    kurtosis) for every subband in every entry of H past the first two
    (i.e. skipping the approximation and the coarsest detail level).
    """
    return [feature
            for level in H[2:]
            for band in level
            for feature in stats(band)]

# The Feature Vector
# ==================

def farid36(I, name="qmf9", *a, **kw):
    """
    Calculate the 36 Farid features from the image I,
    excluding the prediction image features.
    Optionally a wavelet name can be given as well.
    """
    wavelet = getWavelet(name)
    decomposition = pywt.wavedec2(I.astype(float), wavelet, level=4)
    return fxbase(decomposition, *a, **kw)

def farid36pred(I, name="qmf9", *a, **kw):
    """
    Calculate the 36 Farid features from the prediction image of I.
    Optionally a wavelet name can be given as well.
    """
    wavelet = getWavelet(name)
    decomposition = pywt.wavedec2(I.astype(float), wavelet, level=4)
    return pred(decomposition, *a, **kw)