repo_name (stringlengths 6-103) | path (stringlengths 4-209) | copies (stringclasses, 325 values) | size (stringlengths 4-7) | content (stringlengths 838-1.04M) | license (stringclasses, 15 values)
---|---|---|---|---|---|
arabenjamin/scikit-learn | sklearn/utils/tests/test_multiclass.py | 128 | 12853 |
from __future__ import division
import numpy as np
import scipy.sparse as sp
from itertools import product
from sklearn.externals.six.moves import xrange
from sklearn.externals.six import iteritems
from scipy.sparse import issparse
from scipy.sparse import csc_matrix
from scipy.sparse import csr_matrix
from scipy.sparse import coo_matrix
from scipy.sparse import dok_matrix
from scipy.sparse import lil_matrix
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_raises_regex
from sklearn.utils.multiclass import unique_labels
from sklearn.utils.multiclass import is_multilabel
from sklearn.utils.multiclass import type_of_target
from sklearn.utils.multiclass import class_distribution
class NotAnArray(object):
"""An object that is convertable to an array. This is useful to
simulate a Pandas timeseries."""
def __init__(self, data):
self.data = data
def __array__(self):
return self.data
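# Illustration (a hedged sketch, not part of the original tests):
# np.asarray triggers __array__, so
#   np.asarray(NotAnArray(np.array([1, 2])))  ->  array([1, 2])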
EXAMPLES = {
'multilabel-indicator': [
# valid when the data is formatted as sparse or dense, identified
# by CSR format when the testing takes place
csr_matrix(np.random.RandomState(42).randint(2, size=(10, 10))),
csr_matrix(np.array([[0, 1], [1, 0]])),
csr_matrix(np.array([[0, 1], [1, 0]], dtype=np.bool)),
csr_matrix(np.array([[0, 1], [1, 0]], dtype=np.int8)),
csr_matrix(np.array([[0, 1], [1, 0]], dtype=np.uint8)),
csr_matrix(np.array([[0, 1], [1, 0]], dtype=np.float)),
csr_matrix(np.array([[0, 1], [1, 0]], dtype=np.float32)),
csr_matrix(np.array([[0, 0], [0, 0]])),
csr_matrix(np.array([[0, 1]])),
# Only valid when data is dense
np.array([[-1, 1], [1, -1]]),
np.array([[-3, 3], [3, -3]]),
NotAnArray(np.array([[-3, 3], [3, -3]])),
],
'multiclass': [
[1, 0, 2, 2, 1, 4, 2, 4, 4, 4],
np.array([1, 0, 2]),
np.array([1, 0, 2], dtype=np.int8),
np.array([1, 0, 2], dtype=np.uint8),
np.array([1, 0, 2], dtype=np.float),
np.array([1, 0, 2], dtype=np.float32),
np.array([[1], [0], [2]]),
NotAnArray(np.array([1, 0, 2])),
[0, 1, 2],
['a', 'b', 'c'],
np.array([u'a', u'b', u'c']),
np.array([u'a', u'b', u'c'], dtype=object),
np.array(['a', 'b', 'c'], dtype=object),
],
'multiclass-multioutput': [
np.array([[1, 0, 2, 2], [1, 4, 2, 4]]),
np.array([[1, 0, 2, 2], [1, 4, 2, 4]], dtype=np.int8),
np.array([[1, 0, 2, 2], [1, 4, 2, 4]], dtype=np.uint8),
np.array([[1, 0, 2, 2], [1, 4, 2, 4]], dtype=np.float),
np.array([[1, 0, 2, 2], [1, 4, 2, 4]], dtype=np.float32),
np.array([['a', 'b'], ['c', 'd']]),
np.array([[u'a', u'b'], [u'c', u'd']]),
np.array([[u'a', u'b'], [u'c', u'd']], dtype=object),
np.array([[1, 0, 2]]),
NotAnArray(np.array([[1, 0, 2]])),
],
'binary': [
[0, 1],
[1, 1],
[],
[0],
np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1]),
np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.bool),
np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.int8),
np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.uint8),
np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.float),
np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.float32),
np.array([[0], [1]]),
NotAnArray(np.array([[0], [1]])),
[1, -1],
[3, 5],
['a'],
['a', 'b'],
['abc', 'def'],
np.array(['abc', 'def']),
[u'a', u'b'],
np.array(['abc', 'def'], dtype=object),
],
'continuous': [
[1e-5],
[0, .5],
np.array([[0], [.5]]),
np.array([[0], [.5]], dtype=np.float32),
],
'continuous-multioutput': [
np.array([[0, .5], [.5, 0]]),
np.array([[0, .5], [.5, 0]], dtype=np.float32),
np.array([[0, .5]]),
],
'unknown': [
[[]],
[()],
# sequences of sequences that weren't supported even before deprecation
np.array([np.array([]), np.array([1, 2, 3])], dtype=object),
[np.array([]), np.array([1, 2, 3])],
[set([1, 2, 3]), set([1, 2])],
[frozenset([1, 2, 3]), frozenset([1, 2])],
# and also confusable as sequences of sequences
[{0: 'a', 1: 'b'}, {0: 'a'}],
# empty second dimension
np.array([[], []]),
# 3d
np.array([[[0, 1], [2, 3]], [[4, 5], [6, 7]]]),
]
}
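# For orientation (a hedged sketch, not part of the original tests),
# type_of_target maps representative EXAMPLES values as follows:
#   type_of_target([0, 1])                      -> 'binary'
#   type_of_target([1, 0, 2])                   -> 'multiclass'
#   type_of_target(np.array([[0, 1], [1, 0]]))  -> 'multilabel-indicator'
#   type_of_target([0, .5])                     -> 'continuous'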
NON_ARRAY_LIKE_EXAMPLES = [
set([1, 2, 3]),
{0: 'a', 1: 'b'},
{0: [5], 1: [5]},
'abc',
frozenset([1, 2, 3]),
None,
]
MULTILABEL_SEQUENCES = [
[[1], [2], [0, 1]],
[(), (2,), (0, 1)],
np.array([[], [1, 2]], dtype='object'),
NotAnArray(np.array([[], [1, 2]], dtype='object'))
]
def test_unique_labels():
# Empty iterable
assert_raises(ValueError, unique_labels)
# Multiclass problem
assert_array_equal(unique_labels(xrange(10)), np.arange(10))
assert_array_equal(unique_labels(np.arange(10)), np.arange(10))
assert_array_equal(unique_labels([4, 0, 2]), np.array([0, 2, 4]))
# Multilabel indicator
assert_array_equal(unique_labels(np.array([[0, 0, 1],
[1, 0, 1],
[0, 0, 0]])),
np.arange(3))
assert_array_equal(unique_labels(np.array([[0, 0, 1],
[0, 0, 0]])),
np.arange(3))
# Several arrays passed
assert_array_equal(unique_labels([4, 0, 2], xrange(5)),
np.arange(5))
assert_array_equal(unique_labels((0, 1, 2), (0,), (2, 1)),
np.arange(3))
# Border line case with binary indicator matrix
assert_raises(ValueError, unique_labels, [4, 0, 2], np.ones((5, 5)))
assert_raises(ValueError, unique_labels, np.ones((5, 4)), np.ones((5, 5)))
assert_array_equal(unique_labels(np.ones((4, 5)), np.ones((5, 5))),
np.arange(5))
def test_unique_labels_non_specific():
# Test unique_labels with a variety of collected examples
# Smoke test for all supported formats
for format in ["binary", "multiclass", "multilabel-indicator"]:
for y in EXAMPLES[format]:
unique_labels(y)
# We don't support those formats at the moment
for example in NON_ARRAY_LIKE_EXAMPLES:
assert_raises(ValueError, unique_labels, example)
for y_type in ["unknown", "continuous", 'continuous-multioutput',
'multiclass-multioutput']:
for example in EXAMPLES[y_type]:
assert_raises(ValueError, unique_labels, example)
def test_unique_labels_mixed_types():
# Mix with binary or multiclass and multilabel
mix_clf_format = product(EXAMPLES["multilabel-indicator"],
EXAMPLES["multiclass"] +
EXAMPLES["binary"])
for y_multilabel, y_multiclass in mix_clf_format:
assert_raises(ValueError, unique_labels, y_multiclass, y_multilabel)
assert_raises(ValueError, unique_labels, y_multilabel, y_multiclass)
assert_raises(ValueError, unique_labels, [[1, 2]], [["a", "d"]])
assert_raises(ValueError, unique_labels, ["1", 2])
assert_raises(ValueError, unique_labels, [["1", 2], [1, 3]])
assert_raises(ValueError, unique_labels, [["1", "2"], [2, 3]])
def test_is_multilabel():
for group, group_examples in iteritems(EXAMPLES):
if group in ['multilabel-indicator']:
dense_assert_, dense_exp = assert_true, 'True'
else:
dense_assert_, dense_exp = assert_false, 'False'
for example in group_examples:
# Only mark explicitly defined sparse examples as valid sparse
# multilabel-indicators
if group == 'multilabel-indicator' and issparse(example):
sparse_assert_, sparse_exp = assert_true, 'True'
else:
sparse_assert_, sparse_exp = assert_false, 'False'
if (issparse(example) or
(hasattr(example, '__array__') and
np.asarray(example).ndim == 2 and
np.asarray(example).dtype.kind in 'biuf' and
np.asarray(example).shape[1] > 0)):
examples_sparse = [sparse_matrix(example)
for sparse_matrix in [coo_matrix,
csc_matrix,
csr_matrix,
dok_matrix,
lil_matrix]]
for exmpl_sparse in examples_sparse:
sparse_assert_(is_multilabel(exmpl_sparse),
msg=('is_multilabel(%r)'
' should be %s')
% (exmpl_sparse, sparse_exp))
# Densify sparse examples before testing
if issparse(example):
example = example.toarray()
dense_assert_(is_multilabel(example),
msg='is_multilabel(%r) should be %s'
% (example, dense_exp))
def test_type_of_target():
for group, group_examples in iteritems(EXAMPLES):
for example in group_examples:
assert_equal(type_of_target(example), group,
msg=('type_of_target(%r) should be %r, got %r'
% (example, group, type_of_target(example))))
for example in NON_ARRAY_LIKE_EXAMPLES:
msg_regex = r'Expected array-like \(array or non-string sequence\).*'
assert_raises_regex(ValueError, msg_regex, type_of_target, example)
for example in MULTILABEL_SEQUENCES:
msg = ('You appear to be using a legacy multi-label data '
'representation. Sequence of sequences are no longer supported;'
' use a binary array or sparse matrix instead.')
assert_raises_regex(ValueError, msg, type_of_target, example)
def test_class_distribution():
y = np.array([[1, 0, 0, 1],
[2, 2, 0, 1],
[1, 3, 0, 1],
[4, 2, 0, 1],
[2, 0, 0, 1],
[1, 3, 0, 1]])
# Define the sparse matrix with a mix of implicit and explicit zeros
data = np.array([1, 2, 1, 4, 2, 1, 0, 2, 3, 2, 3, 1, 1, 1, 1, 1, 1])
indices = np.array([0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 5, 0, 1, 2, 3, 4, 5])
indptr = np.array([0, 6, 11, 11, 17])
y_sp = sp.csc_matrix((data, indices, indptr), shape=(6, 4))
classes, n_classes, class_prior = class_distribution(y)
classes_sp, n_classes_sp, class_prior_sp = class_distribution(y_sp)
classes_expected = [[1, 2, 4],
[0, 2, 3],
[0],
[1]]
n_classes_expected = [3, 3, 1, 1]
class_prior_expected = [[3/6, 2/6, 1/6],
[1/3, 1/3, 1/3],
[1.0],
[1.0]]
for k in range(y.shape[1]):
assert_array_almost_equal(classes[k], classes_expected[k])
assert_array_almost_equal(n_classes[k], n_classes_expected[k])
assert_array_almost_equal(class_prior[k], class_prior_expected[k])
assert_array_almost_equal(classes_sp[k], classes_expected[k])
assert_array_almost_equal(n_classes_sp[k], n_classes_expected[k])
assert_array_almost_equal(class_prior_sp[k], class_prior_expected[k])
# Test again with explicit sample weights
(classes,
n_classes,
class_prior) = class_distribution(y, [1.0, 2.0, 1.0, 2.0, 1.0, 2.0])
(classes_sp,
n_classes_sp,
class_prior_sp) = class_distribution(y_sp, [1.0, 2.0, 1.0, 2.0, 1.0, 2.0])
class_prior_expected = [[4/9, 3/9, 2/9],
[2/9, 4/9, 3/9],
[1.0],
[1.0]]
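# (Sanity check of the weighted priors for column 0: class 1 occurs in rows
# 0, 2, 5 with total weight 1+1+2 = 4; class 2 in rows 1, 4 with weight
# 2+1 = 3; class 4 in row 3 with weight 2; total weight 9.)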
for k in range(y.shape[1]):
assert_array_almost_equal(classes[k], classes_expected[k])
assert_array_almost_equal(n_classes[k], n_classes_expected[k])
assert_array_almost_equal(class_prior[k], class_prior_expected[k])
assert_array_almost_equal(classes_sp[k], classes_expected[k])
assert_array_almost_equal(n_classes_sp[k], n_classes_expected[k])
assert_array_almost_equal(class_prior_sp[k], class_prior_expected[k])
| bsd-3-clause |
peter-kiechle/tactile-sensors | python/classification/train_classifier.py | 1 | 16823 | # -*- coding: utf-8 -*-
import os, sys
import numpy as np
from scipy import stats
from sklearn import svm
from sklearn.lda import LDA
from sklearn.externals import joblib
# Library path
print("CWD: " + os.getcwd() )
lib_path = os.path.abspath('../../lib')
sys.path.append(lib_path)
import framemanager_python
# --------------------
# Auxiliary functions
# --------------------
def eval_if_defined(var_name):
"""Return the value of the global with the given name, or False if unset."""
return globals().get(var_name, False)
def find_nearest(array, value):
idx = np.argmin(np.abs(array - value))
return array[idx]
def find_nearest_idx(array, value):
idx = np.argmin(np.abs(array - value))
return idx
def listdir_fullpath(path):
return [os.path.join(path, f) for f in os.listdir(path)]
def get_immediate_subdirectories(path):
return sorted([f for f in os.listdir(path) if os.path.isdir(os.path.join(path, f))])
def get_profiles(path):
return sorted([f for f in listdir_fullpath(path) if f.endswith(".dsa") and os.path.isfile(f)])
# Taken from http://stackoverflow.com/questions/4494404/find-large-number-of-consecutive-values-fulfilling-condition-in-a-numpy-array
# Author: Joe Kington
def contiguous_regions(condition):
"""Finds contiguous True regions of the boolean array "condition". Returns
a 2D array where the first column is the start index of the region and the
second column is the end index."""
# Find the indices of changes in "condition"
d = np.diff(condition)
idx, = d.nonzero()
# We need to start things after the change in "condition". Therefore,
# we'll shift the index by 1 to the right.
idx += 1
if condition[0]:
# If the start of condition is True prepend a 0
idx = np.r_[0, idx]
if condition[-1]:
# If the end of condition is True, append the length of the array
idx = np.r_[idx, condition.size] # Edit
# Reshape the result into two columns
idx.shape = (-1,2)
return idx
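# Usage sketch (illustrative, not from the original source; assumes a
# numpy version where diff() on booleans is allowed):
#   >>> contiguous_regions(np.array([False, True, True, False, True]))
#   array([[1, 3],
#          [4, 5]])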
# ------------------------------------------------------------------
# Prepare relevant pressure profiles and corresponding class labels
# ------------------------------------------------------------------
def list_pressure_profiles(profile_folder):
profiles = []
subfolders = get_immediate_subdirectories(profile_folder)
for folder in subfolders:
class_folder = os.path.join(profile_folder, folder)
class_profiles = get_profiles(class_folder)
profiles.extend(class_profiles)
return profiles
# ------------------------------------------
# Compute and store / load training samples
# ------------------------------------------
def provide_training_data(training_profiles, frameManager, featureExtractor, recompute_features=False, save_features=True):
global training_samples_raw
global training_sample_ids
global training_labels
global training_categories
if not eval_if_defined('features_available'): # Skip the rest if features are already available
# Try to load precomputed features from file
loading_failed = False
if not recompute_features:
try:
# Load training samples from disk using scikit's joblib (replacement of pickle)
training_samples_dict = joblib.load("dumped_training_samples.joblib.pkl")
training_samples_raw = training_samples_dict['training_samples_raw']
training_sample_ids = training_samples_dict['training_sample_ids']
training_labels = training_samples_dict['training_labels']
training_categories = training_samples_dict['training_categories']
loading_failed = False
print("Loading dumped_training_samples.npy!")
except IOError:
print("Loading dumped_training_samples.npy failed!")
loading_failed = True
if recompute_features or loading_failed:
training_samples_raw, training_sample_ids, training_labels, training_categories = create_samples(training_profiles, frameManager, featureExtractor)
if save_features:
# Save training samples using scikit's joblib (replacement of pickle)
training_samples_dict = {'training_samples_raw': training_samples_raw,
'training_sample_ids' : training_sample_ids,
'training_labels' : training_labels,
'training_categories' : training_categories}
joblib.dump(training_samples_dict, "dumped_training_samples.joblib.pkl")
# Dump a human readable version
readable_str = np.array(["% 3.5f" % n for n in training_samples_raw.reshape(training_samples_raw.size)])
readable_str = readable_str.reshape(training_samples_raw.shape)
merged_for_readability = np.hstack([training_sample_ids.reshape(-1, 1), training_labels.reshape(-1, 1), readable_str])
np.savetxt('dumped_training_samples_human_readable.csv', merged_for_readability,
fmt='%s', delimiter=', ', comments='# ', newline='\n',
header="Human readable raw features. Binary .npy file is used to store actual training data\n"
"Sample ID, Class label, Diameter, Compressibility, "
"StdDev Matrix 1, StdDev Matrix 5, "
"25 x CM Matrix 1,,,,,,,,,,,,,,,,,,,,,,,,, 25 x CM Matrix 5" )
print("Computed training samples saved!")
global features_available
features_available = True
return np.copy(training_samples_raw), training_labels, training_categories # Return a copy since samples are not rescaled yet
# -------------------------------------------------------------------------------------------------------
# Extracts features from recorded profiles, builds feature vectors and combines them to training samples
# -------------------------------------------------------------------------------------------------------
# |-> Diameter of minimal bounding sphere
# |-> Compressibility / rigidity
# |-> Standard deviation
# |-> Chebyshev moments
def create_samples(profiles, frameManager, featureExtractor):
pmax = 5 # Max moment order
n_features = 1 + 1 + 2*1 + 2*pmax*pmax
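# Feature vector layout: diameter (1) + compressibility (1) + two standard
# deviations (2) + two pmax x pmax Chebyshev moment matrices (2*25 = 50),
# i.e. 54 features in total for pmax = 5.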
categories = {} # Classes and number of its members
labels = [] # Class membership for each sample grasp
sample_ids = [] # Individual grasp
samples = np.empty((0, n_features))
for i, profile in enumerate(profiles):
frameManager.load_profile(profile)
frameManager.set_filter_none()
#frameManager.set_filter_median(1, True)
#---------------------------------
# Simple step detection algorithm
#---------------------------------
# Find all non-zero sequences of both tactile sensor matrices.
# Throw small sequences away; the remaining ones are actual grasps.
# For more elaborate methods see: http://en.wikipedia.org/wiki/Step_detection
numTSFrames = frameManager.get_tsframe_count()
max_matrix_1 = frameManager.get_max_matrix_list(1)
max_matrix_5 = frameManager.get_max_matrix_list(5)
valid_contacts = np.empty([numTSFrames])
valid_contacts.fill(False)
for frameID in xrange(0, numTSFrames):
if (max_matrix_1[frameID] > 0.0 and max_matrix_5[frameID] > 0.0) :
valid_contacts[frameID] = True
thresh_sequence = 20 # Minimum length of a sequence to be considered a "grasp"
grasps = []
for start, stop in contiguous_regions(valid_contacts):
if (stop-start) > thresh_sequence:
grasps.append([start, stop-1])
num_grasps = len(grasps)
profile_subfolder = os.path.basename(os.path.dirname(profile))
class_name = (profile_subfolder.replace("_", " ")) # Class name is defined by the last subfolder's name
if class_name in categories:
categories[class_name] += num_grasps
else:
categories[class_name] = num_grasps
# Compute features for each detected grasp in profile
for grasp in grasps:
(grasp_diameter,
compressibility,
std_dev_matrix_1,
std_dev_matrix_5,
moments_matrix_1,
moments_matrix_5) = compute_features(frameManager, featureExtractor, grasp[0], grasp[1], pmax, max_matrix_1, max_matrix_5)
# Combine features
sample = np.concatenate(( [grasp_diameter],
[compressibility],
[std_dev_matrix_1],
[std_dev_matrix_5],
moments_matrix_1,
moments_matrix_5 )).reshape(1, n_features)
# Add feature vector to sample
samples = np.vstack([samples, sample])
# Give the new sample a name and class membership
labels.append(class_name)
if num_grasps > 1:
sample_ids.append(profile + "_" + str(grasp[0]) + "-" + str(grasp[1]))
else:
sample_ids.append(profile)
return samples, np.asarray(sample_ids), np.asarray(labels), categories
def compute_features(frameManager, featureExtractor, grasp_begin, grasp_end, pmax, max_matrix_1, max_matrix_5):
# Values computed in "calibrate_impression_depth.py"
max_val_matrix_1 = 3554.0
max_val_matrix_5 = 2493.0
impression_depth = 1.0 # Just an estimate of the maximal impression in [mm]
impression_factor_1 = impression_depth / max_val_matrix_1
impression_factor_5 = impression_depth / max_val_matrix_5
# Determine more robust frames of interest (begin and end frame of the grasp)
# by taking the objects diameter into account
# head + tail <= thresh_sequence
head_elem = 10
tail_elem = 10
miniballs = np.empty([grasp_end-grasp_begin+1, 4])
miniballs.fill(None)
#for i, frameID in enumerate(range(grasp_end-tail_elem+1, grasp_end+1)):
for i, frameID in enumerate(range(grasp_begin, grasp_end+1)):
theta = frameManager.get_corresponding_jointangles(frameID)
miniballs[i] = featureExtractor.compute_minimal_bounding_sphere_centroid(frameID, theta)
# Compensate for force dependent sensor matrix impression
diameter = (2*miniballs[:,3] +
max_matrix_1[grasp_begin:grasp_end+1]*impression_factor_1 +
max_matrix_5[grasp_begin:grasp_end+1]*impression_factor_5 )
slice_tail = diameter[-tail_elem:]
end_position = (grasp_end-tail_elem) + find_nearest_idx(slice_tail, np.median(slice_tail))
# Problem:
# The object's initial size cannot be measured accurately enough if the grasp applies torque.
# In that case, the contact surface between object and both sensor matrices is tilted, leading to an
# overestimation of the real diameter. This asymmetry disappears when all forces reach an equilibrium state.
# In order to get more robust object size features, the profile's centroids of the end-position frame
# are used to recalculate the diameter during each step of the grasp.
centroid_matrix_1 = featureExtractor.compute_centroid(end_position, 1)
centroid_matrix_5 = featureExtractor.compute_centroid(end_position, 5)
points = np.array([ [1.0, centroid_matrix_1[0], centroid_matrix_1[1]],
[5.0, centroid_matrix_5[0], centroid_matrix_5[1]]], dtype=np.float64)
miniballs_refined = np.empty([grasp_end-grasp_begin+1, 4])
miniballs_refined.fill(None)
for i, frameID in enumerate(range(grasp_begin, grasp_end+1)):
theta = frameManager.get_corresponding_jointangles(frameID)
miniballs_refined[i] = featureExtractor.compute_minimal_bounding_sphere_points(points, theta)
# Compensate for force dependent sensor matrix impression
diameter_refined = (2*miniballs_refined[:,3] +
max_matrix_1[grasp_begin:grasp_end+1]*impression_factor_1 +
max_matrix_5[grasp_begin:grasp_end+1]*impression_factor_5 )
# Initial position: max diameter of minimal bounding sphere
slice_head = diameter_refined[0:head_elem]
initial_position = grasp_begin + np.nanargmax(slice_head)
# Local indices
initial_position_grasp = initial_position - grasp_begin
end_position_grasp = end_position - grasp_begin
# Compute features
#grasp_diameter = diameter_refined[initial_position]
#grasp_diameter = np.median(diameter_refined)
#grasp_diameter = stats.mode(diameter_refined)[0][0]
grasp_diameter = stats.mode(diameter)[0][0]
compressibility = diameter_refined[initial_position_grasp] - diameter_refined[end_position_grasp] # Change of minimal bounding sphere's size during grasp
std_dev_matrix_1 = featureExtractor.compute_standard_deviation(end_position, 1) # Standard deviation of intensity values (not 2D image moments)
std_dev_matrix_5 = featureExtractor.compute_standard_deviation(end_position, 5)
moments_matrix_1 = featureExtractor.compute_chebyshev_moments(end_position, 1, pmax).reshape(-1) # frameID, matrixID, pmax
moments_matrix_5 = featureExtractor.compute_chebyshev_moments(end_position, 5, pmax).reshape(-1)
return grasp_diameter, compressibility, std_dev_matrix_1, std_dev_matrix_5, moments_matrix_1, moments_matrix_5
#--------------------------------------------
# Main
#--------------------------------------------
training_profile_folder = "training_grasp_profiles_thesis"
frameManager = framemanager_python.FrameManagerWrapper()
featureExtractor = framemanager_python.FeatureExtractionWrapper(frameManager)
# Prepare relevant pressure profiles and corresponding class labels
training_profiles = list_pressure_profiles(training_profile_folder)
# Compute and store / load training samples
(training_samples,
training_labels,
training_categories) = provide_training_data(training_profiles,
frameManager,
featureExtractor,
recompute_features=False,
save_features=True)
# ----------------------
# Feature scaling
# ----------------------
# Rescale samples (Range [0,1])
#feature_max = np.max(training_samples, axis=0, keepdims=True)
#feature_min = np.min(training_samples, axis=0, keepdims=True)
#training_samples = (training_samples-feature_min) / (feature_max-feature_min)
#testing_samples = (testing_samples-feature_min) / (feature_max-feature_min)
# Normalize samples (zero mean)
#feature_mean = np.mean(training_samples, axis=0, keepdims=True)
#feature_max = np.max(training_samples, axis=0, keepdims=True)
#feature_min = np.min(training_samples, axis=0, keepdims=True)
#training_samples = (training_samples-feature_mean) / (feature_max-feature_min)
# Standardize samples (zero mean, one standard deviation)
feature_mean = np.mean(training_samples, axis=0, keepdims=True)
feature_stddev = np.std(training_samples, axis=0, keepdims=True)
training_samples = (training_samples - feature_mean) / feature_stddev
#--------------------------------------------------------
# Transform features using Linear Discriminant Analysis
#--------------------------------------------------------
lda = LDA(n_components=14)
lda.fit(training_samples, training_labels)
training_samples = lda.transform(training_samples)
# --------------------------------------------------------
# Multi-class SVM classification: one-against-one
# --------------------------------------------------------
# There are n_classes * (n_classes - 1) / 2 classifiers in the one-vs-one scheme
# Order of 0 to n classes:
# 0 vs 1, 0 vs 2, ... 0 vs n, 1 vs 2, ..., 1 vs n, ... n-1 vs n
classifier = svm.SVC( C=100, cache_size=200, class_weight=None, coef0=0.0, degree=3,
gamma=0.125, kernel='linear', max_iter=-1, probability=True, random_state=None,
shrinking=True, tol=0.001, verbose=False)
# Fit model
classifier.fit(training_samples, training_labels)
# Store fitted classifier and preprocessing steps to disk for later predictions
classifier_dict = {'classifier': classifier, 'means' : feature_mean, 'stddevs' : feature_stddev, 'LDA' : lda}
joblib.dump(classifier_dict, "dumped_classifier.joblib.pkl")
print("Classifier dumped!")
| gpl-3.0 |
tapomayukh/projects_in_python | classification/Classification_with_kNN/Single_Contact_Classification/Final/results/4-categories/6/test10_cross_validate_categories_6_1200ms.py | 1 | 4360 |
# Principal Component Analysis Code :
from numpy import mean,cov,double,cumsum,dot,linalg,array,rank,size,flipud
from pylab import *
import numpy as np
import matplotlib.pyplot as pp
#from enthought.mayavi import mlab
import scipy.ndimage as ni
import roslib; roslib.load_manifest('sandbox_tapo_darpa_m3')
import rospy
#import hrl_lib.mayavi2_util as mu
import hrl_lib.viz as hv
import hrl_lib.util as ut
import hrl_lib.matplotlib_util as mpu
import pickle
from mvpa.clfs.knn import kNN
from mvpa.datasets import Dataset
from mvpa.clfs.transerror import TransferError
from mvpa.misc.data_generators import normalFeatureDataset
from mvpa.algorithms.cvtranserror import CrossValidatedTransferError
from mvpa.datasets.splitters import NFoldSplitter
import sys
sys.path.insert(0, '/home/tapo/svn/robot1_data/usr/tapo/data_code/Classification/Data/Single_Contact_kNN/6')
from data_6 import Fmat_original
def pca(X):
#get dimensions
num_data,dim = X.shape
#center data
mean_X = X.mean(axis=1)
M = (X-mean_X) # subtract the mean (along columns)
Mcov = cov(M)
print 'PCA - COV-Method used'
val,vec = linalg.eig(Mcov)
#return the projection matrix, the variance and the mean
return vec,val,mean_X, M, Mcov
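# Usage note (an assumption read off the code above): X is treated with
# variables in rows and observations in columns, since the mean is taken
# along axis=1 and cov() defaults to rowvar=1. A sketch:
#   vec, val, mu, M, C = pca(X)   # X: (n_dims, n_samples)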
if __name__ == '__main__':
Fmat = Fmat_original
# Checking the Data-Matrix
m_tot, n_tot = np.shape(Fmat)
print 'Total_Matrix_Shape:',m_tot,n_tot
eigvec_total, eigval_total, mean_data_total, B, C = pca(Fmat)
#print eigvec_total
#print eigval_total
#print mean_data_total
m_eigval_total, n_eigval_total = np.shape(np.matrix(eigval_total))
m_eigvec_total, n_eigvec_total = np.shape(eigvec_total)
m_mean_data_total, n_mean_data_total = np.shape(np.matrix(mean_data_total))
print 'Eigenvalue Shape:',m_eigval_total, n_eigval_total
print 'Eigenvector Shape:',m_eigvec_total, n_eigvec_total
print 'Mean-Data Shape:',m_mean_data_total, n_mean_data_total
#Recall that the cumulative sum of the eigenvalues shows the level of variance accounted by each of the corresponding eigenvectors. On the x axis there is the number of eigenvalues used.
perc_total = cumsum(eigval_total)/sum(eigval_total)
# Reduced Eigen-Vector Matrix according to highest Eigenvalues..(Considering First 20 based on above figure)
W = eigvec_total[:,0:6]
m_W, n_W = np.shape(W)
print 'Reduced Dimension Eigenvector Shape:',m_W, n_W
# Normalizes the data set with respect to its variance (Not an Integral part of PCA, but sometimes useful)
length = len(eigval_total)
s = np.matrix(np.zeros(length)).T
i = 0
while i < length:
s[i] = sqrt(C[i,i])
i = i+1
Z = np.divide(B,s)
m_Z, n_Z = np.shape(Z)
print 'Z-Score Shape:', m_Z, n_Z
#Projected Data:
Y = (W.T)*B # 'B' for my Laptop: otherwise 'Z' instead of 'B'
m_Y, n_Y = np.shape(Y.T)
print 'Transposed Projected Data Shape:', m_Y, n_Y
#Using PYMVPA
PCA_data = np.array(Y.T)
PCA_label_1 = ['Rigid-Fixed']*35 + ['Rigid-Movable']*35 + ['Soft-Fixed']*35 + ['Soft-Movable']*35
PCA_chunk_1 = ['Styrofoam-Fixed']*5 + ['Books-Fixed']*5 + ['Bucket-Fixed']*5 + ['Bowl-Fixed']*5 + ['Can-Fixed']*5 + ['Box-Fixed']*5 + ['Pipe-Fixed']*5 + ['Styrofoam-Movable']*5 + ['Container-Movable']*5 + ['Books-Movable']*5 + ['Cloth-Roll-Movable']*5 + ['Black-Rubber-Movable']*5 + ['Can-Movable']*5 + ['Box-Movable']*5 + ['Rug-Fixed']*5 + ['Bubble-Wrap-1-Fixed']*5 + ['Pillow-1-Fixed']*5 + ['Bubble-Wrap-2-Fixed']*5 + ['Sponge-Fixed']*5 + ['Foliage-Fixed']*5 + ['Pillow-2-Fixed']*5 + ['Rug-Movable']*5 + ['Bubble-Wrap-1-Movable']*5 + ['Pillow-1-Movable']*5 + ['Bubble-Wrap-2-Movable']*5 + ['Pillow-2-Movable']*5 + ['Cushion-Movable']*5 + ['Sponge-Movable']*5
clf = kNN(k=5)
terr = TransferError(clf)
ds1 = Dataset(samples=PCA_data,labels=PCA_label_1,chunks=PCA_chunk_1)
print ds1.samples.shape
cvterr = CrossValidatedTransferError(terr,NFoldSplitter(cvtype=1),enable_states=['confusion'])
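# NFoldSplitter(cvtype=1) performs leave-one-chunk-out cross-validation, so
# each fold holds out all 5 trials of one object (one chunk label above).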
error = cvterr(ds1)
print error
print cvterr.confusion.asstring(description=False)
figure(1)
cvterr.confusion.plot(numbers='True')
#show()
# Variances
figure(2)
title('Variances of PCs')
stem(range(len(perc_total)),perc_total,'--b')
axis([-0.3,30.3,0,1.2])
grid('True')
show()
| mit |
JohnUrban/fast5tools | bin/fast5ReadLengthPlots.py | 1 | 16727 | #!/usr/bin/env python2.7
import h5py, os, sys, csv
import argparse
# General
from glob import glob
import string
from cStringIO import StringIO
from collections import defaultdict
from Bio import SeqIO
from math import log10, log
import numpy as np
# Plotting
import matplotlib.pyplot as plt
# Fast5Tools
from fast5tools.f5class import *
from fast5tools.f5ops import *
from fast5tools.helperops import *
from fast5tools.fileListClass import *
from fast5tools.plotops import *
#################################################
## Argument Parser
#################################################
parser = argparse.ArgumentParser(description = """
Given path(s) to fast5 file(s) and/or directories of fast5s, plot:
- read length histogram
- bins have counts
- cumulative read length histogram
- bins have cumulative counts
- read length sum histogram
- bins have sums of read lengths in that bin
- cumulative read length sum histogram
- bins have cumulative sums of read lengths in each bin
- RL vs Q
TODO:
If a reference dataset is given, the above plots for both, and:
- Subtraction plot:
- proportional_test_bin - proportional_ref_bin
- Log Fold Change plot:
- log(proportional_test_bin / proportional_ref_bin)
John Urban (2015, 2016, 2017, 2018)
""", formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument('fast5', metavar='fast5', nargs='+',
type= str,
help='''Paths to as many fast5 files and/or directories filled with fast5 files as you want.
Assumes all fast5 files have '.fast5' extension.
If inside dir of dirs with .fast5 files, then can just do "*" to get all files from all dirs.''')
parser_not_fast5 = parser.add_mutually_exclusive_group()
parser_not_fast5.add_argument('--fasta', '-fa', action='store_true', default=False,
help='''Looking at a FASTA file or list of FASTA files, not FAST5s''')
parser_not_fast5.add_argument('--fastq', '-fq', action='store_true', default=False,
help='''Looking at a FASTQ file or list of FASTQ files, not FAST5s''')
parser.add_argument('--reference',
type=str, default=None, help='''All files after this flag and before the next, are interpreted as Reference fastA/Q/5 files.
NOTE: Unlike the default datasets that can contain as many files/dirs/fofns as you'd like,
this can only be pointed at 1 object UNLESS you put everything between quotation marks, which allows you to
specify as many reference files/dirs/FOFNs as you'd like.
E.g. --reference "fast5dir1/ fast5dir2/" ''')
parser_ref_not_fast5 = parser.add_mutually_exclusive_group()
parser_ref_not_fast5.add_argument('--reffasta', '-rfa', action='store_true', default=False,
help='''The reference dataset is a FASTA file or list of FASTA files, not FAST5s''')
parser_ref_not_fast5.add_argument('--reffastq', '-rfq', action='store_true', default=False,
help='''The reference dataset is a FASTQ file or list of FASTQ files, not FAST5s''')
parser.add_argument('-r', '--readtype', default='template',
type= str,
help='''Choose type of fasta to get.
Choices: 'template', 'complement', '2d', 'molecule', 'all', 'MoleQual'.
Default: template.
Molecule returns single fasta for each fast5 by following rules:
if 2d present, return 2d.
elif complement present with no 2d, return longer of template or complement.
elif only template present, return template.
'MoleQual' is similar to molecule.
It differs only in choosing between template and complement when a 2D is not present.
Instead of choosing the longer one, it chooses the one with the higher mean quality score.''')
parser.add_argument('--bin-start', dest='bin_start', type=str, default='0', help='''Start binning at given read length. Default: 0 bp.
Can give number as bp, which requires no units - e.g. 500 = 500 bp.
Can also give as kb or Mb, but it requires the unit -- e.g. 500kb or 0.5Mb.
No space between the number and unit.''')
parser.add_argument('--bin-end', dest='bin_end', type=str, default='1Mb', help='''End binning at given read length.
Default: 1Mb.
Note that sometimes providing smaller end values (e.g. 100kb) makes for better plots.
Note that this plot is purposely independent of max read length, as read length distributions typically have long sparse tails, which make these plots front-heavy and end-sparse.
Other avenues (such as reporting the max length) might be better.
Nonetheless, feel free to give huge end values.''')
parser.add_argument('--bin-width', dest='bin_width', type=str, default='1kb', help='''Specify the width of bins in bp. Default: 1000 bp.''')
parser.add_argument('--minlen', type=int, default=0, help='''Only report reads >= minlen. Default: 0 bp.''')
parser.add_argument('--maxlen', type=int, default=int(3e9), help='''Only report reads <= maxlen. Default: 3 billion bp.''')
parser.add_argument('--minq', type=float, default=0, help='''Only report reads with mean quality scores >= Q. Default: 0.''')
parser.add_argument('--maxq', type=float, default=int(10e3), help='''Only report reads with mean quality scores <= Q.
Default: 10000 (this is orders of magnitude higher than normal max which are always < 20)''')
parser.add_argument('-n', '--nfiles', type=int, default=1000000000000,
help = '''This defaults to 1000000000000 in order to use all files (will you ever need to look at more than that?).
However, you can downsample with this option by adjusting this number to get info from the first N files.
Use --random to select N at random from the list.
Aim this script at a specific file for that file's contents.''')
parser.add_argument('-R', '--random', action='store_true', default=False,
help = '''Randomize what files are looked at.''')
parser.add_argument('-S', '--randomseed', type=int, default=False,
help = '''Randomize what files are looked at, but use given seed for reproducibility.''')
parser.add_argument('--filesused', type=str, default='qual_v_pos', help='''
''')
parser.add_argument('-o', '--outdir', type=str, default="./",
help = '''Output directory for tables and plots. Default: current directory.''')
parser.add_argument('--filename', type=str, default='kmer_counts.txt', help='''
For output. Default: kmer_counts.txt. (Formerly defaulted to None).
If a filename is given, filesused will be reported in a similarly named file ending with .filesused.fofn
When --reference used, files will have similar name with reference_ prepended.''')
parser.add_argument('--plotfilesuffix', type=str, default=None, help='''
Suffix and extension for output plots. Default None (PDFs output in outdir using hard-coded prefixes).
Plots will be in specified outdir.
The minimum information to give is the extension (no dot needed) -- e.g. png, jpg, pdf.
Example1: myexperiment.png ((plots will be named plotprefix_myexperiment.png))
Example2: .jpg ((plots will be named plotprefix_.jpg))
Example3: jpg ((plots will be named plotprefix.jpg))
Example4: when None (default), plots will be named plotprefix.pdf''')
parser.add_argument('--notarlite', action='store_true', default=False, help='''The default method (called tarlite) extracts 1 file from a given tarchive at a time, processes it, and deletes it.
This option turns tarlite off, resulting in extracting the entire tarchive before proceeding (and finally deleting it).
It is possible that --notarlite is faster, but at the expense of exceeding file number limits or disk storage quotas.
Nonetheless, the difference in speed is a lot smaller than the difference in space needed.
For example, not using tarlite will require >2*tarchive amount of disk space (i.e. the tar.gz and its extracted contents).
The tarlite method only requires the disk space already taken by the tarchive and enough for 1 additional file at a time.
A corollary is that tarlite just needs to be allowed to form 1 (or a few) files compared to what could be thousands to millions.
''')
parser.add_argument('--tarlite', action='store_true', default=False, help='''This legacy option is outdated.
However, it is kept here to avoid breaking pipelines that make use of it.
The tarlite approach is now default. Specifying this will not change that default behavior.
It will just prevent pipelines from breaking.
However, not specifying this will still also result in the tarlite approach.
Use --notarlite to turn it off.''')
args = parser.parse_args()
## for fa in fasta or fq in fastq or f5 in fast5 -- get all lengths, and meanQ
## make DP objs for RL and Q
if __name__ == "__main__":
# Process Args
args.outdir = process_outdir(args.outdir)
outfile = args.outdir + args.filename if (args.filename is not None) else None
start = interpret_base_length(args.bin_start)
end = interpret_base_length(args.bin_end)
bw = interpret_base_length(args.bin_width)
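# interpret_base_length parses unit suffixes per the option help above,
# e.g. '500' -> 500, '500kb' -> 500000, '0.5Mb' -> 500000 (behavior assumed
# from fast5tools.helperops).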
## Execute
test_lengths, filesused = run_collect_lengths(initial_list=args.fast5, \
readtype=args.readtype, \
nfiles=args.nfiles, \
random=args.random, \
randomseed=args.randomseed, \
notarlite=args.notarlite, \
fasta=args.fasta, \
fastq=args.fastq, \
minlen=args.minlen, \
maxlen=args.maxlen, \
minq=args.minq, \
maxq=args.maxq)
## Get Partition
test = DataPartition(test_lengths, start, end, bw)
## Write
## TODO: writeout partition
## Files used
process_filesused(trigger=args.filename, filesused=filesused, outdir=args.outdir)
## Reference?
do_comparison = False
if args.reference is not None:
do_comparison = True
## Out
refoutfile = args.outdir + 'reference_' + args.filename if (args.filename is not None) else None
## Convert into list:
args.reference = args.reference.strip().split()
## Get object
ref_lengths, refsused = run_collect_lengths(initial_list=args.reference, \
readtype=args.readtype, \
nfiles=args.nfiles, \
random=args.random, \
randomseed=args.randomseed, \
notarlite=args.notarlite, \
fasta=args.fasta, \
fastq=args.fastq, \
minlen=args.minlen, \
maxlen=args.maxlen, \
minq=args.minq, \
maxq=args.maxq)
## Get Partition
ref = DataPartition(ref_lengths, start, end, bw)
## Write
## TODO: writeout partition
## Files used
trigger = 'reference_'+args.filename if args.filename is not None else None
process_filesused(trigger=trigger, filesused=refsused, outdir=args.outdir)
## COMPARATIVE ANALYSES
if do_comparison:
## DETERMINE PLOT SUFFIX
if args.plotfilesuffix is None:
sfx = '.pdf'
else:
if '.' in args.plotfilesuffix:
## assumes format words.ext, makes sfx = _words.ext
sfx = '_' + args.plotfilesuffix
else:
## assumes image type specified (e.g. pdf, jpg, png)
sfx = '.' + args.plotfilesuffix
make_name = make_name_function(pfx=args.outdir, sfx=sfx)
## PLOTTING
## TODO: Add subtraction foldchange stuff
## Counts
general_barplot(x=test.get_breaks()[:-1], height=test.get_counts(), width=bw, edgecolor='k', align='edge', saveas=make_name('test_read_length_bin_counts'))
## CumulativeCounts
general_barplot(x=test.get_breaks()[:-1], height=test.get_cum_counts(), width=bw, edgecolor='k', align='edge', saveas=make_name('test_cumulative_read_length_bin_counts'))
## Data
general_barplot(x=test.get_breaks()[:-1], height=test.get_heights(), width=bw, edgecolor='k', align='edge', saveas=make_name('test_read_length_data_per_bin'))
## Cumulative Data
general_barplot(x=test.get_breaks()[:-1], height=test.get_cum_heights(), width=bw, edgecolor='k', align='edge', saveas=make_name('test_cumulative_read_length_data_per_bin'))
## Proportional Counts
general_barplot(x=test.get_breaks()[:-1], height=test.get_proportion_total_counts(), width=bw, edgecolor='k', align='edge', saveas=make_name('test_proportional_read_length_bin_counts'))
## Proportional CumulativeCounts
general_barplot(x=test.get_breaks()[:-1], height=test.get_proportion_cum_total_counts(), width=bw, edgecolor='k', align='edge', saveas=make_name('test_proportional_cumulative_read_length_bin_counts'))
## Proportional Data
general_barplot(x=test.get_breaks()[:-1], height=test.get_proportion_total_height(), width=bw, edgecolor='k', align='edge', saveas=make_name('test_proportional_read_length_data_per_bin'))
## Proportional Cumulative Data
general_barplot(x=test.get_breaks()[:-1], height=test.get_proportion_cum_total_height(), width=bw, edgecolor='k', align='edge', saveas=make_name('test_proportional_cumulative_read_length_data_per_bin'))
if args.reference is not None:
## Counts
general_barplot(x=ref.get_breaks()[:-1], height=ref.get_counts(), width=bw, edgecolor='k', align='edge', saveas=make_name('ref_read_length_bin_counts'))
## CumulativeCounts
general_barplot(x=ref.get_breaks()[:-1], height=ref.get_cum_counts(), width=bw, edgecolor='k', align='edge', saveas=make_name('ref_cumulative_read_length_bin_counts'))
## Data
general_barplot(x=ref.get_breaks()[:-1], height=ref.get_heights(), width=bw, edgecolor='k', align='edge', saveas=make_name('ref_read_length_data_per_bin'))
## Cumulative Data
general_barplot(x=ref.get_breaks()[:-1], height=ref.get_cum_heights(), width=bw, edgecolor='k', align='edge', saveas=make_name('ref_cumulative_read_length_data_per_bin'))
## Proportional Counts
general_barplot(x=ref.get_breaks()[:-1], height=ref.get_proportion_total_counts(), width=bw, edgecolor='k', align='edge', saveas=make_name('ref_proportional_read_length_bin_counts'))
## Proportional CumulativeCounts
general_barplot(x=ref.get_breaks()[:-1], height=ref.get_proportion_cum_total_counts(), width=bw, edgecolor='k', align='edge', saveas=make_name('ref_proportional_cumulative_read_length_bin_counts'))
## Proportional Data
general_barplot(x=ref.get_breaks()[:-1], height=ref.get_proportion_total_height(), width=bw, edgecolor='k', align='edge', saveas=make_name('ref_proportional_read_length_data_per_bin'))
## Proportional Cumulative Data
general_barplot(x=ref.get_breaks()[:-1], height=ref.get_proportion_cum_total_height(), width=bw, edgecolor='k', align='edge', saveas=make_name('ref_proportional_cumulative_read_length_data_per_bin'))
##SUBTRACTION
general_barplot(x=ref.get_breaks()[:-1], height=test.get_difference_in_proprtional_counts(ref), width=bw, edgecolor='k', align='edge', saveas=make_name('difference_in_read_length_bin_counts'))
general_barplot(x=ref.get_breaks()[:-1], height=test.get_difference_in_proprtional_heights(ref), width=bw, edgecolor='k', align='edge', saveas=make_name('difference_in_read_length_data_per_bin'))
## LOG FOLD CHANGE
general_barplot(x=ref.get_breaks()[:-1], height=test.get_log_fold_change_of_proportional_counts(ref), width=bw, edgecolor='k', align='edge', saveas=make_name('logfoldchange_of_read_length_bin_counts'))
general_barplot(x=ref.get_breaks()[:-1], height=test.get_log_fold_change_of_proportional_heights(ref), width=bw, edgecolor='k', align='edge', saveas=make_name('logfoldchange_of_read_length_data_per_bin'))
| mit |
michellemorales/OpenMM | models/slim/preprocessing/inception_preprocessing.py | 13 | 13369 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Provides utilities to preprocess images for the Inception networks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.ops import control_flow_ops
def apply_with_random_selector(x, func, num_cases):
"""Computes func(x, sel), with sel sampled from [0...num_cases-1].
Args:
x: input Tensor.
func: Python function to apply.
num_cases: Python int32, number of cases to sample sel from.
Returns:
The result of func(x, sel), where func receives the value of the
selector as a python integer, but sel is sampled dynamically.
"""
sel = tf.random_uniform([], maxval=num_cases, dtype=tf.int32)
# Pass the real x only to one of the func calls.
return control_flow_ops.merge([
func(control_flow_ops.switch(x, tf.equal(sel, case))[1], case)
for case in range(num_cases)])[0]
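# Usage sketch (mirrors the call in preprocess_for_train below):
#   image = apply_with_random_selector(
#       image,
#       lambda x, method: tf.image.resize_images(x, [299, 299], method=method),
#       num_cases=4)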
def distort_color(image, color_ordering=0, fast_mode=True, scope=None):
"""Distort the color of a Tensor image.
Each color distortion is non-commutative and thus ordering of the color ops
matters. Ideally we would randomly permute the ordering of the color ops.
Rather than adding that level of complication, we select a distinct ordering
of color ops for each preprocessing thread.
Args:
image: 3-D Tensor containing single image in [0, 1].
color_ordering: Python int, a type of distortion (valid values: 0-3).
fast_mode: Avoids slower ops (random_hue and random_contrast)
scope: Optional scope for name_scope.
Returns:
3-D Tensor color-distorted image on range [0, 1]
Raises:
ValueError: if color_ordering not in [0, 3]
"""
with tf.name_scope(scope, 'distort_color', [image]):
if fast_mode:
if color_ordering == 0:
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
else:
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_brightness(image, max_delta=32. / 255.)
else:
if color_ordering == 0:
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.2)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
elif color_ordering == 1:
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.2)
elif color_ordering == 2:
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.2)
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
elif color_ordering == 3:
image = tf.image.random_hue(image, max_delta=0.2)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
image = tf.image.random_brightness(image, max_delta=32. / 255.)
else:
raise ValueError('color_ordering must be in [0, 3]')
# The random_* ops do not necessarily clamp.
return tf.clip_by_value(image, 0.0, 1.0)
def distorted_bounding_box_crop(image,
bbox,
min_object_covered=0.1,
aspect_ratio_range=(0.75, 1.33),
area_range=(0.05, 1.0),
max_attempts=100,
scope=None):
"""Generates cropped_image using a one of the bboxes randomly distorted.
See `tf.image.sample_distorted_bounding_box` for more documentation.
Args:
image: 3-D Tensor of image (it will be converted to floats in [0, 1]).
bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
where each coordinate is [0, 1) and the coordinates are arranged
as [ymin, xmin, ymax, xmax]. If num_boxes is 0 then it would use the whole
image.
min_object_covered: An optional `float`. Defaults to `0.1`. The cropped
area of the image must contain at least this fraction of any bounding box
supplied.
aspect_ratio_range: An optional list of `floats`. The cropped area of the
image must have an aspect ratio = width / height within this range.
area_range: An optional list of `floats`. The cropped area of the image
must contain a fraction of the supplied image within this range.
max_attempts: An optional `int`. Number of attempts at generating a cropped
region of the image of the specified constraints. After `max_attempts`
failures, return the entire image.
scope: Optional scope for name_scope.
Returns:
A tuple, a 3-D Tensor cropped_image and the distorted bbox
"""
with tf.name_scope(scope, 'distorted_bounding_box_crop', [image, bbox]):
# Each bounding box has shape [1, num_boxes, box coords] and
# the coordinates are ordered [ymin, xmin, ymax, xmax].
# A large fraction of image datasets contain a human-annotated bounding
# box delineating the region of the image containing the object of interest.
# We choose to create a new bounding box for the object which is a randomly
# distorted version of the human-annotated bounding box that obeys an
# allowed range of aspect ratios, sizes and overlap with the human-annotated
# bounding box. If no box is supplied, then we assume the bounding box is
# the entire image.
sample_distorted_bounding_box = tf.image.sample_distorted_bounding_box(
tf.shape(image),
bounding_boxes=bbox,
min_object_covered=min_object_covered,
aspect_ratio_range=aspect_ratio_range,
area_range=area_range,
max_attempts=max_attempts,
use_image_if_no_bounding_boxes=True)
bbox_begin, bbox_size, distort_bbox = sample_distorted_bounding_box
# Crop the image to the specified bounding box.
cropped_image = tf.slice(image, bbox_begin, bbox_size)
return cropped_image, distort_bbox
def preprocess_for_train(image, height, width, bbox,
fast_mode=True,
scope=None):
"""Distort one image for training a network.
Distorting images provides a useful technique for augmenting the data
set during training in order to make the network invariant to aspects
of the image that do not affect the label.
Additionally it would create image_summaries to display the different
transformations applied to the image.
Args:
image: 3-D Tensor of image. If dtype is tf.float32 then the range should be
[0, 1]; otherwise it will be converted to tf.float32, assuming that the range
is [0, MAX], where MAX is largest positive representable number for
int(8/16/32) data type (see `tf.image.convert_image_dtype` for details).
height: integer
width: integer
bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
where each coordinate is [0, 1) and the coordinates are arranged
as [ymin, xmin, ymax, xmax].
fast_mode: Optional boolean, if True avoids slower transformations (i.e.
bi-cubic resizing, random_hue or random_contrast).
scope: Optional scope for name_scope.
Returns:
3-D float Tensor of distorted image used for training with range [-1, 1].
"""
with tf.name_scope(scope, 'distort_image', [image, height, width, bbox]):
if bbox is None:
bbox = tf.constant([0.0, 0.0, 1.0, 1.0],
dtype=tf.float32,
shape=[1, 1, 4])
if image.dtype != tf.float32:
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
# Each bounding box has shape [1, num_boxes, box coords] and
# the coordinates are ordered [ymin, xmin, ymax, xmax].
image_with_box = tf.image.draw_bounding_boxes(tf.expand_dims(image, 0),
bbox)
tf.summary.image('image_with_bounding_boxes', image_with_box)
distorted_image, distorted_bbox = distorted_bounding_box_crop(image, bbox)
# Restore the shape since the dynamic slice based upon the bbox_size loses
# the third dimension.
distorted_image.set_shape([None, None, 3])
image_with_distorted_box = tf.image.draw_bounding_boxes(
tf.expand_dims(image, 0), distorted_bbox)
tf.summary.image('images_with_distorted_bounding_box',
image_with_distorted_box)
# This resizing operation may distort the images because the aspect
# ratio is not respected. We select a resize method in a round robin
# fashion based on the thread number.
# Note that ResizeMethod contains 4 enumerated resizing methods.
# We select only 1 case for fast_mode bilinear.
num_resize_cases = 1 if fast_mode else 4
distorted_image = apply_with_random_selector(
distorted_image,
lambda x, method: tf.image.resize_images(x, [height, width], method=method),
num_cases=num_resize_cases)
tf.summary.image('cropped_resized_image',
tf.expand_dims(distorted_image, 0))
# Randomly flip the image horizontally.
distorted_image = tf.image.random_flip_left_right(distorted_image)
# Randomly distort the colors. There are 4 ways to do it.
distorted_image = apply_with_random_selector(
distorted_image,
lambda x, ordering: distort_color(x, ordering, fast_mode),
num_cases=4)
tf.summary.image('final_distorted_image',
tf.expand_dims(distorted_image, 0))
distorted_image = tf.subtract(distorted_image, 0.5)
distorted_image = tf.multiply(distorted_image, 2.0)
return distorted_image
def preprocess_for_eval(image, height, width,
central_fraction=0.875, scope=None):
"""Prepare one image for evaluation.
If height and width are specified it would output an image with that size by
applying resize_bilinear.
If central_fraction is specified it would crop the central fraction of the
input image.
Args:
image: 3-D Tensor of image. If dtype is tf.float32 then the range should be
[0, 1]; otherwise it will be converted to tf.float32, assuming that the range
is [0, MAX], where MAX is largest positive representable number for
int(8/16/32) data type (see `tf.image.convert_image_dtype` for details)
height: integer
width: integer
central_fraction: Optional Float, fraction of the image to crop.
scope: Optional scope for name_scope.
Returns:
3-D float Tensor of prepared image.
"""
with tf.name_scope(scope, 'eval_image', [image, height, width]):
if image.dtype != tf.float32:
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
# Crop the central region of the image with an area containing 87.5% of
# the original image.
if central_fraction:
image = tf.image.central_crop(image, central_fraction=central_fraction)
if height and width:
# Resize the image to the specified height and width.
image = tf.expand_dims(image, 0)
image = tf.image.resize_bilinear(image, [height, width],
align_corners=False)
image = tf.squeeze(image, [0])
image = tf.subtract(image, 0.5)
image = tf.multiply(image, 2.0)
return image
def preprocess_image(image, height, width,
is_training=False,
bbox=None,
fast_mode=True):
"""Pre-process one image for training or evaluation.
Args:
image: 3-D Tensor [height, width, channels] with the image.
height: integer, image expected height.
width: integer, image expected width.
is_training: Boolean. If true, transform the image for training;
otherwise transform it for evaluation.
bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
where each coordinate is [0, 1) and the coordinates are arranged as
[ymin, xmin, ymax, xmax].
fast_mode: Optional boolean, if True avoids slower transformations.
Returns:
3-D float Tensor containing an appropriately scaled image
Raises:
ValueError: if user does not provide bounding box
"""
if is_training:
return preprocess_for_train(image, height, width, bbox, fast_mode)
else:
return preprocess_for_eval(image, height, width)
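# Example usage (a sketch; `raw_image` is assumed to be a decoded image tensor):
#   processed = preprocess_image(raw_image, 299, 299, is_training=True)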
| gpl-2.0 |
probcomp/cgpm | src/factor/factor.py | 1 | 13620 | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import OrderedDict
import numpy as np
import sklearn.decomposition
from cgpm.cgpm import CGpm
from cgpm.utils import general as gu
from cgpm.utils import mvnormal as multivariate_normal
class FactorAnalysis(CGpm):
"""Factor analysis model with continuous latent variables z in a low
dimensional space. The generative model for a vector x is
z ~ Normal(0, I) where z \in R^L.
e ~ Normal(0, Psi) where Psi = diag(v_1,...,v_D)
    x = W.z + mux + e where W \in R^(DxL) and mux \in R^D, learned by EM.
From standard results (Murphy Section 12.1)
z ~ Normal(0, I) Prior.
x|z ~ Normal(W.z + mux, Psi) Likelihood.
x ~ Normal(mux, W.W'+Psi) Marginal.
z|x ~ Normal(m, S) Posterior.
S = inv(I + W'.inv(Psi).W) (covariance)
m = S(W'.inv(Psi).(x-mux)) (mean)
The full joint distribution over [z,x] is then
The mean of [z,x] is [0, mux]
The covariance of [z,x] is (in block form)
I W'
(LxL) (LxD)
W W.W' + Psi
(DxL) (DxD)
    where the cross-covariance cov(z,x) = W' is computed directly
cov(z,x) = cov(z, W.z + mux + e)
= cov(z, W.z) + cov(z, mux) + cov(z, e)
= cov(z, W.z)
= cov(z,z).W'
= I*W'
= W'
Exercise: Confirm that expression for posterior z|x is consistent with
conditioning directly on the joint [z,x] using Schur complement
(Hint: see test suite).
    The latent variables are exposed as output variables, but they cannot be
    incorporated as observations.
"""
def __init__(self, outputs, inputs, L=None, distargs=None, params=None,
rng=None):
# Default parameter settings.
if params is None:
params = {}
if distargs is None:
distargs = {}
# Entropy.
if rng is None:
rng = gu.gen_rng(1)
# No inputs.
if inputs:
raise ValueError('FactorAnalysis rejects inputs: %s.' % inputs)
# Correct outputs.
if len(outputs) < 2:
raise ValueError('FactorAnalysis needs >= 2 outputs: %s.' % outputs)
if len(set(outputs)) != len(outputs):
raise ValueError('Duplicate outputs: %s.' % outputs)
# Find low dimensional space.
if L is None:
raise ValueError('Specify latent dimension L: %s.' % L)
if L == 0:
raise ValueError('Latent dimension at least 1: %s.' % L)
if 'outputs' in distargs and any(s != 'numerical'
for s in distargs['outputs']['stattypes']):
raise ValueError('Factor non-numerical outputs: %s.' % distargs)
# Observable and latent variable indexes.
D = len(outputs[:-L])
if D < L:
raise ValueError(
'Latent dimension exceeds observed dimension: (%s,%s)'
% (outputs[:-L], outputs[-L:]))
# Parameters.
mux = params.get('mux', np.zeros(D))
Psi = params.get('Psi', np.eye(D))
W = params.get('W', np.zeros((D,L)))
# Build the object.
self.rng = rng
# Dimensions.
self.L = L
self.D = D
# Variable indexes.
self.outputs = outputs
self.observables = outputs[:-self.L]
self.latents = set(outputs[-self.L:])
self.inputs = []
self.output_mapping = {c:i for i,c in enumerate(self.outputs)}
# Dataset.
self.data = OrderedDict()
self.N = 0
# Parameters of Factor Analysis.
self.mux = np.asarray(mux)
self.Psi = np.asarray(Psi)
self.W = np.asarray(W)
# Parameters of joint distribution [x,z].
self.mu, self.cov = self.joint_parameters()
# Internal factor analysis model.
self.fa = None
def incorporate(self, rowid, observation, inputs=None):
# No duplicate observation.
if rowid in self.data:
raise ValueError('Already observed: %d.' % rowid)
# No inputs.
if inputs:
raise ValueError('No inputs allowed: %s.' % inputs)
if not observation:
raise ValueError('No observation specified: %s.' % observation)
# No unknown variables.
if any(q not in self.outputs for q in observation):
raise ValueError('Unknown variables: (%s,%s).'
% (observation, self.outputs))
# No latent variables.
if any(q in self.latents for q in observation):
raise ValueError('Cannot incorporate latent vars: (%s,%s,%s).'
% (observation, self.outputs, self.latents))
# Incorporate observed observable variables.
x = [observation.get(i, np.nan) for i in self.observables]
# Update dataset and counts.
self.data[rowid] = x
self.N += 1
def unincorporate(self, rowid):
try:
del self.data[rowid]
except KeyError:
raise ValueError('No such observation: %d.' % rowid)
self.N -= 1
def logpdf(self, rowid, targets, constraints=None, inputs=None):
# XXX Deal with observed rowid.
constraints = self.populate_constraints(rowid, targets, constraints)
if inputs:
raise ValueError('Prohibited inputs: %s' % (inputs,))
if not targets:
raise ValueError('No targets: %s' % (targets,))
if any(q not in self.outputs for q in targets):
raise ValueError('Unknown targets: %s' % (targets,))
if any(q in constraints for q in targets):
raise ValueError('Duplicate variable: %s, %s'
% (targets, constraints,))
# Reindex variables.
targets_r = self.reindex(targets)
constraints_r = self.reindex(constraints)
# Retrieve conditional distribution.
muG, covG = FactorAnalysis.mvn_condition(
self.mu, self.cov, targets_r.keys(), constraints_r)
# Compute log density.
x = np.array(targets_r.values())
return multivariate_normal.logpdf(x, muG, covG)
def simulate(self, rowid, targets, constraints=None, inputs=None, N=None):
# XXX Deal with observed rowid.
constraints = self.populate_constraints(rowid, targets, constraints)
if inputs:
raise ValueError('Prohibited inputs: %s' % (inputs,))
if not targets:
raise ValueError('No targets: %s' % (targets,))
if any(q not in self.outputs for q in targets):
raise ValueError('Unknown targets: %s' % (targets,))
if any(q in constraints for q in targets):
raise ValueError('Duplicate variable: %s, %s'
% (targets, constraints,))
# Reindex variables.
targets_r = self.reindex(targets)
constraints_r = self.reindex(constraints)
# Retrieve conditional distribution.
muG, covG = FactorAnalysis.mvn_condition(
self.mu, self.cov, targets_r, constraints_r)
# Generate samples.
sample = self.rng.multivariate_normal(mean=muG, cov=covG, size=N)
def get_sample(samp):
if isinstance(samp, float):
samp = [samp]
assert len(targets) == len(samp)
return dict(zip(targets, samp))
return get_sample(sample) if N is None else map(get_sample, sample)
def logpdf_score(self):
def compute_logpdf(x):
assert len(x) == self.D
targets = {i:v for i,v in enumerate(x) if not np.isnan(v)}
return self.logpdf(None, targets)
        return sum(compute_logpdf(x) for x in self.data.values())
def transition(self, N=None):
X = np.asarray(self.data.values())
# Only run inference on observations without missing entries.
self.fa = sklearn.decomposition.FactorAnalysis(n_components=self.L)
self.fa.fit(X[~np.any(np.isnan(X), axis=1)])
        assert self.fa.components_.shape == (self.L, self.D)
# Update parameters of Factor Analysis.
self.Psi = np.diag(self.fa.noise_variance_)
self.mux = self.fa.mean_
self.W = np.transpose(self.fa.components_)
self.mu, self.cov = self.joint_parameters()
def populate_constraints(self, rowid, targets, constraints):
if constraints is None:
constraints = {}
if rowid in self.data:
values = self.data[rowid]
assert len(values) == len(self.outputs[:self.D])
observations = {
output : value
for output, value in zip(self.outputs[:self.D], values)
if not np.isnan(value)
and output not in targets
and output not in constraints
}
constraints = gu.merged(constraints, observations)
return constraints
# --------------------------------------------------------------------------
# Internal.
def get_params(self):
return {
'mu': self.mu,
'Psi': self.Psi,
'W': self.W
}
@staticmethod
def name():
return 'low_dimensional_mvn'
@staticmethod
def is_continuous():
return True
@staticmethod
def is_conditional():
return False
@staticmethod
def is_numeric():
return True
# --------------------------------------------------------------------------
# Helper.
def reindex(self, variables):
# Reindex an output variable to its index in self.mu
# self.mu has as the first L items the last L items of self.outputs
# and as the remaining D items the first D items of self.outputs.
# The following diagram is useful:
# self.outputs: 12 14 -7 5 | 11 4 3
# <---D=4--->|<--L=3-->
# raw indices: 0 1 2 3 | 4 5 6
# reindexed: 3 4 5 6 | 0 1 2
assert isinstance(variables, (list, dict))
def convert(q):
i = self.output_mapping[q]
return i - self.D if q in self.latents else i + self.L
indexes = [convert(q) for q in variables]
if isinstance(variables, list):
return indexes
else:
return dict(zip(indexes, variables.values()))
def joint_parameters(self):
mean = np.concatenate((np.zeros(self.L), self.mux))
cov = np.row_stack((
np.column_stack((np.eye(self.L), self.W.T)),
np.column_stack((self.W, np.dot(self.W, self.W.T) + self.Psi))
))
return mean, cov
@staticmethod
def mvn_marginalize(mu, cov, query, evidence):
Q, E = query, evidence
# Retrieve means.
muQ = mu[Q]
muE = mu[E]
# Retrieve covariances.
covQ = cov[Q][:,Q]
covE = cov[E][:,E]
covJ = cov[Q][:,E]
covQE = np.row_stack((
np.column_stack((covQ, covJ)),
np.column_stack((covJ.T, covE))
))
assert np.allclose(covQE, covQE.T)
return muQ, muE, covQ, covE, covJ
@staticmethod
def mvn_condition(mu, cov, query, evidence):
assert isinstance(query, list)
assert isinstance(evidence, dict)
assert len(mu) == cov.shape[0] == cov.shape[1]
assert len(query) + len(evidence) <= len(mu)
# Extract indexes and values from evidence.
Ei, Ev = evidence.keys(), evidence.values()
muQ, muE, covQ, covE, covJ = \
FactorAnalysis.mvn_marginalize(mu, cov, query, Ei)
        # Invoke Fact 4 from the reference below, where G means "given".
# http://web4.cs.ucl.ac.uk/staff/C.Bracegirdle/bayesTheoremForGaussians.pdf
P = np.dot(covJ, np.linalg.inv(covE))
muG = muQ + np.dot(P, Ev - muE)
covG = covQ - np.dot(P, covJ.T)
return muG, covG
# --------------------------------------------------------------------------
# Serialization.
def to_metadata(self):
metadata = dict()
metadata['outputs'] = self.outputs
metadata['inputs'] = self.inputs
metadata['N'] = self.N
metadata['L'] = self.L
metadata['data'] = self.data.items()
        # Store parameters as lists for JSON serialization.
metadata['params'] = dict()
metadata['params']['mux'] = self.mux.tolist()
metadata['params']['Psi'] = self.Psi.tolist()
metadata['params']['W'] = self.W.tolist()
metadata['factory'] = ('cgpm.factor.factor', 'FactorAnalysis')
return metadata
@classmethod
def from_metadata(cls, metadata, rng=None):
if rng is None:
rng = gu.gen_rng(0)
fact = cls(
outputs=metadata['outputs'],
inputs=metadata['inputs'],
L=metadata['L'],
params=metadata['params'],
rng=rng)
fact.data = OrderedDict(metadata['data'])
fact.N = metadata['N']
return fact
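# A small self-check sketch (illustrative addition, not part of the original
# API): conditioning a 2-D Gaussian with mvn_condition reproduces the
# textbook formulas mu1 + (s12/s22)*(x2 - mu2) and s11 - s12^2/s22, as the
# "Exercise" in the class docstring suggests.
def _example_mvn_condition():
    mu = np.array([0., 1.])
    cov = np.array([[2., 0.6], [0.6, 1.]])
    muG, covG = FactorAnalysis.mvn_condition(mu, cov, [0], {1: 2.0})
    assert np.allclose(muG, 0.6)    # 0 + (0.6/1)*(2 - 1)
    assert np.allclose(covG, 1.64)  # 2 - 0.6**2/1
    return muG, covG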
| apache-2.0 |
arabenjamin/scikit-learn | sklearn/covariance/tests/test_robust_covariance.py | 212 | 3359 | # Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Virgile Fritsch <virgile.fritsch@inria.fr>
#
# License: BSD 3 clause
import numpy as np
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.validation import NotFittedError
from sklearn import datasets
from sklearn.covariance import empirical_covariance, MinCovDet, \
EllipticEnvelope
X = datasets.load_iris().data
X_1d = X[:, 0]
n_samples, n_features = X.shape
def test_mcd():
# Tests the FastMCD algorithm implementation
# Small data set
# test without outliers (random independent normal data)
launch_mcd_on_dataset(100, 5, 0, 0.01, 0.1, 80)
# test with a contaminated data set (medium contamination)
launch_mcd_on_dataset(100, 5, 20, 0.01, 0.01, 70)
# test with a contaminated data set (strong contamination)
launch_mcd_on_dataset(100, 5, 40, 0.1, 0.1, 50)
# Medium data set
launch_mcd_on_dataset(1000, 5, 450, 0.1, 0.1, 540)
# Large data set
launch_mcd_on_dataset(1700, 5, 800, 0.1, 0.1, 870)
# 1D data set
launch_mcd_on_dataset(500, 1, 100, 0.001, 0.001, 350)
def launch_mcd_on_dataset(n_samples, n_features, n_outliers, tol_loc, tol_cov,
tol_support):
rand_gen = np.random.RandomState(0)
data = rand_gen.randn(n_samples, n_features)
# add some outliers
outliers_index = rand_gen.permutation(n_samples)[:n_outliers]
outliers_offset = 10. * \
(rand_gen.randint(2, size=(n_outliers, n_features)) - 0.5)
data[outliers_index] += outliers_offset
inliers_mask = np.ones(n_samples).astype(bool)
inliers_mask[outliers_index] = False
pure_data = data[inliers_mask]
# compute MCD by fitting an object
mcd_fit = MinCovDet(random_state=rand_gen).fit(data)
T = mcd_fit.location_
S = mcd_fit.covariance_
H = mcd_fit.support_
# compare with the estimates learnt from the inliers
error_location = np.mean((pure_data.mean(0) - T) ** 2)
assert(error_location < tol_loc)
error_cov = np.mean((empirical_covariance(pure_data) - S) ** 2)
assert(error_cov < tol_cov)
assert(np.sum(H) >= tol_support)
assert_array_almost_equal(mcd_fit.mahalanobis(data), mcd_fit.dist_)
def test_mcd_issue1127():
# Check that the code does not break with X.shape = (3, 1)
# (i.e. n_support = n_samples)
rnd = np.random.RandomState(0)
X = rnd.normal(size=(3, 1))
mcd = MinCovDet()
mcd.fit(X)
def test_outlier_detection():
rnd = np.random.RandomState(0)
X = rnd.randn(100, 10)
clf = EllipticEnvelope(contamination=0.1)
assert_raises(NotFittedError, clf.predict, X)
assert_raises(NotFittedError, clf.decision_function, X)
clf.fit(X)
y_pred = clf.predict(X)
decision = clf.decision_function(X, raw_values=True)
decision_transformed = clf.decision_function(X, raw_values=False)
assert_array_almost_equal(
decision, clf.mahalanobis(X))
assert_array_almost_equal(clf.mahalanobis(X), clf.dist_)
assert_almost_equal(clf.score(X, np.ones(100)),
(100 - y_pred[y_pred == -1].size) / 100.)
assert(sum(y_pred == -1) == sum(decision_transformed < 0))
| bsd-3-clause |
CodingCat/mxnet | benchmark/python/sparse/sparse_op.py | 10 | 9204 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import ctypes
from mxnet.test_utils import *
import scipy.sparse as sp
import os
import time
import argparse
from mxnet.base import check_call, _LIB
from mxnet.test_utils import get_bz2_data
from util import estimate_density
parser = argparse.ArgumentParser(description="Benchmark sparse operators",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--num-omp-threads', type=int, default=1, help='number of omp threads to set in MXNet')
args = parser.parse_args()
# some data information
kdda = {
'data_mini': 'kdda.t.mini',
'data_name': 'kdda.t',
'data_origin_name': 'kdda.t.bz2',
'url': "https://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/kdda.t.bz2",
'feature_dim': 20216830,
'm': 200,
'batch_size': [64]
}
avazu = {
'data_mini': 'avazu-app.t.mini',
'data_name': 'avazu-app.t',
'data_origin_name': 'avazu-app.t.bz2',
'url': "https://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/avazu-app.t.bz2",
'feature_dim': 1000000,
'm': 500,
'batch_size': [64, 128]
}
def measure_cost(repeat, f, *args, **kwargs):
# start bench
start = time.time()
results = []
for i in range(repeat):
results.append(f(*args, **kwargs))
for result in results:
result.wait_to_read()
end = time.time()
diff = end - start
return diff / repeat
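# A hedged usage sketch (shapes are arbitrary assumptions): time a dense dot
# product; the wait_to_read() calls inside measure_cost force the
# asynchronous results before the clock is stopped.
def example_measure_dot(repeat=5):
    a = mx.nd.random.uniform(shape=(256, 256))
    b = mx.nd.random.uniform(shape=(256, 256))
    return measure_cost(repeat, mx.nd.dot, a, b)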
def test_dot_real(data_dict):
def get_iter(path, data_shape, batch_size):
data_train = mx.io.LibSVMIter(data_libsvm=path,
data_shape=data_shape,
batch_size=batch_size)
data_iter = iter(data_train)
return data_iter
data_dir = os.path.join(os.getcwd(), 'data')
path = os.path.join(data_dir, data_dict['data_name'])
if not os.path.exists(path):
get_bz2_data(
data_dir,
data_dict['data_name'],
data_dict['url'],
data_dict['data_origin_name']
)
assert os.path.exists(path)
k = data_dict['feature_dim']
m = data_dict['m']
density = estimate_density(path, data_dict['feature_dim'])
mini_path = os.path.join(data_dir, data_dict['data_mini'])
if not os.path.exists(mini_path):
os.system("head -n 2000 %r > %r" % (path, mini_path))
assert os.path.exists(mini_path)
print "Running Benchmarking on %r data" % data_dict['data_mini']
for batch_size in data_dict['batch_size']: # iterator through different batch size of choice
print "batch_size is %d" % batch_size
# model
data_shape = (k, )
train_iter = get_iter(mini_path, data_shape, batch_size)
weight = mx.nd.random.uniform(low=0, high=1, shape=(k, m))
csr_data = []
dns_data = []
num_batch = 0
for batch in train_iter:
data = train_iter.getdata()
csr_data.append(data)
dns_data.append(data.tostype('default'))
num_batch += 1
bag_of_data = [csr_data, dns_data]
num_repeat = 5
costs = []
for d in bag_of_data:
weight.wait_to_read()
cost = 0.
count = 0
for d_batch in d:
d_batch.wait_to_read()
cost += measure_cost(num_repeat, mx.nd.dot, d_batch, weight)
count += 1
costs.append(cost/count)
t_sparse = costs[0]
t_dense = costs[1]
ratio = t_dense / t_sparse
print('density(%)\tn\tm\tk\tt_dense/t_sparse\tt_dense\tt_sparse')
fmt = "%0.4f\t\t%d\t%d\t%d\t%0.2f\t\t\t%0.4f\t%0.6f"
print(fmt % (density * 100, batch_size, m, k, ratio, t_dense, t_sparse))
def test_dot_synthetic():
"""benchmark mx.nd.dot(sparse_ndarray, dense_ndarray) with given density.
`t_sparse` is the time cost of dot(csr, dns), while `t_dense` is the time cost
of dot(dns, dns), with the same matrix except that it is in default storage type.
"""
def measure_cost_forward_baseline(repeat, dot, lhs, rhs):
start = time.time()
for i in range(repeat):
dot(lhs, rhs)
end = time.time()
diff = end - start
return diff / repeat
def measure_cost_backward_baseline(repeat, dot, transpose, lhs, rhs):
start = time.time()
for i in range(repeat):
dot(transpose(lhs), rhs)
end = time.time()
diff = end - start
return diff / repeat
def bench_dot_forward(m, k, n, density, ctx, repeat):
set_default_context(ctx)
dns = mx.nd.random.uniform(shape=(k, n)).copyto(ctx)
data_shape = (m, k)
csr_data = rand_ndarray(data_shape, 'csr', density)
dns_data = csr_data.tostype('default')
rhs_dns_np = dns.asnumpy()
lhs_csr_sp = sp.csr_matrix(dns_data.asnumpy()) # csr in scipy
        lhs_dns_np = lhs_csr_sp.toarray()  # dense numpy copy for the baseline
data = [dns_data, csr_data]
costs = []
for d in data:
dns.wait_to_read()
d.wait_to_read()
cost = measure_cost(repeat, mx.nd.dot, d, dns)
costs.append(cost)
ratio = costs[0] / costs[1]
costs_baseline = []
cost = measure_cost_forward_baseline(repeat, np.dot, lhs_dns_np, rhs_dns_np)
costs_baseline.append(cost)
cost = measure_cost_forward_baseline(repeat, sp.spmatrix.dot, lhs_csr_sp, rhs_dns_np)
costs_baseline.append(cost)
ratio_baseline = costs_baseline[0] / costs_baseline[1]
fmt = "%0.1f\t\t%s\t%d\t%d\t%d\t%0.2f\t\t\t%0.2f\t%0.5f\t\t%0.2f\t\t\t\t%0.6f\t%0.5f"
print(fmt % (density * 100, str(ctx), n, m, k, ratio, costs[0], costs[1],
ratio_baseline, costs_baseline[0], costs_baseline[1]))
def bench_dot_backward(m, k, n, density, ctx, repeat):
set_default_context(ctx)
dns = mx.nd.random.uniform(shape=(m, n)).copyto(ctx)
data_shape = (m, k)
csr_data = rand_ndarray(data_shape, 'csr', density)
dns_data = csr_data.tostype('default')
rhs_dns_np = dns.asnumpy()
lhs_csr_sp = sp.csr_matrix(dns_data.asnumpy())
        lhs_dns_np = lhs_csr_sp.toarray()  # dense numpy copy for the baseline
data = [dns_data, csr_data]
costs = []
for d in data:
dns.wait_to_read()
d.wait_to_read()
cost = measure_cost(repeat, mx.nd.dot, d, dns, transpose_a=True)
costs.append(cost)
ratio = costs[0] / costs[1]
costs_baseline = []
cost = measure_cost_backward_baseline(repeat, np.dot, np.transpose, lhs_dns_np, rhs_dns_np)
costs_baseline.append(cost)
cost = measure_cost_backward_baseline(repeat, sp.spmatrix.dot, sp.spmatrix.transpose, lhs_csr_sp, rhs_dns_np)
costs_baseline.append(cost)
ratio_baseline = costs_baseline[0] / costs_baseline[1]
fmt = "%0.1f\t\t%s\t%d\t%d\t%d\t%0.2f\t\t\t%0.2f\t%0.5f\t\t%0.2f\t\t\t\t%0.6f\t%0.5f"
print(fmt % (density * 100, str(ctx), n, m, k, ratio, costs[0], costs[1],
ratio_baseline, costs_baseline[0], costs_baseline[1]))
print("A = sparse NDArray of shape(m, k)")
print("B = dense NDArray of shape(k, n)")
print("dot_forward\tdot(csr, dns)")
print('density(%)\tcontext\tn\tm\tk\tt_dense/t_sparse\tt_dense\tt_sparse'
'\tt_scipy_dense/t_scipy_sparse\tt_scipy_dense\tt_scipy_sparse')
check_call(_LIB.MXSetNumOMPThreads(ctypes.c_int(args.num_omp_threads)))
# TODO(haibin) make these runtime options
m = 512
k = [50000, 100000]
n = [64, 128]
density = [1.00, 0.90, 0.70, 0.50, 0.30, 0.20, 0.10, 0.07, 0.05, 0.02, 0.01, 0.005, 0.001]
num_repeat = 10
# contexts = [mx.cpu(), mx.gpu(0)]
contexts = [mx.cpu()]
for i in range(2):
for ctx in contexts:
for den in density:
bench_dot_forward(m, k[i], n[i], den, ctx, num_repeat)
print("dot_backward\tdot(csr.T, dns)")
print('density(%)\tcontext\tn\tm\tk\tt_dense/t_sparse\tt_dense\tt_sparse'
'\tt_scipy_dense/t_scipy_sparse\tt_scipy_dense\tt_scipy_sparse')
for i in range(2):
for ctx in contexts:
for den in density:
bench_dot_backward(m, k[i], n[i], den, ctx, num_repeat)
if __name__ == "__main__":
test_dot_real(avazu)
test_dot_real(kdda)
test_dot_synthetic()
| apache-2.0 |
jlnh/SeizurePrediction | seizure/tasks.py | 1 | 23718 | from collections import namedtuple
import os.path
import numpy as np
import pylab as pl
import scipy.io
import common.time as time
from sklearn import cross_validation, preprocessing
from sklearn.metrics import roc_curve, auc
from scipy.signal import resample
from sklearn.linear_model import LogisticRegression as LR
from sklearn.isotonic import IsotonicRegression as IR
from copy import deepcopy
from matplotlib.backends.backend_pdf import PdfPages
TaskCore = namedtuple('TaskCore', ['cached_data_loader', 'data_dir', 'target', 'pipeline', 'classifier_name',
'classifier', 'normalize', 'gen_preictal', 'cv_ratio', 'plot2file'])
class Task(object):
"""
A Task computes some work and outputs a dictionary which will be cached on disk.
If the work has been computed before and is present in the cache, the data will
    simply be loaded from disk instead of being recomputed.
"""
def __init__(self, task_core):
self.task_core = task_core
def filename(self):
raise NotImplementedError("Implement this")
def run(self):
return self.task_core.cached_data_loader.load(self.filename(), self.load_data)
class LoadpreictalDataTask(Task):
"""
    Load the preictal mat files 1 by 1, transform each clip of a segment
    through the pipeline, and return data in the format {'X': X, 'y': y}
"""
def filename(self):
return 'data_preictal_%s_%s' % (self.task_core.target, self.task_core.pipeline.get_name())
def load_data(self):
        return parse_input_data(self.task_core.data_dir, self.task_core.target, 'preictal', self.task_core.pipeline,
                                gen_preictal=self.task_core.gen_preictal)
class LoadInterictalDataTask(Task):
"""
    Load the interictal mat files 1 by 1, transform each clip of a segment
    through the pipeline, and return data in the format {'X': X, 'y': y}
"""
def filename(self):
return 'data_interictal_%s_%s' % (self.task_core.target, self.task_core.pipeline.get_name())
def load_data(self):
return parse_input_data(self.task_core.data_dir, self.task_core.target, 'interictal', self.task_core.pipeline)
class LoadTestDataTask(Task):
"""
    Load the test mat files 1 by 1, transform each clip of a segment
    through the pipeline, and return data in the format {'X': X}
"""
def filename(self):
return 'data_test_%s_%s' % (self.task_core.target, self.task_core.pipeline.get_name())
def load_data(self):
return parse_input_data(self.task_core.data_dir, self.task_core.target, 'test', self.task_core.pipeline)
class TrainingDataTask(Task):
"""
    Create a training set and a cross-validation set from the transformed preictal and interictal data.
"""
def filename(self):
return None # not cached, should be fast enough to not need caching
def load_data(self):
preictal_data = LoadpreictalDataTask(self.task_core).run()
interictal_data = LoadInterictalDataTask(self.task_core).run()
return prepare_training_data(preictal_data, interictal_data, self.task_core.cv_ratio)
class CrossValidationScoreTask(Task):
"""
Run a classifier over a training set, and give a cross-validation score.
"""
def filename(self):
return 'score_%s_%s_%s' % (self.task_core.target, self.task_core.pipeline.get_name(), self.task_core.classifier_name)
def load_data(self):
data = TrainingDataTask(self.task_core).run()
        classifier_data = train_classifier(self.task_core.classifier, self.task_core.plot2file, data,
                                           normalize=self.task_core.normalize)
del classifier_data['classifier'] # save disk space
return classifier_data
class TrainClassifierTask(Task):
"""
Run a classifier over the complete data set (training data + cross-validation data combined)
and save the trained models.
"""
def filename(self):
return 'classifier_%s_%s_%s' % (self.task_core.target, self.task_core.pipeline.get_name(), self.task_core.classifier_name)
def load_data(self):
data = TrainingDataTask(self.task_core).run()
return train_classifier(self.task_core.classifier, self.task_core.plot2file, data, use_all_data=True, normalize=self.task_core.normalize)
class MakePredictionsTask(Task):
"""
Make predictions on the test data.
"""
def filename(self):
return 'predictions_%s_%s_%s' % (self.task_core.target, self.task_core.pipeline.get_name(), self.task_core.classifier_name)
def load_data(self):
data = TrainingDataTask(self.task_core).run()
y_classes = data.y_classes
del data
classifier_data = TrainClassifierTask(self.task_core).run()
test_data = LoadTestDataTask(self.task_core).run()
X_test = flatten(test_data.X)
return make_predictions(self.task_core.target, self.task_core.plot2file, X_test, y_classes, classifier_data)
class GetCrossSubjectDataTask(Task):
"""
    Assemble the training and test data across all subjects
    and cache the combined matrices.
"""
def filename(self):
return 'assemble_%s_%s' % (self.task_core.target, self.task_core.pipeline.get_name())
def load_data(self):
def concat(a, b):
return np.concatenate((a, b), axis=0)
preictal_data = LoadpreictalDataTask(self.task_core).run()
interictal_data = LoadInterictalDataTask(self.task_core).run()
test_data = LoadTestDataTask(self.task_core).run()
preictal_X, preictal_y = flatten(preictal_data.X), preictal_data.y
interictal_X, interictal_y = flatten(interictal_data.X), interictal_data.y
X_train = concat(preictal_X, interictal_X)
y_train = concat(preictal_y, interictal_y)
X_test = flatten(test_data.X)
return {
'X_train': X_train,
'y_train': y_train,
'X_test': X_test
}
class TrainCrossSubjectClassifierTask(Task):
"""
    Run a cross-subject classifier over the combined data of all subjects
    and save the trained model.
"""
def filename(self):
        return 'classifier_cross_%s_%s' % (self.task_core.pipeline.get_name(), self.task_core.classifier_name)
def load_data(self):
data = TrainingDataTask(self.task_core).run()
        return train_classifier(self.task_core.classifier, self.task_core.plot2file, data,
                                use_all_data=True, normalize=self.task_core.normalize)
# a list of pairs indicating the slices of the data containing full seizures
# e.g. [(0, 5), (6, 10)] indicates two ranges of seizures
def seizure_ranges_for_latencies(latencies):
indices = np.where(latencies == 0)[0]
ranges = []
for i in range(1, len(indices)):
ranges.append((indices[i-1], indices[i]))
ranges.append((indices[-1], len(latencies)))
return ranges
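# Worked example (illustrative): latencies [0, 1, 2, 0, 1, 2, 3] start two
# seizures at indices 0 and 3, so the function returns [(0, 3), (3, 7)].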
#generator to iterate over competition mat data
def load_mat_data(data_dir, target, component):
dir = os.path.join(data_dir, target)
done = False
i = 0
while not done:
i += 1
        nstr = '%04d' % i  # zero-pad the segment index to four digits
filename = '%s/%s_%s_segment_%s.mat' % (dir, target, component, nstr)
if os.path.exists(filename):
data = scipy.io.loadmat(filename)
yield(data)
else:
if i == 1:
raise Exception("file %s not found" % filename)
done = True
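# A hedged usage sketch (directory and subject names are assumptions): the
# generator yields loadmat dictionaries lazily, one segment file at a time.
# for segment in load_mat_data('seizure-data', 'Dog_1', 'interictal'):
#     handle(segment)  # hypothetical consumer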
# process all of one type of the competition mat data
# data_type is one of ('preictal', 'interictal', 'test')
def parse_input_data(data_dir, target, data_type, pipeline, subjectID=0, gen_preictal=False):
preictal = data_type == 'preictal'
interictal = data_type == 'interictal'
targetFrequency = 100 #re-sample to target frequency
sampleSizeinSecond = 600
totalSample = 12
mat_data = load_mat_data(data_dir, target, data_type)
# for each data point in preictal, interictal and test,
# generate (X, <y>) per channel
def process_raw_data(mat_data):
start = time.get_seconds()
print 'Loading data',
#print mat_data
X = []
y = []
previous_transformed_data = [] #used in two window model
previous_sequence = 0
for segment in mat_data:
for skey in segment.keys():
if "_segment_" in skey.lower():
mykey = skey
if preictal:
preictual_sequence = segment[mykey][0][0][4][0][0]
y_value = preictual_sequence #temporarily set to sequence number
if preictual_sequence != previous_sequence+1:
previous_transformed_data = [] #if data is not in sequence
previous_sequence = preictual_sequence
elif interictal:
y_value = 0
previous_transformed_data = [] #interictal data is not in sequence between files
else:
previous_transformed_data = [] #test data is not in sequence between files
data = segment[mykey][0][0][0]
sampleFrequency = segment[mykey][0][0][2][0][0]
axis = data.ndim - 1
if sampleFrequency > targetFrequency: #resample to target frequency
data = resample(data, targetFrequency*sampleSizeinSecond, axis=axis)
            '''DataSampleSize: split the 10-minute recording into several clips.
            With one-second clips, Patient_1 and Patient_2 finished in ~3 hours,
            but Dog_1 crashed after 7+ hours (out of memory), so ten-second
            clips are used instead.
            '''
DataSampleSize = data.shape[1]/(totalSample *1.0) #try to split data into equal size
splitIdx = np.arange(DataSampleSize, data.shape[1], DataSampleSize)
splitIdx = np.int32(np.ceil(splitIdx))
splitData = np.hsplit(data,splitIdx)
# for i in range(totalSample):
# s = splitData[i]
# s2 = splitData[i+totalSample]
for s in splitData:
if s.size > 0: #is not empty
# s = 1.0 * s #convert int to float
# s_scale = preprocessing.scale(s, axis=0, with_std = True)
# transformed_data = pipeline.apply([subjectID, s])
transformed_data = pipeline.apply(s)
# previous_transformed_data.append(transformed_data)
# transformed_data2 = pipeline.apply([subjectID, s1])
# if len(previous_transformed_data) > totalSample/2:
# combined_transformed_data = np.concatenate((transformed_data, previous_transformed_data.pop(0)), axis=transformed_data.ndim-1)
# X.append(combined_transformed_data)
X.append(transformed_data)
if preictal or interictal:
y.append(y_value)
print '(%ds)' % (time.get_seconds() - start)
X = np.array(X)
if preictal or interictal:
y = np.array(y)
print 'X', X.shape, 'y', y.shape
return X, y
else:
print 'X', X.shape
return X
data = process_raw_data(mat_data)
if len(data) == 2:
X, y = data
return {
'X': X,
'y': y
}
else:
X = data
return {
'X': X
}
# flatten data down to 2 dimensions for putting through a classifier
def flatten(data):
if data.ndim > 2:
return data.reshape((data.shape[0], np.product(data.shape[1:])))
else:
return data
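# Worked example (illustrative): an array of shape (100, 16, 12) is reshaped
# to (100, 192); a 2-D array passes through unchanged.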
# split up preictal and interictal data into training set and cross-validation set
def prepare_training_data(preictal_data, interictal_data, cv_ratio):
    start = time.get_seconds()
    print 'Preparing training data ...',
preictal_X, preictal_y = flatten(preictal_data.X), preictal_data.y
interictal_X, interictal_y = flatten(interictal_data.X), interictal_data.y
# split up data into training set and cross-validation set for both seizure and early sets
preictal_X_train, preictal_y_train, preictal_X_cv, preictal_y_cv = split_train_preictal(preictal_X, preictal_y, cv_ratio)
interictal_X_train, interictal_y_train, interictal_X_cv, interictal_y_cv = split_train_random(interictal_X, interictal_y, cv_ratio)
def concat(a, b):
return np.concatenate((a, b), axis=0)
X_train = concat(preictal_X_train, interictal_X_train)
y_train = concat(preictal_y_train, interictal_y_train)
X_cv = concat(preictal_X_cv, interictal_X_cv)
y_cv = concat(preictal_y_cv, interictal_y_cv)
y_classes = np.unique(concat(y_train, y_cv))
    elapsedSecs = time.get_seconds() - start
print "%ds" % int(elapsedSecs)
print 'X_train:', np.shape(X_train)
print 'y_train:', np.shape(y_train)
print 'X_cv:', np.shape(X_cv)
print 'y_cv:', np.shape(y_cv)
print 'y_classes:', y_classes
return {
'X_train': X_train,
'y_train': y_train,
'X_cv': X_cv,
'y_cv': y_cv,
'y_classes': y_classes
}
# split interictal segments at random for training and cross-validation
def split_train_random(X, y, cv_ratio):
X_train, X_cv, y_train, y_cv = cross_validation.train_test_split(X, y, test_size=cv_ratio, random_state=0)
return X_train, y_train, X_cv, y_cv
def split_train_preictal(X, y, cv_ratio):
X_train = []
X_cv = []
y_train = []
y_cv = []
zxy = zip(X,y)
y = 1
for i in range(1,7):
# for i in range(1,2):
X1 = []
y1 = []
for r in zxy:
X2, y2 = r
if y2 <= i and y2 > i-1:
X1.append(X2)
# y1.append(y2)
y1.append(y) #set back to two level classification
X1_train, X1_cv, y1_train, y1_cv = cross_validation.train_test_split(X1, y1, test_size=cv_ratio, random_state=0)
X_train.append(X1_train)
y_train.append(y1_train)
X_cv.append(X1_cv)
y_cv.append(y1_cv)
X_train = np.concatenate(X_train)
y_train = np.concatenate(y_train)
X_cv = np.concatenate(X_cv)
y_cv = np.concatenate(y_cv)
return X_train, y_train, X_cv, y_cv
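# Worked note (illustrative): preictal labels arrive as sequence numbers 1-6,
# so each sequence is split separately to preserve the train/CV ratio per
# sequence before the labels are collapsed back to the single class 1.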
def train(classifier, plot2file, X_train, y_train, X_cv, y_cv, y_classes):
print "Training ..."
print 'Dim', 'X', np.shape(X_train), 'y', np.shape(y_train), 'X_cv', np.shape(X_cv), 'y_cv', np.shape(y_cv)
start = time.get_seconds()
classifier.fit(X_train, y_train)
print "Scoring..."
    score = score_classifier_auc(classifier, plot2file, X_cv, y_cv, y_classes)
elapsedSecs = time.get_seconds() - start
print "t=%ds score=%f" % (int(elapsedSecs), score)
return score
# train classifier for predictions
def train_all_data(classifier, plot2file, X_train, y_train, X_cv, y_cv):
print "Training ..."
X = np.concatenate((X_train, X_cv), axis=0)
y = np.concatenate((y_train, y_cv), axis=0)
print 'Dim', np.shape(X), np.shape(y)
start = time.get_seconds()
classifier_cv = deepcopy(classifier)
classifier.fit(X, y)
classifier_cv.fit(X_train, y_train)
score_classifier_auc(classifier_cv, plot2file, X_cv, y_cv, y_cv)
y_estimate = classifier_cv.predict_proba(X_cv)
elapsedSecs = time.get_seconds() - start
print "t=%ds" % int(elapsedSecs)
return y_estimate
# sub mean divide by standard deviation
def normalize_data(X_train, X_cv):
scaler = preprocessing.StandardScaler()
scaler.fit(X_train)
X_train = scaler.transform(X_train)
X_cv = scaler.transform(X_cv)
return X_train, X_cv
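# Worked note (illustrative): the scaler is fit on the training split only,
# so the cross-validation split is transformed with the training mean and
# standard deviation, which avoids leaking held-out statistics into the model.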
# depending on input train either for predictions or for cross-validation
def train_classifier(classifier, plot2file, data, use_all_data=False, normalize=False):
X_train = data.X_train
y_train = data.y_train
X_cv = data.X_cv
y_cv = data.y_cv
if normalize:
X_train, X_cv = normalize_data(X_train, X_cv)
if not use_all_data:
        score = train(classifier, plot2file, X_train, y_train, X_cv, y_cv, data.y_classes)
return {
'classifier': classifier,
'score': score,
}
else:
y_estimate = train_all_data(classifier, plot2file, X_train, y_train, X_cv, y_cv)
calibrate_matrix = train_calibrator(y_cv, y_estimate, plot2file)
lr = LR()
lr.fit(y_estimate, y_cv)
return {
'classifier': classifier,
'calibrate_matrix': calibrate_matrix,
'LR' : lr
}
def train_calibrator(y, y_estimate, plot2file):
print "Training calibrator..."
start = time.get_seconds()
preictal_predictions = []
p_y_cv = [0.0 if x == 0.0 else 1.0 for x in y]
for i in range(len(y_estimate)):
p = y_estimate[i]
preictal = translate_prediction(p)
preictal_predictions.append(preictal)
fpr, tpr, thresholds = roc_curve(p_y_cv, preictal_predictions)
p_roc_auc = auc(fpr, tpr)
y_av = np.average(p_y_cv)
y_std = np.std(p_y_cv)
ye_av = np.average(preictal_predictions)
ye_std = np.std(preictal_predictions)
pl.clf()
pl.hist(preictal_predictions, bins=50)
pl.xlabel('preictal estimate')
pl.ylabel('counts')
pl.title('CV histogram (mean_cv= %0.3f, mean_es=%0.3f, std_es=%0.3f)' %(y_av, ye_av, ye_std))
# pl.show()
plot2file.savefig()
calibrate_matrix = np.array([ye_av, ye_std])
elapsedSecs = time.get_seconds() - start
print "t=%ds score=%f" % (int(elapsedSecs), p_roc_auc)
return calibrate_matrix
# convert the output of classifier predictions into (Seizure, Early) pair
def translate_prediction(prediction):
if prediction.shape[0] == 7:
interictal, p1, p2, p3, p4, p5, p6 = prediction
preictal = p1 + p2 + p3 + p4 + p5 + p6
return preictal
elif prediction.shape[0] == 2:
interictal, p1 = prediction
preictal = p1
return preictal
elif prediction.shape[0] == 1:
return prediction[0]
else:
raise NotImplementedError()
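# Worked example (illustrative): for a 7-class output such as
# [0.4, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] the six preictal probabilities are
# summed and translate_prediction returns 0.6.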
# use the classifier and make predictions on the test data
def make_predictions(target, plot2file, X_test, y_classes, classifier_data):
print classifier_data
classifier = classifier_data.classifier
lr = classifier_data.LR
predictions_proba = classifier.predict_proba(X_test)
# predictions_calibrated = lr.predict_proba(predictions_proba)
predictions_calibrated = predictions_proba
data = calibrate_prediction(plot2file, predictions_calibrated, classifier_data.calibrate_matrix)
predictions_calibrated = data['preictal_calibrated']
is_aggressive = data['is_aggressive']
lines = []
totalSample = 12
for i in range(len(predictions_calibrated)/totalSample):
j = i+1
        nstr = '%04d' % j  # zero-pad the segment index to four digits
preictal_segments = []
for k in range(totalSample):
p = predictions_calibrated[i*totalSample+k]
preictal = translate_prediction(p)
preictal_segments.append(preictal)
preictalOverAllSample = get_combine_prediction(preictal_segments, is_aggressive)
lines.append('%s_test_segment_%s.mat,%.15f' % (target, nstr, preictalOverAllSample))
return {
'data': '\n'.join(lines)
}
def get_combine_prediction(preictal_segments, is_aggressive):
    from scipy.stats.mstats import gmean, hmean
    # averaging methods: arithmetic, geometric and harmonic
interictal_amean = 1.0 - np.mean(preictal_segments)
interictal = 1.0 - np.array(preictal_segments)
interictal_gmean = gmean(interictal)
interictal_hmean = hmean(interictal)
interictal_agmean = 0.5 * (interictal_amean + interictal_gmean)
interictal_hgmean = 0.5 * (interictal_hmean + interictal_gmean)
# combine_prediction = 1.0 - interictal_hmean
if is_aggressive:
return 1.0 - interictal_hmean
else:
return 1.0 - interictal_amean
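# A small numeric sketch (illustrative): for preictal estimates [0.9, 0.1]
# the interictal values are [0.1, 0.9]; their harmonic mean is 0.18 and their
# arithmetic mean is 0.5, so the aggressive path returns 1 - 0.18 = 0.82
# while the conservative path returns 1 - 0.5 = 0.5.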
# the scoring mechanism used by the competition leaderboard
def score_classifier_auc(classifier, plot2file, X_cv, y_cv, y_classes):
predictions = classifier.predict_proba(X_cv)
preictal_predictions = []
p_y_cv = [0.0 if x == 0.0 else 1.0 for x in y_cv]
for i in range(len(predictions)):
p = predictions[i]
preictal = translate_prediction(p)
preictal_predictions.append(preictal)
fpr, tpr, thresholds = roc_curve(p_y_cv, preictal_predictions)
p_roc_auc = auc(fpr, tpr)
# Plot ROC curve
pl.clf()
pl.subplot(211)
pl.plot(fpr, tpr, label='ROC curve (area = %0.2f)' % p_roc_auc)
pl.plot([0, 1], [0, 1], 'k--')
pl.xlim([0.0, 1.0])
pl.ylim([0.0, 1.0])
pl.xlabel('False Positive Rate')
pl.ylabel('True Positive Rate')
pl.title('Receiver operating characteristic example')
pl.legend(loc="lower right")
pl.subplot(212)
pl.plot(thresholds, tpr, label='tpr')
pl.plot(thresholds, fpr, label='fpr')
pl.xlim([0.0, 1.0])
pl.ylim([0.0, 1.0])
pl.xlabel('thresholds')
pl.ylabel('True/false Positive Rate')
# pl.title('thresholds vs. True/false Positive Rate')
pl.legend(loc="upper right")
plot2file.savefig()
# pl.show()
return p_roc_auc
def calibrate_prediction(plot2file, predictions, calibrate_matrix):
cmean = calibrate_matrix[0]
cstd = calibrate_matrix[1]
preictal_predictions = []
for i in range(len(predictions)):
p = predictions[i]
preictal = translate_prediction(p)
preictal_predictions.append(preictal)
ye_av = np.average(preictal_predictions)
ye_std = np.std(preictal_predictions)
pl.clf()
pl.hist(preictal_predictions, bins=50)
pl.xlabel('preictal estimate')
pl.ylabel('counts')
pl.title('Test data set histogram ( mean_es=%0.3f, std_es=%0.3f)' %(ye_av, ye_std))
plot2file.savefig()
# pl.show()
target = cmean
for i in range(1):
if i==0:
pp = np.percentile(preictal_predictions, (1.0-target)*100.0)
tobecalibrate = preictal_predictions
else:
pp = np.percentile(preictal_predictions, target*100.0)
preictal_calibrated = []
upper_limit = max(tobecalibrate)
lower_limit = min(tobecalibrate)
ratio1 = target/pp
ratio2 = (upper_limit-target)/(upper_limit-pp)
for p in tobecalibrate:
if p <= pp:
pc = ratio1 * (p - lower_limit)
else:
pc = target + ratio2 * (p-pp)
preictal_calibrated.append(pc)
tobecalibrate = preictal_calibrated
preictal_calibrated = np.reshape(preictal_calibrated, (len(preictal_calibrated),1))
yc_av = np.average(preictal_calibrated)
yc_std = np.std(preictal_calibrated)
pl.clf()
pl.hist(preictal_calibrated, bins=50)
pl.xlabel('preictal calibrated')
pl.ylabel('counts')
pl.title('histogram of preictal calibrated ( mean_es=%0.3f, std_es=%0.3f)' %(yc_av, yc_std))
plot2file.savefig()
if ye_av > 0.4:
is_aggressive = False
else:
is_aggressive = True
return {
'preictal_calibrated': preictal_calibrated,
'is_aggressive': is_aggressive
}
| mit |
hawk23/music-recommender | Evaluate_Recommender.py | 1 | 8559 | # Implementation of a simple evaluation framework for recommender systems algorithms
__author__ = 'mms'
# Load required modules
import csv
import numpy as np
from sklearn import cross_validation # machine learning & evaluation module
from random import randint
# Parameters
UAM_FILE = "UAM.txt" # user-artist-matrix (UAM)
ARTISTS_FILE = "UAM_artists.txt" # artist names for UAM
USERS_FILE = "UAM_users.txt" # user names for UAM
NF = 5 # number of folds to perform in cross-validation
K = 2 # number of nearest neighbors used by the CF recommender
# Function to read metadata (users or artists)
def read_from_file(filename):
data = []
with open(filename, 'r') as f: # open file for reading
reader = csv.reader(f, delimiter='\t') # create reader
headers = reader.next() # skip header
for row in reader:
item = row[0]
data.append(item)
f.close()
return data
# Function that implements a CF recommender. It takes as input the UAM, metadata (artists and users),
# the index of the seed user (to make predictions for) and the indices of the seed user's training artists.
# It returns a list of recommended artist indices
def recommend_CF(UAM, seed_uidx, seed_aidx_train, K = 1):
# UAM user-artist-matrix
# seed_uidx user index of seed user
# seed_aidx_train indices of training artists for seed user
# Get playcount vector for seed user
pc_vec = UAM[seed_uidx, :]
# Remove information on test artists from seed's listening vector
aidx_nz = np.nonzero(pc_vec)[0] # artists with non-zero listening events
aidx_test = np.setdiff1d(aidx_nz, seed_aidx_train) # compute set difference between all artist indices of user and train indices gives test artist indices
# print aidx_test
# Set to 0 the listening events of seed user for testing (in UAM; pc_vec just points to UAM, is thus automatically updated)
UAM[seed_uidx, aidx_test] = 0.0
# Seed user needs to be normalized again
# Perform sum-to-1 normalization
UAM[seed_uidx, :] = UAM[seed_uidx, :] / np.sum(UAM[seed_uidx, :])
# Compute similarities as inner product between pc_vec of user and all users via UAM (assuming that UAM is normalized)
sim_users = np.inner(pc_vec, UAM) # similarities between u and other users
# Alternatively, compute cosine similarities as inverse cosine distance between pc_vec of user and all users via UAM (assuming that UAM is normalized)
# sim_users = np.zeros(shape=(UAM.shape[0]), dtype=np.float32)
# for u in range(0, UAM.shape[0]):
# sim_users[u] = 1.0 - scidist.cosine(pc_vec, UAM[u,:])
# Sort similarities to all others
sort_idx = np.argsort(sim_users) # sort in ascending order
# Get all artist indices the seed user and her closest neighbor listened to, i.e., element with non-zero entries in UAM
artist_idx_u = seed_aidx_train # indices of artists in training set user
# Select the k closest neighbor to seed user (which is the last but one; last one is user u herself!)
kneighbor_idx = sort_idx[-(1+K):-1]
artist_idx_n = [] # indices of artists user u's neighbor(s) listened to
for neighbor_idx in kneighbor_idx:
listened_to = np.nonzero(UAM[neighbor_idx, :]) # indices of artists user u's neighbor listened to
artist_idx_n = np.union1d(listened_to[0], artist_idx_n) # np.nonzero returns a tuple of arrays, so we need to take the first element only
'''
if len(artist_idx_n) == 0:
artist_idx_n = listened_to[0]
else:
artist_idx_n = np.intersect1d(listened_to[0], artist_idx_n) # np.nonzero returns a tuple of arrays, so we need to take the first element only
'''
# Compute the set difference between seed user's neighbor and seed user,
# i.e., artists listened to by the neighbor, but not by seed user.
# These artists are recommended to seed user.
recommended_artists_idx = np.setdiff1d(artist_idx_n, artist_idx_u)
    # or alternatively, convert to numpy arrays explicitly:
    # np.setdiff1d(np.array(artist_idx_n), np.array(artist_idx_u))
# print "training-set: " + str(seed_aidx_train)
# print "recommended: " + str(recommended_artists_idx)
# Return list of recommended artist indices
return recommended_artists_idx
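# A minimal usage sketch (values are illustrative assumptions): build a tiny
# normalized UAM and recommend for user 0, whose training set is artist 0.
def example_recommend_cf():
    uam = np.array([[0.5, 0.5, 0.0],
                    [0.4, 0.3, 0.3]], dtype=np.float32)
    # Expected: artists 1 and 2, listened to by the neighbor but not in training.
    return recommend_CF(uam.copy(), 0, np.array([0]), K=1)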
# This function defines a baseline recommender, which selects a random number of artists
# the seed user hasn't listened yet and returns these. Since this function is used with
# a cross fold validation all artists not in the seed_aidx_train set are artists the
# user hasn't listened to.
def recommend_baseline (UAM, seed_uidx, seed_aidx_train):
# UAM user-artist-matrix
# seed_uidx user index of seed user
    # Get list of indices of the artists the user hasn't listened to yet
all_artists_idx = range(0, len(UAM[0,:]))
not_listened = np.setdiff1d(all_artists_idx, seed_aidx_train)
# get number of artists to recommend
num_recommend = randint(1,len(not_listened))
# recommend artists
recommended_artists_idx = [not_listened[randint(0,len(not_listened)-1)] for _ in range(num_recommend)]
# print "not_listened: " + str(len(not_listened)) +" num_recommended: " + str(num_recommend) + " len(recommended_artists_idx): " + str(len(recommended_artists_idx))
# print "recommended: "+ str(recommended_artists_idx)
# return result with possible duplicates removed
return list(set(recommended_artists_idx))
# Main program
if __name__ == '__main__':
# Initialize variables to hold performance measures
avg_prec = 0 # mean precision
avg_rec = 0 # mean recall
# Load metadata from provided files into lists
artists = read_from_file(ARTISTS_FILE)
users = read_from_file(USERS_FILE)
# Load UAM
UAM = np.loadtxt(UAM_FILE, delimiter='\t', dtype=np.float32)
# For all users in our data (UAM)
no_users = UAM.shape[0]
for u in range(0, no_users):
# Get indices of seed user's artists listened to
u_aidx = np.nonzero(UAM[u, :])[0]
# Split user's artists into train and test set for cross-fold (CV) validation
fold = 0
kf = cross_validation.KFold(len(u_aidx), n_folds=NF) # create folds (splits) for 5-fold CV
for train_aidx, test_aidx in kf: # for all folds
# Show progress
print "User: " + str(u) + ", Fold: " + str(fold) + ", Training items: " + str(
len(train_aidx)) + ", Test items: " + str(len(test_aidx)), # the comma at the end avoids line break
# Call recommend function
copy_UAM = UAM.copy() # we need to create a copy of the UAM, otherwise modifications within recommend function will effect the variable
#rec_aidx = recommend_CF(copy_UAM, u, train_aidx)
rec_aidx = recommend_CF(copy_UAM, u, train_aidx, K)
#rec_aidx = recommend_baseline(copy_UAM, u, train_aidx)
print "Recommended items: ", len(rec_aidx)
# Compute performance measures
correct_aidx = np.intersect1d(test_aidx, rec_aidx) # correctly predicted artists
# True Positives is amount of overlap in recommended artists and test artists
TP = len(correct_aidx)
# False Positives is recommended artists minus correctly predicted ones
FP = len(np.setdiff1d(rec_aidx, correct_aidx))
# Precision is percentage of correctly predicted among predicted
prec = 100.0 * TP / len(rec_aidx)
# Recall is percentage of correctly predicted among all listened to
rec = 100.0 * TP / len(test_aidx)
# add precision and recall for current user and fold to aggregate variables
avg_prec += prec / (NF * no_users)
avg_rec += rec / (NF * no_users)
# Output precision and recall of current fold
print ("\tPrecision: %.2f, Recall: %.2f" % (prec, rec))
# Increase fold counter
fold += 1
# calculate f1 measure
f1 = 2 * ((avg_prec * avg_rec) / (avg_prec + avg_rec))
# Output mean average precision and recall
print ("\nMAP: %.2f, MAR: %.2f, F1: %.2f" % (avg_prec, avg_rec, f1))
| gpl-2.0 |
arabenjamin/scikit-learn | sklearn/ensemble/tests/test_weight_boosting.py | 35 | 16763 | """Testing for the boost module (sklearn.ensemble.boost)."""
import numpy as np
from sklearn.utils.testing import assert_array_equal, assert_array_less
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_equal, assert_true
from sklearn.utils.testing import assert_raises, assert_raises_regexp
from sklearn.cross_validation import train_test_split
from sklearn.grid_search import GridSearchCV
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import AdaBoostRegressor
from sklearn.ensemble import weight_boosting
from scipy.sparse import csc_matrix
from scipy.sparse import csr_matrix
from scipy.sparse import coo_matrix
from scipy.sparse import dok_matrix
from scipy.sparse import lil_matrix
from sklearn.svm import SVC, SVR
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn.utils import shuffle
from sklearn import datasets
# Common random state
rng = np.random.RandomState(0)
# Toy sample
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]
y_class = ["foo", "foo", "foo", 1, 1, 1] # test string class labels
y_regr = [-1, -1, -1, 1, 1, 1]
T = [[-1, -1], [2, 2], [3, 2]]
y_t_class = ["foo", 1, 1]
y_t_regr = [-1, 1, 1]
# Load the iris dataset and randomly permute it
iris = datasets.load_iris()
perm = rng.permutation(iris.target.size)
iris.data, iris.target = shuffle(iris.data, iris.target, random_state=rng)
# Load the boston dataset and randomly permute it
boston = datasets.load_boston()
boston.data, boston.target = shuffle(boston.data, boston.target,
random_state=rng)
def test_samme_proba():
# Test the `_samme_proba` helper function.
# Define some example (bad) `predict_proba` output.
probs = np.array([[1, 1e-6, 0],
[0.19, 0.6, 0.2],
[-999, 0.51, 0.5],
[1e-6, 1, 1e-9]])
probs /= np.abs(probs.sum(axis=1))[:, np.newaxis]
# _samme_proba calls estimator.predict_proba.
# Make a mock object so I can control what gets returned.
class MockEstimator(object):
def predict_proba(self, X):
assert_array_equal(X.shape, probs.shape)
return probs
mock = MockEstimator()
samme_proba = weight_boosting._samme_proba(mock, 3, np.ones_like(probs))
assert_array_equal(samme_proba.shape, probs.shape)
assert_true(np.isfinite(samme_proba).all())
# Make sure that the correct elements come out as smallest --
# `_samme_proba` should preserve the ordering in each example.
assert_array_equal(np.argmin(samme_proba, axis=1), [2, 0, 0, 2])
assert_array_equal(np.argmax(samme_proba, axis=1), [0, 1, 1, 1])
def test_classification_toy():
# Check classification on a toy dataset.
for alg in ['SAMME', 'SAMME.R']:
clf = AdaBoostClassifier(algorithm=alg, random_state=0)
clf.fit(X, y_class)
assert_array_equal(clf.predict(T), y_t_class)
assert_array_equal(np.unique(np.asarray(y_t_class)), clf.classes_)
assert_equal(clf.predict_proba(T).shape, (len(T), 2))
assert_equal(clf.decision_function(T).shape, (len(T),))
def test_regression_toy():
# Check classification on a toy dataset.
clf = AdaBoostRegressor(random_state=0)
clf.fit(X, y_regr)
assert_array_equal(clf.predict(T), y_t_regr)
def test_iris():
# Check consistency on dataset iris.
classes = np.unique(iris.target)
clf_samme = prob_samme = None
for alg in ['SAMME', 'SAMME.R']:
clf = AdaBoostClassifier(algorithm=alg)
clf.fit(iris.data, iris.target)
assert_array_equal(classes, clf.classes_)
proba = clf.predict_proba(iris.data)
if alg == "SAMME":
clf_samme = clf
prob_samme = proba
assert_equal(proba.shape[1], len(classes))
assert_equal(clf.decision_function(iris.data).shape[1], len(classes))
score = clf.score(iris.data, iris.target)
assert score > 0.9, "Failed with algorithm %s and score = %f" % \
(alg, score)
# Somewhat hacky regression test: prior to
# ae7adc880d624615a34bafdb1d75ef67051b8200,
# predict_proba returned SAMME.R values for SAMME.
clf_samme.algorithm = "SAMME.R"
assert_array_less(0,
np.abs(clf_samme.predict_proba(iris.data) - prob_samme))
def test_boston():
# Check consistency on dataset boston house prices.
clf = AdaBoostRegressor(random_state=0)
clf.fit(boston.data, boston.target)
score = clf.score(boston.data, boston.target)
assert score > 0.85
def test_staged_predict():
# Check staged predictions.
rng = np.random.RandomState(0)
iris_weights = rng.randint(10, size=iris.target.shape)
boston_weights = rng.randint(10, size=boston.target.shape)
# AdaBoost classification
for alg in ['SAMME', 'SAMME.R']:
clf = AdaBoostClassifier(algorithm=alg, n_estimators=10)
clf.fit(iris.data, iris.target, sample_weight=iris_weights)
predictions = clf.predict(iris.data)
staged_predictions = [p for p in clf.staged_predict(iris.data)]
proba = clf.predict_proba(iris.data)
staged_probas = [p for p in clf.staged_predict_proba(iris.data)]
score = clf.score(iris.data, iris.target, sample_weight=iris_weights)
staged_scores = [
s for s in clf.staged_score(
iris.data, iris.target, sample_weight=iris_weights)]
assert_equal(len(staged_predictions), 10)
assert_array_almost_equal(predictions, staged_predictions[-1])
assert_equal(len(staged_probas), 10)
assert_array_almost_equal(proba, staged_probas[-1])
assert_equal(len(staged_scores), 10)
assert_array_almost_equal(score, staged_scores[-1])
# AdaBoost regression
clf = AdaBoostRegressor(n_estimators=10, random_state=0)
clf.fit(boston.data, boston.target, sample_weight=boston_weights)
predictions = clf.predict(boston.data)
staged_predictions = [p for p in clf.staged_predict(boston.data)]
score = clf.score(boston.data, boston.target, sample_weight=boston_weights)
staged_scores = [
s for s in clf.staged_score(
boston.data, boston.target, sample_weight=boston_weights)]
assert_equal(len(staged_predictions), 10)
assert_array_almost_equal(predictions, staged_predictions[-1])
assert_equal(len(staged_scores), 10)
assert_array_almost_equal(score, staged_scores[-1])
def test_gridsearch():
# Check that base trees can be grid-searched.
# AdaBoost classification
boost = AdaBoostClassifier(base_estimator=DecisionTreeClassifier())
parameters = {'n_estimators': (1, 2),
'base_estimator__max_depth': (1, 2),
'algorithm': ('SAMME', 'SAMME.R')}
clf = GridSearchCV(boost, parameters)
clf.fit(iris.data, iris.target)
# AdaBoost regression
boost = AdaBoostRegressor(base_estimator=DecisionTreeRegressor(),
random_state=0)
parameters = {'n_estimators': (1, 2),
'base_estimator__max_depth': (1, 2)}
clf = GridSearchCV(boost, parameters)
clf.fit(boston.data, boston.target)
def test_pickle():
# Check pickability.
import pickle
# Adaboost classifier
for alg in ['SAMME', 'SAMME.R']:
obj = AdaBoostClassifier(algorithm=alg)
obj.fit(iris.data, iris.target)
score = obj.score(iris.data, iris.target)
s = pickle.dumps(obj)
obj2 = pickle.loads(s)
assert_equal(type(obj2), obj.__class__)
score2 = obj2.score(iris.data, iris.target)
assert_equal(score, score2)
# Adaboost regressor
obj = AdaBoostRegressor(random_state=0)
obj.fit(boston.data, boston.target)
score = obj.score(boston.data, boston.target)
s = pickle.dumps(obj)
obj2 = pickle.loads(s)
assert_equal(type(obj2), obj.__class__)
score2 = obj2.score(boston.data, boston.target)
assert_equal(score, score2)
def test_importances():
# Check variable importances.
X, y = datasets.make_classification(n_samples=2000,
n_features=10,
n_informative=3,
n_redundant=0,
n_repeated=0,
shuffle=False,
random_state=1)
for alg in ['SAMME', 'SAMME.R']:
clf = AdaBoostClassifier(algorithm=alg)
clf.fit(X, y)
importances = clf.feature_importances_
assert_equal(importances.shape[0], 10)
assert_equal((importances[:3, np.newaxis] >= importances[3:]).all(),
True)
def test_error():
# Test that it gives proper exception on deficient input.
assert_raises(ValueError,
AdaBoostClassifier(learning_rate=-1).fit,
X, y_class)
assert_raises(ValueError,
AdaBoostClassifier(algorithm="foo").fit,
X, y_class)
assert_raises(ValueError,
AdaBoostClassifier().fit,
X, y_class, sample_weight=np.asarray([-1]))
def test_base_estimator():
# Test different base estimators.
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import SVC
# XXX doesn't work with y_class because RF doesn't support classes_
# Shouldn't AdaBoost run a LabelBinarizer?
clf = AdaBoostClassifier(RandomForestClassifier())
clf.fit(X, y_regr)
clf = AdaBoostClassifier(SVC(), algorithm="SAMME")
clf.fit(X, y_class)
from sklearn.ensemble import RandomForestRegressor
from sklearn.svm import SVR
clf = AdaBoostRegressor(RandomForestRegressor(), random_state=0)
clf.fit(X, y_regr)
clf = AdaBoostRegressor(SVR(), random_state=0)
clf.fit(X, y_regr)
# Check that an empty discrete ensemble fails in fit, not predict.
X_fail = [[1, 1], [1, 1], [1, 1], [1, 1]]
y_fail = ["foo", "bar", 1, 2]
clf = AdaBoostClassifier(SVC(), algorithm="SAMME")
assert_raises_regexp(ValueError, "worse than random",
clf.fit, X_fail, y_fail)
def test_sample_weight_missing():
from sklearn.linear_model import LinearRegression
from sklearn.cluster import KMeans
clf = AdaBoostClassifier(LinearRegression(), algorithm="SAMME")
assert_raises(ValueError, clf.fit, X, y_regr)
clf = AdaBoostRegressor(LinearRegression())
assert_raises(ValueError, clf.fit, X, y_regr)
clf = AdaBoostClassifier(KMeans(), algorithm="SAMME")
assert_raises(ValueError, clf.fit, X, y_regr)
clf = AdaBoostRegressor(KMeans())
assert_raises(ValueError, clf.fit, X, y_regr)
def test_sparse_classification():
# Check classification with sparse input.
class CustomSVC(SVC):
"""SVC variant that records the nature of the training set."""
def fit(self, X, y, sample_weight=None):
"""Modification on fit caries data type for later verification."""
super(CustomSVC, self).fit(X, y, sample_weight=sample_weight)
self.data_type_ = type(X)
return self
X, y = datasets.make_multilabel_classification(n_classes=1, n_samples=15,
n_features=5,
random_state=42)
# Flatten y to a 1d array
y = np.ravel(y)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
for sparse_format in [csc_matrix, csr_matrix, lil_matrix, coo_matrix,
dok_matrix]:
X_train_sparse = sparse_format(X_train)
X_test_sparse = sparse_format(X_test)
# Trained on sparse format
sparse_classifier = AdaBoostClassifier(
base_estimator=CustomSVC(probability=True),
random_state=1,
algorithm="SAMME"
).fit(X_train_sparse, y_train)
# Trained on dense format
dense_classifier = AdaBoostClassifier(
base_estimator=CustomSVC(probability=True),
random_state=1,
algorithm="SAMME"
).fit(X_train, y_train)
# predict
sparse_results = sparse_classifier.predict(X_test_sparse)
dense_results = dense_classifier.predict(X_test)
assert_array_equal(sparse_results, dense_results)
# decision_function
sparse_results = sparse_classifier.decision_function(X_test_sparse)
dense_results = dense_classifier.decision_function(X_test)
assert_array_equal(sparse_results, dense_results)
# predict_log_proba
sparse_results = sparse_classifier.predict_log_proba(X_test_sparse)
dense_results = dense_classifier.predict_log_proba(X_test)
assert_array_equal(sparse_results, dense_results)
# predict_proba
sparse_results = sparse_classifier.predict_proba(X_test_sparse)
dense_results = dense_classifier.predict_proba(X_test)
assert_array_equal(sparse_results, dense_results)
# score
sparse_results = sparse_classifier.score(X_test_sparse, y_test)
dense_results = dense_classifier.score(X_test, y_test)
assert_array_equal(sparse_results, dense_results)
# staged_decision_function
sparse_results = sparse_classifier.staged_decision_function(
X_test_sparse)
dense_results = dense_classifier.staged_decision_function(X_test)
        for sparse_res, dense_res in zip(sparse_results, dense_results):
            assert_array_equal(sparse_res, dense_res)
# staged_predict
sparse_results = sparse_classifier.staged_predict(X_test_sparse)
dense_results = dense_classifier.staged_predict(X_test)
        for sparse_res, dense_res in zip(sparse_results, dense_results):
            assert_array_equal(sparse_res, dense_res)
# staged_predict_proba
sparse_results = sparse_classifier.staged_predict_proba(X_test_sparse)
dense_results = dense_classifier.staged_predict_proba(X_test)
        for sparse_res, dense_res in zip(sparse_results, dense_results):
            assert_array_equal(sparse_res, dense_res)
# staged_score
sparse_results = sparse_classifier.staged_score(X_test_sparse,
y_test)
dense_results = dense_classifier.staged_score(X_test, y_test)
        for sparse_res, dense_res in zip(sparse_results, dense_results):
            assert_array_equal(sparse_res, dense_res)
# Verify sparsity of data is maintained during training
types = [i.data_type_ for i in sparse_classifier.estimators_]
assert all([(t == csc_matrix or t == csr_matrix)
for t in types])
def test_sparse_regression():
# Check regression with sparse input.
class CustomSVR(SVR):
"""SVR variant that records the nature of the training set."""
def fit(self, X, y, sample_weight=None):
"""Modification on fit caries data type for later verification."""
super(CustomSVR, self).fit(X, y, sample_weight=sample_weight)
self.data_type_ = type(X)
return self
X, y = datasets.make_regression(n_samples=15, n_features=50, n_targets=1,
random_state=42)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
for sparse_format in [csc_matrix, csr_matrix, lil_matrix, coo_matrix,
dok_matrix]:
X_train_sparse = sparse_format(X_train)
X_test_sparse = sparse_format(X_test)
# Trained on sparse format
sparse_classifier = AdaBoostRegressor(
base_estimator=CustomSVR(),
random_state=1
).fit(X_train_sparse, y_train)
# Trained on dense format
        dense_classifier = AdaBoostRegressor(
base_estimator=CustomSVR(),
random_state=1
).fit(X_train, y_train)
# predict
sparse_results = sparse_classifier.predict(X_test_sparse)
dense_results = dense_classifier.predict(X_test)
assert_array_equal(sparse_results, dense_results)
# staged_predict
sparse_results = sparse_classifier.staged_predict(X_test_sparse)
dense_results = dense_classifier.staged_predict(X_test)
        for sparse_res, dense_res in zip(sparse_results, dense_results):
            assert_array_equal(sparse_res, dense_res)
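        # Verify sparsity of data is maintained during training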
types = [i.data_type_ for i in sparse_classifier.estimators_]
assert all([(t == csc_matrix or t == csr_matrix)
for t in types])
| bsd-3-clause |
heli522/scikit-learn | sklearn/metrics/tests/test_pairwise.py | 17 | 24947 | import numpy as np
from numpy import linalg
from scipy.sparse import dok_matrix, csr_matrix, issparse
from scipy.spatial.distance import cosine, cityblock, minkowski, wminkowski
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_raises_regexp
from sklearn.utils.testing import assert_true
from sklearn.externals.six import iteritems
from sklearn.metrics.pairwise import euclidean_distances
from sklearn.metrics.pairwise import manhattan_distances
from sklearn.metrics.pairwise import linear_kernel
from sklearn.metrics.pairwise import chi2_kernel, additive_chi2_kernel
from sklearn.metrics.pairwise import polynomial_kernel
from sklearn.metrics.pairwise import rbf_kernel
from sklearn.metrics.pairwise import sigmoid_kernel
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.metrics.pairwise import cosine_distances
from sklearn.metrics.pairwise import pairwise_distances
from sklearn.metrics.pairwise import pairwise_distances_argmin_min
from sklearn.metrics.pairwise import pairwise_distances_argmin
from sklearn.metrics.pairwise import pairwise_kernels
from sklearn.metrics.pairwise import PAIRWISE_KERNEL_FUNCTIONS
from sklearn.metrics.pairwise import PAIRWISE_DISTANCE_FUNCTIONS
from sklearn.metrics.pairwise import PAIRED_DISTANCES
from sklearn.metrics.pairwise import check_pairwise_arrays
from sklearn.metrics.pairwise import check_paired_arrays
from sklearn.metrics.pairwise import _parallel_pairwise
from sklearn.metrics.pairwise import paired_distances
from sklearn.metrics.pairwise import paired_euclidean_distances
from sklearn.metrics.pairwise import paired_manhattan_distances
from sklearn.preprocessing import normalize
def test_pairwise_distances():
    # Test the pairwise_distances helper function.
rng = np.random.RandomState(0)
# Euclidean distance should be equivalent to calling the function.
X = rng.random_sample((5, 4))
S = pairwise_distances(X, metric="euclidean")
S2 = euclidean_distances(X)
assert_array_almost_equal(S, S2)
# Euclidean distance, with Y != X.
Y = rng.random_sample((2, 4))
S = pairwise_distances(X, Y, metric="euclidean")
S2 = euclidean_distances(X, Y)
assert_array_almost_equal(S, S2)
# Test with tuples as X and Y
X_tuples = tuple([tuple([v for v in row]) for row in X])
Y_tuples = tuple([tuple([v for v in row]) for row in Y])
S2 = pairwise_distances(X_tuples, Y_tuples, metric="euclidean")
assert_array_almost_equal(S, S2)
# "cityblock" uses sklearn metric, cityblock (function) is scipy.spatial.
S = pairwise_distances(X, metric="cityblock")
S2 = pairwise_distances(X, metric=cityblock)
assert_equal(S.shape[0], S.shape[1])
assert_equal(S.shape[0], X.shape[0])
assert_array_almost_equal(S, S2)
# The manhattan metric should be equivalent to cityblock.
S = pairwise_distances(X, Y, metric="manhattan")
S2 = pairwise_distances(X, Y, metric=cityblock)
assert_equal(S.shape[0], X.shape[0])
assert_equal(S.shape[1], Y.shape[0])
assert_array_almost_equal(S, S2)
# Low-level function for manhattan can divide in blocks to avoid
# using too much memory during the broadcasting
S3 = manhattan_distances(X, Y, size_threshold=10)
assert_array_almost_equal(S, S3)
# Test cosine as a string metric versus cosine callable
# "cosine" uses sklearn metric, cosine (function) is scipy.spatial
S = pairwise_distances(X, Y, metric="cosine")
S2 = pairwise_distances(X, Y, metric=cosine)
assert_equal(S.shape[0], X.shape[0])
assert_equal(S.shape[1], Y.shape[0])
assert_array_almost_equal(S, S2)
# Test with sparse X and Y,
# currently only supported for Euclidean, L1 and cosine.
X_sparse = csr_matrix(X)
Y_sparse = csr_matrix(Y)
S = pairwise_distances(X_sparse, Y_sparse, metric="euclidean")
S2 = euclidean_distances(X_sparse, Y_sparse)
assert_array_almost_equal(S, S2)
S = pairwise_distances(X_sparse, Y_sparse, metric="cosine")
S2 = cosine_distances(X_sparse, Y_sparse)
assert_array_almost_equal(S, S2)
S = pairwise_distances(X_sparse, Y_sparse.tocsc(), metric="manhattan")
S2 = manhattan_distances(X_sparse.tobsr(), Y_sparse.tocoo())
assert_array_almost_equal(S, S2)
S2 = manhattan_distances(X, Y)
assert_array_almost_equal(S, S2)
# Test with scipy.spatial.distance metric, with a kwd
kwds = {"p": 2.0}
S = pairwise_distances(X, Y, metric="minkowski", **kwds)
S2 = pairwise_distances(X, Y, metric=minkowski, **kwds)
assert_array_almost_equal(S, S2)
# same with Y = None
kwds = {"p": 2.0}
S = pairwise_distances(X, metric="minkowski", **kwds)
S2 = pairwise_distances(X, metric=minkowski, **kwds)
assert_array_almost_equal(S, S2)
# Test that scipy distance metrics throw an error if sparse matrix given
assert_raises(TypeError, pairwise_distances, X_sparse, metric="minkowski")
assert_raises(TypeError, pairwise_distances, X, Y_sparse,
metric="minkowski")
    # Test that a ValueError is raised if the metric is unknown
assert_raises(ValueError, pairwise_distances, X, Y, metric="blah")
def test_pairwise_precomputed():
for func in [pairwise_distances, pairwise_kernels]:
# Test correct shape
assert_raises_regexp(ValueError, '.* shape .*',
func, np.zeros((5, 3)), metric='precomputed')
# with two args
assert_raises_regexp(ValueError, '.* shape .*',
func, np.zeros((5, 3)), np.zeros((4, 4)),
metric='precomputed')
        # even if shape[1] agrees (although this second arg is spurious)
assert_raises_regexp(ValueError, '.* shape .*',
func, np.zeros((5, 3)), np.zeros((4, 3)),
metric='precomputed')
# Test not copied (if appropriate dtype)
S = np.zeros((5, 5))
S2 = func(S, metric="precomputed")
assert_true(S is S2)
# with two args
S = np.zeros((5, 3))
S2 = func(S, np.zeros((3, 3)), metric="precomputed")
assert_true(S is S2)
# Test always returns float dtype
S = func(np.array([[1]], dtype='int'), metric='precomputed')
assert_equal('f', S.dtype.kind)
# Test converts list to array-like
S = func([[1]], metric='precomputed')
assert_true(isinstance(S, np.ndarray))
def check_pairwise_parallel(func, metric, kwds):
rng = np.random.RandomState(0)
for make_data in (np.array, csr_matrix):
X = make_data(rng.random_sample((5, 4)))
Y = make_data(rng.random_sample((3, 4)))
try:
S = func(X, metric=metric, n_jobs=1, **kwds)
except (TypeError, ValueError) as exc:
# Not all metrics support sparse input
# ValueError may be triggered by bad callable
if make_data is csr_matrix:
assert_raises(type(exc), func, X, metric=metric,
n_jobs=2, **kwds)
continue
else:
raise
S2 = func(X, metric=metric, n_jobs=2, **kwds)
assert_array_almost_equal(S, S2)
S = func(X, Y, metric=metric, n_jobs=1, **kwds)
S2 = func(X, Y, metric=metric, n_jobs=2, **kwds)
assert_array_almost_equal(S, S2)
def test_pairwise_parallel():
wminkowski_kwds = {'w': np.arange(1, 5).astype('double'), 'p': 1}
metrics = [(pairwise_distances, 'euclidean', {}),
(pairwise_distances, wminkowski, wminkowski_kwds),
(pairwise_distances, 'wminkowski', wminkowski_kwds),
(pairwise_kernels, 'polynomial', {'degree': 1}),
(pairwise_kernels, callable_rbf_kernel, {'gamma': .1}),
]
for func, metric, kwds in metrics:
yield check_pairwise_parallel, func, metric, kwds
def test_pairwise_callable_nonstrict_metric():
    # pairwise_distances should allow a callable metric where metric(x, x) != 0
# Knowing that the callable is a strict metric would allow the diagonal to
# be left uncalculated and set to 0.
assert_equal(pairwise_distances([[1]], metric=lambda x, y: 5)[0, 0], 5)
def callable_rbf_kernel(x, y, **kwds):
# Callable version of pairwise.rbf_kernel.
K = rbf_kernel(np.atleast_2d(x), np.atleast_2d(y), **kwds)
return K
def test_pairwise_kernels():
# Test the pairwise_kernels helper function.
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
Y = rng.random_sample((2, 4))
# Test with all metrics that should be in PAIRWISE_KERNEL_FUNCTIONS.
test_metrics = ["rbf", "sigmoid", "polynomial", "linear", "chi2",
"additive_chi2"]
for metric in test_metrics:
function = PAIRWISE_KERNEL_FUNCTIONS[metric]
# Test with Y=None
K1 = pairwise_kernels(X, metric=metric)
K2 = function(X)
assert_array_almost_equal(K1, K2)
# Test with Y=Y
K1 = pairwise_kernels(X, Y=Y, metric=metric)
K2 = function(X, Y=Y)
assert_array_almost_equal(K1, K2)
# Test with tuples as X and Y
X_tuples = tuple([tuple([v for v in row]) for row in X])
Y_tuples = tuple([tuple([v for v in row]) for row in Y])
K2 = pairwise_kernels(X_tuples, Y_tuples, metric=metric)
assert_array_almost_equal(K1, K2)
# Test with sparse X and Y
X_sparse = csr_matrix(X)
Y_sparse = csr_matrix(Y)
if metric in ["chi2", "additive_chi2"]:
# these don't support sparse matrices yet
assert_raises(ValueError, pairwise_kernels,
X_sparse, Y=Y_sparse, metric=metric)
continue
K1 = pairwise_kernels(X_sparse, Y=Y_sparse, metric=metric)
assert_array_almost_equal(K1, K2)
# Test with a callable function, with given keywords.
metric = callable_rbf_kernel
kwds = {}
kwds['gamma'] = 0.1
K1 = pairwise_kernels(X, Y=Y, metric=metric, **kwds)
K2 = rbf_kernel(X, Y=Y, **kwds)
assert_array_almost_equal(K1, K2)
# callable function, X=Y
K1 = pairwise_kernels(X, Y=X, metric=metric, **kwds)
K2 = rbf_kernel(X, Y=X, **kwds)
assert_array_almost_equal(K1, K2)
def test_pairwise_kernels_filter_param():
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
Y = rng.random_sample((2, 4))
K = rbf_kernel(X, Y, gamma=0.1)
params = {"gamma": 0.1, "blabla": ":)"}
K2 = pairwise_kernels(X, Y, metric="rbf", filter_params=True, **params)
assert_array_almost_equal(K, K2)
assert_raises(TypeError, pairwise_kernels, X, Y, "rbf", **params)
def test_paired_distances():
    # Test the paired_distances helper function.
rng = np.random.RandomState(0)
# Euclidean distance should be equivalent to calling the function.
X = rng.random_sample((5, 4))
# Euclidean distance, with Y != X.
Y = rng.random_sample((5, 4))
for metric, func in iteritems(PAIRED_DISTANCES):
S = paired_distances(X, Y, metric=metric)
S2 = func(X, Y)
assert_array_almost_equal(S, S2)
S3 = func(csr_matrix(X), csr_matrix(Y))
assert_array_almost_equal(S, S3)
if metric in PAIRWISE_DISTANCE_FUNCTIONS:
            # Check that the pairwise_distances implementation
            # gives the same value
distances = PAIRWISE_DISTANCE_FUNCTIONS[metric](X, Y)
distances = np.diag(distances)
assert_array_almost_equal(distances, S)
# Check the callable implementation
S = paired_distances(X, Y, metric='manhattan')
S2 = paired_distances(X, Y, metric=lambda x, y: np.abs(x - y).sum(axis=0))
assert_array_almost_equal(S, S2)
    # Test that a ValueError is raised when the lengths of X and Y differ
Y = rng.random_sample((3, 4))
assert_raises(ValueError, paired_distances, X, Y)
def test_pairwise_distances_argmin_min():
# Check pairwise minimum distances computation for any metric
X = [[0], [1]]
Y = [[-1], [2]]
Xsp = dok_matrix(X)
Ysp = csr_matrix(Y, dtype=np.float32)
# euclidean metric
D, E = pairwise_distances_argmin_min(X, Y, metric="euclidean")
D2 = pairwise_distances_argmin(X, Y, metric="euclidean")
assert_array_almost_equal(D, [0, 1])
assert_array_almost_equal(D2, [0, 1])
assert_array_almost_equal(E, [1., 1.])
# sparse matrix case
Dsp, Esp = pairwise_distances_argmin_min(Xsp, Ysp, metric="euclidean")
assert_array_equal(Dsp, D)
assert_array_equal(Esp, E)
# We don't want np.matrix here
assert_equal(type(Dsp), np.ndarray)
assert_equal(type(Esp), np.ndarray)
# Non-euclidean sklearn metric
D, E = pairwise_distances_argmin_min(X, Y, metric="manhattan")
D2 = pairwise_distances_argmin(X, Y, metric="manhattan")
assert_array_almost_equal(D, [0, 1])
assert_array_almost_equal(D2, [0, 1])
assert_array_almost_equal(E, [1., 1.])
D, E = pairwise_distances_argmin_min(Xsp, Ysp, metric="manhattan")
D2 = pairwise_distances_argmin(Xsp, Ysp, metric="manhattan")
assert_array_almost_equal(D, [0, 1])
assert_array_almost_equal(E, [1., 1.])
# Non-euclidean Scipy distance (callable)
D, E = pairwise_distances_argmin_min(X, Y, metric=minkowski,
metric_kwargs={"p": 2})
assert_array_almost_equal(D, [0, 1])
assert_array_almost_equal(E, [1., 1.])
# Non-euclidean Scipy distance (string)
D, E = pairwise_distances_argmin_min(X, Y, metric="minkowski",
metric_kwargs={"p": 2})
assert_array_almost_equal(D, [0, 1])
assert_array_almost_equal(E, [1., 1.])
# Compare with naive implementation
rng = np.random.RandomState(0)
X = rng.randn(97, 149)
Y = rng.randn(111, 149)
dist = pairwise_distances(X, Y, metric="manhattan")
dist_orig_ind = dist.argmin(axis=0)
dist_orig_val = dist[dist_orig_ind, range(len(dist_orig_ind))]
dist_chunked_ind, dist_chunked_val = pairwise_distances_argmin_min(
X, Y, axis=0, metric="manhattan", batch_size=50)
np.testing.assert_almost_equal(dist_orig_ind, dist_chunked_ind, decimal=7)
np.testing.assert_almost_equal(dist_orig_val, dist_chunked_val, decimal=7)
def test_euclidean_distances():
# Check the pairwise Euclidean distances computation
X = [[0]]
Y = [[1], [2]]
D = euclidean_distances(X, Y)
assert_array_almost_equal(D, [[1., 2.]])
X = csr_matrix(X)
Y = csr_matrix(Y)
D = euclidean_distances(X, Y)
assert_array_almost_equal(D, [[1., 2.]])
rng = np.random.RandomState(0)
X = rng.random_sample((10, 4))
Y = rng.random_sample((20, 4))
X_norm_sq = (X ** 2).sum(axis=1)
Y_norm_sq = (Y ** 2).sum(axis=1)
# check that we still get the right answers with {X,Y}_norm_squared
D1 = euclidean_distances(X, Y)
D2 = euclidean_distances(X, Y, X_norm_squared=X_norm_sq)
D3 = euclidean_distances(X, Y, Y_norm_squared=Y_norm_sq)
D4 = euclidean_distances(X, Y, X_norm_squared=X_norm_sq,
Y_norm_squared=Y_norm_sq)
assert_array_almost_equal(D2, D1)
assert_array_almost_equal(D3, D1)
assert_array_almost_equal(D4, D1)
# check we get the wrong answer with wrong {X,Y}_norm_squared
X_norm_sq *= 0.5
Y_norm_sq *= 0.5
wrong_D = euclidean_distances(X, Y,
X_norm_squared=np.zeros_like(X_norm_sq),
Y_norm_squared=np.zeros_like(Y_norm_sq))
assert_greater(np.max(np.abs(wrong_D - D1)), .01)
# Paired distances
def test_paired_euclidean_distances():
# Check the paired Euclidean distances computation
X = [[0], [0]]
Y = [[1], [2]]
D = paired_euclidean_distances(X, Y)
assert_array_almost_equal(D, [1., 2.])
def test_paired_manhattan_distances():
# Check the paired manhattan distances computation
X = [[0], [0]]
Y = [[1], [2]]
D = paired_manhattan_distances(X, Y)
assert_array_almost_equal(D, [1., 2.])
def test_chi_square_kernel():
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
Y = rng.random_sample((10, 4))
K_add = additive_chi2_kernel(X, Y)
gamma = 0.1
K = chi2_kernel(X, Y, gamma=gamma)
assert_equal(K.dtype, np.float)
for i, x in enumerate(X):
for j, y in enumerate(Y):
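            # reference values: additive chi2 is -sum((x - y)**2 / (x + y)),
            # and the exponentiated kernel is exp(gamma * additive_chi2)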
chi2 = -np.sum((x - y) ** 2 / (x + y))
chi2_exp = np.exp(gamma * chi2)
assert_almost_equal(K_add[i, j], chi2)
assert_almost_equal(K[i, j], chi2_exp)
# check diagonal is ones for data with itself
K = chi2_kernel(Y)
assert_array_equal(np.diag(K), 1)
# check off-diagonal is < 1 but > 0:
assert_true(np.all(K > 0))
assert_true(np.all(K - np.diag(np.diag(K)) < 1))
# check that float32 is preserved
X = rng.random_sample((5, 4)).astype(np.float32)
Y = rng.random_sample((10, 4)).astype(np.float32)
K = chi2_kernel(X, Y)
assert_equal(K.dtype, np.float32)
# check integer type gets converted,
# check that zeros are handled
X = rng.random_sample((10, 4)).astype(np.int32)
K = chi2_kernel(X, X)
assert_true(np.isfinite(K).all())
assert_equal(K.dtype, np.float)
# check that kernel of similar things is greater than dissimilar ones
X = [[.3, .7], [1., 0]]
Y = [[0, 1], [.9, .1]]
K = chi2_kernel(X, Y)
assert_greater(K[0, 0], K[0, 1])
assert_greater(K[1, 1], K[1, 0])
# test negative input
assert_raises(ValueError, chi2_kernel, [[0, -1]])
assert_raises(ValueError, chi2_kernel, [[0, -1]], [[-1, -1]])
assert_raises(ValueError, chi2_kernel, [[0, 1]], [[-1, -1]])
# different n_features in X and Y
assert_raises(ValueError, chi2_kernel, [[0, 1]], [[.2, .2, .6]])
# sparse matrices
assert_raises(ValueError, chi2_kernel, csr_matrix(X), csr_matrix(Y))
assert_raises(ValueError, additive_chi2_kernel,
csr_matrix(X), csr_matrix(Y))
def test_kernel_symmetry():
# Valid kernels should be symmetric
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
for kernel in (linear_kernel, polynomial_kernel, rbf_kernel,
sigmoid_kernel, cosine_similarity):
K = kernel(X, X)
assert_array_almost_equal(K, K.T, 15)
def test_kernel_sparse():
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
X_sparse = csr_matrix(X)
for kernel in (linear_kernel, polynomial_kernel, rbf_kernel,
sigmoid_kernel, cosine_similarity):
K = kernel(X, X)
K2 = kernel(X_sparse, X_sparse)
assert_array_almost_equal(K, K2)
def test_linear_kernel():
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
K = linear_kernel(X, X)
    # the diagonal elements of a linear kernel are the squared norms of the samples
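    # (with 5 samples, K is 5x5, so K.flat[::6] strides along its main diagonal)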
assert_array_almost_equal(K.flat[::6], [linalg.norm(x) ** 2 for x in X])
def test_rbf_kernel():
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
K = rbf_kernel(X, X)
    # the diagonal elements of an rbf kernel are 1
assert_array_almost_equal(K.flat[::6], np.ones(5))
def test_cosine_similarity_sparse_output():
# Test if cosine_similarity correctly produces sparse output.
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
Y = rng.random_sample((3, 4))
Xcsr = csr_matrix(X)
Ycsr = csr_matrix(Y)
K1 = cosine_similarity(Xcsr, Ycsr, dense_output=False)
assert_true(issparse(K1))
K2 = pairwise_kernels(Xcsr, Y=Ycsr, metric="cosine")
assert_array_almost_equal(K1.todense(), K2)
def test_cosine_similarity():
# Test the cosine_similarity.
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
Y = rng.random_sample((3, 4))
Xcsr = csr_matrix(X)
Ycsr = csr_matrix(Y)
for X_, Y_ in ((X, None), (X, Y),
(Xcsr, None), (Xcsr, Ycsr)):
        # Test that the cosine kernel is equal to a linear kernel when the
        # data has been previously normalized by its L2-norm.
K1 = pairwise_kernels(X_, Y=Y_, metric="cosine")
X_ = normalize(X_)
if Y_ is not None:
Y_ = normalize(Y_)
K2 = pairwise_kernels(X_, Y=Y_, metric="linear")
assert_array_almost_equal(K1, K2)
def test_check_dense_matrices():
# Ensure that pairwise array check works for dense matrices.
# Check that if XB is None, XB is returned as reference to XA
XA = np.resize(np.arange(40), (5, 8))
XA_checked, XB_checked = check_pairwise_arrays(XA, None)
assert_true(XA_checked is XB_checked)
assert_array_equal(XA, XA_checked)
def test_check_XB_returned():
    # Ensure that when XA and XB are both given, they are returned as equal arrays.
# Note that the second dimension of XB is the same as XA.
XA = np.resize(np.arange(40), (5, 8))
XB = np.resize(np.arange(32), (4, 8))
XA_checked, XB_checked = check_pairwise_arrays(XA, XB)
assert_array_equal(XA, XA_checked)
assert_array_equal(XB, XB_checked)
XB = np.resize(np.arange(40), (5, 8))
XA_checked, XB_checked = check_paired_arrays(XA, XB)
assert_array_equal(XA, XA_checked)
assert_array_equal(XB, XB_checked)
def test_check_different_dimensions():
# Ensure an error is raised if the dimensions are different.
XA = np.resize(np.arange(45), (5, 9))
XB = np.resize(np.arange(32), (4, 8))
assert_raises(ValueError, check_pairwise_arrays, XA, XB)
XB = np.resize(np.arange(4 * 9), (4, 9))
assert_raises(ValueError, check_paired_arrays, XA, XB)
def test_check_invalid_dimensions():
# Ensure an error is raised on 1D input arrays.
XA = np.arange(45)
XB = np.resize(np.arange(32), (4, 8))
assert_raises(ValueError, check_pairwise_arrays, XA, XB)
XA = np.resize(np.arange(45), (5, 9))
XB = np.arange(32)
assert_raises(ValueError, check_pairwise_arrays, XA, XB)
def test_check_sparse_arrays():
# Ensures that checks return valid sparse matrices.
rng = np.random.RandomState(0)
XA = rng.random_sample((5, 4))
XA_sparse = csr_matrix(XA)
XB = rng.random_sample((5, 4))
XB_sparse = csr_matrix(XB)
XA_checked, XB_checked = check_pairwise_arrays(XA_sparse, XB_sparse)
# compare their difference because testing csr matrices for
# equality with '==' does not work as expected.
assert_true(issparse(XA_checked))
assert_equal(abs(XA_sparse - XA_checked).sum(), 0)
assert_true(issparse(XB_checked))
assert_equal(abs(XB_sparse - XB_checked).sum(), 0)
XA_checked, XA_2_checked = check_pairwise_arrays(XA_sparse, XA_sparse)
assert_true(issparse(XA_checked))
assert_equal(abs(XA_sparse - XA_checked).sum(), 0)
assert_true(issparse(XA_2_checked))
assert_equal(abs(XA_2_checked - XA_checked).sum(), 0)
def tuplify(X):
# Turns a numpy matrix (any n-dimensional array) into tuples.
s = X.shape
if len(s) > 1:
# Tuplify each sub-array in the input.
return tuple(tuplify(row) for row in X)
else:
# Single dimension input, just return tuple of contents.
return tuple(r for r in X)
def test_check_tuple_input():
# Ensures that checks return valid tuples.
rng = np.random.RandomState(0)
XA = rng.random_sample((5, 4))
XA_tuples = tuplify(XA)
XB = rng.random_sample((5, 4))
XB_tuples = tuplify(XB)
XA_checked, XB_checked = check_pairwise_arrays(XA_tuples, XB_tuples)
assert_array_equal(XA_tuples, XA_checked)
assert_array_equal(XB_tuples, XB_checked)
def test_check_preserve_type():
# Ensures that type float32 is preserved.
XA = np.resize(np.arange(40), (5, 8)).astype(np.float32)
XB = np.resize(np.arange(40), (5, 8)).astype(np.float32)
XA_checked, XB_checked = check_pairwise_arrays(XA, None)
assert_equal(XA_checked.dtype, np.float32)
# both float32
XA_checked, XB_checked = check_pairwise_arrays(XA, XB)
assert_equal(XA_checked.dtype, np.float32)
assert_equal(XB_checked.dtype, np.float32)
# mismatched A
XA_checked, XB_checked = check_pairwise_arrays(XA.astype(np.float),
XB)
assert_equal(XA_checked.dtype, np.float)
assert_equal(XB_checked.dtype, np.float)
# mismatched B
XA_checked, XB_checked = check_pairwise_arrays(XA,
XB.astype(np.float))
assert_equal(XA_checked.dtype, np.float)
assert_equal(XB_checked.dtype, np.float)
| bsd-3-clause |
ChadFulton/statsmodels | statsmodels/tsa/filters/bk_filter.py | 1 | 3199 | from __future__ import absolute_import
import numpy as np
from scipy.signal import fftconvolve
from ._utils import _maybe_get_pandas_wrapper
def bkfilter(X, low=6, high=32, K=12):
"""
Baxter-King bandpass filter
Parameters
----------
X : array-like
A 1 or 2d ndarray. If 2d, variables are assumed to be in columns.
low : float
        Minimum period for oscillations, i.e., Baxter and King suggest that
the Burns-Mitchell U.S. business cycle has 6 for quarterly data and
1.5 for annual data.
high : float
        Maximum period for oscillations. BK suggest that the U.S.
business cycle has 32 for quarterly data and 8 for annual data.
K : int
Lead-lag length of the filter. Baxter and King propose a truncation
length of 12 for quarterly data and 3 for annual data.
Returns
-------
Y : array
Cyclical component of X
References
---------- ::
Baxter, M. and R. G. King. "Measuring Business Cycles: Approximate
Band-Pass Filters for Economic Time Series." *Review of Economics and
Statistics*, 1999, 81(4), 575-593.
Notes
-----
Returns a centered weighted moving average of the original series. Where
the weights a[j] are computed ::
a[j] = b[j] + theta, for j = 0, +/-1, +/-2, ... +/- K
b[0] = (omega_2 - omega_1)/pi
      b[j] = 1/(pi*j)*(sin(omega_2*j)-sin(omega_1*j)), for j = +/-1, +/-2,...
and theta is a normalizing constant ::
theta = -sum(b)/(2K+1)
Examples
--------
>>> import statsmodels.api as sm
>>> import pandas as pd
>>> dta = sm.datasets.macrodata.load_pandas().data
>>> index = pd.DatetimeIndex(start='1959Q1', end='2009Q4', freq='Q')
>>> dta.set_index(index, inplace=True)
>>> cycles = sm.tsa.filters.bkfilter(dta[['realinv']], 6, 24, 12)
>>> import matplotlib.pyplot as plt
>>> fig, ax = plt.subplots()
>>> cycles.plot(ax=ax, style=['r--', 'b-'])
>>> plt.show()
.. plot:: plots/bkf_plot.py
See Also
--------
statsmodels.tsa.filters.cf_filter.cffilter
statsmodels.tsa.filters.hp_filter.hpfilter
statsmodels.tsa.seasonal.seasonal_decompose
"""
#TODO: change the docstring to ..math::?
#TODO: allow windowing functions to correct for Gibb's Phenomenon?
# adjust bweights (symmetrically) by below before demeaning
    # Lanczos Sigma Factors np.sinc(2*j/(2.*K+1))
_pandas_wrapper = _maybe_get_pandas_wrapper(X, K, K)
X = np.asarray(X)
    omega_1 = 2.*np.pi/high  # convert periodicity to angular frequency
omega_2 = 2.*np.pi/low
bweights = np.zeros(2*K+1)
bweights[K] = (omega_2 - omega_1)/np.pi # weight at zero freq.
j = np.arange(1,int(K)+1)
weights = 1/(np.pi*j)*(np.sin(omega_2*j)-np.sin(omega_1*j))
bweights[K+j] = weights # j is an idx
bweights[:K] = weights[::-1] # make symmetric weights
bweights -= bweights.mean() # make sure weights sum to zero
if X.ndim == 2:
bweights = bweights[:,None]
X = fftconvolve(X, bweights, mode='valid')
# get a centered moving avg/convolution
if _pandas_wrapper is not None:
return _pandas_wrapper(X)
return X
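# A minimal self-check of the weight construction described in the Notes
# above (an illustrative sketch, not part of the statsmodels API): the
# demeaned Baxter-King weights must sum to zero, which is what removes the
# zero-frequency (trend) component.
if __name__ == "__main__":
    _K, _low, _high = 12, 6, 32  # Baxter-King defaults for quarterly data
    _w1, _w2 = 2. * np.pi / _high, 2. * np.pi / _low
    _b = np.zeros(2 * _K + 1)
    _b[_K] = (_w2 - _w1) / np.pi
    _j = np.arange(1, _K + 1)
    _b[_K + _j] = 1. / (np.pi * _j) * (np.sin(_w2 * _j) - np.sin(_w1 * _j))
    _b[:_K] = _b[_K + _j][::-1]  # symmetric lead/lag weights
    _b -= _b.mean()              # demean, as in bkfilter above
    assert abs(_b.sum()) < 1e-12  # weights sum to (numerically) zero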
| bsd-3-clause |
richardgroves/namebench | nb_third_party/jinja2/visitor.py | 1402 | 3316 | # -*- coding: utf-8 -*-
"""
jinja2.visitor
~~~~~~~~~~~~~~
This module implements a visitor for the nodes.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from jinja2.nodes import Node
class NodeVisitor(object):
"""Walks the abstract syntax tree and call visitor functions for every
node found. The visitor functions may return values which will be
forwarded by the `visit` method.
    By default, the visitor functions for the nodes are ``'visit_'`` +
class name of the node. So a `TryFinally` node visit function would
be `visit_TryFinally`. This behavior can be changed by overriding
the `get_visitor` function. If no visitor function exists for a node
(return value `None`) the `generic_visit` visitor is used instead.
"""
def get_visitor(self, node):
"""Return the visitor function for this node or `None` if no visitor
exists for this node. In that case the generic visit function is
used instead.
"""
method = 'visit_' + node.__class__.__name__
return getattr(self, method, None)
def visit(self, node, *args, **kwargs):
"""Visit a node."""
f = self.get_visitor(node)
if f is not None:
return f(node, *args, **kwargs)
return self.generic_visit(node, *args, **kwargs)
def generic_visit(self, node, *args, **kwargs):
"""Called if no explicit visitor function exists for a node."""
for node in node.iter_child_nodes():
self.visit(node, *args, **kwargs)
class NodeTransformer(NodeVisitor):
"""Walks the abstract syntax tree and allows modifications of nodes.
The `NodeTransformer` will walk the AST and use the return value of the
visitor functions to replace or remove the old node. If the return
value of the visitor function is `None` the node will be removed
from the previous location otherwise it's replaced with the return
value. The return value may be the original node in which case no
replacement takes place.
"""
def generic_visit(self, node, *args, **kwargs):
for field, old_value in node.iter_fields():
if isinstance(old_value, list):
new_values = []
for value in old_value:
if isinstance(value, Node):
value = self.visit(value, *args, **kwargs)
if value is None:
continue
elif not isinstance(value, Node):
new_values.extend(value)
continue
new_values.append(value)
old_value[:] = new_values
elif isinstance(old_value, Node):
new_node = self.visit(old_value, *args, **kwargs)
if new_node is None:
delattr(node, field)
else:
setattr(node, field, new_node)
return node
def visit_list(self, node, *args, **kwargs):
"""As transformers may return lists in some places this method
can be used to enforce a list as return value.
"""
rv = self.visit(node, *args, **kwargs)
if not isinstance(rv, list):
rv = [rv]
return rv
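# A minimal usage sketch (illustrative only, not part of the Jinja2 API):
# a visitor that records the identifier of every ``Name`` node it visits.
# ``Name`` nodes are defined in ``jinja2.nodes`` and carry a ``name``
# attribute.
class _ExampleNameCollector(NodeVisitor):
    """Collect identifiers from a template AST via ``visit(ast)``."""
    def __init__(self):
        self.names = []
    def visit_Name(self, node, *args, **kwargs):
        self.names.append(node.name)
        self.generic_visit(node, *args, **kwargs)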
| apache-2.0 |
tapomayukh/projects_in_python | classification/Classification_with_kNN/Single_Contact_Classification/Scaled_Features/results/objects/test10_cross_validate_objects_1200ms_scaled_method_iv.py | 1 | 4553 |
# Principal Component Analysis Code :
from numpy import mean,cov,double,cumsum,dot,linalg,array,rank,size,flipud
from pylab import *
import numpy as np
import matplotlib.pyplot as pp
#from enthought.mayavi import mlab
import scipy.ndimage as ni
import roslib; roslib.load_manifest('sandbox_tapo_darpa_m3')
import rospy
#import hrl_lib.mayavi2_util as mu
import hrl_lib.viz as hv
import hrl_lib.util as ut
import hrl_lib.matplotlib_util as mpu
import pickle
from mvpa.clfs.knn import kNN
from mvpa.datasets import Dataset
from mvpa.clfs.transerror import TransferError
from mvpa.misc.data_generators import normalFeatureDataset
from mvpa.algorithms.cvtranserror import CrossValidatedTransferError
from mvpa.datasets.splitters import NFoldSplitter
import sys
sys.path.insert(0, '/home/tapo/svn/robot1_data/usr/tapo/data_code/Classification/Data/Single_Contact_kNN/Scaled')
from data_method_IV import Fmat_original
def pca(X):
#get dimensions
num_data,dim = X.shape
#center data
mean_X = X.mean(axis=1)
M = (X-mean_X) # subtract the mean (along columns)
Mcov = cov(M)
###### Sanity Check ######
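    # (x != x) holds only for NaN, so this loop counts NaN entries; the
    # bounds are hard-coded to this data set's 123x140 feature matrix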
i=0
n=0
while i < 123:
j=0
while j < 140:
if X[i,j] != X[i,j]:
print X[i,j]
print i,j
n=n+1
j = j+1
i=i+1
print n
##########################
print 'PCA - COV-Method used'
val,vec = linalg.eig(Mcov)
#return the projection matrix, the variance and the mean
return vec,val,mean_X, M, Mcov
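# How the pieces returned by pca() fit together (a sketch, assuming the
# convention above that rows are features and columns are samples):
#   vec, val, mu, M, C = pca(data)    # C = cov(M); vec, val diagonalize C
#   Y = vec[:, :k].T * np.matrix(M)   # project centered data onto top-k PCs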
if __name__ == '__main__':
Fmat = Fmat_original
# Checking the Data-Matrix
m_tot, n_tot = np.shape(Fmat)
print 'Total_Matrix_Shape:',m_tot,n_tot
eigvec_total, eigval_total, mean_data_total, B, C = pca(Fmat)
#print eigvec_total
#print eigval_total
#print mean_data_total
m_eigval_total, n_eigval_total = np.shape(np.matrix(eigval_total))
m_eigvec_total, n_eigvec_total = np.shape(eigvec_total)
m_mean_data_total, n_mean_data_total = np.shape(np.matrix(mean_data_total))
print 'Eigenvalue Shape:',m_eigval_total, n_eigval_total
print 'Eigenvector Shape:',m_eigvec_total, n_eigvec_total
print 'Mean-Data Shape:',m_mean_data_total, n_mean_data_total
#Recall that the cumulative sum of the eigenvalues shows the level of variance accounted by each of the corresponding eigenvectors. On the x axis there is the number of eigenvalues used.
perc_total = cumsum(eigval_total)/sum(eigval_total)
    # Reduced eigenvector matrix keeping the highest eigenvalues (the first 13, based on the cumulative variance above)
W = eigvec_total[:,0:13]
m_W, n_W = np.shape(W)
print 'Reduced Dimension Eigenvector Shape:',m_W, n_W
    # Normalizes the data set with respect to its variance (not an integral part of PCA, but useful)
length = len(eigval_total)
s = np.matrix(np.zeros(length)).T
i = 0
while i < length:
s[i] = sqrt(C[i,i])
i = i+1
Z = np.divide(B,s)
m_Z, n_Z = np.shape(Z)
print 'Z-Score Shape:', m_Z, n_Z
#Projected Data:
Y = (W.T)*B # 'B' for my Laptop: otherwise 'Z' instead of 'B'
m_Y, n_Y = np.shape(Y.T)
print 'Transposed Projected Data Shape:', m_Y, n_Y
#Using PYMVPA
PCA_data = np.array(Y.T)
PCA_label_2 = ['Styrofoam-Fixed']*5 + ['Books-Fixed']*5 + ['Bucket-Fixed']*5 + ['Bowl-Fixed']*5 + ['Can-Fixed']*5 + ['Box-Fixed']*5 + ['Pipe-Fixed']*5 + ['Styrofoam-Movable']*5 + ['Container-Movable']*5 + ['Books-Movable']*5 + ['Cloth-Roll-Movable']*5 + ['Black-Rubber-Movable']*5 + ['Can-Movable']*5 + ['Box-Movable']*5 + ['Rug-Fixed']*5 + ['Bubble-Wrap-1-Fixed']*5 + ['Pillow-1-Fixed']*5 + ['Bubble-Wrap-2-Fixed']*5 + ['Sponge-Fixed']*5 + ['Foliage-Fixed']*5 + ['Pillow-2-Fixed']*5 + ['Rug-Movable']*5 + ['Bubble-Wrap-1-Movable']*5 + ['Pillow-1-Movable']*5 + ['Bubble-Wrap-2-Movable']*5 + ['Pillow-2-Movable']*5 + ['Plush-Toy-Movable']*5 + ['Sponge-Movable']*5
clf = kNN(k=1)
terr = TransferError(clf)
ds1 = Dataset(samples=PCA_data,labels=PCA_label_2)
print ds1.samples.shape
cvterr = CrossValidatedTransferError(terr,NFoldSplitter(cvtype=1),enable_states=['confusion'])
error = cvterr(ds1)
print error
print cvterr.confusion.asstring(description=False)
figure(1)
cvterr.confusion.plot(numbers='True',numbers_alpha=2)
#show()
# Variances
figure(2)
title('Variances of PCs')
stem(range(len(perc_total)),perc_total,'--b')
axis([-0.3,130.3,0,1.2])
grid('True')
show()
| mit |
michaelgugino/turbo-lister | jinja2/visitor.py | 1402 | 3316 | # -*- coding: utf-8 -*-
"""
jinja2.visitor
~~~~~~~~~~~~~~
This module implements a visitor for the nodes.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from jinja2.nodes import Node
class NodeVisitor(object):
"""Walks the abstract syntax tree and call visitor functions for every
node found. The visitor functions may return values which will be
forwarded by the `visit` method.
    By default, the visitor functions for the nodes are ``'visit_'`` +
class name of the node. So a `TryFinally` node visit function would
be `visit_TryFinally`. This behavior can be changed by overriding
the `get_visitor` function. If no visitor function exists for a node
(return value `None`) the `generic_visit` visitor is used instead.
"""
def get_visitor(self, node):
"""Return the visitor function for this node or `None` if no visitor
exists for this node. In that case the generic visit function is
used instead.
"""
method = 'visit_' + node.__class__.__name__
return getattr(self, method, None)
def visit(self, node, *args, **kwargs):
"""Visit a node."""
f = self.get_visitor(node)
if f is not None:
return f(node, *args, **kwargs)
return self.generic_visit(node, *args, **kwargs)
def generic_visit(self, node, *args, **kwargs):
"""Called if no explicit visitor function exists for a node."""
for node in node.iter_child_nodes():
self.visit(node, *args, **kwargs)
class NodeTransformer(NodeVisitor):
"""Walks the abstract syntax tree and allows modifications of nodes.
The `NodeTransformer` will walk the AST and use the return value of the
visitor functions to replace or remove the old node. If the return
value of the visitor function is `None` the node will be removed
from the previous location otherwise it's replaced with the return
value. The return value may be the original node in which case no
replacement takes place.
"""
def generic_visit(self, node, *args, **kwargs):
for field, old_value in node.iter_fields():
if isinstance(old_value, list):
new_values = []
for value in old_value:
if isinstance(value, Node):
value = self.visit(value, *args, **kwargs)
if value is None:
continue
elif not isinstance(value, Node):
new_values.extend(value)
continue
new_values.append(value)
old_value[:] = new_values
elif isinstance(old_value, Node):
new_node = self.visit(old_value, *args, **kwargs)
if new_node is None:
delattr(node, field)
else:
setattr(node, field, new_node)
return node
def visit_list(self, node, *args, **kwargs):
"""As transformers may return lists in some places this method
can be used to enforce a list as return value.
"""
rv = self.visit(node, *args, **kwargs)
if not isinstance(rv, list):
rv = [rv]
return rv
| gpl-3.0 |
jhaux/tensorflow | tensorflow/contrib/imperative/examples/mnist.py | 68 | 4576 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""MNIST training in imperative mode TensorFlow."""
# pylint: disable=redefined-outer-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.contrib.imperative as tf
from tensorflow.contrib.learn.python.learn.datasets.mnist import read_data_sets
IMAGE_SIZE = 28
IMAGE_PIXELS = IMAGE_SIZE * IMAGE_SIZE
NUM_CLASSES = 10
BATCH_SIZE = 100
NUM_EPOCHS = 2
LEARNING_RATE = 0.1
class Model(object):
"""Fully connected model for MNIST."""
def __init__(self, hidden1_units, hidden2_units):
"""Create the model parameters."""
self.params = []
# Hidden 1
with tf.name_scope('hidden1'):
self.weights1 = tf.Variable(
np.random.normal(scale=1.0 / np.sqrt(float(IMAGE_PIXELS)),
size=[IMAGE_PIXELS, hidden1_units]),
dtype=tf.float32,
name='weights')
self.biases1 = tf.Variable(
np.zeros([hidden1_units]),
dtype=tf.float32,
name='biases')
# Hidden 2
with tf.name_scope('hidden2'):
self.weights2 = tf.Variable(
np.random.normal(scale=1.0 / np.sqrt(float(hidden1_units)),
size=[hidden1_units, hidden2_units]),
dtype=tf.float32,
name='weights')
self.biases2 = tf.Variable(
np.zeros([hidden2_units]),
dtype=tf.float32,
name='biases')
# Linear
with tf.name_scope('softmax_linear'):
self.sm_w = tf.Variable(
np.random.normal(scale=1.0 / np.sqrt(float(hidden2_units)),
size=[hidden2_units, NUM_CLASSES]),
dtype=tf.float32,
name='weights')
self.sm_b = tf.Variable(
np.zeros([NUM_CLASSES]),
dtype=tf.float32,
name='biases')
self.params = [self.weights1, self.biases1,
self.weights2, self.biases2,
self.sm_w, self.sm_b]
def __call__(self, images):
"""Run the model's forward prop on `images`."""
hidden1 = tf.nn.relu(tf.matmul(images, self.weights1) + self.biases1)
hidden2 = tf.nn.relu(tf.matmul(hidden1, self.weights2) + self.biases2)
logits = tf.matmul(hidden2, self.sm_w) + self.sm_b
return logits
model = Model(128, 32)
data = read_data_sets('/tmp/mnist_train')
def get_test_accuracy():
"""Gets the model's classification accuracy on test data."""
num_examples = data.test.num_examples
test_images = np.split(data.test.images, num_examples/BATCH_SIZE)
test_labels = np.split(data.test.labels.astype(np.int32),
num_examples/BATCH_SIZE)
num_correct = 0
for _, (images, labels) in enumerate(zip(test_images, test_labels)):
with tf.new_step():
logits = model(images)
predictions = tf.argmax(tf.nn.softmax(logits), axis=1)
num_correct += np.sum(predictions.value == labels)
return float(num_correct) / float(num_examples)
num_examples = data.train.num_examples
train_images = np.split(data.train.images, num_examples/BATCH_SIZE)
train_labels = np.split(data.train.labels.astype(np.int32),
num_examples/BATCH_SIZE)
for epoch in range(NUM_EPOCHS):
for i, (images, labels) in enumerate(zip(train_images, train_labels)):
with tf.new_step() as step:
logits = model(images)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=labels, logits=logits, name='xentropy')
loss = tf.reduce_mean(cross_entropy, name='xentropy_mean')
gradients = tf.gradients(loss, model.params)
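      # Manual SGD step: subtract the scaled gradient from each parameter.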
step.run([v.assign_sub(LEARNING_RATE * g)
for g, v in zip(gradients, model.params)])
if i % 10 == 0:
print('Loss after {} steps = {}'.format(i, loss))
if i % 100 == 0:
print('Test accuracy after {} steps = {}'
.format(i, get_test_accuracy()))
| apache-2.0 |
smhaller/myo-ros | nn_classifier.py | 1 | 2697 | #!/usr/bin/env python
'''Wrapper for nearest-neighbor classifier
This script defines the NNClassifier class, which is used by train_myo_ros and
classify_myo_ros to train gestures and classify them. It stores the
training data in the files vals0.dat, vals1.dat, ..., vals9.dat.
When the sklearn library is available, a KNeighborsClassifier is used for
classification; otherwise the class of the nearest neighbor is returned.
This script is based on the myo.py file of the myo-raw project.
(see https://github.com/dzhu/myo-raw/ which is available under the MIT LICENSE)
Following changes where made:
- Removed code for the myo device, keeping the NNClassifier class.
'''
from __future__ import print_function
import struct
import numpy as np
try:
from sklearn import neighbors, svm
HAVE_SK = True
except ImportError:
HAVE_SK = False
def pack(fmt, *args):
return struct.pack('<' + fmt, *args)
SUBSAMPLE = 3
K = 15
class NNClassifier(object):
'''A wrapper for sklearn's nearest-neighbor classifier that stores
training data in vals0, ..., vals9.dat.'''
def __init__(self):
for i in range(10):
with open('vals%d.dat' % i, 'ab') as f: pass
self.read_data()
def store_data(self, cls, vals):
with open('vals%d.dat' % cls, 'ab') as f:
f.write(pack('8H', *vals))
# for i in range(8):
# f.write("%d " % vals[i])
# f.write("\n")
self.train(np.vstack([self.X, vals]), np.hstack([self.Y, [cls]]))
def read_data(self):
X = []
Y = []
for i in range(10):
X.append(np.fromfile('vals%d.dat' % i, dtype=np.uint16).reshape((-1, 8)))
# X.append(np.fromfile('vals%d.dat' % i, dtype=np.uint16, sep=" ").reshape((-1, 8)))
Y.append(i + np.zeros(X[-1].shape[0]))
self.train(np.vstack(X), np.hstack(Y))
def train(self, X, Y):
self.X = X
self.Y = Y
if HAVE_SK and self.X.shape[0] >= K * SUBSAMPLE:
self.nn = neighbors.KNeighborsClassifier(n_neighbors=K, algorithm='kd_tree')
self.nn.fit(self.X[::SUBSAMPLE], self.Y[::SUBSAMPLE])
else:
self.nn = None
def nearest(self, d):
dists = ((self.X - d)**2).sum(1)
ind = dists.argmin()
return self.Y[ind]
def classify(self, d):
if self.X.shape[0] < K * SUBSAMPLE: return 0
if not HAVE_SK: return self.nearest(d)
return int(self.nn.predict(d)[0])
def clearGestureFiles(self):
for i in range(10):
with open('vals%d.dat' % i, 'w') as f:
f.truncate()
self.read_data() | mit |
tensorflow/tensorflow-experimental_link_static_libraries_once | tensorflow/python/distribute/parallel_device/parallel_device_test.py | 10 | 25539 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
import threading
from absl.testing import parameterized
from tensorflow.python.checkpoint import checkpoint as tracking
from tensorflow.python.checkpoint import checkpoint_management
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.distribute.parallel_device import parallel_device
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.module import module
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import collective_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_resource_variable_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import stateful_random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.saved_model import load
from tensorflow.python.saved_model import save
from tensorflow.python.tpu import tpu_strategy_util
from tensorflow.python.util import nest
# When running collectives asynchronously, we need to give each parallel device
# execution a unique ID so the collectives don't interfere. Since the op is
# replicated with group/instance key intact, the replicated nodes will
# communicate.
# TODO(allenl): Switch to using a collective manager.
_COUNTER_LOCK = threading.Lock()
_COUNTER = 100
def _collective_reduce(inputs, operation, num_replicas):
def _reduce_tensor(tensor):
with _COUNTER_LOCK:
global _COUNTER
keys = _COUNTER
_COUNTER += 1
return collective_ops.all_reduce_v2(
t=tensor,
group_size=num_replicas,
merge_op=operation,
group_key=keys,
instance_key=keys)
return nest.map_structure(_reduce_tensor, inputs)
def _collective_sum(inputs, num_replicas):
return _collective_reduce(
inputs=inputs, operation="Add", num_replicas=num_replicas)
class _Dense(module.Module):
def __init__(self, output_size):
self.output_size = output_size
self.kernel = None
self.bias = None
def __call__(self, x):
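    # Parameters are created lazily on the first call, once the input
    # feature width is known.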
if self.kernel is None:
self.kernel = variables.Variable(
array_ops.ones(
array_ops.stack([self.output_size,
array_ops.shape(x)[-1]])))
self.bias = variables.Variable(array_ops.ones([self.output_size]))
return math_ops.matmul(x, self.kernel, transpose_b=True) + self.bias
class _VirtualDeviceTestCase(test.TestCase):
def setUp(self):
super(_VirtualDeviceTestCase, self).setUp()
ctx = context.context()
if ctx.list_physical_devices("TPU"):
self.device_type = "TPU"
tpu_strategy_util.initialize_tpu_system()
elif ctx.list_physical_devices("GPU"):
self.device_type = "GPU"
gpus = ctx.list_physical_devices(self.device_type)
ctx.set_logical_device_configuration(gpus[0], [
context.LogicalDeviceConfiguration(memory_limit=100),
context.LogicalDeviceConfiguration(memory_limit=100),
])
else:
self.device_type = "CPU"
cpus = ctx.list_physical_devices("CPU")
ctx.set_logical_device_configuration(cpus[0], [
context.LogicalDeviceConfiguration(),
context.LogicalDeviceConfiguration(),
])
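    # One physical device is split into two logical devices above so the
    # two-component parallel device below always has hardware to map onto.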
self.device = parallel_device.ParallelDevice(components=[
"/job:localhost/device:{}:0".format(self.device_type),
self.device_type + ":1"
])
self.assertIn(self.device_type + ":0", self.device.components[0])
self.assertIn(self.device_type + ":1", self.device.components[1])
class ParallelDeviceTests(_VirtualDeviceTestCase, parameterized.TestCase):
def test_register_parallel_device(self):
with self.device:
c = constant_op.constant(1.)
d = constant_op.constant(2.)
e = c + d
outputs = self.device.unpack(e)
self.assertAllClose([3., 3.], outputs)
self.assertIn(self.device.components[0], outputs[0].backing_device)
self.assertIn(self.device.components[1], outputs[1].backing_device)
def test_no_implicit_copyon(self):
a1 = constant_op.constant(1.)
a2 = constant_op.constant(2.)
with self.device:
with self.assertRaisesRegex(
errors.InvalidArgumentError,
"First pack non-parallel tensors for each device"):
a1 + a2 # pylint:disable=pointless-statement
def test_error_message_length(self):
x = array_ops.ones([3, 3, 3, 3, 3, 3])
with self.device:
with self.assertRaisesRegex(
errors.InvalidArgumentError,
r"TensorHandle\((.|\n){1,150}\[...\], shape="):
array_ops.identity(x)
def test_one_replica_eager_control_flow(self):
device = parallel_device.ParallelDevice(components=[
"/job:localhost/device:{}:0".format(self.device_type),
])
x = constant_op.constant([2, 3, 4])
with device:
x = device.pack([x])
if math_ops.reduce_any(math_ops.equal(x, constant_op.constant(4))):
y = constant_op.constant(1)
else:
y = constant_op.constant(2)
self.assertAllEqual([1], device.unpack(y))
@parameterized.named_parameters(
("variable", variables.Variable),
("tensor", lambda x: x))
def test_string_representation(self, transform):
x = self.device.pack(
[constant_op.constant([5., 6.]),
constant_op.constant([6., 7.])])
with self.device:
x = transform(x)
parallel_str = str(x)
self.assertIn("5", parallel_str)
self.assertIn("7", parallel_str)
self.assertIn(self.device_type + ":0", parallel_str)
self.assertIn(self.device_type + ":1", parallel_str)
parallel_repr = repr(x)
self.assertIn("5", parallel_repr)
self.assertIn("7", parallel_repr)
self.assertIn(self.device_type + ":0", parallel_repr)
self.assertIn(self.device_type + ":1", parallel_repr)
def test_device_id(self):
device_ids = self.device.unpack(self.device.device_ids)
self.assertAllClose([0, 1], device_ids)
# TODO(allenl): Should device IDs be int64 so they can be placed on GPUs?
# Currently backing_device is CPU.
self.assertIn(self.device.components[0], device_ids[0].device)
self.assertIn(self.device.components[1], device_ids[1].device)
def test_zeros(self):
with self.device:
x = array_ops.zeros([array_ops.identity(constant_op.constant(10))])
for component in self.device.unpack(x):
self.assertAllClose([0.] * 10, component)
def test_generator(self):
with self.device:
g_same = stateful_random_ops.Generator.from_seed(0)
g_different = stateful_random_ops.Generator.from_seed(
self.device.device_ids)
same = g_same.normal([10])
different = g_different.normal([10])
same_unpacked = self.device.unpack(same)
different_unpacked = self.device.unpack(different)
for same_component, different_component in zip(same_unpacked[1:],
different_unpacked[1:]):
self.assertAllClose(same_component, same_unpacked[0])
self.assertNotAllClose(different_component, different_unpacked[0])
def test_collective_reduce(self):
x = self.device.pack(
[constant_op.constant(-1.5),
constant_op.constant(3.5)])
with self.device:
reduced = _collective_sum(x, num_replicas=2)
outputs = self.device.unpack(reduced)
self.assertAllClose([2., 2.], outputs)
self.assertIn(self.device.components[0], outputs[0].backing_device)
self.assertIn(self.device.components[1], outputs[1].backing_device)
def test_collective_reduce_in_function(self):
x = self.device.pack(
[constant_op.constant(-1.5),
constant_op.constant(3.5)])
with self.device:
@def_function.function
def reduce(t):
return _collective_sum(t, num_replicas=2)
reduced = reduce(x)
outputs = self.device.unpack(reduced)
self.assertAllClose([2., 2.], outputs)
self.assertIn(self.device.components[0], outputs[0].backing_device)
self.assertIn(self.device.components[1], outputs[1].backing_device)
def test_collective_reduce_async_scope(self):
# Note that ops on the parallel device currently don't execute
# asynchronously. The test is just that we don't get deadlocks.
x = self.device.pack(
[constant_op.constant(-1.5),
constant_op.constant(3.5)])
with context.async_scope(), self.device:
reduced = _collective_sum(x, num_replicas=2)
outputs = self.device.unpack(reduced)
self.assertAllClose([2., 2.], outputs)
self.assertIn(self.device.components[0], outputs[0].backing_device)
self.assertIn(self.device.components[1], outputs[1].backing_device)
def test_collective_reduce_async_context(self):
previous = config.get_synchronous_execution()
try:
context._reset_context()
config.set_synchronous_execution(False)
self.setUp()
# Note that ops on the parallel device currently don't execute
# asynchronously. The test is just that we don't get deadlocks.
x = self.device.pack(
[constant_op.constant(-1.5),
constant_op.constant(3.5)])
with self.device:
reduced = _collective_sum(x, num_replicas=2)
outputs = self.device.unpack(reduced)
self.assertAllClose([2., 2.], outputs)
self.assertIn(self.device.components[0], outputs[0].backing_device)
self.assertIn(self.device.components[1], outputs[1].backing_device)
finally:
context._reset_context()
config.set_synchronous_execution(previous)
@parameterized.named_parameters(
[("RunFunctionsEagerly", True),
("", False)])
def test_cond(self, run_functions_eagerly):
try:
def_function.run_functions_eagerly(run_functions_eagerly)
pred = self.device.pack(
[constant_op.constant(True), constant_op.constant(False)])
capture = self.device.pack(
[constant_op.constant([1.]), constant_op.constant([2.])])
with self.device:
result = control_flow_ops.cond(
pred,
def_function.function(lambda: capture * 2.),
def_function.function(lambda: capture * 4.))
self.assertAllClose(
[[2.], [8.]], self.device.unpack(result))
finally:
def_function.run_functions_eagerly(False)
def test_cond_with_variable(self):
pred = self.device.pack(
[constant_op.constant(True), constant_op.constant(False)])
capture = self.device.pack(
[constant_op.constant([1.]), constant_op.constant([2.])])
with self.device:
v = None
@def_function.function
def true_branch():
nonlocal v
if v is None:
v = variables.Variable(constant_op.constant(2.))
return v * capture
result = control_flow_ops.cond(
pred, true_branch, def_function.function(lambda: capture * 4.))
self.assertAllClose(
[[2.], [8.]], self.device.unpack(result))
self.assertAllClose(
[2., 2.], self.device.unpack(v))
# There are two unique variable handles with separate storage.
h1, _ = self.device.unpack(v.handle)
gen_resource_variable_ops.assign_variable_op(h1, constant_op.constant(3.))
self.assertAllClose(
[3., 2.], self.device.unpack(v))
def test_collective_broadcast_in_function(self):
if self.device_type == "TPU":
self.skipTest("ParallelDevice broadcast collectives on TPUs need work")
@def_function.function
def broadcast_send_recv(device_id):
c = constant_op.constant([2])
@def_function.function
def send():
s0 = collective_ops.broadcast_send(
c * 3, c.shape, c.dtype, group_size=2, group_key=1, instance_key=1)
with ops.control_dependencies([s0.op]):
return array_ops.identity(c)
@def_function.function
def recv():
r0 = collective_ops.broadcast_recv(
c.shape, c.dtype, group_size=2, group_key=1, instance_key=1)
return r0
return control_flow_ops.switch_case(
device_id, branch_fns={0: send, 1: recv})
with self.device:
result = broadcast_send_recv(self.device.device_ids)
self.assertAllClose([[2], [6]], self.device.unpack(result))
def test_use_in_graph_error_is_informative(self):
@def_function.function
def uses_parallel():
with self.device:
return self.device.unpack(array_ops.ones([]))
with self.assertRaisesRegex(NotImplementedError, "inside `tf.function`"):
uses_parallel()
def test_checkpointing(self):
self.skipTest("b/216201668: revisit parallel device and checkpointing.")
prefix = os.path.join(self.get_temp_dir(), "ckpt")
different_values = self.device.pack(
[constant_op.constant(-1.),
constant_op.constant(3.)])
with self.device:
v = variables.Variable(different_values)
checkpoint = tracking.Checkpoint(v=v)
save_path = checkpoint.save(prefix)
with self.device:
v.assign(constant_op.constant(0.))
checkpoint.restore(save_path).assert_consumed()
with self.device:
outputs = self.device.unpack(v)
self.assertAllClose([-1., 3.], outputs)
with self.device:
restore_on_create = tracking.Checkpoint()
restore_on_create.restore(save_path)
restore_on_create.v = variables.Variable(0.)
outputs = self.device.unpack(restore_on_create.v)
self.assertAllClose([-1., 3.], outputs)
# Changing the number of devices / restoring into a single-device copy is OK
single_device = tracking.Checkpoint(v=variables.Variable(0.))
status = single_device.restore(save_path)
status.assert_existing_objects_matched()
self.assertAllClose(-1., single_device.v)
with self.assertRaisesRegex(AssertionError, "parallel_component_1"):
# There are parts of the variable that aren't restored into a
# single-device copy.
status.assert_consumed()
def test_pack_composite(self):
if self.device_type != "CPU":
self.skipTest("Iterator GetNext doesn't work on accelerators.")
datasets = [
dataset_ops.Dataset.from_tensor_slices(
[i + 1, (i + 1) * 2, (i + 1) * 3])
for i in range(len(self.device.components))]
parallel_dataset = self.device.pack(datasets)
with self.device:
iterator = iter(parallel_dataset)
parallel_sample = next(iterator)
component_iterators = self.device.unpack(iterator)
self.assertEqual(2, next(component_iterators[0]).numpy())
self.assertEqual(1, self.device.unpack(parallel_sample)[0].numpy())
self.assertEqual(4, next(component_iterators[1]).numpy())
self.assertEqual(2, self.device.unpack(parallel_sample)[1].numpy())
def test_pack_structure(self):
x_parts = [{"a": constant_op.constant(float(i))}
for i in range(len(self.device.components))]
x = self.device.pack(x_parts)
self.assertAllClose([{"a": 0.}, {"a": 1.}], self.device.unpack(x))
def test_pack_variable_value(self):
x_parts = [variables.Variable(i)
for i in range(len(self.device.components))]
x = self.device.pack(x_parts)
with self.device:
x1 = self.device.pack(x_parts)
for v in x_parts:
v.assign(-10) # Mutating the variable does not affect previous reads.
self.assertAllClose([0, 1], self.device.unpack(x))
self.assertAllClose([0, 1], self.device.unpack(x1))
def test_unpack_variable_value(self):
x_parts = [constant_op.constant(i)
for i in range(len(self.device.components))]
x = self.device.pack(x_parts)
with self.device:
v = variables.Variable(x)
v_unpacked = self.device.unpack(v)
v.assign(-10) # Mutating the variable does not affect previous reads.
self.assertAllClose([0, 1], v_unpacked)
def test_saved_model(self):
self.skipTest("b/216201668: revisit parallel device and saved model")
different_values = self.device.pack(
[constant_op.constant(-1.),
constant_op.constant(3.)])
with self.device:
m = module.Module()
m.v = variables.Variable(different_values)
m.f = def_function.function(lambda: m.v * 2.)
self.assertAllClose([-2., 6.], self.device.unpack(m.f()))
saved_model_path = os.path.join(self.get_temp_dir(), "saved_model")
save.save(m, saved_model_path)
context._reset_context()
self.setUp()
single_device_loaded = load.load(saved_model_path)
self.assertAllClose(-2., single_device_loaded.f())
assign_value = self.device.pack(
[constant_op.constant(.1), constant_op.constant(.2)])
with self.device:
parallel_loaded = load.load(saved_model_path)
self.assertAllClose([-2., 6.], self.device.unpack(parallel_loaded.f()))
self.assertAllClose([-1., 3.], self.device.unpack(parallel_loaded.v))
parallel_loaded.v.assign(assign_value)
self.assertAllClose([.2, .4], self.device.unpack(parallel_loaded.f()))
def _assert_close_to_non_parallel(self, computation):
"""Asserts that replication of `computation` works and is equivalent."""
with self.device:
parallel_result = computation()
non_parallel_result = computation()
# The computations should have the same number and structure of Tensor
# objects, even though the tensors themselves will be on different devices
# and represent different numbers of values.
nest.assert_same_structure(parallel_result, non_parallel_result)
non_parallel_flat = nest.flatten(non_parallel_result)
parallel_flat = nest.flatten(parallel_result)
self.assertGreater(len(parallel_flat), 0)
for non_parallel, parallel in zip(non_parallel_flat, parallel_flat):
self.assertEqual(self.device._name, parallel.device)
self.assertNotEqual(self.device._name, non_parallel.device)
for parallel_component in self.device.unpack(parallel):
self.assertAllClose(non_parallel, parallel_component)
def test_capturing(self):
with self.device:
x = constant_op.constant([1., 2.])
x = array_ops.identity(x)
@def_function.function
def f(y):
return x + y
y = array_ops.ones([2])
parallel_result = f(y)
self.assertAllClose([[2., 3.]] * 2, self.device.unpack(parallel_result))
def test_euclidean_norm(self):
def _test_fn():
with backprop.GradientTape() as tape:
x = array_ops.ones([5, 5])
tape.watch(x)
y = math_ops.reduce_euclidean_norm(x, axis=constant_op.constant(1))
return y, tape.gradient(y, x)
self._assert_close_to_non_parallel(_test_fn)
def test_reduce_sum(self):
def _test_fn():
with backprop.GradientTape() as tape:
x = array_ops.ones([5, 5])
tape.watch(x)
y = math_ops.reduce_sum(x, axis=constant_op.constant(1))
return y, tape.gradient(y, x)
self._assert_close_to_non_parallel(_test_fn)
def test_variable_created_in_function(self):
captured_value = constant_op.constant(2.)
class M(module.Module):
def __init__(self):
self.v = None
self.w = None
self.x = None
self.z = None
@def_function.function(autograph=False)
def __call__(self, x):
if self.v is None:
with ops.init_scope():
initial_value = constant_op.constant(2.)
self.z = variables.Variable(initial_value)
self.x = variables.Variable(captured_value)
self.w = variables.Variable(lambda: constant_op.constant(2.))
self.v = variables.Variable(constant_op.constant(2.))
return x * self.v * self.w * self.x * self.z
with self.device:
m = M()
packed_outputs = m(array_ops.ones([]))
outputs = self.device.unpack(packed_outputs)
self.assertAllClose([16., 16.], outputs)
def test_different_shapes(self):
x = self.device.pack(
[constant_op.constant([1., 2.]),
constant_op.constant([5.])])
with self.device:
y = x * 2.
self.assertEqual([None], y.shape.as_list())
self.assertAllClose([[2., 4.], [10.]], self.device.unpack(y))
different_axes = self.device.pack(
[constant_op.constant([1., 2.]),
constant_op.constant([[5.]])])
with self.assertRaisesRegex(Exception,
"components do not all have the same rank"):
different_axes.shape # pylint: disable=pointless-statement
class LayerTests(_VirtualDeviceTestCase):
def test_layer_forward(self):
with self.device:
layer = _Dense(5)
x = constant_op.constant([[2.]])
y = layer(x)
outputs = self.device.unpack(y)
self.assertAllClose([[3.] * 5], outputs[0])
self.assertAllClose([[3.] * 5], outputs[1])
self.assertIn(self.device.components[0], outputs[0].backing_device)
self.assertIn(self.device.components[1], outputs[1].backing_device)
# With different Layer inputs we get different outputs
x = self.device.pack(
[constant_op.constant([[-0.5]]),
constant_op.constant([[0.5]])])
with self.device:
y = layer(x)
outputs = self.device.unpack(y)
self.assertGreater(
math_ops.reduce_max(math_ops.abs(outputs[0] - outputs[1])), 1e-5)
self.assertIn(self.device.components[0], outputs[0].backing_device)
self.assertIn(self.device.components[1], outputs[1].backing_device)
def test_layer_sync_training(self):
x = self.device.pack(
[constant_op.constant([[-0.5]]),
constant_op.constant([[0.5]])])
with self.device:
layer = _Dense(5)
with backprop.GradientTape() as tape:
y = layer(x)
loss = (y - math_ops.range(5.))**2.
parameters = layer.trainable_variables
unreduced_gradients = tape.gradient(loss, parameters)
reduced_gradients = _collective_sum(unreduced_gradients, num_replicas=2)
for grad, param in zip(reduced_gradients, parameters):
param.assign_sub(0.01 * grad)
final_kernels = self.device.unpack(layer.kernel)
self.assertAllClose(final_kernels[0], final_kernels[1])
final_bias = self.device.unpack(layer.bias)
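    # Closed form for one synchronized step: the bias gradient 2 * (y - target)
    # is summed over both replicas' losses (inputs +/- 0.5) before the
    # 0.01-rate update.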
expected_bias = (1. - 0.01 * 2. * (1. + .5 - math_ops.range(5.)) -
0.01 * 2. * (1. - .5 - math_ops.range(5.)))
self.assertAllClose(expected_bias, final_bias[0], rtol=1e-4, atol=1e-4)
self.assertAllClose(expected_bias, final_bias[1], rtol=1e-4, atol=1e-4)
self.assertIn(self.device.components[0], final_kernels[0].backing_device)
self.assertIn(self.device.components[1], final_kernels[1].backing_device)
def test_layer_divergent_buffer_training(self):
x = self.device.pack(
[constant_op.constant([[-0.5]]),
constant_op.constant([[0.5]])])
with self.device:
layer = _Dense(5)
with backprop.GradientTape() as tape:
y = layer(x)
loss = (y - math_ops.range(5.))**2.
parameters = layer.trainable_variables
unreduced_gradients = tape.gradient(loss, parameters)
for grad, param in zip(unreduced_gradients, parameters):
param.assign_sub(0.01 * grad)
final_kernels = self.device.unpack(layer.kernel)
self.assertNotAllClose(final_kernels[0], final_kernels[1])
final_bias = self.device.unpack(layer.bias)
self.assertAllClose(1. - 0.01 * 2. * (1. - .5 - math_ops.range(5.)),
final_bias[0])
self.assertAllClose(1. - 0.01 * 2. * (1. + .5 - math_ops.range(5.)),
final_bias[1])
self.assertIn(self.device.components[0], final_kernels[0].backing_device)
self.assertIn(self.device.components[1], final_kernels[1].backing_device)
def test_training_loop(self):
self.skipTest("b/216201668: revisit parallel device and checkpointing")
for _ in range(5):
layer = _Dense(5)
checkpoint = tracking.Checkpoint(layer=layer)
manager = checkpoint_management.CheckpointManager(
checkpoint, directory=self.get_temp_dir(), max_to_keep=5)
manager.restore_or_initialize()
for _ in range(10):
x = self.device.pack(
[constant_op.constant([[-0.5]]),
constant_op.constant([[0.5]])])
with self.device:
with backprop.GradientTape() as tape:
y = layer(x)
loss = (y - math_ops.range(5.))**2.
parameters = layer.trainable_variables
unreduced_gradients = tape.gradient(loss, parameters)
reduced_gradients = _collective_sum(
unreduced_gradients, num_replicas=len(self.device.components))
for grad, param in zip(reduced_gradients, parameters):
param.assign_sub(0.01 * grad)
manager.save()
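# --- Added usage sketch (not part of the original test suite) ---
# A minimal illustration of the pack -> compute -> unpack pattern these tests
# exercise. It assumes two CPU components are available and that `self.device`
# in the tests is a `parallel_device.ParallelDevice`; the import path below is
# an assumption based on that.
def _example_pack_compute_unpack():
  from tensorflow.python.distribute.parallel_device import parallel_device
  device = parallel_device.ParallelDevice(
      components=["/job:localhost/device:CPU:0",
                  "/job:localhost/device:CPU:1"])
  # One value per component is packed into a single parallel tensor.
  x = device.pack([constant_op.constant(1.), constant_op.constant(2.)])
  with device:
    y = x * 2.  # Runs once per component.
  return device.unpack(y)  # Expected: values 2. and 4.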
if __name__ == "__main__":
ops.enable_eager_execution()
test.main()
| apache-2.0 |
tensorflow/tensorflow-experimental_link_static_libraries_once | tensorflow/python/data/experimental/ops/snapshot.py | 12 | 12328 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Dataset snapshot and related functionality."""
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import gen_experimental_dataset_ops as ged_ops
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
COMPRESSION_GZIP = "GZIP"
COMPRESSION_SNAPPY = "SNAPPY"
COMPRESSION_NONE = None
class _LegacySnapshotDataset(dataset_ops.UnaryUnchangedStructureDataset):
"""A Dataset that captures a snapshot or reads from a snapshot."""
def __init__(self,
input_dataset,
path,
compression=None,
reader_path_prefix=None,
writer_path_prefix=None,
shard_size_bytes=None,
pending_snapshot_expiry_seconds=None,
num_reader_threads=None,
reader_buffer_size=None,
num_writer_threads=None,
writer_buffer_size=None,
shuffle_on_read=None,
shuffle_seed=None,
mode=None,
snapshot_name=None):
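    # The dataset kernel has no notion of Python `None`; translate unset
    # options into its sentinel values (empty string for names/prefixes,
    # -1 for sizes and counts) before building the op below.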
self._compression = compression if compression is not None else ""
self._reader_path_prefix = (
reader_path_prefix if reader_path_prefix is not None else "")
self._writer_path_prefix = (
writer_path_prefix if writer_path_prefix is not None else "")
self._shard_size_bytes = (
shard_size_bytes if shard_size_bytes is not None else -1)
self._pending_snapshot_expiry_seconds = (
pending_snapshot_expiry_seconds
if pending_snapshot_expiry_seconds is not None else -1)
self._num_reader_threads = (
num_reader_threads if num_reader_threads is not None else -1)
self._reader_buffer_size = (
reader_buffer_size if reader_buffer_size is not None else -1)
self._num_writer_threads = (
num_writer_threads if num_writer_threads is not None else -1)
self._writer_buffer_size = (
writer_buffer_size if writer_buffer_size is not None else -1)
self._shuffle_on_read = (
shuffle_on_read if shuffle_on_read is not None else False)
self._mode = (mode if mode is not None else "auto")
self._snapshot_name = (snapshot_name if snapshot_name is not None else "")
self._seed, self._seed2 = random_seed.get_seed(shuffle_seed)
self._input_dataset = input_dataset
self._path = ops.convert_to_tensor(path, dtype=dtypes.string, name="path")
variant_tensor = ged_ops.snapshot_dataset(
self._input_dataset._variant_tensor, # pylint: disable=protected-access
path=self._path,
compression=self._compression,
reader_path_prefix=self._reader_path_prefix,
writer_path_prefix=self._writer_path_prefix,
shard_size_bytes=self._shard_size_bytes,
pending_snapshot_expiry_seconds=self._pending_snapshot_expiry_seconds,
num_reader_threads=self._num_reader_threads,
reader_buffer_size=self._reader_buffer_size,
num_writer_threads=self._num_writer_threads,
writer_buffer_size=self._writer_buffer_size,
shuffle_on_read=self._shuffle_on_read,
seed=self._seed,
seed2=self._seed2,
mode=self._mode,
snapshot_name=self._snapshot_name,
**self._flat_structure)
super(_LegacySnapshotDataset, self).__init__(input_dataset, variant_tensor)
@deprecation.deprecated(
None, "Use `tf.data.experimental.snapshot(...)` instead.")
def legacy_snapshot(path,
compression=None,
reader_path_prefix=None,
writer_path_prefix=None,
shard_size_bytes=None,
pending_snapshot_expiry_seconds=None,
num_reader_threads=None,
reader_buffer_size=None,
num_writer_threads=None,
writer_buffer_size=None,
shuffle_on_read=None,
shuffle_seed=None,
mode=None,
snapshot_name=None):
"""Writes to/reads from a snapshot of a dataset.
This function attempts to determine whether a valid snapshot exists at the
`path`, and reads from the snapshot if so. If not, it will run the
preprocessing pipeline as usual, and write out a snapshot of the data
processed for future use.
Args:
path: A directory where we want to save our snapshots and/or read from a
previously saved snapshot.
compression: The type of compression to apply to the Dataset. Currently
supports "GZIP" or None. Defaults to None (no compression).
reader_path_prefix: A prefix to add to the path when reading from snapshots.
Defaults to None.
writer_path_prefix: A prefix to add to the path when writing to snapshots.
Defaults to None.
shard_size_bytes: The size of each shard to be written by the snapshot
dataset op. Defaults to 10 GiB.
pending_snapshot_expiry_seconds: How long to wait (in seconds) before the
snapshot op considers a previously unfinished snapshot to be stale.
num_reader_threads: Number of threads to parallelize reading from snapshot.
Especially useful if compression is turned on since the decompression
      operation tends to be intensive. Defaults to 1. If > 1, this might
      introduce non-determinism, i.e. the order in which elements are read
      from the snapshot may differ from the order in which they were written.
reader_buffer_size: Maximum number of elements we can prefetch reading from
the snapshot. Defaults to 1. Increasing this might improve performance but
will increase memory consumption.
    num_writer_threads: Number of threads to parallelize writing of the
      snapshot. We'll open up `num_writer_threads` files and write to them in
      parallel. Especially useful if compression is turned on since the
      compression operation tends to be intensive. Defaults to 1. If > 1, this
      might introduce non-determinism, i.e. the order in which elements are
      read from the upstream iterator may differ from the order in which they
      are written.
    writer_buffer_size: Maximum number of pipeline elements to buffer before
      writing them out using `num_writer_threads`.
shuffle_on_read: If this is True, then the order in which examples are
produced when reading from a snapshot will be random. Defaults to False.
shuffle_seed: Optional. If shuffle_seed is set, the random number generator
used for shuffling (when shuffle_on_read is turned on) is seeded by the
given seed. Otherwise, it is seeded by a random seed that differs for
every run.
mode: The mode at which snapshot should operate. Valid options are "auto",
"read", "write", and "passthrough". The default mode is "auto", where the
snapshot op will automatically determine what mode to operate in.
snapshot_name: If set, use the supplied string as a named snapshot name
instead of introspecting the data pipeline and automatically generating a
unique identifier for the snapshot.
Returns:
A `Dataset` transformation function, which can be passed to
`tf.data.Dataset.apply`.
"""
def _apply_fn(dataset):
return _LegacySnapshotDataset(
input_dataset=dataset,
path=path,
compression=compression,
reader_path_prefix=reader_path_prefix,
writer_path_prefix=writer_path_prefix,
shard_size_bytes=shard_size_bytes,
pending_snapshot_expiry_seconds=pending_snapshot_expiry_seconds,
num_reader_threads=num_reader_threads,
reader_buffer_size=reader_buffer_size,
num_writer_threads=num_writer_threads,
writer_buffer_size=writer_buffer_size,
shuffle_on_read=shuffle_on_read,
shuffle_seed=shuffle_seed,
mode=mode,
snapshot_name=snapshot_name)
return _apply_fn
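# Added illustration (not part of the original module): applying the
# deprecated `legacy_snapshot` transformation with `Dataset.apply`, as the
# docstring above describes. The path is hypothetical.
def _legacy_snapshot_example():
  dataset = dataset_ops.Dataset.range(10)
  return dataset.apply(
      legacy_snapshot("/tmp/legacy_snapshot_dir",
                      compression=COMPRESSION_GZIP))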
@deprecation.deprecated(None, "Use `tf.data.Dataset.snapshot(...)`.")
@tf_export("data.experimental.snapshot")
def snapshot(path, compression="AUTO", reader_func=None, shard_func=None):
"""API to persist the output of the input dataset.
The snapshot API allows users to transparently persist the output of their
preprocessing pipeline to disk, and materialize the pre-processed data on a
different training run.
This API enables repeated preprocessing steps to be consolidated, and allows
re-use of already processed data, trading off disk storage and network
bandwidth for freeing up more valuable CPU resources and accelerator compute
time.
https://github.com/tensorflow/community/blob/master/rfcs/20200107-tf-data-snapshot.md
has detailed design documentation of this feature.
Users can specify various options to control the behavior of snapshot,
including how snapshots are read from and written to by passing in
user-defined functions to the `reader_func` and `shard_func` parameters.
`shard_func` is a user specified function that maps input elements to snapshot
shards.
Users may want to specify this function to control how snapshot files should
be written to disk. Below is an example of how a potential shard_func could
be written.
```python
dataset = ...
dataset = dataset.enumerate()
dataset = dataset.apply(tf.data.experimental.snapshot("/path/to/snapshot/dir",
shard_func=lambda x, y: x % NUM_SHARDS, ...))
dataset = dataset.map(lambda x, y: y)
```
`reader_func` is a user specified function that accepts a single argument:
(1) a Dataset of Datasets, each representing a "split" of elements of the
original dataset. The cardinality of the input dataset matches the
number of the shards specified in the `shard_func` (see above). The function
should return a Dataset of elements of the original dataset.
  Users may want to specify this function to control how snapshot files should
  be read from disk, including the amount of shuffling and parallelism.
Here is an example of a standard reader function a user can define. This
function enables both dataset shuffling and parallel reading of datasets:
```python
def user_reader_func(datasets):
# shuffle the datasets splits
datasets = datasets.shuffle(NUM_CORES)
# read datasets in parallel and interleave their elements
return datasets.interleave(lambda x: x, num_parallel_calls=AUTOTUNE)
dataset = dataset.apply(tf.data.experimental.snapshot("/path/to/snapshot/dir",
reader_func=user_reader_func))
```
By default, snapshot parallelizes reads by the number of cores available on
the system, but will not attempt to shuffle the data.
Args:
path: Required. A directory to use for storing / loading the snapshot to /
from.
compression: Optional. The type of compression to apply to the snapshot
written to disk. Supported options are `GZIP`, `SNAPPY`, `AUTO` or None.
Defaults to AUTO, which attempts to pick an appropriate compression
algorithm for the dataset.
reader_func: Optional. A function to control how to read data from snapshot
shards.
shard_func: Optional. A function to control how to shard data when writing a
snapshot.
Returns:
A `Dataset` transformation function, which can be passed to
`tf.data.Dataset.apply`.
"""
def _apply_fn(dataset):
"""Actual dataset transformation."""
return dataset.snapshot(
path=path,
compression=compression,
reader_func=reader_func,
shard_func=shard_func)
return _apply_fn
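# Added illustration (not part of the original module): a compact pipeline
# combining the docstring's `shard_func` and `reader_func` examples. The path
# and shard count are hypothetical.
def _snapshot_example(num_shards=4):
  def reader_func(datasets):
    # Shuffle the dataset splits, then read them in parallel and interleave
    # their elements, as in the docstring example above.
    datasets = datasets.shuffle(num_shards)
    return datasets.interleave(
        lambda x: x, num_parallel_calls=dataset_ops.AUTOTUNE)
  dataset = dataset_ops.Dataset.range(100).enumerate()
  dataset = dataset.apply(
      snapshot("/tmp/snapshot_dir",
               shard_func=lambda index, value: index % num_shards,
               reader_func=reader_func))
  # Drop the enumeration index once sharding is done.
  return dataset.map(lambda index, value: value)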
| apache-2.0 |
michellemorales/OpenMM | models/rebar/download_data.py | 11 | 3002 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Download MNIST, Omniglot datasets for Rebar."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import urllib
import gzip
import os
import config
import struct
import numpy as np
import cPickle as pickle
import datasets
MNIST_URL = 'see README'
MNIST_BINARIZED_URL = 'see README'
OMNIGLOT_URL = 'see README'
MNIST_FLOAT_TRAIN = 'train-images-idx3-ubyte'
def load_mnist_float(local_filename):
with open(local_filename, 'rb') as f:
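    # IDX format: a 4-byte magic number (skipped here) followed by three
    # big-endian int32 fields giving the image count, rows, and columns.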
f.seek(4)
nimages, rows, cols = struct.unpack('>iii', f.read(12))
dim = rows*cols
images = np.fromfile(f, dtype=np.dtype(np.ubyte))
images = (images/255.0).astype('float32').reshape((nimages, dim))
return images
if __name__ == '__main__':
if not os.path.exists(config.DATA_DIR):
os.makedirs(config.DATA_DIR)
# Get MNIST and convert to npy file
local_filename = os.path.join(config.DATA_DIR, MNIST_FLOAT_TRAIN)
if not os.path.exists(local_filename):
urllib.urlretrieve("%s/%s.gz" % (MNIST_URL, MNIST_FLOAT_TRAIN), local_filename+'.gz')
with gzip.open(local_filename+'.gz', 'rb') as f:
file_content = f.read()
with open(local_filename, 'wb') as f:
f.write(file_content)
os.remove(local_filename+'.gz')
mnist_float_train = load_mnist_float(local_filename)[:-10000]
# save in a nice format
np.save(os.path.join(config.DATA_DIR, config.MNIST_FLOAT), mnist_float_train)
# Get binarized MNIST
splits = ['train', 'valid', 'test']
mnist_binarized = []
for split in splits:
filename = 'binarized_mnist_%s.amat' % split
url = '%s/binarized_mnist_%s.amat' % (MNIST_BINARIZED_URL, split)
local_filename = os.path.join(config.DATA_DIR, filename)
if not os.path.exists(local_filename):
urllib.urlretrieve(url, local_filename)
with open(local_filename, 'rb') as f:
mnist_binarized.append((np.array([map(int, line.split()) for line in f.readlines()]).astype('float32'), None))
# save in a nice format
with open(os.path.join(config.DATA_DIR, config.MNIST_BINARIZED), 'w') as out:
pickle.dump(mnist_binarized, out)
# Get Omniglot
local_filename = os.path.join(config.DATA_DIR, config.OMNIGLOT)
if not os.path.exists(local_filename):
urllib.urlretrieve(OMNIGLOT_URL,
local_filename)
| gpl-2.0 |
arabenjamin/scikit-learn | sklearn/neighbors/unsupervised.py | 105 | 4461 | """Unsupervised nearest neighbors learner"""
from .base import NeighborsBase
from .base import KNeighborsMixin
from .base import RadiusNeighborsMixin
from .base import UnsupervisedMixin
class NearestNeighbors(NeighborsBase, KNeighborsMixin,
RadiusNeighborsMixin, UnsupervisedMixin):
"""Unsupervised learner for implementing neighbor searches.
Read more in the :ref:`User Guide <unsupervised_neighbors>`.
Parameters
----------
n_neighbors : int, optional (default = 5)
Number of neighbors to use by default for :meth:`k_neighbors` queries.
radius : float, optional (default = 1.0)
Range of parameter space to use by default for :meth`radius_neighbors`
queries.
algorithm : {'auto', 'ball_tree', 'kd_tree', 'brute'}, optional
Algorithm used to compute the nearest neighbors:
- 'ball_tree' will use :class:`BallTree`
        - 'kd_tree' will use :class:`KDTree`
- 'brute' will use a brute-force search.
- 'auto' will attempt to decide the most appropriate algorithm
based on the values passed to :meth:`fit` method.
Note: fitting on sparse input will override the setting of
this parameter, using brute force.
leaf_size : int, optional (default = 30)
Leaf size passed to BallTree or KDTree. This can affect the
speed of the construction and query, as well as the memory
required to store the tree. The optimal value depends on the
nature of the problem.
    p : integer, optional (default = 2)
Parameter for the Minkowski metric from
sklearn.metrics.pairwise.pairwise_distances. When p = 1, this is
equivalent to using manhattan_distance (l1), and euclidean_distance
(l2) for p = 2. For arbitrary p, minkowski_distance (l_p) is used.
metric : string or callable, default 'minkowski'
metric to use for distance computation. Any metric from scikit-learn
or scipy.spatial.distance can be used.
If metric is a callable function, it is called on each
pair of instances (rows) and the resulting value recorded. The callable
should take two arrays as input and return one value indicating the
distance between them. This works for Scipy's metrics, but is less
efficient than passing the metric name as a string.
Distance matrices are not supported.
Valid values for metric are:
- from scikit-learn: ['cityblock', 'cosine', 'euclidean', 'l1', 'l2',
'manhattan']
- from scipy.spatial.distance: ['braycurtis', 'canberra', 'chebyshev',
'correlation', 'dice', 'hamming', 'jaccard', 'kulsinski',
'mahalanobis', 'matching', 'minkowski', 'rogerstanimoto',
'russellrao', 'seuclidean', 'sokalmichener', 'sokalsneath',
'sqeuclidean', 'yule']
See the documentation for scipy.spatial.distance for details on these
metrics.
    metric_params : dict, optional (default = None)
        Additional keyword arguments for the metric function.
Examples
--------
>>> import numpy as np
>>> from sklearn.neighbors import NearestNeighbors
>>> samples = [[0, 0, 2], [1, 0, 0], [0, 0, 1]]
>>> neigh = NearestNeighbors(2, 0.4)
>>> neigh.fit(samples) #doctest: +ELLIPSIS
NearestNeighbors(...)
>>> neigh.kneighbors([[0, 0, 1.3]], 2, return_distance=False)
... #doctest: +ELLIPSIS
array([[2, 0]]...)
>>> rng = neigh.radius_neighbors([0, 0, 1.3], 0.4, return_distance=False)
>>> np.asarray(rng[0][0])
array(2)
See also
--------
KNeighborsClassifier
RadiusNeighborsClassifier
KNeighborsRegressor
RadiusNeighborsRegressor
BallTree
Notes
-----
See :ref:`Nearest Neighbors <neighbors>` in the online documentation
for a discussion of the choice of ``algorithm`` and ``leaf_size``.
http://en.wikipedia.org/wiki/K-nearest_neighbor_algorithm
"""
def __init__(self, n_neighbors=5, radius=1.0,
algorithm='auto', leaf_size=30, metric='minkowski',
p=2, metric_params=None, **kwargs):
self._init_params(n_neighbors=n_neighbors,
radius=radius,
algorithm=algorithm,
leaf_size=leaf_size, metric=metric, p=p,
metric_params=metric_params, **kwargs)
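def _example_queries():
    """Added illustration: a k-neighbors and a radius query side by side.

    This mirrors the docstring example above; the sample values are
    arbitrary.
    """
    import numpy as np
    samples = np.array([[0., 0., 2.], [1., 0., 0.], [0., 0., 1.]])
    neigh = NearestNeighbors(n_neighbors=2, radius=0.4)
    neigh.fit(samples)
    # Indices (and distances) of the two nearest neighbors of the query point.
    distances, indices = neigh.kneighbors([[0., 0., 1.3]])
    # Indices of all samples within radius 0.4 of the query point.
    within_radius = neigh.radius_neighbors([0., 0., 1.3], 0.4,
                                           return_distance=False)
    return distances, indices, within_radius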
| bsd-3-clause |
rdevon/cortex | cortex/built_ins/datasets/dSprites.py | 1 | 1755 | '''dSprites dataset
Taken and adapted from https://github.com/Near32/PYTORCH_VAE
'''
from os import path
import urllib.request
from torch.utils.data import Dataset
import numpy as np
from PIL import Image
from . import logger
DATASETS = ['dSprites']
class dSprites(Dataset):
_url = ('https://github.com/deepmind/dsprites-dataset/blob/master/'
'dsprites_ndarray_co1sh3sc6or40x32y32_64x64.npz?raw=true')
def __init__(self, root, download=True, transform=None, shuffle=False):
if not root:
raise ValueError('Dataset path not provided')
self.root = root
self.transform = transform
if download:
if path.isfile(root):
logger.warning('File already in path, ignoring download.')
else:
urllib.request.urlretrieve(self._url, root)
# Load dataset
dataset_zip = np.load(self.root)
        logger.debug('Keys in the dataset: %s', dataset_zip.keys())
self.imgs = dataset_zip['imgs']
self.latents_values = dataset_zip['latents_values']
self.latents_classes = dataset_zip['latents_classes']
logger.info('Dataset loaded : OK.')
if shuffle:
self.idx = np.random.permutation(len(self))
self.imgs = self.imgs[self.idx]
self.latents_classes = self.latents_classes[self.idx]
self.latents_values = self.latents_values[self.idx]
def __len__(self):
return len(self.imgs)
def __getitem__(self, idx):
image = Image.fromarray(self.imgs[idx])
latent = self.latents_values[idx]
if self.transform is not None:
image = self.transform(image)
sample = (image, latent)
return sample
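def _example_dataloader(root='/tmp/dsprites.npz', batch_size=64):
    '''Added usage sketch (not part of the original module).

    Wraps the dataset in a PyTorch DataLoader; the path is hypothetical.
    '''
    from torch.utils.data import DataLoader
    dataset = dSprites(root, download=True, shuffle=True)
    return DataLoader(dataset, batch_size=batch_size)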
| bsd-3-clause |
DonBeo/scikit-learn | sklearn/feature_extraction/dict_vectorizer.py | 5 | 11439 | # Authors: Lars Buitinck
# Dan Blanchard <dblanchard@ets.org>
# License: BSD 3 clause
from array import array
from collections import Mapping
from operator import itemgetter
import numpy as np
import scipy.sparse as sp
from ..base import BaseEstimator, TransformerMixin
from ..externals import six
from ..externals.six.moves import xrange
from ..utils import check_array, tosequence
from ..utils.fixes import frombuffer_empty
def _tosequence(X):
"""Turn X into a sequence or ndarray, avoiding a copy if possible."""
if isinstance(X, Mapping): # single sample
return [X]
else:
return tosequence(X)
class DictVectorizer(BaseEstimator, TransformerMixin):
"""Transforms lists of feature-value mappings to vectors.
This transformer turns lists of mappings (dict-like objects) of feature
names to feature values into Numpy arrays or scipy.sparse matrices for use
with scikit-learn estimators.
When feature values are strings, this transformer will do a binary one-hot
(aka one-of-K) coding: one boolean-valued feature is constructed for each
of the possible string values that the feature can take on. For instance,
a feature "f" that can take on the values "ham" and "spam" will become two
features in the output, one signifying "f=ham", the other "f=spam".
Features that do not occur in a sample (mapping) will have a zero value
in the resulting array/matrix.
Parameters
----------
dtype : callable, optional
The type of feature values. Passed to Numpy array/scipy.sparse matrix
constructors as the dtype argument.
    separator : string, optional
        Separator string used when constructing new features for one-hot
        coding.
    sparse : boolean, optional
        Whether transform should produce scipy.sparse matrices.
        True by default.
    sort : boolean, optional
Whether ``feature_names_`` and ``vocabulary_`` should be sorted when fitting.
True by default.
Attributes
----------
vocabulary_ : dict
A dictionary mapping feature names to feature indices.
feature_names_ : list
A list of length n_features containing the feature names (e.g., "f=ham"
and "f=spam").
Examples
--------
>>> from sklearn.feature_extraction import DictVectorizer
>>> v = DictVectorizer(sparse=False)
>>> D = [{'foo': 1, 'bar': 2}, {'foo': 3, 'baz': 1}]
>>> X = v.fit_transform(D)
>>> X
array([[ 2., 0., 1.],
[ 0., 1., 3.]])
>>> v.inverse_transform(X) == \
[{'bar': 2.0, 'foo': 1.0}, {'baz': 1.0, 'foo': 3.0}]
True
>>> v.transform({'foo': 4, 'unseen_feature': 3})
array([[ 0., 0., 4.]])
See also
--------
FeatureHasher : performs vectorization using only a hash function.
sklearn.preprocessing.OneHotEncoder : handles nominal/categorical features
encoded as columns of integers.
"""
def __init__(self, dtype=np.float64, separator="=", sparse=True,
sort=True):
self.dtype = dtype
self.separator = separator
self.sparse = sparse
self.sort = sort
def fit(self, X, y=None):
"""Learn a list of feature name -> indices mappings.
Parameters
----------
X : Mapping or iterable over Mappings
Dict(s) or Mapping(s) from feature names (arbitrary Python
objects) to feature values (strings or convertible to dtype).
y : (ignored)
Returns
-------
self
"""
feature_names = []
vocab = {}
for x in X:
for f, v in six.iteritems(x):
if isinstance(v, six.string_types):
f = "%s%s%s" % (f, self.separator, v)
if f not in vocab:
feature_names.append(f)
vocab[f] = len(vocab)
if self.sort:
feature_names.sort()
vocab = dict((f, i) for i, f in enumerate(feature_names))
self.feature_names_ = feature_names
self.vocabulary_ = vocab
return self
def _transform(self, X, fitting):
# Sanity check: Python's array has no way of explicitly requesting the
# signed 32-bit integers that scipy.sparse needs, so we use the next
# best thing: typecode "i" (int). However, if that gives larger or
# smaller integers than 32-bit ones, np.frombuffer screws up.
assert array("i").itemsize == 4, (
"sizeof(int) != 4 on your platform; please report this at"
" https://github.com/scikit-learn/scikit-learn/issues and"
" include the output from platform.platform() in your bug report")
dtype = self.dtype
if fitting:
feature_names = []
vocab = {}
else:
feature_names = self.feature_names_
vocab = self.vocabulary_
# Process everything as sparse regardless of setting
X = [X] if isinstance(X, Mapping) else X
indices = array("i")
indptr = array("i", [0])
# XXX we could change values to an array.array as well, but it
# would require (heuristic) conversion of dtype to typecode...
values = []
# collect all the possible feature names and build sparse matrix at
# same time
for x in X:
for f, v in six.iteritems(x):
if isinstance(v, six.string_types):
f = "%s%s%s" % (f, self.separator, v)
v = 1
if f in vocab:
indices.append(vocab[f])
values.append(dtype(v))
else:
if fitting:
feature_names.append(f)
vocab[f] = len(vocab)
indices.append(vocab[f])
values.append(dtype(v))
indptr.append(len(indices))
if len(indptr) == 1:
raise ValueError("Sample sequence X is empty.")
indices = frombuffer_empty(indices, dtype=np.intc)
indptr = np.frombuffer(indptr, dtype=np.intc)
shape = (len(indptr) - 1, len(vocab))
result_matrix = sp.csr_matrix((values, indices, indptr),
shape=shape, dtype=dtype)
# Sort everything if asked
if fitting and self.sort:
feature_names.sort()
map_index = np.empty(len(feature_names), dtype=np.int32)
for new_val, f in enumerate(feature_names):
map_index[new_val] = vocab[f]
vocab[f] = new_val
result_matrix = result_matrix[:, map_index]
if self.sparse:
result_matrix.sort_indices()
else:
result_matrix = result_matrix.toarray()
if fitting:
self.feature_names_ = feature_names
self.vocabulary_ = vocab
return result_matrix
def fit_transform(self, X, y=None):
"""Learn a list of feature name -> indices mappings and transform X.
Like fit(X) followed by transform(X), but does not require
materializing X in memory.
Parameters
----------
X : Mapping or iterable over Mappings
Dict(s) or Mapping(s) from feature names (arbitrary Python
objects) to feature values (strings or convertible to dtype).
y : (ignored)
Returns
-------
Xa : {array, sparse matrix}
Feature vectors; always 2-d.
"""
return self._transform(X, fitting=True)
def inverse_transform(self, X, dict_type=dict):
"""Transform array or sparse matrix X back to feature mappings.
X must have been produced by this DictVectorizer's transform or
fit_transform method; it may only have passed through transformers
that preserve the number of features and their order.
In the case of one-hot/one-of-K coding, the constructed feature
names and values are returned rather than the original ones.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Sample matrix.
dict_type : callable, optional
Constructor for feature mappings. Must conform to the
collections.Mapping API.
Returns
-------
D : list of dict_type objects, length = n_samples
Feature mappings for the samples in X.
"""
# COO matrix is not subscriptable
X = check_array(X, accept_sparse=['csr', 'csc'])
n_samples = X.shape[0]
names = self.feature_names_
dicts = [dict_type() for _ in xrange(n_samples)]
if sp.issparse(X):
for i, j in zip(*X.nonzero()):
dicts[i][names[j]] = X[i, j]
else:
for i, d in enumerate(dicts):
for j, v in enumerate(X[i, :]):
if v != 0:
d[names[j]] = X[i, j]
return dicts
def transform(self, X, y=None):
"""Transform feature->value dicts to array or sparse matrix.
Named features not encountered during fit or fit_transform will be
silently ignored.
Parameters
----------
X : Mapping or iterable over Mappings, length = n_samples
Dict(s) or Mapping(s) from feature names (arbitrary Python
objects) to feature values (strings or convertible to dtype).
y : (ignored)
Returns
-------
Xa : {array, sparse matrix}
Feature vectors; always 2-d.
"""
if self.sparse:
return self._transform(X, fitting=False)
else:
dtype = self.dtype
vocab = self.vocabulary_
X = _tosequence(X)
Xa = np.zeros((len(X), len(vocab)), dtype=dtype)
for i, x in enumerate(X):
for f, v in six.iteritems(x):
if isinstance(v, six.string_types):
f = "%s%s%s" % (f, self.separator, v)
v = 1
try:
Xa[i, vocab[f]] = dtype(v)
except KeyError:
pass
return Xa
def get_feature_names(self):
"""Returns a list of feature names, ordered by their indices.
If one-of-K coding is applied to categorical features, this will
include the constructed feature names but not the original ones.
"""
return self.feature_names_
def restrict(self, support, indices=False):
"""Restrict the features to those in support.
Parameters
----------
support : array-like
Boolean mask or list of indices (as returned by the get_support
member of feature selectors).
indices : boolean, optional
Whether support is a list of indices.
"""
if not indices:
support = np.where(support)[0]
names = self.feature_names_
new_vocab = {}
for i in support:
new_vocab[names[i]] = len(new_vocab)
self.vocabulary_ = new_vocab
self.feature_names_ = [f for f, i in sorted(six.iteritems(new_vocab),
key=itemgetter(1))]
return self
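def _restrict_example():
    """Added illustration of ``restrict`` with a boolean support mask.

    The mask below is hardcoded; in practice it usually comes from a feature
    selector's ``get_support()``.
    """
    v = DictVectorizer(sparse=False)
    D = [{'foo': 1, 'bar': 2}, {'foo': 3, 'baz': 1}]
    v.fit_transform(D)  # feature_names_ is ['bar', 'baz', 'foo']
    v.restrict(np.array([True, False, True]))  # keep 'bar' and 'foo'
    return v.get_feature_names()  # ['bar', 'foo']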
| bsd-3-clause |
heli522/scikit-learn | sklearn/linear_model/tests/test_ridge.py | 129 | 22974 | import numpy as np
import scipy.sparse as sp
from scipy import linalg
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import ignore_warnings
from sklearn import datasets
from sklearn.metrics import mean_squared_error
from sklearn.metrics import make_scorer
from sklearn.metrics import get_scorer
from sklearn.linear_model.base import LinearRegression
from sklearn.linear_model.ridge import ridge_regression
from sklearn.linear_model.ridge import Ridge
from sklearn.linear_model.ridge import _RidgeGCV
from sklearn.linear_model.ridge import RidgeCV
from sklearn.linear_model.ridge import RidgeClassifier
from sklearn.linear_model.ridge import RidgeClassifierCV
from sklearn.linear_model.ridge import _solve_cholesky
from sklearn.linear_model.ridge import _solve_cholesky_kernel
from sklearn.grid_search import GridSearchCV
from sklearn.cross_validation import KFold
diabetes = datasets.load_diabetes()
X_diabetes, y_diabetes = diabetes.data, diabetes.target
ind = np.arange(X_diabetes.shape[0])
rng = np.random.RandomState(0)
rng.shuffle(ind)
ind = ind[:200]
X_diabetes, y_diabetes = X_diabetes[ind], y_diabetes[ind]
iris = datasets.load_iris()
X_iris = sp.csr_matrix(iris.data)
y_iris = iris.target
DENSE_FILTER = lambda X: X
SPARSE_FILTER = lambda X: sp.csr_matrix(X)
def test_ridge():
# Ridge regression convergence test using score
# TODO: for this test to be robust, we should use a dataset instead
# of np.random.
rng = np.random.RandomState(0)
alpha = 1.0
for solver in ("svd", "sparse_cg", "cholesky", "lsqr"):
# With more samples than features
n_samples, n_features = 6, 5
y = rng.randn(n_samples)
X = rng.randn(n_samples, n_features)
ridge = Ridge(alpha=alpha, solver=solver)
ridge.fit(X, y)
assert_equal(ridge.coef_.shape, (X.shape[1], ))
assert_greater(ridge.score(X, y), 0.47)
if solver == "cholesky":
# Currently the only solver to support sample_weight.
ridge.fit(X, y, sample_weight=np.ones(n_samples))
assert_greater(ridge.score(X, y), 0.47)
# With more features than samples
n_samples, n_features = 5, 10
y = rng.randn(n_samples)
X = rng.randn(n_samples, n_features)
ridge = Ridge(alpha=alpha, solver=solver)
ridge.fit(X, y)
assert_greater(ridge.score(X, y), .9)
if solver == "cholesky":
# Currently the only solver to support sample_weight.
ridge.fit(X, y, sample_weight=np.ones(n_samples))
assert_greater(ridge.score(X, y), 0.9)
def test_primal_dual_relationship():
y = y_diabetes.reshape(-1, 1)
coef = _solve_cholesky(X_diabetes, y, alpha=[1e-2])
K = np.dot(X_diabetes, X_diabetes.T)
dual_coef = _solve_cholesky_kernel(K, y, alpha=[1e-2])
coef2 = np.dot(X_diabetes.T, dual_coef).T
assert_array_almost_equal(coef, coef2)
def test_ridge_singular():
# test on a singular matrix
rng = np.random.RandomState(0)
n_samples, n_features = 6, 6
y = rng.randn(n_samples // 2)
y = np.concatenate((y, y))
X = rng.randn(n_samples // 2, n_features)
X = np.concatenate((X, X), axis=0)
ridge = Ridge(alpha=0)
ridge.fit(X, y)
assert_greater(ridge.score(X, y), 0.9)
def test_ridge_sample_weights():
rng = np.random.RandomState(0)
for solver in ("cholesky", ):
for n_samples, n_features in ((6, 5), (5, 10)):
for alpha in (1.0, 1e-2):
y = rng.randn(n_samples)
X = rng.randn(n_samples, n_features)
sample_weight = 1 + rng.rand(n_samples)
coefs = ridge_regression(X, y,
alpha=alpha,
sample_weight=sample_weight,
solver=solver)
# Sample weight can be implemented via a simple rescaling
# for the square loss.
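                # Concretely, w_i * (y_i - x_i.beta)^2 equals
                # (sqrt(w_i) * y_i - sqrt(w_i) * x_i.beta)^2, so weighted
                # ridge matches unweighted ridge on sqrt(w)-scaled rows.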
coefs2 = ridge_regression(
X * np.sqrt(sample_weight)[:, np.newaxis],
y * np.sqrt(sample_weight),
alpha=alpha, solver=solver)
assert_array_almost_equal(coefs, coefs2)
# Test for fit_intercept = True
est = Ridge(alpha=alpha, solver=solver)
est.fit(X, y, sample_weight=sample_weight)
# Check using Newton's Method
# Quadratic function should be solved in a single step.
# Initialize
sample_weight = np.sqrt(sample_weight)
X_weighted = sample_weight[:, np.newaxis] * (
np.column_stack((np.ones(n_samples), X)))
y_weighted = y * sample_weight
# Gradient is (X*coef-y)*X + alpha*coef_[1:]
# Remove coef since it is initialized to zero.
grad = -np.dot(y_weighted, X_weighted)
# Hessian is (X.T*X) + alpha*I except that the first
# diagonal element should be zero, since there is no
# penalization of intercept.
diag = alpha * np.ones(n_features + 1)
diag[0] = 0.
hess = np.dot(X_weighted.T, X_weighted)
hess.flat[::n_features + 2] += diag
coef_ = - np.dot(linalg.inv(hess), grad)
assert_almost_equal(coef_[0], est.intercept_)
assert_array_almost_equal(coef_[1:], est.coef_)
def test_ridge_shapes():
# Test shape of coef_ and intercept_
rng = np.random.RandomState(0)
n_samples, n_features = 5, 10
X = rng.randn(n_samples, n_features)
y = rng.randn(n_samples)
Y1 = y[:, np.newaxis]
Y = np.c_[y, 1 + y]
ridge = Ridge()
ridge.fit(X, y)
assert_equal(ridge.coef_.shape, (n_features,))
assert_equal(ridge.intercept_.shape, ())
ridge.fit(X, Y1)
assert_equal(ridge.coef_.shape, (1, n_features))
assert_equal(ridge.intercept_.shape, (1, ))
ridge.fit(X, Y)
assert_equal(ridge.coef_.shape, (2, n_features))
assert_equal(ridge.intercept_.shape, (2, ))
def test_ridge_intercept():
# Test intercept with multiple targets GH issue #708
rng = np.random.RandomState(0)
n_samples, n_features = 5, 10
X = rng.randn(n_samples, n_features)
y = rng.randn(n_samples)
Y = np.c_[y, 1. + y]
ridge = Ridge()
ridge.fit(X, y)
intercept = ridge.intercept_
ridge.fit(X, Y)
assert_almost_equal(ridge.intercept_[0], intercept)
assert_almost_equal(ridge.intercept_[1], intercept + 1.)
def test_toy_ridge_object():
# Test BayesianRegression ridge classifier
# TODO: test also n_samples > n_features
X = np.array([[1], [2]])
Y = np.array([1, 2])
clf = Ridge(alpha=0.0)
clf.fit(X, Y)
X_test = [[1], [2], [3], [4]]
assert_almost_equal(clf.predict(X_test), [1., 2, 3, 4])
assert_equal(len(clf.coef_.shape), 1)
assert_equal(type(clf.intercept_), np.float64)
Y = np.vstack((Y, Y)).T
clf.fit(X, Y)
X_test = [[1], [2], [3], [4]]
assert_equal(len(clf.coef_.shape), 2)
assert_equal(type(clf.intercept_), np.ndarray)
def test_ridge_vs_lstsq():
# On alpha=0., Ridge and OLS yield the same solution.
rng = np.random.RandomState(0)
# we need more samples than features
n_samples, n_features = 5, 4
y = rng.randn(n_samples)
X = rng.randn(n_samples, n_features)
ridge = Ridge(alpha=0., fit_intercept=False)
ols = LinearRegression(fit_intercept=False)
ridge.fit(X, y)
ols.fit(X, y)
assert_almost_equal(ridge.coef_, ols.coef_)
ridge.fit(X, y)
ols.fit(X, y)
assert_almost_equal(ridge.coef_, ols.coef_)
def test_ridge_individual_penalties():
# Tests the ridge object using individual penalties
rng = np.random.RandomState(42)
n_samples, n_features, n_targets = 20, 10, 5
X = rng.randn(n_samples, n_features)
y = rng.randn(n_samples, n_targets)
penalties = np.arange(n_targets)
coef_cholesky = np.array([
Ridge(alpha=alpha, solver="cholesky").fit(X, target).coef_
for alpha, target in zip(penalties, y.T)])
coefs_indiv_pen = [
Ridge(alpha=penalties, solver=solver, tol=1e-6).fit(X, y).coef_
for solver in ['svd', 'sparse_cg', 'lsqr', 'cholesky']]
for coef_indiv_pen in coefs_indiv_pen:
assert_array_almost_equal(coef_cholesky, coef_indiv_pen)
# Test error is raised when number of targets and penalties do not match.
ridge = Ridge(alpha=penalties[:3])
assert_raises(ValueError, ridge.fit, X, y)
def _test_ridge_loo(filter_):
# test that can work with both dense or sparse matrices
n_samples = X_diabetes.shape[0]
ret = []
ridge_gcv = _RidgeGCV(fit_intercept=False)
ridge = Ridge(alpha=1.0, fit_intercept=False)
# generalized cross-validation (efficient leave-one-out)
decomp = ridge_gcv._pre_compute(X_diabetes, y_diabetes)
errors, c = ridge_gcv._errors(1.0, y_diabetes, *decomp)
values, c = ridge_gcv._values(1.0, y_diabetes, *decomp)
# brute-force leave-one-out: remove one example at a time
errors2 = []
values2 = []
for i in range(n_samples):
sel = np.arange(n_samples) != i
X_new = X_diabetes[sel]
y_new = y_diabetes[sel]
ridge.fit(X_new, y_new)
value = ridge.predict([X_diabetes[i]])[0]
error = (y_diabetes[i] - value) ** 2
errors2.append(error)
values2.append(value)
# check that efficient and brute-force LOO give same results
assert_almost_equal(errors, errors2)
assert_almost_equal(values, values2)
# generalized cross-validation (efficient leave-one-out,
# SVD variation)
decomp = ridge_gcv._pre_compute_svd(X_diabetes, y_diabetes)
errors3, c = ridge_gcv._errors_svd(ridge.alpha, y_diabetes, *decomp)
values3, c = ridge_gcv._values_svd(ridge.alpha, y_diabetes, *decomp)
# check that efficient and SVD efficient LOO give same results
assert_almost_equal(errors, errors3)
assert_almost_equal(values, values3)
# check best alpha
ridge_gcv.fit(filter_(X_diabetes), y_diabetes)
alpha_ = ridge_gcv.alpha_
ret.append(alpha_)
# check that we get same best alpha with custom loss_func
f = ignore_warnings
scoring = make_scorer(mean_squared_error, greater_is_better=False)
ridge_gcv2 = RidgeCV(fit_intercept=False, scoring=scoring)
f(ridge_gcv2.fit)(filter_(X_diabetes), y_diabetes)
assert_equal(ridge_gcv2.alpha_, alpha_)
# check that we get same best alpha with custom score_func
func = lambda x, y: -mean_squared_error(x, y)
scoring = make_scorer(func)
ridge_gcv3 = RidgeCV(fit_intercept=False, scoring=scoring)
f(ridge_gcv3.fit)(filter_(X_diabetes), y_diabetes)
assert_equal(ridge_gcv3.alpha_, alpha_)
# check that we get same best alpha with a scorer
scorer = get_scorer('mean_squared_error')
ridge_gcv4 = RidgeCV(fit_intercept=False, scoring=scorer)
ridge_gcv4.fit(filter_(X_diabetes), y_diabetes)
assert_equal(ridge_gcv4.alpha_, alpha_)
# check that we get same best alpha with sample weights
ridge_gcv.fit(filter_(X_diabetes), y_diabetes,
sample_weight=np.ones(n_samples))
assert_equal(ridge_gcv.alpha_, alpha_)
# simulate several responses
Y = np.vstack((y_diabetes, y_diabetes)).T
ridge_gcv.fit(filter_(X_diabetes), Y)
Y_pred = ridge_gcv.predict(filter_(X_diabetes))
ridge_gcv.fit(filter_(X_diabetes), y_diabetes)
y_pred = ridge_gcv.predict(filter_(X_diabetes))
assert_array_almost_equal(np.vstack((y_pred, y_pred)).T,
Y_pred, decimal=5)
return ret
def _test_ridge_cv(filter_):
n_samples = X_diabetes.shape[0]
ridge_cv = RidgeCV()
ridge_cv.fit(filter_(X_diabetes), y_diabetes)
ridge_cv.predict(filter_(X_diabetes))
assert_equal(len(ridge_cv.coef_.shape), 1)
assert_equal(type(ridge_cv.intercept_), np.float64)
cv = KFold(n_samples, 5)
ridge_cv.set_params(cv=cv)
ridge_cv.fit(filter_(X_diabetes), y_diabetes)
ridge_cv.predict(filter_(X_diabetes))
assert_equal(len(ridge_cv.coef_.shape), 1)
assert_equal(type(ridge_cv.intercept_), np.float64)
def _test_ridge_diabetes(filter_):
ridge = Ridge(fit_intercept=False)
ridge.fit(filter_(X_diabetes), y_diabetes)
return np.round(ridge.score(filter_(X_diabetes), y_diabetes), 5)
def _test_multi_ridge_diabetes(filter_):
# simulate several responses
Y = np.vstack((y_diabetes, y_diabetes)).T
n_features = X_diabetes.shape[1]
ridge = Ridge(fit_intercept=False)
ridge.fit(filter_(X_diabetes), Y)
assert_equal(ridge.coef_.shape, (2, n_features))
Y_pred = ridge.predict(filter_(X_diabetes))
ridge.fit(filter_(X_diabetes), y_diabetes)
y_pred = ridge.predict(filter_(X_diabetes))
assert_array_almost_equal(np.vstack((y_pred, y_pred)).T,
Y_pred, decimal=3)
def _test_ridge_classifiers(filter_):
n_classes = np.unique(y_iris).shape[0]
n_features = X_iris.shape[1]
for clf in (RidgeClassifier(), RidgeClassifierCV()):
clf.fit(filter_(X_iris), y_iris)
assert_equal(clf.coef_.shape, (n_classes, n_features))
y_pred = clf.predict(filter_(X_iris))
assert_greater(np.mean(y_iris == y_pred), .79)
n_samples = X_iris.shape[0]
cv = KFold(n_samples, 5)
clf = RidgeClassifierCV(cv=cv)
clf.fit(filter_(X_iris), y_iris)
y_pred = clf.predict(filter_(X_iris))
assert_true(np.mean(y_iris == y_pred) >= 0.8)
def _test_tolerance(filter_):
ridge = Ridge(tol=1e-5)
ridge.fit(filter_(X_diabetes), y_diabetes)
score = ridge.score(filter_(X_diabetes), y_diabetes)
ridge2 = Ridge(tol=1e-3)
ridge2.fit(filter_(X_diabetes), y_diabetes)
score2 = ridge2.score(filter_(X_diabetes), y_diabetes)
assert_true(score >= score2)
def test_dense_sparse():
for test_func in (_test_ridge_loo,
_test_ridge_cv,
_test_ridge_diabetes,
_test_multi_ridge_diabetes,
_test_ridge_classifiers,
_test_tolerance):
# test dense matrix
ret_dense = test_func(DENSE_FILTER)
# test sparse matrix
ret_sparse = test_func(SPARSE_FILTER)
# test that the outputs are the same
if ret_dense is not None and ret_sparse is not None:
assert_array_almost_equal(ret_dense, ret_sparse, decimal=3)
def test_ridge_cv_sparse_svd():
X = sp.csr_matrix(X_diabetes)
ridge = RidgeCV(gcv_mode="svd")
assert_raises(TypeError, ridge.fit, X)
def test_ridge_sparse_svd():
X = sp.csc_matrix(rng.rand(100, 10))
y = rng.rand(100)
ridge = Ridge(solver='svd')
assert_raises(TypeError, ridge.fit, X, y)
def test_class_weights():
# Test class weights.
X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0],
[1.0, 1.0], [1.0, 0.0]])
y = [1, 1, 1, -1, -1]
clf = RidgeClassifier(class_weight=None)
clf.fit(X, y)
assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([1]))
# we give a small weights to class 1
clf = RidgeClassifier(class_weight={1: 0.001})
clf.fit(X, y)
# now the hyperplane should rotate clock-wise and
# the prediction on this point should shift
assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([-1]))
# check if class_weight = 'balanced' can handle negative labels.
clf = RidgeClassifier(class_weight='balanced')
clf.fit(X, y)
assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([1]))
# class_weight = 'balanced', and class_weight = None should return
# same values when y has equal number of all labels
X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0], [1.0, 1.0]])
y = [1, 1, -1, -1]
clf = RidgeClassifier(class_weight=None)
clf.fit(X, y)
clfa = RidgeClassifier(class_weight='balanced')
clfa.fit(X, y)
assert_equal(len(clfa.classes_), 2)
assert_array_almost_equal(clf.coef_, clfa.coef_)
assert_array_almost_equal(clf.intercept_, clfa.intercept_)
def test_class_weight_vs_sample_weight():
"""Check class_weights resemble sample_weights behavior."""
for clf in (RidgeClassifier, RidgeClassifierCV):
# Iris is balanced, so no effect expected for using 'balanced' weights
clf1 = clf()
clf1.fit(iris.data, iris.target)
clf2 = clf(class_weight='balanced')
clf2.fit(iris.data, iris.target)
assert_almost_equal(clf1.coef_, clf2.coef_)
# Inflate importance of class 1, check against user-defined weights
sample_weight = np.ones(iris.target.shape)
sample_weight[iris.target == 1] *= 100
class_weight = {0: 1., 1: 100., 2: 1.}
clf1 = clf()
clf1.fit(iris.data, iris.target, sample_weight)
clf2 = clf(class_weight=class_weight)
clf2.fit(iris.data, iris.target)
assert_almost_equal(clf1.coef_, clf2.coef_)
# Check that sample_weight and class_weight are multiplicative
clf1 = clf()
clf1.fit(iris.data, iris.target, sample_weight ** 2)
clf2 = clf(class_weight=class_weight)
clf2.fit(iris.data, iris.target, sample_weight)
assert_almost_equal(clf1.coef_, clf2.coef_)
def test_class_weights_cv():
# Test class weights for cross validated ridge classifier.
X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0],
[1.0, 1.0], [1.0, 0.0]])
y = [1, 1, 1, -1, -1]
clf = RidgeClassifierCV(class_weight=None, alphas=[.01, .1, 1])
clf.fit(X, y)
# we give a small weights to class 1
clf = RidgeClassifierCV(class_weight={1: 0.001}, alphas=[.01, .1, 1, 10])
clf.fit(X, y)
assert_array_equal(clf.predict([[-.2, 2]]), np.array([-1]))
def test_ridgecv_store_cv_values():
# Test _RidgeCV's store_cv_values attribute.
    rng = np.random.RandomState(42)
n_samples = 8
n_features = 5
x = rng.randn(n_samples, n_features)
alphas = [1e-1, 1e0, 1e1]
n_alphas = len(alphas)
r = RidgeCV(alphas=alphas, store_cv_values=True)
# with len(y.shape) == 1
y = rng.randn(n_samples)
r.fit(x, y)
assert_equal(r.cv_values_.shape, (n_samples, n_alphas))
# with len(y.shape) == 2
n_responses = 3
y = rng.randn(n_samples, n_responses)
r.fit(x, y)
assert_equal(r.cv_values_.shape, (n_samples, n_responses, n_alphas))
def test_ridgecv_sample_weight():
rng = np.random.RandomState(0)
alphas = (0.1, 1.0, 10.0)
# There are different algorithms for n_samples > n_features
# and the opposite, so test them both.
for n_samples, n_features in ((6, 5), (5, 10)):
y = rng.randn(n_samples)
X = rng.randn(n_samples, n_features)
sample_weight = 1 + rng.rand(n_samples)
cv = KFold(n_samples, 5)
ridgecv = RidgeCV(alphas=alphas, cv=cv)
ridgecv.fit(X, y, sample_weight=sample_weight)
# Check using GridSearchCV directly
parameters = {'alpha': alphas}
fit_params = {'sample_weight': sample_weight}
gs = GridSearchCV(Ridge(), parameters, fit_params=fit_params,
cv=cv)
gs.fit(X, y)
assert_equal(ridgecv.alpha_, gs.best_estimator_.alpha)
assert_array_almost_equal(ridgecv.coef_, gs.best_estimator_.coef_)
def test_raises_value_error_if_sample_weights_greater_than_1d():
# Sample weights must be either scalar or 1D
n_sampless = [2, 3]
n_featuress = [3, 2]
rng = np.random.RandomState(42)
for n_samples, n_features in zip(n_sampless, n_featuress):
X = rng.randn(n_samples, n_features)
y = rng.randn(n_samples)
sample_weights_OK = rng.randn(n_samples) ** 2 + 1
sample_weights_OK_1 = 1.
sample_weights_OK_2 = 2.
sample_weights_not_OK = sample_weights_OK[:, np.newaxis]
sample_weights_not_OK_2 = sample_weights_OK[np.newaxis, :]
ridge = Ridge(alpha=1)
# make sure the "OK" sample weights actually work
ridge.fit(X, y, sample_weights_OK)
ridge.fit(X, y, sample_weights_OK_1)
ridge.fit(X, y, sample_weights_OK_2)
def fit_ridge_not_ok():
ridge.fit(X, y, sample_weights_not_OK)
def fit_ridge_not_ok_2():
ridge.fit(X, y, sample_weights_not_OK_2)
assert_raise_message(ValueError,
"Sample weights must be 1D array or scalar",
fit_ridge_not_ok)
assert_raise_message(ValueError,
"Sample weights must be 1D array or scalar",
fit_ridge_not_ok_2)
def test_sparse_design_with_sample_weights():
# Sample weights must work with sparse matrices
n_sampless = [2, 3]
n_featuress = [3, 2]
rng = np.random.RandomState(42)
sparse_matrix_converters = [sp.coo_matrix,
sp.csr_matrix,
sp.csc_matrix,
sp.lil_matrix,
sp.dok_matrix
]
sparse_ridge = Ridge(alpha=1., fit_intercept=False)
dense_ridge = Ridge(alpha=1., fit_intercept=False)
for n_samples, n_features in zip(n_sampless, n_featuress):
X = rng.randn(n_samples, n_features)
y = rng.randn(n_samples)
sample_weights = rng.randn(n_samples) ** 2 + 1
for sparse_converter in sparse_matrix_converters:
X_sparse = sparse_converter(X)
sparse_ridge.fit(X_sparse, y, sample_weight=sample_weights)
dense_ridge.fit(X, y, sample_weight=sample_weights)
assert_array_almost_equal(sparse_ridge.coef_, dense_ridge.coef_,
decimal=6)
def test_raises_value_error_if_solver_not_supported():
    # Tests whether a ValueError is raised if an unrecognized solver
    # is passed to ridge_regression
wrong_solver = "This is not a solver (MagritteSolveCV QuantumBitcoin)"
exception = ValueError
message = "Solver %s not understood" % wrong_solver
def func():
X = np.eye(3)
y = np.ones(3)
ridge_regression(X, y, alpha=1., solver=wrong_solver)
assert_raise_message(exception, message, func)
def test_sparse_cg_max_iter():
reg = Ridge(solver="sparse_cg", max_iter=1)
reg.fit(X_diabetes, y_diabetes)
assert_equal(reg.coef_.shape[0], X_diabetes.shape[1])
| bsd-3-clause |
JuliaSprenger/python-neo | neo/test/iotest/common_io_test.py | 5 | 20208 | '''
Common tests for IOs:
* check presence of all necessary attr
* check types
* write/read consistency
See BaseTestIO.
The public URL is in url_for_tests.
To deposit new testing files, please create an account at
gin.g-node.org and upload files to the NeuralEnsemble/ephy_testing_data
data repo.
'''
__test__ = False
import os
import inspect
from copy import copy
import unittest
import pathlib
from neo.core import Block, Segment
from neo.io.basefromrawio import BaseFromRaw
from neo.test.tools import (assert_same_sub_schema,
assert_neo_object_is_compliant,
assert_sub_schema_is_lazy_loaded,
assert_children_empty)
from neo.test.rawiotest.tools import can_use_network
from neo.test.rawiotest.common_rawio_test import repo_for_test
from neo.utils import (download_dataset,
get_local_testing_data_folder)
try:
import datalad
HAVE_DATALAD = True
except ImportError:
HAVE_DATALAD = False
from neo.test.iotest.tools import (cleanup_test_file,
close_object_safe, create_generic_io_object,
create_generic_reader,
create_generic_writer,
get_test_file_full_path,
iter_generic_io_objects,
iter_generic_readers, iter_read_objects,
read_generic,
write_generic)
from neo.test.generate_datasets import generate_from_supported_objects
class BaseTestIO:
'''
    This class makes common tests for all IOs.
    Several strategies:
      * for IOs able to read and write: test_write_then_read
      * for IOs able to read and write with hash conservation (optional):
        test_read_then_write
      * for all IOs: test_assert_readed_neo_object_is_compliant
2 cases:
* files are at G-node and downloaded:
download_test_files_if_not_present
* files are generated by MyIO.write()
'''
# ~ __test__ = False
# all IO test need to modify this:
ioclass = None # the IOclass to be tested
entities_to_test = [] # list of files to test compliances
entities_to_download = [] # when files are at gin
# when reading then writing produces files with identical hashes
hash_conserved_when_write_read = False
# when writing then reading creates an identical neo object
read_and_write_is_bijective = True
    # allow environment to tell us to avoid using the network
use_network = can_use_network()
local_test_dir = get_local_testing_data_folder()
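    # A minimal sketch of how a concrete test case is expected to fill in
    # these class attributes (ExampleIO and the paths are hypothetical,
    # not a real neo IO):
    #
    #     class TestExampleIO(BaseTestIO, unittest.TestCase):
    #         ioclass = ExampleIO
    #         entities_to_download = ['example']
    #         entities_to_test = ['example/example_file.dat']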
def setUp(self):
'''
Set up the test fixture. This is run for every test
'''
self.higher = self.ioclass.supported_objects[0]
self.shortname = self.ioclass.__name__.lower().rstrip('io')
# these objects can both be written and read
self.io_readandwrite = list(set(self.ioclass.readable_objects) &
set(self.ioclass.writeable_objects))
# these objects can be either written or read
self.io_readorwrite = list(set(self.ioclass.readable_objects) |
set(self.ioclass.writeable_objects))
if HAVE_DATALAD:
for remote_path in self.entities_to_download:
download_dataset(repo=repo_for_test, remote_path=remote_path)
self.files_generated = []
self.generate_files_for_io_able_to_write()
            # be careful: self.entities_to_test is a class attribute
self.files_to_test = [self.get_local_path(e) for e in self.entities_to_test]
else:
self.files_to_test = []
raise unittest.SkipTest("Requires datalad download of data from the web")
def create_local_dir_if_not_exists(self):
'''
Create a local directory to store testing files and return it.
The directory path is also written to self.local_test_dir
'''
self.local_test_dir = create_local_temp_dir(
self.shortname, directory=os.environ.get("NEO_TEST_FILE_DIR", None))
return self.local_test_dir
def download_test_files_if_not_present(self):
'''
        Download %s files from G-node for testing.
        url_for_tests is a global at the beginning of this file.
''' % self.ioclass.__name__
if not self.use_network:
raise unittest.SkipTest("Requires download of data from the web")
url = url_for_tests + self.shortname
try:
make_all_directories(self.files_to_download, self.local_test_dir)
download_test_file(self.files_to_download,
self.local_test_dir, url)
except OSError as exc:
raise unittest.TestCase.failureException(exc)
download_test_files_if_not_present.__test__ = False
def cleanup_file(self, path):
'''
Remove test files or directories safely.
'''
cleanup_test_file(self.ioclass, path, directory=self.local_test_dir)
def able_to_write_or_read(self, writeread=False, readwrite=False):
'''
Return True if generalized writing or reading is possible.
If writeread=True, return True if writing then reading is
possible and produces identical neo objects.
If readwrite=True, return True if reading then writing is possible
and produces files with identical hashes.
'''
# Find the highest object that is supported by the IO
# Test only if it is a Block or Segment, and if it can both read
# and write this object.
if self.higher not in self.io_readandwrite:
return False
if self.higher not in [Block, Segment]:
return False
        # when an IO needs external knowledge for writing or reading, such as
        # sampling_rate (RawBinaryIO...), the test is too complex to design
        # generically.
if (self.higher in self.ioclass.read_params and
len(self.ioclass.read_params[self.higher]) != 0):
return False
# handle cases where the test should write then read
if writeread and not self.read_and_write_is_bijective:
return False
# handle cases where the test should read then write
if readwrite and not self.hash_conserved_when_write_read:
return False
return True
def get_local_base_folder(self):
return get_local_testing_data_folder()
def get_local_path(self, sub_path):
root_local_path = self.get_local_base_folder()
local_path = root_local_path / sub_path
# TODO later : remove the str when all IOs handle the pathlib.Path objects
local_path = str(local_path)
return local_path
def generic_io_object(self, filename=None, return_path=False, clean=False):
'''
Create an io object in a generic way that can work with both
file-based and directory-based io objects.
If filename is None, create a filename (default).
If return_path is True, return the full path of the file along with
the io object. return ioobj, path. Default is False.
If clean is True, try to delete existing versions of the file
before creating the io object. Default is False.
'''
return create_generic_io_object(ioclass=self.ioclass,
filename=filename,
directory=self.local_test_dir,
return_path=return_path,
clean=clean)
def read_file(self, filename=None, return_path=False, clean=False,
target=None, readall=False, lazy=False):
'''
Read from the specified filename.
If filename is None, create a filename (default).
If return_path is True, return the full path of the file along with
the object. return obj, path. Default is False.
If clean is True, try to delete existing versions of the file
before creating the io object. Default is False.
If target is None, use the first supported_objects from ioobj
If target is False, use the 'read' method.
If target is the Block or Segment class, use read_block or
read_segment, respectively.
If target is a string, use 'read_'+target.
        The lazy parameter is passed to the reader. Default is False.
If readall is True, use the read_all_ method instead of the read_
method. Default is False.
'''
ioobj, path = self.generic_io_object(filename=filename,
return_path=True, clean=clean)
obj = read_generic(ioobj, target=target, lazy=lazy,
readall=readall, return_reader=False)
if return_path:
return obj, path
return obj
def write_file(self, obj=None, filename=None, return_path=False,
clean=False, target=None):
'''
Write the target object to a file using the given neo io object ioobj.
If filename is None, create a filename (default).
If return_path is True, return the full path of the file along with
the object. return obj, path. Default is False.
If clean is True, try to delete existing versions of the file
before creating the io object. Default is False.
If target is None, use the first supported_objects from ioobj
If target is False, use the 'read' method.
If target is the Block or Segment class, use read_block or
read_segment, respectively.
If target is a string, use 'read_'+target.
obj is the object to write. If obj is None, an object is created
automatically for the io class.
'''
ioobj, path = self.generic_io_object(filename=filename,
return_path=True, clean=clean)
obj = write_generic(ioobj, target=target, return_reader=False)
if return_path:
return obj, path
return obj
def iter_io_objects(self, return_path=False, clean=False):
'''
Return an iterable over the io objects created from files_to_test
If return_path is True, yield the full path of the file along with
the io object. yield ioobj, path Default is False.
If clean is True, try to delete existing versions of the file
before creating the io object. Default is False.
'''
return iter_generic_io_objects(ioclass=self.ioclass,
filenames=self.files_to_test,
directory=self.local_test_dir,
return_path=return_path,
clean=clean)
def iter_readers(self, target=None, readall=False,
return_path=False, return_ioobj=False, clean=False):
'''
Return an iterable over readers created from files_to_test.
If return_path is True, return the full path of the file along with
the reader object. return reader, path.
If return_ioobj is True, return the io object as well as the reader.
return reader, ioobj. Default is False.
If both return_path and return_ioobj is True,
return reader, path, ioobj. Default is False.
If clean is True, try to delete existing versions of the file
before creating the io object. Default is False.
If readall is True, use the read_all_ method instead of the
read_ method. Default is False.
'''
return iter_generic_readers(ioclass=self.ioclass,
filenames=self.files_to_test,
directory=self.local_test_dir,
return_path=return_path,
return_ioobj=return_ioobj,
target=target,
clean=clean,
readall=readall)
def iter_objects(self, target=None, return_path=False, return_ioobj=False,
return_reader=False, clean=False, readall=False,
lazy=False):
'''
Iterate over objects read from the list of filenames in files_to_test.
If target is None, use the first supported_objects from ioobj
If target is False, use the 'read' method.
If target is the Block or Segment class, use read_block or
read_segment, respectively.
If target is a string, use 'read_'+target.
If return_path is True, yield the full path of the file along with
the object. yield obj, path.
If return_ioobj is True, yield the io object as well as the object.
yield obj, ioobj. Default is False.
If return_reader is True, yield the io reader function as well as the
object. yield obj, reader. Default is False.
If some combination of return_path, return_ioobj, and return_reader
is True, they are yielded in the order: obj, path, ioobj, reader.
If clean is True, try to delete existing versions of the file
before creating the io object. Default is False.
        The lazy parameter is passed to the reader. Default is False.
If readall is True, use the read_all_ method instead of the read_
method. Default is False.
'''
return iter_read_objects(ioclass=self.ioclass,
filenames=self.files_to_test,
directory=self.local_test_dir,
target=target,
return_path=return_path,
return_ioobj=return_ioobj,
return_reader=return_reader,
clean=clean, readall=readall,
lazy=lazy)
def generate_files_for_io_able_to_write(self):
'''
Write files for use in testing.
'''
self.files_generated = []
if not self.able_to_write_or_read():
return
generate_from_supported_objects(self.ioclass.supported_objects)
ioobj, path = self.generic_io_object(return_path=True, clean=True)
if ioobj is None:
return
self.files_generated.append(path)
write_generic(ioobj, target=self.higher)
close_object_safe(ioobj)
def test_write_then_read(self):
'''
        Test for IOs that are able to write and read, here %s:
        1 - Generate a full schema with supported objects.
        2 - Write to a file
        3 - Read from the file
        4 - Check the hierarchy
        5 - Check data
        Works only for IOs whose highest supported object is Block or
        Segment (main cases).
''' % self.ioclass.__name__
if not self.able_to_write_or_read(writeread=True):
return
ioobj1 = self.generic_io_object(clean=True)
if ioobj1 is None:
return
ob1 = write_generic(ioobj1, target=self.higher)
close_object_safe(ioobj1)
ioobj2 = self.generic_io_object()
# Read the highest supported object from the file
obj_reader = create_generic_reader(ioobj2, target=False)
ob2 = obj_reader()[0]
if self.higher == Segment:
ob2 = ob2.segments[0]
# some formats (e.g. elphy) do not support double floating
# point spiketrains
try:
assert_same_sub_schema(ob1, ob2, True, 1e-8)
assert_neo_object_is_compliant(ob1)
assert_neo_object_is_compliant(ob2)
# intercept exceptions and add more information
except BaseException as exc:
raise
close_object_safe(ioobj2)
def test_read_then_write(self):
'''
        Test for IOs that are able to read and write, here %s:
        1 - Read a file
        2 - Write the object set to another file
        3 - Compare the hashes of the 2 files
NOTE: TODO: Not implemented yet
''' % self.ioclass.__name__
if not self.able_to_write_or_read(readwrite=True):
return
# assert_file_contents_equal(a, b)
def test_assert_readed_neo_object_is_compliant(self):
'''
Reading %s files in `files_to_test` produces compliant objects.
Compliance test: neo.test.tools.assert_neo_object_is_compliant for
lazy mode.
''' % self.ioclass.__name__
for obj, path in self.iter_objects(lazy=False, return_path=True):
try:
# Check compliance of the block
assert_neo_object_is_compliant(obj)
# intercept exceptions and add more information
except BaseException as exc:
exc.args += ('from %s' % os.path.basename(path), )
raise
def test_readed_with_lazy_is_compliant(self):
'''
Reading %s files in `files_to_test` with `lazy` is compliant.
Test the reader with lazy = True.
The schema must contain proxy objects.
''' % self.ioclass.__name__
        # This is for files present at G-Node or generated
if self.ioclass.support_lazy:
for obj, path in self.iter_objects(lazy=True, return_path=True):
try:
assert_sub_schema_is_lazy_loaded(obj)
# intercept exceptions and add more information
except BaseException as exc:
raise
def test_create_group_across_segment(self):
"""
Read {io_name} files in 'files_to_test' with
create_group_across_segment test cases.
Test read_block method of BaseFromRaw with different test cases
for create_group_across_segment.
""".format(io_name=self.ioclass.__name__)
test_cases = [
{"SpikeTrain": True},
{"AnalogSignal": True},
{"Event": True},
{"Epoch": True},
{"SpikeTrain": True,
"AnalogSignal": True,
"Event": True,
"Epoch": True},
True
]
expected_outcomes = [
None,
None,
NotImplementedError,
NotImplementedError,
NotImplementedError,
NotImplementedError,
]
mock_test_case = unittest.TestCase()
if issubclass(self.ioclass, BaseFromRaw):
for obj, reader in self.iter_objects(target=Block,
lazy=self.ioclass.support_lazy,
return_reader=True):
if "create_group_across_segment" in inspect.signature(reader).parameters.keys():
# Ignore testing readers for IOs where read_block is overridden to exclude
# the create_group_across_segment functionality, for eg. NixIO_fr
for case, outcome in zip(test_cases, expected_outcomes):
if outcome is not None:
with mock_test_case.assertRaises(outcome):
reader(lazy=self.ioclass.support_lazy, create_group_across_segment=case)
else:
reader(lazy=self.ioclass.support_lazy, create_group_across_segment=case)
def test__handle_pathlib_filename(self):
if self.files_to_test:
filename = get_test_file_full_path(self.ioclass, filename=self.files_to_test[0],
directory=self.local_test_dir)
pathlib_filename = pathlib.Path(filename)
if self.ioclass.mode == 'file':
self.ioclass(filename=pathlib_filename)
elif self.ioclass.mode == 'dir':
self.ioclass(dirname=pathlib_filename)
| bsd-3-clause |
ChadFulton/statsmodels | statsmodels/datasets/utils.py | 1 | 11318 | from statsmodels.compat.python import (StringIO, urlopen, HTTPError, URLError, lrange,
cPickle, urljoin, long, PY3)
import shutil
from os import environ, makedirs
from os.path import expanduser, exists, dirname, abspath, join
import numpy as np
from pandas import read_stata, read_csv, DataFrame, Series, Index
def webuse(data, baseurl='https://www.stata-press.com/data/r11/', as_df=True):
"""
Download and return an example dataset from Stata.
Parameters
----------
data : str
Name of dataset to fetch.
baseurl : str
The base URL to the stata datasets.
as_df : bool
Deprecated. Always returns a DataFrame
Returns
-------
dta : DataFrame
A DataFrame containing the Stata dataset.
Examples
--------
>>> dta = webuse('auto')
Notes
-----
    Make sure baseurl has a trailing forward slash. Doesn't do any
error checking in response URLs.
"""
url = urljoin(baseurl, data+'.dta')
return read_stata(url)
class Dataset(dict):
def __init__(self, **kw):
# define some default attributes, so pylint can find them
self.endog = None
self.exog = None
self.data = None
self.names = None
dict.__init__(self, kw)
self.__dict__ = self
# Some datasets have string variables. If you want a raw_data
# attribute you must create this in the dataset's load function.
try: # some datasets have string variables
self.raw_data = self.data.astype(float)
except:
pass
def __repr__(self):
return str(self.__class__)
def process_pandas(data, endog_idx=0, exog_idx=None, index_idx=None):
names = data.columns
if isinstance(endog_idx, (int, long)):
endog_name = names[endog_idx]
endog = data[endog_name].copy()
if exog_idx is None:
exog = data.drop([endog_name], axis=1)
else:
exog = data[names[exog_idx]].copy()
else:
endog = data.loc[:, endog_idx].copy()
endog_name = list(endog.columns)
if exog_idx is None:
exog = data.drop(endog_name, axis=1)
elif isinstance(exog_idx, (int, long)):
exog = data[names[exog_idx]].copy()
else:
exog = data[names[exog_idx]].copy()
if index_idx is not None: # NOTE: will have to be improved for dates
index = Index(data.iloc[:, index_idx])
endog.index = index
exog.index = index.copy()
data = data.set_index(names[index_idx])
exog_name = list(exog.columns)
dataset = Dataset(data=data, names=list(names), endog=endog,
exog=exog, endog_name=endog_name, exog_name=exog_name)
return dataset
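# Illustrative sketch (hypothetical frame, not part of the module): with
# endog_idx=0 and exog_idx=None the first column becomes endog and the
# remaining columns become exog.
def _demo_process_pandas():
    df = DataFrame([[1., 3., 5.], [2., 4., 6.]], columns=['y', 'x1', 'x2'])
    ds = process_pandas(df, endog_idx=0)
    # ds.endog_name -> 'y'; ds.exog_name -> ['x1', 'x2']
    return ds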
def _maybe_reset_index(data):
"""
All the Rdatasets have the integer row.labels from R if there is no
real index. Strip this for a zero-based index
"""
if data.index.equals(Index(lrange(1, len(data) + 1))):
data = data.reset_index(drop=True)
return data
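# Illustrative sketch (hypothetical frame): an R-style frame indexed 1..n
# is detected and reset to a 0-based index; any other index is untouched.
def _demo_maybe_reset_index():
    df = DataFrame({'x': [10, 20]}, index=[1, 2])
    # comes back indexed [0, 1]
    return _maybe_reset_index(df)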
def _get_cache(cache):
if cache is False:
# do not do any caching or load from cache
cache = None
elif cache is True: # use default dir for cache
cache = get_data_home(None)
else:
cache = get_data_home(cache)
return cache
def _cache_it(data, cache_path):
if PY3:
# for some reason encode("zip") won't work for me in Python 3?
import zlib
# use protocol 2 so can open with python 2.x if cached in 3.x
data = data.decode('utf-8')
open(cache_path, "wb").write(zlib.compress(cPickle.dumps(data,
protocol=2)))
else:
open(cache_path, "wb").write(cPickle.dumps(data).encode("zip"))
def _open_cache(cache_path):
if PY3:
# NOTE: don't know why but decode('zip') doesn't work on my
# Python 3 build
import zlib
data = zlib.decompress(open(cache_path, 'rb').read())
# return as bytes object encoded in utf-8 for cross-compat of cached
data = cPickle.loads(data).encode('utf-8')
else:
data = open(cache_path, 'rb').read().decode('zip')
data = cPickle.loads(data)
return data
def _urlopen_cached(url, cache):
"""
Tries to load data from cache location otherwise downloads it. If it
downloads the data and cache is not None then it will put the downloaded
data in the cache path.
"""
from_cache = False
if cache is not None:
cache_path = join(cache,
url.split("://")[-1].replace('/', ',') + ".zip")
try:
data = _open_cache(cache_path)
from_cache = True
except:
pass
# not using the cache or didn't find it in cache
if not from_cache:
data = urlopen(url, timeout=3).read()
if cache is not None: # then put it in the cache
_cache_it(data, cache_path)
return data, from_cache
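# Cache-key sketch (illustrative values): the cache file name is built by
# dropping the URL scheme and flattening path separators into commas.
def _demo_cache_key():
    url = "https://example.com/csv/datasets/iris.csv"
    # -> 'example.com,csv,datasets,iris.csv.zip'
    return url.split("://")[-1].replace('/', ',') + ".zip"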
def _get_data(base_url, dataname, cache, extension="csv"):
url = base_url + (dataname + ".%s") % extension
try:
data, from_cache = _urlopen_cached(url, cache)
except HTTPError as err:
if '404' in str(err):
raise ValueError("Dataset %s was not found." % dataname)
else:
raise err
data = data.decode('utf-8', 'strict')
return StringIO(data), from_cache
def _get_dataset_meta(dataname, package, cache):
# get the index, you'll probably want this cached because you have
# to download info about all the data to get info about any of the data...
index_url = ("https://raw.githubusercontent.com/vincentarelbundock/Rdatasets/master/"
"datasets.csv")
data, _ = _urlopen_cached(index_url, cache)
# Python 3
if PY3: # pragma: no cover
data = data.decode('utf-8', 'strict')
index = read_csv(StringIO(data))
idx = np.logical_and(index.Item == dataname, index.Package == package)
dataset_meta = index.loc[idx]
return dataset_meta["Title"].item()
def get_rdataset(dataname, package="datasets", cache=False):
"""download and return R dataset
Parameters
----------
dataname : str
The name of the dataset you want to download
package : str
The package in which the dataset is found. The default is the core
'datasets' package.
cache : bool or str
If True, will download this data into the STATSMODELS_DATA folder.
The default location is a folder called statsmodels_data in the
user home folder. Otherwise, you can specify a path to a folder to
use for caching the data. If False, the data will not be cached.
Returns
-------
dataset : Dataset instance
A `statsmodels.data.utils.Dataset` instance. This objects has
attributes:
* data - A pandas DataFrame containing the data
* title - The dataset title
* package - The package from which the data came
* from_cache - Whether not cached data was retrieved
* __doc__ - The verbatim R documentation.
Notes
-----
    If the R dataset has an integer index, it is reset to be zero-based.
Otherwise the index is preserved. The caching facilities are dumb. That
is, no download dates, e-tags, or otherwise identifying information
is checked to see if the data should be downloaded again or not. If the
dataset is in the cache, it's used.
"""
# NOTE: use raw github bc html site might not be most up to date
data_base_url = ("https://raw.githubusercontent.com/vincentarelbundock/Rdatasets/"
"master/csv/"+package+"/")
docs_base_url = ("https://raw.githubusercontent.com/vincentarelbundock/Rdatasets/"
"master/doc/"+package+"/rst/")
cache = _get_cache(cache)
data, from_cache = _get_data(data_base_url, dataname, cache)
data = read_csv(data, index_col=0)
data = _maybe_reset_index(data)
title = _get_dataset_meta(dataname, package, cache)
doc, _ = _get_data(docs_base_url, dataname, cache, "rst")
return Dataset(data=data, __doc__=doc.read(), package=package, title=title,
from_cache=from_cache)
# The below function were taken from sklearn
def get_data_home(data_home=None):
"""Return the path of the statsmodels data dir.
This folder is used by some large dataset loaders to avoid
downloading the data several times.
By default the data dir is set to a folder named 'statsmodels_data'
in the user home folder.
Alternatively, it can be set by the 'STATSMODELS_DATA' environment
    variable or programmatically by giving an explicit folder path. The
'~' symbol is expanded to the user home folder.
If the folder does not already exist, it is automatically created.
"""
if data_home is None:
data_home = environ.get('STATSMODELS_DATA',
join('~', 'statsmodels_data'))
data_home = expanduser(data_home)
if not exists(data_home):
makedirs(data_home)
return data_home
def clear_data_home(data_home=None):
"""Delete all the content of the data home cache."""
data_home = get_data_home(data_home)
shutil.rmtree(data_home)
def check_internet(url=None):
"""Check if internet is available"""
url = "https://github.com" if url is None else url
try:
urlopen(url)
except URLError as err:
return False
return True
def strip_column_names(df):
"""
Remove leading and trailing single quotes
Parameters
----------
df : DataFrame
DataFrame to process
Returns
-------
df : DataFrame
Dataframe with stripped column names
Notes
-----
In-place modification
"""
columns = []
for c in df:
if c.startswith('\'') and c.endswith('\''):
c = c[1:-1]
elif c.startswith('\''):
c = c[1:]
elif c.endswith('\''):
c = c[:-1]
columns.append(c)
df.columns = columns
return df
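# Usage sketch (hypothetical frame): quotes are stripped from either end
# independently and the frame is modified in place.
def _demo_strip_column_names():
    df = DataFrame([[1, 2, 3]], columns=["'a'", "'b", "c'"])
    strip_column_names(df)
    # list(df.columns) -> ['a', 'b', 'c']
    return df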
def load_csv(base_file, csv_name, sep=',', convert_float=False):
"""Standard simple csv loader"""
filepath = dirname(abspath(base_file))
    filename = join(filepath, csv_name)
engine = 'python' if sep != ',' else 'c'
float_precision = {}
if engine == 'c':
float_precision = {'float_precision': 'high'}
data = read_csv(filename, sep=sep, engine=engine, **float_precision)
if convert_float:
data = data.astype(float)
return data
def as_numpy_dataset(ds, as_pandas=None, retain_index=False):
"""Convert a pandas dataset to a NumPy dataset"""
if as_pandas:
return ds
if as_pandas is None:
import warnings
warnings.warn('load will return datasets containing pandas DataFrames and Series '
'in the Future. To suppress this message, specify as_pandas=False',
FutureWarning)
ds.data = ds.data.to_records(index=retain_index)
for d in dir(ds):
if d.startswith('_'):
continue
attr = getattr(ds, d)
if isinstance(attr, (Series, DataFrame)):
setattr(ds, d, np.asarray(attr))
return ds
| bsd-3-clause |
googlearchive/rgc-models | response_model/python/population_subunits/jitter/jitter_analysis.py | 1 | 14706 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""This code is used for analysing jitter stimulus models.
Hopefully, finer resolution gives better subunit estimates.
"""
import sys
import os.path
import tensorflow as tf
from absl import app
from absl import flags
from absl import gfile
import cPickle as pickle
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import pylab
import matplotlib.pyplot as plt
import numpy as np
import scipy.io as sio
from scipy import ndimage
import random
FLAGS = flags.FLAGS
# flags for data locations
flags.DEFINE_string('folder_name', 'experiment_jitter', 'folder where to store all the data')
flags.DEFINE_string('save_location',
'/home/bhaishahster/',
'where to store logs and outputs?');
flags.DEFINE_string('data_location',
'/home/foam/retina_data/Google datasets/2016-04-21-1/data006(2016-04-21-1_data006_data006)/',
'where to take data from?')
# flags for stochastic learning
flags.DEFINE_integer('batchsz', 100, 'batch size for training')
flags.DEFINE_integer('n_chunks',1793, 'number of data chunks') # should be 216
flags.DEFINE_integer('n_b_in_c', 1, 'number of batches in one chunk of data')
flags.DEFINE_integer('train_len', 216 - 21, 'how much training length to use?')
flags.DEFINE_float('step_sz', 200, 'step size for learning algorithm')
# random number generators initialized
# removes unneccessary data variabilities while comparing algorithms
flags.DEFINE_integer('np_randseed', 23, 'numpy RNG seed')
flags.DEFINE_integer('randseed', 65, 'python RNG seed')
# flags for model/loss specification
flags.DEFINE_string('model_id', 'relu_window_mother_sfm', 'which model to fit')
flags.DEFINE_string('loss', 'poisson', 'which loss to use?')
# model specific terms
# useful for convolution-like models
flags.DEFINE_integer('window', 16, 'size of window for each subunit in relu_window model')
flags.DEFINE_integer('stride', 16, 'stride for relu_window')
flags.DEFINE_integer('su_channels', 3, 'number of color channels each subunit should take input from')
# some models need regularization of parameters
flags.DEFINE_float('lam_w', 0.0001, 'sparsitiy regularization of w')
flags.DEFINE_float('lam_a', 0.0001, 'sparsitiy regularization of a')
# dataset specific
flags.DEFINE_float('n_cells',1, 'number of cells in the dataset')
FLAGS = flags.FLAGS
response = np.array([])
test_chunks = np.array([])
train_chunks = np.array([])
train_counter = np.array([])
def init_chunks():
# initialize the chunks, called at the beginning of the code.
global test_chunks
global train_chunks
global train_counter
random_chunks = np.arange(FLAGS.n_chunks)+1
test_chunks = random_chunks[0:20]
train_chunks = random_chunks[22:]
train_counter =0
def get_stim_resp(data_type='train'):
# this function gets you the training and testing chunks!
# permute chunks and decide the training and test chunks
global train_chunks
global test_chunks
global train_counter
global response
def get_stimulus_batch(ichunk):
stim_path = FLAGS.data_location + 'Stimulus/'
stim_file = sio.loadmat(gfile.Open(stim_path+'stim_chunk_' + str(ichunk) + '.mat'))
chunk_start = np.squeeze(stim_file['chunk_start'])
chunk_end = np.squeeze(stim_file['chunk_end'])
jump = stim_file['jump']
stim_chunk = stim_file['stimulus_chunk']
stim_chunk = np.transpose(stim_chunk, [3,0,1,2])
return stim_chunk, chunk_start, chunk_end
if(data_type=='test'):
print('Loading test')
chunk_ids = np.array(test_chunks).astype('int')
if(data_type=='train'):
print('Loading train')
if(train_counter>=train_chunks.shape[0]):
      np.random.shuffle(train_chunks)  # in-place shuffle
train_counter=0
chunk_ids = [np.squeeze(np.array(train_chunks[train_counter]).astype('int'))]
train_counter =train_counter + 1
stim_total = np.zeros((0,640,320,3))
resp_total =np.zeros((0,FLAGS.n_cells))
data_len_total = 0
for ichunk in chunk_ids:
print('Loading chunk:' + str(ichunk))
# get chunks
if(ichunk==chunk_ids[0]):
print('first entry')
# first entry into the chunk
stim_chunk, chunk_start, chunk_end = get_stimulus_batch(ichunk)
resp_chunk = response[chunk_start+29:chunk_end+1,:]
else:
print('second entry')
stim_chunk, chunk_start, chunk_end = get_stimulus_batch(ichunk)
stim_chunk = stim_chunk[30:,:,:,:]
resp_chunk = response[chunk_start+30-1:chunk_end,:]
data_len = resp_chunk.shape[0]
print(chunk_start, chunk_end)
print(np.shape(stim_chunk), np.shape(resp_chunk))
# remove overlapping parts of chunks and then append them!
stim_total = np.append(stim_total, stim_chunk, axis=0)
resp_total = np.append(resp_total, resp_chunk, axis=0)
data_len_total = data_len_total + data_len
return stim_total, resp_total, data_len_total
def setup_dataset():
# initialize paths, get dataset properties, etc
path = FLAGS.data_location
# load cell response
response_path = path + 'response.mat'
response_file = sio.loadmat(gfile.Open(response_path))
resp_mat = response_file['binned_spikes']
resp_file_cids = np.squeeze(response_file['cell_ids'])
# load off parasol cell IDs
cids_path = path + 'cell_ids/cell_ids_OFF parasol.mat'
cids_file = sio.loadmat(gfile.Open(cids_path))
cids_select = np.squeeze(cids_file['cids'])
# find index of cells to choose from resp_mat
resp_file_choose_idx = np.array([])
for icell in np.array(cids_select):
idx = np.where(resp_file_cids == icell)
resp_file_choose_idx = np.append(resp_file_choose_idx, idx[0])
# finally, get selected cells from resp_mat
global response
response = resp_mat[resp_file_choose_idx.astype('int'),:].T
# load population time courses
time_c_file_path = path + 'cell_ids/time_courses.mat'
time_c_file = sio.loadmat(gfile.Open(time_c_file_path))
tc_mat = time_c_file['time_courses']
tm_cids = np.squeeze(time_c_file['cids'])
# choose cells of interest
tc_file_choose_idx = np.array([])
for icell in np.array(cids_select):
idx = np.where(tm_cids == icell)
tc_file_choose_idx = np.append(tc_file_choose_idx, idx[0])
tc_select = tc_mat[tc_file_choose_idx.astype('int'),:,:]
tc_mean = np.squeeze(np.mean(tc_select,axis=0))
n_cells = cids_select.shape[0]
FLAGS.n_cells = n_cells
# 'response', cell ids are 'cids_select' with 'n_cells' cells, 'tc_select' are timecourses, 'tc_mean' for mean time course
return response, cids_select, n_cells, tc_select, tc_mean
def get_windows():
# use FLAGS to get convolutional 'windows' for convolutional models.
window = FLAGS.window
n_channels = FLAGS.su_channels
n_pix = ((2* window + 1) ** 2)*n_channels # number of pixels in the window
w_mask = np.zeros((2 * window + 1, 2 * window + 1, n_channels, n_pix))
icnt = 0
# make mask_tf: weight (dimx X dimy X npix) for convolutional layer,
# where each layer is 1 for a particular pixel in window and 0 for others.
# this is used for flattening the pixels in a window, so that different weights could be applied to each window
for ichannel in range(n_channels):
for ix in range(2 * window + 1):
for iy in range(2 * window + 1):
w_mask[ix, iy, ichannel, icnt] =1
icnt = icnt + 1
mask_tf = tf.constant(np.array(w_mask, dtype='float32'))
# number of windows in x and y dimensions
dimx = np.floor(1 + ((640 - (2 * window + 1))/FLAGS.stride)).astype('int')
dimy = np.floor(1 + ((320 - (2 * window + 1))/FLAGS.stride)).astype('int')
return mask_tf, dimx, dimy, n_pix
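# Worked example for the defaults (window=16, stride=16, 3 color channels):
# each window spans 2*16+1 = 33 pixels per side, so
#   n_pix = 33 * 33 * 3 = 3267
#   dimx  = floor(1 + (640 - 33) / 16) = 38
#   dimy  = floor(1 + (320 - 33) / 16) = 18
# i.e. a 38 x 18 grid of subunit windows tiling the 640 x 320 stimulus.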
def main(argv):
# initialize training and testing chunks
init_chunks()
# setup dataset
_,cids_select, n_cells, tc_select, tc_mean = setup_dataset()
# print parameters
print('Save folder name: ' + str(FLAGS.folder_name) +
'\nmodel:' + str(FLAGS.model_id) +
'\nLoss:' + str(FLAGS.loss) +
'\nbatch size' + str(FLAGS.batchsz) +
'\nstep size' + str(FLAGS.step_sz) +
'\ntraining length: ' + str(FLAGS.train_len) +
'\nn_cells: '+str(n_cells))
# filename for saving file
short_filename = ('_loss='+
str(FLAGS.loss) + '_batch_sz='+ str(FLAGS.batchsz) +
'_step_sz'+ str(FLAGS.step_sz) +
'_tlen=' + str(FLAGS.train_len) + '_jitter')
# setup model
with tf.Session() as sess:
# initialize stuff
if FLAGS.loss == 'poisson':
b_init = np.array(0.000001*np.ones(n_cells)) # a very small positive bias needed to avoid log(0) in poisson loss
else:
b_init = np.log((tot_spks_chosen_cells)/(216000. - tot_spks_chosen_cells)) # log-odds, a good initialization for some
# RGB time filter
tm4D = np.zeros((30,1,3,3))
for ichannel in range(3):
tm4D[:,0,ichannel,ichannel] = tc_mean[:,ichannel]
tc = tf.Variable((tm4D).astype('float32'),name = 'tc')
d1=640
d2=320
colors=3
# make data placeholders
stim = tf.placeholder(tf.float32,shape=[None,d1,d2,colors],name='stim')
resp = tf.placeholder(tf.float32,shape=[None,n_cells],name='resp')
data_len = tf.placeholder(tf.float32,name='data_len')
# time convolution
# time course should be time,d1,color,color
# original stimulus is (time, d1,d2,color). Permute it to (d2,time,d1,color) so that 1D convolution could be mimicked using conv_2d.
stim_time_filtered = tf.transpose(tf.nn.conv2d(tf.transpose(stim,(2,0,1,3)),tc, strides=[1,1,1,1], padding='VALID'), (1,2,0,3))
# learn almost convolutional model
short_filename = ('model=' + str(FLAGS.model_id) + short_filename)
mask_tf, dimx, dimy, n_pix = get_windows()
w_del = tf.Variable(np.array( 0.05*np.random.randn(dimx, dimy, n_pix),dtype='float32'), name='w_del')
w_mother = tf.Variable(np.array( np.ones((2 * FLAGS.window + 1, 2 * FLAGS.window + 1, FLAGS.su_channels, 1)),dtype='float32'), name='w_mother')
a = tf.Variable(np.array(np.random.randn(dimx*dimy, n_cells),dtype='float32'), name='a')
a_sfm = tf.transpose(tf.nn.softmax(tf.transpose(a)))
b = tf.Variable(np.array(b_init,dtype='float32'), name='b')
vars_fit = [w_mother, w_del, a] # which variables to fit
if not FLAGS.loss == 'poisson':
vars_fit = vars_fit + [b]
# stimulus filtered with convolutional windows
    stim4D = stim_time_filtered  # tf.expand_dims(tf.reshape(stim, (-1,40,80)), 3)
stim_convolved = tf.reduce_sum(tf.nn.conv2d(stim4D, w_mother, strides=[1, FLAGS.stride, FLAGS.stride, 1], padding="VALID"),3)
stim_masked = tf.nn.conv2d(stim4D, mask_tf, strides=[1, FLAGS.stride, FLAGS.stride, 1], padding="VALID" )
stim_del = tf.reduce_sum(tf.mul(stim_masked, w_del), 3)
# activation of different subunits
su_act = tf.nn.relu(stim_del + stim_convolved)
# get firing rate
lam = tf.matmul(tf.reshape(su_act, [-1, dimx*dimy]), a_sfm) + b
# regularization
regularization = FLAGS.lam_w * tf.reduce_sum(tf.nn.l2_loss(w_del))
# projection to satisfy hard variable constraints
b_pos = tf.assign(b, (b + tf.abs(b))/2)
def proj():
if FLAGS.loss == 'poisson':
sess.run(b_pos)
if FLAGS.loss == 'poisson':
loss_inter = (tf.reduce_sum(lam)/120. - tf.reduce_sum(resp*tf.log(lam))) / data_len
loss = loss_inter + regularization # add regularization to get final loss function
# training consists of calling training()
# which performs a train step and project parameters to model specific constraints using proj()
train_step = tf.train.AdagradOptimizer(FLAGS.step_sz).minimize(loss, var_list=vars_fit)
def training(inp_dict):
sess.run(train_step, feed_dict=inp_dict) # one step of gradient descent
proj() # model specific projection operations
# evaluate loss on given data.
def get_loss(inp_dict):
ls = sess.run(loss,feed_dict = inp_dict)
return ls
# saving details
# make a folder with name derived from parameters of the algorithm - it saves checkpoint files and summaries used in tensorboard
parent_folder = FLAGS.save_location + FLAGS.folder_name + '/'
FLAGS.save_location = parent_folder + short_filename + '/'
save_filename = FLAGS.save_location + short_filename
# create summary writers
# create histogram summary for all parameters which are learnt
for ivar in vars_fit:
tf.histogram_summary(ivar.name, ivar)
# loss summary
l_summary = tf.scalar_summary('loss',loss)
# loss without regularization summary
l_inter_summary = tf.scalar_summary('loss_inter',loss_inter)
# Merge all the summary writer ops into one op (this way, calling one op stores all summaries)
merged = tf.merge_all_summaries()
# training and testing has separate summary writers
train_writer = tf.train.SummaryWriter(FLAGS.save_location + 'train',
sess.graph)
test_writer = tf.train.SummaryWriter(FLAGS.save_location + 'test')
## load previous results
sess.run(tf.initialize_all_variables())
saver_var = tf.train.Saver(tf.all_variables(), keep_checkpoint_every_n_hours=0.05)
load_prev = False
start_iter=0
try:
      # restore previous fits if they are available - useful when programs are preempted frequently.
latest_filename = short_filename + '_latest_fn'
restore_file = tf.train.latest_checkpoint(FLAGS.save_location, latest_filename)
start_iter = int(restore_file.split('/')[-1].split('-')[-1]) # restore previous iteration count and start from there.
saver_var.restore(sess, restore_file) # restore variables
load_prev = True
print('Previous dataset loaded')
except:
print('No previous dataset')
# plot w_mother
w_mot_eval = sess.run(w_mother)
plt.figure()
for idim in range(3):
plt.subplot(1,3,idim+1)
plt.imshow(np.squeeze(w_mot_eval[:,:,idim,0]),cmap='gray')
plt.title('mother cell')
plt.show()
plt.draw()
if __name__ == '__main__':
app.run()
| apache-2.0 |
ijager/Thesis | helper.py | 1 | 3645 | #!/usr/bin/env python
import matplotlib.pyplot as plt
import numpy as np
import scipy.io as sio
import time
import sys
import measurement
import echo
import distance
import image
import argparse
import glob
import collections
import pickle
def find_matching(X,Y):
""" find indices so that Y[indices] is congruent with X
returns indices
"""
index = []
for x in X:
m = 9999999
temp_i = 0
for i,y in enumerate(Y):
d = np.linalg.norm(x-y)
if d < m:
m = d
temp_i = i
index.append(temp_i)
return index
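# Usage sketch (toy data): each row of X is matched to its nearest row of
# Y, so Y[index] lines up point-for-point with X.
def _demo_find_matching():
    X = np.array([[0., 0.], [1., 1.]])
    Y = np.array([[1.1, 0.9], [0.1, -0.1]])
    # nearest neighbours: X[0] -> Y[1], X[1] -> Y[0]
    return find_matching(X, Y)  # -> [1, 0]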
def test(datasetname, N, et, rt, ur):
dataset = sio.loadmat(datasetname)
fs =float(dataset['fs'])
h = float(dataset['h'])
l = float(dataset['l'])
w = float(dataset['w'])
r = dataset['receivers']
s = dataset['sources']
data = dataset['data'].T
c = float(dataset['c'])
room = np.array([[0,0],[0,l],[w,l],[w,0]])
maxsize = np.sqrt(w**2+l**2+h**2) #m
max_delay = maxsize / float(c)
maxlength = int(2 * max_delay * fs)
t0 = time.time()
measurements = [measurement.MeasurementData(data=np.hstack(source_data).T,
receivers=r,
sources=s[i],
room_dimensions=(w,l,h),
c=c,
fs=fs)
for i,source_data in enumerate(data)]
echo_data = [echo.EchoData(m.find_echoes(crop=maxlength, interpolate=ur)) for m in measurements]
D = measurement.squared_distance_matrix(r, augmented=True)
S, E = zip(*[e.find_labels(D,threshold=et, parallel=True, verbose=args.verbose) for e in echo_data[:N]])
E = [e for e in E if len(e) > 0]
S = np.vstack(S)
distancedata = distance.DistanceData(S,E)
results = distancedata.find_images(r)
t1 = time.time()
if len(results) > 0:
imagedata = image.ImageSourceData(results, N, r, (w,l,h))
wall_points,vertices = imagedata.find_walls(threshold=rt, bestN=10)
if len(vertices) == 4:
i = find_matching(room, vertices)
error = np.sqrt(np.mean((vertices[i] - room)**2))
return (w*l*h, error, fs, t1-t0)
return (w*l*h, -1, fs, t1-t0)
parser = argparse.ArgumentParser(description='Estimate the shape of a room from room impulse response data')
parser.add_argument('dataset', help='Dataset containing the room impulse response data measured in a room using 2 or more sources and 5 microphones')
parser.add_argument('-N', help='Number of sources', default=4)
parser.add_argument('-et', help='Echo Threshold', default=0.005)
parser.add_argument('-rt', help='Result Threshold', default=0.05)
parser.add_argument('-ur', help='Upsampling rate', default=10)
parser.add_argument('-o', help='Output file', default=None)
parser.add_argument('--verbose', '-v', help='Print information during the estimation process', action='store_true')
args = parser.parse_args()
N = int(args.N)
et = float(args.et)
rt = float(args.rt)
upsampling_rate = int(args.ur)
output = collections.defaultdict(list)
datasets = glob.glob(args.dataset)
for dataset in datasets:
print(dataset)
try:
volume, error, fs, t = test(dataset, N, et, rt, upsampling_rate)
if error > 0:
print(error)
output[volume].append((error, t))
    except Exception as err:
        # print the exception itself rather than a bare traceback object
        print('error:', err)
if args.o is not None:
fname = args.o
else:
fname = 'results_dictionary'
with open(fname, 'wb') as f:
pickle.dump(output, f)
| mit |
jetbox/pybitcoin | pybitcoin/passphrases/english_words.py | 2 | 753213 | # -*- coding: utf-8 -*-
"""
pybitcoin
~~~~~
:copyright: (c) 2014 by Halfmoon Labs
:license: MIT, see LICENSE for more details.
"""
# 2048 words
english_words_bip39 = 'abandon,ability,able,about,above,absent,absorb,abstract,absurd,abuse,access,accident,account,accuse,achieve,acid,acoustic,acquire,across,act,action,actor,actress,actual,adapt,add,addict,address,adjust,admit,adult,advance,advice,aerobic,affair,afford,afraid,again,age,agent,agree,ahead,aim,air,airport,aisle,alarm,album,alcohol,alert,alien,all,alley,allow,almost,alone,alpha,already,also,alter,always,amateur,amazing,among,amount,amused,analyst,anchor,ancient,anger,angle,angry,animal,ankle,announce,annual,another,answer,antenna,antique,anxiety,any,apart,apology,appear,apple,approve,april,arch,arctic,area,arena,argue,arm,armed,armor,army,around,arrange,arrest,arrive,arrow,art,artefact,artist,artwork,ask,aspect,assault,asset,assist,assume,asthma,athlete,atom,attack,attend,attitude,attract,auction,audit,august,aunt,author,auto,autumn,average,avocado,avoid,awake,aware,away,awesome,awful,awkward,axis,baby,bachelor,bacon,badge,bag,balance,balcony,ball,bamboo,banana,banner,bar,barely,bargain,barrel,base,basic,basket,battle,beach,bean,beauty,because,become,beef,before,begin,behave,behind,believe,below,belt,bench,benefit,best,betray,better,between,beyond,bicycle,bid,bike,bind,biology,bird,birth,bitter,black,blade,blame,blanket,blast,bleak,bless,blind,blood,blossom,blouse,blue,blur,blush,board,boat,body,boil,bomb,bone,bonus,book,boost,border,boring,borrow,boss,bottom,bounce,box,boy,bracket,brain,brand,brass,brave,bread,breeze,brick,bridge,brief,bright,bring,brisk,broccoli,broken,bronze,broom,brother,brown,brush,bubble,buddy,budget,buffalo,build,bulb,bulk,bullet,bundle,bunker,burden,burger,burst,bus,business,busy,butter,buyer,buzz,cabbage,cabin,cable,cactus,cage,cake,call,calm,camera,camp,can,canal,cancel,candy,cannon,canoe,canvas,canyon,capable,capital,captain,car,carbon,card,cargo,carpet,carry,cart,case,cash,casino,castle,casual,cat,catalog,catch,category,cattle,caught,cause,caution,cave,ceiling,celery,cement,census,century,cereal,certain,chair,chalk,champion,change,chaos,chapter,charge,chase,chat,cheap,check,cheese,chef,cherry,chest,chicken,chief,child,chimney,choice,choose,chronic,chuckle,chunk,churn,cigar,cinnamon,circle,citizen,city,civil,claim,clap,clarify,claw,clay,clean,clerk,clever,click,client,cliff,climb,clinic,clip,clock,clog,close,cloth,cloud,clown,club,clump,cluster,clutch,coach,coast,coconut,code,coffee,coil,coin,collect,color,column,combine,come,comfort,comic,common,company,concert,conduct,confirm,congress,connect,consider,control,convince,cook,cool,copper,copy,coral,core,corn,correct,cost,cotton,couch,country,couple,course,cousin,cover,coyote,crack,cradle,craft,cram,crane,crash,crater,crawl,crazy,cream,credit,creek,crew,cricket,crime,crisp,critic,crop,cross,crouch,crowd,crucial,cruel,cruise,crumble,crunch,crush,cry,crystal,cube,culture,cup,cupboard,curious,current,curtain,curve,cushion,custom,cute,cycle,dad,damage,damp,dance,danger,daring,dash,daughter,dawn,day,deal,debate,debris,decade,december,decide,decline,decorate,decrease,deer,defense,define,defy,degree,delay,deliver,demand,demise,denial,dentist,deny,depart,depend,deposit,depth,deputy,derive,describe,desert,design,desk,despair,destroy,detail,detect,develop,device,devote,diagram,dial,diamond,diary,dice,diesel,diet,differ,digital,dignity,dilemma,dinner,dinosaur,direct,dirt,disagree,discover,disease,dish,dismiss,disorder,display,distance,divert,divide,divorce,dizzy,doctor,document,dog,doll,dolphin,domain,donate,donkey,donor,door,dose,double,dove,draft,dragon,drama,drastic,draw,dream,dress,drift,drill,drink,drip,d
rive,drop,drum,dry,duck,dumb,dune,during,dust,dutch,duty,dwarf,dynamic,eager,eagle,early,earn,earth,easily,east,easy,echo,ecology,economy,edge,edit,educate,effort,egg,eight,either,elbow,elder,electric,elegant,element,elephant,elevator,elite,else,embark,embody,embrace,emerge,emotion,employ,empower,empty,enable,enact,end,endless,endorse,enemy,energy,enforce,engage,engine,enhance,enjoy,enlist,enough,enrich,enroll,ensure,enter,entire,entry,envelope,episode,equal,equip,era,erase,erode,erosion,error,erupt,escape,essay,essence,estate,eternal,ethics,evidence,evil,evoke,evolve,exact,example,excess,exchange,excite,exclude,excuse,execute,exercise,exhaust,exhibit,exile,exist,exit,exotic,expand,expect,expire,explain,expose,express,extend,extra,eye,eyebrow,fabric,face,faculty,fade,faint,faith,fall,false,fame,family,famous,fan,fancy,fantasy,farm,fashion,fat,fatal,father,fatigue,fault,favorite,feature,february,federal,fee,feed,feel,female,fence,festival,fetch,fever,few,fiber,fiction,field,figure,file,film,filter,final,find,fine,finger,finish,fire,firm,first,fiscal,fish,fit,fitness,fix,flag,flame,flash,flat,flavor,flee,flight,flip,float,flock,floor,flower,fluid,flush,fly,foam,focus,fog,foil,fold,follow,food,foot,force,forest,forget,fork,fortune,forum,forward,fossil,foster,found,fox,fragile,frame,frequent,fresh,friend,fringe,frog,front,frost,frown,frozen,fruit,fuel,fun,funny,furnace,fury,future,gadget,gain,galaxy,gallery,game,gap,garage,garbage,garden,garlic,garment,gas,gasp,gate,gather,gauge,gaze,general,genius,genre,gentle,genuine,gesture,ghost,giant,gift,giggle,ginger,giraffe,girl,give,glad,glance,glare,glass,glide,glimpse,globe,gloom,glory,glove,glow,glue,goat,goddess,gold,good,goose,gorilla,gospel,gossip,govern,gown,grab,grace,grain,grant,grape,grass,gravity,great,green,grid,grief,grit,grocery,group,grow,grunt,guard,guess,guide,guilt,guitar,gun,gym,habit,hair,half,hammer,hamster,hand,happy,harbor,hard,harsh,harvest,hat,have,hawk,hazard,head,health,heart,heavy,hedgehog,height,hello,helmet,help,hen,hero,hidden,high,hill,hint,hip,hire,history,hobby,hockey,hold,hole,holiday,hollow,home,honey,hood,hope,horn,horror,horse,hospital,host,hotel,hour,hover,hub,huge,human,humble,humor,hundred,hungry,hunt,hurdle,hurry,hurt,husband,hybrid,ice,icon,idea,identify,idle,ignore,ill,illegal,illness,image,imitate,immense,immune,impact,impose,improve,impulse,inch,include,income,increase,index,indicate,indoor,industry,infant,inflict,inform,inhale,inherit,initial,inject,injury,inmate,inner,innocent,input,inquiry,insane,insect,inside,inspire,install,intact,interest,into,invest,invite,involve,iron,island,isolate,issue,item,ivory,jacket,jaguar,jar,jazz,jealous,jeans,jelly,jewel,job,join,joke,journey,joy,judge,juice,jump,jungle,junior,junk,just,kangaroo,keen,keep,ketchup,key,kick,kid,kidney,kind,kingdom,kiss,kit,kitchen,kite,kitten,kiwi,knee,knife,knock,know,lab,label,labor,ladder,lady,lake,lamp,language,laptop,large,later,latin,laugh,laundry,lava,law,lawn,lawsuit,layer,lazy,leader,leaf,learn,leave,lecture,left,leg,legal,legend,leisure,lemon,lend,length,lens,leopard,lesson,letter,level,liar,liberty,library,license,life,lift,light,like,limb,limit,link,lion,liquid,list,little,live,lizard,load,loan,lobster,local,lock,logic,lonely,long,loop,lottery,loud,lounge,love,loyal,lucky,luggage,lumber,lunar,lunch,luxury,lyrics,machine,mad,magic,magnet,maid,mail,main,major,make,mammal,man,manage,mandate,mango,mansion,manual,maple,marble,march,margin,marine,market,marriage,mask,mass,master,match,material,math,matrix,matter,maximum,maze,meadow,mea
n,measure,meat,mechanic,medal,media,melody,melt,member,memory,mention,menu,mercy,merge,merit,merry,mesh,message,metal,method,middle,midnight,milk,million,mimic,mind,minimum,minor,minute,miracle,mirror,misery,miss,mistake,mix,mixed,mixture,mobile,model,modify,mom,moment,monitor,monkey,monster,month,moon,moral,more,morning,mosquito,mother,motion,motor,mountain,mouse,move,movie,much,muffin,mule,multiply,muscle,museum,mushroom,music,must,mutual,myself,mystery,myth,naive,name,napkin,narrow,nasty,nation,nature,near,neck,need,negative,neglect,neither,nephew,nerve,nest,net,network,neutral,never,news,next,nice,night,noble,noise,nominee,noodle,normal,north,nose,notable,note,nothing,notice,novel,now,nuclear,number,nurse,nut,oak,obey,object,oblige,obscure,observe,obtain,obvious,occur,ocean,october,odor,off,offer,office,often,oil,okay,old,olive,olympic,omit,once,one,onion,online,only,open,opera,opinion,oppose,option,orange,orbit,orchard,order,ordinary,organ,orient,original,orphan,ostrich,other,outdoor,outer,output,outside,oval,oven,over,own,owner,oxygen,oyster,ozone,pact,paddle,page,pair,palace,palm,panda,panel,panic,panther,paper,parade,parent,park,parrot,party,pass,patch,path,patient,patrol,pattern,pause,pave,payment,peace,peanut,pear,peasant,pelican,pen,penalty,pencil,people,pepper,perfect,permit,person,pet,phone,photo,phrase,physical,piano,picnic,picture,piece,pig,pigeon,pill,pilot,pink,pioneer,pipe,pistol,pitch,pizza,place,planet,plastic,plate,play,please,pledge,pluck,plug,plunge,poem,poet,point,polar,pole,police,pond,pony,pool,popular,portion,position,possible,post,potato,pottery,poverty,powder,power,practice,praise,predict,prefer,prepare,present,pretty,prevent,price,pride,primary,print,priority,prison,private,prize,problem,process,produce,profit,program,project,promote,proof,property,prosper,protect,proud,provide,public,pudding,pull,pulp,pulse,pumpkin,punch,pupil,puppy,purchase,purity,purpose,purse,push,put,puzzle,pyramid,quality,quantum,quarter,question,quick,quit,quiz,quote,rabbit,raccoon,race,rack,radar,radio,rail,rain,raise,rally,ramp,ranch,random,range,rapid,rare,rate,rather,raven,raw,razor,ready,real,reason,rebel,rebuild,recall,receive,recipe,record,recycle,reduce,reflect,reform,refuse,region,regret,regular,reject,relax,release,relief,rely,remain,remember,remind,remove,render,renew,rent,reopen,repair,repeat,replace,report,require,rescue,resemble,resist,resource,response,result,retire,retreat,return,reunion,reveal,review,reward,rhythm,rib,ribbon,rice,rich,ride,ridge,rifle,right,rigid,ring,riot,ripple,risk,ritual,rival,river,road,roast,robot,robust,rocket,romance,roof,rookie,room,rose,rotate,rough,round,route,royal,rubber,rude,rug,rule,run,runway,rural,sad,saddle,sadness,safe,sail,salad,salmon,salon,salt,salute,same,sample,sand,satisfy,satoshi,sauce,sausage,save,say,scale,scan,scare,scatter,scene,scheme,school,science,scissors,scorpion,scout,scrap,screen,script,scrub,sea,search,season,seat,second,secret,section,security,seed,seek,segment,select,sell,seminar,senior,sense,sentence,series,service,session,settle,setup,seven,shadow,shaft,shallow,share,shed,shell,sheriff,shield,shift,shine,ship,shiver,shock,shoe,shoot,shop,short,shoulder,shove,shrimp,shrug,shuffle,shy,sibling,sick,side,siege,sight,sign,silent,silk,silly,silver,similar,simple,since,sing,siren,sister,situate,six,size,skate,sketch,ski,skill,skin,skirt,skull,slab,slam,sleep,slender,slice,slide,slight,slim,slogan,slot,slow,slush,small,smart,smile,smoke,smooth,snack,snake,snap,sniff,snow,soap,soccer,social,sock,soda,soft,solar,soldier,so
lid,solution,solve,someone,song,soon,sorry,sort,soul,sound,soup,source,south,space,spare,spatial,spawn,speak,special,speed,spell,spend,sphere,spice,spider,spike,spin,spirit,split,spoil,sponsor,spoon,sport,spot,spray,spread,spring,spy,square,squeeze,squirrel,stable,stadium,staff,stage,stairs,stamp,stand,start,state,stay,steak,steel,stem,step,stereo,stick,still,sting,stock,stomach,stone,stool,story,stove,strategy,street,strike,strong,struggle,student,stuff,stumble,style,subject,submit,subway,success,such,sudden,suffer,sugar,suggest,suit,summer,sun,sunny,sunset,super,supply,supreme,sure,surface,surge,surprise,surround,survey,suspect,sustain,swallow,swamp,swap,swarm,swear,sweet,swift,swim,swing,switch,sword,symbol,symptom,syrup,system,table,tackle,tag,tail,talent,talk,tank,tape,target,task,taste,tattoo,taxi,teach,team,tell,ten,tenant,tennis,tent,term,test,text,thank,that,theme,then,theory,there,they,thing,this,thought,three,thrive,throw,thumb,thunder,ticket,tide,tiger,tilt,timber,time,tiny,tip,tired,tissue,title,toast,tobacco,today,toddler,toe,together,toilet,token,tomato,tomorrow,tone,tongue,tonight,tool,tooth,top,topic,topple,torch,tornado,tortoise,toss,total,tourist,toward,tower,town,toy,track,trade,traffic,tragic,train,transfer,trap,trash,travel,tray,treat,tree,trend,trial,tribe,trick,trigger,trim,trip,trophy,trouble,truck,true,truly,trumpet,trust,truth,try,tube,tuition,tumble,tuna,tunnel,turkey,turn,turtle,twelve,twenty,twice,twin,twist,two,type,typical,ugly,umbrella,unable,unaware,uncle,uncover,under,undo,unfair,unfold,unhappy,uniform,unique,unit,universe,unknown,unlock,until,unusual,unveil,update,upgrade,uphold,upon,upper,upset,urban,urge,usage,use,used,useful,useless,usual,utility,vacant,vacuum,vague,valid,valley,valve,van,vanish,vapor,various,vast,vault,vehicle,velvet,vendor,venture,venue,verb,verify,version,very,vessel,veteran,viable,vibrant,vicious,victory,video,view,village,vintage,violin,virtual,virus,visa,visit,visual,vital,vivid,vocal,voice,void,volcano,volume,vote,voyage,wage,wagon,wait,walk,wall,walnut,want,warfare,warm,warrior,wash,wasp,waste,water,wave,way,wealth,weapon,wear,weasel,weather,web,wedding,weekend,weird,welcome,west,wet,whale,what,wheat,wheel,when,where,whip,whisper,wide,width,wife,wild,will,win,window,wine,wing,wink,winner,winter,wire,wisdom,wise,wish,witness,wolf,woman,wonder,wood,wool,word,work,world,worry,worth,wrap,wreck,wrestle,wrist,write,wrong,yard,year,yellow,you,young,youth,zebra,zero,zone,zoo'
# 25202 words
english_words_wiktionary = 'aardvark,aargh,aback,abacus,abandon,abandoned,abandoning,abandonment,abandons,abba,abbey,abbot,abbots,abdomen,abdominal,abduct,abducted,abducting,abduction,abductions,aberration,abetted,abide,abiding,abigail,abilities,ability,ablaze,able,abnormal,abnormalities,abnormality,abnormally,aboard,abode,abolish,abominable,abomination,aboriginal,abort,abortion,abortions,abound,about,above,aboveboard,abrasive,abreast,abroad,abrupt,abruptly,abs,absconded,absence,absences,absent,absentee,absolute,absolutely,absolutes,absolution,absolved,absorb,absorbed,absorbent,absorbing,absorption,abstain,abstinence,abstract,absurd,absurdity,absurdly,abundance,abundant,abundantly,abuse,abused,abuser,abusing,abusive,abut,abysmal,academia,academic,academics,academy,accelerant,accelerate,accelerated,accelerating,acceleration,accent,accents,accept,acceptable,acceptance,accepted,accepting,accepts,access,accessible,accessing,accessories,accessory,accident,accidental,accidentally,accidently,accidents,acclaim,acclimate,acclimated,accolades,accommodate,accommodating,accommodation,accommodations,accompanied,accompany,accompanying,accomplice,accomplices,accomplish,accomplished,accomplishing,accomplishment,accomplishments,accord,accordance,accorded,according,accordingly,account,accountability,accountable,accountant,accountants,accounted,accounting,accounts,accoutrements,accumulate,accumulated,accumulation,accuracy,accurate,accurately,accursed,accusation,accusations,accuse,accused,accuser,accusers,accuses,accusing,accustomed,ace,aced,aces,acetate,ache,ached,aches,achieve,achieved,achievement,achieving,aching,achingly,acid,acids,acing,acknowledge,acknowledged,acknowledgement,acknowledges,acme,acne,acorn,acoustic,acoustics,acquaint,acquaintance,acquaintances,acquainted,acquire,acquired,acquisition,acquisitions,acquitted,acre,acres,acrobat,across,act,acted,actin,acting,action,actionable,actions,activate,activated,activating,activation,activators,active,actively,activists,activities,activity,actor,actors,actress,actresses,acts,actual,actuality,actualization,actually,actuarial,acupuncture,acupuncturist,acute,ad,adage,adamant,adapt,adaptable,adapted,adapting,add,added,addendum,addict,addicted,addiction,addictive,addicts,adding,addition,additional,additions,additives,addled,address,addressed,addresses,addressing,adds,adenoids,adept,adequate,adequately,adhere,adherence,adhesive,adjacent,adjective,adjectives,adjourn,adjourned,adjust,adjustable,adjusted,adjusting,adjustment,adjustments,adjutant,administer,administered,administering,administration,administrative,administrator,administrators,admirable,admirably,admiral,admiration,admire,admired,admirer,admirers,admires,admiring,admission,admissions,admit,admits,admittance,admitted,admittedly,admitting,admonish,admonished,admonition,ado,adobe,adolescence,adolescent,adolescents,adopt,adopted,adopting,adoption,adoptive,adorable,adoration,adore,adored,adores,adoring,adrenaline,adrenals,ads,adult,adults,advance,advanced,advancement,advancements,advances,advancing,advantage,advantageous,advantages,advent,adventure,adventurer,adventures,adventurous,adversaries,adversary,adverse,adversely,adversity,advert,advertise,advertised,advertisement,advertisers,advertises,advertising,advice,advisable,advise,advised,adviser,advises,advising,advisor,advocacy,advocate,advocating,aerial,aerobic,aerobics,aerodynamics,aeroplane,aerosol,aerospace,aesthetic,aesthetics,afar,affair,affairs,affect,affected,affecting,affection,affectionate,affectionately,affections,affects,affidavit,affidavits
,affiliated,affiliates,affiliation,affinity,affirm,affirmative,affirming,afflicted,afford,afforded,affront,afloat,afoot,aforementioned,aforethought,afraid,aft,after,afterglow,afterlife,aftermath,afternoon,afternoons,afterthought,afterward,afterwards,ag,aga,again,against,age,aged,ageing,ageless,agencies,agency,agenda,agendas,agent,agents,ages,aggravate,aggravated,aggravating,aggravation,aggression,aggressive,aggressively,aggressor,agile,agility,agin,aging,agitate,agitated,agitation,agitators,ago,agonized,agonizing,agony,agree,agreeable,agreed,agreeing,agreement,agreements,agrees,agricultural,aground,ah,aha,ahead,ahem,ahold,ahoy,ai,aid,aide,aided,aides,aiding,aids,ail,ailing,ailment,ailments,ails,aim,aimed,aiming,aimless,aimlessly,aims,ain,air,airborne,aircraft,airfield,airhead,airing,airlift,airlifted,airline,airlines,airman,airmen,airplane,airplanes,airport,airports,airs,airspace,airstrip,airtight,airwaves,airway,airways,aisle,aisles,ajar,al,ala,alabaster,alameda,alamo,alan,alarm,alarmed,alarming,alarmist,alarms,alas,alaska,alastor,albacore,albatross,albino,album,albums,alcazar,alchemist,alchemy,alcohol,alcoholic,alcoholics,alcoholism,alcove,alderman,ale,alec,alert,alerted,alerting,alerts,ales,alexander,alfalfa,algae,algebra,algorithms,alias,aliases,alibi,alibis,alien,alienate,alienated,alienating,alienation,aliens,alight,aligning,alike,alimony,alive,all,allee,allegation,allegations,alleged,allegedly,allegiance,allegiances,alleging,alleluia,allergic,allergies,allergy,alleviate,alley,alleys,alliance,allies,alligator,alligators,alliteration,allotted,allow,allowable,allowance,allowed,allowing,allows,alluding,allure,alluring,ally,alma,almighty,almond,almonds,almost,aloe,aloft,aloha,alone,along,alongside,aloud,alpha,alphabet,alphabetically,alphabetized,alpine,already,alright,als,also,alt,altar,alter,alteration,alterations,altercation,altered,altering,alternate,alternates,alternating,alternative,alternatives,alternator,alters,although,altitude,alto,altogether,altruistic,aluminum,alumni,alumnus,always,am,ama,amah,amaretto,amassed,amateur,amateurs,amaze,amazed,amazes,amazing,amazingly,amazon,ambassador,amber,ambiance,ambience,ambient,ambiguity,ambiguous,ambition,ambitions,ambitious,ambivalence,ambivalent,ambrosia,ambulance,ambulances,ambush,ambushed,amen,amend,amended,amendment,amendments,amends,amenities,ami,amiable,amicable,amigo,amigos,amin,amino,amiss,ammo,ammonia,ammunition,amnesia,amniotic,among,amongst,amoral,amorous,amount,amounted,amounts,amp,amphetamines,ampicillin,ample,amply,ampule,amputate,amputated,amputation,amulet,amulets,amuse,amused,amusement,amuses,amusing,an,ana,anachronism,anaesthetic,anagram,analogy,analyse,analysis,analyst,analysts,analyze,analyzed,analyzing,anaphylactic,anarchist,anarchists,anarchy,anatomically,anatomy,ancestor,ancestors,anchor,anchorage,anchoring,anchors,anchovies,ancient,ancients,and,android,ane,anecdote,anecdotes,anemia,anemic,anesthesia,anesthesiologist,anesthesiology,anesthetic,anesthetics,aneurysm,anew,angel,angels,angelus,anger,angina,angiogram,angioplasty,angle,angles,angling,angora,angrier,angrily,angry,angst,anguish,anguished,ani,animal,animals,animated,animation,animosity,anise,ankle,ankles,anna,annals,annex,annihilate,annihilated,annihilation,anniversary,announce,announced,announcement,announcements,announcer,announces,announcing,annoy,annoyance,annoyances,annoyed,annoying,annoyingly,annoys,annual,annually,annul,annulled,annulment,anoint,anointed,anomalies,anomaly,anon,anonymity,anonymous,anorexia,anorexic,another,answer,answered,answering,answer
s,ant,antacid,antagonism,antagonistic,antagonize,antagonizing,ante,antelope,antenna,antennae,anthem,anthology,anthrax,anthropologist,anthropologists,anthropology,anti,antibiotic,antibiotics,antibodies,antibody,anticipate,anticipated,anticipating,anticipation,antics,antidepressant,antidepressants,antidote,antihistamine,antihistamines,antiquated,antique,antiques,antiquing,antiquities,antiquity,antisocial,antivenin,antler,ants,antsy,anvil,anxiety,anxious,anxiously,any,anybody,anyhow,anymore,anyone,anyplace,anything,anytime,anyway,anyways,anywhere,apache,apart,apartheid,apartment,apartments,apathy,ape,apes,apex,aphrodisiac,apiece,aplastic,apocalypse,apocalyptic,apollo,apologetic,apologies,apologise,apologize,apologized,apologizes,apologizing,apology,apostle,apostles,apostrophe,appalled,appalling,apparatus,apparel,apparent,apparently,apparition,appeal,appealed,appealing,appeals,appear,appearance,appearances,appeared,appearing,appears,appease,appeased,appendage,appendages,appendectomy,appendicitis,appendix,appetite,appetites,appetizer,appetizers,appetizing,applaud,applauded,applauding,applause,apple,applejack,apples,applesauce,appliance,appliances,applicants,application,applications,applied,applies,apply,applying,appoint,appointed,appointing,appointment,appointments,appraisal,appraise,appreciate,appreciated,appreciates,appreciation,appreciative,apprehend,apprehended,apprehension,apprehensive,apprentice,apprised,approach,approached,approaches,approaching,appropriate,appropriately,appropriations,approval,approve,approved,approving,approximate,approximately,approximation,apricot,apron,aprons,apropos,aptitude,aptly,aqua,aquarium,aquatic,ar,arachnid,arachnids,arbitrary,arbitration,arbitrator,arbor,arboretum,arc,arcade,arch,archaic,arched,archenemy,archeological,archeology,archer,arches,architect,architects,architecture,archives,arctic,ardent,are,area,areas,arena,arf,argentine,argon,arguably,argue,argued,argues,arguing,argument,argumentative,arguments,argyle,aria,arid,ariel,aright,arise,arises,aristocratic,arithmetic,ark,arm,armaments,armed,armies,arming,armor,armored,armory,armour,armpits,arms,army,aroma,arose,around,arouse,aroused,arousing,arraigned,arraignment,arrange,arranged,arrangement,arrangements,arranging,array,arrears,arrest,arrested,arresting,arrests,arrhythmia,arrival,arrivals,arrive,arrived,arrives,arriving,arrogance,arrogant,arrow,arrowhead,arrows,arroyo,arse,arsenal,arsenic,arson,arsonist,art,arterial,arteries,artery,artful,arthritis,artichoke,artichokes,article,articles,articulate,artifact,artifacts,artificial,artillery,artist,artiste,artistic,artistry,artists,arts,artsy,artwork,arty,arugula,as,asbestos,ascension,ascot,ash,ashamed,ashes,ashore,ashram,ashtray,ashtrays,aside,asinine,ask,asked,askew,asking,asks,asleep,asparagus,aspect,aspects,aspen,asphalt,asphyxiation,aspirations,aspire,aspirin,aspirins,assailant,assassin,assassinate,assassination,assassins,assault,assaulted,assaulting,assaults,assemble,assembled,assembler,assemblies,assembling,assembly,assertive,assertiveness,asses,assess,assessed,assessing,assessment,asset,assets,assign,assigned,assigning,assignment,assignments,assimilate,assimilated,assist,assistance,assistant,assistants,assisted,assisting,associate,associated,associates,associating,association,associations,assorted,assortment,assume,assumed,assumes,assuming,assumption,assumptions,assurance,assurances,assure,assured,assuredly,assures,assuring,asteroid,asteroids,asthma,astonished,astonishing,astonishment,astound,astounding,astral,astray,astronaut,astronauts,astronomer,a
stronomical,astronomy,astrophysics,astute,asunder,asylum,at,ate,atheists,athlete,athletes,athletic,atlas,atmosphere,atom,atomic,atonement,atop,atrium,atrocious,atrocities,atrophied,atropine,att,attaboy,attach,attache,attached,attachment,attachments,attack,attacked,attacker,attackers,attacking,attacks,attain,attained,attempt,attempted,attempting,attempts,attend,attendance,attendant,attendants,attended,attending,attends,attention,attentions,attentive,attest,attic,attired,attitude,attitudes,attorney,attorneys,attract,attracted,attracting,attraction,attractive,attracts,attribute,attributed,attributes,attuned,auction,auctioneer,auctioning,audacity,audible,audience,audiences,audio,audiotape,audit,audited,auditing,audition,auditioning,auditions,auditor,auditorium,auditory,auger,aught,augmentation,august,auk,auld,aunt,auntie,aunties,aunts,aura,auras,aurora,auspicious,authentic,authenticate,authenticated,authenticity,author,authoritative,authorities,authority,authorization,authorize,authorized,authorizing,authors,autism,autistic,auto,autobiographical,autograph,autographed,autographs,automated,automatic,automatically,automatics,automaton,automobile,automobiles,automotive,autonomous,autonomy,autopsies,autopsy,autumn,auxiliary,ava,availability,available,avalanche,avatar,avatars,ave,avenge,avenged,avenger,avengers,avenue,average,averages,averse,aversion,aviation,avid,avocado,avoid,avoidance,avoided,avoiding,avoids,aw,await,awaiting,awaits,awake,awaken,awakened,awakening,awakes,award,awarded,awards,aware,awareness,away,awe,awed,awesome,awful,awfully,awhile,awkward,awkwardly,awkwardness,awoke,awol,awry,ax,axe,axel,axis,axle,ay,aye,baba,babble,babbling,babe,babes,babies,baboon,baboons,babu,baby,babysitter,babysitters,bach,bachelor,bachelorette,bachelors,back,backbone,backdoor,backdrop,backed,backer,backfire,backfired,backfires,backfiring,backgammon,background,backhand,backing,backpack,backpacking,backpacks,backroom,backs,backseat,backside,backslide,backstabbing,backstage,backstreet,backstroke,backup,backups,backward,backwards,backyard,bacon,bacteria,bacterial,bad,badder,baddest,badge,badgered,badgering,badges,badly,badminton,badness,baffled,baffles,baffling,bag,bagel,bagels,baggage,bagged,baggies,bagging,baggy,bagman,bagpipes,bags,bah,bail,bailed,bailey,bailiff,bailiffs,bailing,bails,bait,baited,baiting,bake,baked,baker,bakeries,bakers,bakery,bakes,baking,baklava,balance,balanced,balances,balancing,balboa,balconies,balcony,bald,balding,baldness,bale,balk,ball,ballads,ballast,balled,baller,ballerina,ballet,ballgame,ballistic,ballistics,ballon,balloon,balloons,ballot,ballots,ballpark,ballplayer,ballplayers,ballpoint,ballroom,balls,ballsy,balm,balmoral,baloney,bam,bambino,bamboo,bamboozled,ban,banal,banality,banana,bananas,band,bandage,bandages,bandit,bandits,bands,bandwagon,bane,bang,banged,bangers,banging,bangles,bangs,banish,banished,banister,banjo,bank,bankbooks,banker,bankers,banking,bankroll,bankrolled,bankrupt,bankruptcy,bankrupted,banks,banned,banner,banners,banning,bannister,banquet,banshee,banter,banzai,baptism,baptist,baptists,baptize,baptized,bar,barb,barbarian,barbarians,barbaric,barbecue,barbecued,barbecues,barbed,barber,barbers,barbershop,barbs,bard,bare,bared,barefoot,barely,barf,barfed,barfing,bargain,bargained,bargaining,barge,barged,barges,barging,baring,barium,bark,barked,barkeep,barker,barking,barks,barley,barlow,barmaid,barman,barn,barnacle,barnyard,barometer,baron,baroness,baronet,baroque,barracks,barracuda,barrage,barre,barred,barrel,barreling,barrels,barren,barricade,barricaded,barric
ades,barrier,barring,barrio,barrister,barroom,barrow,bars,barstool,bartender,bartending,barter,base,baseball,based,baseless,baseman,basement,basements,bases,bash,bashed,bashful,bashing,basic,basically,basics,basil,basin,basis,bask,basket,basketball,basketballs,baskets,basking,bass,bassett,bassinet,bassoon,baste,bastille,bat,batch,bates,bath,bathe,bathed,bathing,bathrobe,bathrobes,bathroom,bathrooms,baths,bathtub,batman,baton,bats,batted,batter,battered,batteries,battering,battery,batting,battle,battlefield,battleground,battles,battleship,battling,batty,bauble,baubles,bawdy,bawl,bawling,bay,bayberry,bayonet,bayou,bays,bazaar,bazooka,be,beach,beaches,beacon,beaded,beads,beagle,beak,beakers,beam,beamed,beaming,beams,bean,beanbag,beanie,beans,bear,bearable,beard,bearded,beards,bearer,bearers,bearing,bearings,bears,beast,beastie,beasts,beat,beaten,beater,beating,beatings,beatnik,beats,beau,beaucoup,beauties,beautiful,beautifully,beauty,beaver,beavers,became,because,beck,beckons,becks,become,becomes,becoming,bed,bedbug,bedbugs,bedlam,bedpan,bedpans,bedridden,bedrock,bedroom,bedrooms,beds,bedside,bedspread,bedtime,bee,beech,beef,beefcake,beefed,beefs,beefy,been,beep,beeped,beeper,beepers,beeps,beer,beers,beery,bees,beeswax,beetle,beetles,beets,befall,befitting,before,beforehand,beg,began,begat,begets,beggar,beggars,begged,begging,begin,beginner,beginning,beginnings,begins,begrudge,begs,begun,behalf,behave,behaved,behaving,behavior,behavioral,behaviour,beheading,behind,behold,behooves,beige,being,beings,bel,belabor,belated,beleaguered,belie,belief,beliefs,believable,believe,believed,believer,believers,believes,believing,belittle,belive,bell,bellboy,belle,bellhops,bellies,belligerent,bellman,bells,belly,bellyaching,bellybutton,belong,belonged,belonging,belongings,belongs,beloved,below,belt,belted,belts,belvedere,ben,bench,benched,benches,benching,bend,bended,bender,bending,bends,bendy,bene,beneath,benedict,benefactor,benefactors,beneficial,benefit,benefited,benefits,benes,benevolence,benevolent,benign,benjamin,benjamins,bennet,benny,bent,benthic,bequest,berate,berating,bereavement,bereft,beret,berg,berlin,berries,berry,berserk,berserker,bertha,beryllium,beseech,beside,besides,besieged,besmirch,best,bested,bestow,bestowed,bestseller,bet,beta,beth,bethesda,betray,betrayal,betrayals,betrayed,betrayer,betraying,betrays,bets,better,betting,between,beverage,beware,bewitched,bey,beyond,bi,bialy,bias,biased,bib,bible,bibles,biblical,bibliography,bicentennial,bicker,bickering,bicuspids,bicycle,bicycles,bid,bidder,bidding,bide,biding,bids,biff,big,bigamist,bigamy,bigfoot,bigger,biggest,biggie,bighorn,bigmouth,bigness,bigot,bigotry,bijou,bike,biker,bikers,bikes,biking,bikini,bikinis,bilateral,bilge,bilingual,bill,billboard,billboards,billiard,billie,billing,billion,billionaire,billionaires,billions,bills,billy,bimbo,bimbos,bin,binary,bind,binder,binding,binds,binge,bingo,binoculars,bins,bio,biochemist,biochemistry,biographical,biographies,biography,biohazard,biological,biologically,biology,bionic,biopsy,bios,biotech,bipartisan,bipolar,birch,bird,birdcage,birdie,birdies,birds,birdseed,birth,birthday,birthdays,birthing,birthmark,birthplace,birthright,births,biscuit,biscuits,bishop,bishops,bison,bisque,bistro,bit,bite,bites,biting,bits,bitsy,bitten,bitter,bitterness,bitty,biz,bizarre,blab,blabbermouth,blabbing,black,blackberry,blackbird,blackboard,blacked,blacking,blackjack,blackmail,blackmailed,blackmailer,blackmailing,blackness,blackout,blackouts,blacks,blacksmith,bladder,bladders,blade,blades,blah,blam,blame,b
lamed,blameless,blames,blaming,bland,blank,blanket,blankets,blankly,blanks,blaring,blarney,blasphemous,blasphemy,blast,blasted,blasters,blasting,blatant,blatantly,blather,blathering,blaze,blazer,blazers,blazes,blazing,bleach,bleached,bleachers,bleaching,bleak,bled,bleed,bleeder,bleeding,bleeds,bleep,blemish,blend,blended,blender,blending,blends,bless,blessed,blessing,blessings,blew,blight,blimey,blimp,blind,blinded,blinders,blindfold,blindfolded,blinding,blindly,blindness,blinds,blindsided,blink,blinked,blinking,blinks,blip,blips,bliss,blissful,blissfully,blisters,blithely,blithering,blitz,blizzard,bloated,blob,bloc,block,blockade,blockage,blockbusters,blocked,blockhead,blocking,blocks,bloke,blokes,blond,blonde,blondes,blonds,blood,blooded,bloodhound,bloodied,bloodless,bloodline,bloods,bloodshed,bloodshot,bloodstream,bloodsucker,bloodsucking,bloody,bloom,blooming,blooms,blossom,blossomed,blossoms,blot,blotchy,blotter,blotto,blouse,blow,blowed,blower,blowfish,blowhard,blowing,blowjob,blowjobs,blown,blowout,blows,blowtorch,blowup,blubber,blubbering,bludgeoned,blue,bluebells,blueberries,blueberry,bluepoint,blueprints,bluer,blues,bluest,bluff,bluffing,bluffs,blume,blunder,blundering,blunders,blunt,blur,blurb,blurred,blurry,blurt,blurted,blurting,blush,blushing,bluster,bo,boa,boar,board,boarded,boarder,boarding,boardinghouse,boardroom,boards,boardwalk,boast,boat,boathouse,boating,boatload,boatman,boats,bob,bobbin,bobbing,bobby,bobcat,bod,bodega,bodies,bodily,body,bodyguard,bodyguards,bogeyman,boggle,boggles,boggling,bogs,bogus,bohemian,boil,boiled,boiler,boilers,boiling,boils,bold,bolder,boldly,bollocks,bologna,bolster,bolt,bolted,bolts,bomb,bombarded,bombarding,bombed,bomber,bombers,bombing,bombings,bombs,bombshell,bonbon,bond,bondage,bonded,bonding,bonds,bondsman,bone,boned,bonehead,boneless,bones,bonfire,bong,bongo,bongos,boning,bonkers,bonnet,bonnie,bonus,bonuses,bony,boo,boob,boobies,boobs,booby,booger,boogey,boogeyman,boogie,book,bookcase,booked,bookends,booker,bookie,booking,bookish,bookkeeper,booklet,booklets,bookman,bookmark,books,bookshelf,bookshelves,bookstore,boom,boombox,boomer,boomerang,boon,boonies,boorish,boost,boosted,booster,boosters,boosts,boot,booted,booth,booths,booties,bootleg,boots,booty,booze,boozer,boozing,bop,bora,bordeaux,bordello,border,bordering,borderline,borders,bore,bored,boredom,boring,born,borrow,borrowed,borrowing,bosom,bosomy,boss,bossed,bosses,bossing,bossy,boston,bot,botanical,botany,botched,both,bother,bothered,bothering,bothers,bottle,bottled,bottles,bottling,bottom,bottomed,bottomless,bottoms,botulism,bought,boulder,boulevard,bounce,bounced,bouncer,bouncing,bouncy,bound,boundaries,boundary,bounds,bounty,bouquet,bouquets,bourbon,bourgeois,bourne,bout,boutique,boutiques,boutonniere,bouts,bow,bowed,bowel,bowels,bowers,bowery,bowing,bowl,bowled,bowler,bowline,bowling,bowls,bowman,bows,box,boxcar,boxed,boxer,boxers,boxes,boxing,boy,boycott,boyfriend,boyfriends,boyhood,boys,boysenberry,bozo,bozos,bra,brace,bracelet,bracelets,braces,bracing,bracken,bracket,brad,brag,braggart,bragged,bragging,brags,braid,braided,braiding,braille,brain,brained,brainiest,brains,brainstorm,brainstorming,brainwash,brainwashed,brainwashing,brake,brakes,bran,branch,branched,branches,branching,brand,brandies,brandy,bras,brash,brass,brassiere,brassieres,brat,brats,bratwurst,brava,bravado,brave,braved,bravely,braver,bravery,bravest,bravo,brawl,brays,brazen,brazil,breach,breached,bread,breadth,break,breakable,breakdown,breakdowns,breaker,breakfast,breaking,breakout,breaks,breakthrough,brea
kthroughs,breakup,breakups,breakwater,breast,breasted,breasts,breath,breathable,breathe,breathed,breather,breathes,breathing,breathless,breaths,breathtaking,bred,bree,breech,breed,breeding,breeds,breeze,breezing,breezy,bren,brent,brethren,brew,brewed,brewer,brewery,brewing,brews,briar,bribe,bribed,bribery,bribes,bribing,brick,bricked,bricks,bridal,bride,bridegroom,brides,bridesmaid,bridesmaids,bridge,bridges,brie,brief,briefcase,briefcases,briefed,briefing,briefings,briefly,briefs,brig,brigade,brigadier,bright,brighten,brighter,brightest,brightly,brill,brilliance,brilliant,brilliantly,brim,brin,bring,bringing,brings,brink,brioche,bris,brisket,briskly,bristol,brit,britches,britt,brittle,bro,broad,broadcast,broadcasting,broadcasts,broadening,broader,broads,broccoli,brochure,brochures,brock,broiled,broiler,broke,broken,brokenhearted,broker,brokerage,bronchial,bronco,bronze,bronzed,bronzing,brooch,brood,brooding,broody,brook,brooks,broom,brooms,broomstick,broomsticks,bros,broth,brothel,brother,brotherhood,brotherly,brothers,brought,brouhaha,brow,browbeat,browbeating,brown,brownie,brownies,browning,brownout,browns,brownstone,browse,browsing,brr,bruise,bruised,bruises,bruising,brumby,brunch,brunette,brunettes,brunt,brush,brushed,brushes,brushing,brutal,brutality,brutally,brute,bubbies,bubble,bubbles,bubbly,buck,buckaroo,bucket,buckets,buckeyes,buckle,buckled,buckling,bucko,bucks,buckshot,bud,buddies,budding,buddy,budge,budget,budgeted,budgets,budging,buds,buff,buffalo,buffer,buffet,buffoon,buffoons,buffs,buffy,bug,bugged,bugger,buggered,bugging,buggy,bugle,bugs,build,builder,building,buildings,builds,buildup,built,bulb,bulbous,bulbs,bulge,bulging,bulimic,bulk,bulky,bull,bulldog,bulldoze,bulldozers,bullet,bulletin,bulletins,bulletproof,bullets,bullheaded,bullied,bullies,bullion,bullpen,bulls,bully,bum,bumble,bumbling,bummed,bummer,bummers,bumming,bump,bumped,bumper,bumping,bumpkins,bumps,bumpy,bums,bun,bunch,bunches,bundle,bundles,bungalow,bungalows,bungee,bungled,bunion,bunions,bunk,bunker,bunking,bunks,bunnies,bunny,buns,bunt,bunting,bura,burbs,burden,burdened,burdens,bureau,bureaucrat,bureaucrats,burgeoning,burger,burgers,burgess,burglar,burglaries,burglary,burgundy,burial,buried,buries,burke,burlap,burley,burly,burn,burned,burner,burning,burnout,burns,burnt,burp,burping,burrito,burritos,burro,burrows,burst,bursting,burton,bury,burying,bus,busboy,busboys,buses,bush,bushel,bushes,busier,busiest,business,businesses,businessman,businessmen,businesswoman,businesswomen,busload,bussing,bust,busted,buster,bustier,busting,bustle,bustling,busts,busty,busy,busybody,but,butch,butcher,butchered,butchers,butler,butlers,buts,butt,butted,butter,butterball,buttercup,butterflies,butterfly,buttering,butters,butterscotch,buttery,butting,buttocks,button,buttoned,buttoning,buttons,butts,buy,buyer,buyers,buying,buyout,buys,buzz,buzzard,buzzards,buzzed,buzzer,buzzes,buzzing,bwana,by,bye,byes,bygones,bylaws,byline,bypass,byproduct,bystander,bystanders,cab,caballero,cabaret,cabbage,cabdriver,cabernet,cabin,cabinet,cabinets,cabins,cable,cables,caboose,cabs,cacciatore,cache,cachet,cackle,cackling,cacophony,cactus,cad,cadavers,caddie,caddy,cadet,cadmium,caesar,cafe,cafeteria,caff,caffeinated,caffeine,cage,caged,cages,cagey,cahoots,cain,cake,cakes,cakewalk,calamitous,calamity,calcium,calculate,calculated,calculating,calculation,calculations,calculator,calculators,calculus,calendar,calendars,calender,calf,caliber,calibre,calico,call,called,caller,callers,calling,callous,calls,calm,calmed,calmer,calming,calmly,calms,calorie,
calories,calves,calzone,calzones,cam,camcorder,came,camel,camels,camera,cameraman,cameras,camp,campaign,campaigned,campaigning,campaigns,camped,camper,campers,campfire,camping,campos,camps,campus,campuses,cams,camshaft,can,canal,canals,canape,canaries,canary,canasta,cancel,canceled,canceling,cancellation,cancellations,cancelled,cancels,cancer,cancers,candid,candidacy,candidate,candidates,candies,candle,candlelight,candlelit,candles,candlestick,candlesticks,candor,candy,cane,canine,canines,canisters,cannabis,canned,cannery,cannibal,cannibals,cannoli,cannon,cannonball,cannons,cannot,canoe,canoes,canopy,cans,cant,canteen,canter,canton,canvas,canvass,canyon,canyons,cap,capabilities,capable,capacity,cape,caper,capital,capitalism,capitalist,capitalists,capitalize,capitals,capitol,capo,capote,capper,capping,cappuccino,caprice,caps,capsize,capsized,capsule,capsules,captain,captains,captioning,captivated,captivating,captive,captives,captivity,capture,captured,capturing,car,caramba,caramel,carat,carats,carb,carbo,carbohydrates,carbon,carbs,carbuncle,carburetor,carcass,carcinogens,card,cardboard,cardiac,cardigan,cardinal,cardinals,cardiogram,cardiologist,cardiology,cardiovascular,cards,care,cared,career,careers,carefree,careful,carefully,caregiver,careless,carelessness,cares,caress,caretaker,cargo,caribou,caring,caritas,carjacking,carl,carlin,carmen,carmine,carney,carnie,carnival,carnivore,carnivorous,carol,carolers,caroling,carolling,carotid,carousel,carp,carpenter,carpentry,carpet,carpeting,carpool,carr,carriage,carriages,carried,carrier,carriers,carries,carrot,carrots,carry,carrying,cars,cart,carted,cartel,cartels,carter,cartilage,carting,cartographers,carton,cartons,cartoon,cartoonist,cartoons,cartouche,cartridge,carts,carve,carved,carvel,carver,carvers,carving,carvings,carwash,casa,casbah,cascade,case,cased,caseload,cases,cash,cashed,cashews,cashier,cashing,cashmere,casing,casings,casino,casinos,casitas,casket,caskets,casserole,cassette,cassettes,cast,caste,casting,castle,castles,castor,castrated,casts,casual,casually,casualties,casualty,cat,cataclysmic,catalog,cataloging,catalogue,catalyst,catapult,cataracts,catastrophe,catastrophic,catatonic,catch,catcher,catchers,catches,catching,catchy,categorically,categories,categorized,category,cater,caterer,caterers,catering,caterpillar,caterpillars,caters,caterwauling,cates,catfight,catharsis,cathartic,cathedral,catheter,catholic,cats,catsup,cattle,catty,catwalk,caucus,caught,cauldron,cauliflower,cause,caused,causes,causing,caution,cautionary,cautious,cautiously,cavalcade,cavalier,cavalry,cave,caved,caveman,cavern,caverns,caves,caviar,cavities,cavity,cayman,caymans,cease,ceased,cedar,cedars,cee,ceiling,celebrate,celebrated,celebrates,celebrating,celebration,celebrities,celebrity,celery,celeste,celibacy,celibate,cell,cellar,cellars,celled,cellist,cellmate,cellmates,cello,cells,cellular,cellulite,cement,cemeteries,cemetery,censor,censorship,censure,censured,census,cent,centennial,center,centered,centerfold,centerpiece,centerpieces,centers,centimeter,centimeters,central,centre,centred,cents,centuries,century,ceramic,cereal,cerebellum,cerebral,cerebrum,ceremonial,ceremonies,ceremony,certain,certainly,certainties,certainty,certifiable,certifiably,certificate,certificates,certification,certified,certify,cerulean,cervical,cessation,cesspool,chad,chaff,chafing,chagrined,chain,chained,chains,chainsaw,chainsaws,chair,chairman,chairs,chalet,chalk,chalkboard,chalked,challenge,challenged,challenger,challenges,challenging,chamber,chamberlain,chambers,chameleon,chamomi
le,champ,champagne,champion,champions,championship,championships,champs,chance,chancellor,chances,chandelier,chandeliers,chandler,chang,change,changed,changes,changing,channel,channeled,channels,chant,chanteuse,chanting,chants,chaos,chaotic,chap,chapel,chaperon,chaperone,chaperoned,chaperones,chaperoning,chaplain,chapman,chapped,chaps,chapter,chapters,character,characteristics,characterize,characterized,characterizing,characters,charade,charades,charcoal,chardonnay,charge,charged,charger,charges,charging,chariot,chariots,charismatic,charitable,charities,charity,charlatan,charley,charlie,charlies,charlotte,charm,charmed,charmer,charming,charmingly,charms,charred,chart,charted,charter,chartered,chartreuse,charts,chase,chased,chaser,chases,chasing,chasm,chassis,chastity,chat,chateau,chatted,chatter,chatterbox,chatting,chatty,chauffeur,chauvinistic,cheap,cheapen,cheaper,cheapest,cheat,cheated,cheater,cheaters,cheating,cheats,check,checkbook,checked,checker,checkered,checkers,checking,checkmate,checkout,checkpoints,checks,checkup,checkups,cheddar,cheek,cheekbones,cheeks,cheep,cheer,cheered,cheerful,cheering,cheerio,cheerleader,cheerleaders,cheerleading,cheers,cheery,cheese,cheeseburger,cheeseburgers,cheesecake,cheesed,cheeses,cheesy,cheetah,chef,chefs,chemical,chemically,chemicals,chemist,chemistry,chemo,chenille,cheque,cherish,cherished,cherries,cherry,cherub,chess,chessboard,chest,chestnut,chestnuts,chests,chesty,chevalier,chevron,chevy,chew,chewed,chewing,chewy,chez,chi,chic,chicano,chick,chickadee,chicken,chickened,chickening,chickenpox,chickens,chickenshit,chicks,chico,chief,chiefs,chiffon,chigger,chihuahua,child,childbirth,childhood,childhoods,childish,childlike,children,chile,chiles,chili,chill,chilled,chilling,chills,chilly,chime,chimera,chimney,chimp,chimpanzee,chimps,chin,china,chink,chinks,chino,chins,chip,chipmunk,chipped,chipper,chipping,chips,chiropractor,chirp,chirping,chirpy,chisel,chiseling,chit,chitchat,chivalrous,chlamydia,chloride,chlorine,chloroformed,chocolate,chocolates,chocolatey,choice,choices,choir,choirboy,choirs,choke,choked,choker,chokes,choking,cholera,cholesterol,cholinesterase,chomp,chompers,chomping,choose,choosers,chooses,choosing,choosy,chop,chopped,chopper,choppers,chopping,choppy,chops,chopsticks,chord,chords,chore,choreography,chores,chorus,chose,chosen,chow,chowder,christen,christened,christening,christie,christy,chrome,chromic,chromosomes,chronic,chronically,chronicle,chronology,chubby,chuck,chucked,chuckle,chuckles,chucks,chug,chugging,chum,chummy,chump,chumps,chums,chunk,chunks,chunky,church,churches,churn,chute,chutes,ciao,cicely,cider,cigar,cigarette,cigarettes,cigars,cinder,cinema,cinematic,cinnabar,cinnamon,cipher,circle,circled,circles,circling,circuit,circuited,circuitry,circuits,circular,circulate,circulated,circulation,circulatory,circumcision,circumference,circumstance,circumstances,circumstantial,circumvent,circus,cirrhosis,cissy,citation,citations,cite,cited,cities,citing,citizen,citizens,citizenship,citrus,city,citywide,civic,civics,civil,civilian,civilians,civilisation,civility,civilization,civilizations,civilized,civvies,clack,clad,claim,claimed,claiming,claims,clairvoyant,clam,clambake,clammed,clammy,clamoring,clamp,clamped,clamping,clams,clan,clandestine,clang,clanging,clanking,clap,clapped,clapper,clapping,clarence,clarification,clarify,clarifying,clarinet,clarity,clash,clashing,clasp,class,classes,classic,classical,classics,classier,classification,classified,classify,classless,classmate,classmates,classroom,classrooms,classy,clause,clau
ses,claustrophobia,claustrophobic,clavicle,claw,clawed,claws,clay,claymore,claymores,clean,cleaned,cleaner,cleaners,cleanest,cleaning,cleanliness,cleans,cleanse,cleansed,cleanser,cleanses,cleansing,cleanup,clear,clearance,clearances,cleared,clearer,clearing,clearly,clears,cleats,cleavage,cleave,clef,clemency,clement,clench,clergy,clergyman,clerical,clerk,clerks,clever,cleverly,cleverness,cliche,click,clicked,clicker,clicking,clicks,client,clientele,clients,cliff,cliffhanger,cliffs,climate,climax,climb,climbed,climbers,climbing,clinched,clincher,clinches,cling,clinging,clings,clingy,clinic,clinical,clinically,clinics,clink,clip,clipboard,clipped,clipper,clippers,clipping,clippings,clips,clique,clitoris,cloak,clobbered,clock,clocked,clocking,clocks,clockwise,clockwork,clod,clods,clogged,clogging,clogs,cloistered,clone,cloned,clones,clop,close,closed,closely,closeness,closer,closes,closest,closet,closeted,closets,closing,closure,clot,cloth,clothe,clothed,clothes,clothesline,clothing,cloths,clots,clotted,clotting,cloud,clouded,clouds,cloudy,clout,cloven,clover,cloverleaf,cloves,clown,clowning,clowns,club,clubbed,clubhouse,clubs,cluck,clucking,clue,clueless,clues,clump,clumps,clumsily,clumsiness,clumsy,clung,clunk,clunker,clunkers,clusters,clutch,clutched,clutches,clutter,cluttering,coach,coached,coaches,coaching,coal,coalition,coals,coarse,coast,coastal,coaster,coasters,coasting,coat,coating,coats,coattails,coax,coaxing,cob,cobb,cobbler,cobra,cobras,cobweb,cobwebs,coca,cocaine,cockamamie,cocked,cockles,cockney,cockpit,cockroach,cockroaches,cocktail,cocktails,cocky,coco,cocoa,coconut,coconuts,cocoon,cod,coddle,coddling,code,coded,codependent,codes,codicil,coding,coed,coeds,coerce,coerced,coercion,coexist,coffee,coffeehouse,coffees,coffers,coffin,coffins,cog,cognac,cognizant,coherent,cohesion,cohesive,coiffure,coil,coiled,coin,coincide,coincidence,coincidences,coincidental,coincidentally,coins,coitus,coke,cokes,col,cola,colchicine,cold,colder,coldest,coldly,coldness,colds,cole,coles,coleslaw,colic,colin,coliseum,colitis,collaborate,collaborated,collaborating,collaboration,collaborator,collage,collagen,collapse,collapsed,collapses,collapsing,collar,collarbone,collars,collateral,colleague,colleagues,collect,collected,collecting,collection,collections,collective,collectively,collector,collectors,collects,colleen,college,colleges,collide,collided,collie,collier,collins,collision,cologne,colon,colonel,colonels,colonial,colonials,colonies,colonists,colonization,colonized,colonnade,colony,color,colorado,colored,colorful,coloring,colors,colossal,colosseum,colour,coloured,colours,column,columnist,columnists,columns,coma,comas,comatose,comb,combat,combative,combination,combine,combined,combines,combing,combo,combust,combusted,combustible,combustion,come,comeback,comebacks,comedian,comedians,comedic,comedies,comedy,comer,comers,comes,comet,comeuppance,comfort,comfortable,comfortably,comforted,comforter,comforting,comforts,comfy,comic,comical,comics,coming,comma,command,commandant,commander,commanders,commanding,commandment,commandments,commando,commandos,commands,commemorating,commence,commencing,commendable,commendation,commensurate,comment,commentaries,commentary,commentator,commented,commenting,comments,commerce,commercial,commercialism,commercially,commercials,commie,commies,commiserate,commissary,commission,commissioned,commissioner,commissioners,commit,commitment,commitments,commits,committed,committee,committees,committing,commode,commodities,commodity,common,commoner,commonly,commonplace,commonwealt
h,commotion,communal,commune,communicate,communicated,communicating,communication,communications,communicator,communing,communion,communique,communism,communist,communists,communities,community,commute,commuted,comp,compact,compactor,compadre,companies,companion,companions,companionship,company,comparable,comparative,comparatively,compare,compared,compares,comparing,comparison,compartment,compartments,compass,compassion,compassionate,compatibility,compatible,compel,compelled,compelling,compels,compensate,compensated,compensation,compete,competence,competent,competing,competition,competitions,competitive,competitiveness,competitor,competitors,compilation,compiling,complacency,complacent,complain,complained,complaining,complains,complaint,complaints,complete,completed,completely,completes,completing,completion,complex,complexion,complexities,complexity,compliant,complicate,complicated,complicates,complicating,complication,complications,compliment,complimentary,compliments,comply,component,components,composed,composer,composers,composite,composition,compost,composure,compound,compounds,comprehend,comprehending,comprehension,comprehensive,compressed,compression,compressions,compressor,comprise,comprised,compromise,compromised,compromises,compromising,compulsion,compulsive,compulsively,compulsory,compute,computer,computerized,computers,comrade,comrades,con,conceal,concealed,concealer,concealing,concealment,concede,conceding,conceited,conceivable,conceivably,conceive,conceived,conceiving,concentrate,concentrated,concentrating,concentration,concentric,concept,conception,concepts,concern,concerned,concerning,concerns,concert,concerts,concession,concessions,concierge,concise,conclave,conclude,concluded,concludes,concluding,conclusion,conclusions,conclusive,conclusively,concocted,concocting,concoction,concord,concourse,concrete,concur,concurrently,concussion,concussions,condemn,condemnation,condemned,condemning,condensation,condensed,condescending,condiment,condition,conditioned,conditioner,conditioners,conditioning,conditions,condo,condolences,condom,condominium,condoms,condone,condoned,condoning,condor,condos,conducive,conduct,conducted,conducting,conductor,conducts,cone,cones,coney,confederacy,confederate,confederates,confederation,confer,conference,conferences,conferred,conferring,confess,confessed,confessing,confession,confessional,confessions,confessor,confetti,confidant,confidante,confide,confided,confidence,confidences,confident,confidential,confidentiality,confidentially,confides,confiding,configuration,confine,confined,confinement,confining,confirm,confirmation,confirmed,confirming,confirms,confiscated,confiscating,confit,conflict,conflicted,conflicting,conflicts,confluence,conform,conformity,confound,confront,confrontation,confrontations,confronted,confronting,confronts,confuse,confused,confusing,confusion,congenial,congeniality,congestion,congrats,congratulate,congratulated,congratulating,congratulations,congregation,congress,congressional,congressman,congresswoman,conjecture,conjugal,conjugate,conjunction,conjure,conjured,conjures,conjuring,conk,conked,conn,connect,connected,connecting,connection,connections,connects,conned,conner,conning,conniption,conniving,connoisseur,conquer,conquered,conquering,conquers,conquest,cons,conscience,conscientious,conscious,consciously,consciousness,consecrated,consecutive,consensual,consensus,consent,consented,consenting,consequence,consequences,consequently,conservation,conservative,conservatory,conserve,consider,considerable,considerably,considerate,co
nsideration,considered,considering,considers,consigliere,consisted,consistent,consistently,consists,consolation,console,consoled,consolidate,consolidated,consoling,consort,consorting,consortium,conspicuous,conspiracies,conspiracy,conspirator,conspirators,conspire,conspired,conspiring,constable,constant,constantly,constellation,constipation,constituents,constitute,constitutes,constitution,constitutional,constitutionally,constraints,constrictor,construct,constructed,construction,constructive,construed,consul,consulate,consult,consultant,consultation,consultations,consulted,consulting,consults,consume,consumed,consumer,consumers,consumes,consuming,consummate,consummated,consumption,contact,contacted,contacting,contacts,contagious,contain,contained,container,containers,containing,containment,contains,contaminate,contaminated,contaminating,contamination,contemplate,contemplated,contemplating,contemporary,contempt,contender,contenders,content,contented,contention,contentment,contents,contest,contestant,contestants,contesting,context,continent,continental,continents,contingencies,contingency,continually,continuance,continuation,continue,continued,continues,continuing,continuity,continuous,continuously,continuum,contortionist,contours,contra,contraband,contraceptives,contract,contracted,contraction,contractions,contractor,contracts,contradict,contradicted,contradicting,contradiction,contradictory,contraption,contrary,contrast,contribute,contributed,contributes,contributing,contribution,contributions,contributor,contributors,contrite,contrived,control,controlled,controller,controlling,controls,controversial,controversy,contusion,contusions,convene,convened,convenes,convenience,convenient,conveniently,convent,convention,conventional,converge,converging,conversation,conversational,conversationalist,conversations,converse,conversion,convert,converted,convertible,convertibles,converting,convey,conveyor,convict,convicted,conviction,convictions,convince,convinced,convinces,convincing,convoy,convulsing,coo,cooing,cook,cookbook,cookbooks,cooked,cooker,cookie,cookies,cooking,cooks,cool,coolant,cooled,cooler,coolers,coolest,cooling,coolly,coolness,cools,coop,cooped,cooper,cooperate,cooperated,cooperating,cooperation,cooperative,coopers,coordinate,coordinated,coordinates,coordinating,coordinator,coot,cooties,cop,copacetic,cope,copied,copier,copies,copilot,coping,copiously,copper,copperhead,coppers,copping,cops,copter,copy,copycat,copying,copyright,cor,coral,cord,cordial,cordless,cordon,cordoned,cords,corduroy,core,coriander,cork,corker,corks,corkscrew,corky,corn,cornball,corned,corner,cornered,cornering,corners,cornerstone,cornfield,cornflakes,cornucopia,corny,corollary,corona,coronary,coronation,coroner,coroners,corporal,corporate,corporation,corporations,corporeal,corps,corpse,corpses,corral,correct,corrected,correcting,correction,correctional,corrections,corrective,correctly,correlation,correspond,corresponded,correspondence,correspondent,correspondents,corresponding,corresponds,corridor,corroborate,corroboration,corrupt,corrupted,corrupting,corruption,corsage,corset,cortex,cory,cos,cosign,cosmetic,cosmetics,cosmic,cosmically,cosmology,cosmopolitan,cosmos,cost,costa,costing,costly,costs,costume,costumes,cosy,cot,cotillion,cots,cottage,cottages,cotton,couch,cougar,cougars,cough,coughing,coughs,could,council,councillor,councilors,counsel,counseling,counselling,counsellor,counselor,counselors,count,countdown,counted,countenance,counter,counterattack,counterfeit,counterfeiting,countermeasures,counteroffer,count
erpart,counterproductive,countess,counties,counting,countless,countries,country,countrymen,countryside,counts,county,coup,coupe,couple,couples,coupling,coupon,coupons,courage,courageous,courier,couriers,course,courses,coursing,court,courted,courteous,courtesy,courthouse,courting,courtroom,courtrooms,courts,courtship,courtside,courtyard,cousin,cousins,couture,cove,coven,covenant,cover,coverage,coveralls,covered,covering,covers,covert,coverup,covet,coveted,coveting,cow,coward,cowardice,cowardly,cowards,cowboy,cowboys,cowed,cower,cowering,cowgirl,coworker,cows,cox,coy,coyote,coyotes,coz,cozier,cozy,cozying,crab,crabby,crabgrass,crabs,crack,cracked,cracker,crackerjack,crackers,cracking,crackling,crackpot,cracks,cradle,craft,crafted,crafting,crafts,craftsmanship,craftsmen,crafty,cram,crammed,cramming,cramp,cramped,cramping,cramps,cranberry,crane,cranes,cranial,cranium,crank,cranking,cranks,cranky,cranny,crap,crapped,crapper,crapping,crappy,craps,crash,crashed,crasher,crashers,crashes,crashing,crass,crate,crated,crater,crates,crave,craves,craving,crawl,crawled,crawlers,crawling,crawls,crawly,crayons,craze,crazed,crazier,craziest,craziness,crazy,creak,creaky,cream,creamed,creamer,creaming,creamy,crease,creased,creases,create,created,creates,creating,creation,creations,creative,creatively,creativity,creator,creature,creatures,credence,credentials,credenza,credibility,credible,credit,credited,credits,credo,creed,creeds,creek,creep,creepers,creeping,creeps,creepy,cremated,cremation,crematorium,creme,crepe,crepes,crept,crescent,crest,crested,cretins,crevasse,crew,crewman,crews,crib,cribbage,cribs,crick,cricket,cried,crier,cries,crikey,crime,crimes,criminal,criminalistics,criminally,criminals,criminology,crimp,crimson,cringe,cripes,cripple,crippled,cripples,cris,crises,crisis,crisp,crisps,crispy,criteria,criterion,critic,critical,critically,criticism,criticize,criticized,criticizing,critics,critter,critters,croak,croaker,croc,crock,crocket,crocodile,crocodiles,croft,croissants,cronies,crook,crooked,crooks,croon,crop,cropped,crops,croquet,cross,crossbow,crossed,crosses,crossfire,crosshairs,crossing,crossroads,crossword,crotch,crouch,crouched,crouching,croupier,croutons,crow,crowbar,crowd,crowded,crowding,crowds,crowed,crowing,crown,crowned,crowning,crowns,crows,crucial,crucible,crucified,crucifix,crucifixion,crucify,crud,cruddy,crude,crudely,cruel,cruelly,cruelty,cruise,cruised,cruiser,cruisers,cruises,cruising,crumb,crumble,crumbled,crumbles,crumbling,crumbs,crummy,crumpets,crumpled,crunch,crunched,crunches,crunchy,crusade,crusader,crusades,crush,crushed,crusher,crushes,crushing,crust,crusts,crusty,crutch,crutches,crux,cry,crying,cryogenic,crypt,cryptic,crypto,crypts,crystal,crystals,cub,cubbies,cubby,cube,cubed,cubes,cubic,cubicle,cubs,cuckoo,cucumber,cud,cuddle,cuddled,cuddles,cuddly,cuddy,cue,cued,cuff,cuffed,cuffing,cuffs,cuisine,culinary,culminating,culmination,culottes,culpa,culpability,culpable,cult,cultivate,cultivated,cultivating,cultural,culturally,culture,cultured,cultures,cumin,cummerbund,cumulative,cuneiform,cunning,cup,cupboard,cupboards,cupcake,cupcakes,cupid,cuppa,cups,cur,curacao,curate,curator,curb,curd,curdle,cure,cured,cures,curfew,curfews,curie,curing,curiosity,curious,curiously,curl,curled,curlers,curling,curly,curmudgeon,curran,currency,current,currently,currents,curriculum,curry,curse,cursed,curses,cursing,cursive,cursory,curt,curtain,curtains,curtsy,curvaceous,curve,curveball,curves,cushion,cushions,cushy,cusp,cuss,cussing,custard,custodial,custodian,custody,custom,customarily,
customary,customer,customers,customs,cut,cutaway,cutbacks,cute,cuteness,cuter,cutest,cutesy,cuticle,cuticles,cutie,cutlass,cutler,cutlery,cutoff,cutoffs,cutout,cuts,cutter,cutters,cutthroat,cutting,cyanide,cybernetic,cyberspace,cyborg,cycle,cycles,cyclone,cyclops,cyclotron,cylinder,cylinders,cymbal,cynic,cynical,cynicism,cynics,cypher,cypress,cyprus,cyst,cystic,dab,dabble,dabbled,dabbling,dad,daddies,daddy,dads,daffodils,daffy,daft,dag,dagger,dago,dah,dahl,dahlia,dailies,daily,dainty,daiquiri,dairy,dais,daisies,daisy,dale,dalliance,dally,dallying,dalton,dam,damage,damaged,damages,damaging,dame,damn,damnable,damned,damnedest,damning,damp,dampened,dampener,damper,damsel,damsels,dance,danced,dancer,dancers,dances,dancing,dandelion,dandelions,dandruff,dandy,dang,danger,dangerous,dangerously,dangers,dangle,dangled,dangling,danish,daphne,dapper,dare,dared,daredevil,dares,daring,dark,darken,darkened,darker,darkest,darkness,darkroom,darks,darling,darlings,darn,darndest,darned,dart,darts,dash,dashboard,dashed,dasher,dashing,data,database,databases,date,dated,dateless,dateline,dater,dates,dating,dato,daughter,daughters,daunting,dauphin,davenport,davy,dawdling,dawn,dawned,dawning,day,daybreak,daydream,daydreaming,daylight,daylights,days,daytime,daze,dazzle,dazzled,dazzling,de,deacon,dead,deadbeat,deadbeats,deader,deadlier,deadliest,deadline,deadlines,deadlock,deadly,deaf,deafening,deal,dealer,dealers,dealership,dealing,dealings,deals,dealt,dean,dear,dearest,dearie,dearly,dears,death,deathbed,deathly,deaths,deb,debacle,debatable,debate,debated,debates,debating,debilitating,debit,debonair,debrief,debriefed,debriefing,debris,debt,debts,debut,debutante,debutantes,decade,decadence,decadent,decades,decaf,decanter,decapitate,decapitated,decapitation,decay,decaying,deceased,deceit,deceitful,deceive,deceived,deceiving,decency,decent,deception,deceptions,deceptive,deceptively,decibel,decibels,decide,decided,decides,deciding,decipher,deciphering,decision,decisions,decisive,deck,decked,decker,decks,declaration,declare,declared,declaring,decline,declined,declining,deco,decoded,decoder,decompose,decomposed,decomposing,decompress,deconstruction,decontamination,decor,decorate,decorated,decorating,decoration,decorations,decorative,decorator,decorators,decorum,decoupage,decoy,decoys,decreased,decree,decrypted,decryption,dedicate,dedicated,dedicating,dedication,deduce,deduct,deducted,deductible,deduction,deductions,dee,deed,deeded,deeds,deejay,deem,deemed,deep,deeper,deepest,deeply,deer,deets,defaced,defacing,default,defeat,defeated,defeats,defect,defected,defective,defector,defects,defence,defend,defendant,defendants,defended,defender,defending,defense,defenseless,defenses,defensive,deferred,defiance,defiant,defiantly,defibrillator,deficiencies,deficiency,deficient,deficit,defied,defies,define,defined,defining,definite,definitely,definition,definitions,definitive,definitively,deflate,deflated,deflecting,deflection,deflower,deformity,defraud,defrost,deft,deftly,defuse,defused,defy,defying,degenerate,degeneration,degenerative,degradation,degrade,degraded,degrading,degree,degrees,dehydrated,dehydration,deigned,deities,deity,deke,del,delay,delayed,delaying,delays,delectable,delegate,delegates,delegation,delete,deleted,deli,deliberate,deliberately,deliberation,deliberations,delicacy,delicate,delicately,delicates,delicious,delight,delighted,delightful,delightfully,delights,delinquency,delinquent,delirious,delirium,deliver,delivered,deliveries,delivering,delivers,delivery,dell,delly,delta,deltas,delude,deluded,deluding,deluge
,delusion,delusional,delusions,deluxe,delve,delving,demand,demanded,demanding,demands,demean,demeaning,demented,dementia,demerits,demise,demo,democracy,democrat,democratic,democrats,demographic,demographics,demolish,demolition,demolitions,demon,demonic,demonology,demons,demonstrate,demonstrated,demonstrates,demonstration,demonstrations,demonstrators,demoted,demur,demure,den,denial,denied,denies,denning,denominational,denominations,denominators,denounce,denouncing,dense,density,dent,dental,dentist,dentists,dents,dentures,deny,denying,deodorant,depart,departed,departing,department,departmental,departments,departure,depend,dependable,dependant,depended,dependency,dependent,depending,depends,depict,depicted,depicting,depiction,depicts,deplete,deplorable,deplore,deploy,deployed,deport,deported,depose,deposed,deposing,deposit,deposited,deposition,depositions,depository,deposits,depot,depraved,deprecating,depress,depressed,depressing,depression,depressor,depressors,deprivation,deprive,deprived,depriving,depth,depths,deputies,deputized,deputy,derail,derailing,deranged,derby,derelict,derision,derivative,derive,derives,dermatologist,derogatory,derrick,derriere,derris,descend,descendants,descending,descends,descent,describe,described,describes,describing,description,descriptive,desecrate,desecrated,desecration,desert,deserted,deserter,deserting,deserts,deserve,deserved,deserves,deserving,design,designate,designated,designation,designed,designer,designers,designing,designs,desirable,desire,desired,desires,desist,desk,desks,desktop,desolate,despair,desperado,desperate,desperately,desperation,despicable,despise,despised,despises,despising,despite,despondent,dessert,desserts,destabilize,destination,destinations,destined,destiny,destitute,destroy,destroyed,destroyer,destroyers,destroying,destroys,destruct,destructing,destruction,destructive,destructs,detach,detached,detachment,detail,detailed,detailing,details,detain,detained,detaining,detect,detected,detecting,detection,detective,detectives,detector,detectors,detente,detention,detergent,deteriorated,deterioration,determination,determine,determined,determines,determining,deterrent,detest,detestable,detests,detonate,detonated,detonates,detonating,detonation,detonator,detonators,detour,detours,detoxing,detract,detrimental,deuce,deuces,devastate,devastated,devastating,devastatingly,devastation,develop,developed,developer,developing,development,developments,develops,deviants,deviated,deviation,device,devices,devil,deviled,devilishly,devils,devious,devise,devised,devon,devote,devoted,devoting,devotion,devour,devoured,devouring,devours,dew,dewars,dewy,dex,dey,diabetes,diabetic,diabetics,diabolical,diagnose,diagnosed,diagnoses,diagnosis,diagnostic,diagnostician,diagonal,diagram,dial,dialect,dialed,dialing,dialogue,dials,dialysis,diameter,diamond,diamonds,diaper,diapers,diaphragm,diaphragms,diaries,diarrhea,diary,diathesis,diatribes,diazepam,dibs,dice,diced,dicey,dickens,dictate,dictated,dictates,dictating,dictation,dictator,dictators,dictatorship,dictatorships,dictionary,did,diddling,diddly,die,died,dies,diesel,diet,dieter,dieting,diets,differ,difference,differences,different,differential,differently,differs,difficult,difficulties,difficulty,diffuse,diffusion,dig,digest,digested,digesting,digestion,digestive,digger,diggers,digging,digit,digital,digitally,digits,dignified,dignify,dignitaries,dignity,digress,digs,dike,dilated,dilation,dilemma,dilettante,diligence,diligent,diligently,dill,dills,dilly,dilute,diluted,dim,dime,dimension,dimensional,dimensions,dimes,dim
inish,diminished,diminutive,dimly,dimmer,dimming,dimple,dimpled,dimwit,dine,dined,diner,dinero,diners,ding,dinged,dingle,dingo,dings,dingy,dining,dink,dinks,dinky,dinner,dinners,dinnertime,dinning,dinosaur,dinosaurs,dioxide,dip,diphtheria,diploma,diplomas,diplomatic,dipped,dipping,dips,dipstick,dire,direct,directed,directing,direction,directions,directive,directly,director,directorate,directors,directory,dirk,dirt,dirtiest,dirty,dis,disabilities,disability,disable,disabled,disabling,disadvantage,disagree,disagreed,disagreeing,disagreement,disagreements,disagrees,disallowed,disappear,disappearance,disappearances,disappeared,disappearing,disappears,disappoint,disappointed,disappointing,disappointment,disappointments,disappoints,disapproval,disapprove,disapproved,disapproves,disapproving,disarm,disarmed,disarming,disarray,disassembled,disaster,disasters,disastrous,disbarred,disbelief,disc,discarded,discerning,discharge,discharged,disciples,disciplinary,discipline,disciplined,disciplines,disciplining,disclose,disclosed,disclosure,disco,discoloration,discolored,discomfort,disconnect,disconnected,discontent,discontinue,discontinued,discord,discotheque,discount,discounting,discounts,discourage,discouraged,discourse,discourteous,discover,discovered,discoveries,discovering,discovers,discovery,discredit,discreet,discreetly,discrepancies,discrepancy,discrete,discretion,discriminate,discriminated,discriminating,discrimination,discs,discus,discuss,discussed,discusses,discussing,discussion,discussions,disdain,disease,diseased,diseases,disfigured,disfiguring,disgrace,disgraced,disgruntled,disguise,disguised,disguises,disgust,disgusted,disgusting,disgustingly,dish,disheartening,dishes,disheveled,dishonest,dishonor,dishonorable,dishwasher,disillusioned,disillusionment,disinfect,disinfectant,disinformation,disingenuous,disinherit,disinherited,disintegrate,disintegrated,disk,disks,dislike,dislocated,dislodge,dislodged,disloyal,dismal,dismantle,dismantled,dismantling,dismay,dismember,dismemberment,dismiss,dismissal,dismissed,dismissing,dismissive,dismount,disobedience,disobey,disobeyed,disobeying,disorder,disorders,disorganized,disorientation,disoriented,disorienting,disown,disowned,disparaging,disparity,dispatch,dispatched,dispatcher,dispatches,dispensary,dispensation,dispense,dispensed,dispenser,dispensers,dispensing,disperse,displace,displaced,displacement,display,displayed,displaying,displays,displeased,displeasure,disposable,disposal,dispose,disposing,disposition,disproportionate,disprove,dispute,disputes,disputing,disqualified,disqualify,disregard,disregarded,disrespect,disrespected,disrespectful,disrespecting,disrupt,disrupted,disrupting,disruptions,disruptive,disrupts,dissatisfied,dissect,dissected,dissection,dissed,dissent,dissertation,disservice,dissident,dissing,dissipate,dissipated,dissolve,dissolved,dissolves,dissolving,dissuade,distance,distances,distancing,distant,distaste,distasteful,distended,distillery,distinct,distinction,distinctions,distinctive,distinctly,distinguish,distinguished,distinguishing,distort,distorted,distortion,distortions,distract,distracted,distracting,distraction,distractions,distraught,distress,distressed,distressing,distribute,distributed,distributing,distribution,distributor,distributors,district,districts,distrust,distrustful,disturb,disturbance,disturbances,disturbed,disturbing,dit,ditch,ditched,ditches,ditching,ditsy,ditto,ditty,ditz,diuretic,diuretics,diva,divas,dive,diver,divers,diversion,diversionary,diversions,divert,diverted,diverting,dives,divest,divide,divided,
dividends,divine,diving,divining,divinity,division,divisional,divisions,divorce,divorced,divorcee,divorces,divorcing,divulged,divvy,dizziness,dizzy,dizzying,do,dobbin,dobbins,dobson,doc,dock,docked,docking,docks,docs,doctor,doctored,doctors,doctrine,document,documentaries,documentary,documented,documents,dodge,dodgeball,dodged,dodger,dodgers,dodging,dodgy,doe,doer,does,dog,dogged,doggie,doggies,doggone,doggy,doghouse,dogs,dogwood,doily,doing,dojo,dol,dolce,dole,doling,doll,dollar,dollars,dolled,dollhouse,dollop,dolls,dolly,dolphin,dolphins,dolt,dom,domain,dome,domes,domestic,domesticated,domesticity,domicile,dominance,dominant,dominate,dominates,dominating,domination,dominick,dominion,dominoes,don,donate,donated,donating,donation,donations,done,dong,dongs,donkey,donkeys,donna,donor,donors,donut,donuts,doodle,doodles,doofus,doohickey,doom,doomed,doomsday,door,doorbell,doorknob,doorknobs,doorman,doormat,doornail,doors,doorstep,doorway,doozy,dopamine,dope,dopes,dopey,doping,doppelganger,dorado,dork,dorky,dorm,dormant,dormitory,dorms,dorsal,dory,dos,dosage,dosages,dose,dosed,doses,dossier,dost,dot,dote,dotes,doth,dots,dotted,dotty,double,doubled,doubles,doubly,doubt,doubted,doubtful,doubting,doubts,dough,doughnut,doughnuts,dour,doused,dove,doves,dowager,dowdy,down,downed,downer,downfall,downgraded,downhill,downing,download,downloaded,downpour,downright,downriver,downs,downside,downsize,downsizing,downstairs,downstream,downtown,downtrodden,downward,downy,dowser,doze,dozed,dozen,dozens,dozer,dozing,drab,draft,drafted,drafting,drafts,drafty,drag,dragged,dragging,dragon,dragonfly,dragons,dragoons,drags,drain,drainage,drained,draining,drainpipe,drake,drama,dramas,dramatic,dramatically,drank,drape,draped,drapes,drastic,drastically,draw,drawback,drawer,drawers,drawing,drawings,drawn,draws,drawstring,dread,dreaded,dreadful,dreading,dream,dreamed,dreamer,dreamers,dreaming,dreamless,dreams,dreamt,dreamy,dreary,dreck,dredge,dredged,dredging,dregs,dreidel,dress,dressed,dresser,dresses,dressing,dressings,dressy,drew,dribble,dribbles,dribbling,dried,drier,dries,drift,drifted,drifter,drifting,driftwood,drill,drilled,drilling,drink,drinker,drinkers,drinking,drinks,drip,dripped,dripping,drippy,drips,drive,drivel,driven,driver,drivers,drives,driveway,driveways,driving,droll,drone,drones,drool,drooled,drooling,drools,droop,droopy,drop,dropout,dropped,dropper,dropping,droppings,drops,drosophila,drought,drove,droves,drown,drowned,drowning,drowsy,drudge,drug,drugged,drugging,druggist,drugs,drugstore,drum,drummed,drummer,drumming,drums,drumstick,drumsticks,drunk,drunkard,drunken,drunks,druthers,dry,dryer,dryers,drying,drywall,dual,duality,dub,dubbed,dubious,duce,duchess,duck,ducked,ducking,duckling,ducks,ducky,duct,ducts,dud,dude,dudes,duds,due,duel,dueling,dues,duet,duff,duffel,duffle,dug,dugout,dui,duke,dukes,dulcet,dull,dullard,dulled,dullest,dullness,duly,dumb,dumber,dumbest,dumdum,dummies,dummkopf,dummy,dump,dumped,dumper,dumping,dumpling,dumplings,dumps,dun,dunes,dung,dungeon,dunk,dunked,dunking,dunks,duo,duped,duplex,duplicate,durable,duration,duress,during,durned,dusk,dusky,dust,dusted,dusting,dusty,dutch,duties,dutiful,duty,duvet,dwarf,dwarfs,dwarves,dweeb,dwell,dweller,dwellers,dwelling,dwells,dwindling,dye,dyed,dyeing,dyer,dying,dynamic,dynamics,dynamite,dynamo,dynasty,dysentery,dysfunction,dysfunctional,each,eager,eagerly,eagle,eagles,ear,earful,earl,earlier,earliest,earlobe,earlobes,early,earmarked,earmarks,earn,earned,earnest,earning,earns,earphones,earpiece,earplugs,earring,earrings,ears,earshot,earth,
earthlings,earthly,earthquake,earthquakes,earthy,earwig,ease,easel,eases,easier,easiest,easily,easing,east,eastbound,easter,eastern,easy,easygoing,eat,eaten,eater,eaters,eating,eats,eaves,eavesdrop,eavesdropped,eavesdropping,ebb,ebony,eccentric,eccentricities,echelon,echo,echoes,eclectic,eclipse,ecological,economic,economical,economically,economics,economy,ecosystem,ecstasy,ecstatic,ectopic,ectoplasm,ecumenical,eczema,ed,eddy,edema,edge,edged,edges,edgewise,edging,edgy,edible,edict,edit,edited,editing,edition,editor,editorial,editorials,editors,edits,educate,educated,educating,education,educational,educator,educators,eel,eels,eerie,eerily,effacing,effect,effected,effecting,effective,effectively,effectiveness,effects,effeminate,efficiency,efficient,efficiently,effort,effortless,efforts,egg,egghead,egging,eggnog,eggplant,eggs,eggshell,eggshells,ego,egocentric,egomaniac,egomaniacal,egos,egotistical,egregious,egyptian,eh,eight,eighteen,eighteenth,eighth,eighties,eights,eighty,einstein,either,ejaculate,eject,ejection,eking,el,elaborate,elapsed,elastic,elated,elbow,elbows,elder,elderly,elders,eldest,elect,elected,electing,election,elections,elective,electoral,electorate,electric,electrical,electrician,electricians,electricity,electrified,electro,electrocute,electrocuted,electrodes,electrolyte,electrolytes,electronic,electronically,electronics,electrons,electroshock,elegance,elegant,element,elemental,elementary,elements,elephant,elephants,elevate,elevated,elevates,elevation,elevator,elevators,eleven,eleventh,elf,elicit,eligible,eliminate,eliminated,eliminates,eliminating,elimination,elite,elitist,elixir,elk,elks,ell,elliptical,elm,elms,elope,eloped,eloping,eloquence,eloquent,eloquently,else,elsewhere,elude,eluded,eludes,elusive,elves,em,emanates,emancipated,emancipation,embalmed,embalming,embankment,embark,embarked,embarking,embarrass,embarrassed,embarrasses,embarrassing,embarrassingly,embarrassment,embassies,embassy,embedded,embellished,ember,embezzle,embezzled,embezzlement,embezzler,embezzling,embittered,emblem,embodied,embody,embossed,embrace,embraced,embraces,embracing,embroidered,embroiled,embryo,emerald,emeralds,emerge,emerged,emergencies,emergency,emerges,emery,eminence,eminent,eminently,emission,emissions,emit,emotion,emotional,emotionally,emotions,empathic,empathy,emperor,emperors,emphasis,emphasize,emphasized,emphatic,emphatically,empire,empirical,employ,employed,employee,employees,employer,employers,employing,employment,emporium,empowered,empowerment,empress,emptied,empties,emptiness,empty,emptying,ems,emulating,en,enable,enabled,enables,enabling,enact,enacted,enamel,enamored,encephalitis,enchant,enchanted,enchanting,enchantment,encinas,enclosed,encoded,encore,encounter,encountered,encounters,encourage,encouraged,encouragement,encourages,encouraging,encrusted,encrypted,encryption,encyclopedia,encyclopedias,end,endanger,endangered,endangering,endangerment,endear,endearing,endeavor,endeavors,endeavour,ended,ending,endings,endive,endless,endlessly,endorphins,endorse,endorsement,endorsements,endorsing,endowment,ends,endurance,endure,endured,enduring,enema,enemies,enemy,energized,energy,enforce,enforced,enforcement,enforcer,enforcing,eng,engage,engaged,engagement,engagements,engages,engaging,engine,engineer,engineered,engineering,engineers,engines,english,engraved,engraving,engrossed,engrossing,engulfed,enhance,enhanced,enhancements,enhancer,enhances,enigma,enjoy,enjoyable,enjoyed,enjoying,enjoyment,enjoys,enlarged,enlighten,enlightened,enlightenment,enlist,enlisted,enlisting,ennui,enormity,
enormous,enormously,enough,enquiries,enrage,enraged,enrich,enriched,enriching,enrolled,enrolling,enrollment,ensconced,ensemble,ensign,enslave,enslaved,ensue,ensued,ensuing,ensure,ensuring,entail,entails,entanglements,enter,entered,entering,enterprise,enterprises,enterprising,enters,entertain,entertained,entertainer,entertaining,entertainment,enthralled,enthused,enthusiasm,enthusiast,enthusiastic,entice,enticed,entire,entirely,entirety,entities,entitle,entitled,entitles,entity,entomologist,entomology,entourage,entrails,entrance,entrances,entrapment,entree,entrenched,entrepreneur,entrepreneurial,entries,entrust,entrusted,entry,entwined,envelope,envelopes,envied,envious,environment,environmental,environmentalist,envision,envisioned,envoy,envy,enzyme,ephemeral,epic,epidemic,epidural,epilepsy,epileptic,epinephrine,epiphany,episode,episodes,epizootics,epoxy,epsilon,equal,equality,equally,equals,equation,equator,equestrian,equilibrium,equinox,equipment,equipped,equitable,equity,equivalent,er,era,erase,erased,eraser,erasers,erases,erasing,ere,erect,erection,ergo,erica,erode,eroding,erogenous,eros,erosion,erotic,err,errand,errands,errant,erratic,erratically,erred,erroneous,error,errors,ers,erupt,eruption,es,escalated,escalating,escalator,escapade,escapades,escape,escaped,escapee,escapes,escaping,escargot,escort,escorted,escorts,esophagus,esoteric,especially,espionage,espresso,espressos,ess,essay,essays,essence,essential,essentially,establish,established,establishes,establishing,establishment,establishments,estate,esteem,esteemed,ester,estimate,estimated,estimates,estimating,estimation,estranged,estrogen,et,eta,etcetera,etched,eternal,eternally,eternity,eth,ethanol,ether,ethic,ethical,ethically,ethics,ethnic,ethnicity,ethyl,etiquette,eucalyptus,eugenia,eulogy,eunuch,euphemism,euphemisms,euphoric,euro,euros,euthanasia,evacuate,evacuated,evacuating,evacuation,evade,evaluate,evaluated,evaluating,evaluation,evaluations,evangelical,evaporate,evaporated,evasions,evasive,eve,even,evening,evenings,evenly,evens,event,eventful,events,eventual,eventuality,eventually,ever,everglades,everlasting,every,everybody,everyday,everyman,everyone,everything,everywhere,eves,evict,evicted,evidence,evidenced,evident,evidently,evil,evils,eviscerate,eviscerated,evocative,evoked,evolution,evolutionary,evolve,evolved,evolving,ewe,ex,exacerbate,exact,exacting,exactly,exaggerate,exaggerated,exaggerating,exaggeration,exalted,exam,examination,examine,examined,examiner,examining,example,examples,exams,exasperated,exasperating,exceed,exceeded,exceeding,exceedingly,exceeds,excel,excellence,excellency,excellent,excels,except,excepted,exception,exceptional,exceptionally,exceptions,excess,excesses,excessive,excessively,exchange,exchanged,exchanges,exchanging,excite,excited,excitement,excites,exciting,exclamation,excluded,excludes,excluding,exclusion,exclusive,exclusively,excommunicated,excruciating,exculpatory,excursion,excursions,excuse,excused,excuses,excusing,execs,execute,executed,executing,execution,executioner,executions,executive,executives,executor,exemplary,exempt,exemption,exemptions,exercise,exercises,exercising,exerting,exertion,exes,exfoliate,exhale,exhaust,exhausted,exhausting,exhaustion,exhaustive,exhausts,exhibit,exhibited,exhibiting,exhibition,exhibits,exhilarated,exhilarating,exhilaration,exhumation,exhume,exhumed,exigent,exile,exiled,exiles,exist,existed,existence,existent,existential,existentialist,existing,exists,exit,exited,exiting,exits,exodus,exonerate,exonerated,exorbitant,exorcise,exorcism,exorcist,exotic,expand,
expanded,expanding,expands,expansion,expect,expectancy,expectation,expectations,expected,expecting,expects,expedient,expedite,expedited,expedition,expel,expelled,expelling,expendable,expenditure,expenditures,expense,expenses,expensive,experience,experienced,experiences,experiencing,experiment,experimental,experimentation,experimented,experimenting,experiments,expert,expertise,experts,expiration,expired,expires,explain,explained,explaining,explains,explanation,explanations,explanatory,explicit,explicitly,explode,exploded,explodes,exploding,exploit,exploitation,exploited,exploiting,exploration,exploratory,explore,explored,explorer,explorers,exploring,explosion,explosions,explosive,explosives,exponential,export,exporter,exporting,exports,expose,exposed,exposes,exposing,exposure,express,expressed,expresses,expressing,expression,expressions,expressive,expressly,expulsion,expunged,exquisite,exquisitely,extend,extended,extending,extension,extensions,extensive,extent,extenuating,exterior,exterminate,exterminated,exterminating,extermination,exterminator,external,extinct,extinction,extinguish,extinguished,extinguisher,extort,extorted,extorting,extortion,extortionist,extra,extract,extracted,extracting,extraction,extracts,extracurricular,extracurriculars,extradite,extradited,extradition,extramarital,extraordinaire,extraordinarily,extraordinary,extras,extraterrestrial,extraterrestrials,extravagant,extravaganza,extreme,extremely,extremes,extremism,extremists,extremities,extricate,eye,eyeball,eyeballs,eyebrow,eyebrows,eyed,eyeful,eyeing,eyelash,eyelashes,eyelids,eyeliner,eyes,eyesight,eyesore,eyewitness,eyewitnesses,eyre,fa,fable,fabled,fabric,fabricate,fabricated,fabrication,fabulous,fabulously,face,faced,facedown,faceless,faces,facet,facetious,facets,facial,facials,facilitate,facilitated,facilities,facility,facing,fact,factions,factoid,factor,factories,factoring,factors,factory,facts,factual,faculties,faculty,fad,fade,faded,fades,fading,fads,fail,failed,failing,failings,fails,failure,failures,faint,fainted,fainter,faintest,fainting,fair,fairer,fairest,fairgrounds,fairies,fairly,fairness,fairway,fairy,faith,faithful,faithfulness,faithless,fajita,fake,faked,faking,falafel,falcon,fall,fallacy,fallback,fallen,fallible,falling,fallout,fallow,falls,false,falsely,falsify,faltered,fame,famed,familial,familiar,familiarize,familiars,families,family,famine,famished,famous,famously,fan,fanatic,fanaticism,fanatics,fancied,fanciful,fancy,fanfare,fang,fangs,fanning,fanny,fans,fantabulous,fantasies,fantasize,fantasized,fantasizing,fantastic,fantastically,fantasy,fantasyland,far,faraway,farce,fare,fared,farewell,farewells,farina,farm,farmer,farmers,farmhouse,farmland,farms,farrow,fart,farted,farther,farthing,farts,fascinate,fascinated,fascinating,fascination,fascism,fascist,fashion,fashionably,fashioned,fashions,fast,fastball,fasten,fastened,faster,fastest,fasting,fat,fatal,fatalities,fatality,fate,fates,father,fathered,fatherhood,fathering,fatherly,fathers,fathom,fatigue,fatigues,fatso,fatten,fattening,fattest,fatty,faucet,faucets,fault,faults,faulty,faun,fauna,faux,fave,favor,favorable,favorably,favored,favorite,favorites,favoritism,favors,favour,favours,fawning,fax,faxed,faxes,fay,fear,feared,fearful,fearing,fearless,fears,fearsome,feasible,feast,feasting,feat,feather,feathering,feathers,feats,feature,featured,features,featuring,feces,feckless,fed,federal,federation,fedora,feds,fee,feeble,feed,feedback,feeder,feeders,feeding,feeds,feel,feelers,feeling,feelings,feels,fees,feet,feign,feisty,felicity,feline,fell,fella,f
ellah,fellahs,fellas,fellatio,feller,fellers,felling,fellow,fellows,fellowship,felon,felonies,felonious,felons,felony,felt,female,females,feminine,feminist,feminists,femme,femmes,femur,fen,fence,fences,fencing,fend,fender,fenders,fending,fer,ferment,fern,ferret,ferrets,ferry,fertile,fertility,fertilization,fertilize,fertilizer,fervent,fervor,fess,fester,festering,festival,festivals,festive,festivities,feta,fetal,fetch,fetched,fetish,fetus,fetuses,feud,feudal,feuds,fever,feverish,few,fewer,fey,fez,fiance,fiancee,fiasco,fib,fibber,fibbing,fiber,fiberglass,fibers,fibre,fibrosis,fickle,fiction,fictional,ficus,fiddle,fiddler,fiddling,fidelity,fido,fiefdom,field,fielder,fielding,fields,fieldstone,fiend,fiendish,fiends,fierce,fiercest,fiery,fiesta,fife,fifteen,fifteenth,fifth,fifths,fifties,fiftieth,fifty,fig,fight,fighter,fighters,fighting,fights,figment,figurative,figuratively,figure,figured,figurehead,figures,figurines,figuring,file,filed,files,filet,filibuster,filing,fill,filled,filler,fillets,filling,fillings,fills,filly,film,filmed,filming,filmmaker,filmmakers,filmmaking,films,filter,filtered,filters,filth,filthy,filtration,fin,finagle,final,finale,finalist,finalists,finality,finalize,finalized,finalizing,finally,finals,finance,financed,finances,financial,financially,financing,finch,find,finder,finders,finding,findings,finds,fine,fined,finer,fines,finesse,finessed,finest,finger,fingered,fingernail,fingernails,fingerprint,fingerprints,fingers,fingertips,finish,finished,finishes,finishing,fink,fins,fir,fire,firearms,fireball,fireballs,firebird,firebug,firecracker,firecrackers,fired,firefight,firefighters,fireflies,firehouse,firelight,fireman,firemen,fireplace,fireplaces,firepower,fireproof,fires,firestorm,firewater,firewood,fireworks,firing,firm,firmer,firmly,firms,firs,first,firstborn,firsthand,firstly,fish,fished,fisher,fisherman,fishermen,fishes,fishing,fishnet,fishy,fission,fist,fisted,fistfight,fistful,fists,fit,fitch,fitness,fits,fitted,fitter,fittest,fitting,fittings,five,fiver,fives,fix,fixable,fixated,fixating,fixation,fixed,fixer,fixes,fixing,fixings,fixture,fixtures,fizz,fizzle,fizzled,flack,flag,flagged,flagging,flags,flagship,flail,flailing,flair,flak,flake,flaked,flakes,flaky,flame,flamenco,flames,flaming,flammable,flan,flank,flanking,flanks,flannel,flap,flapjacks,flapped,flapping,flaps,flare,flared,flares,flaring,flash,flashback,flashbacks,flashed,flashes,flashing,flashlight,flashlights,flashy,flask,flat,flatbed,flats,flattened,flatter,flattered,flatterer,flattering,flatters,flattery,flatulence,flatware,flaunt,flaunting,flavor,flavored,flavors,flavour,flavours,flaw,flawed,flawless,flawlessly,flaws,flay,flayed,flea,fleabag,fleas,flecks,fled,fledged,fledgling,flee,fleece,fleeing,fleet,fleeting,flesh,fleshy,fletcher,flew,flex,flexibility,flexible,flexing,flick,flicked,flicker,flickering,flicking,flicks,flier,fliers,flies,flight,flights,flighty,flimsy,flinch,flinching,fling,flinging,flint,flip,flipped,flipper,flippers,flipping,flips,flirt,flirtatious,flirted,flirting,float,floated,floater,floating,floats,flock,floe,flog,flogged,flogging,flood,flooded,floodgates,flooding,floods,floor,floorboard,floorboards,floored,floors,floozy,flop,flopped,flopping,floppy,flops,flora,floral,florence,florin,florist,florists,floss,flossing,flotation,flounder,floundering,flour,flourished,flow,flower,flowering,flowers,flowing,flown,flows,flu,flue,fluff,fluffed,fluffing,fluffy,fluid,fluids,fluke,flung,flunk,flunked,flunkies,flunking,flunky,fluorescent,flurries,flurry,flush,flushed,flustered,flute,flutes,
fluttering,flux,fly,flyboy,flyer,flyers,flying,foal,foam,foaming,foamy,fob,focal,focus,focused,focuses,focusing,focussed,focussing,fodder,foe,fog,fogged,foggiest,foibles,foil,foiled,foisting,fold,folded,folder,folding,folds,foliage,folk,folklore,folks,folksy,follicle,follow,followed,follower,followers,following,follows,folly,fond,fonder,fondest,fondle,fondled,fondling,fondue,font,food,foods,fool,fooled,foolhardy,fooling,foolish,foolishness,foolproof,fools,foot,footage,football,footed,footer,foothold,footing,footman,footnote,footnotes,footprints,footsies,footsteps,footstool,footwear,footwork,fop,for,forage,foraging,foray,forbade,forbid,forbidden,forbidding,forbids,force,forced,forceful,forceps,forces,forcibly,forcing,ford,fore,forearm,foreclosed,foreclosure,forefathers,forego,foregone,forehead,foreign,foreigners,foreman,foremost,forensic,forensics,foreplay,foresaw,foresee,foreseeable,foresight,foreskin,forest,forester,forestry,forests,forethought,foretold,forever,foreword,forfeit,forfeited,forfeits,forgave,forge,forged,forger,forgeries,forgery,forget,forgetful,forgets,forgettable,forgetting,forging,forgive,forgiven,forgiveness,forgives,forgiving,forgo,forgot,forgotten,fork,forked,forklift,forks,form,formal,formaldehyde,formality,formally,format,formation,formations,formed,former,formerly,formidable,forming,forms,formula,formulas,formulate,formulating,fornicating,fornication,forsake,forsaken,forsaking,fort,forte,forth,forthcoming,forthright,forthwith,forties,fortieth,fortified,fortitude,fortnight,fortress,fortuitous,fortunate,fortunately,fortune,fortunes,fortuneteller,forty,forum,forward,forwarded,forwarding,fosse,fossil,fossilized,foster,fostered,fought,foul,fouled,found,foundation,foundations,founded,founder,founding,fountain,fountainhead,fountains,four,fours,foursome,fourteen,fourteenth,fourth,fowl,fowler,fox,foxes,foxhole,foxholes,foxy,foyer,fraction,fractions,fracture,fractured,fractures,fragile,fragment,fragments,fragrance,frail,frailty,frame,framed,framers,frames,framework,framing,franc,franchise,franchises,francs,frank,frankfurter,franklin,frankly,franks,frantic,frantically,frat,fraternal,fraternities,fraternity,fraternization,fraternizing,fraud,frauds,fraught,fraulein,fray,frayed,frazzled,freak,freaked,freaking,freakish,freaks,freaky,freckle,freckling,free,freebie,freed,freedman,freedom,freedoms,freeing,freelance,freelancer,freelancing,freeloader,freeloading,freely,freeman,freer,frees,freeway,freeways,freeze,freezer,freezers,freezes,freezing,freight,freighter,french,frenzy,frequencies,frequency,frequent,frequently,fresh,freshen,freshener,freshening,fresher,freshest,freshly,freshman,freshmen,freshness,freshwater,fret,fretting,friction,fridge,fried,friend,friendless,friendlier,friendliest,friendly,friends,friendship,friendships,fries,frigate,frigging,fright,frighten,frightened,frightening,frightens,frightful,frightfully,frigid,frills,fringe,fringes,frisk,frisky,fritter,fritters,fritz,frivolous,frizzy,fro,frog,frogs,frolic,from,front,frontal,frontier,frontiers,fronting,fronts,frost,frostbite,frosting,frosty,froufrou,frown,froze,frozen,frugal,fruit,fruitcake,fruitful,fruition,fruitless,fruits,fruity,frustrated,frustrates,frustrating,frustration,fry,fryer,frying,fuchsia,fudge,fudged,fudging,fuel,fueled,fueling,fuels,fugitive,fugitives,fugu,fugue,fuhrer,fulcrum,fulfil,fulfill,fulfilled,fulfilling,fulfillment,full,fuller,fullest,fully,fumble,fumbling,fumes,fumigated,fumigating,fumigation,fun,function,functional,functioning,functions,fund,fundamental,fundamentalist,fundamentally,funded,fundi
ng,fundraiser,fundraisers,fundraising,funds,funeral,funerals,fungal,fungi,fungus,funk,funky,funnier,funnies,funniest,funny,fur,furious,furiously,furlong,furnace,furnished,furniture,furrowed,furry,furs,further,furthering,furthermore,furthest,furtive,fury,fuse,fused,fuses,fusilli,fusion,fuss,fussing,fussy,futile,futility,futon,future,futures,futuristic,fuzz,fuzzy,gabbing,gabby,gaby,gadget,gaff,gag,gaga,gage,gagged,gagging,gaggle,gags,gaiety,gain,gained,gainful,gainfully,gaining,gains,gal,gala,galactic,galaxy,gale,galilee,gall,gallant,gallbladder,galleries,gallery,galling,gallivanting,gallon,gallons,galloping,gallows,galore,galoshes,gals,galvanized,gambit,gamble,gambler,gambling,game,gamer,games,gamma,gammy,gams,gamut,gander,gang,ganged,ganging,gangland,gangly,gangrene,gangs,gangster,gangsters,gangway,ganja,gap,gaps,gar,garage,garages,garb,garbage,garcon,garden,gardener,gardeners,gardenias,gardening,gardens,gargantuan,gargling,gargoyle,gargoyles,garibaldi,garish,garland,garlic,garment,garments,garner,garnet,garnish,garrison,garter,garters,garth,gas,gasbag,gases,gasket,gaslight,gasoline,gasp,gasped,gasping,gassed,gasses,gassy,gastric,gat,gate,gated,gatehouse,gatekeeper,gates,gateway,gather,gathered,gathering,gatherings,gathers,gator,gaudy,gauge,gauging,gault,gauntlet,gauze,gave,gavel,gawk,gawking,gaze,gazebo,gazed,gazelle,gazpacho,gear,geared,gearing,gears,gearshift,gecko,ged,gee,geek,geeks,geeky,gees,geese,geez,geezer,geezers,geishas,gel,gelatin,gelato,gels,gem,gemma,gems,gen,gendarme,gender,gene,genealogy,general,generally,generals,generate,generated,generates,generating,generation,generations,generator,generators,generic,generosity,generous,generously,genes,genesis,genetic,genetically,geneticist,genetics,geneva,genie,genital,genitals,genius,geniuses,genoa,genome,genre,gentle,gentleman,gentlemanly,gentlemen,gentler,gently,gents,genuine,genuinely,genus,geographic,geographical,geographically,geography,geological,geologist,geologists,geology,geometric,geometry,geopolitical,geosynchronous,gerbil,gerbils,geriatric,geriatrics,germ,german,germans,germs,gestapo,gestating,gestation,gesture,gestures,get,getaway,gets,getter,getting,getup,geyser,ghastly,ghetto,ghettos,ghost,ghosts,ghoul,ghoulish,giant,giants,gib,gibberish,gibbons,giblets,gibson,giddy,giddyup,gift,gifted,gifts,gig,gigantic,giggle,giggles,giggling,giggly,gigolo,gigs,gilbert,gilded,gill,gills,gimbal,gimlet,gimme,gimmicks,gimmie,gimp,gin,ginger,gingerbread,ginny,ginseng,giraffe,giraffes,girl,girlfriend,girlfriends,girlie,girls,girly,girth,gist,git,give,giveaway,given,givens,giver,givers,gives,giving,gizmo,gizmos,gizzard,glaciers,glad,glade,glades,gladiator,gladiators,gladly,gladstone,glamor,glamorous,glamour,glance,glanced,glances,gland,glands,glare,glares,glaring,glass,glasses,glassware,glassy,glaucoma,glaze,glazed,glazer,gleam,gleaming,glee,glen,glengarry,glib,glide,gliders,gliding,glimmer,glimpse,glimpsed,glint,glitch,glitches,glitter,glittering,gloat,gloating,global,globe,globes,gloom,gloomy,glop,gloria,glorified,glorious,gloriously,glory,gloss,glossy,glove,glover,gloves,glow,glowing,glucose,glue,glued,glues,gluing,glum,glutton,gluttony,glycerin,gnat,gnats,gnaw,gnawing,gnome,gnomes,go,goa,goad,goading,goal,goalie,goals,goat,goatee,goats,gob,gobble,gobbledegook,gobbledygook,gobbles,goblet,goblin,goblins,gobs,god,goddaughter,goddess,goddesses,godfather,godforsaken,godlike,godliness,godly,godmother,gods,godsend,godson,goers,goes,gofer,goggle,goggles,going,goiter,gold,golden,goldenrod,goldfish,golf,golfers,golfing,golly,gondola,gone,goner,go
ners,gong,gonorrhea,gonzo,goo,goober,goobers,good,goodbye,goodbyes,goodie,goodies,goodly,goodman,goodness,goods,goodwill,goody,gooey,goof,goofball,goofing,goofy,googly,goon,gooney,goonie,goonies,goons,goopy,goose,gopher,gor,gore,gorge,gorgeous,gorilla,gory,gosh,gospel,gossip,gossiping,gossips,got,gothic,gotten,gouge,gouged,gouging,goulash,gourd,gourmet,gout,governed,governess,governing,government,governmental,governments,governor,gown,gowns,grab,grabbed,grabbing,grabby,grabs,grace,graced,graceful,gracefully,graces,gracing,gracious,graciously,grad,grade,graded,grader,graders,grades,grading,gradually,graduate,graduated,graduates,graduating,graduation,graffiti,grafts,graham,grail,grain,grainy,gram,grammar,gramps,grams,gran,grand,grandad,grandbaby,grandchild,grandchildren,granddad,granddaddy,granddaughter,granddaughters,grander,grandeur,grandfather,grandfathers,grandkid,grandkids,grandma,grandmother,grandmothers,grandpa,grandparent,grandparents,grandson,grandstand,grange,granger,granite,granny,granola,grant,granted,granting,grants,grape,grapefruit,grapes,grapevine,graphic,graphics,graphite,graphs,grasp,grasped,grasping,grass,grasshopper,grassy,grate,grated,grateful,gratification,gratified,gratifying,gratitude,gratuitous,gratuity,grave,gravel,gravely,graves,gravest,graveyard,graveyards,gravity,gravy,gray,grazed,grazing,grease,greaseball,greased,greasing,greasy,great,greater,greatest,greatly,greatness,greed,greedy,greek,green,greener,greenhouse,greens,greet,greeting,greetings,greets,gremlin,grenade,grenades,grew,grey,greyhound,grid,griddle,gridlock,grief,grievance,grieve,grieves,grieving,griff,griffin,griffins,grift,grifters,grill,grilled,grilling,grills,grim,grime,grimes,grin,grind,grinder,grinding,grinds,grindstone,grinning,grins,grip,gripe,gripes,griping,gripping,grips,grisly,gristle,grits,grizzly,groaning,grocer,groceries,grocery,grog,groggy,groin,groom,groomed,groomer,grooming,grooms,groomsmen,groove,groovy,grope,groped,groping,gross,grossed,grosser,grosses,grossing,grossly,grotesque,grouch,grouchy,ground,groundbreaking,grounded,groundhog,grounding,groundless,grounds,groundskeeper,groundwork,group,groupie,groupies,grouping,groups,grouse,grove,grovel,groveling,groves,grow,growers,growing,growl,grown,grownup,grownups,grows,growth,grub,grubbing,grubby,grubs,grudge,grudges,grudging,gruel,gruesome,grumbling,grumpy,grunge,grungy,grunt,guacamole,guarantee,guaranteed,guaranteeing,guarantees,guard,guarded,guardian,guardians,guardianship,guarding,guardrail,guards,guava,guerilla,guerillas,guerrilla,guerrillas,guess,guessed,guesses,guessing,guest,guesthouse,guests,guff,guidance,guide,guided,guidelines,guides,guiding,guild,guilder,guillotine,guilt,guilty,guinea,guineas,guitar,guitarist,guitars,gulag,gulch,gulf,gull,gullible,gum,gumption,gums,gumshoe,gun,gunfire,gunk,gunman,gunmen,gunned,gunner,gunnery,gunning,gunpoint,gunpowder,guns,gunshot,gunshots,gurgling,gurney,guru,gush,gusher,gushing,gushy,gusto,gut,gutless,guts,gutter,gutters,gutting,guy,guys,guzzling,gym,gyms,gynecologist,gypped,gypsies,gypsy,ha,haberdashery,habit,habitat,habits,hack,hacked,hacker,hackers,hacking,hackman,hacks,hacksaw,had,hades,hadj,hag,haggis,haggling,hags,hah,haha,hail,hailed,hailing,hails,hair,hairball,hairbrush,haircut,haircuts,hairdo,hairdresser,hairdressers,haired,hairless,hairline,hairnet,hairpin,hairs,hairstyles,hairy,hale,half,halfback,halfway,halibut,halitosis,hall,hallelujah,hallmark,hallo,hallowed,halls,hallucinate,hallucinating,hallucination,hallucinations,hallucinogen,hallway,hallways,halo,halothane,halt,halter,halv
es,ham,hamburg,hamburger,hamburgers,hamlet,hammer,hammered,hammering,hammers,hammock,hamper,hams,hamster,hamsters,hamstring,hand,handbag,handbags,handball,handbasket,handbook,handcuffed,handcuffs,handed,handful,handgun,handguns,handheld,handicap,handicapped,handing,handkerchief,handle,handled,handler,handles,handling,handoff,handout,handouts,handprint,handrail,hands,handshake,handsome,handsomely,handsomest,handstand,handwriting,handy,handyman,hang,hanged,hanger,hangers,hanging,hangman,hangnail,hangout,hangouts,hangover,hangovers,hangs,hank,hankering,hankie,hanks,hanky,hansom,hap,happen,happened,happening,happenings,happens,happier,happiest,happily,happiness,happy,haps,harass,harassed,harassing,harassment,harbor,harboring,harbors,harbour,harbouring,hard,hardball,harden,hardened,hardens,harder,hardest,hardly,hardship,hardware,hardwired,hardworking,hardy,hare,harem,harlot,harm,harmed,harmful,harming,harmless,harmonious,harmony,harms,harness,harnessed,harp,harper,harpies,harping,harpoons,harps,harpy,harridan,harriers,harrowing,harry,harsh,harshly,hart,harts,harvest,has,hash,hassle,hassled,hassling,hast,haste,hasten,hastened,hastily,hasty,hat,hatch,hatchback,hatched,hatches,hatchet,hate,hated,hateful,hater,hates,hath,hating,hatred,hats,hatter,haughty,haul,hauled,hauling,hauls,haunt,haunted,haunting,haunts,haute,have,haven,having,havoc,haw,hawk,hawker,hawking,hawks,hay,haycock,hayloft,hayseed,haystack,hayward,haywire,hazard,hazardous,hazards,haze,hazel,hazelnut,hazing,hazy,he,head,headache,headaches,headband,headboard,headdress,headed,header,headfirst,headgear,headhunter,heading,headless,headlights,headline,headliner,headlines,headlining,headlock,headlong,headmaster,headphones,headpiece,headquarters,heads,headset,headsets,headstone,headway,heady,heal,healed,healer,healers,healing,heals,health,healthier,healthiest,healthy,heap,heaped,heaping,heaps,hear,heard,hearing,hearings,hears,hearsay,hearse,heart,heartache,heartbeat,heartbeats,heartbreak,heartbreaker,heartbreaking,heartbroken,heartburn,hearted,heartfelt,heartland,heartless,hearts,heartsick,heartthrob,heartwarming,hearty,heat,heated,heater,heath,heathen,heathens,heather,heating,heats,heave,heaved,heaven,heavenly,heavens,heavier,heaviest,heavily,heaving,heavy,heavyset,heavyweight,heck,heckle,heckled,heckles,heckling,hectic,hector,hedge,hedges,hedging,heed,heel,heeled,heels,heft,hefty,heh,heifer,heigh,height,heighten,heightened,heights,heil,heinie,heinous,heir,heiress,heirloom,heirlooms,heirs,heist,held,helicopter,helicopters,helipad,helix,hell,hellbent,heller,hellfire,hellhole,hellhound,hellish,hello,hells,helluva,helm,helmet,helmets,helms,helo,help,helped,helper,helpers,helpful,helping,helpless,helplessly,helplessness,helps,hem,hemisphere,hemline,hemlines,hemlock,hemoglobin,hemolytic,hemorrhaging,hemorrhoid,hemorrhoids,hemp,hen,hence,henchman,henchmen,henhouse,henry,hens,hep,hepatitis,her,herald,herb,herbal,herbs,hercules,herd,herding,herds,here,hereafter,hereby,hereditary,herein,heresy,heritage,hermaphrodite,hernia,hernias,herniated,hero,heroes,heroic,heroics,heroine,herring,hers,herself,hertz,hesitant,hesitate,hesitated,hesitates,hesitating,hesitation,hessian,hetero,heterosexual,hex,hexes,hey,heyday,hi,hiatus,hibernating,hibernation,hibiscus,hic,hiccup,hiccups,hick,hickey,hickory,hicks,hid,hidden,hide,hideaway,hideous,hideously,hideout,hides,hiding,hierarchy,hieroglyphs,high,highball,higher,highest,highland,highlands,highlight,highlighted,highlighters,highlights,highly,highness,highs,hightail,highway,highways,hijacking,hijinks,hike,hiked,hik
er,hikers,hiking,hilarious,hilarity,hill,hillbillies,hillbilly,hillcrest,hills,hillside,hilltop,hilt,him,himself,hind,hindrance,hindsight,hinges,hint,hinted,hinting,hints,hip,hippest,hippie,hippies,hippo,hippopotamus,hippos,hips,hire,hired,hires,hiring,his,hiss,hissed,hisself,hisses,hissing,historian,historic,historical,historically,histories,history,hit,hitch,hitched,hitchhike,hitchhiker,hitchhikers,hitchhiking,hitching,hits,hitter,hitters,hitting,hive,hives,hm,hmm,ho,hoagie,hoarding,hoarse,hoax,hoaxes,hob,hobbies,hobbit,hobbits,hobble,hobbling,hobby,hobo,hoboes,hock,hockey,hocking,hocks,hocus,hoe,hoedown,hoes,hog,hogan,hogging,hogs,hogwash,hoist,hoisted,hoisting,hokey,hold,holden,holder,holders,holding,holdings,holds,holdup,hole,holed,holes,holiday,holidays,holier,holies,holiest,holiness,holing,holland,hollandaise,holler,hollering,hollers,hollow,hollowed,holly,holocaust,hologram,holographic,holster,holt,holy,homage,hombre,hombres,home,homebody,homeboy,homeboys,homecoming,homegrown,homeless,homely,homemade,homemaker,homeopathic,homeowners,homer,homeroom,homers,homes,homesick,homestead,hometown,homework,homey,homicidal,homicide,homicides,homily,homing,hon,honda,honed,honest,honestly,honesty,honey,honeymoon,honeymooners,honeymooning,honeymoons,honeysuckle,hong,honk,honks,honky,honor,honorable,honorably,honorary,honored,honoring,honors,honour,honourable,honouring,honours,hooch,hood,hooded,hoodlums,hoodwinked,hoof,hook,hooked,hooker,hookers,hooking,hooks,hookup,hooky,hooligan,hooligans,hoop,hoopla,hoops,hooray,hoosegow,hoot,hootenanny,hooter,hooters,hooves,hop,hope,hoped,hopeful,hopefully,hopeless,hopelessly,hopelessness,hopes,hoping,hopped,hopper,hopping,hoppy,hops,hora,horde,hordes,horizon,horizons,horizontal,hormonal,hormone,hormones,horn,horned,hornet,hornets,horns,horny,horoscope,horoscopes,horrendous,horrible,horribly,horrid,horrific,horrified,horrifying,horror,horrors,horse,horseback,horsehair,horseman,horsemen,horsepower,horseradish,horses,horseshit,horsey,horsing,horticulture,hose,hosed,hoses,hosing,hospice,hospitable,hospital,hospitality,hospitalized,hospitals,host,hostage,hostages,hosted,hostel,hostess,hostile,hostiles,hostilities,hostility,hosting,hosts,hot,hotbed,hotcakes,hotdog,hotdogs,hotel,hotels,hothead,hotheaded,hothouse,hotline,hots,hotshot,hotter,hottest,hound,hounded,hounding,hounds,hour,hourglass,hourly,hours,house,housebroken,housecleaning,housed,houseguest,houseguests,household,households,housekeeper,housekeeping,houseman,houses,housewarming,housewife,housewives,housing,hovel,hover,hovercraft,hovering,how,howdy,howe,however,howl,howling,hows,hoy,hub,hubbub,hubby,hubcaps,hubris,huck,huckleberry,huckster,huddle,huddled,hue,huffed,huffing,huffy,hug,huge,hugest,hugged,hugger,huggers,hugging,hugs,huh,hula,hulk,hulking,hull,hullo,hum,human,humane,humanitarian,humanity,humankind,humanly,humanoid,humans,humble,humbled,humbling,humbly,humbug,humdinger,humid,humidifier,humidity,humidor,humiliate,humiliated,humiliates,humiliating,humiliation,humiliations,humility,hummed,hummer,humming,hummus,humor,humored,humoring,humorless,humour,hump,humpback,humped,humph,humping,humps,hums,humus,hun,hunch,hunchback,hunched,hunches,hundred,hundreds,hundredth,hung,hunger,hungover,hungry,hunh,hunk,hunker,hunks,hunky,huns,hunt,hunted,hunter,hunters,hunting,hup,hurdles,hurl,hurley,hurling,hurrah,hurray,hurricane,hurricanes,hurried,hurry,hurrying,hurst,hurt,hurtful,hurting,hurtling,hurts,husband,husbands,hush,hushed,husk,huskies,husks,husky,hussy,hustle,hustler,hut,hutch,huts,huzzah,hybrid,hybrids,hy
dra,hydrate,hydrated,hydraulic,hydraulics,hydrochloride,hydrogen,hydrolase,hyenas,hygiene,hygienic,hygienist,hymn,hymns,hype,hyped,hyper,hyperactive,hyperbole,hyperspace,hypertension,hyperventilate,hyperventilating,hyphen,hypnosis,hypnotic,hypnotize,hypnotized,hypo,hypochondriac,hypocrisy,hypocrite,hypocrites,hypocritical,hypodermic,hypotensive,hypotenuse,hypothermia,hypothetical,hypothetically,hysterectomy,hysteria,hysteric,hysterical,hysterically,hysterics,iambic,ibuprofen,ice,iceberg,icebergs,icebox,icebreaker,iced,icehouse,iceman,ich,icicle,icicles,icing,ick,icky,icon,icons,icy,id,idea,ideal,idealist,idealistic,idealized,ideally,ideals,ideas,identical,identifiable,identification,identified,identifies,identify,identifying,identities,identity,ideology,idiocy,idiom,idiosyncrasies,idiot,idiotic,idiots,idle,idling,idly,idol,idolized,idols,idyllic,if,iff,iffy,ifs,igloo,igneous,ignite,ignited,ignition,ignoramus,ignorance,ignorant,ignore,ignored,ignores,ignoring,iguana,iguanas,iliad,ilk,ill,illegal,illegally,illegals,illegible,illegitimate,illicit,illness,illnesses,illogical,ills,illuminate,illuminating,illumination,illusion,illusions,illustrate,illustrated,illustrates,illustration,illustrious,image,imagery,images,imaginable,imaginary,imagination,imaginations,imaginative,imagine,imagined,imagines,imagining,imam,imbecile,imbeciles,imbecilic,imbedded,imbued,imitate,imitating,imitation,immaculate,immaterial,immature,immaturity,immediate,immediately,immense,immensely,immerse,immersion,immigrants,immigration,imminent,immobile,immobilize,immodest,immoral,immortal,immortality,immortalized,immune,immunity,immutable,imp,impact,impacted,impacts,impaired,impala,impale,impaled,impart,impartial,impassioned,impatience,impatient,impeach,impeached,impeccable,impediment,impediments,impeding,impending,impenetrable,imperative,imperfect,imperfection,imperfections,imperial,imperialist,impersonal,impersonate,impersonated,impersonating,impersonator,impertinent,impervious,impetuous,implant,implanted,implants,implausible,implement,implemented,implicate,implicated,implicates,implicating,implication,implications,implicitly,implied,implies,imploding,implore,imply,implying,impolite,import,importance,important,importantly,imported,importer,importing,imports,impose,imposed,imposing,imposition,impossibility,impossible,impossibly,imposter,impostor,impotence,impotent,impound,impounded,impractical,impregnated,impress,impressed,impressing,impression,impressionable,impressionists,impressions,impressive,imprint,imprisoned,imprisonment,impromptu,improper,improperly,impropriety,improve,improved,improvement,improvements,improves,improving,improvise,improvised,improvising,impudence,impudent,impulse,impulses,impulsive,impulsively,impunity,in,inability,inaccessible,inaccuracies,inaccurate,inactive,inadequacy,inadequate,inadmissible,inadvertently,inappropriate,inappropriately,inaugural,inbound,inbred,inbreeding,incantation,incantations,incapable,incapacitate,incapacitated,incarcerate,incarcerated,incarceration,incarnation,incase,incendiary,incense,incensed,incentive,incentives,inception,incessantly,incest,incestuous,inch,inches,incidence,incident,incidental,incidentally,incidents,incinerate,incinerated,incinerator,incision,incite,inclined,include,included,includes,including,incognito,incoherent,income,incoming,incommunicado,incomparable,incompatible,incompetence,incompetent,incomplete,incomprehensible,inconceivable,inconsiderate,inconsistencies,inconsistent,inconsolable,inconspicuous,inconvenience,inconveniencing,inconvenient,incorporate,
incorrect,incorrectly,incorrigible,increase,increased,increases,increasing,increasingly,incredible,incredibly,increments,incriminate,incriminating,incrimination,incubation,incubator,incubators,incur,incurred,indebted,indecent,indecision,indeed,indefensible,indefinite,indefinitely,indelicate,indentured,independence,independent,indescribable,indestructible,indeterminate,index,indicate,indicated,indicates,indicating,indication,indications,indicative,indicator,indicators,indict,indicted,indictment,indictments,indie,indies,indifference,indifferent,indigenous,indigestion,indignant,indignation,indignities,indigo,indirectly,indiscreet,indiscretion,indiscretions,indispensable,indistinguishable,individual,individuality,individually,individuals,indoor,indoors,induce,induced,inducement,inducing,induction,indulge,indulged,indulgence,indulgent,indulging,industrial,industrialist,industries,industrious,industry,inedible,ineffective,inefficient,inept,inevitability,inevitable,inevitably,inexcusable,inexpensive,inexperience,inexperienced,inexplicable,infallible,infamous,infamy,infant,infantile,infantry,infants,infarction,infatuated,infatuation,infect,infected,infecting,infection,infections,infectious,infects,inference,inferior,inferiority,infernal,inferno,infertile,infestation,infested,infidelities,infidelity,infighting,infiltrate,infiltrated,infiltration,infinite,infinitely,infinity,infirmary,inflame,inflamed,inflammation,inflammatory,inflate,inflated,inflating,inflation,inflexible,inflict,inflicted,inflicting,infliction,influence,influenced,influences,influencing,influential,influenza,influx,info,infomercial,inform,informal,informant,information,informational,informed,informer,informing,informs,infra,infraction,infrared,infrastructure,infuriate,infuriates,infuriating,infused,ingenious,ingenue,ingest,ingested,ingles,ingrates,ingratitude,ingredient,ingredients,inhabit,inhabitants,inhalation,inhale,inhaled,inhaler,inhaling,inherently,inherit,inheritance,inherited,inheriting,inherits,inhibitor,inhibitors,inhuman,inhumane,initial,initially,initials,initiate,initiated,initiating,initiation,initiative,initiatives,inject,injected,injection,injections,injector,injunction,injure,injured,injuries,injury,injustice,ink,inkling,inlaid,inland,inmate,inmates,inn,innards,inner,innermost,inning,innings,innkeeper,innocence,innocent,innocently,innocents,innocuous,innovation,innovative,innuendo,innuendoes,inopportune,input,inquest,inquiries,inquiry,inquisition,inquisitor,inroads,ins,insane,insanely,insanity,insatiable,inscription,inscrutable,insect,insects,insecure,insecurities,insecurity,inseminated,insensitive,insensitivity,inseparable,insert,insertion,inside,insider,insides,insidious,insight,insightful,insights,insignia,insignificant,insincere,insinuated,insinuating,insinuations,insipid,insist,insisted,insistence,insistent,insisting,insists,insolence,insolent,insomnia,inspect,inspected,inspecting,inspection,inspections,inspector,inspectors,inspiration,inspirational,inspire,inspired,inspires,inspiring,install,installation,installed,installing,installment,installments,instance,instances,instant,instantaneous,instantly,instead,instep,instigated,instigator,instill,instilled,instinct,instinctively,instincts,institute,instituted,institution,institutional,institutionalized,institutions,instruct,instructed,instructing,instruction,instructions,instructor,instructors,instrument,instruments,insufferable,insufficient,insulated,insulation,insulin,insult,insulted,insulting,insults,insurance,insure,insured,insuring,insurmountable,insurrect
ion,intact,intake,integral,integrate,integrated,integration,integrity,intellect,intellectual,intellectually,intellectuals,intelligence,intelligent,intelligently,intend,intended,intending,intends,intense,intensely,intensity,intensive,intent,intention,intentional,intentionally,intentions,intently,inter,interact,interacting,interaction,interactive,intercede,intercept,intercepted,intercepting,interchangeable,intercom,intercourse,interest,interested,interesting,interests,interface,interfacing,interfere,interfered,interference,interferes,interfering,interior,interject,interlude,intermediary,intermediate,intermission,intermittent,intern,internal,internally,international,internationally,interning,internist,internment,interns,internship,interplanetary,interpret,interpretation,interpreted,interpreter,interpreting,interpretive,interracial,interrogate,interrogated,interrogating,interrogation,interrogations,interrupt,interrupted,interrupting,interruption,interruptions,interrupts,intersection,interstate,interstellar,intertwined,intervene,intervened,intervening,intervention,interview,interviewed,interviewer,interviewing,interviews,intestinal,intestines,intimacy,intimate,intimated,intimately,intimidate,intimidated,intimidating,into,intolerable,intolerant,intoxicated,intoxicating,intoxication,intravenous,intravenously,intrigue,intrigued,intrigues,intriguing,intro,introduce,introduced,introduces,introducing,introduction,introductions,introductory,intros,intrude,intruded,intruder,intruders,intruding,intrusion,intubate,intuition,intuitive,inundated,invade,invaded,invaders,invading,invalid,invalidate,invaluable,invasion,invasive,invent,invented,invention,inventions,inventive,inventory,inverted,invest,invested,investigate,investigated,investigates,investigating,investigation,investigations,investigative,investigator,investigators,investing,investment,investments,investors,invigorated,invigorating,invincible,invisibility,invisible,invitation,invitations,invite,invited,invites,inviting,invoice,invoices,invoke,invoked,involve,involved,involvement,involves,involving,ion,ions,ipecac,iris,iron,ironclad,ironed,ironic,ironically,ironies,ironing,irons,irony,irrational,irrationally,irregular,irregularities,irrelevant,irreparable,irreplaceable,irresistible,irresponsibility,irresponsible,irrevocably,irrigate,irrigation,irritable,irritate,irritated,irritating,irritation,is,island,islanders,islands,isolate,isolated,isolation,isotopes,issue,issued,issues,issuing,it,itch,itches,itching,itchy,item,items,itinerary,its,itself,ivories,ivory,ivy,jab,jabber,jabbering,jabot,jabs,jack,jackal,jackals,jacked,jackers,jacket,jackets,jacking,jackpot,jacks,jade,jaded,jag,jagger,jags,jaguar,jaguars,jail,jailbird,jailbreak,jailed,jailhouse,jake,jalopy,jam,jamboree,jammed,jammer,jammies,jamming,jams,jane,janitor,janitorial,janitors,japan,jar,jargon,jarring,jars,jasmine,jasper,jaundice,jaunt,java,javelin,jaw,jawbone,jawed,jaws,jay,jaywalking,jazz,jazzed,jealous,jealousy,jean,jeans,jeep,jeepers,jeeps,jeez,jefe,jell,jellies,jelly,jellybean,jellyfish,jenny,jeopardize,jeopardized,jeopardizing,jeopardy,jerk,jerked,jerkin,jerking,jerks,jerky,jerries,jerry,jersey,jerseys,jess,jesse,jest,jester,jesuit,jesuits,jet,jets,jettison,jew,jewel,jeweler,jewelers,jewellery,jewelry,jewels,jews,jezebel,jib,jiff,jiffy,jig,jiggle,jiggling,jiggly,jigsaw,jihad,jill,jilted,jiminy,jimmies,jimmy,jin,jingle,jingles,jingling,jinx,jitters,jittery,jo,job,jobless,jobs,jock,jockey,jockeys,jocks,jockstrap,joe,joes,joey,jog,jogger,jogging,john,johnny,johns,join,joined,joiner,join
ing,joins,joint,joints,joke,joked,joker,jokers,jokes,joking,jollies,jolly,jolt,jones,jordan,joseph,josh,jot,jotted,journal,journalism,journalist,journalistic,journalists,journals,journey,journeyed,journeys,joust,joy,joyful,joyous,joys,joystick,jubilee,judas,judge,judged,judgement,judges,judging,judgment,judgmental,judgments,judicial,judiciary,judo,jug,juggernaut,juggle,juggling,jughead,jugs,jugular,juice,juiced,juices,juicy,jujitsu,juke,jukebox,julep,jumble,jumbled,jumbo,jump,jumped,jumper,jumpers,jumping,jumps,jumpsuit,jumpy,junction,juncture,jungle,jungles,junior,juniors,juniper,junk,junkie,junkies,junky,junkyard,juries,jurisdiction,juror,jurors,jury,jus,just,justice,justifiable,justification,justified,justifies,justify,jut,juvenile,ka,kabob,kahuna,kaiser,kale,kaleidoscope,kane,kangaroo,kaon,kappa,kaput,karaoke,karat,karate,karma,kasha,kashmir,kat,kay,kayak,kayaking,keel,keeled,keen,keep,keeper,keepers,keeping,keeps,keg,kegs,keister,kelly,kelp,kelson,ken,kendo,keno,kent,kept,kern,kerosene,kerry,ketch,ketchup,kettle,key,keyboard,keyboards,keycard,keyhole,keynote,keys,keystone,khaki,khakis,khan,kibble,kibosh,kick,kickback,kickbacks,kickboxing,kicked,kicker,kicking,kicks,kicky,kid,kidder,kiddie,kiddies,kidding,kiddo,kidnap,kidnapped,kidnapper,kidnappers,kidnapping,kidnaps,kidney,kidneys,kids,kielbasa,killjoy,kiln,kilo,kilometer,kilometers,kilos,kilt,kilter,kimono,kin,kind,kinder,kindergarten,kindergartners,kindest,kindling,kindly,kindness,kinds,king,kingdom,kingdoms,kingpin,kings,kink,kinky,kins,kinship,kiosk,kip,kirk,kismet,kiss,kissable,kissed,kisser,kisses,kissing,kissy,kit,kitchen,kite,kites,kitten,kittens,kitties,kitty,kiwi,klutz,klutzy,knack,knapsack,knee,kneecap,kneecaps,kneed,kneel,kneeling,knees,knelt,knew,knickers,knickknacks,knife,knifed,knight,knights,knit,knitted,knitting,knives,knob,knobby,knobs,knock,knockdown,knocked,knocker,knockers,knocking,knockoff,knockout,knocks,knoll,knot,knots,knotted,know,knowing,knowingly,knowledge,knowledgeable,known,knows,knuckle,knucklehead,knuckles,koala,kobo,koi,kooks,kooky,kosher,koss,kraft,kremlin,kris,kroner,kudos,la,lab,label,labeled,labelled,labels,labor,laboratories,laboratory,labored,laborers,labour,labrador,labs,labyrinth,lac,lace,laced,lacerated,laceration,lacerations,laces,lacey,lack,lacked,lackeys,lacking,lackluster,lacks,lacquer,lacrosse,lactic,lactose,lacy,lad,ladder,ladders,laddies,laden,ladies,ladle,lads,lady,ladyship,lag,lager,lagging,lagoon,laid,lair,laird,lake,laker,lakers,lakes,lakeshore,lakeside,lam,lama,lamb,lambda,lambert,lambs,lame,lameness,lament,laminated,lamp,lamppost,lamps,lance,lancer,land,landed,lander,landers,landfall,landfill,landing,landings,landlady,landlord,landlords,landmark,landmarks,lands,landscape,landscapes,landscaping,landslide,lane,lanes,lang,langley,language,languages,languishing,lanky,lantern,lanyard,lap,lapdog,lapel,lapping,laps,lapse,lapsed,lapses,laptop,laptops,lar,larceny,larch,lard,large,largely,larger,largest,lark,lars,larvae,larval,laryngitis,las,lasagna,lasagne,lascivious,laser,lasers,lash,lashed,lashes,lashing,lass,lassie,lasso,last,lasted,lasting,lasts,latch,latched,late,lately,latent,later,latest,latex,lathe,lather,latino,latitude,latrine,latte,latter,lattes,laugh,laughable,laughed,laughing,laughingstock,laughs,laughter,launch,launched,launcher,launches,launching,launder,laundered,laundering,laundry,laura,laureate,laurel,lava,lavender,lavish,lavished,law,lawful,lawfully,lawman,lawmen,lawn,lawnmower,lawns,laws,lawsuit,lawsuits,lawyer,lawyered,lawyers,lax,laxative,laxatives,lay,layaway,layer,la
yers,laying,layman,layout,lays,lazar,laziness,lazy,lead,leader,leaders,leadership,leading,leads,leaf,leaflets,leafs,leafy,league,leagues,leak,leaked,leaking,leaks,leaky,lean,leaned,leaning,leans,leap,leaping,leaps,leapt,lear,learn,learned,learner,learning,learns,learnt,leary,lease,leased,leash,least,leather,leave,leaver,leaves,leaving,lech,lecture,lectured,lectures,lecturing,led,lederhosen,ledge,ledger,ledgers,lee,leech,leeches,leering,leery,leeway,left,leftover,leftovers,lefts,lefty,leg,legacy,legal,legalities,legality,legalizing,legally,legend,legendary,legends,legged,leggy,legion,legions,legislate,legislation,legislative,legislature,legit,legitimacy,legitimate,legs,legwork,lei,leisure,leisurely,lemmings,lemon,lemonade,lemony,lemur,lend,lending,length,lengths,leniency,leno,lens,lenses,lent,lentils,leopard,leopards,leotard,leotards,leper,lepers,leprechaun,lesbian,lesbians,lesions,less,lessee,lessen,lesser,lesson,lessons,lest,let,lethal,lets,letter,letterhead,lettering,letterman,letters,letting,lettuce,leukemia,levee,level,levelheaded,levels,lever,leverage,leveraged,levitate,levitation,levity,levy,lewd,lewis,lex,lez,li,liability,liable,liaison,liane,liar,liars,libation,libel,liberal,liberals,liberated,liberating,liberation,liberties,liberty,libido,librarian,libraries,library,lice,licence,license,licensed,licenses,licensing,lichen,lick,licked,licker,licking,licks,licorice,lid,lido,lidocaine,lids,lie,lied,liege,lien,lier,lies,lieu,lieutenant,lieutenants,life,lifeboats,lifeguard,lifeless,lifelike,lifeline,lifelong,lifer,lifers,lifesaver,lifestyle,lifetime,lifetimes,lift,lifted,lifting,liftoff,lifts,ligament,ligature,light,lighted,lighten,lightened,lightening,lighter,lighters,lightheaded,lighthearted,lighthouse,lighting,lightly,lightness,lightning,lights,like,liked,likelihood,likely,likeness,likes,likewise,liking,lilac,lilacs,lilies,lily,lima,limb,limber,limbo,limbs,lime,limelight,limerick,limes,limestone,limey,limit,limitation,limitations,limited,limitless,limits,limo,limos,limousine,limousines,limp,limping,limps,lin,line,lineage,linear,linebacker,lined,linen,linens,liner,liners,lines,lineup,ling,linger,lingerie,lingering,lingers,lingo,linguistic,lining,link,linked,linking,links,linoleum,lint,lion,lions,lip,liposuction,lipped,lippy,lips,lipstick,liquefy,liquid,liquidate,liquidated,liquidation,liquor,liquored,lira,lis,list,listed,listen,listened,listener,listeners,listening,listens,listing,lists,lit,litany,lite,liter,literacy,literal,literally,literary,literate,literature,liters,lithium,litigation,litigious,litter,littered,littering,little,littlest,live,lived,livelihood,lively,liven,liver,livers,lives,livestock,livid,living,lizard,lizards,llama,lo,load,loaded,loading,loads,loaf,loafers,loan,loaned,loaning,loans,loath,loathe,loathed,loathes,loathing,lob,lobby,lobbying,lobbyist,lobe,lobes,lobotomy,lobster,lobsters,loca,local,locale,localized,locally,locals,locate,located,locating,location,locations,locator,loch,lock,lockdown,locked,locker,lockers,locket,locking,locks,locksmith,lockup,loco,locomotive,locust,locusts,lode,lodge,lodged,lodging,lodgings,loft,lofty,log,logan,logged,logger,logic,logical,logically,logistics,logo,logs,loin,loins,loitering,lollipop,lollipops,lolly,lone,lonelier,loneliest,loneliness,lonely,loner,loners,lonesome,long,longer,longest,longevity,longing,longitude,longs,longtime,loo,loofah,look,looked,looker,looking,lookout,lookouts,looks,loom,looming,loon,looney,loons,loony,loop,looped,loophole,loopholes,loops,loos,loose,loosely,loosen,loosened,loosening,looser,loosing,loot,loot
ing,lop,lopped,lopper,lopsided,loran,lord,lording,lords,lordship,lore,lorry,lose,loser,losers,loses,losing,loss,losses,lost,lot,lothario,lotion,lots,lotte,lottery,lotto,loud,louden,louder,loudest,loudly,louie,louis,lounge,lounging,louse,lousy,lout,louvre,lovable,love,loveable,lovebirds,loved,loveless,lovelier,lovelies,loveliest,lovelorn,lovely,lovemaking,lover,lovers,loves,lovesick,loving,lovingly,low,lowdown,lowe,lower,lowered,lowering,lowers,lowery,lowest,lowlife,lowlifes,lowly,lows,lox,loyal,loyalties,loyalty,lozenges,luau,lube,lubricant,lubricants,lubrication,luce,lucid,lucifer,luck,lucked,luckier,luckiest,luckily,lucky,lucrative,ludicrous,lug,luge,luggage,lukewarm,lull,lullaby,lulu,lum,lumbar,lumber,lumbering,lumberjack,luminous,lump,lumpectomy,lumps,lumpy,luna,lunacy,lunar,lunatic,lunatics,lunch,luncheon,lunches,lunching,lunchroom,lunchtime,lung,lunge,lunged,lungs,lupus,lurch,lure,lured,lures,luring,lurk,lurking,lurks,luscious,lush,lust,luster,lusting,lusts,lutz,luv,lux,luxuries,luxury,lye,lying,lymph,lymphoma,lynch,lynched,lynching,lynx,lyrical,lyrics,ma,mac,macadamia,macaroni,macaroons,macaws,mace,mach,mache,machete,machinations,machine,machinery,machines,machismo,macho,mack,mad,madam,madame,madden,maddening,madder,made,madeleine,mademoiselle,madly,madman,madmen,madness,madonna,madre,madrigal,madwoman,mae,maestro,mafia,mag,magazine,magazines,magdalene,maggot,maggots,magic,magical,magically,magician,magicians,magistrate,magnet,magnetic,magnetism,magnets,magnificence,magnificent,magnificently,magnify,magnitude,magnolia,magnum,mags,maharajah,mahatma,maid,maiden,maidens,maids,mail,mailbox,mailboxes,mailed,mailer,mailing,mailman,mails,maim,maimed,maiming,main,mainframe,mainland,mainline,mainly,maintain,maintained,maintaining,maintains,maintenance,majestic,majesty,major,majored,majoring,majority,majors,make,makeover,makeovers,maker,makers,makes,makeshift,makeup,making,makings,malaria,male,males,malevolent,malfeasance,malfunction,malfunctioning,malfunctions,malice,malicious,maliciously,malign,malignant,mall,mallard,mallet,malls,malnourished,malpractice,malt,mama,mamie,mamma,mammals,man,manage,managed,management,manager,managerial,managers,manages,managing,manatee,manatees,mandarin,mandate,mandates,mandatory,mandrake,maneuver,maneuvered,maneuvering,maneuvers,manger,mangled,mangoes,mangos,mangy,manhandle,manhandled,manhattan,manhole,manhood,manhunt,maniac,maniacal,maniacs,manic,manicure,manicured,manicures,manifest,manifestations,manifested,manifesting,manifesto,manifests,manifold,manila,manipulate,manipulated,manipulates,manipulating,manipulation,manipulations,manipulative,manipulator,mankind,manly,manna,mannequin,mannequins,manner,mannered,manners,manning,mano,manor,manpower,mansion,mansions,manslaughter,mantel,manticore,mantis,mantle,mantra,manual,manually,manufacture,manufactured,manufacturer,manufactures,manufacturing,manure,manuscript,manuscripts,many,map,maple,mapped,mapping,maps,mar,marathon,marble,marbles,marc,marcel,march,marched,marches,marching,mare,margarita,margaritas,marge,margin,marginal,marginally,margins,maria,mariachi,marigold,marijuana,marina,marinate,marine,mariner,mariners,marines,marital,maritime,mark,marked,marker,markers,market,marketing,marketplace,markets,marking,markings,marks,marksmanship,marlin,marmalade,maroon,marooned,marquee,marquis,marriage,marriages,married,marries,marrow,marry,marrying,mars,marsh,marshal,marshall,marshmallow,marshmallows,mart,marten,martial,martin,martini,martinis,martins,martyr,martyrs,marvel,marveling,marvellous,marvelous,marzipan,mas,ma
scara,mascot,mascots,masculine,masculinity,mash,mashed,mask,masked,masking,masks,mason,masons,masquerade,masquerading,mass,massa,massacre,massacred,massacres,massage,massaged,massager,massages,massaging,masses,masseur,masseuse,massive,massively,mastectomy,master,mastered,mastermind,masterminded,masterpiece,masterpieces,masters,mastery,masturbated,masturbating,masturbation,mat,matador,match,matchbook,matched,matches,matching,matchmaker,matchmaking,mate,mated,material,materialistic,materialize,materialized,materials,maternal,maternity,mates,matey,math,mathematically,mathematician,mathematicians,mathematics,matinee,mating,matriarch,matrimonial,matrimony,matrix,matron,mats,matt,matted,matter,mattered,matters,mattress,mattresses,mature,matured,maturity,matzah,matzoh,maudlin,maul,mauled,mausoleum,maverick,mavis,maw,max,maxi,maxim,maximize,maximum,maxwell,may,maya,mayan,maybe,maybes,mayday,mayflower,mayflowers,mayhem,mayo,mayonnaise,mayor,mayoral,mayors,maze,me,mead,meadow,meadows,meager,meal,meals,mealy,mean,meaner,meanest,meanie,meaning,meaningful,meaningless,meanings,meanness,means,meant,meantime,meanwhile,measles,measly,measure,measured,measurements,measures,measuring,meat,meatball,meatballs,meathead,meatloaf,meats,meaty,mecca,mechanic,mechanical,mechanics,mechanism,mechanisms,med,medal,medals,meddle,meddled,meddlesome,meddling,medevac,media,mediator,medic,medicaid,medical,medically,medicare,medicate,medicating,medication,medications,medicine,medicines,medics,medieval,medina,mediocre,mediocrity,meditate,meditating,meditation,mediterranean,medium,medley,medusa,meet,meeting,meetings,meets,megaphone,mel,melancholy,melding,mellow,mellowed,mellowing,melodrama,melodramatic,melody,melon,melons,melt,meltdown,melted,melting,melts,member,members,membership,membrane,membranes,memento,mementos,memo,memoirs,memorabilia,memorable,memorial,memories,memorize,memorized,memorizing,memory,memos,men,menace,menacing,menage,menagerie,mend,mended,mending,menial,meningitis,menopausal,menopause,menorah,mensa,menstrual,mental,mentality,mentally,mention,mentioned,mentioning,mentions,mentor,menu,menus,meow,mercenaries,mercenary,mercer,merchandise,merchandising,merchant,merciful,mercilessly,mercury,mercy,mere,merely,merger,mergers,merging,meridian,meringue,merit,merits,merl,merle,merlot,mermaid,mermaids,merman,merrier,merrily,merry,mesa,mescaline,mesh,mesmerized,mesmerizing,mesquite,mess,message,messages,messed,messenger,messengers,messes,messier,messing,messy,met,meta,metabolic,metal,metallic,metals,metamorphosis,metaphor,metaphorical,metaphorically,metaphors,metaphysics,meteor,meteorite,meteors,meter,meters,meth,methadone,method,methodical,methodology,methods,meticulous,meticulously,metric,metro,metropolitan,mettle,mew,mezzanine,mi,mice,mick,mickey,micro,microbes,microchips,microfilm,microphone,microphones,microscope,microscopic,microwave,mid,middies,middle,middleman,middleweight,midge,midget,midgets,midland,midlife,midnight,midst,midterm,midterms,midtown,midway,midwestern,midwife,midwives,miffed,miggs,might,mightier,mighty,mignon,migraine,migrate,migration,mike,mikes,mil,mild,mildew,mildly,mile,mileage,miles,milestone,militant,militants,military,militia,milk,milked,milking,milkman,milky,mill,millennia,millennium,miller,millet,milligram,milligrams,millimeter,milling,million,millionaire,millionaires,millions,millisecond,mills,milo,milt,mime,mimes,mimic,mimicking,mimosas,mina,mince,mincemeat,mind,minded,mindful,minding,mindless,minds,mindset,mine,mined,minefield,miner,mineral,minerals,miners,mines,mingle,mini,miniature,mi
nimal,minimalist,minimize,minimum,minimums,mining,minion,minions,miniscule,minister,ministers,ministry,minivan,mink,minks,minnow,minor,minorities,minority,minors,minstrel,mint,mints,minty,minus,minuscule,minuses,minute,minutes,miracle,miracles,miraculous,miraculously,mirage,mirror,mirrors,mirth,mis,misbehave,miscalculated,miscalculation,miscarriage,miscarry,miscellaneous,mischief,miscommunication,misconception,misconceptions,misconstrued,miscreant,misdeeds,misdemeanors,miserable,miserably,misery,misfit,misfits,misfortune,misgivings,misguided,mishandled,mishap,misheard,misinformed,misinterpret,misinterpretation,misinterpreted,misinterpreting,misjudged,mislead,misleading,misled,mismatched,misnomer,misogynistic,misplace,misplaced,misprint,misread,misreading,miss,missed,misses,missile,missiles,missing,mission,missionaries,missionary,missions,missis,misspelled,misspent,misspoke,missus,missy,mist,mistake,mistaken,mistakes,mister,mistletoe,mistook,mistreated,mistress,mistresses,mistrial,mistrust,misty,misunderstand,misunderstanding,misunderstandings,misunderstood,misuse,mite,mites,mitt,mitten,mittens,mitts,mitzvah,mix,mixed,mixer,mixers,mixes,mixing,mixture,mixup,mm,mo,moan,moaning,moans,moat,mob,mobile,mobility,mobilize,mobs,mobster,mocha,mock,mocked,mockery,mocking,mocks,mod,mode,model,modeled,modeling,modelling,models,modem,moderately,moderation,moderator,modern,modest,modesty,modicum,modification,modifications,modified,module,modus,mohair,mohel,moil,moist,moisture,moisturize,moisturizer,mojo,mol,molars,molasses,mold,molded,molding,moldings,molds,moldy,mole,molecular,molecule,molecules,moles,molest,molestation,molested,molester,molesting,moll,mollie,mollusk,molly,moloch,molten,molto,moly,mom,moment,momentarily,momentary,momento,moments,momentum,momma,mommies,mommy,moms,mon,monarchs,monarchy,monastery,monde,mondo,monetary,money,moneybags,moneymaker,moneys,monger,mongoloid,mongoose,mongrel,moniker,monitor,monitored,monitoring,monitors,monk,monkey,monkeys,monks,mono,monogamous,monogamy,monogrammed,monologue,monopolizing,monopoly,monoxide,monsieur,monsignor,monsoon,monster,monsters,monstrous,montage,monte,month,monthly,months,monument,monumental,monumentally,moo,mooch,moocher,mood,moods,moody,moola,moon,moonbeams,moonlight,moonlighting,moonlit,moons,moors,moose,moot,mop,mope,moped,mopes,mopey,moping,mopped,mopping,mops,mor,moral,morale,morales,morality,morally,morals,morbid,more,morello,moreover,morgan,morgue,morgues,morn,morning,mornings,morocco,moron,moronic,morons,morph,morphine,morris,morrow,morse,morsel,mort,mortal,mortality,mortals,mortar,mortars,mortgage,mortgages,mortified,mortifying,mortuary,mosey,mosque,mosquito,mosquitoes,mosquitos,moss,most,mostly,mote,motel,motels,moth,mothballs,mother,motherhood,mothering,motherless,mothers,moths,motif,motion,motions,motivate,motivated,motivates,motivation,motivational,motivations,motive,motives,motley,motor,motorbike,motorcade,motorcycle,motorcycles,motorized,motors,motto,mould,moulin,mound,mounds,mount,mountain,mountaineer,mountains,mountainside,mountaintop,mounted,mounting,mourn,mourned,mourners,mourning,mouse,mousey,mousse,moustache,mousy,mouth,mouthed,mouthful,mouthing,mouthpiece,mouths,mouthwash,mouthy,move,moved,movement,movements,movers,moves,movie,movies,moving,mow,mowed,mower,mowers,mowing,moxie,mu,much,muchacho,muchachos,muck,muckraker,mucous,mucus,mud,muddle,muddy,muff,muffin,muffins,muffled,muffler,mug,mugged,mugger,muggers,mugging,muggings,muggy,mugs,mulberry,mulch,mule,mules,muley,mulled,muller,mullet,mulligan,mulling,multimedia,multimi
llion,multinational,multiple,multiples,multiplex,multiplication,multiplied,multiply,multitude,mum,mumble,mumbled,mumbles,mumbling,mummies,mummy,mumps,mums,munch,munching,munchkin,munchkins,mundane,munitions,mural,murals,murder,murdered,murderer,murderers,murderess,murdering,murderous,murders,murky,murmur,murphy,muscle,muscled,muscles,muscular,muse,muses,museum,museums,mush,mushroom,mushrooms,mushy,music,musical,musicals,musician,musicians,musing,musket,musketeer,musketeers,muskie,muskrat,muss,mussels,must,mustache,mustang,mustangs,mustard,muster,mutant,mutants,mutated,mutating,mute,mutilate,mutilated,mutilation,mutiny,mutt,mutton,mutual,mutually,muumuu,muzzle,my,myriad,myrtle,myself,mysteries,mysterious,mysteriously,mystery,mystic,mystical,mysticism,mystified,mystifying,myth,mythic,mythical,mythological,mythology,myths,na,nacho,nachos,nada,nag,nagged,nagging,nah,nail,nailed,nailing,nails,naive,naivete,naked,nam,name,named,nameless,namely,names,namesake,nametag,naming,nan,nana,nance,nancy,nannies,nanny,nanosecond,nanotechnology,nap,napalm,naphthalene,napkin,napkins,napoleon,napping,nappy,naps,narc,narcissism,narcissist,narcissistic,narcolepsy,narcotic,narcotics,narrative,narrator,narrow,narrowed,narrowing,narrowly,narrows,narwhal,nary,nasal,nastiest,nasty,nation,national,nationally,nationals,nations,native,natives,nativity,natty,natural,naturally,nature,natured,naught,naughty,nausea,nauseated,nauseating,nauseous,nautical,nautilus,naval,navigate,navigating,navigation,navigational,navigator,navy,naw,nay,nazi,nazis,ne,near,nearby,nearest,nearly,neat,neatly,neatness,nebula,necessarily,necessary,necessity,neck,necking,necklace,necklaces,neckline,necks,necromancer,necrosis,nectar,nee,need,needed,neediness,needing,needle,needlepoint,needles,needless,needlessly,needs,needy,nefarious,negate,negative,negatively,negatives,negativity,neglect,neglected,neglectful,neglecting,negligee,negligence,negligent,negligible,negotiable,negotiate,negotiated,negotiating,negotiation,negotiations,negotiator,neighbor,neighborhood,neighborhoods,neighboring,neighborly,neighbors,neighbour,neighbours,neither,nellie,nelly,nelson,neon,neonatal,nephew,nephews,nepotism,nerd,nerds,nerdy,nerve,nerves,nervous,nervously,nervousness,ness,nest,net,nether,network,networking,networks,neural,neurological,neurologist,neurology,neuroses,neurosis,neurosurgeon,neurosurgery,neurotic,neutered,neutral,neutralize,neutralized,neutron,neve,never,nevermore,nevertheless,new,newborn,newborns,newcomer,newcomers,newest,newfound,newly,newlywed,newlyweds,news,newscast,newsletter,newsman,newspaper,newspapers,newsroom,newsstand,newsstands,newt,newton,next,nexus,nibble,nibbling,nice,nicely,nicer,nicest,niceties,niche,nick,nicked,nickel,nickels,nickname,nicknamed,nicknames,nicks,nicotine,niece,nieces,nifty,nigh,night,nightcap,nightclub,nightfall,nightgown,nighthawk,nightingale,nightlife,nightly,nightmare,nightmares,nightmarish,nights,nightstand,nightstick,nighttime,nighty,nihilist,nil,nimrod,nine,nines,nineteen,nineteenth,nineties,ninety,ninja,ninny,ninth,nip,nipped,nipper,nipping,nipple,nipples,nippy,nirvana,nite,nitrogen,nitroglycerin,nitrous,nitty,nitwit,nix,nixed,no,nobility,noble,nobleman,nobodies,nobody,nocturnal,nod,nodded,nodding,node,nodes,nods,nodules,noel,noggin,noh,noir,noise,noises,noisy,nomad,nomadic,nomads,nome,nominal,nominate,nominated,nominating,nomination,nominations,nominee,nominees,nonchalant,none,nonetheless,nonexistent,nonissue,nonnegotiable,nonsense,nonsensical,nonstop,nonviolent,noo,noodle,noodles,noon,noose,nope,nor,nordic,norland,
norm,normal,normalcy,normally,north,northeast,northeastern,norther,northern,northwest,northwestern,nos,nose,nosebleeds,nosed,nosedive,noses,nosey,nosh,nosing,nostalgia,nostalgic,nostril,nostrils,nosy,not,notable,notably,notary,notation,notch,notches,note,notebook,noted,notepad,notes,nothing,nothings,notice,noticeable,noticeably,noticed,notices,noticing,notification,notified,notify,noting,notion,notions,notorious,notoriously,notwithstanding,nougat,noun,nourish,nourishing,nourishment,nous,nouveau,nova,novel,novelist,novels,novelty,novocaine,now,nowadays,nowhere,noxious,nozzle,nu,nuance,nuances,nub,nubile,nuclear,nude,nudes,nudge,nudie,nudist,nudity,nuggets,nuisance,nuke,nuked,nukes,null,numb,number,numbered,numbers,numbing,numbness,numerous,nun,nunnery,nuns,nuptial,nuptials,nurse,nursed,nursery,nurses,nursing,nurtured,nurturing,nut,nutcase,nutcracker,nuthouse,nutmeg,nutrition,nutritional,nutritionist,nutritious,nuts,nutshell,nuttier,nutty,nylon,nylons,nymph,nympho,nymphomaniac,oaf,oak,oaks,oar,oars,oasis,oath,oaths,oatmeal,oats,obese,obey,obeyed,obeying,obi,obits,obituary,object,objected,objecting,objection,objectionable,objections,objective,objectives,objectivity,objects,obligated,obligation,obligations,obligatory,oblige,obliged,oblique,obliterate,obliterated,oblivion,oblivious,obnoxious,oboe,obscene,obscenities,obscenity,obscure,obscured,obscurity,observable,observant,observation,observations,observatory,observe,observed,observer,observers,observing,obsess,obsessed,obsessing,obsession,obsessions,obsessive,obsessively,obsolete,obstacle,obstacles,obstetrician,obstinate,obstructed,obstruction,obtain,obtained,obtuse,obvious,obviously,occasion,occasional,occasionally,occasions,occult,occupancy,occupant,occupants,occupation,occupational,occupations,occupied,occupy,occupying,occur,occurred,occurrence,occurrences,occurs,ocean,oceanographic,oceans,octane,octopus,od,odd,oddball,oddest,oddly,odds,ode,odious,odor,odorless,odyssey,oedipal,of,off,offbeat,offence,offend,offended,offender,offenders,offending,offends,offense,offensive,offer,offered,offering,offerings,offers,office,officer,officers,offices,official,officially,officials,officiate,offing,offs,offset,offshore,offspring,often,oftentimes,ogle,ogling,ogre,ogres,oh,ohm,oho,oil,oiled,oils,oily,oink,ointment,okay,okayed,okeydokey,okra,old,olden,older,oldest,oldie,olds,ole,olfactory,olive,olives,om,omega,omelet,omelette,omelettes,omen,omens,ominous,omitted,omnipotent,on,onboard,once,oncology,oncoming,one,ones,oneself,ongoing,onion,onions,only,onstage,onto,onward,oodles,ooh,oomph,oops,ooze,oozing,op,opal,open,opened,opener,openers,opening,openings,openly,openness,opens,opera,operas,operate,operated,operates,operating,operation,operational,operations,operative,operatives,operator,opinion,opinionated,opinions,opium,opponent,opponents,opportune,opportunist,opportunities,opportunity,opposable,oppose,opposed,opposing,opposite,opposites,opposition,oppress,oppression,oppressive,ops,opted,optic,optimism,optimist,optimistic,optimum,option,optional,options,optometrist,opus,or,oracle,oracles,oral,orally,orange,oranges,orator,orb,orbed,orbing,orbit,orbital,orbiting,orbs,orchard,orchestra,orchestrate,orchestrated,orchestrating,ordeal,order,ordered,ordering,orderlies,orderly,orders,ordinance,ordinarily,ordinary,ordinate,ordinates,ordnance,ore,oregano,organ,organic,organisation,organise,organised,organism,organisms,organization,organizations,organize,organized,organizer,organizing,organs,orgasm,orgasmic,orgasms,orgies,orgy,oriental,orientation,oriented,orienteering,o
rigami,origin,original,originality,originally,originals,originating,origins,orioles,ornament,ornamental,ornaments,ornate,ornery,orphan,orphanage,orphaned,orphans,ort,orthodontist,orthodox,orthopedic,os,ose,ostensibly,ostracized,ostrich,other,others,otherwise,otherworldly,otter,otto,ottoman,ouch,ought,ounce,ounces,our,ours,ourselves,out,outage,outback,outbid,outbound,outbreak,outburst,outcast,outcasts,outcome,outdated,outdid,outdo,outdone,outdoor,outdoors,outdoorsy,outer,outfield,outfit,outfits,outfitted,outgoing,outgrew,outgrow,outgrown,outhouse,outing,outlander,outlandish,outlast,outlaw,outlawed,outlaws,outlet,outlets,outline,outlined,outlines,outlive,outlived,outlook,outnumber,outnumbered,outpatient,outpost,outpouring,outrage,outraged,outrageous,outrageously,outrank,outright,outrun,outs,outset,outside,outsider,outsiders,outskirts,outsmarted,outspoken,outstanding,outvoted,outward,outweigh,outwit,outwitted,oval,ovarian,ovaries,ovation,oven,over,overachiever,overactive,overall,overalls,overanxious,overbearing,overbite,overblown,overboard,overcame,overcast,overcharge,overcome,overcoming,overcompensating,overcooked,overcrowded,overdid,overdo,overdone,overdose,overdosed,overdressed,overdrive,overdue,overestimate,overestimated,overexcited,overflow,overflowing,overgrown,overhaul,overhead,overhear,overheard,overhearing,overheated,overheating,overjoyed,overkill,overlap,overlapping,overload,overlook,overlooked,overlooking,overly,overnight,overnights,overpaid,overpass,overpower,overpriced,overprotective,overqualified,overrated,overreact,overreacted,overreacting,overreaction,override,overrule,overruled,overrun,overseas,oversee,overseeing,oversensitive,oversight,oversized,oversleep,overslept,overstating,overstay,overstayed,overstep,overstepping,overstress,overtaken,overthink,overtime,overtired,overtures,overturned,overview,overweight,overwhelm,overwhelmed,overwhelming,overwhelmingly,overwhelms,overworked,overwrought,ovulating,ow,owe,owed,owes,owing,owl,owls,own,owned,owner,owners,ownership,owning,owns,ox,oxen,oxford,oxygen,oxymoron,oy,oyez,oyster,oysters,ozone,pa,pac,pace,paced,pacemaker,pacer,paces,pachyderm,pacific,pacifier,pacifist,pacing,pack,package,packages,packed,packer,packet,packets,packing,packs,pact,pad,padded,padding,paddle,paddles,paddling,paddock,paddy,padlock,padre,pads,paella,pagan,page,pageant,paged,pager,pagers,pages,paging,pah,paid,pail,pain,pained,painful,painfully,painkiller,painkillers,painless,pains,paint,paintbrush,painted,painter,painters,painting,paintings,paints,pair,paired,pairs,paisley,pajama,pajamas,pal,palace,palaces,pale,paleontologist,paleontology,paler,pales,palette,palm,palmer,palms,palomino,palp,palpable,palpitations,pals,palsy,paltry,pam,pamper,pampered,pampering,pampers,pamphlet,pamphlets,pan,panache,panama,pancake,pancakes,pancreatic,panda,pandering,pandora,pane,panel,panels,panhandle,panic,panicked,panicking,panicky,panics,panned,pans,pansy,pant,pantaloons,pantheon,panther,panties,panting,pantry,pants,panty,pantyhose,pap,papa,paparazzi,papayas,paper,paperback,paperboy,papers,paperweight,paperwork,pappy,paprika,par,para,parable,parabolic,parachute,parachutes,parachuting,parade,paradigm,parading,paradise,paradox,paragon,paragraph,paragraphs,parakeet,paralegal,parallel,parallels,paralysis,paralyze,paralyzed,paralyzing,paramedic,paramedics,parameters,paramilitary,paramour,paranoia,paranoid,paranormal,paraphernalia,parasailing,parasite,parasites,parasitic,paratrooper,paratroopers,parchment,pardner,pardon,pardoned,pardons,pare,parent,parental,parenthood,parenting,paren
ts,parfait,paris,parish,parishioner,parishioners,park,parka,parked,parker,parking,parks,parkway,parliament,parliamentary,parlor,parlors,parlour,parody,parole,paroled,parrot,parrots,parsley,parsons,part,parted,partial,partially,participant,participants,participate,participated,participating,participation,particle,particles,particular,particularly,particulars,partied,parties,parting,partisan,partly,partner,partnered,partners,partnership,partnerships,parton,partridge,parts,party,partying,pas,pass,passable,passage,passages,passageway,passageways,passed,passenger,passengers,passer,passes,passing,passion,passionate,passionately,passions,passive,passkey,passport,passports,password,past,pasta,paste,pasted,pastels,pasties,pastime,pastor,pastrami,pastry,pasts,pasture,pastures,pat,patch,patched,patches,patching,patchouli,pate,patent,patented,patently,patents,paternal,paternity,path,pathetic,pathetically,pathogen,pathological,pathologically,pathologist,pathology,pathos,paths,pathways,patience,patient,patiently,patients,patio,patois,patriarch,patriot,patriotic,patriots,patrol,patrolled,patrolling,patrolman,patrolmen,patron,patronize,patronized,patronizing,patrons,patsy,patten,patter,pattern,patterns,patties,patting,patty,pause,pauses,pave,paved,pavement,pavilion,paving,paw,pawing,pawn,pawning,pawnshop,paws,pax,pay,payable,payback,paycheck,paychecks,payday,paying,payload,payment,payments,payoff,payoffs,payroll,pays,pe,pea,peace,peaceful,peacefully,peacemaker,peacetime,peach,peaches,peachy,peak,peaked,peaks,peanut,peanuts,pear,pearl,pearls,pearly,peas,peasant,peat,pebble,pecan,pecans,peck,pecked,pecker,peckers,pecking,peckish,pecks,pecs,peculiar,pedal,pedaling,pedals,peddle,peddler,peddling,pedestal,pedestrian,pedestrians,pediatric,pediatrician,pediatrics,pedicure,pedicures,pedigree,pedophile,pedro,pee,peed,peeing,peek,peeked,peeking,peeks,peel,peeled,peeling,peels,peep,peepers,peeping,peeps,peer,peering,peerless,peers,pees,peeved,peg,pegged,peignoir,pele,pellet,pellets,pelt,pelting,pelts,pelvic,pemmican,pen,penal,penalize,penalized,penalty,penance,pence,pencil,pencils,pendant,pending,penetrate,penetrated,penetrating,penetration,penguin,penguins,penicillin,peninsula,penitentiary,pennant,penne,penned,pennies,penniless,penny,pens,pension,pensioners,pensions,pentagon,pentagram,penthouse,peon,people,peoples,pep,pepper,peppermint,pepperoni,peppers,per,perceive,perceived,percent,percentage,percentages,percentile,perception,perceptions,perceptive,perch,perchance,perched,percolating,perennial,perfect,perfecting,perfection,perfectionist,perfectly,perfecto,perform,performance,performances,performed,performer,performers,performing,performs,perfume,perfumed,perfumes,perhaps,peril,perils,perimeter,perimeters,period,periodic,periods,peripheral,periphery,periscope,perish,perishable,perished,perjure,perjured,perjury,perk,perks,perky,perm,permanent,permanently,permissible,permission,permit,permits,permitted,permitting,peroxide,perpetrate,perpetrated,perpetrator,perpetrators,perpetual,perpetuating,perry,persecute,persecuted,persecuting,persist,persistence,persistent,persists,persnickety,person,persona,personable,personal,personalities,personality,personalize,personalized,personally,personals,personified,personnel,persons,perspective,perspectives,perspiration,persuade,persuaded,persuasion,persuasive,pertaining,pertains,pertinent,perturbed,peruse,perverse,perversion,pervert,perverted,perverts,pesky,pesos,pessimist,pessimistic,pest,pester,pestering,pesticides,pestilence,pesto,pests,pet,petal,petals,peter,peters,petit,petite,p
etition,petitioner,petitioning,petrified,petrol,petroleum,pets,petticoat,petting,petty,pew,pewter,pfft,phantom,phantoms,pharaoh,pharaohs,pharmaceutical,pharmaceuticals,pharmacist,pharmacy,phase,phased,phases,phasing,pheasant,pheasants,phenomena,phenomenal,phenomenally,phenomenon,pheromones,phew,phi,philandering,philanthropist,philharmonic,philosopher,philosophers,philosophical,philosophies,philosophy,phobia,phobias,phobic,phoebe,phoenix,phone,phoned,phones,phoney,phonies,phony,phooey,phosphate,phosphorous,photo,photocopy,photogenic,photograph,photographed,photographer,photographers,photographic,photographing,photographs,photography,photos,phrase,phrases,physic,physical,physically,physicals,physician,physicist,physicists,physics,physiological,physiologically,physiology,physique,pi,pianist,piano,pianos,piazza,pic,pick,picked,picker,picket,picketing,picking,pickings,pickle,pickled,pickles,pickpocket,pickpockets,picks,pickup,pickups,picky,picnic,picnics,picture,pictured,pictures,picturing,piddles,piddling,pie,piece,pieced,pieces,piecing,pied,pier,pierce,pierced,piercing,pies,piffle,pig,pigeon,pigeons,piggies,piggy,piggyback,pigheaded,piglet,pigs,pigskin,pigsty,pigtails,pike,pilar,pile,piled,piles,pilgrim,pilgrimage,pilgrims,piling,pill,pillage,pillar,pillars,pillow,pillows,pills,pilot,pilots,pimp,pimped,pimping,pimple,pimples,pimply,pin,pina,pinafore,pinata,pinball,pinch,pinched,pinches,pinching,pincushion,pine,pineapple,pineapples,pinecone,pines,ping,pinhead,pinheads,pining,pink,pinkie,pinks,pinky,pinned,pinning,pinochle,pinot,pinpoint,pinpointed,pins,pint,pinto,pints,pioneer,pious,pip,pipe,pipeline,piper,pipes,piping,piqued,piranhas,pirate,pirated,pirates,pis,piss,pissant,pissed,pisses,pissing,pistachio,pistachios,pistol,pistols,piston,pistons,pit,pitch,pitched,pitcher,pitchers,pitches,pitchfork,pitching,pitfalls,pithy,pitied,pitiful,pits,pittance,pitted,pitting,pity,pitying,pivot,pivotal,pixels,pixie,pixies,pixilated,pizza,pizzas,placate,place,placebos,placed,placement,places,placid,placing,plague,plagued,plagues,plaguing,plaid,plain,plainclothes,plainly,plains,plaintiff,plaintiffs,plait,plan,plane,planes,planet,planetarium,planetary,planets,planing,plank,plankton,planned,planner,planners,planning,plans,plant,plantation,plantations,planted,planting,plants,plaque,plasma,plaster,plastered,plastic,plate,plateau,plateaued,plated,platelets,plates,platform,platforms,plating,platinum,platitudes,platonic,platoon,platter,platters,platypus,plausible,play,playa,playbook,playboy,playboys,played,player,players,playful,playground,playgrounds,playhouse,playing,playmate,playoffs,playroom,plays,plaything,playthings,playtime,playwright,plaza,plea,plead,pleaded,pleading,pleadings,pleads,pleas,pleasant,pleasantly,pleasantries,please,pleased,pleaser,pleases,pleasing,pleasure,pleasures,pleasuring,pled,pledge,pledged,pledges,pledging,plenty,plethora,pliers,plight,plop,plot,plots,plotted,plotting,plow,plowed,plowing,ploy,pluck,plucked,plucking,plucky,plug,plugged,plugging,plugs,plum,plumber,plumbers,plumbing,plummet,plummeted,plummeting,plump,plums,plunder,plunge,plunging,plural,plus,pluses,plush,plutonium,pneumonia,poach,poached,poacher,poachers,poaching,pocket,pocketbook,pocketed,pocketful,pockets,pod,podiatrist,podiatry,podium,pods,poem,poems,poet,poetic,poetry,poets,poi,poignant,point,pointe,pointed,pointer,pointers,pointing,pointless,points,pointy,poise,poised,poison,poisoned,poisoning,poisonous,poisons,poke,poked,poker,pokes,pokey,poking,polar,polarity,pole,polecat,poles,police,policeman,policemen,policies,po
licing,policy,polio,polish,polished,polishing,polite,politely,political,politically,politician,politicians,politics,polka,poll,pollack,pollard,polled,pollen,polling,pollock,polls,pollute,polluting,pollution,polo,poltergeist,poltergeists,poly,polyester,polygraph,polymerization,pom,pomegranate,pompoms,pompous,poncho,pond,ponder,ponds,pong,ponies,pontoon,pony,ponytail,pooch,poodle,poodles,poof,poofs,poofy,pooh,pool,pooling,pools,poolside,poop,pooped,poor,poorer,poorest,poorhouse,poorly,pop,popcorn,pope,poppa,popped,popper,poppers,poppet,poppies,popping,poppy,poppycock,pops,popular,popularity,populated,population,populations,porcelain,porch,porcupine,pore,pores,poring,pork,porky,porridge,port,portable,portal,portals,portent,porter,porterhouse,portfolio,portion,portions,portrait,portraits,portray,portrayal,portrayed,portrays,ports,pose,posed,poser,poses,posies,posing,position,positioned,positioning,positions,positive,positively,positives,posse,posses,possess,possessed,possesses,possessing,possession,possessions,possessive,possibilities,possibility,possible,possibly,possum,post,postage,postal,postcard,postcards,posted,poster,posterior,posters,posting,postman,postmark,postmaster,postmortem,postpartum,postpone,postponed,postponement,postponing,posts,posttraumatic,posture,pot,potassium,potato,potatoes,potency,potent,potential,potentially,pothead,pothole,potholes,potion,potions,pots,potsie,potted,potter,pottery,potting,potty,pouch,poultry,pounce,pound,pounder,pounding,pounds,pour,poured,pouring,pours,pout,pouting,poverty,pow,powder,powdered,powders,power,powered,powerful,powerhouse,powering,powerless,powers,pox,practical,practicality,practically,practice,practiced,practices,practicing,practise,practising,practitioner,praetorians,pragmatic,pragmatist,prairie,praise,praised,praises,praising,pralines,pram,prance,prancer,prancing,prank,pranks,prankster,prattling,pray,prayed,prayer,prayers,praying,prays,preach,preached,preacher,preachers,preaching,preachy,preamble,precaution,precautionary,precautions,preceded,precedence,precedent,precedents,preceding,precinct,precious,precipice,precise,precisely,precision,precludes,precocious,preconceived,predator,predators,predatory,predecessor,predecessors,predicament,predict,predictable,predicted,predicting,predictions,predisposed,predisposition,prednisone,preeclampsia,preemptive,preface,prefer,preferable,preferably,preference,preferences,preferred,prefers,prefix,pregnancies,pregnancy,pregnant,prehistoric,prejudice,prejudiced,prejudicial,prelim,preliminary,prelude,premarital,premature,prematurely,premed,premeditated,premier,premiere,premise,premises,premium,premiums,premonition,premonitions,prenatal,preoccupied,prep,preparation,preparations,prepare,prepared,preparing,preposterous,prepped,preppie,prepping,preppy,prerequisite,prerogative,preschool,prescribe,prescribed,prescribes,prescribing,prescription,prescriptions,presence,present,presentable,presentation,presented,presenting,presently,presents,preservation,preservatives,preserve,preserved,preserver,preserves,presets,preside,presided,presidency,president,presidential,presidents,presiding,press,pressed,presses,pressing,pressman,pressure,pressured,pressures,pressuring,prestige,prestigious,presto,presumably,presume,presumed,presuming,presumptuous,pretend,pretended,pretending,pretends,pretense,pretenses,pretentious,pretext,pretrial,prettier,prettiest,pretty,pretzel,pretzels,prevail,prevailed,prevails,prevent,preventative,prevented,preventing,prevention,preventive,prevents,preview,previous,previously,prey,preyed,preying,pr
eys,price,priced,priceless,prices,pricey,prick,pricked,prickly,pricks,pride,prided,priest,priesthood,priests,prim,prima,primal,primaries,primarily,primary,primate,primates,prime,primed,primer,primitive,primo,primordial,primping,prince,princely,princes,princess,princesses,principal,principals,principle,principles,print,printed,printer,printers,printing,prints,prior,priorities,prioritize,prioritizing,priority,priors,prison,prisoner,prisoners,prisons,priss,prissy,privacy,private,privately,privilege,privileged,privileges,privy,prize,prized,prizes,pro,proactive,probability,probable,probably,probate,probation,probationary,probe,probes,problem,problematic,problems,procedural,procedure,procedures,proceed,proceeded,proceeding,proceedings,proceeds,process,processed,processes,processing,procession,processional,processors,proclaimed,proclivities,procrastinate,procrastinating,procrastination,procreate,proctologist,procure,procured,prod,prodded,prodding,prodigal,produce,produced,producer,producers,produces,producing,product,production,productions,productive,productivity,products,prof,profanity,profess,professed,profession,professional,professionalism,professionally,professionals,professor,professors,profile,profiles,profiling,profit,profitable,profits,profound,profoundly,profusely,progeny,prognosis,program,programme,programmed,programmer,programming,programs,progress,progressed,progresses,progressing,progression,progressive,prohibit,prohibited,prohibition,prohibits,project,projected,projectile,projecting,projection,projections,projector,projects,proliferation,prolong,prolonged,prom,promenade,prominent,promiscuous,promise,promised,promises,promising,promo,promote,promoted,promotes,promoting,promotion,promotions,prompt,prompter,prompting,promptly,proms,prone,pronounce,pronounced,pronouns,pronto,pronunciation,proof,proofed,proofing,proofs,prop,propaganda,propane,propelled,propellers,propensity,proper,properly,properties,property,prophecies,prophecy,prophesied,prophet,prophets,prophylactic,proportion,proportional,proportioned,proportions,proposal,proposals,propose,proposed,proposes,proposing,proposition,propositioning,propped,propping,proprietary,proprietor,propriety,props,propulsion,pros,prosciutto,prose,prosecute,prosecuted,prosecuting,prosecution,prosecutor,prosecutorial,prosecutors,prospect,prospective,prospector,prospects,prosper,prosperity,prostate,prosthetic,prostitute,prostitutes,prostitution,protect,protected,protecting,protection,protections,protective,protector,protectors,protects,protein,protest,protestant,protestants,protested,protesters,protesting,protestors,protests,proteus,protocol,protocols,protons,prototype,prototypes,protracted,protruding,proud,prouder,proudest,proudly,prove,proved,proven,provenance,proverb,proverbial,proves,provide,provided,providence,provider,provides,providing,provinces,proving,provision,provisional,provisions,provocation,provocations,provocative,provoke,provoked,provoking,provolone,prowess,prowl,prowler,proximity,proxy,prude,prudence,prudent,prudes,prune,prunes,pruning,pry,prying,psalm,pseudo,psi,psst,psych,psyche,psyched,psychiatric,psychiatrist,psychiatrists,psychiatry,psychic,psychically,psycho,psychoanalysis,psychoanalyze,psychobabble,psychological,psychologically,psychologist,psychologists,psychology,psychopath,psychopathic,psychopaths,psychos,psychosis,psychosomatic,psychotherapist,psychotherapy,psychotic,psychotics,pub,puberty,pubes,pubescent,pubic,public,publically,publication,publications,publicist,publicity,publicly,publish,published,publisher,publishers,publ
ishing,puce,puck,pucker,pudding,puddle,puddles,puff,puffed,puffing,puffs,puffy,pug,puke,puking,pull,pulled,puller,pulling,pulls,pulmonary,pulp,pulpit,pulsating,pulse,pulses,pummel,pump,pumped,pumping,pumpkin,pumps,pun,punch,punched,punches,punching,punchy,punctual,punctuality,punctuation,puncture,punctured,pungent,punish,punished,punishes,punishing,punishment,punishments,punitive,punk,punks,punky,puns,punt,punters,puny,pup,pupil,pupils,puppet,puppeteer,puppets,puppies,puppy,purblind,purchase,purchased,purchases,purchasing,pure,puree,purely,purer,purest,purgatory,purge,purged,purging,purification,puritan,puritanical,puritans,purity,purple,purpose,purposefully,purposely,purposes,purr,purse,pursuant,pursue,pursued,pursuing,pursuit,pursuits,purview,pus,push,pushed,pusher,pushers,pushes,pushing,pushover,pushy,puss,pussycat,put,putrid,puts,putter,putting,putty,puzzle,puzzled,puzzles,puzzling,pygmies,pygmy,pyjamas,pyramid,pyramids,pyre,pyromaniac,pyrotechnics,quack,quacks,quad,quadrant,quahog,quaint,quake,quaker,quaking,qualifications,qualified,qualifies,qualify,qualifying,qualities,quality,qualms,quandary,quantities,quantity,quantum,quarantine,quarantined,quark,quarrel,quarreled,quarry,quart,quarter,quarterback,quarterbacks,quarters,quartet,queasy,queen,queens,quell,queller,query,quest,question,questionable,questioned,questioning,questionnaire,questions,queue,quibble,quiche,quick,quicker,quickest,quickie,quickly,quicksand,quid,quiet,quieter,quietly,quilt,quilting,quilts,quince,quinine,quintessential,quintet,quints,quintuplets,quirk,quirks,quirky,quit,quite,quits,quitter,quitters,quitting,quiver,quivering,quixote,quiz,quizmaster,quizzes,quota,quotation,quote,quoted,quotes,quoth,quoting,rabbi,rabbit,rabbits,rabble,rabid,rabies,raccoons,race,raced,racer,races,racetrack,racial,racing,racism,racist,rack,racked,racket,racketeer,racketeering,racking,racks,racquet,racquetball,racy,radar,radial,radiance,radiant,radiating,radiation,radiator,radical,radically,radio,radioactive,radioed,radiologist,radiology,radios,radish,radishes,radius,raffle,raft,rafting,rag,rage,rages,ragged,raggedy,ragging,raging,rags,ragtime,rah,raid,raided,raider,raiders,raiding,raids,rail,railing,railroad,railroading,railroads,rails,rain,rainbow,raincoat,rained,rainier,raining,rains,rainstorm,rainy,raise,raised,raiser,raisers,raises,raisin,raising,raisins,rajah,rake,raked,rallied,rally,rallying,ralph,ram,ramble,rambling,rambunctious,ramifications,rammed,ramp,rampage,ramrod,ramus,ran,ranch,rancher,ranchers,rancho,rancid,rand,random,randomly,randy,rang,range,ranger,rangers,ranges,ranging,rank,ranking,rankings,ranks,ransack,ransom,rant,ranting,rants,rap,rapid,rapidly,rapids,raping,rapist,rapists,rappers,raptor,raptors,rapture,rare,rarely,rarest,raring,rarity,rascals,rash,rashes,rashly,raspberry,rat,rate,rated,rates,rath,rather,rathole,rating,ratings,ratio,ration,rational,rationalize,rationalizing,rationally,rations,rats,ratted,ratting,rattle,rattled,rattles,rattlesnake,rattlesnakes,rattling,ratty,ravage,rave,raved,raven,ravenous,ravens,ravine,raving,ravings,ravish,ravishing,raw,rawhide,ray,rayed,rays,razor,razors,re,reach,reached,reaches,reaching,reacquaint,reacquainted,react,reacted,reacting,reaction,reactionary,reactions,reactive,reactor,reactors,reacts,read,reader,readers,readily,reading,readings,readout,reads,ready,real,realise,realised,realises,realism,realist,realistic,realistically,realities,reality,realization,realize,realized,realizes,realizing,really,realm,realms,realty,ream,reamed,reap,reaper,reapers,reappear,reappeared,reappe
ars,rear,reared,rearing,rearrange,rearranging,rears,reason,reasonable,reasonably,reasoned,reasoning,reasons,reassemble,reassign,reassigned,reassigning,reassignment,reassurance,reassure,reassuring,reattach,rebate,rebel,rebelling,rebellion,rebellious,rebels,rebirth,reboot,reborn,rebound,rebounds,rebuild,rebuilding,rebuilt,rebuttal,rec,recall,recalled,recalling,recant,recanted,recanting,recap,recapture,recaptured,receding,receipt,receipts,receive,received,receiver,receives,receiving,recent,recently,reception,receptionist,receptive,recess,recession,recharge,recheck,rechecked,recipe,recipes,recipient,reciprocal,recital,recitals,recitation,recite,reciting,reckless,recklessly,recklessness,reckon,reckoned,reckoning,reclaim,reclaiming,recliner,reclining,reclusive,recognise,recognised,recognition,recognizable,recognizance,recognize,recognized,recognizes,recognizing,recollection,recombinant,recommend,recommendation,recommendations,recommended,recommending,recommends,recon,reconcile,reconciled,reconciliation,reconciling,reconnaissance,reconnect,reconnected,reconnecting,reconsider,reconsidered,reconstruct,reconvene,record,recorded,recorder,recorders,recording,recordings,records,recount,recourse,recover,recovered,recovering,recovers,recovery,recreate,recreated,recreating,recreation,recreational,recrimination,recriminations,recruit,recruited,recruiter,recruiting,recruits,rectal,rectangle,rectify,rectory,rectum,recuperate,recuperating,recurring,recuse,recycle,recycles,recycling,red,reddish,redecorate,redecorating,redeem,redeeming,redefine,redemption,redevelopment,redhead,redheads,redial,redid,redirect,redneck,rednecks,redo,redone,redress,reds,redskins,reduce,reduced,reduces,reducing,reduction,redundancies,redundancy,redundant,redwood,ree,reed,reef,reefer,reefs,reek,reeking,reeks,reel,reelected,reelection,reeled,reeling,reels,reenter,reevaluate,reeve,reeves,refer,referee,reference,referenced,references,referencing,referendum,referendums,referral,referrals,referred,referring,refers,refill,refills,refined,refinement,refinery,refining,reflect,reflected,reflecting,reflection,reflective,reflects,reflex,reflexes,refocus,reform,reformed,reforms,refrain,refresh,refreshed,refreshing,refreshments,refrigerated,refrigerator,refrigerators,refueling,refuge,refugee,refugees,refund,refundable,refusal,refuse,refused,refuses,refusing,refute,regain,regained,regaining,regal,regard,regarded,regarding,regardless,regards,regatta,regency,regenerate,regenerated,regeneration,regent,reggae,regime,regimen,regiment,regimental,regimes,regina,region,regional,regionals,register,registered,registering,registrar,registration,registry,regret,regrets,regrettable,regrettably,regretted,regretting,regroup,regular,regularity,regularly,regulars,regulate,regulated,regulation,regulations,regurgitate,rehab,rehabilitate,rehabilitated,rehabilitation,rehash,rehashing,rehearsal,rehearsals,rehearse,rehearsed,rehearsing,reheat,reign,reigning,reigns,reimburse,reimbursed,rein,reincarnated,reindeer,reinforced,reinforcement,reinforcements,reinstate,reinstated,reinstatement,reinstating,reinvent,reinvented,reinventing,reiterate,reject,rejected,rejecting,rejection,rejections,rejects,rejoice,rejoicing,rejuvenate,rejuvenated,rejuvenating,rekindle,rekindled,relapse,relapsing,relate,related,relates,relating,relation,relations,relationship,relationships,relative,relatively,relatives,relativity,relax,relaxants,relaxation,relaxed,relaxes,relaxing,relay,release,released,releases,releasing,relegated,relentless,relentlessly,relevance,relevant,reliability,reliable,reliance,
relic,relics,relief,relies,relieve,relieved,relieving,religion,religious,religiously,relinquish,relinquishing,relish,relive,reliving,reload,relocate,relocation,reluctant,reluctantly,rely,relying,rem,remain,remainder,remained,remaining,remains,remake,remark,remarkable,remarkably,remarked,remarks,remarried,remarry,rematch,remedial,remedied,remedies,remedy,remember,remembered,remembering,remembers,remembrance,remind,reminded,reminder,reminders,reminding,reminds,reminisce,remission,remitting,remnants,remodeled,remodelling,remorse,remote,remotely,removal,remove,removed,remover,removes,removing,renaissance,renal,rename,render,rendered,renders,rendezvous,renegade,renege,reneging,renegotiate,renew,renewal,renewed,renewing,renounce,renovate,renovating,renovation,renovations,renown,renowned,rent,rental,rentals,rented,renting,rents,reopen,reopened,reopening,reorganize,reorganizing,rep,repaid,repaint,repair,repaired,repairing,repairman,repairs,reparations,repartee,repay,repayment,repeal,repeat,repeated,repeatedly,repeating,repellent,repent,repentance,repercussions,repertoire,repetition,repetitious,repetitive,rephrase,replace,replaceable,replaced,replacement,replacements,replacing,replay,replaying,replenish,replica,replicate,replicating,replied,replies,reply,report,reported,reportedly,reporter,reporters,reporting,reports,repository,repossess,represent,representation,representations,representative,representatives,represented,representing,represents,repress,repressed,repression,reprieve,reprimand,reprimanded,reprisal,reprisals,reproach,reprobate,reproduction,reproductive,reprogram,reprogramming,reps,reptile,reptiles,reptilian,republic,republican,republicans,repugnant,repulsive,reputation,repute,reputed,request,requested,requesting,requests,requiem,require,required,requirement,requirements,requires,requiring,requisite,requisition,requisitions,reread,reroute,rerouted,reruns,res,reschedule,rescheduled,rescinded,rescue,rescued,rescuer,rescuers,rescues,rescuing,research,researcher,researchers,researching,reseda,resemblance,resemble,resembles,resembling,resent,resented,resentful,resenting,resentment,resentments,resents,reservation,reservations,reserve,reserved,reserves,reserving,reset,resetting,reshoot,reshoots,residence,residences,residency,resident,residents,resides,residing,residual,residue,resign,resignation,resigned,resigning,resilient,resin,resist,resistance,resistant,resisted,resisting,resolute,resolution,resolve,resolved,resolving,resonance,resort,resorted,resorts,resource,resourceful,resources,respect,respectability,respectable,respected,respectful,respectfully,respecting,respectively,respects,respiration,respirations,respirator,respiratory,respond,responded,responding,responds,response,responses,responsibilities,responsibility,responsible,responsibly,responsive,rest,restart,restaurant,restaurants,restaurateur,rested,restful,resting,restitution,restless,restorative,restore,restored,restoring,restrain,restraining,restraint,restraints,restrict,restricted,restriction,restrictions,restroom,restrooms,restructuring,rests,result,resulted,resulting,results,resume,resumed,resumes,resurfaced,resurrection,retail,retailers,retain,retained,retainer,retaining,retake,retaliate,retaliated,retaliating,retaliatory,retard,retarded,retest,rethink,rethinking,rethought,retina,retinal,retinas,retire,retired,retirement,retiring,retort,retract,retractable,retraction,retreat,retreated,retreating,retreats,retribution,retrieval,retrieve,retrieved,retrieving,retro,retrofit,retrograde,return,returned,returning,returns,reunion,reuni
ons,reunite,reunited,reuniting,rev,revamp,reveal,revealed,revealing,reveals,revelation,revelations,reveling,revels,revenge,revenue,revenues,revere,revered,reverence,reverend,reverently,reversal,reverse,reversed,reversible,revert,reverts,review,reviewed,reviewer,reviewing,reviews,revise,revised,revising,revisions,revisit,revival,revive,revived,reviving,revoke,revoked,revoking,revolting,revolution,revolutionaries,revolutionary,revolutionize,revolutions,revolve,revolved,revolver,revolves,revolving,revulsion,reward,rewarded,rewarding,rewards,rewind,rewrite,rewrites,rewriting,rewritten,rewrote,rex,rhetorical,rhinestone,rhinestones,rhino,rhinoceros,rhyme,rhymed,rhymes,rhythm,rhythms,rialto,rib,ribbed,ribbon,ribbons,ribs,rice,rich,richer,riches,richest,rick,rickety,rickey,ricks,rickshaw,ricochet,rid,riddance,ridden,ridding,riddle,riddled,riddler,riddles,ride,rider,riders,rides,ridge,ridicule,ridiculed,ridiculous,ridiculously,riding,rife,riff,rifle,rifles,rifling,rift,rig,rigged,rigging,right,righteous,righteousness,rightful,rightfully,rightly,righto,rights,righty,rigid,rigor,rigorous,rigs,rile,riled,riley,rim,rin,ring,ringer,ringers,ringing,rings,ringside,rink,rinse,rinsing,rioja,riot,rioting,riots,rip,ripe,ripped,ripper,ripping,ripple,ripples,rippling,rips,rise,risen,rises,rising,risk,risked,risking,risks,risky,risotto,rite,rites,ritter,ritual,ritualistic,rituals,ritz,ritzy,rival,rivalry,rivals,river,riverbank,rivers,riverside,riveted,riveting,riviera,roach,road,roadblock,roadblocks,roadhouse,roadie,roadies,roads,roadster,roadway,roam,roaming,roar,roaring,roast,roasted,roasting,roasts,rob,robbed,robber,robberies,robbers,robbery,robbin,robbing,robbins,robe,robes,robin,robins,robot,robotic,robots,robs,robust,rock,rocked,rocker,rockers,rocket,rockets,rocking,rocks,rocky,rod,rode,rodent,rodents,rodeo,rodman,rods,roger,rogers,rogue,rogues,role,roles,rolf,roll,rolled,roller,rollers,rolling,rolls,rom,roman,romance,romances,romancing,romantic,romantically,romanticize,romeo,romp,romper,romping,roof,roofer,roofs,rooftop,rooftops,rook,rookie,rookies,room,roomful,roomie,rooming,roommate,roommates,rooms,roomy,roost,rooster,roosters,root,rooted,rooting,roots,rope,roped,ropes,roscoe,rose,rosebud,rosebuds,rosebush,rosemary,roses,rosin,roster,rosy,rot,rotary,rotate,rotated,rotates,rotating,rotation,rotisserie,roto,rots,rotted,rotten,rotting,rotunda,rouge,rough,roughage,rougher,roughing,roughly,roughnecks,roughriders,roulette,round,roundabout,rounded,rounding,rounds,rouse,rousing,roust,rousted,route,routed,router,routes,routine,routinely,routines,routing,rover,roving,row,rowan,rowboat,rowdy,rowing,rows,royal,royally,royals,royalties,royalty,rub,rubbed,rubber,rubbers,rubbing,rubbish,rubble,rube,rubes,rubies,rubs,ruby,ruckus,rudder,rude,rudely,rudeness,ruder,rudimentary,rue,ruff,ruffians,ruffle,ruffled,ruffles,rug,rugby,rugged,ruin,ruined,ruining,ruins,rule,ruled,ruler,rulers,rules,ruling,rum,rumba,rumble,rumbling,rumblings,rummaging,rummy,rumor,rumored,rumors,rumour,rumours,rump,rumpled,rumpus,run,runaround,runaway,runaways,rundown,rune,runes,rung,runner,runners,running,runny,runoff,runs,runt,runway,rupture,ruptured,rural,ruse,rush,rushed,rushes,rushing,rust,rusted,rustle,rusty,rut,ruth,ruthless,ruthlessly,rutting,rya,rye,sabbath,sabbatical,sabe,saber,sabers,sabin,sable,sabotage,sabotaged,sabotaging,sac,saccharine,sack,sacks,sacrament,sacred,sacrifice,sacrificed,sacrifices,sacrificial,sacrificing,sacrilege,sad,saddened,sadder,saddest,saddle,saddled,sade,sadist,sadistic,sadly,sadness,safari,safe,safeguard,safeguards
,safely,safer,safes,safest,safety,saffron,saga,sage,sagging,sahib,said,sail,sailboat,sailboats,sailed,sailing,sailor,sailors,sails,saint,saintly,saints,saith,sake,sakes,saki,sal,salaam,salad,salads,salamander,salami,salaries,salary,sale,sales,salesman,salesmen,salesperson,saleswoman,salient,salina,salinas,saline,saliva,sally,salmon,salmonella,salon,saloon,salsa,salt,salted,saltines,saltwater,salty,salutations,salute,saluted,saluting,salvage,salvaged,salvaging,salvation,samaritan,same,sample,sampled,samples,sampling,samurai,sanatorium,sanctimonious,sanction,sanctioned,sanctity,sanctuary,sanctum,sand,sandal,sandals,sandalwood,sandbag,sandbar,sandbox,sanded,sanders,sanding,sandman,sandpaper,sands,sandstorm,sandwich,sandwiches,sandy,sane,sanest,sang,sangria,sanitarium,sanitary,sanitation,sanity,sank,sans,santo,santos,sap,sapiens,sapphire,sapphires,sappy,saps,saran,sarcasm,sarcastic,sarcoidosis,sarcophagus,sardine,sardines,sarge,sark,sashimi,sassy,sat,satanic,satchel,satellite,satellites,satin,satire,satisfaction,satisfactory,satisfied,satisfies,satisfy,satisfying,saturated,saturation,satyr,sauce,saucer,saucers,sauerkraut,saul,sauna,sausage,sausages,savage,savagely,savages,savannah,save,saved,saver,saves,savin,saving,savings,savior,saviour,savor,savored,savoring,savour,savvy,saw,sawdust,sawed,sawing,saws,sawyer,sax,saxophone,say,sayer,saying,sayings,sayonara,says,scab,scabby,scabs,scag,scald,scalding,scale,scaled,scales,scallions,scallop,scallops,scalp,scalped,scalpel,scalper,scalping,scam,scammed,scamming,scamp,scampered,scampi,scams,scan,scandal,scanned,scanner,scanners,scanning,scans,scant,scapegoat,scar,scarce,scarcely,scare,scarecrow,scared,scares,scarf,scarfing,scarier,scariest,scaring,scarlet,scarred,scars,scarves,scary,scat,scatter,scattered,scattering,scavenger,scavenging,scenario,scenarios,scene,scenery,scenes,scenic,scent,scented,scents,scepter,schedule,scheduled,schedules,scheduling,schematics,scheme,schemed,schemes,scheming,schiller,schizo,schizoid,schizophrenia,schizophrenic,schlep,schmo,schmooze,schmoozing,schmuck,schnapps,schnauzer,schnitzel,schnoz,scholar,scholarly,scholarship,scholarships,school,schoolboy,schooled,schoolgirl,schooling,schools,schoolteacher,schoolwork,schooner,science,sciences,scientific,scientist,scientists,scintillating,scissor,scissors,scoff,scolded,scolding,scoliosis,scone,scones,scoop,scooped,scooping,scoops,scoot,scooter,scope,scopes,scoping,scorch,scorched,scorcher,scorching,score,scoreboard,scored,scores,scoring,scorned,scorpion,scorpions,scot,scotch,scotches,scotia,scots,scottie,scoundrel,scoundrels,scoured,scourge,scout,scouted,scouting,scouts,scow,scrabble,scram,scramble,scrambled,scrambler,scrambling,scrap,scrapbook,scrape,scraped,scrapes,scraping,scrapings,scrapped,scrappy,scraps,scratch,scratched,scratches,scratching,scratchy,scrawny,scream,screamed,screamer,screaming,screams,screech,screeching,screen,screened,screening,screens,screw,screwball,screwdriver,screwed,screwing,screws,screwup,screwups,screwy,scribble,scribbled,scribbling,script,scripted,scripts,scripture,scriptures,scroll,scrolls,scrooge,scrotum,scrounge,scrounging,scrub,scrubbed,scrubbing,scrubs,scrunch,scruples,scrutinized,scrutiny,scry,scuba,scud,scuff,scuffle,scullery,sculptor,sculpture,sculptures,scum,scummy,scurry,scurrying,scurvy,scuttled,scuzzy,sea,seaboard,seafood,seagull,seal,sealed,sealing,seals,seam,seaman,seamen,seamless,seams,seamstress,seaplane,sear,search,searched,searches,searching,seared,sears,seas,seascape,seashell,seashells,season,seasonal,seasoned,seasons,seat,seated
,seating,seats,seaweed,sec,secluded,seclusion,second,secondary,seconded,secondly,seconds,secrecy,secret,secretarial,secretaries,secretary,secretive,secretly,secrets,section,sections,sector,sectors,secure,secured,securely,securing,securities,security,sedan,sedate,sedated,sedation,sedative,sedatives,seduce,seduced,seduces,seducing,seduction,seductive,see,seed,seeds,seedy,seeing,seek,seeker,seekers,seeking,seeks,seem,seemed,seemingly,seems,seen,seep,seeping,seer,sees,seesaw,seg,segment,segments,segue,seismic,seize,seized,seizes,seizing,seizure,seizures,seldom,select,selected,selecting,selection,selections,selective,selectman,self,selfish,selfishly,selfishness,selfless,selflessness,sell,seller,selling,sells,seltzer,selves,semantics,semblance,semen,semester,semi,semiautomatic,seminal,seminar,seminars,seminary,semple,sen,senate,senator,senators,send,sender,sending,sendoff,sends,senile,senility,senior,seniors,senor,senora,senorita,sensation,sensational,sense,sensed,senseless,senses,sensibilities,sensibility,sensible,sensing,sensitive,sensitivity,sensor,sensors,sensory,sensual,sensuous,sent,sentence,sentenced,sentences,sentencing,sentient,sentiment,sentimental,sentiments,sentinel,sentinels,sentry,separate,separated,separately,separates,separating,separation,seppuku,septic,septum,sequel,sequence,sequences,sequencing,sequestered,sequined,sequins,ser,sera,serenade,serene,serenity,serge,sergeant,serial,series,serious,seriously,seriousness,sermon,sermons,serotonin,serpent,serum,servant,servants,serve,served,server,servers,serves,service,serviced,services,servicing,serving,servings,servitude,sesame,session,sessions,sesterces,set,setback,setbacks,sets,setting,settings,settle,settled,settlement,settlements,settles,settling,setup,seven,sevens,seventeen,seventeenth,seventh,seventies,seventy,sever,several,severance,severe,severed,severely,severity,sew,sewage,sewed,sewer,sewers,sewing,sewn,sex,sexes,sexier,sexiest,sexism,sexist,sexless,sexual,sexuality,sexually,sexy,sh,sha,shabbily,shabby,shack,shackle,shackled,shackles,shad,shade,shades,shading,shadow,shadowing,shadows,shadowy,shady,shaft,shafted,shag,shagged,shagging,shaggy,shah,shake,shaken,shaker,shakers,shakes,shaking,shaky,shale,shall,shallow,shallows,shalom,shalt,sham,shaman,shambles,shame,shamed,shameful,shameless,shaming,shampoo,shamrock,shanghai,shanghaied,shank,shanks,shape,shaped,shapely,shapes,shaping,shards,share,shared,shareholder,shares,sharing,shark,sharking,sharks,sharp,sharpened,sharpener,sharpening,sharpens,sharper,sharpest,sharply,sharpshooters,shat,shatter,shattered,shattering,shave,shaved,shaven,shaver,shaves,shaving,shaw,shawl,shawn,shay,she,shea,shear,shebang,shed,shedding,sheds,sheen,sheeny,sheep,sheepskin,sheer,sheet,sheets,sheila,sheldrake,shelf,shell,shelled,shellfish,shelling,shells,shelly,shelter,sheltered,shelters,shelve,shelves,shenanigans,shepherd,shepherds,sheriff,sherlock,sherry,shes,shh,shield,shielded,shields,shift,shifted,shifter,shifting,shifts,shifty,shill,shillings,shimmer,shimmering,shimmy,shin,shindig,shine,shines,shingle,shingles,shining,shins,shiny,ship,shipment,shipments,shipped,shipping,ships,shipshape,shipwreck,shipwrecked,shipyard,shirking,shirt,shirtless,shirts,shiv,shiva,shiver,shivering,shivers,shoal,shock,shocked,shocker,shocking,shockingly,shocks,shoddy,shoe,shoehorn,shoelace,shoelaces,shoemaker,shoes,shone,shoo,shook,shoot,shooter,shooters,shooting,shootings,shootout,shoots,shop,shopkeeper,shoplifter,shoplifters,shoplifting,shopped,shopper,shopping,shops,shore,shoreline,shores,short,shortage,shortcake,sho
rtcomings,shortcut,shorted,shorten,shorter,shortest,shorthand,shorthanded,shortly,shortness,shorts,shortsighted,shortstop,shorty,shot,shotgun,shotguns,shots,should,shoulder,shoulders,shout,shouted,shouting,shouts,shove,shoved,shovel,shoveled,shoveling,shovels,shoves,shoving,show,showbiz,showcase,showdown,showed,shower,showered,showering,showers,showgirl,showing,shown,showoff,showroom,shows,showstopper,showy,shrapnel,shred,shredded,shredder,shreds,shrew,shrewd,shriek,shrieking,shrimp,shrine,shrink,shrinkage,shrinking,shrinks,shrivel,shriveled,shroud,shrouds,shrub,shrubbery,shrubs,shrug,shrugged,shrugging,shrugs,shrunk,shrunken,shtick,shuck,shucks,shuffle,shuffled,shuffling,shun,shunned,shunt,shush,shushing,shut,shutdown,shuts,shutters,shutting,shuttle,shuttles,shy,shylock,shyness,si,sibling,siblings,sic,sicced,sick,sicken,sickened,sickening,sickens,sicker,sickest,sickly,sickness,sicko,sickos,side,sidebar,sideboard,sideburns,sidecar,sided,sidekick,sidekicks,sideline,sidelines,sides,sideshow,sidetracked,sidewalk,sidewalks,sideways,sidewinder,siding,sidle,siege,sierra,siesta,sift,sifting,sigh,sighing,sighs,sight,sighted,sighting,sightings,sightless,sights,sigma,sign,signal,signaled,signaling,signals,signature,signatures,signed,significance,significant,significantly,signifies,signify,signifying,signing,signor,signora,signore,signs,sikes,silence,silenced,silences,silent,silicone,silk,silken,silks,silky,sill,silliest,silliness,silly,silva,silver,silverware,silvery,sim,similar,similarities,similarity,similarly,simmer,simony,simp,simple,simpler,simplest,simpleton,simpletons,simplicity,simplify,simplistic,simply,sims,simulate,simulated,simulates,simulation,simulations,simulator,simultaneously,sin,since,sincere,sincerely,sincerest,sincerity,sine,sinful,sing,singe,singed,singer,singers,singing,single,singles,singleton,singling,sings,sinister,sink,sinker,sinking,sinks,sinned,sinner,sinners,sins,sinus,sinuses,sip,siphoning,sipped,sipping,sir,sire,siree,siren,sirens,sirloin,sirree,sirs,sis,sissies,sissy,sister,sisterhood,sisters,sit,sitcom,sitcoms,site,sites,sits,sitter,sitters,sitting,situated,situation,situations,six,sixes,sixpence,sixteen,sixteenth,sixth,sixties,sixty,sizable,size,sizeable,sized,sizes,sizing,sizzle,sizzling,skag,skate,skateboard,skateboards,skated,skater,skaters,skates,skating,skedaddle,skeeters,skeletal,skeleton,skeletons,skeptic,skeptical,skepticism,sketch,sketches,sketching,sketchy,skewed,skewer,skewered,ski,skid,skidded,skids,skied,skier,skies,skiff,skiing,skill,skilled,skillet,skillful,skills,skim,skimmed,skimming,skimp,skimpy,skin,skinned,skinner,skinny,skins,skip,skipped,skipper,skipping,skirmish,skirmishes,skirt,skirts,skis,skit,skittish,skittles,skivvies,skulk,skull,skulls,skunk,sky,skylight,skyrocket,skyscraper,skyscrapers,slack,slacker,slackers,slacks,slam,slammed,slammer,slamming,slams,slander,slanderous,slang,slant,slanted,slap,slapped,slapping,slaps,slash,slashed,slasher,slashing,slate,slated,slater,slaughter,slaughtered,slaughterhouse,slaughtering,slave,slaved,slavery,slaves,slaw,slay,slayed,slayer,slayers,slaying,sleaze,sleazebag,sleazeball,sleazy,sled,sledding,sledge,sledgehammer,sleek,sleep,sleeper,sleepers,sleeping,sleepless,sleepover,sleepovers,sleeps,sleepwalk,sleepwalking,sleepy,sleepyhead,sleet,sleeve,sleeves,sleigh,sleight,slender,slept,sleuth,slew,slice,sliced,slicer,slices,slick,slicker,slid,slide,slider,slides,sliding,slight,slighted,slightest,slightly,slim,slime,slimmer,slimming,slimy,sling,slinging,slings,slingshot,slink,slinking,slinky,slip,slipped,slipper
,slippers,slippery,slipping,slips,slit,slither,slithered,slithering,sliver,slob,slobbering,slogan,slogans,slop,slope,slopes,sloppy,sloshed,slot,slots,slouch,slough,slow,slowed,slower,slowest,slowing,slowly,sludge,slug,slugged,slugger,slugging,sluggish,slugs,slum,slumber,slumlord,slumming,slumped,slung,slur,slurp,slurping,slush,sly,smack,smacked,smackers,smacks,small,smaller,smallest,smart,smartass,smarter,smartest,smarts,smarty,smash,smashed,smashes,smashing,smear,smeared,smearing,smears,smell,smelled,smelling,smells,smelly,smelt,smidgen,smile,smiled,smiles,smiley,smiling,smirk,smirking,smite,smith,smithereens,smithers,smitten,smock,smog,smoke,smoked,smoker,smokers,smokes,smokey,smoking,smoky,smoldering,smooch,smooching,smoochy,smooth,smoother,smoothest,smoothie,smoothly,smorgasbord,smother,smothered,smothering,smudge,smudged,smug,smuggle,smuggled,smuggler,smugglers,smuggling,smugness,snack,snacking,snacks,snag,snagged,snagging,snags,snails,snake,snakebite,snakes,snakeskin,snap,snapped,snapper,snapping,snappy,snaps,snapshot,snapshots,snarky,snarl,snarling,snatch,snatched,snatcher,snatchers,snatches,snatching,snazzy,sneak,sneaked,sneaker,sneakers,sneaking,sneaks,sneaky,sneer,sneeze,sneezed,sneezing,snicker,snickering,snickers,snide,sniff,sniffed,sniffing,sniffles,sniffling,snifter,snip,snipe,sniper,snipers,sniping,snit,snitch,snitches,snivelling,snob,snobby,snook,snoop,snooping,snoopy,snooty,snooze,snore,snores,snoring,snorkel,snorkeling,snort,snorted,snorting,snot,snotty,snout,snow,snowball,snowballed,snowballing,snowballs,snowbank,snowboard,snowed,snowflake,snowflakes,snowing,snowman,snowmen,snowmobile,snowmobiles,snowstorm,snowy,snub,snubbed,snuck,snuff,snug,snuggle,snuggled,snuggles,snuggling,so,soak,soaked,soaking,soap,soapbox,soaps,soapy,soar,soared,soaring,soars,sob,sobbing,sober,sobered,sobering,soberly,sobriety,sobs,soccer,sociable,social,socialism,socialite,socialize,socialized,socializing,socially,society,sociology,sociopath,sociopathic,sock,socked,socket,sockets,socks,sod,soda,sodas,sodding,sodium,sodomy,sofa,sofas,soft,softball,soften,softener,softening,softer,softest,softly,softness,software,softy,soggy,soil,soiled,soiree,sol,solace,solar,solarium,sold,soldier,soldiers,sole,solely,solemn,solenoid,soles,solicit,solicitation,solicited,soliciting,solicitor,solid,solidify,solitaire,solitary,solitude,solo,solution,solutions,solve,solved,solvent,solves,solving,somber,some,somebody,someday,somehow,someone,someplace,somerset,something,sometime,sometimes,someway,somewhat,somewhere,somewheres,son,sonar,sonata,song,songs,songwriter,sonics,sonnet,sonnets,sonny,sonogram,sons,sook,soon,sooner,soonest,soot,soothe,soothes,soothing,soothsayer,sop,sophisticated,sophistication,sophomore,soprano,sopranos,sorbet,sorcerers,sorcery,sordid,sore,sorel,sores,sororities,sorority,sorrel,sorrow,sorrows,sorry,sort,sorted,sorting,sorts,sos,sot,souffle,sought,soul,soulful,soulless,souls,sound,sounded,sounding,soundly,soundproof,sounds,soundstage,soup,soups,soupy,sour,source,sources,sourpuss,sous,souse,south,southbound,southeast,southern,southwest,souvenir,souvenirs,souvlaki,sovereign,sovereignty,soviet,soviets,sow,sowing,sown,sox,soy,soybean,spa,space,spacecraft,spaced,spaces,spaceship,spacey,spacing,spackle,spade,spades,spaghetti,span,spandex,spaniel,spank,spanked,spanking,spans,spar,spare,spared,spareribs,sparing,spark,sparked,sparkle,sparklers,sparkles,sparkling,sparkly,sparks,sparky,sparring,sparrow,spartan,spas,spasm,spasms,spastic,spat,spate,spatial,spatula,spawn,spawned,spaz,speak,speakeasy,speaker,spe
akerphone,speakers,speaking,speaks,spear,spears,special,specialise,specialist,specialists,specialize,specialized,specializes,specializing,specially,specials,specialties,specialty,species,specific,specifically,specifications,specifics,specified,specify,specimen,specimens,speck,specs,spectacle,spectacles,spectacular,spectacularly,spectator,spectators,specter,spectra,spectral,spectre,spectrum,speculate,speculating,speculation,speculations,speculative,speech,speeches,speechless,speed,speedboat,speedily,speeding,speedo,speedometer,speedos,speeds,speedway,speedy,spell,spelled,speller,spelling,spells,spelt,spence,spencer,spencers,spend,spender,spending,spends,spent,sperm,spew,spewing,sphere,sphinx,spice,spices,spicy,spider,spiders,spied,spiel,spies,spike,spiked,spikes,spikey,spiking,spiky,spill,spilled,spilling,spills,spin,spinach,spinal,spindly,spine,spineless,spinner,spinning,spins,spinster,spiny,spiral,spiraling,spirals,spirit,spirited,spirits,spiritual,spirituality,spit,spite,spiteful,spitfire,spits,spitting,spitz,splash,splashing,splashy,splat,splatter,spleen,splendid,splendidly,splendor,spliced,splicing,splint,splinter,splinters,split,splits,splitting,splurge,spoil,spoiled,spoiler,spoiling,spoils,spoilsport,spoke,spoken,spokes,spokesman,spokesperson,sponge,sponges,sponsor,sponsored,sponsoring,sponsors,sponsorship,spontaneity,spontaneous,spontaneously,spook,spooked,spooking,spooks,spooky,spool,spoon,spoonful,spooning,spoons,spores,sport,sporting,sports,sportsmanship,sportswear,sporty,spot,spotless,spotlight,spotlights,spots,spotted,spotter,spotters,spotting,spotty,spouse,spouting,sprain,sprained,sprang,sprawled,spray,sprayed,spraying,sprays,spread,spreading,spreads,spreadsheet,spreadsheets,spree,sprightly,spring,springer,springing,springs,springtime,sprinkle,sprinkled,sprinkler,sprinklers,sprinkles,sprint,sprints,sprite,spritzer,sprouted,sprouting,sprouts,spruce,sprung,spry,spud,spun,spunk,spunky,spur,spurred,spurs,spurt,sputnik,spy,spying,squabble,squad,squadron,squads,squall,squalor,squander,squandered,square,squared,squarely,squares,squaring,squash,squashed,squashing,squat,squatter,squatters,squatting,squaw,squawk,squawking,squeak,squeaking,squeaks,squeaky,squeal,squealed,squeegee,squeeze,squeezed,squeezes,squeezing,squid,squiggle,squiggly,squinting,squire,squirm,squirrel,squirrels,squirt,squirts,squish,squished,squishing,sri,stab,stabbed,stabbing,stability,stabilize,stabilized,stabilizing,stable,stables,stack,stacked,stacks,stadium,staff,staffed,staffer,staffers,stag,stage,staged,stages,stagger,staggered,staggering,staggeringly,staging,stagnant,stain,stained,stainless,stains,stair,staircase,stairs,stairway,stairwell,stake,staked,stakeout,stakeouts,stakes,staking,stale,stalemate,stalk,stalked,stalker,stalkers,stalking,stalks,stall,stalled,stalling,stallion,stamina,stammering,stamp,stamped,stampede,stamper,stamps,stance,stand,standard,standardized,standards,standby,standing,standish,standoff,standpoint,stands,standstill,standup,stang,stanza,staple,stapled,stapler,star,starboard,stardom,stardust,stare,stared,stares,staring,stark,starlet,starlets,starling,starred,starring,starry,stars,starship,start,started,starter,starters,starting,startle,startled,startling,starts,startup,starvation,starve,starved,starving,stash,stashed,stasis,stat,state,stated,statehood,stately,statement,statements,stateroom,states,stateside,statesmen,statewide,static,stating,station,stationary,stationed,stationery,stations,statistic,statistical,statistically,statistics,stats,statuary,statue,statues,stature,status,statute,
statutes,staunch,stave,stay,stayed,staying,stays,stead,steadfast,steadily,steady,steak,steaks,steal,stealer,stealing,steals,stealth,stealthy,steam,steamed,steamer,steaming,steamroll,steamroller,steamy,steed,steel,steely,steep,steer,steerage,steered,steering,stein,stella,stellar,stem,stemmed,stems,stench,steno,stenographer,step,stepfather,stepmother,steppe,stepped,stepping,steps,stepson,stereo,stereotype,stereotypes,sterile,sterilize,sterilized,sterling,stern,sterner,steroid,steroids,stethoscope,stew,stewardess,stewardesses,stewards,stewed,stick,sticker,stickers,sticking,stickler,sticks,stickup,sticky,stiff,stiffer,stifle,stifler,stifling,stigma,stigmata,stiles,stiletto,stilettos,still,stillness,stills,stilts,stimulated,stimulating,stimulation,stimuli,stimulus,sting,stinger,stinging,stingray,stings,stingy,stink,stinking,stinks,stinky,stint,stipulate,stipulated,stipulates,stipulation,stir,stirred,stirring,stirs,stitch,stitched,stitches,stitching,stock,stockbroker,stockbrokers,stocked,stockholder,stockholders,stocking,stockings,stockpile,stocks,stocky,stodgy,stoic,stoke,stoked,stokes,stole,stolen,stomach,stomachache,stomachs,stomp,stomped,stomper,stomping,stone,stoned,stoner,stones,stonewalled,stoney,stony,stood,stooge,stool,stoolie,stools,stoop,stooped,stooping,stop,stopped,stopping,stops,stopwatch,storage,store,stored,storeroom,stores,stories,storing,stork,storm,stormed,storming,storms,stormy,story,storybook,stove,stow,stowaway,stowaways,stowed,straddle,straddling,straight,straightaway,straighten,straightened,straightening,straightforward,straights,strain,strained,straining,strains,strait,straitjacket,straits,strand,stranded,strange,strangely,strangeness,stranger,strangers,strangest,strangle,strangled,stranglehold,strangler,strangling,strangulation,strap,strapless,strapped,strapping,straps,strategic,strategically,strategies,strategist,strategize,strategizing,strategy,straw,strawberries,strawberry,straws,stray,streak,streaks,stream,streaming,streamlined,streams,street,streetcar,streetlights,streets,streetwalker,strength,strengthen,strengths,strenuous,strenuously,strep,stress,stressed,stresses,stressful,stressing,stretch,stretched,stretcher,stretches,stretching,strewn,stricken,strict,strictly,stride,strides,strife,strike,strikes,striking,strikingly,string,stringing,strings,strip,stripe,striped,stripes,striping,stripped,stripper,strippers,stripping,strips,striptease,strive,striving,strobe,stroke,strokes,stroll,stroller,strolling,strolls,strong,strongbox,stronger,strongest,stronghold,strongly,struck,structural,structure,structured,struggle,struggled,struggles,struggling,strummer,strumpet,strung,strut,struts,strutting,strychnine,stub,stubbed,stubble,stubborn,stubs,stuck,stud,studded,student,students,studied,studies,studio,studios,studious,studly,studs,study,studying,stuff,stuffed,stuffing,stuffs,stuffy,stumble,stumbled,stumbles,stumbling,stump,stumped,stumper,stumps,stun,stung,stunk,stunned,stunning,stunningly,stunt,stunted,stuntman,stunts,stupendous,stupid,stupider,stupidest,stupidity,stupidly,stupor,sturdy,sturgeon,stutter,stuttering,sty,style,styled,styles,stylings,stylish,stylist,stymied,suave,sub,subatomic,subbasement,subbing,subcommittee,subconscious,subconsciously,subdued,subdural,subject,subjected,subjecting,subjective,subjects,subjugation,sublet,subletting,sublevel,sublimating,sublime,submarine,submarines,submersible,submissive,submit,submitted,submitting,subordinate,subpoena,subpoenaed,subpoenas,subscription,subscriptions,subsequently,subservient,subsided,subsidiary,subsidies,subsidize,
subsidy,substance,substances,substantial,substantially,substantiate,substantive,substation,substitute,substituting,subterfuge,subterranean,subtext,subtitle,subtitled,subtitles,subtle,subtlety,subtly,suburb,suburban,suburbia,suburbs,subversive,subvert,subway,subways,subzero,succeed,succeeded,succeeds,success,successes,successful,successfully,succession,succinct,succotash,succubus,succulent,succumb,succumbed,such,suck,sucked,sucker,suckered,suckers,sucking,sucks,suction,sudden,suddenly,suds,sue,sued,suede,sues,suffer,suffered,suffering,suffers,suffice,sufficient,sufficiently,suffocate,suffocated,suffocating,suffocation,sugar,sugarcoat,sugarplum,sugars,sugary,suggest,suggested,suggesting,suggestion,suggestions,suggestive,suggests,suicidal,suicide,suicides,suing,suit,suitable,suitcase,suitcases,suite,suited,suites,suitor,suitors,suits,sulfur,sulk,sulking,sullen,sultan,sultry,sum,summarily,summarize,summary,summation,summed,summer,summers,summertime,summit,summon,summoned,summoning,summons,sumo,sump,sumptuous,sums,sun,sunbathing,sunblock,sunburn,sundae,sundaes,sundown,sundress,sunflower,sunflowers,sung,sunglasses,sunk,sunken,sunless,sunlight,sunning,sunny,sunrise,sunroom,sunscreen,sunset,sunsets,sunshine,sunspots,sunstroke,suntan,sup,super,superb,supercollider,superego,superficial,superfluous,superhero,superheroes,superintendent,superior,superiors,superman,supermarket,supermarkets,supermodel,supernatural,supernova,superpower,superpowers,superstar,superstars,superstition,superstitious,supervise,supervised,supervising,supervision,supervisor,superwoman,supper,suppertime,supple,supplement,supplements,supplied,supplier,supplies,supply,support,supported,supporter,supporting,supportive,supports,suppose,supposed,supposedly,supposition,suppress,suppressed,suppression,supremacy,supreme,supremely,sure,surely,surf,surface,surfaced,surfaces,surfboard,surfed,surfer,surfers,surfing,surge,surgeon,surgeons,surgeries,surgery,surgical,surgically,surging,surly,surname,surpass,surpassed,surplus,surprise,surprised,surprises,surprising,surprisingly,surreal,surrender,surrendered,surrendering,surrogate,surround,surrounded,surrounding,surroundings,surrounds,surveillance,survey,survival,survive,survived,survives,surviving,survivor,survivors,susceptible,sushi,suspect,suspected,suspects,suspend,suspended,suspenders,suspending,suspense,suspension,suspicion,suspicions,suspicious,suss,sustain,sustained,sustaining,sustenance,sutra,sutures,swab,swabs,swaddling,swallow,swallowed,swallowing,swallows,swam,swami,swamp,swamped,swamps,swan,swank,swanky,swans,swap,swapped,swapping,swarm,swarming,swastika,swat,swatch,swatches,swatting,sway,swayed,swaying,swear,swearing,swears,sweat,sweater,sweaters,sweating,sweatpants,sweats,sweatshirt,sweatshirts,sweatshop,sweatshops,sweaty,swede,swedes,sweep,sweeper,sweeping,sweeps,sweepstakes,sweet,sweetbreads,sweeter,sweetest,sweetheart,sweethearts,sweetie,sweetly,sweetness,sweets,swell,swelled,swelling,swells,sweltering,swept,swerve,swerving,swift,swig,swill,swilling,swim,swimmer,swimmers,swimming,swimmingly,swims,swimsuit,swimsuits,swindled,swine,swing,swingers,swinging,swings,swipe,swiped,swiping,swirl,swirling,swirly,swiss,switch,switchblade,switched,switcheroo,switches,switching,switchman,swivel,swizzle,swollen,swooning,swoop,swooped,swooping,swoops,sword,swordfish,swords,swore,sworn,swung,sycamore,sycophant,sykes,syllable,syllabus,symbiote,symbiotic,symbol,symbolic,symbolically,symbolism,symbolize,symbolizes,symbols,symmetrical,sympathetic,sympathies,sympathize,sympathy,symphony,symptom,sympto
matic,symptoms,synagogue,synapses,synaptic,sync,synch,synchronicity,synchronize,synchronized,syndicate,syndicated,syndrome,syne,synergy,synonymous,synthesis,synthesize,synthetic,syphilis,syphon,syringe,syrup,system,systematic,systematically,systemic,systems,systolic,ta,tab,tabby,table,tableau,tablecloth,tablecloths,tables,tablespoon,tablet,tabloid,tabloids,tabs,tach,tachycardia,tacit,tack,tacked,tackle,tackled,tackles,tackling,tacks,tacky,taco,tacos,tact,tactful,tactic,tactical,tactics,tactile,tad,tadpole,tae,taffeta,taffy,tag,tagged,tagging,tags,tail,tailed,tailing,tailor,tailored,tailors,tails,tailspin,taint,tainted,taj,take,takedown,taken,takeoff,takeout,takeover,taker,takers,takes,takin,taking,talcum,tale,talent,talented,talentless,talents,tales,talisman,talk,talkative,talked,talker,talkie,talking,talks,talky,tall,taller,tallest,tallied,tally,talons,tamales,tambourine,tame,tammy,tamper,tampered,tampering,tampon,tampons,tan,tandem,tangent,tangerine,tangible,tangle,tangled,tangling,tango,tank,tanked,tanker,tankers,tanking,tanks,tanned,tanner,tanning,tantric,tantrum,tantrums,tap,tapas,tape,taped,tapered,tapes,tapeworm,taping,tapioca,tapped,tapping,tar,tarantulas,tardiness,tardy,target,targeted,targeting,targets,tarmac,tarnish,tarnished,tarot,tarp,tarragon,tarred,tart,tartar,tarts,tarzan,tas,task,tasks,tassels,taste,tasted,tasteful,tastes,tasting,tasty,tat,tate,tater,tattered,tattle,tattoo,tattooed,tattoos,tau,taught,taunt,taunted,taunting,taunts,taut,tavern,tawdry,tax,taxed,taxes,taxi,taxicab,taxidermist,taxing,taxis,taxpayer,taxpayers,tea,teach,teacher,teachers,teaches,teaching,teacup,teal,team,teamed,teaming,teammate,teammates,teams,teamsters,teamwork,teapot,tear,tearful,tearing,tears,teary,teas,tease,teased,teasing,teaspoon,teaspoons,technical,technicalities,technicality,technically,technician,technicians,technique,techniques,technological,technologically,technologies,technology,ted,teddy,tedious,tee,teed,teeming,teen,teenage,teenaged,teenager,teenagers,teens,teensy,teeny,teenybopper,teetering,teeth,telegram,telegrams,telegraph,telekinesis,telekinetic,telemarketing,telemetry,telepathic,telepathically,telepathy,telephone,telephones,teleport,teleportation,telescope,telescopic,telethon,televised,television,telex,tell,teller,tellers,telling,tells,telly,temp,temper,temperament,temperamental,temperature,temperatures,tempered,tempers,tempest,temple,tempo,temporarily,temporary,temps,tempt,temptation,temptations,tempted,tempting,temptress,ten,tenacious,tenacity,tenant,tenants,tend,tended,tendencies,tendency,tender,tenderness,tending,tendon,tendonitis,tendons,tends,tenement,tenets,tenner,tennis,tenor,tenors,tens,tense,tension,tensions,tent,tentative,tenth,tenths,tents,tenuous,tenure,tepid,tequila,teriyaki,term,terminal,terminate,terminated,terminating,termination,terminator,terminology,terminus,termites,terms,terra,terrace,terraforming,terrain,terrestrial,terrible,terribly,terrific,terrifically,terrified,terrifies,terrify,terrifying,territorial,territories,territory,terror,terrorism,terrorist,terrorists,terrorize,terrorized,terrorizing,terrors,terry,test,testament,tested,testified,testifies,testify,testifying,testimonies,testimony,testing,testosterone,tests,testy,tet,tetanus,tether,tetherball,texas,text,textbook,textbooks,textile,textiles,texts,texture,than,thank,thanked,thankful,thankfully,thanking,thankless,thanks,thanksgiving,thanksgivings,that,thataway,thatcher,thaw,thawed,thawing,the,theater,theaters,theatre,theatres,theatrical,theatrics,thee,theft,thefts,their,theirs,them,theme,themed,theme
s,themselves,then,theologian,theological,theology,theorem,theoretical,theoretically,theories,theory,therapeutic,therapist,therapists,therapy,there,thereafter,thereby,therefore,therein,thereof,theres,thermal,thermometer,thermonuclear,thermos,thermostat,thesaurus,these,theses,thesis,thespian,theta,they,thick,thickening,thickens,thicker,thief,thieves,thigh,thighs,thin,thine,thing,things,think,thinkers,thinking,thinks,thinly,thinner,thinners,thinning,thins,third,thirds,thirst,thirsty,thirteen,thirteenth,thirties,thirtieth,thirty,this,thistle,tho,thong,thongs,thoracic,thoracotomy,thorn,thorns,thorough,thoroughly,thorpe,those,thou,though,thought,thoughtful,thoughtfully,thoughtless,thoughts,thousand,thousands,thrall,thrash,thrashing,thread,threads,thready,threat,threaten,threatened,threatening,threatens,threats,three,threesome,threshold,threw,thrice,thrift,thrill,thrilled,thriller,thrilling,thrills,thrive,thrives,thriving,throat,throats,throbbing,throne,thrones,throttle,through,throughout,throughway,throw,thrower,throwers,throwing,thrown,throws,thru,thrust,thrusters,thrusts,thruway,thug,thugs,thumb,thumbing,thumbprint,thumbs,thump,thumping,thunder,thunderbird,thunderbolt,thundering,thunderstorm,thus,thusly,thy,thyme,thyroid,thyself,ti,tiara,tibia,tic,tick,ticked,ticker,ticket,ticketed,tickets,ticking,tickle,tickles,tickling,ticklish,ticks,tidal,tidbits,tide,tides,tidings,tidy,tidying,tie,tied,tier,ties,tiff,tiffany,tiger,tigers,tight,tighten,tightened,tightening,tightens,tighter,tightly,tightness,tightrope,tights,tiki,til,tile,tiles,till,tiller,tilt,tilted,timber,timbers,timbre,time,timed,timeless,timeline,timely,timer,timers,times,timetable,timid,timing,timothy,timpani,tin,tinfoil,ting,tingle,tingling,tingly,tiniest,tinkered,tinkering,tinkle,tins,tinsel,tinted,tiny,tip,tipped,tipper,tippers,tipping,tippy,tips,tipsy,tiptoe,tiptoeing,tiramisu,tire,tired,tireless,tires,tiresome,tiring,tis,tissue,tissues,titan,titanic,titanium,titans,title,titles,tizzy,to,toad,toast,toasted,toaster,toasting,toasts,toasty,tobacco,toby,tod,today,toddle,toddler,toddlers,toddy,toe,toed,toenail,toenails,toes,toffee,tofu,toga,together,togetherness,toilet,toilets,toiling,toke,token,tokens,told,tolerable,tolerance,tolerant,tolerate,tolerated,toll,toller,tolls,tom,tomahawk,tomato,tomatoes,tomb,tombs,tombstone,tomcat,tome,tomfoolery,tommy,tomorrow,tomorrows,toms,ton,tone,toned,toner,tones,tong,tongs,tongue,tongues,tonic,tonics,tonight,tonnage,tons,tonsil,tonsils,tony,too,took,tool,toolbox,tools,toon,toons,toot,tooth,toothache,toothbrush,toothbrushes,toothed,toothpaste,toothpick,toothpicks,toots,top,topaz,topes,topic,topical,topics,topless,topnotch,topped,topping,toppings,topple,tops,topside,tor,torah,torch,torched,torches,torching,tore,torment,tormented,tormenting,torn,tornado,toro,torpedo,torpedoed,torpedoes,torque,torrent,torrid,torso,tortillas,tortious,tortoise,torture,tortured,tortures,torturing,torturous,tory,tosh,toss,tossed,tosses,tossing,total,totaled,totalled,totally,totals,tote,totem,toting,tots,touch,touchdown,touchdowns,touche,touched,touches,touching,touchstone,touchy,tough,toughen,tougher,toughest,toughness,toupee,tour,toured,touring,tourism,tourist,tourists,tournament,tournaments,tours,tow,toward,towards,towed,towel,towels,tower,towers,town,townhouse,townie,townies,towns,townsfolk,townspeople,toxic,toxicity,toxicology,toxin,toxins,toxoplasmosis,toy,toyed,toying,toys,trace,traced,tracer,traces,tracing,track,tracked,tracker,trackers,tracking,tracks,traction,tractor,tractors,trade,traded,trademark,tradeoff,trader,t
rades,trading,tradition,traditional,traditionally,traditions,traffic,trafficker,trafficking,tragedies,tragedy,tragic,tragically,trail,trailer,trailers,trailing,trails,train,trained,trainee,trainees,trainer,training,trains,traipse,traipsing,trait,traitor,traitors,traits,trajectory,tram,tramp,trample,trampling,tramps,trance,tranquil,tranquility,tranquilizer,trans,transaction,transactions,transatlantic,transcendent,transcends,transcript,transcripts,transfer,transference,transferred,transferring,transfers,transform,transformation,transformed,transfusion,transfusions,transgenic,transgression,transgressions,transient,transistor,transit,transition,transitional,translate,translated,translating,translation,translator,translators,transmission,transmissions,transmit,transmitted,transmitter,transmitters,transparent,transpired,transplant,transponder,transport,transportation,transported,transporting,transports,transsexual,transvestite,transvestites,trap,trapeze,trapped,trapper,trapping,trappings,traps,trash,trashed,trashes,trashing,trashy,trattoria,trauma,traumas,traumatic,traumatized,traumatizing,travel,traveled,traveler,travelers,traveling,travelled,traveller,travellers,travelling,travels,traverse,travesty,trawler,tray,trays,treacherous,treachery,tread,treadmill,treads,treason,treasure,treasured,treasurer,treasures,treasury,treat,treatable,treated,treaters,treaties,treating,treatment,treatments,treats,treaty,tree,trees,trek,trellis,tremble,trembling,tremendous,tremendously,tremor,tremors,trench,trenches,trend,trends,trendy,trespass,trespassed,trespasser,trespassers,trespassing,trey,triad,trial,trials,triangle,triangular,tribal,tribe,tribulations,tribunal,tribune,tribute,triceps,trick,tricked,trickery,trickier,tricking,trickle,tricks,trickster,tricky,tricycle,trident,tried,tries,trifecta,trifle,trifled,trifling,trig,trigger,triggered,triggering,triggers,trigonometry,trillion,trilogy,trim,trimester,trimmed,trimmers,trimmings,trinity,trinket,trinkets,trio,trip,tripe,triple,tripled,triplets,triplicate,tripped,tripping,trippy,trips,trite,triumph,triumphed,triumphs,trivial,troll,trolling,trollop,trolls,trombone,troop,trooper,troopers,troops,trophies,trophy,tropic,tropical,tropics,trot,trotting,troubadour,trouble,troubled,troublemaker,troublemakers,troubles,troublesome,troubling,troupe,trouper,trouser,trousers,trout,trove,troy,truant,truce,truck,truckers,truckload,trucks,true,truer,truest,truffle,truffles,truly,trump,trumpet,trumpets,trumps,trunk,trunks,truss,trussed,trust,trusted,trustee,trustees,trusting,trusts,trustworthy,trusty,truth,truthful,truthfully,truths,try,trying,tryout,tryouts,tsk,tsunami,tub,tuba,tubby,tube,tubers,tubes,tubing,tubs,tuck,tucked,tucker,tucking,tug,tuition,tularemia,tulip,tulle,tumble,tumbler,tumbling,tummy,tumor,tumors,tuna,tundra,tune,tuned,tunes,tung,tunic,tuning,tunnel,tunnels,turban,turbine,turbo,turbulence,turbulent,turd,turds,turf,turk,turkey,turkeys,turks,turmoil,turn,turnaround,turncoat,turndown,turned,turner,turning,turnips,turnout,turnpike,turns,turquoise,turret,turtle,turtleneck,turtles,tush,tushie,tushy,tusk,tussle,tut,tutor,tutoring,tutors,tutti,tutu,tux,tuxedo,tuxedos,tuxes,twain,twas,tweak,tweaked,tweaking,tweed,tweedle,tween,tweet,tweeze,twelfth,twelve,twenties,twentieth,twenty,twerp,twice,twig,twigs,twilight,twin,twine,twinge,twinkle,twins,twirl,twist,twisted,twister,twisting,twists,twisty,twit,twitch,twitching,twitchy,twits,two,twos,twosome,tycoon,tying,tykes,type,typed,types,typewriter,typhoid,typical,typically,typing,typist,typo,tyranny,tyrant,tyrants,tyre,tyre
s,ubiquitous,ugh,uglies,ugliest,ugliness,ugly,uh,ulcer,ulcers,ulterior,ultimate,ultimately,ultimatum,ultimatums,ultra,ultrasound,um,umbilical,umbrella,umm,ump,umpire,umpteenth,un,unable,unacceptable,unadvisedly,unaffected,unanimous,unanimously,unannounced,unanswered,unappealing,unappreciated,unarmed,unattached,unattainable,unattended,unattractive,unauthorized,unavailable,unavoidable,unaware,unbalanced,unbearable,unbearably,unbeatable,unbecoming,unbeknownst,unbelievable,unbelievably,unblemished,unborn,unbreakable,unbroken,unburden,unbuttoned,uncalled,uncanny,uncaring,uncertain,uncertainty,unchanged,uncharacteristic,uncharacteristically,uncivilized,unclaimed,uncle,unclear,unclench,uncles,uncomfortable,uncommon,uncomplicated,uncompromising,unconcerned,unconditional,unconscionable,unconscious,unconsciously,unconsciousness,unconstitutional,uncontrollable,uncontrollably,uncontrolled,unconventional,uncool,uncooperative,uncover,uncovered,uncovering,uncuff,uncut,undead,undecided,undeniably,under,underage,undercover,undercurrent,undercut,underdeveloped,underdog,underestimate,underestimated,underestimating,undergarment,undergarments,undergo,undergoing,undergone,undergrad,undergraduate,underground,underhanded,underline,underlined,underlings,underlying,undermine,undermined,undermines,undermining,underneath,underpaid,underpants,underpass,underprivileged,undershirt,understand,understandable,understandably,understanding,understands,understatement,understood,understudy,undertake,undertaken,undertaking,undertow,underwater,underway,underwear,underworld,undeserving,undesirable,undetectable,undetected,undies,undisciplined,undisclosed,undiscovered,undisputed,undivided,undo,undoing,undone,undoubtedly,undress,undressed,undressing,undue,unduly,undying,unearth,unearthed,uneasy,uneducated,unemployable,unemployed,unemployment,unencumbered,unending,unequivocally,unethical,uneventful,unexpected,unexpectedly,unexplained,unexplored,unexpressed,unfair,unfairly,unfairness,unfaithful,unfamiliar,unfathomable,unfinished,unfit,unflattering,unfold,unfolding,unfolds,unforeseen,unforgettable,unforgivable,unforgiving,unfortunate,unfortunately,unfounded,unfreeze,unfreezing,unfriendly,unfulfilled,unfunny,ungodly,ungrateful,unguarded,unhand,unhappily,unhappiness,unhappy,unharmed,unhealthy,unheard,unhinged,unholy,unhook,unhooked,unicorn,unicycle,unidentified,unified,uniform,uniformity,uniforms,unimaginative,unimportant,unimpressed,uninformed,uninhibited,uninspired,uninsured,unintelligible,unintentionally,uninteresting,uninterrupted,uninvited,uninvolved,union,unions,unique,unisex,unit,unite,united,units,unity,universal,universally,universe,universes,universities,university,unjustly,unknown,unknowns,unlawful,unleaded,unleash,unleashed,unless,unlikable,unlike,unlikely,unlimited,unlisted,unload,unloading,unlock,unlocked,unlocking,unlocks,unlovable,unloved,unlucky,unmanned,unmarked,unmarried,unmentionable,unmentionables,unmistakable,unmitigated,unnatural,unnecessary,unnerve,unnerved,unofficial,unopened,unorthodox,unpack,unpacked,unpacking,unpaid,unparalleled,unplanned,unpleasant,unpleasantness,unplug,unplugged,unpopular,unprecedented,unpredictable,unprepared,unprincipled,unprofessional,unprotected,unprovoked,unpunished,unqualified,unquenchable,unquote,unravel,unraveled,unraveling,unreachable,unreal,unrealistic,unreasonable,unrelated,unreliable,unresolved,unresponsive,unrest,unruly,uns,unsafe,unsaid,unsanitary,unsatisfactory,unsatisfied,unsatisfying,unsavory,unscathed,unscheduled,unscrupulous,unseal,unsealed,unseasonably,unseat,unseemly,unse
en,unselfish,unsettled,unsettling,unshakable,unsigned,unsolved,unspeakable,unspeakably,unspoiled,unspoken,unstable,unstoppable,unstuck,unsubstantiated,unsuccessful,unsuitable,unsung,unsupervised,unsure,unsuspecting,untamed,untangle,untapped,untenable,unthinkable,untie,untied,until,untimely,unto,untouchable,untouched,untoward,untraceable,untrained,untreated,untrue,untrustworthy,unusual,unusually,unveil,unveiling,unwanted,unwarranted,unwashed,unwelcome,unwieldy,unwilling,unwind,unwise,unwitting,unwittingly,unworthy,unwrap,unwrapping,unwritten,unzip,up,upbeat,upbringing,upchuck,upcoming,update,updated,updates,updating,upfront,upgrade,upgraded,upgrades,upgrading,upheaval,upheld,uphill,uphold,upholstery,upkeep,uplifting,uplink,upload,uploading,upon,upped,upper,uppers,upping,uppity,upright,uprising,uproar,uproot,uprooted,ups,upscale,upset,upsets,upsetting,upshot,upside,upstage,upstaged,upstairs,upstanding,upstart,upstate,uptake,uptight,uptown,upward,upwards,uranium,urban,urchin,urethra,urge,urged,urgency,urgent,urgently,urges,urging,urinal,urinary,urinate,urinating,urination,urine,urn,urns,urologist,us,usable,use,used,useful,usefulness,useless,user,users,uses,usher,ushering,ushers,using,usual,usually,utensils,uterine,uterus,utilities,utility,utilize,utmost,utopia,utter,utterly,vacancies,vacant,vacate,vacated,vacation,vacationing,vacations,vaccinated,vaccination,vaccine,vacuum,vacuumed,vacuuming,vagabond,vague,vaguely,vaguest,vail,vain,valentine,valentines,valet,valiant,valid,validate,validated,validity,valise,valley,valor,valuable,valuables,value,valued,values,valve,valves,vamoose,vamp,vampire,vampires,vamps,van,vandalism,vandalized,vandals,vanilla,vanish,vanished,vanishes,vanishing,vanity,vanquish,vanquished,vanquishing,vantage,vapid,vaporize,vaporized,variable,variables,variance,variant,variations,varicose,varied,varies,variety,various,varnish,varsity,vary,vascular,vase,vasectomy,vast,vastly,vat,vault,veal,vector,vee,veer,veered,veering,veg,vegan,vegetable,vegetables,vegetarian,vegetarians,vegetative,veggies,vehicle,vehicles,vehicular,veil,veiled,veils,vein,veins,velocity,velvet,vendetta,vendettas,vending,vendor,vendors,veneer,venerable,venereal,venetian,vengeance,vengeful,venison,venom,venomous,vent,vented,ventilate,ventilated,ventilation,ventilator,venting,ventricle,ventricular,ventriloquism,ventriloquist,vents,venture,ventures,venue,venues,vera,veracity,veranda,verbal,verbally,verbatim,verbs,verdict,verge,verger,verification,verified,verify,verily,veritable,veritas,vermeil,vermin,vermouth,vernacular,veronica,versatile,verse,versed,version,versions,versus,vertebrae,vertical,vertically,vertigo,very,vespers,vessel,vessels,vest,vesta,vested,vestibule,vestigial,vests,vet,veteran,veterans,veterinarian,veterinary,veto,vetoed,vets,vetted,via,viable,vial,vials,vibe,vibes,vibrant,vibrating,vibrations,vibrator,vicar,vicariously,vice,viceroy,vicinity,vicious,viciousness,victim,victimless,victims,victor,victoria,victories,victorious,victory,video,videos,videotape,videotaped,videotapes,vie,view,viewed,viewer,viewers,viewing,viewpoint,views,vig,vigil,vigilance,vigilant,vigilante,vigor,vigorously,viking,vile,vill,villa,village,villagers,villages,villain,villains,villas,vindicated,vindication,vindictive,vindictiveness,vine,vinegar,vineyards,vino,vintage,vinyl,viola,violate,violated,violates,violating,violation,violations,violence,violent,violently,violet,violets,violin,violinist,violins,viper,vipers,viral,virgin,virginal,virginity,virgins,virile,virtual,virtually,virtue,virtuoso,virtuous,virulent,virus,virus
es,vis,visa,visage,visas,visceral,viscount,viscous,visibility,visible,vision,visions,visit,visitation,visited,visiting,visitor,visitors,visits,visor,vista,visual,visualize,visualizing,visually,visuals,vital,vitality,vitally,vitals,vitamin,vitamins,viva,vivacious,vive,vivid,vixen,vixens,vocabulary,vocal,vocalist,vocals,vocation,vocational,vodka,vogue,voice,voiced,voices,voicing,void,voila,volant,volatile,volcanic,volcano,volcanoes,volition,volleyball,volt,voltage,volts,volume,volumes,voluntarily,voluntary,volunteer,volunteered,volunteering,volunteers,vomit,vomiting,voodoo,vortex,vote,voted,voter,voters,votes,voting,vouch,vouched,vouching,vow,vowed,vowel,vowels,vows,voyage,voyeur,vroom,vulgar,vulnerability,vulnerable,vulture,vultures,vying,wack,wacko,wackos,wacky,wad,waddle,wade,wading,waffle,waffles,wag,wage,wager,wages,wagging,waging,wagon,wagons,wahoo,waif,wail,wailing,waist,waistband,wait,waited,waiter,waiters,waiting,waitress,waitressed,waitresses,waitressing,waits,waive,waived,waiver,wake,waken,wakes,waking,walk,walked,walker,walking,walks,wall,walla,wallaby,walled,wallet,wallets,walling,wallop,wallow,wallowing,wallpaper,walls,wally,walnut,waltz,waltzing,wampum,wan,wand,wander,wandered,wanderer,wandering,wangler,waning,want,wanted,wanting,wanton,wants,war,ward,warden,wardrobe,ware,warehouse,warehouses,wares,warfare,warfarin,warhead,warheads,warlock,warlocks,warlord,warm,warmed,warmer,warmest,warming,warms,warmth,warn,warned,warner,warning,warnings,warp,warpath,warped,warrant,warranted,warrants,warren,warring,warrior,warriors,wars,warsaw,wart,wartime,warts,wary,was,wash,washcloth,washed,washer,washes,washing,washout,washroom,washrooms,washy,wasp,wasps,waste,wastebasket,wasted,wasteful,wastes,wasting,watch,watched,watcher,watchers,watches,watchful,watching,watchman,water,waterbed,watered,waterfall,waterfront,watering,waterloo,watermelon,waterproof,waters,waterworks,watery,watt,watts,wave,waved,wavelength,wavelengths,wavered,waves,waving,wavy,wax,waxed,waxes,waxing,waxy,way,ways,wayside,wayward,we,weak,weaken,weakened,weakening,weaker,weakest,weakling,weakness,weaknesses,wealth,wealthiest,wealthy,wean,weaning,weapon,weapons,wear,wearer,wearing,wears,weary,weasel,weasels,weather,weathered,weatherman,weathers,weave,weaver,web,webbing,weber,webs,webster,wed,wedded,wedding,weddings,wedge,wedges,wedgie,wedgies,wedlock,weds,wee,weed,weeds,week,weekend,weekends,weekly,weeks,weenie,weenies,weensy,weeny,weep,weeping,weepy,weevil,weigh,weighed,weighing,weighs,weight,weighted,weightless,weights,weiner,weir,weird,weirder,weirdest,weirdly,weirdness,weirdo,weirdoes,weirdos,welch,welcome,welcomed,welcomes,welcoming,weld,welded,welder,welding,welfare,well,welling,wellness,wells,welsh,welt,welts,wench,went,wept,were,werewolf,werewolves,west,westbound,western,westerns,westward,wet,wetlands,wets,wetter,wetting,wha,whack,whacked,whacko,whale,whales,wham,whammo,whammy,wharf,wharves,what,whatchamacallit,whatever,whatnot,whats,whatsoever,wheat,whee,wheel,wheelchair,wheelchairs,wheeler,wheeling,wheels,wheeze,wheezing,when,whence,whenever,where,whereabouts,whereas,wherefore,wherein,whereof,wherever,whet,whether,whew,whey,which,whichever,whiff,while,whilst,whim,whimper,whimpering,whims,whimsical,whimsy,whine,whiner,whining,whiny,whip,whiplash,whipped,whipper,whipping,whirl,whirling,whirlwind,whisk,whisked,whisker,whiskey,whisking,whisky,whisper,whispered,whispering,whispers,whistle,whistler,whistles,whistling,whit,white,whiteness,whiter,whites,whitey,whittle,whittled,whittling,whiz,who,whoa,whodunit,whoever,whole,w
holeheartedly,wholesale,wholesaler,wholesome,wholly,whom,whomever,whoop,whoopee,whooping,whoops,whoosh,whopper,whopping,whose,why,whys,wick,wicked,wickedness,wide,widen,widening,wider,widow,widowed,widower,widows,width,wield,wielding,wiener,wieners,wife,wig,wigged,wiggle,wiggled,wiggles,wiggling,wiggly,wiggy,wigs,wild,wildcat,wildcats,wilder,wilderness,wildest,wildfire,wildlife,wildly,will,willed,willful,willfully,willies,willing,willingly,willingness,willow,willows,willpower,wills,willy,wilt,wily,wimp,wimps,wimpy,win,winch,wind,windbag,windbreaker,winded,windfall,winding,windjammer,windmills,window,windows,windowsill,windpipe,winds,windshield,windsurfing,windward,windy,wine,winery,wing,winged,winger,winging,wingman,wings,wining,wink,winking,winkle,winks,winner,winners,winning,winnings,winos,wins,winter,winters,wipe,wiped,wiper,wipers,wipes,wiping,wire,wired,wireless,wires,wiretap,wiretaps,wiring,wisdom,wise,wiseass,wisecracks,wised,wisely,wiser,wisest,wish,wished,wishes,wishful,wishing,wit,witch,witchcraft,witches,witching,witchy,with,withdraw,withdrawal,withdrawals,withdrawing,withdrawn,withdraws,withdrew,wither,withered,withering,withers,withheld,withhold,withholding,within,without,withstand,withstanding,witless,witness,witnessed,witnesses,witnessing,wits,witty,wives,wizard,wo,wobble,woe,woes,wok,woke,woken,wolf,wolfram,wolves,woman,womanhood,womanizer,womanizing,womanly,womb,women,won,wonder,wondered,wonderful,wonderfully,wondering,wonderland,wonders,wondrous,wonky,wont,woo,wood,woodchuck,wooded,wooden,woodland,woodpecker,woods,woodshed,woodsy,woodwork,woody,wooed,woof,wooing,wool,woolly,woops,woozy,word,worded,wording,words,wore,work,workable,workaholic,workday,worked,worker,workers,working,workings,workload,workman,workmen,workout,workplace,works,workshop,workstation,workup,world,worldly,worlds,worldwide,worm,wormed,wormhole,worming,worms,worn,worried,worrier,worries,worrisome,worry,worrying,worrywart,worse,worsened,worship,worshiped,worshipped,worshippers,worshipping,worships,worst,worth,worthless,worthwhile,worthy,would,wound,wounded,wounding,wounds,woven,wow,wowed,wracked,wracking,wraith,wrangle,wrangler,wrap,wrapped,wrapper,wrappers,wrapping,wraps,wrath,wreak,wreaked,wreaking,wreaks,wreaths,wreck,wreckage,wrecked,wrecker,wrecking,wrecks,wrench,wrenched,wrestle,wrestler,wrestling,wretch,wretched,wriggle,wright,wring,wringer,wringing,wrinkle,wrinkled,wrinkles,wrinkly,wrist,wrists,writ,write,writer,writers,writes,writhing,writing,writings,written,wrong,wronged,wrongful,wrongfully,wrongs,wrote,wrought,wrung,wry,wuss,wussies,wussy,wuthering,xerox,ya,yacht,yachts,yah,yahoo,yak,yakking,yam,yammer,yammering,yams,yang,yank,yanked,yanking,yanks,yap,yapping,yard,yards,yardstick,yarn,yaw,yawn,yawning,yay,ye,yea,yeah,year,yearbook,yearbooks,yearned,yearning,yearnings,yearns,years,yeast,yech,yeh,yell,yelled,yeller,yelling,yellow,yells,yen,yep,yes,yesterday,yesterdays,yet,yeti,yield,yielding,yields,yikes,yin,yipe,yippee,yippie,yo,yodel,yoga,yoghurt,yogi,yogurt,yoke,yokel,yokels,yolk,yom,yon,yonkers,yore,you,young,younger,youngest,youngster,youngsters,your,yours,yourself,yourselves,youse,youth,youthful,youths,yuan,yuck,yuk,yuletide,yum,yummy,yup,yuppie,yuppies,zag,zander,zany,zap,zapped,zeal,zebra,zebras,zed,zee,zephyr,zephyrs,zeppelin,zero,zeroed,zeroes,zeros,zest,zesty,zeta,zig,zillion,zillions,zinfandel,zing,zip,zipped,zipper,zipping,zippy,zit,zits,zodiac,zombie,zombies,zone,zoned,zones,zoning,zonked,zoo,zoom,zooming'
# 65536 words
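# NOTE: sketch only -- `word_ranks` below is a hypothetical helper, not part
# of the original module. Frequency-ordered lists like the ones in this file
# are typically consumed by splitting the comma-separated string once and
# mapping each word to its 1-based rank (lower rank = more common), so that
# rank lookups are O(1) instead of a linear scan of the string.
def word_ranks(words_csv):
    """Return a dict mapping each word in a comma-separated, frequency-ordered
    string to its 1-based frequency rank."""
    return {word: rank
            for rank, word in enumerate(words_csv.split(','), start=1)}
# Example (hypothetical usage): ranks = word_ranks(english_words_google)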
english_words_google = 'the,of,and,to,a,in,for,is,on,that,by,this,with,i,you,it,not,or,be,are,from,at,as,your,all,have,new,more,an,was,we,will,home,can,us,about,if,page,my,has,search,free,but,our,one,other,do,no,information,time,they,site,he,up,may,what,which,their,news,out,use,any,there,see,only,so,his,when,contact,here,business,who,web,also,now,help,get,view,online,c,e,first,am,been,would,how,were,me,s,services,some,these,click,its,like,service,x,than,find,price,date,back,top,people,had,list,name,just,over,state,year,day,into,email,two,health,n,world,re,next,used,go,b,work,last,most,products,music,buy,data,make,them,should,product,system,post,her,city,t,add,policy,number,such,please,available,copyright,support,message,after,best,software,then,jan,good,well,d,where,rights,public,books,high,school,through,m,each,links,she,review,years,order,very,privacy,book,items,company,r,read,group,sex,need,many,user,said,de,does,set,under,general,research,university,january,mail,full,map,reviews,program,life,know,games,way,days,management,p,part,could,great,united,hotel,real,f,item,international,center,must,store,travel,comments,made,development,report,off,member,details,line,terms,before,hotels,did,send,right,type,because,local,those,using,results,office,education,national,car,design,take,posted,internet,address,community,within,states,area,want,phone,shipping,reserved,subject,between,forum,family,l,long,based,w,code,show,o,even,black,check,special,prices,index,being,women,much,sign,file,link,open,today,technology,south,case,project,same,pages,uk,version,section,own,found,sports,house,related,security,both,g,county,american,photo,game,members,power,while,care,network,down,computer,systems,three,total,place,end,following,download,h,him,without,per,access,think,north,resources,current,posts,big,media,law,control,water,history,pictures,size,art,personal,since,including,guide,shop,directory,board,location,change,white,text,small,rating,rate,government,children,during,usa,return,students,v,shopping,account,times,sites,level,digital,profile,previous,form,events,love,old,john,main,call,hours,image,department,title,description,non,k,y,insurance,another,why,shall,property,class,cd,still,money,quality,every,listing,content,country,private,little,visit,save,tools,low,reply,customer,december,compare,movies,include,college,value,article,york,man,card,jobs,provide,j,food,source,author,different,press,u,learn,sale,around,print,course,job,canada,process,teen,room,stock,training,too,credit,point,join,science,men,categories,advanced,west,sales,look,english,left,team,estate,box,conditions,select,windows,gay,thread,week,category,note,live,large,gallery,table,register,however,june,october,november,market,library,really,action,start,series,model,features,air,industry,plan,human,provided,tv,yes,required,second,hot,accessories,cost,movie,march,la,september,better,say,questions,july,going,medical,test,friend,come,dec,study,application,cart,staff,articles,san,again,play,looking,issues,april,never,users,complete,street,topic,comment,financial,things,working,against,standard,tax,person,below,mobile,less,got,party,payment,equipment,login,student,let,programs,offers,legal,above,recent,park,stores,side,act,problem,red,give,memory,performance,social,q,august,quote,language,story,sell,experience,rates,create,key,body,young,america,important,field,few,east,paper,single,ii,age,activities,club,example,girls,additional,password,z,latest,something,road,gift,question,changes,night,ca,hard,texas,oct,pay,four,poker,status,browse,issue,range,b
uilding,seller,court,february,always,result,light,write,war,nov,offer,blue,groups,al,easy,given,files,event,release,analysis,request,china,making,picture,needs,possible,might,professional,yet,month,major,star,areas,future,space,committee,hand,sun,cards,problems,london,washington,meeting,become,interest,id,child,keep,enter,california,share,similar,garden,schools,million,added,reference,companies,listed,baby,learning,energy,run,delivery,net,popular,term,film,stories,put,computers,journal,reports,co,try,welcome,central,images,president,notice,god,original,head,radio,until,cell,color,self,council,away,includes,track,australia,discussion,archive,once,others,entertainment,agreement,format,least,society,months,log,safety,friends,sure,trade,edition,cars,messages,marketing,tell,further,updated,association,able,having,provides,david,fun,already,green,studies,close,common,drive,specific,several,gold,feb,living,collection,called,short,arts,lot,ask,display,limited,solutions,means,director,daily,beach,past,natural,whether,due,et,five,upon,period,planning,says,official,weather,mar,land,average,done,technical,window,france,pro,region,island,record,direct,conference,environment,records,st,district,calendar,costs,style,front,statement,parts,aug,ever,early,miles,sound,resource,present,applications,either,ago,document,word,works,material,bill,written,talk,federal,rules,final,adult,tickets,thing,centre,requirements,via,cheap,nude,kids,finance,true,minutes,else,mark,third,rock,gifts,europe,reading,topics,bad,individual,tips,plus,auto,cover,usually,edit,together,percent,fast,function,fact,unit,getting,global,meet,far,economic,en,player,projects,lyrics,often,subscribe,submit,germany,amount,watch,included,feel,though,bank,risk,thanks,everything,deals,various,words,jul,production,commercial,james,weight,town,heart,advertising,received,choose,treatment,newsletter,archives,points,knowledge,magazine,error,camera,girl,currently,construction,toys,registered,clear,golf,receive,domain,methods,chapter,makes,protection,policies,loan,wide,beauty,manager,india,position,taken,sort,models,michael,known,half,cases,step,engineering,florida,simple,quick,none,wireless,license,paul,friday,lake,whole,annual,published,later,basic,shows,corporate,church,method,purchase,customers,active,response,practice,hardware,figure,materials,fire,holiday,chat,enough,designed,along,among,death,writing,speed,html,countries,loss,face,brand,discount,higher,effects,created,remember,standards,oil,bit,yellow,political,increase,advertise,kingdom,base,near,thought,stuff,french,storage,oh,japan,doing,loans,shoes,entry,stay,nature,orders,availability,africa,summary,turn,mean,growth,notes,agency,king,monday,european,activity,copy,although,drug,western,income,force,cash,employment,overall,bay,river,commission,ad,package,contents,seen,players,engine,port,album,regional,stop,supplies,started,administration,bar,institute,views,plans,double,dog,build,screen,exchange,types,soon,lines,electronic,continue,across,benefits,needed,season,apply,someone,held,ny,anything,printer,condition,effective,believe,organization,effect,asked,mind,sunday,selection,casino,lost,tour,menu,volume,cross,anyone,mortgage,hope,silver,corporation,wish,inside,solution,mature,role,rather,weeks,addition,came,supply,nothing,certain,executive,running,lower,necessary,union,jewelry,according,dc,clothing,mon,com,particular,fine,names,robert,hour,gas,skills,six,bush,islands,advice,career,military,rental,decision,leave,british,teens,pre,huge,sat,woman,facilities,zip,bid,kind,sellers,middle,move,cable,opp
ortunities,taking,values,division,coming,tuesday,object,appropriate,machine,length,actually,nice,score,statistics,client,ok,returns,capital,follow,sample,investment,sent,shown,saturday,christmas,england,culture,band,flash,ms,lead,george,choice,went,starting,registration,fri,thursday,courses,consumer,hi,foreign,artist,outside,furniture,levels,channel,letter,mode,ideas,wednesday,structure,fund,summer,allow,degree,contract,button,releases,wed,homes,super,male,matter,custom,virginia,almost,took,located,multiple,asian,distribution,editor,inn,industrial,cause,potential,song,ltd,los,focus,late,fall,featured,idea,rooms,female,responsible,inc,communications,win,associated,thomas,primary,cancer,numbers,reason,tool,browser,spring,foundation,answer,voice,friendly,schedule,documents,communication,purpose,feature,bed,comes,police,everyone,independent,approach,brown,physical,operating,hill,maps,medicine,deal,hold,chicago,forms,glass,happy,tue,smith,wanted,developed,thank,safe,unique,survey,prior,telephone,sport,ready,feed,animal,sources,mexico,population,pa,regular,secure,navigation,operations,therefore,ass,simply,evidence,station,christian,round,favorite,understand,option,master,valley,recently,probably,sea,built,publications,blood,cut,improve,connection,publisher,hall,larger,networks,earth,parents,impact,transfer,introduction,kitchen,strong,tel,carolina,wedding,properties,hospital,ground,overview,ship,accommodation,owners,disease,excellent,paid,italy,perfect,hair,opportunity,kit,classic,basis,command,cities,william,express,award,distance,tree,peter,assessment,ensure,thus,wall,ie,involved,el,extra,especially,pussy,partners,budget,rated,guides,success,maximum,ma,operation,existing,quite,selected,boy,amazon,patients,restaurants,beautiful,warning,wine,locations,horse,vote,forward,flowers,stars,significant,lists,owner,retail,animals,useful,directly,manufacturer,ways,est,son,providing,rule,mac,housing,takes,iii,bring,catalog,searches,max,trying,mother,authority,considered,told,traffic,programme,joined,strategy,feet,agent,valid,bin,modern,senior,ireland,teaching,door,grand,testing,trial,charge,units,instead,canadian,cool,normal,wrote,enterprise,ships,entire,educational,md,leading,metal,positive,fl,fitness,chinese,opinion,asia,football,abstract,uses,output,funds,mr,greater,likely,develop,employees,artists,alternative,processing,responsibility,resolution,java,guest,seems,publication,pass,relations,trust,van,contains,session,photography,republic,fees,components,vacation,century,academic,assistance,completed,skin,indian,mary,il,expected,ring,grade,dating,pacific,mountain,organizations,pop,filter,mailing,vehicle,longer,consider,int,northern,behind,panel,floor,german,buying,match,proposed,default,require,iraq,boys,outdoor,deep,morning,otherwise,allows,rest,protein,plant,reported,hit,transportation,mm,pool,politics,partner,disclaimer,authors,boards,faculty,parties,fish,membership,mission,eye,string,sense,modified,pack,released,stage,internal,goods,recommended,born,unless,richard,detailed,japanese,race,approved,background,target,except,character,maintenance,ability,maybe,functions,ed,moving,brands,places,pretty,spain,southern,yourself,etc,winter,rape,battery,youth,pressure,submitted,boston,incest,debt,medium,television,interested,core,break,purposes,throughout,sets,dance,wood,itself,defined,papers,playing,awards,fee,studio,reader,virtual,device,established,answers,rent,las,remote,dark,external,apple,le,regarding,instructions,min,offered,theory,enjoy,remove,aid,surface,minimum,visual,host,variety,teachers,martin,manual
,block,subjects,agents,increased,repair,fair,civil,steel,understanding,songs,fixed,wrong,beginning,hands,associates,finally,classes,paris,ohio,gets,sector,capacity,requires,jersey,un,fat,fully,father,electric,saw,instruments,quotes,officer,driver,businesses,dead,respect,unknown,specified,restaurant,mike,trip,worth,mi,procedures,poor,teacher,xxx,eyes,relationship,workers,farm,georgia,peace,traditional,campus,tom,showing,creative,coast,benefit,progress,funding,devices,lord,grant,sub,agree,fiction,hear,sometimes,watches,careers,beyond,goes,families,led,museum,themselves,fan,transport,interesting,wife,accepted,former,ten,hits,zone,complex,th,cat,galleries,references,die,presented,jack,flat,flow,agencies,literature,respective,parent,spanish,michigan,columbia,setting,dr,scale,stand,economy,highest,helpful,monthly,critical,frame,musical,definition,secretary,path,employee,chief,gives,bottom,magazines,packages,detail,francisco,laws,changed,pet,heard,begin,individuals,colorado,royal,clean,switch,russian,largest,african,guy,titles,relevant,guidelines,justice,bible,cup,basket,applied,weekly,vol,installation,described,demand,pp,suite,na,square,chris,attention,advance,skip,diet,army,auction,gear,lee,os,difference,allowed,correct,charles,nation,selling,lots,piece,sheet,firm,seven,older,illinois,regulations,elements,species,jump,cells,resort,facility,random,certificate,minister,motion,looks,fashion,directions,visitors,monitor,trading,forest,calls,whose,couple,giving,chance,vision,ball,ending,clients,actions,listen,discuss,accept,naked,goal,successful,sold,wind,communities,clinical,situation,sciences,markets,lowest,highly,publishing,appear,emergency,lives,currency,leather,determine,temperature,palm,announcements,patient,actual,historical,stone,bob,commerce,perhaps,persons,difficult,scientific,satellite,fit,tests,village,accounts,amateur,ex,met,pain,particularly,factors,coffee,cum,buyer,cultural,steve,easily,oral,ford,poster,edge,functional,root,au,fi,closed,holidays,ice,pink,zealand,balance,graduate,replies,shot,architecture,initial,label,thinking,scott,sec,recommend,canon,league,waste,minute,bus,optional,dictionary,cold,accounting,manufacturing,sections,chair,fishing,effort,phase,fields,bag,fantasy,po,letters,motor,va,professor,context,install,shirt,apparel,generally,continued,foot,mass,crime,count,breast,ibm,johnson,sc,quickly,dollars,religion,claim,driving,permission,surgery,patch,heat,wild,measures,generation,kansas,miss,chemical,doctor,task,reduce,brought,himself,nor,component,enable,exercise,bug,santa,mid,guarantee,leader,diamond,israel,se,processes,soft,alone,meetings,seconds,jones,arizona,interests,flight,congress,fuel,walk,produced,italian,wait,supported,pocket,saint,rose,freedom,argument,competition,creating,jim,drugs,joint,premium,fresh,characters,attorney,di,factor,growing,thousands,km,stream,apartments,pick,hearing,eastern,entries,dates,generated,signed,upper,administrative,serious,prime,limit,began,louis,steps,errors,shops,bondage,del,efforts,informed,ga,ac,thoughts,creek,ft,worked,quantity,urban,practices,sorted,reporting,essential,myself,tours,platform,load,labor,immediately,nursing,defense,machines,tags,heavy,covered,recovery,joe,guys,configuration,cock,merchant,comprehensive,expert,universal,protect,drop,solid,presentation,languages,became,orange,compliance,vehicles,prevent,theme,rich,im,campaign,marine,improvement,vs,guitar,finding,pennsylvania,examples,saying,spirit,ar,claims,challenge,acceptance,mo,seem,affairs,touch,intended,towards,sa,goals,hire,election,suggest,branch,charges,serve,r
easons,magic,mount,smart,talking,gave,ones,latin,avoid,certified,manage,corner,rank,computing,oregon,element,birth,virus,abuse,requests,separate,quarter,procedure,leadership,tables,define,racing,religious,facts,breakfast,kong,column,plants,faith,chain,identify,avenue,missing,died,approximately,domestic,recommendations,moved,houston,reach,comparison,mental,viewed,moment,extended,sequence,inch,attack,sorry,centers,opening,damage,reserve,recipes,plastic,produce,snow,placed,truth,counter,failure,follows,eu,dollar,camp,ontario,automatically,des,minnesota,films,bridge,native,fill,williams,movement,printing,baseball,owned,approval,draft,chart,played,contacts,cc,jesus,readers,clubs,wa,jackson,equal,adventure,matching,offering,shirts,profit,leaders,posters,institutions,assistant,variable,ave,advertisement,expect,headlines,yesterday,compared,determined,wholesale,workshop,russia,gone,codes,kinds,extension,seattle,statements,golden,completely,teams,fort,cm,wi,lighting,senate,forces,funny,brother,gene,turned,portable,tried,electrical,applicable,disc,returned,pattern,boat,named,theatre,earlier,manufacturers,sponsor,classical,warranty,dedicated,indiana,direction,harry,objects,ends,delete,evening,assembly,nuclear,taxes,mouse,signal,criminal,issued,brain,sexual,wisconsin,powerful,dream,obtained,false,da,cast,flower,felt,personnel,passed,supplied,identified,falls,pic,soul,aids,opinions,promote,stated,professionals,appears,carry,flag,decided,covers,hr,em,advantage,hello,designs,maintain,tourism,priority,newsletters,adults,savings,iv,graphic,atom,payments,estimated,binding,brief,ended,winning,eight,anonymous,iron,straight,script,served,wants,miscellaneous,prepared,void,dining,alert,integration,atlanta,dakota,tag,interview,mix,framework,disk,installed,queen,credits,clearly,fix,handle,sweet,desk,dave,massachusetts,diego,hong,vice,associate,ne,truck,behavior,enlarge,ray,frequently,revenue,measure,changing,votes,du,duty,looked,discussions,bear,gain,festival,laboratory,ocean,flights,experts,signs,lack,depth,iowa,whatever,vintage,train,exactly,dry,explore,maryland,spa,concept,nearly,eligible,reality,forgot,handling,origin,knew,gaming,feeds,billion,destination,scotland,faster,intelligence,dallas,bought,con,ups,nations,route,followed,specifications,broken,frank,alaska,blow,battle,residential,speak,decisions,industries,protocol,query,clip,partnership,editorial,nt,expression,es,equity,provisions,speech,wire,principles,suggestions,rural,shared,sounds,replacement,tape,strategic,judge,economics,acid,cent,forced,compatible,fight,apartment,height,null,zero,speaker,filed,netherlands,obtain,recreation,offices,designer,remain,managed,pr,failed,marriage,roll,korea,banks,fr,participants,secret,bath,kelly,leads,negative,austin,favorites,toronto,theater,springs,missouri,andrew,var,perform,healthy,translation,estimates,font,assets,injury,mt,joseph,ministry,drivers,lawyer,figures,married,protected,proposal,sharing,philadelphia,portal,waiting,birthday,beta,fail,gratis,banking,officials,brian,toward,won,slightly,assist,conduct,contained,legislation,calling,serving,bags,miami,comics,matters,houses,doc,postal,relationships,tennessee,wear,controls,breaking,combined,ultimate,wales,representative,frequency,introduced,minor,finish,departments,residents,noted,displayed,reduced,physics,rare,spent,performed,extreme,samples,davis,daniel,bars,reviewed,row,oz,forecast,removed,helps,administrator,cycle,contain,accuracy,dual,rise,sleep,bird,brazil,creation,static,scene,hunter,addresses,lady,crystal,famous,writer,chairman,violence,fans,oklahoma,speak
talion,candid,schooling,thornton,schoolgirl,caesar,pines,stellar,davenport,locating,monogram,philippe,aix,ornament,urges,sophie,attacking,microscope,threaten,bait,badges,kitten,brides,dent,stealing,bullets,emphasized,glossy,informations,haired,alterations,pablo,biographical,confirms,cavity,molded,vladimir,ida,probate,terrestrial,completes,beams,props,incense,formulated,dough,stool,towing,welch,rosemary,millionaire,turquoise,exposures,boone,substituted,horde,paperwork,nanny,suburb,hutchinson,cohort,succession,alliances,sums,averaged,glacier,pueblo,rigorous,relieve,clarion,override,angus,enthusiastic,lame,squeeze,sar,burgundy,struggles,farewell,soho,ashes,vanguard,natal,locus,evenings,misses,troubles,elton,purity,shaking,witnessed,cellar,friction,prone,valerie,enclosures,mer,equitable,fuse,lobster,judaism,atlantis,amid,onions,corinthians,crosses,uncomfortable,sylvia,furnace,poisoning,doubled,clues,inflammation,rabbits,icc,transported,crews,goodwill,anxious,tariffs,norris,ly,baptism,cutlery,overlooking,knot,rad,gut,staffordshire,factories,swords,advancing,timed,evolve,yuan,esa,suspicious,leased,subscribed,tate,dartmouth,brewing,coop,blossom,scare,confessions,bergen,lowered,thief,prisons,pictured,feminine,grabbed,rocking,nichols,blackwell,fulfilled,sweets,nautical,imprisonment,employs,gutenberg,bubbles,ashton,pitcher,judgments,muscular,motif,illnesses,plum,saloon,prophecy,loft,historian,elm,facsimile,hurts,folded,sofia,comprise,lump,disposed,chestnut,engraved,halt,alta,pastoral,unpaid,ghosts,doubts,locality,substantive,bulletins,worries,hug,rejects,spear,nigel,referee,transporter,jolie,broadly,ethereal,crossroads,aero,constructing,smoothly,parsons,bury,blanc,autonomy,bounded,insist,birch,slash,exercised,detecting,howell,digestive,entertain,cinderella,sesame,duct,touches,joanne,housewife,pursued,lend,corvette,yachts,stacy,christie,unrelated,lois,levi,stimulating,mont,misuse,cosmos,speculation,dixie,pans,enforced,legion,fulfillment,assertion,shook,lincolnshire,dismissal,mah,shocking,overland,prolonged,isaiah,backbone,unanimously,sausage,neighboring,uncommon,centralized,stratford,heidi,objections,unpublished,ames,slaughter,enlightenment,pistol,juniors,rockets,seymour,arithmetic,supposedly,bombay,originals,enrichment,milford,buckle,bartlett,fetch,kitchens,wat,rey,divers,townsend,blackburn,founders,sundays,upside,admiral,patron,sandwiches,sinclair,boiler,anticipate,induce,annapolis,padding,diagonal,unite,cracked,debtor,polk,mets,shear,mortal,sovereignty,franchises,rams,cleansing,gown,ponds,archery,excludes,sabbath,ruin,trump,nate,escaped,precursor,mates,stella,passages,vu,cereal,comprehension,sy,tow,resolving,drills,alexandra,champ,agreeing,rented,deductions,harrisburg,brushed,augmentation,otto,annuity,assortment,credible,ik,cultured,importing,deliberately,openly,crawl,theo,sparkling,bindings,convincing,flaws,este,tracing,deviations,incomes,fragile,jeremiah,sapiens,nyt,olsen,serbian,hai,restoring,sanchez,rushing,behold,amherst,alteration,murdered,hazel,ledger,scarlet,crushed,laughs,connie,referendum,modulation,statues,depths,spices,communion,uncertainties,colonies,followers,caldwell,squadron,bei,rupee,subsidy,demolition,irene,felony,lungs,monuments,veronica,filtered,growers,vinci,adj,haul,acknowledgement,duly,roasted,tenders,inviting,rig,ov,mick,mustard,strait,masterpiece,obey,donkey,jacks,conceived,boasts,praying,oss,multiply,intercourse,radial,mare,instructed,stole,kirby,armour,summarized,avalanche,northampton,manuscripts,cary,exhibited,disciples,shaving,bishops,kite,destroying,humorous,faa,corona
,heap,griffith,erection,quasi,energetic,disturbance,saunders,ribbons,jew,exile,bilder,reside,cashier,jaw,butterflies,eats,knots,flea,offences,anton,pals,celebrates,hail,armenian,longitudinal,historians,realities,mentions,samson,jumps,fleming,optimistic,wasting,acclaimed,seldom,morrow,glitter,giovanni,lasted,awhile,scaled,contingency,wiltshire,vague,wraps,constituents,herd,handicapped,exported,lag,warns,harmless,sting,bravo,believers,dispersion,curiosity,resting,missiles,persistence,coarse,continents,carpets,recovering,submarine,blessings,prevailing,originated,axe,sculptures,intrinsic,thoughtful,nicht,archer,hertfordshire,warmer,calf,basil,grouped,dominate,orient,contra,damaging,populated,renee,boiling,journeys,parsing,splitting,derbyshire,abandon,rave,ej,dy,cigars,nicolas,inference,ras,recalled,transformer,weiss,declarations,rib,chattanooga,giles,drafts,excursions,jerk,shack,marrow,tavern,bathing,lambert,epilepsy,allowances,goggles,ses,unhappy,foregoing,certainty,sleek,gerard,antarctic,ord,successive,neglected,ariel,monty,cafes,classmates,hitch,fracture,ama,foremost,nineteenth,chesapeake,mahogany,actresses,clarence,ernst,buster,moderated,mal,nassau,flap,ignorant,allowable,compositions,sings,marcos,sorrow,carte,canned,collects,treaties,endurance,teaspoon,insulated,dupont,harriet,philosopher,rectangle,woo,queer,pains,decatur,wrapper,ahmed,buchanan,drummer,sobre,ceremonies,satisfies,appellate,comma,conformity,avant,supper,fulfilling,hooded,instability,seminary,presenter,offenses,emulation,lengthy,sonata,fortress,contiguous,perez,inaccurate,explanatory,settlers,stools,ministerial,xavier,torah,fao,publishes,stacks,owning,andersen,sermon,facilitating,complained,ferdinand,taps,thrill,lagoon,undoubtedly,withheld,insisted,reluctant,headaches,ramsey,oath,pigeon,rivals,freed,constrained,parrot,magnum,invoked,invaluable,keystone,inclined,gala,cheek,traction,utterly,gavin,illuminated,lasts,gloucestershire,psychologist,dane,claudia,perpetual,solicitor,clustering,glimpse,verbatim,innocence,quicker,grandparents,cardboard,attributable,sketches,angelo,tertiary,exhausted,smarter,shelters,attain,dora,inconvenience,tang,vaccination,farther,chats,riot,fats,mandarin,dungeon,germans,lilly,shire,mosquito,kashmir,lyons,putnam,corpse,speedy,ming,lush,barrels,transformations,analogue,werner,clyde,honorary,irwin,brewer,exchanged,adhere,fran,rafael,ccc,enquire,toilets,mains,whales,lindsey,parity,partitions,grim,hubbard,prism,chasing,flop,aggregation,shelley,batting,borrowed,rests,toss,depicted,grapes,proposing,winding,ripped,cobalt,pity,downward,catalogues,aspire,harvesting,garfield,groom,jewels,saturated,georges,quincy,doughty,weeds,stripped,clive,fixture,canary,steadily,imagined,darby,woke,fills,proportions,grips,clergy,solicitors,moderately,altar,salvage,stanton,creators,kilometres,cuff,repeating,empires,oyster,sturdy,massacre,undergo,risen,blended,imperative,beg,digging,lantern,catches,evangelical,eaton,ruler,henri,tokens,piping,swept,staring,seventy,troop,arose,decomposition,chatham,becky,elders,interpreters,supporter,klaus,conquest,repairing,assemble,whistle,dresden,diversified,fertilizer,analytic,predominantly,amethyst,woodward,rewritten,concerto,adorable,ambition,torres,apologize,restraint,eddy,condemned,berger,parole,corey,kendall,slips,trays,stewardship,esq,kisses,kerr,regulating,flock,exporting,arabian,bending,boris,ammunition,vega,pleasures,shortest,denying,shave,sexe,disruption,galway,colt,artillery,furnish,precedence,grinding,rubbish,missionary,knocked,swamp,pitching,bordeaux,manifold,wf,tornado,possessed,
upstairs,turtles,vauxhall,welcoming,learns,manipulate,dividing,hickory,renovated,inmates,slices,cody,lawson,quo,damned,beethoven,faint,rebuilt,proceeded,lei,tentative,peterborough,fierce,jars,authenticity,hips,rene,gland,wigs,resignation,striped,zion,blends,garments,fraternity,tapestry,originating,stu,chap,blows,inevitably,converse,gardener,winnie,ita,higgins,warwickshire,penguins,attracting,jeeves,harp,wes,denton,anthem,tack,whitman,nowadays,woodstock,sack,inferior,abuses,inspected,deb,jockey,indicative,incumbent,ithaca,edmund,upholstery,aggression,practiced,ella,casualties,monarch,housed,administering,temptation,havana,roe,nasal,restrictive,costing,ranged,hier,spruce,paradox,billings,jeanne,oxidation,marin,halfway,amending,conflicting,georgian,compensate,recherche,loser,claus,braves,cracking,sued,shoots,interrupted,hemisphere,miranda,clover,kindness,porto,directs,jolly,snakes,swelling,spanning,politician,femme,unanimous,railways,approves,scriptures,misconduct,lester,resides,wording,obliged,perceive,rockies,siege,exercising,voluntarily,atkinson,nord,truths,grouping,wolfe,thereto,authorizing,enamel,toby,radiant,virgins,firstly,martini,butte,reeves,suspicion,disadvantage,bastard,spends,hicks,pratt,pedigree,fraudulent,sherwood,forgiveness,almond,har,petitions,francais,trenton,chalk,omar,alexis,axle,puppet,cultivation,surveying,grazing,pillar,mirage,questionable,seaside,precinct,renamed,cobb,unbelievable,soluble,piracy,rowing,siding,hardest,forrest,reminders,negro,blanca,equivalents,johann,pineapple,wrath,opal,simplest,patrons,peculiar,toon,europeans,commence,descendants,redmond,safeguard,lars,obsession,grind,albeit,billiards,clint,bankers,righteous,eo,redistribution,freaks,tra,sincere,intentionally,blitz,tended,censorship,cactus,viva,attained,blew,howe,nap,splendid,janice,lava,leonardo,sucked,scissors,cooks,sharply,granada,laurence,rebellion,rainy,tho,regent,evelyn,vinegar,vie,pluto,gil,vail,fisherman,misery,undergoing,limerick,envy,sweeping,healthier,ussr,preface,jameson,grievance,unread,sentiment,pencils,galloway,forged,viola,disclosures,provence,computerized,rustic,rumor,dillon,shah,eleanor,deception,conducts,divorced,rushed,weighs,magnolia,diver,disappointment,castles,notions,plateau,dexter,palette,blaze,wreck,threatens,strengthened,sammy,wakefield,devastating,centro,arabs,bild,robbery,eine,jasmine,crochet,brock,crowds,hoops,macon,stamped,increment,ju,ideals,chloe,ape,gee,apologies,malignant,dismiss,preceded,lawful,stag,crosby,rash,gateways,collapsed,horns,diversion,fantasies,beginnings,reversal,lex,presses,ordination,oxfordshire,yves,tandem,boil,deliberate,gagged,surprises,abe,roc,barley,potent,vo,amusing,mastering,nerves,retains,chimney,naomi,proverbs,risky,mistaken,carving,miracles,clair,slipped,realism,crete,fractions,bloodhound,sherry,desperately,indies,tulip,madame,remedial,vain,bert,dalton,bologna,departing,maze,barefoot,remuneration,bohemian,imposing,damon,tivoli,rode,amen,marching,evacuation,owing,warp,catholics,imo,faculties,denies,reinforce,inception,draper,bowman,subversion,benny,spires,barney,homosexuality,declares,masonry,medicinal,accrued,temples,realizing,annum,cemeteries,indoors,telescopes,magellan,champs,averaging,salads,addicted,flashlight,disappointing,eighty,unlocked,scarce,roche,ropes,spiders,obedience,plague,diluted,canine,gladly,brewery,lineage,mehr,brew,vaughan,kern,julius,coup,cannes,morse,dominance,piston,itu,cords,revisited,cass,sealing,topped,rag,despair,fore,absorb,injected,alps,commodore,enlisted,prophets,supernatural,overlooked,ditch,feared,prelude,rowe,sli
ck,limestone,commentaries,manpower,lec,chunk,reels,lob,slept,gregg,drafted,chalet,hopper,sus,specialization,abstraction,ludwig,scandinavian,detained,luncheon,zenith,browns,waits,tenor,softly,plenary,scrub,wilkinson,limb,intestinal,poe,refusing,suffers,occupy,gan,bethlehem,caves,authoritative,celestial,immense,audrey,merlin,aiming,seizure,stuttgart,diplomacy,differing,foreigners,limp,capitalist,mute,prescott,protestant,metre,tricky,ordinances,koch,topaz,ans,imaginary,albion,sutherland,dar,dart,wrought,robe,theresa,heidelberg,multitude,tutors,ezra,housekeeping,captive,kettle,visitation,chr,gibbs,baggage,dusty,patty,serena,satire,tortured,pioneers,crate,episcopal,moonlight,mast,unfinished,goth,cared,affection,sworn,bowen,vicious,educating,kin,cozy,mackenzie,slippers,earthquakes,hayward,wandering,comb,liquids,beech,vineyards,amer,zur,frogs,consequential,unreasonable,osborne,stimulus,economists,miners,agnes,constituency,rocker,acknowledges,alas,sawyer,maori,tense,predicting,filipino,cooled,prudential,basel,migrant,devotion,invoke,arte,leaning,paddle,watkins,oxley,anterior,chop,rooted,onyx,benches,illumination,freedoms,foolish,finale,weaker,foley,fir,stirling,moran,compose,nausea,comfortably,hoop,temps,clearer,floods,fritz,mover,modeled,erica,malaga,sustaining,repaired,diocese,francois,obituary,painters,thistle,tem,sleepy,footnotes,rupert,shrine,purified,striving,dire,attendant,gull,jour,mir,northumberland,memoir,betsy,meredith,fauna,cliffs,hayden,roadside,smells,dispose,waking,feathers,reflex,falcons,spurs,sion,crashed,travelled,urgency,gould,brit,eliza,graduating,rims,harmonic,darts,shin,intriguing,flaw,tails,emulator,discarded,bibles,hangs,joanna,synonyms,stranded,horton,dolce,hercules,pane,browning,angular,veins,folds,sneak,incorrectly,avoidance,sauces,conquer,probabilities,immortal,mariners,endeavor,creole,mateo,teas,settling,badger,mohammed,saturdays,partisan,pri,gratitude,impress,willy,anon,eminent,ribs,communicated,exceptionally,quilts,splits,subscribing,companions,cheques,edith,screwed,magna,sectional,fashionable,polly,tidal,ballots,hog,testify,poole,boycott,vitality,clerks,crust,bothered,traverse,vengeance,dolly,garrison,sal,barb,huns,miner,fashions,barr,analogy,insomnia,constituent,aura,cecil,sponge,sect,diner,anticipation,enduring,scarborough,regis,winters,nous,explosives,mound,xiv,backgammon,ox,snatch,mole,obs,owed,ethan,kissed,buff,butcher,psalms,rum,chefs,engraving,constituted,hamlet,clad,excursion,inverness,orb,grange,resigned,fled,enriched,harrington,brandy,swings,scion,elle,reptiles,vortex,swallowing,purses,bodily,xiii,awe,beaumont,australasia,mandy,hoods,fireplaces,requisite,retrospective,emphasizes,lizard,hawthorne,bouquets,wears,shropshire,baja,regal,safeguards,cabbage,cub,spectator,arrests,circumstance,numbering,sliced,reproductions,byrd,sidewalk,prob,breaker,curly,alberto,asserted,jealous,refinement,durban,learnt,hound,squirrel,concealed,wharf,rhythms,departures,shotgun,stimulated,chickens,langley,briggs,cheyenne,lug,surveyor,maize,extinction,unaware,discretionary,ry,psalm,scented,gowns,spying,nicholson,lied,ek,bloc,recurrent,talbot,leaks,tam,swell,obstacle,ville,mantle,chico,driveway,irony,gesture,fairbanks,parfum,armies,hy,hugs,greenfield,santos,owls,cutters,acquires,ceased,merging,plaques,breadth,mammoth,convictions,intentional,sophia,prohibits,innings,reorganization,pronunciation,concession,measurable,ami,parcels,pastry,manners,phosphorus,viper,hid,volcanic,gypsy,thieves,preaching,repeal,uncovered,hemp,eileen,proficient,pelican,apocalypse,cousins,discharges,giorgio,admi
re,nk,poured,usefulness,unsolicited,binds,unveiled,burt,titus,suffix,installment,spindle,heavens,wink,mister,rounding,inorganic,flare,scholastic,wight,withholding,foliage,nod,ocr,fife,generals,crank,goats,autographs,stub,fundamentally,creamy,exposition,rains,buckley,middleton,organise,tort,brace,novelties,gigantic,abdul,sheldon,ryder,octave,struts,ud,suppress,harding,dams,deserved,violates,rutherford,separates,proofs,precedent,confirming,garth,nolan,mach,facilitated,paolo,metaphor,bridget,infusion,jessie,organising,argus,mango,spur,jubilee,landmarks,polite,sith,thigh,paving,cyclone,perennial,jacqueline,seventeen,meats,wie,bulldog,cleavage,analysed,uma,gradual,brethren,embodiment,violating,recruited,toilette,trailing,pact,honourable,lulu,windy,punished,chronology,mastery,thermometer,cranberry,kan,downhill,vita,steer,nesting,vogue,aired,outward,whisper,ipswich,compromised,confession,deprived,benedict,vodka,molding,zaire,bricks,communism,leopard,flowering,wig,jingle,bounty,arcadia,fishes,ringing,knobs,taurus,whiskey,absurd,tolerant,stoves,enactment,embryo,ska,nora,salts,marietta,furious,iteration,vida,ceilings,dispenser,respecting,approving,unsafe,separating,soups,residing,richie,markings,moist,trina,drained,mule,cummings,cessation,append,motive,pests,seasoned,sunflower,duel,bernardino,stocked,bethel,entre,sunderland,doris,motives,reinforcement,dwight,provost,guessing,tal,mead,harlem,throttle,gong,ber,sympathetic,fridays,isolate,unconscious,bays,faulty,affidavit,messiah,infamous,pleasing,seizures,appealed,surveyors,tenacious,waterfall,sensual,persecution,petit,burgess,gaze,chlorine,freshly,saxon,cabo,rye,isabella,monies,assassination,remarkably,pointe,stall,deere,entirety,destined,marcel,lad,hulk,ora,bal,flores,olivier,portage,dwellings,informing,yellowstone,characterize,ricardo,yourselves,rotterdam,hostage,cracker,anglican,monks,compliment,camino,storey,scotch,sermons,remembers,freddie,contention,juliet,adjunct,guernsey,bangor,persia,axes,stirring,wil,haze,pits,utter,bottled,ants,gastric,influencing,rents,christy,theirs,mattresses,donovan,lax,colts,rehearsal,strauss,reputable,wei,tuck,rei,slab,lure,ren,archbishop,ling,incompatible,emblem,roadway,overlapping,walters,dunes,murders,miserable,unsuccessful,decorate,appleton,bottoms,revocation,vomiting,chesterfield,exposing,pea,tubs,simulate,medina,thankful,alaskan,friedrich,elephants,pinch,flynn,braces,calhoun,deficient,annotations,filth,moderation,worrying,outrageous,kraft,blackboard,nitrate,skates,comstock,hers,grin,footprint,tunnels,crises,trillion,comforter,cashmere,heavier,meteorological,spit,labelled,darker,salomon,globes,dissent,daly,choral,unrestricted,happenings,leicestershire,neu,contempt,socialism,hem,edible,anarchy,arden,clicked,ineffective,drawers,byrne,acme,leakage,shady,chemist,evenly,reclamation,rove,lionel,praised,rhymes,blizzard,erect,refining,concessions,commandments,malone,confront,vests,lydia,coyote,breeder,electrode,pollen,drunken,mot,avis,valet,cheng,shrubs,watering,barrow,eliot,jung,transporting,rifles,posterior,aria,elgin,excise,poetic,mortar,blamed,rae,recommending,inmate,dirk,posture,thereon,valleys,declaring,commencing,armada,wrench,thanked,arranging,thrilled,bas,amelia,jonah,discomfort,scar,indictment,apology,collars,andover,pudding,plato,examiners,salzburg,rot,possesses,squared,needless,pies,palma,barnett,ther,heterogeneous,aspirations,fences,excavation,luckily,rutland,lighted,pneumonia,monastery,erected,expresses,migrate,carton,lorraine,councillors,hague,transforms,ammonia,roxy,outlaw,saws,bovine,dislike,systematical
ly,ogden,interruption,demi,imminent,madam,tights,compelled,criticized,hypertext,electra,communal,landlords,emu,libby,seite,dynamite,tease,motley,aroma,pierced,translates,mais,cognition,cain,verona,syn,delegated,chatting,punish,fishermen,conforming,causal,stringent,rowan,assigning,dwell,hacked,inaugural,awkward,weaving,metropolis,psychologists,diligence,stair,dine,enforcing,struggled,lookout,arterial,injustice,mystical,ironing,commanded,woodlands,guardians,manifesto,slap,jaws,finn,pedestal,widening,underwood,saline,sonny,longevity,paw,isabel,sterile,botany,dissolution,pauline,quart,bison,suppressed,allegro,materially,cit,amor,xvi,fungi,phyllis,bengal,scrolls,awakening,fairies,prescribe,greed,nominate,sparkle,autograph,migrating,refrain,lastly,overcoming,wander,kona,relieved,luc,elena,intermittent,ante,vols,revolving,bundled,covert,crater,leah,favored,bred,fractional,fostering,thence,birthplace,bleed,reverend,transmitting,serie,neptune,caucasian,goblet,inventions,dea,practicable,fronts,ancestor,russians,incur,canonical,nodded,confronted,believer,australians,declines,peacock,utmost,yates,leroy,helpers,elapsed,academies,tout,gre,imitation,harvested,dab,hopeful,furnishing,negatively,residences,spinach,liquidation,predecessor,cheeks,hare,beasts,philanthropy,peanuts,discovers,discard,cavalry,breakers,quorum,forwards,prevalent,plat,exploits,dukes,offended,trimmed,py,worcestershire,bonn,prostitution,mosque,horseback,vested,terribly,earnest,homme,clancy,tory,rossi,oldham,gonzales,vor,confederate,presumed,annette,climax,blending,weave,postponed,philosophers,speeding,creditor,exits,pardon,oder,abby,teller,mandates,siena,veil,peck,custodian,dante,lange,quarry,seneca,oceanic,tres,helm,burbank,festive,rosen,alla,preserves,ingram,jess,secretion,insult,scraps,waived,cured,buggy,kennel,drilled,souvenirs,prescribing,slack,gin,differentiate,jays,pilgrim,vines,susceptibility,ambiguous,disputed,scouting,royale,instinct,gorge,righteousness,carrot,opaque,bullying,saul,flaming,apis,marian,liens,caterpillar,remington,chew,benefited,prevail,musik,undermine,omission,boyle,mio,diminished,jonas,locke,cages,jolla,capitals,correctness,implication,pap,banjo,shaker,natives,tive,stout,rewarded,athena,deepest,matthias,duane,sane,climbed,corrupted,relays,hanna,husbands,fading,colchester,persuade,roaming,determinations,weighed,ashamed,concierge,gorilla,gatherings,endure,nom,cheltenham,dickens,juniper,repetition,siberian,preparatory,fielding,dune,hee,adler,yosemite,cursed,youths,migrants,massey,tumble,stare,unlocking,missy,meade,contradiction,helium,wonderfully,dug,congenital,trojans,insanity,embraced,finely,authenticated,reformed,tolerate,lest,adhesion,tic,noticeable,cette,aesthetics,smoker,benign,hypotheses,afforded,aisle,dunno,blur,evidently,limbs,unforgettable,punt,tanned,altering,bunker,multiplication,paved,fabricated,pasture,richest,cruelty,mormon,scots,genuinely,neighbouring,plugged,tyson,souvenir,mifflin,cucumber,occurrences,marshal,anders,seize,decisive,spawn,blanks,dungeons,sailors,stony,fayette,shelving,annals,sadness,periodical,moe,dime,losers,punta,flavour,crypt,accomplishment,onwards,bogus,carp,prompts,witches,skinner,dusk,nouveau,customary,vertically,crashing,cautious,possessions,urging,passions,faded,counterpart,utensils,secretly,tying,lent,magician,indulgence,johan,melted,lund,fam,nel,extremes,puff,galileo,bloomfield,obsessed,flavored,groceries,motto,singled,alton,staple,pathetic,craftsman,irritation,rulers,collisions,militia,eis,conservatory,bananas,adherence,defended,grille,elisabeth,claw,pushes,alain,flagship
,kittens,illegally,deter,tyre,furry,cubes,transcribed,bouncing,wand,cavalier,ish,rinse,outfits,charlton,respectfully,ulster,tides,chu,weld,venom,writ,patagonia,dispensing,puppets,tapping,immersion,explode,toulouse,escapes,berries,happier,mummy,punjab,stacked,brighter,cries,speciality,warranted,ruined,damp,sanity,ether,suction,crusade,rumble,correcting,shattered,heroic,retreats,formulate,sheds,anomalies,homogeneous,humphrey,spheres,belonged,assigns,sofas,croix,cushions,fern,defenders,odessa,lore,whipped,vox,dinners,rosie,genealogical,terre,selfish,eventual,nach,mitigate,jamestown,elisa,shelton,boiled,neville,natasha,endeavour,roswell,haute,herring,unfamiliar,expectancy,deterioration,proclaimed,arid,coincidence,idiots,mona,muddy,nuevo,hitchcock,cid,neighbour,raspberry,illusions,spikes,enumeration,suche,permissible,yielded,nuisance,siam,latent,marcia,drowning,spun,shalt,ric,loch,commanding,sparrow,poorest,hector,brotherhood,milling,sinking,sulphur,wicker,balm,figs,browne,nephew,confess,chit,chaotic,alexandre,lays,principally,visor,mundo,jarvis,drip,traced,outright,melodies,myriad,stains,sandal,rubbing,naive,wien,skeptical,remembrance,detects,dragged,foreman,allegiance,conduit,dependable,echoes,ladders,prudent,glowing,alchemy,linden,sven,geographically,alternating,tristan,audible,folio,presiding,mans,waterways,aff,fractures,apprenticeship,childbirth,dumped,barre,rama,johannes,fiery,convex,richer,mop,urn,soleil,connor,northamptonshire,biscuits,disclaims,sich,restless,unanswered,paired,vaults,ahmad,tossed,caucus,cooke,pillars,katy,zoe,overwhelmed,salute,parody,compensated,lacked,circulated,soo,maltese,acorn,bosses,pint,ascension,ply,mornings,mentioning,flagstaff,pretoria,thrive,rightly,paragon,basal,persist,wilde,indispensable,illicit,liar,pledged,pictorial,curling,ares,smoky,opus,aromatic,flirt,slang,emporium,princes,restricting,promoters,soothing,freshmen,departed,aristotle,finch,inherently,krishna,forefront,largo,amazingly,plural,dominic,skipped,hereinafter,nur,extracting,analogous,hebrews,tally,unpleasant,uno,tempted,blindness,creep,staining,shaded,cot,plaster,novo,hearted,obstruction,agility,complying,otis,overture,newcomers,noteworthy,agile,sacks,ionic,stray,runaway,slowing,watchers,supplemented,poppy,monmouth,frenzy,jargon,kangaroo,sleeper,elemental,unnamed,doncaster,particulars,jerking,bungalow,bazaar,predicate,recurrence,recruits,sharper,tablespoons,supervise,termed,frauen,stamping,coolest,reilly,basque,ire,pegasus,silhouette,dorado,daring,realms,maestro,turin,gus,forte,tipping,holster,fiddle,crunch,leipzig,bard,kellogg,reap,exemplary,caliber,apostle,playful,icelandic,multiplied,enchanted,belgrade,styled,commanders,thor,waive,bethany,vance,soprano,polishing,marquis,wen,translating,frontiers,adjoining,greet,acclaim,hardship,hast,miriam,cavaliers,rollers,carleton,pumped,differentiated,sonia,verifying,almighty,vel,intuition,revoked,openness,circulating,bryce,ilo,latch,verbs,drank,darlington,slippery,galerie,outpost,seville,mira,chatter,santo,lettuce,raging,tidy,jong,oppression,bows,yielding,torso,occult,expeditions,nok,hooker,lorenzo,beau,subordinate,lilies,articulate,ecstasy,sweetheart,fulfil,calcutta,hobbs,mediator,tad,cultivated,rang,disconnected,consulate,wilkes,disagreement,strands,sicily,compost,adjourned,familiarity,erroneous,pulses,theses,stuffing,jeux,wilton,flooded,reverted,crackers,greyhound,corsair,ironic,wards,unsupported,hinge,ultima,cockpit,venetian,sew,carrots,faire,laps,memorials,resumed,conversely,emory,stunt,excuses,vitae,hustle,stimuli,upwards,witty,transcend,loosely,anc
hors,hun,atheist,capped,oro,liking,preacher,complied,intangible,compassionate,substitutes,flown,frau,dubbed,silky,vows,macy,distorted,nathaniel,attracts,bern,qualifies,grizzly,micah,hurting,homicide,await,sparse,corridors,sont,mcdowell,fossils,victories,chemically,compliments,cider,crooked,gangs,segregation,nemo,overcast,inverted,lenny,achieves,forehead,skye,percy,scratches,conan,lilac,intellect,charmed,denny,harman,hears,wilhelm,nationalism,pervasive,auch,enfield,nie,clears,knowingly,pivot,undergraduates,digestion,mixtures,soaring,dragging,virtues,flushing,deprivation,delights,foreword,glide,transverse,engagements,withstand,newbury,authorizes,blooms,soar,uniformly,todos,piedmont,empowered,asi,lena,outlying,slogan,subdivisions,deducted,ezekiel,totaling,elijah,compton,vigorous,flee,biscuit,creme,submits,woes,waltz,menace,emerges,classify,paige,downstairs,statesman,cheerful,blush,leaflet,monde,weymouth,spherical,favourable,informs,dramas,cher,billiard,aut,malay,unseen,optimism,silica,kara,unusually,widest,impotence,medley,cadet,redskins,temper,asserts,stew,hereafter,retiring,smashing,accumulate,tahiti,mariner,collier,hush,whispered,generosity,vibrating,lama,artisan,akin,raphael,lola,embarrassing,aqueous,pembroke,stockholders,lillian,splinter,ibn,preferable,juices,ironically,morale,morales,solder,trench,persuasion,practise,lodged,revolt,renders,pristine,francaise,shines,catalan,auditory,applause,trait,popped,busted,basins,farmhouse,pounding,picturesque,ottoman,eater,utopia,insists,willard,lettering,marlborough,pouring,concentrating,soak,buckingham,hides,goodwin,manure,savior,dade,secrecy,wesleyan,duplicated,dreamed,fertile,hinges,plausible,creepy,narrator,augustus,fahrenheit,hillside,standpoint,nationalist,piazza,denoted,oneself,royalties,abbreviation,blanco,critiques,stroll,anomaly,thighs,boa,expressive,infect,pers,dotted,frontal,havoc,ubiquitous,arsenic,synonym,yer,doomed,francs,ballad,sling,contraction,devised,explorers,billie,ravens,underline,obscene,mes,hymn,continual,slowed,aladdin,tolerated,quay,outing,instruct,wilcox,overhaul,peruvian,indemnity,lev,imaginative,weir,remarked,portrayed,clarendon,ferris,julio,spelled,epoch,mourning,phelps,aft,plaid,fable,rescued,exploded,padres,scars,whisky,tes,uptown,susie,batter,reyes,vivian,nuggets,silently,pesos,shakes,dram,impartial,punctuation,initials,spans,pallet,pistols,mara,tanner,avenues,dun,compress,apostles,sober,tread,legitimacy,zoology,steals,unwilling,lis,paddy,plunge,pearce,vos,sinister,burr,arteries,formations,vantage,texans,diffuse,boredom,norma,crosse,mondo,helpless,wyatt,spades,slug,visionary,coffin,otter,navajo,earns,amplified,recess,dispersed,shouted,shilling,resemble,carbonate,mimi,discriminate,stared,crocodile,ratification,vases,advises,sind,coward,inequalities,garde,dyes,viz,turbulence,yell,fins,ritchie,dresser,rake,ornamental,riches,resign,injunction,intervene,poised,barking,josephine,dread,dag,handwriting,serpent,tapped,articulated,pitched,wisely,accustomed,bremen,steaks,playhouse,superficial,suns,josef,casts,bunk,stab,sanction,dyer,effected,tubular,moi,ode,avoids,richter,evidenced,heinz,argos,dit,larvae,dyke,cassidy,kernels,mobilization,amt,wilkins,manipulated,alleviate,seam,riddle,comedies,fainter,respectful,cabaret,recession,awaited,nozzle,externally,needy,wheeled,booksellers,darn,diners,greeks,reich,armored,weary,solitary,photographed,tweed,snowy,pianist,emmanuel,acapulco,surrounds,knocking,cosmopolitan,magistrate,everlasting,pigment,faction,tous,argentine,scandinavia,minnie,genie,linn,handel,microscopic,clarified,coherence
,sensations,orphan,conferred,acp,disturbances,chandelier,embryonic,carver,paterson,delle,graceful,intercept,shouts,ascertain,veto,exhaustive,annoyed,bureaucracy,paz,stalls,fined,bien,inward,reflector,greeted,hartley,defenses,meaningless,clam,francesco,hes,georg,negligible,starch,melinda,godfather,apron,guts,ros,pragmatic,tyranny,warehouses,regimen,axel,antony,hahn,fluffy,marianne,slender,hereford,aides,forma,absorbing,cherries,gaelic,gomez,alec,distinguishing,glazed,judd,dashed,libyan,dickson,distressed,shouting,bullock,villagers,acknowledgments,ethiopian,mermaid,buds,sexes,wilder,sire,centred,confinement,islanders,ding,uncover,contested,coma,husky,conserve,bland,abatement,originator,whipping,skipping,routed,rudolph,abigail,missionaries,householder,plotting,yan,succeeding,elmer,sails,schuster,overlook,robes,sham,fungus,astonishing,graveyard,chunks,bourne,revert,ignores,popping,captains,loaf,pandora,gabrielle,stad,abel,enigma,glands,militant,jug,inferno,torrents,outset,confuse,yvonne,attaching,adept,doubtful,ratified,insecure,explosions,trunks,gareth,versatility,lothian,fem,intricate,strata,depository,hubert,proclamation,beauties,hybrids,gillian,darrell,irrespective,imposition,ensured,kidnapped,sai,cereals,outrage,poop,scrubs,orchestral,bellingham,dripping,afterward,devote,facets,musique,frightened,noises,ambiguity,booths,discourage,elusive,speculative,madeira,intimacy,hallway,whey,ripping,mei,hob,reloaded,garry,ester,annan,thriving,hampers,bragg,gracious,snail,curt,demise,theoretically,grooves,sutra,conveyed,swine,typographical,ellison,ado,trophies,quicken,werden,heron,graft,moth,crossings,derrick,mash,germ,envoy,breckenridge,pug,antoine,domingo,resembles,doorway,grandson,tat,catalina,redding,accompaniment,derivation,warden,voir,tug,margarita,clans,instituted,notary,thi,sociological,offending,forgetting,macedonian,votre,reservoirs,barlow,tyrone,halle,edged,encompass,spade,hermes,glare,metaphysical,insignificant,exchanging,pledges,mentality,turbulent,pip,pup,fortunes,sultan,masked,casing,plotted,haley,generously,amounted,icy,repression,reaper,honoring,facto,climatic,broaden,begging,wharton,sui,freddy,bushes,contend,restraints,truncated,gibbons,nitric,atop,glover,railroads,unicorn,normandy,floats,justices,orderly,wafer,puck,roofs,reefs,hover,quarantine,detrimental,molds,elias,hou,subsistence,chilled,foe,citadel,topography,leaflets,wrinkle,contemplated,adolescence,nun,harmon,indulge,bernhard,hearth,edna,embarrassed,aggressively,coincide,maynard,genoa,enlightened,clippings,radicals,penetrate,stride,catastrophe,greatness,archie,parasites,entertained,inventors,ferret,louisa,agony,marseille,taller,doubling,stupidity,moor,stephenson,enrich,foreground,revelations,replying,incapable,parte,acknowledgment,labyrinth,africans,sway,undergone,lacey,preach,triangular,disabling,cones,inversion,thankfully,taxed,presumption,excitation,salesman,hatfield,constantine,confederation,petals,imprisoned,heller,docks,landowners,sul,juno,deux,defiance,bully,valiant,constructions,youngsters,toad,breasted,banging,vertigo,unsatisfactory,fluent,rhyme,eros,aan,mcintosh,suffice,convened,nah,accusations,debated,stallion,equipments,necessities,camelot,deserted,keepers,logically,caravans,oranges,bum,presse,olga,contends,snort,occupants,organiser,vim,luminous,crowe,unparalleled,anyhow,waterfalls,obtains,antwerp,ulrich,hardened,primal,straits,upheld,wir,malt,sinai,endowed,cameo,attire,blaine,typewriter,pomona,goddard,fanny,plagiarism,milky,combs,upland,unconstitutional,adopts,macao,snaps,defends,depicts,pilgrimage,elevators,ohne,n
arrowed,eighteenth,hurst,inscription,ascent,pisa,tedious,pods,universally,chewing,accommodated,tendencies,rowland,welded,conforms,reggie,refreshments,depict,coils,callers,navel,arbitrator,prolific,nurseries,footsteps,indefinitely,sucker,bumps,frightening,wildly,sable,retarded,neatly,singleton,spaniel,somerville,worthless,git,spool,jeopardy,rovers,voiced,annoy,clap,aspiring,dazzling,cornelius,scientifically,grandpa,cornish,guessed,kennels,sera,axiom,stamina,hardness,abound,curing,socrates,aztec,confer,vents,mater,oneida,aiken,crowned,sandstone,adapting,cranes,rooster,proctor,prehistoric,balkans,dictate,joker,wiped,contours,abdomen,baden,tudor,paws,villains,poke,prayed,inefficient,heirs,parasite,shortcomings,cures,concentrates,preclude,fasting,loudly,horseshoe,zeus,constellation,recital,utrecht,freud,bedtime,thinkers,hume,reminiscent,rapport,ephesians,dope,truss,kiln,peaches,depressing,strangely,narratives,sud,skipper,gy,drains,maxima,unification,sous,testimonial,khaki,distributes,navigating,slough,prodigy,embossed,mould,jock,blasts,poorer,anglia,dyed,dissatisfied,bourbon,staggering,bismarck,hoe,rubbed,wasp,bookseller,fuss,muir,uterus,chimes,webber,aggregated,pico,exhibiting,gimme,nee,beaufort,radically,terminating,platter,chamberlain,steamboat,brewster,inferred,croft,ism,uplifting,penal,exclusions,pageant,henley,purchasers,pitchers,tracts,morally,hosiery,yt,reptile,overdue,cowan,mohawk,riots,hassan,schwarz,persuaded,teasing,rejecting,emphasizing,unbound,quentin,shepard,sacrifices,delinquent,contrasting,nestle,correspondents,guthrie,imperfect,disguise,eleventh,embassies,lapse,wally,phenomenal,civilizations,friendships,marjorie,shrub,kindred,reconsider,sanctioned,parfums,condemn,renegade,awaits,hue,augmented,amends,fullest,shafts,finer,ys,burdens,invocation,gillespie,brooch,motifs,nineteen,griffiths,invaders,edmond,volunteered,swollen,liste,grasses,scatter,steward,ito,cherished,smack,incidentally,sine,depleted,holiness,divinity,campaigning,tougher,sherlock,comprehend,cloak,pamphlet,clipper,umbrellas,priceless,mig,assassin,exploiting,cynical,toro,etched,bray,choke,underwent,comforts,appoints,keene,rachael,swallowed,imperialism,mouths,halter,ley,ike,pumpkins,shrinking,roar,novelist,potomac,arroyo,tipped,amidst,insurgents,wanda,etching,discouraged,gall,oblivion,gravy,inherit,sprinkle,stitching,advisable,loi,meme,gladstone,jugs,congregations,handing,payer,ze,beforehand,laborer,watcher,vibrations,apes,strawberries,abbas,moods,dobson,ives,soaked,abridged,palate,thierry,masculine,realizes,kahn,petitioners,constable,sayings,unconditional,vue,progressively,topping,baird,chilling,translucent,glaze,newcomer,branching,unmarried,unexpectedly,funniest,bona,scorpion,mirrored,sel,anatomical,misdemeanor,tobias,salle,infra,strasbourg,commemorative,implicitly,ewing,austen,assurances,comedian,rascal,nid,roberta,dizzy,outbreaks,annuities,slit,whitening,occupying,depicting,ordnance,verge,ransom,nomad,dagger,thorn,preamble,mor,spins,solicit,provoking,orchids,buckets,spoil,blazing,palermo,snapped,alligator,detectives,rochelle,nomenclature,abdullah,invade,regulates,rendezvous,strives,trapping,gardeners,clemens,deuteronomy,diminish,britannia,manifestations,tak,stitches,promulgated,mediocre,passports,ayrshire,invent,eagerly,damascus,reformation,hypocrisy,parishes,trooper,bun,compendium,disappears,hymns,monotone,palsy,propositions,locomotive,debating,cuffs,prosperous,famine,orally,elliptical,grabbing,jogging,stipulated,persuasive,horrors,bearer,pastors,acquainted,dependents,dizziness,ture,brilliance,nicky,originate,respe
ctable,horace,prohibiting,disappearance,morals,invaded,spoiled,monet,pickle,quaker,haunting,manipulating,tangent,tempest,petra,dominique,waving,dai,uneven,plata,plurality,warrington,adventurous,luigi,bayou,accueil,confluence,blossoms,succeeds,orphans,louder,boilers,reunions,yelling,trough,leaned,quadrant,discrepancy,slid,antioch,tonic,magnus,harrow,jig,reckless,raining,peasant,vader,qua,figuring,crushing,thorpe,ordained,hodges,saucer,chinook,passover,byzantine,tomas,triangles,curvature,rites,sideways,devious,dreamer,acknowledging,estuary,burglary,pouches,thrilling,spectacle,sentiments,ditto,nana,waiter,oddly,suchen,raft,cul,nutshell,arrogant,hermann,induces,thrift,sae,admired,stunts,iaea,youthful,stumbled,emitted,sufficiency,tempered,slipping,solitude,cylindrical,destroyer,fide,undesirable,mongolian,weakly,parsley,undue,stunned,smiths,magyar,hostility,groves,pursuits,reflux,adaptations,jurisprudence,invariably,lecturers,progressed,brow,elves,kearney,graeme,kimball,chant,turnkey,sprays,tighten,revolver,crowns,intermediary,matted,apricot,tufts,cuckold,unreliable,rosewood,parry,existent,tongues,dictator,jehovah,fanatics,coeur,perpendicular,fay,hedgehog,raves,mamma,entails,folly,wheeling,sharpe,hawthorn,mural,bankrupt,wager,purge,interpolation,adjournment,pitfalls,stationed,ambrose,nightmares,aggravated,deem,melville,cavern,ene,sumner,descended,disgusting,flax,weakened,imposes,withdrew,tart,guerrilla,spoons,persona,poser,tram,distinctions,peabody,alia,iced,faulkner,scarcely,excused,fused,madeleine,roaring,witchcraft,stopper,fibres,cullen,crested,stump,scalp,gunn,erwin,conductors,criticisms,hadley,diplomat,sylvester,melon,tablespoon,manganese,siren,clasp,olives,nino,summons,lucrative,porous,shrewsbury,bile,siegel,cara,ese,ils,hinduism,elevations,thirst,endeavors,sportsman,scratching,iodine,phoebe,wipes,fro,krone,urgently,exposes,natures,liberalism,meer,derry,suisse,frankenstein,parc,heir,phy,successors,eccentric,yarmouth,transports,amour,illustrative,prosecuted,sailed,craving,advocating,titel,leaking,escaping,possessing,suicidal,cruisers,masonic,forage,loco,hellenic,kwh,ethel,distinctly,assertions,baba,pebble,staffs,ets,hoo,denomination,patched,patriotism,battling,tickle,bandit,acquaintance,lambs,loom,blouse,heightened,chests,ambitions,feline,grub,ulcer,slew,menstrual,canals,negatives,threading,duet,intolerance,ammonium,zephyr,tearing,muffins,naar,autor,fannie,foothills,atrium,thine,superiority,gestures,nemesis,engel,confessional,cardigan,taunton,evaporation,devise,abolished,sorrento,blanchard,uns,toying,parma,wreath,plight,opium,irrational,arches,naturalist,encompassing,penetrating,destroys,prussia,lowers,cookery,nal,beatrice,policeman,cartilage,turnpike,migratory,jurors,mea,enumerated,sheltered,doctrines,seams,pleaded,pca,elasticity,cel,gutter,ulcers,sloppy,flannel,volcanoes,ridden,contradictory,misunderstood,steamer,cong,barometer,exclaimed,diem,barge,spartan,nea,crystalline,rumours,famed,brandt,riga,bengali,respite,grimm,shetland,provocative,guido,tasted,licked,banged,rufus,hopeless,henrik,safest,daphne,ame,pollock,meteor,granville,veneer,anonymously,manageable,slant,disciplined,pollard,comme,chops,broom,plainly,ibrahim,snare,shank,uphold,revising,insignia,nurture,leash,hunts,faber,plantations,factions,falmouth,humility,commentators,impeachment,acton,engages,carbide,pullman,characterised,kinder,deems,outsiders,dodd,dissolve,adrienne,deduct,crawling,modifier,muck,colombo,hoax,cohesion,reconnaissance,antagonists,bachelors,observes,corporal,ligne,wary,locust,condenser,articulation,villain,tre,o
ft,secures,leviticus,impending,rejoice,pickering,poisson,bursts,versailles,hurdles,lucie,geese,condemnation,candies,sidewalks,formidable,pun,autres,mecca,rested,paused,macbeth,abandonment,nada,bertrand,broth,wentworth,seduction,fertilizers,maison,contrasts,giuseppe,tae,improperly,nebula,crows,blooming,mace,seminole,taper,synagogue,sugars,burnham,allure,intestine,ambassadors,reclaim,isla,kingdoms,richness,converge,pianos,dol,workings,penelope,extinct,ponder,revue,lunches,fooled,smear,rigging,derives,praises,detachment,luca,caracas,lids,pore,ey,radiance,oily,quitting,ina,grover,screams,masking,patchwork,heinrich,breton,assures,joys,involuntary,allegation,infinitely,dorchester,serge,morphine,gymnasium,waldo,diese,chiefly,judah,conjecture,mich,restitution,indicted,blasting,confronting,mastered,powders,debtors,grit,slain,nearer,ancestral,mujeres,faithfully,revolutions,sei,quail,tanker,administrations,sho,rector,ballast,immature,recognises,taxing,icing,substituting,executes,originality,pinned,gables,discontinue,bantam,bianca,zimmer,earthly,conceive,forfeiture,disastrous,gladiator,poplar,ence,recourse,martian,equinox,hinder,fredericksburg,presume,weil,armchair,cecilia,strut,kari,pavel,appropriateness,tame,solstice,oats,italien,wolff,plume,sparta,calypso,pantry,etienne,italics,reversing,murderer,courteous,wilt,smoothing,billet,pretending,hammock,receptions,revoke,intruder,wagons,jennie,platte,plank,paddling,ting,interrogation,neue,longing,irresistible,pilgrims,disappearing,sau,enact,inertia,misunderstanding,deity,pruning,agra,mandolin,rolf,swiftly,claws,brightly,manly,emit,shortened,fearful,potency,ifc,flawless,peril,alessandro,breaches,resultant,nestled,hairs,dumfries,drastic,guarded,celery,reconcile,grammatical,collin,ven,admiration,zanzibar,offend,severance,somali,combating,numb,retina,maids,tempting,bureaus,voyages,galatians,flo,planters,rocco,sheath,louie,chaplain,benefiting,dubious,occupies,mammal,shielded,degeneration,listens,swirl,emery,twists,scot,intrigue,blanche,dialect,nominating,fanatic,upton,pave,coverings,danced,slightest,libre,bromley,revive,corolla,predominant,abode,savoy,vogel,insecurity,trustworthy,uniformity,conquered,alarming,dur,amused,horizontally,knitted,exploding,narrowly,campo,rampant,suitcase,embarrassment,spectators,coronado,retaliation,inquirer,dreadful,metaphysics,drifting,ritter,attends,nicer,mellow,boast,gents,respiration,absentee,duplicates,dubois,corollary,tighter,predetermined,asparagus,airy,progresses,canister,stiffness,thrifty,canning,workmanship,complexities,shan,wrinkles,illustrating,perch,craven,divergence,homage,atrocities,londonderry,hops,emmy,chez,admittedly,ruiz,angst,liturgy,nativity,surety,tranquil,disseminated,staircase,cutler,cradles,electorate,airs,reconstructed,resent,opposes,silvia,distraction,dominates,kimberley,despatch,fugitive,tucked,jericho,turmoil,gilles,dietrich,haines,unjust,markedly,fascinated,disturb,terminates,exempted,bounced,rankin,brightest,saddles,scotsman,fitzpatrick,gushing,distracted,secluded,criticize,bog,livelihood,godfrey,minerva,superseded,iceberg,caleb,christening,jealousy,plumber,hagen,squeezed,judas,valle,dole,wick,gertrude,communists,owes,scents,bertha,levied,sag,barns,covenants,peat,proprietor,lizzie,raids,solos,compartments,maj,foi,importation,mss,planter,ici,metz,immaculate,pur,reindeer,telegram,ruben,shaken,wares,rivalry,verve,charley,carpenters,spree,sunk,morley,bespoke,inflicted,abbreviated,drowned,escorted,brute,barracks,kidneys,warbler,onward,kidnapping,inducing,lancet,antelope,terminus,castings,flanders,pellets,en
closing,starred,deacon,kabul,sweeps,butch,mercure,bookcase,assembling,diaphragm,questo,chores,consignment,yarns,liv,seedlings,fortified,reconsideration,barnard,profoundly,bartender,mayfair,jag,maneuver,ridder,vanished,lair,enclose,sinners,lille,calves,defer,desmond,liars,els,sod,lacy,pharaoh,advocated,itching,alles,devotional,taft,comparatively,spartans,tourney,reasoned,lawton,degli,saith,astral,ach,parallels,yelled,wren,terence,hamper,balkan,blurred,smuggling,instincts,hutton,masquerade,deans,duality,sensational,kites,smoother,expulsion,withhold,romano,grievances,betrayed,dumps,buckles,joyful,generalization,hin,pancakes,crave,cordova,focussed,ripple,claimants,consolidating,goldsmith,inclination,measles,arcs,portman,baptized,expelled,rupees,betrayal,flourish,heed,mein,graf,hawking,divides,composing,handicrafts,healed,burmese,boon,valor,pedestrians,gathers,pawn,stitched,camille,ceases,dorsal,collie,hereditary,exaggerated,buccaneers,spleen,allotment,jeu,multiplying,empress,orbits,whence,bois,trusting,sabre,stigma,abduction,attaches,tartan,twisting,tore,eth,mimic,shielding,stormy,vulgar,pathological,hodge,trimming,emanuel,serene,obligatory,corrugated,queenstown,forbid,unhealthy,felicity,ticks,fascination,sono,experimenting,splendor,vigil,robbed,rebirth,winona,progressing,fragrant,defeating,hotter,instantaneous,operatives,carmichael,bulky,exponent,desperation,parlor,setter,monumental,olaf,fer,stirred,toughest,fil,facade,frankfort,monograph,booze,widen,adjective,disciple,cipher,arrears,rhythmic,unaffected,starving,vide,lennox,sil,hearty,triton,deus,devine,adore,entertainer,colds,dependant,thicker,weeping,chandeliers,moneys,infancy,dips,honoured,yachting,cleanse,chilly,digs,bolivar,womb,irritating,monarchy,corset,hinged,attendants,cummins,robins,booming,artikel,scandals,screamed,cramps,enid,herrera,digger,espionage,pups,avenged,norte,glade,pendulum,bounces,nehemiah,thinner,noch,licks,soto,caste,jus,daft,sampson,psyche,rudolf,angling,stubborn,diplomats,physicist,tagalog,coo,requiem,bleu,redeemed,sighed,lures,bavaria,devastation,heroine,bingham,achilles,flaps,indifferent,cadence,frosted,schubert,rhine,manifested,denominations,interrupts,rattle,insults,oatmeal,marta,distilled,stricken,unrest,cascades,druid,dunbar,outsider,ris,abstinence,nag,poodle,wunder,stefano,sitter,colder,laborers,whispers,swarm,elise,ledge,winthrop,historia,peasants,nectar,anecdotes,gilt,masterpieces,symbolism,monsoon,drown,strife,esprit,attaining,consular,treason,reckon,gaston,prosper,napier,supremacy,capillary,germain,islington,anchored,yong,vers,mulberry,sinful,cheeses,bradshaw,mythical,abyss,whitehall,malachi,ble,clipping,niece,irresponsible,pleas,softer,paralysis,devastated,tarzan,shutters,flask,arisen,femmes,relentless,ribbed,omnibus,stables,inhabited,hereof,untold,observable,gretchen,lanterns,tulips,vigorously,interfering,idols,designating,nugget,reminding,gusts,xviii,magistrates,procession,spiritually,attentive,rupture,trad,assimilation,lyrical,concorde,angelica,braided,wooded,intensely,propelled,artisans,bastards,bassett,aspiration,appended,slammed,aviator,implicated,seriousness,conformation,intimidation,paladin,ihr,nests,civilized,marched,cassandra,cath,sighted,hopping,destin,rosary,platoon,andres,loneliness,pulley,alleging,synonymous,confectionery,regrets,consciously,cours,footprints,priscilla,stimulates,darkest,implying,conducive,uncontrolled,ballads,mathew,hugely,sevilla,hostages,rosario,fruitful,franks,indemnify,satisfactorily,thinker,contestants,sia,influx,convoy,sled,pyramids,depended,conveyance,tortoise,milo,cu
ltivate,crocker,dialogues,abolition,coax,padre,lees,mari,quattro,foresight,peppermint,tod,castillo,remnants,nailed,alum,frantic,zachary,comrades,cocoon,doth,gladys,bowers,strengthens,qual,dictatorship,breezy,plow,mundane,douglass,barclay,foes,cloths,clowns,lombard,barren,histoire,plead,behaved,embargo,condensation,yokohama,vow,claudio,blot,primera,commentator,patterned,sheen,specter,imam,assent,hove,shading,scrubbed,warts,roundabout,harmed,paternity,conceal,starvation,appointing,seine,flowed,sewn,zulu,rin,barnet,rift,saviour,lapel,turk,cupboard,archipelago,peep,deceptive,undertakings,tinted,congratulate,constance,vanishing,legislator,notifying,aches,kitchener,leaked,genera,idioms,gardiner,gli,poisonous,chime,spence,mischief,argent,delinquency,cou,sentimental,unsuitable,mildly,forging,pew,waitress,caribou,merced,expansive,footing,manu,sligo,remit,bonnet,stumble,undertook,promenade,exhaustion,unborn,wendell,hammers,coasts,emitting,concur,exert,madeline,sanskrit,torre,worldly,wedges,corded,heirloom,pleasantly,portray,pero,esoteric,luxe,messengers,landings,graphically,shameless,communicates,bourgeois,yeh,napkins,unloading,bakers,selma,pears,heats,lucid,lobe,canaan,oppressed,infer,prosecute,thatcher,bret,hauling,inconsistencies,indebtedness,scramble,adversary,elsa,quaint,oswald,dipping,revere,troopers,domaine,olde,guerra,solemn,eruption,celeste,gentry,enchanting,preached,mica,cadets,lads,endured,ensuite,fermentation,careless,chemists,inca,fad,julien,dandy,narcotic,moulin,paine,incompetent,ain,predecessors,lancer,sorcerer,fishers,invoking,muffin,motherhood,wexford,ihre,dressings,partridge,synod,noticing,inte,newmarket,amigo,discerning,caddy,burrows,furnaces,zee,occupant,livingstone,juggling,wildfire,seductive,scala,pamphlets,rambling,kidd,bedside,lausanne,legality,arbitrarily,heb,luz,regulars,robson,mysticism,accompanies,summed,chopin,torches,dominating,joiner,viejo,explorations,guaranty,procure,stillwater,sunsets,cropping,anastasia,arrogance,diverted,forgiven,bleak,christophe,wenn,drudge,dolores,tramp,saliva,chichester,artemis,lessen,weller,syringe,diversions,admiralty,powdered,granger,prevailed,glacial,alleges,shredded,antiquity,zeal,valparaiso,blaming,embark,manned,porte,johanna,granular,sant,orkney,bah,vero,oscillations,sphinx,spiegel,mujer,ceremonial,sonnet,constituencies,sprung,hedges,inflated,crooks,prospecting,quilted,walled,immensely,trafalgar,relapse,descend,jakob,bolster,nietzsche,fol,rocked,rancid,disparity,malice,vom,knapp,swimmers,syllable,painfully,sweating,demolished,catholicism,trident,lemonade,absences,andes,ciudad,josie,persists,propeller,dents,anarchist,submerged,entrusted,essen,calming,intending,cromwell,drummond,dissertations,highlander,solicitations,lar,punto,survives,darcy,funnel,moons,gent,thirsty,freshness,lathe,shabby,punched,petri,virgil,gaa,marbles,cottonwood,mildred,deletions,cleopatra,undecided,startling,inductive,inadvertently,bursting,wird,halves,moulding,melancholy,observance,leaps,halen,galvanized,hoy,teapot,conveys,lends,squire,ache,counterfeit,waller,duval,yoke,resonant,mak,outskirts,expedite,grayson,sweetness,crook,rearing,davison,tins,deliberations,indifference,xix,invading,dives,loot,coyotes,stale,cosmo,levers,cog,incarnation,strained,putty,reacted,admissible,sunless,puzzled,unexplained,patsy,thermometers,fourteenth,compounded,chippewa,eldest,terrifying,climbs,uprising,gasp,swans,tories,hap,remnant,immoral,sacrificed,unequal,weaken,braxton,categorical,cupid,stalking,sturgeon,jap,piers,ensuing,mitigating,tint,dykes,revived,joachim,eet,earle,hosea,sua,haste,f
e,mutable,delicacies,toujours,scorching,coffins,jove,cashed,ushers,jewry,copperfield,chapelle,whoop,cacao,andra,annoys,heiress,godhead,canvassing,portia,shyness,angelus,subjecting,momento,escorte,unsightly,frayed,criminality,woolen,repos,levelling,shrapnel,arthurian,burgos,litany,fairest,nutter,bristles,larder,ganges,machen,truthfulness,atrocious,obelisk,valeria,claret,fru,samos,consecration,forbearance,acerca,plastered,apostrophe,stepmother,ruf,lapland,publius,ihnen,jesuits,voluminous,mottled,plu,tosses,manifesting,estella,publics,rien,normandie,scrip,rocher,inadequately,arabella,matti,throng,flemming,shunned,abandons,appetites,turnip,juxtaposition,crushes,carnivorous,berber,mince,banish,flapping,fino,frets,schism,sculptured,suivant,jemima,heretics,dogged,apparition,barristers,scrutinized,earthworks,thrashing,salome,thumping,vara,quenching,hunch,amaryllis,messes,perdition,wintering,topple,chickasaw,pungent,discontinuance,unbridled,astrologer,dut,canvass,manifestly,emphatic,susy,outgrowth,homeward,withered,baiting,surrendering,fortification,mingo,spurt,elation,wail,artistically,elma,epileptic,crag,hace,feller,enmity,sanctum,mazes,jenks,schutz,materialistic,boaz,jahre,gud,oncoming,racked,cloister,provincia,fancied,spoilt,predisposed,hydrochloric,filippo,strode,agen,marchand,disorganized,shaftesbury,littoral,denn,aggressor,giggled,consummation,fronting,zola,heute,unfaithful,executioner,titular,swears,diminutive,paring,damning,matrimony,armas,humbug,signalled,granulated,ailment,homely,perpetuity,stepfather,disprove,dinero,bernhardt,incurable,dixit,shoving,furnishes,anointing,corinna,strictest,domiciled,minx,eclipses,prise,misdemeanors,hadrian,supremely,mensch,hastened,perpetuating,prostrate,provisionally,cocked,raged,boyne,singularly,elam,gobble,preposterous,symbolized,breech,ripening,pyramidal,shee,choruses,obstructing,phosphoric,parquet,vint,pasquale,reparation,amply,damask,rejoined,impotent,spits,papacy,thimble,lacquered,ablaze,simmering,nettie,grasshoppers,senatorial,thawed,unexplored,transpired,toulon,fortifications,dens,loafer,quin,insurmountable,prettier,peu,haystack,komen,chaque,confining,louvain,etchings,impenetrable,gymnastic,tink,purr,duped,stifling,realises,vindicated,bund,invades,oust,suo,dipper,signified,talkers,exemplify,inane,byways,ibsen,justus,bluntly,bask,mermaids,contemplates,inglis,defensible,spinster,goblets,interrogated,yolks,famille,dello,magdeburg,tarnished,deducting,fie,brimming,ridiculed,baie,ionia,olden,herne,unending,abominable,rattled,basse,farmhouses,tambourine,venomous,impressively,inextricably,etexts,tapering,prinz,unjustly,rehearse,apertures,seducing,screeching,reedy,ceded,sido,imbued,fearsome,bureaux,sleds,christendom,biographer,wreak,planta,bridegroom,swarming,hava,accomplice,vivre,moni,mui,ili,servi,irregularity,gash,impeded,gravestone,pompous,sunt,subvert,hanno,instrumentality,barnaby,antwort,impassioned,mous,esau,desperado,flavoring,mouton,bau,contagion,archimedes,desecration,pocketbook,anselmo,misinterpreted,garlands,varma,mongol,audacious,midshipmen,degrades,maggiore,protestantism,soreness,boldness,schip,inhalt,otras,cassius,powdery,exportation,diverge,loosened,misunderstand,virility,inalienable,norden,untamed,eben,viel,xxviii,meddling,objecting,gib,shoddy,salutation,altercation,octagonal,mended,navigators,notches,odysseus,unfavourable,abject,heretical,riveted,quiescent,strangeness,rideau,tincture,erecting,tenderer,wirtschaft,lucian,jaar,persevere,fittest,tarnish,isthmus,giuliano,wordt,hildebrand,feu,treads,lengthen,bahn,prodigious,spoonful,sociable,req
uisitions,deftly,raucous,toasts,exaggerate,odes,blushed,saddest,grinds,immorality,addington,marcellus,ciencia,wench,celle,spontaneity,illusory,sympathize,faggot,barrows,tantamount,slaughtering,dissected,borrows,frigid,hemispheres,woollen,musick,speculating,pawns,outermost,selwyn,westphalia,augmenting,winded,poder,methinks,rambles,namur,tyme,dawning,lait,klang,congratulating,sempre,flagrant,wane,loins,uneventful,quis,scoundrels,distraught,assassinate,unwavering,confidentially,piecemeal,soll,inferiority,burnished,clothe,swelled,vides,breda,gentleness,staked,rigidly,simile,phalanx,hindering,sloped,sifting,fixe,isobel,loudness,guillotine,reverting,dionysus,leanings,groans,herbst,canker,keener,embellishment,confesses,mistresses,breakwater,smuggler,busily,poached,aram,shopkeeper,hailing,imparted,traduction,contradicting,headlong,captor,indelible,tethered,whiteness,grazed,unfulfilled,acquittal,meilleur,fluently,ascribe,stalked,deluded,trembled,gens,doon,unobserved,labored,tete,twitching,smacks,silber,troughs,unbelievers,hungerford,brothels,skilful,werk,basta,bolder,omits,endures,heeft,silencio,laski,selle,pueden,impersonation,hote,lavinia,intents,unconnected,ovum,pruned,wedded,lashed,valladolid,contentions,bickering,whaler,unobstructed,menschen,fondling,cref,laissez,ricks,spenser,astounded,permanency,smacked,personen,pallas,anatole,sleet,disgraced,philippa,royaume,grooved,resigning,appareil,alcove,termine,ungodly,felling,landes,hout,ois,disclaimed,aucun,upp,appartement,couleur,montagu,steamship,condescending,recounting,breeches,appellation,mitglied,abbe,montes,exemple,handsomely,fille,segovia,untenable,messer,deformities,necktie,huis,xxvii,tardy,disregarding,matron,seaward,uppermost,adolphus,ciphers,nibble,heim,volver,exerting,fenn,fleeces,industrious,foie,decayed,proprietorship,essere,allgemeine,umsonst,harps,hedged,cleanest,selon,teutonic,viceroy,maintenant,ingrained,caspar,swordsman,commissary,yellows,habitually,naman,maxime,majorities,rendus,mummies,conquests,brimstone,quand,trowel,tyndall,profiting,beseech,hitched,mucha,mair,smelt,fatale,margery,yearn,mismo,culprits,trinkets,whig,enchant,austere,earths,selbst,storehouse,cowhide,plumage,antecedents,diabolical,tugs,rapier,unspoiled,haughty,relinquished,assaulting,admirals,cosi,meisjes,esmeralda,captivate,terug,deterred,agostino,apathetic,uninteresting,lyre,yawning,centralization,prunes,buller,cossacks,attuned,herons,raiding,deft,seething,carne,jardins,alligators,instigated,superstructure,husk,grandiose,clerkship,concisely,sah,scepticism,quatre,constancy,plats,countryman,insufficiently,reappear,boudoir,affinities,glades,crutch,rioting,espoused,mamie,frisch,discursive,disputing,unpaved,lieber,repudiation,clarice,dimples,inhabitant,flourishes,colonized,hessian,feder,ardour,hing,erat,arbeit,levant,imitators,talkative,phonograph,speculators,sty,quelques,smelting,cuss,slats,transcribing,manoeuvre,offends,lumpy,landlocked,embattled,wisest,giulio,zin,diminution,ging,rencontres,southernmost,freckles,civilised,airship,galls,ammon,imitated,inflicting,inducement,heave,cud,gegen,proclamations,rarer,slowness,wrongfully,lessening,aurelius,pout,cognate,mire,sufferer,mores,raindrops,elegy,sanctification,sanded,indignant,godless,sloop,politeness,baffling,hurriedly,characterise,purporting,passo,taunt,ick,hinting,schoolboy,bailiff,outpouring,deflected,inflection,lettres,myrrh,infuse,chaff,defaced,mimicking,counseled,showy,altruistic,aldermen,commends,moorish,etre,bobbing,defiantly,colonels,posible,bli,cualquier,pathos,battleships,smartly,laments,spied,playthings,a
rgumentative,roused,aloof,snore,charred,industria,hij,ihrer,dunstan,bolshevik,unsound,hatter,creepers,recreations,profusely,intelligences,sorrel,reverie,colloquial,callous,oom,perplexing,splashes,homesick,gainer,ochre,dois,bystander,quell,repulsion,capitan,balk,imagines,softens,harnessed,exuberance,flocking,unnumbered,outbursts,undying,stubble,bande,amie,envie,tle,quivering,ete,euery,wein,sark,commending,sofort,flattery,soothes,millstone,mortgaged,impossibly,giorno,compels,succes,drunkenness,indulged,habitable,spn,subtleties,ministre,trappings,afterthought,damsel,euphrates,schoen,decorum,hommes,spoiling,yellowing,robs,giselle,earthenware,incendiary,selina,lenient,dined,idly,freda,devilish,aristocrat,scathing,twinkling,nichts,pantomime,familie,wanderings,decimated,overthrown,moored,peered,bores,regrettable,strangled,maxims,cama,engrossing,fere,jezebel,lethargy,komm,frolic,painstaking,goths,finality,toppled,ewes,mending,wrestled,hurtful,alternation,receding,gast,laban,neuen,paix,candelabra,outposts,treading,hedwig,downy,conformed,characteristically,canadien,goldsmiths,swarms,geographers,somos,evolutions,escorting,irregularly,oratory,sharpest,palisade,moccasin,circumcised,growled,auxiliaries,benefactors,terse,insistent,peppered,sterne,avez,utile,frightful,trite,gentler,vex,dilapidated,mien,avance,wollen,dela,stubby,sixpence,hoch,visto,impaled,forays,charon,flanks,pavia,curbed,efficacious,philanthropist,thaddeus,convinces,rede,minder,orator,abet,dien,ropa,sence,steppe,plowed,sires,transgressions,lingers,smothering,encampment,roque,prophesy,recast,misrepresentations,bards,bestial,neuf,buddhas,oozing,vicenza,richelieu,curd,bookish,subdue,raking,denouncing,ascertaining,stags,vittoria,soldered,privateer,milly,vicarious,traverses,seedy,imbedded,elysium,quenched,antithesis,envoyer,awakens,accentuate,squandered,sortie,withal,eyelashes,colliers,minuten,tilden,asti,blindfold,rampart,possessive,feldspar,facades,idealist,constables,mourns,solidified,cura,conceit,needful,locusts,thatch,cappadocia,weathers,grunts,thicket,zou,depraved,continence,treatises,renseignements,sauvage,prying,rascals,voyageurs,rudely,weeps,deplorable,smacking,aggravate,quoth,snowstorm,lacuna,chambres,rawson,levelled,incessantly,toit,apres,flaring,neues,langton,testa,lye,ditty,pestilence,rapide,thoroughfare,skiff,belligerent,impeached,hight,eclipsed,conspired,catacombs,agonizing,bottomless,sows,attributing,londoners,faut,sardis,excruciating,punctual,runaways,boniface,grafted,watercourse,propped,beaton,telegrams,staking,conversing,acetylene,calamities,viennese,fancies,accuser,bystanders,minos,ganymede,enjoined,animating,mercurial,bargained,repugnant,citron,clave,pageants,grosses,tacked,zeigen,supplant,slates,prue,corroborated,andros,tipsy,tabac,recognisable,neuralgia,timbre,clasped,pecking,womanhood,crimean,exorbitant,tish,grieved,experimenter,tallies,serpents,tampered,severally,bedstead,acquis,bostonian,whirlpools,sotto,caressing,reliefs,tassels,culpa,whiter,froth,obliterated,regalia,peerage,deceitful,storied,unprofitable,doublet,astonishingly,dein,cannibalism,menos,mera,pretender,mosses,subside,burney,conspiring,nostra,retaliate,deafening,beleaguered,jarring,baptismal,magdalen,brackish,direkt,vse,tinsel,edel,scrutinize,adverb,mumbled,commis,yams,breve,mut,worthiness,lazily,disarming,ween,woefully,kaj,promontory,eres,paye,smote,taunting,etruscan,outwards,rend,hezekiah,depravity,wealthier,onda,scientifique,disagreeable,drei,castes,corrupting,massif,murat,kine,lus,overtures,pharaohs,fraudulently,plunges,gibberish,cela,tammany,boulevard
s,redistributing,darken,dowry,chateaux,quam,skirting,adieu,kindling,affluence,passable,shouldered,hilarity,fulfils,predominance,mitten,conquerors,thar,admonition,ferdinando,perchance,rots,demetrius,precocious,rood,sachsen,luzon,moravia,byzantium,gaf,altre,repress,domini,loro,moiety,steeply,darned,locum,denser,moorland,coincidences,divinely,skimmed,lassie,congratulation,seminaries,hotchkiss,trotting,ambushed,combing,travesty,bewildering,hunchback,aback,deepens,griff,enactments,scaly,heaped,fantastically,cobham,oracles,untied,quince,lage,profusion,conjectures,glint,incitement,hansel,figuratively,sorceress,stoic,fatigued,unconsciousness,quarto,improvise,incipient,avalanches,cheval,crackling,creeds,thro,outrun,extenuating,blackberries,amiss,cavernous,snodgrass,darlings,reprieve,shanty,rapping,proffered,rowena,livid,distasteful,distinctively,luft,hares,overturning,attestation,bravado,overpowering,ravings,childless,voix,grecian,proportioned,lavishly,smite,forthright,kritik,foretold,dado,engraver,saddled,tortures,crusts,vamos,loge,presupposes,trickery,adherent,fragen,populi,astrologers,wuz,vindication,opined,falter,chatty,auvergne,philistines,retainers,tener,cherbourg,imperfection,sorrowful,unchanging,predominate,wodehouse,molested,titres,hyena,wedlock,erstwhile,vist,obtuse,caudal,sternly,chanted,jonson,klug,savour,stabs,indecency,lingered,elke,feasting,suffocation,softest,sniffed,lurks,tenses,lawlessness,recollect,alors,projectiles,heures,larch,interrogatories,dess,whet,impatiently,suspecting,dessous,aline,disjointed,seizes,reine,triomphe,thebes,doer,pandemonium,lege,ravished,discerned,seulement,icicles,fanaticism,flamed,godsend,rubbers,eder,anderen,rehearsed,alix,outrageously,bagdad,petticoat,inhabiting,unrestrained,injures,botha,pigtail,appraising,enthralled,strays,embroiled,toussaint,armistice,ellery,damped,southerners,fissures,clinched,forlorn,apologetic,absolution,inordinate,burdett,clank,individualistic,conseils,marts,obra,artemisia,evermore,engendered,manchu,disconcerting,priestley,appropriating,shinto,attentions,regno,gawd,inhaling,calmer,passers,fluttering,irishman,brier,phoenician,hundredth,firstborn,coves,armes,betraying,screech,fetches,paltry,carelessness,threes,broadside,importante,doers,sods,technicalities,thais,groaning,beckons,rejoiced,quickness,jeunesse,onze,entertains,turban,freie,ruffles,infatuation,gaiters,meisje,geben,nulla,plutarch,curving,misrepresent,tankard,xxxix,amorous,kurz,overflowed,jesu,weaned,armchairs,appartements,vagueness,grumble,wronged,politiques,fireflies,hoisting,falsified,dialectical,schatz,labours,espagne,flatly,harsher,inciting,malleable,indecision,unselfish,shem,starke,alight,epochs,nosotros,genial,langues,revolved,ifad,snowed,cachet,fortify,cherubs,armature,implicate,tolling,provisioned,sista,syriac,dived,baffles,infamy,dapper,belfry,elysian,odious,rehearsing,ellipsis,outhouse,romanesque,gobierno,vanquish,imparts,sobs,laudable,thawing,tienen,writs,omnipresent,gesundheit,hovered,devouring,renunciation,stunted,munching,fumbling,purl,lasse,banal,rears,portico,excites,placard,quartermaster,peculiarly,placards,transposed,ganga,thrace,waistcoat,vier,perusal,petrus,childlike,shamelessly,saison,tomo,cloaked,lichens,brotherly,uninhabited,sawn,unbelief,overtaking,transference,arjuna,pliable,mantua,sardines,dictating,studien,crystallized,reprisal,blighted,kunz,dissect,rumbling,perceptible,blazes,encircled,odette,saxons,transcending,snout,goodly,philosophically,directeur,bigot,bramble,persisting,bouillon,scribbled,celibacy,beaucoup,tooting,gruppe,displeased,portant,
lather,falstaff,unchallenged,strayed,commutation,spiritualism,gracia,omnia,engender,fini,jurists,cloaks,streaked,downe,chieftains,garrick,perches,scrapes,silhouetted,crouched,juana,gradation,tole,unanimity,radnor,tycho,impeding,reino,grisly,fornication,contro,sassafras,heure,tramps,assis,blossoming,barbary,irate,partisanship,wean,omelet,suh,sheaf,folios,iban,dictum,refutation,posthumous,inclinations,ledges,wenig,muchas,enlisting,roars,swindle,revolting,candied,plaine,macedon,dingy,bons,frieze,staircases,horas,multiplies,impressing,twirling,lachlan,entwicklung,sergeants,overcoat,shak,tyrannical,infinitesimal,scharf,spouting,origine,humbling,truer,limes,katharina,martians,sullen,machin,prolonging,battering,superficially,upstart,ihm,imps,divulged,shrunken,quays,reprehensible,provokes,distancia,dedicating,confessing,forbade,incursions,viele,pieced,arching,bett,gloriously,gourds,worsted,nevermore,sanguine,acorns,slung,rowers,shockingly,viaje,vagrant,empties,bight,entra,fells,morgen,lors,dormer,geht,ahab,prolongation,uprooted,talons,germaine,dualism,intrigues,cannibals,pounce,marchant,vedas,panier,mouthfuls,instilled,calyx,valour,litle,mightily,cuzco,unwieldy,perpetuated,steht,exaggerating,smoldering,peuvent,snub,coarsely,voz,withstanding,thickens,hissing,crumpled,topmost,intrude,behest,pitkin,snatching,resto,charmer,escapades,haphazard,infirm,pontiff,menage,preaches,varios,growling,indescribable,arraignment,eugen,kentish,napping,sabatini,toppling,sten,astley,bouton,excellently,ier,pails,burly,derecho,formule,hillsides,segunda,xxix,contenu,divest,mange,unfairness,abated,sohn,tiniest,mowed,sano,overhauled,caskets,lecteur,congenial,lut,fervently,sprained,harlot,ravages,choix,superhuman,conclave,humanly,altura,livia,causa,dentro,magnificence,sacramental,peddler,eterna,mystere,fayre,glared,adverbs,donc,ugliness,constantia,shavings,lusts,nunca,helplessly,quintessence,throes,malabar,crowbar,blots,nettles,scud,raked,cruised,stupidly,lashing,gaudy,merriman,swoon,buckskin,kommt,recluse,displacing,neapolitan,blacker,haarlem,quel,aspires,telegraphic,quali,frescoes,patted,puritans,gentlewoman,somme,meinen,nouveaux,victors,revels,droves,slur,laetitia,eisen,phrased,puddles,nobleman,kort,assailant,luxuriously,flatness,pardons,debauchery,wij,extravagance,buttress,entrada,junge,rigors,foregone,stellung,overjoyed,bourgogne,newhaven,apologists,fut,allemagne,vind,waddington,refilled,whiff,burrowing,strolled,estos,regen,encrusted,clashed,harpoon,sombre,machinations,hearse,libertad,roamed,approbation,nen,wut,calmness,confound,lengthwise,fatter,abstained,chasse,christen,comparaison,valeur,senile,cobwebs,tusk,hellish,conquers,iglesia,preceptor,claro,ugliest,ungrateful,renounced,clashing,decomposing,sauter,sain,postponing,israelite,graver,flees,torrid,absalom,preconceived,zug,engrave,dishonor,hoarding,bauxite,barrack,compatriots,stereotyped,conscription,maken,philosophie,minna,tradesman,embodying,unscathed,moslems,courageously,snugly,tarry,fevers,interrogate,eocene,muddled,sklaven,leonora,militaire,subjection,punctuality,hoarse,misfortunes,vexed,delos,vanquished,ibi,inquisitor,floored,inheriting,historique,plied,beaters,twang,ombre,conceiving,syrians,mij,indivisible,poetical,stagger,crusted,heraldic,belli,maladies,adjudged,adolphe,fou,wissen,turrets,pression,efter,calms,misgivings,presumes,juggler,obeys,stifled,preposition,vestibule,heer,mournful,ameliorate,scheming,disarmed,baseless,voile,picturing,dismemberment,quartered,agrippa,lioness,appendages,feverish,pavillon,couleurs,neglects,suckling,scythe,heaving,homily,pensi
ve,lado,fum,upshot,sifted,felder,fuerte,boisterous,sate,alleviated,outbuildings,icj,decanters,elevates,poitiers,goed,ferment,bounties,incursion,aurelia,thinned,consternation,hoisted,aeroplanes,auteurs,antigone,chirp,dimmed,yore,scurry,growths,thoth,halve,conversant,torpedoes,sovereigns,unencumbered,eliciting,tamed,fiends,farmyard,condense,garbled,tallow,unforgiving,immobile,indisputable,unkind,prismatic,aunty,paucity,expediency,frisian,lieutenants,philology,prophesied,backwoods,pheasants,slouch,amulets,cargoes,accentuated,eddies,kategorien,disobey,literatur,bandy,watercourses,amicable,prospered,savoury,colloquy,retorted,fiftieth,joyfully,onder,offensively,plausibility,magnate,pillage,vengeful,lunatics,satis,nol,edom,impracticable,misdirected,weer,surrenders,manchuria,playfully,barony,leyden,gruff,snatches,buxom,deciphering,botanist,deine,timidity,musty,silences,guineas,hebben,ministering,strangle,swerve,proscribed,chattering,esser,franconia,dominions,plateaus,berthold,spaniard,plummet,transplanting,onlookers,wissenschaft,phebe,easiness,trepidation,squatters,plantain,pepys,frailty,neutralized,tangier,ismael,guten,bateau,mourners,twos,passageway,reestablish,fondo,parsonage,quien,sulphide,outcasts,mortally,oot,agni,carbonic,unassuming,disillusionment,nouvel,knead,wilful,gaol,erudite,appreciably,equalize,prepositions,petits,tarn,endeavoured,enl,attentively,interred,indiscriminately,encumbered,herodotus,favouring,neutrals,conspire,recompense,colonnade,unde,eustace,abides,yuh,damen,seus,strove,ogni,dissenters,imparting,apologizing,coups,verdant,secrete,libris,twirl,noo,beadle,denizens,cockney,guppy,leeches,convoys,manoeuvres,shapely,rooks,shuddered,stelle,ornamentation,lynching,sommes,perdido,dictatorial,uncomfortably,defenseless,glean,amory,ander,edad,gratified,participle,schlegel,watchmen,galleon,travaux,eten,enim,chafing,betrays,assyria,inwards,corsican,libertine,immeasurable,esthetic,testator,distaste,offshoot,smithson,resolutely,friendliest,uttering,jacobus,construe,algemeen,mourned,despotism,flotilla,fragmentary,anjou,omniscient,gladness,frisky,generalities,condolence,siddhartha,brightening,inimitable,ineffectual,armorial,poppa,thickly,blossomed,cistern,tableaux,latins,phaeton,fecundity,malle,caliph,dysentery,soir,grenier,funnels,pasty,cuffed,peau,tumult,defoe,curate,donned,wilks,allegorical,monotony,reve,ohr,lucile,amazons,manon,unabated,plante,curzon,wohl,marksman,philosophic,denna,troubadour,volgende,truest,hypnotized,voitures,rudeness,felled,alleen,tinned,concoction,flay,patter,seinen,tortoises,roxana,pli,crone,possessor,wintry,gode,admonished,wickedly,laver,shamed,eluded,incriminating,unsealed,misinformed,tambien,journeyed,presenta,sett,magnificently,unpunished,albatros,apostasy,bereft,lucretia,hibernian,vitriol,vicarage,vestry,gleefully,mercies,paralleled,entwined,fosse,taille,resplendent,thrall,barked,cormac,sju,unum,scorned,relapsed,thicken,sanaa,ceci,selene,artfully,pilgrimages,fides,blazed,edda,wheelbarrow,maimed,chor,dernier,duda,pater,meno,mused,jamais,puffing,besten,wielded,futurity,quicksand,trestle,souffle,rebus,proces,sentinels,pardoned,wormwood,sighing,harz,awed,shrank,conceals,glycerine,staub,abolitionist,foamy,aventure,meunier,unpainted,knolls,unwell,unconscionable,wedged,outgrown,evading,commemorated,lurid,annunciation,rumoured,idee,coalesce,brougham,windings,strongholds,burglars,shrimps,stirrup,seria,creo,dictionnaire,finde,flopped,elbe,whitewash,subservient,suivante,stubbornly,benediction,disobedient,seamstress,immortals,euripides,uninitiated,mikko,mond,zwart,briskly,a
fflictions,buon,zon,weariness,ascendancy,affront,telephoned,treasuries,energetically,tinge,fingal,defection,murmurs,slog,gav,dispersing,tractable,lapped,syl,petitioning,clawed,einmal,winsome,presuming,englishmen,equaled,flog,notte,deferring,quills,oud,practises,unattainable,lengthened,dramatist,grayish,hallucination,exhortation,arousing,hippopotamus,wile,forgeries,chartres,recline,maitre,remembrances,disturbs,chums,determinate,heeded,telephoning,sophocles,humiliate,erfurt,wasser,tomes,ingen,accompaniments,clairvoyant,shriek,ferocity,quoi,withering,procreation,xxxi,exasperated,eerste,groping,soule,pinnacles,miser,scaffolds,reprisals,culpable,unserer,asunder,qualms,unharmed,sheaves,tritt,godmother,impresses,lidia,plusieurs,buttoned,sprouted,armoury,marshalling,longue,omelette,disintegrated,forgetfulness,muerte,stilts,samaritans,knocker,underfoot,roofed,jinn,nunc,primeval,sakes,horsemanship,aviators,destinies,jure,sherbet,nutritive,hurrying,helden,tepid,opportune,intuitions,dissuade,hemmed,personified,cornice,smock,musket,beautify,tannery,sooty,buckled,purveyor,kindled,provencal,schein,stairways,methodists,bourg,pretence,questioner,repute,nakedness,scabbard,covet,debe,rippling,mony,nelle,rationalism,wistful,admires,hissed,overpowered,pervades,mele,tirade,elucidation,prongs,fumbled,acte,confided,mumbling,abstaining,giotto,punkte,lancers,heimlich,waren,confederates,stretchers,demosthenes,warum,avait,devonian,infinitum,justo,antti,ointments,tugging,opulence,appomattox,bentham,coursing,beschreibung,patrician,zacharias,melodramatic,effet,inexperience,palabras,aantal,rime,casement,kalle,serially,gefunden,apprised,thoughtless,comparer,goad,parle,muddle,levites,christus,blasphemous,unaided,candidature,clapped,fatherland,evergreens,recede,dears,willkommen,spry,objets,toki,maggots,calor,hominem,tints,waver,handkerchiefs,punishes,salut,acquiescence,disaffected,manors,chronicled,laure,inundation,earshot,omens,brule,transfiguration,punctured,coughed,repaying,filial,mocks,niggers,refrained,shallower,durer,patriarchs,respectability,commode,overbearing,townspeople,adoring,trodden,reaped,bequeathed,grumbling,elude,decently,metaphorically,tripe,glitters,ahmet,austerity,mitte,informe,enjoin,dazu,boyish,egotistical,neared,claes,rostov,diverging,estoy,uninvited,irkutsk,trappers,aniline,tuk,spilt,forgetful,conceding,brightened,inconveniences,maun,rigour,evinced,uneasiness,afresh,taal,bunks,ducked,situate,sowie,escapade,loomed,egbert,hungarians,clamor,abdallah,hond,pews,workhouse,handbuch,unorganized,whalers,smuggle,laboring,nooks,wud,autocratic,titania,broder,shyly,stewed,disguises,stowed,unmanageable,denunciation,squeal,ducking,throb,scorch,perusing,duels,villainous,caius,pythagorean,steadfastly,abstention,genealogies,ruthlessly,falsify,swagger,flicked,emigrate,arbour,accomplices,nonproprietary,gebraucht,toothless,frankincense,commendations,comprehended,bravest,crevice,papel,telltale,typewritten,progenitors,forges,loosed,madcap,neigh,evie,casimir,persecute,voracious,foret,rescuer,massacred,signification,quarrels,remoteness,dominus,botticelli,balmy,hele,splinters,kleiner,epithet,blonds,ravenous,mongols,camphor,savagery,ober,navigated,dieppe,mies,pretensions,thunders,prins,diogenes,comings,danke,farthing,crevices,wringing,tearful,betwixt,florent,unmistakably,unu,massed,plucking,slavonic,reprimanded,rebelled,thunderous,rolle,encloses,sorties,revives,toleration,suitors,minutiae,deviated,sleight,burman,skirted,coachman,bigots,reappeared,comprehending,reckons,inexhaustible,canny,fainted,pianoforte,rifts,winking,firmamen
t,hovers,thoroughness,confessor,gooseberry,aimlessly,pronouncing,agassiz,dazzled,inborn,manera,ould,consuls,eure,doria,newness,ascetic,bearable,russet,specie,hothouse,incas,skein,virginie,mettle,ojo,endeavored,matin,demonstrative,seis,detta,bigoted,discordant,lilacs,levying,elles,oriel,buoyed,malady,brahmin,grandsons,tempers,quinine,thirtieth,sige,grog,fester,permeated,retards,resentful,headlands,saintly,oude,aught,cornelis,adjuncts,jeweller,wooing,conjunctions,embellish,cordes,moonlit,intercepting,denounces,besser,wegen,dienst,corks,obscuring,tages,nullify,corroborate,envied,chins,runt,nursed,loathsome,cosas,althea,dando,icebergs,sacking,settee,driest,scipio,stealthy,flaunt,mistaking,saxe,dyspepsia,tryst,cede,annihilate,candidly,honorably,shifty,ello,deceptions,snorted,signe,shivered,teem,replenished,assailants,degeneracy,giovanna,consummated,cosimo,cotes,obstinate,farquhar,retrace,revolvers,lurch,gregarious,allee,oor,nightgown,bombard,missus,mystified,drooping,diable,inconsiderate,swirled,darted,warlike,colons,supplication,fretted,gauged,suet,overhanging,impropriety,maligned,thackeray,nought,barbarous,grandi,olly,diu,scepter,writhing,enticed,schmuck,gasps,exclaim,greve,vestiges,rustling,recaptured,marauders,spars,howls,answerable,inky,ock,sneer,allay,derision,zog,dutifully,octavo,jerrold,maddening,plundered,damit,henriette,decry,buen,devant,conspirator,luring,gallantry,hewn,whisked,pericles,desertion,rumania,yow,wherewith,siliceous,mund,circulates,signore,coldly,envoys,restorer,staves,coldness,existe,friesland,orden,riviere,gusty,brazier,bayreuth,sonntag,semaine,godliness,docile,maliciously,vole,cantons,siglo,enveloping,piedra,subito,tangles,meanest,hollows,luckiest,officiate,mumble,espacio,oppress,grandfathers,usury,russes,greedily,vizier,ojos,nostril,tombstones,wavering,barbarism,vienne,alway,surmise,blanch,inscrutable,campagne,syne,xxxii,saluted,protectorate,hieroglyphics,materialist,landlady,blameless,amalia,absurdly,garnished,fernand,corporeal,passivity,partiality,circumscribed,steno,disposes,berta,emanate,rummage,headstrong,plies,scantily,waar,befriended,professing,nestling,piedras,immortalized,leper,animus,dimple,noblest,supine,bloodthirsty,squint,vitals,lamenting,benedetto,vindictive,overtook,goe,palast,triumphed,scanty,difficile,vagaries,undaunted,lucan,hemming,nuevas,defiled,faltering,saracens,tisch,eke,conceited,denys,naissance,laymen,shopkeepers,mortification,combats,indulgences,tard,fattening,drench,digesting,cupola,hund,kommer,canst,idleness,lunge,mahmud,minuet,entombed,fers,diverged,spouts,pontifical,glided,sleeplessness,iago,axed,overdone,socratic,revulsion,rosamond,schwarze,criticising,porpoise,nowe,oligarchy,psychical,rives,houten,fanned,berge,wagging,germinate,chrysanthemums,misdeeds,acto,earnestness,wetted,undercurrent,steerage,granary,befitting,whitish,irreconcilable,giveth,concocted,essayist,epicurean,blacked,refit,boite,unwashed,detaining,shod,oratorio,befall,appurtenances,wearily,northernmost,trollope,enchanter,unscientific,withstood,sandhills,heaviness,knapsack,animaux,calcul,consciences,inflected,linseed,caisse,staccato,dels,agamemnon,dodged,refusals,outrages,cuneiform,footstool,dopo,uncircumcised,emblazoned,mettre,wrangling,dorcas,confiscate,bloods,odours,mongrel,forewarned,degenerated,eventide,impairing,dispossessed,meagre,mopping,iver,fantastical,dorf,yama,laatste,chintz,nebulous,slink,lineal,droll,honouring,grenadier,anachronism,methodically,stiffened,athenians,hautes,aleppo,whimper,whomsoever,viciously,fiddlers,endow,raum,indistinct,counterbalance,razed,anza
hl,invents,loungers,wilberforce,manus,tenfold,scoured,schule,carley,knotty,stewardess,furthered,chancel,inexorably,mitglieder,worships,ironed,inhabits,domestication,olof,japon,appendage,geographer,omnis,naphtha,clairvoyance,frente,aeneas,narrates,girdles,heartbroken,parola,lameness,offal,smithy,dawns,frais,couverture,staid,encircling,verte,wove,pithy,caressed,infinitive,hysterically,incantation,blissfully,shirk,pangs,monsignor,fulness,commande,domestics,unpretentious,poachers,galvanic,narr,joven,parlance,lethargic,drunkard,conveyances,steinmetz,cowper,bronzes,essa,knell,profited,flavia,startle,algernon,exterminate,heikki,exalt,nein,zal,interludes,jahren,bide,suitor,russe,bevy,gravelly,inconspicuous,juste,wisps,urbane,hoek,nebuchadnezzar,diffusing,stupor,gratuitously,aimless,parfait,flit,quietness,accede,sicher,overshadow,xli,principale,turnips,statuette,theobald,dwindled,dispenses,fertilizing,ower,narcissist,sextant,falsehoods,swampy,euch,wast,obtenir,donning,cecily,sappho,estancia,wurden,fama,lustful,guano,presbyterians,worshiped,duque,autem,rebuked,cloisters,luella,presumptuous,toothache,presage,boars,afore,dour,moistened,kegs,unadulterated,reciprocate,rade,quia,begat,propelling,ripen,suffocating,athos,grasse,cinq,xxxiii,brawn,frowning,gaius,matchless,boatman,unconcerned,dood,orthography,conjured,assyrians,selv,vaulting,fonte,gossiping,freshen,tugged,gog,outdone,detest,paraded,trifling,undergrowth,enamored,carlotta,ceux,cuatro,methode,ulterior,puro,heracles,whirled,passim,thei,gebruik,vraag,jovial,scoundrel,romany,xxxviii,duplicity,meddle,exaltation,handiwork,andras,joyously,heaping,strident,oration,grunted,riche,pilote,wampum,dreading,humorist,nourishes,vite,cun,combative,winked,unhappily,rube,chronometer,squaring,wring,apparitions,shrieking,graaf,erst,scurvy,peacocks,ophir,wouldst,pocketed,enormity,coarser,hypnotism,oeil,dissociated,exclaims,ceaseless,emblematic,lerwick,fertilize,disengage,commonest,daj,unreserved,lessens,judicially,vend,smattering,taunts,stealthily,ripened,cleverness,roped,sorcerers,clang,sardinian,waltzes,sunlit,attests,parched,peaceable,achtung,stanzas,infuriated,dismounted,incongruous,kindest,stam,intervenes,vieles,bonnets,bared,frenchmen,callow,edicts,lemuel,inattentive,transmutation,sweeten,confide,voiceless,sombrero,isidore,headdress,nuestros,tannin,limite,boughs,naturel,overseers,presentment,sprigs,amiens,diez,prudently,foresees,patronizing,presentable,pales,dais,adornment,precipitating,hearken,insolence,blockhead,einige,patting,hippocrates,elaborately,lundi,gaslight,presides,divested,pith,eaux,transvaal,gaff,disintegrating,folie,frock,bleue,flambeau,fuming,veel,chattel,wrest,forgives,waterless,effectual,unimproved,paddled,inkling,vigils,schoenen,garcons,gauntlets,patria,blacksmiths,menor,ploughing,timon,parsimony,typified,darting,ashen,blunted,snarl,comptoir,echt,pained,inexcusable,laud,mutterings,precipice,geschrieben,recalcitrant,wos,thoughtfulness,harshness,ailes,neuve,limping,darum,utters,processions,gluttony,kneading,etwas,sait,templars,nineveh,mesures,enquired,aphorisms,compleat,consumptive,dalmatia,noisily,readjustment,unaccountable,weise,trickling,commoner,reminiscence,pouvoir,yeux,fui,waned,assented,overcharged,pucker,sanctify,messrs,insolent,octavio,portes,finis,beastly,fortresses,matrons,thun,gawain,guinevere,heresies,annihilated,tardiness,mangan,mose,specks,futur,incredulous,dere,calvinist,suas,buckler,peal,asunto,adroit,dilettante,georgiana,ecstacy,peasantry,oppressors,boeken,corns,faring,dama,unos,pinkish,blurted,tutelage,merited,hacia,peculiarity
,decrepit,encroaching,solemnity,equivocal,lieb,amass,maketh,ihrem,disengaged,distilling,effigy,saloons,assailed,incensed,zachariah,veneration,broach,miseries,personification,partes,scuttle,rougher,supplanted,sardonic,aghast,raiment,disused,vetter,stooped,dower,andalusian,wordy,feudalism,achille,magister,bolting,lumbering,fourfold,forgave,antonius,indien,replenishing,immemorial,indwelling,seh,jaunt,genere,ipso,quartier,wallow,unabashed,haf,homeric,overpower,expounded,downpour,dumbfounded,cubits,outlast,frothy,macedonians,labouring,pouvez,nothings,kommen,allgemein,colonist,sorbonne,rares,colla,philippi,adduced,agli,unrequited,mangle,alludes,theseus,commuted,medan,saracen,annulled,covertly,dalle,rapped,foreboding,fortuitous,autumnal,xxxv,sepulchre,kunt,despotic,dicky,beholden,celui,apostate,enda,faltered,queda,entrar,sicherheit,gorse,louse,wilfully,paralysed,tillie,distanced,vespers,scylla,vats,urchins,implore,kindle,pricks,tenements,tithes,thinnest,sipped,mando,pulsation,hitching,xxxiv,obediently,calvinism,milked,vesuvius,disembodied,aylmer,scoff,confidant,nape,disparaging,impolite,bataille,oia,domine,sluice,darke,whistled,furor,austrians,craves,soiree,trouver,enslave,dimanche,grimly,espouse,casks,conjoined,cabled,muchos,lightened,spongy,verner,specious,threshing,infliction,frederica,entranced,deprives,onde,scimitar,holz,uninterested,cavalcade,adulation,loitering,dastardly,ludovic,avarice,sangen,butchered,pointedly,ouverture,rustle,excitable,hermanos,alluding,frere,insipid,unfathomable,ingmar,holiest,arbre,effeminate,vainly,straying,venereal,mercifully,blatt,pansies,acceded,dregs,obscures,millicent,foresaw,befriend,anker,malign,abortive,embarkation,varnished,zarathustra,valent,knoweth,anemones,sacre,hunched,buzzed,pickets,astringent,soothed,vins,premeditated,cherche,aucune,pueblos,sentimentality,tenable,jumbled,triumphantly,leva,vergessen,scolded,fetters,vulgarity,magasin,perpetuation,tafel,pliny,sewed,jubilant,sangamon,continuo,welche,silesia,staat,amputated,reappears,enquiring,masha,redden,kreis,faccia,gae,sobbed,omnium,copie,snuggled,surest,bribed,alarmingly,kosten,bloodless,basle,sigurd,tute,obliterate,dort,perron,pestle,falsity,sapling,elapse,myne,enamelled,torments,tortuous,oiseaux,seafaring,mooted,repented,infirmity,corydon,selfishly,drudgery,pacha,shrubbery,navies,impartially,imperfectly,slanderous,interminable,ancien,soins,indomitable,unseemly,vix,godlike,scrambles,arbeiten,merriment,rotted,thetis,repulsed,garni,brickwork,soulless,abbots,frontispiece,vivacious,bloodshot,salutations,pela,dogmas,forsooth,geordie,orestes,deathbed,indefensible,brutish,trill,venetia,melchior,xerxes,poudre,ramparts,disband,symmetrically,reek,hearers,frigates,availed,externals,principales,damsels,spielen,monotheism,menelaus,morsels,hatte,skirmishes,congratulatory,zuletzt,melodious,baited,veined,kens,norwegians,imitates,conjugal,boldest,hafen,flaubert,enunciated,strictures,flinging,ferme,discouragement,werke,vesper,parapet,filles,usurp,gerade,traduire,peremptory,unrecorded,seiner,gallia,hayne,lorsque,fronds,interposed,jugglers,veri,dessin,weet,naively,nominative,cleaves,doivent,avenging,ploughed,severing,ety,hev,cremona,martyred,afflict,crags,mimicry,intersected,tomkins,winced,literati,trotted,hungrily,scold,chirping,utan,tress,vaunted,astride,nostro,ruy,emancipated,ordain,rapt,wirt,sawed,receded,emboldened,pessimist,sedate,stammered,supposes,genteel,engulf,huguenot,epicurus,gouverneur,upu,hankering,normans,enumerating,toiling,spiteful,governess,alternated,colander,croak,abhor,boek,inexorable,chercher,harmon
iously,bijoux,worshiping,gewicht,coolly,accompli,wann,vieille,ellos,hecho,verry,rowed,elfin,ingots,ridding,tegen,troppo,meads,exhaled,demolishing,pratique,calabash,brigantine,zeb,fitzhugh,rioters,persecutions,arriva,cramming,chuckling,disfigured,mers,chios,muro,oreille,transcended,xxxvi,cuerpo,tiel,faintest,bleek,adela,genitive,civile,haupt,testy,physiologist,imprison,repelled,abend,eran,quem,plundering,abhorrent,rebellions,sympathizers,scribbling,phineas,emissary,inhumanity,wem,belittle,repudiated,divina,leonie,sympathetically,permet,elis,liddy,dabei,rollicking,offhand,geraniums,bashful,doze,currants,absolve,conjectured,grandest,kinsmen,lier,welk,shipwrecked,doen,tacitly,dint,reverberation,quickening,waal,mistook,apprehensions,aunque,celestine,schoolmaster,impressionable,gingerly,apologised,riven,taketh,cornfield,fretting,fetter,jeers,manufactory,jarred,theorie,armen,bewilderment,loveliness,ministered,idiomatic,scalping,slav,attics,wilhelmina,hermits,gullies,prerogatives,banishment,tempering,kampf,fallacious,vestments,morsel,leniency,scrupulous,woodsman,bocca,dicta,meisten,aubert,richtig,clumsily,catholique,turpentine,ells,cussed,evaded,thickets,clink,personage,cavallo,vender,daar,bouche,delinquents,furlough,angleterre,snarling,samedi,creaking,bequeath,subjugation,gape,clase,unquestionable,prendre,irritates,whigs,despatches,titian,arras,fathoms,printemps,physic,nuptial,thickest,bulbous,whist,mieux,darauf,expound,eget,exhilaration,ziel,lordships,chanced,fastenings,ketch,treeless,adores,aground,splendidly,feuille,inattention,discolored,traf,sinning,jouer,forestall,vater,moselle,gnawing,crudely,saplings,profuse,dispelling,attainments,gane,couched,bestows,sone,particularity,knighthood,blesses,dure,sickened,tali,canteens,thoroughfares,donatello,penniless,abrogated,druck,kingship,puis,manes,relapsing,arcadian,claud,swart,eschew,vastness,precipitous,detachments,arsenals,hoofd,tramping,vieja,thereabouts,bloed,resultat,betrothed,pourquoi,dispelled,pierrot,duca,sameness,scruples,gloved,bete,dowdy,clamoring,aguas,visitations,recklessness,stirrups,intimated,allspice,squirming,thunderstruck,pleiades,surreptitiously,finery,langen,eugenie,sequestered,hesitating,stoops,stiffening,scrutinizing,allude,sprawled,interesse,tomar,courted,condoned,unsavory,deservedly,blackbirds,vowing,plying,gangrene,purplish,stille,enliven,hollowed,graven,lengua,craved,fracas,envelop,dismount,grudgingly,quae,bole,believeth,unafraid,stamens,omnipotence,irresponsibility,zelf,seaports,conscientiously,boomed,jussi,joust,grander,shackled,weedy,sacra,ipsa,grope,suomen,echte,brightens,muertos,jailer,gleich,gladden,sarcastically,tuft,quickened,reverent,braved,jaune,joli,beckoned,unquestioned,scrawled,savagely,usurped,monstrosity,certains,ravishing,grumbled,disheartening,nobis,stolz,unavoidably,blest,govinda,menial,clayey,delighting,vielen,conjuring,dutiful,absurdities,cabeza,ony,gordian,edification,flinch,xxxvii,despot,affaire,insincere,inger,vuelta,beckoning,vivant,vendre,ignis,begone,lucidity,feuds,toque,wille,primi,hiver,lateness,dier,nunnery,forefinger,rudiments,erwartet,heathens,celibate,simul,clatter,werd,faultless,awkwardness,praiseworthy,mosca,seigneur,ails,frage,vapours,jij,delphine,bruder,remiss,languishing,entrails,erreur,cossack,thrashed,topsail,modicum,malte,solange,ethiopians,rajah,persuasions,steppes,sheathed,derided,encroach,correlative,maire,diametrically,fasted,eunuch,algunos,gazes,virginians,negligently,sistine,higginson,hadden,unmoved,glum,perplexity,particulier,sabe,sulky,guarda,skyward,woeful,grund,droop,neque,dis
lodge,voyageur,waded,flore,unacknowledged,quietest,carven,aptitudes,bonnes,confusions,fara,alimentary,wus,republik,encroachments,ineffable,hearer,awakes,republique,generis,zit,probity,formas,grubs,unflinching,murmuring,gaan,jungen,kop,triumphal,affable,hijo,worshipers,avons,flail,adulterated,nicodemus,ardor,wissenschaften,veo,missive,ascends,splintered,transacting,vus,nomine,busen,loafing,talus,republicanism,foibles,cose,choses,squatter,waldemar,colourless,unyielding,flabby,enlarges,apace,doktor,harbored,bulwark,stringy,seront,sonorous,breastplate,draughts,heaved,lazare,uel,fashioning,churned,correspondance,dappled,gallic,tacking,feigned,dross,solidity,doge,indecisive,recurs,dripped,epicure,levity,journeying,dito,oppressor,metrical,kopf,immeasurably,tussle,fiendish,glorification,wayfarer,arabians,expanses,nuits,dervish,irrepressible,leider,joppa,wilted,emoluments,egal,conned,mutes,outwit,magnesia,patronize,impassable,serf,koning,buries,vobis,signor,phlegm,reste,freedmen,obliging,hermetically,gravestones,uncommonly,nudged,inhospitable,dissension,intermingled,dwarfed,langs,asters,surmounted,elspeth,salutary,bringt,frosts,ached,defile,odio,ansehen,effectually,unprovoked,apocryphal,pallid,sulphuric,antipathy,atone,douce,storeroom,theodora,paler,lhe,wereld,offing,infest,dampier,hardens,frisk,alister,expelling,obliges,pertained,beneficent,luxuriant,mulatto,plausibly,concubine,complimenting,courtly,dampness,zusammen,platitudes,pois,porphyry,deviating,taunted,ernestine,bubbled,tienes,korte,mortified,upturned,cordage,hobbled,loath,gagner,nibbling,unsophisticated,vexing,longa,digression,astonish,dynastic,cognizance,piquet,loveliest,nearness,vif,procurator,plaintive,exult,claps,disreputable,seraph,dressmaker,fehler,publican,hoar,movimiento,kreuz,rebuffs,reichstag,woche,handmaid,oir,chemises,consuelo,impostor,nomen,ponderous,maisons,scrupulously,plaisir,intruding,baptize,fatigues,asaph,princesse,franche,plucky,dessins,eusebius,untidy,loggia,tribesmen,subsist,tuin,augen,beholding,scarfs,leve,shallows,ersten,unjustifiable,growls,sported,quaking,refraining,commingled,coasting,logement,kindern,conciliatory,stiffen,showman,officiated,distemper,subterfuge,jede,aspired,mathilde,pues,lazaro,mouvement,beispiel,penitent,toyed,anglaise,lamentation,tunc,extol,patrimony,belgians,knave,functionaries,croup,broadcloth,disuse,reeled,quire,goeth,fascinate,garish,baronet,bombastic,francie,scoffed,thieving,minde,thinke,snarled,unearthly,predestination,verbindung,regulus,vidi,trouve,rapides,reviled,coverlet,lustig,bringen,fearfully,musketeer,fiddles,furlongs,fens,ancienne,arraigned,liquide,tanz,whitewashed,gilding,twining,explication,violette,humanely,jungfrau,verdad,perrine,gaiety,alten,uttermost,aristophanes,letitia,overthrew,lave,frowns,fabricius,sheepish,diferentes,antic,abed,edifying,dreadfully,aun,sadder,ravage,contemptible,unfailing,fowls,untoward,gloster,venu,clergymen,fiel,endeavouring,dislodged,casse,obviate,juster,genom,ueber,primero,saluting,beguiling,bayonets,trompe,flavius,gie,playfulness,confluent,orde,deel,lernen,husks,beckon,raved,herren,anfang,jewelled,reaps,fatto,traum,premonition,recut,sureties,montre,grunting,baubles,personages,actes,exigencies,marveled,peloponnesian,gotha,tasso,waffen,cultivator,nihil,quintus,crucify,unsaid,fonctions,untie,instigator,girt,annul,lanky,illa,blushes,shewed,outdo,sycamores,truant,shrieked,ermine,corroboration,juge,circe,capitulation,aspirant,germinal,vindicate,repelling,gesucht,fallible,pantheism,strutting,incalculable,tijd,soliloquy,mammy,beaks,caresses,quello,indolent,u
rsus,banns,thistles,idiosyncrasies,inducements,ennui,abetted,expending,ista,sweltering,purer,hedgerows,narrowest,disapproving,meses,interrogative,squealing,feverishly,sneaked,obras,drowns,nostri,persuasively,walloon,squalor,panelled,ossian,chaplet,narrate,peleus,ebon,hesiod,maman,bleat,glorifying,gleamed,valiantly,steeds,elli,infallibility,voll,altes,franciscans,comport,malheur,overdo,ragusa,sette,radishes,deeming,flaccid,eum,putrid,unguarded,prodded,fasts,sterner,tras,womanly,surmised,northwards,tiu,mayest,judiciously,worshipper,diderot,ruts,regretting,scolding,bosphorus,dimpled,massing,offen,leathery,hjem,caballos,grimace,bribing,unbecoming,bridles,rinaldo,dejected,vosges,comely,prow,sprig,apulia,squander,swarmed,wields,dragoons,brune,landholders,cradled,dreads,spurring,sollte,plaything,pander,stamm,abominations,viene,reestablished,strangling,cultivators,insignificance,deceiver,helle,sputtered,faites,merrier,simples,ruggles,miel,subsides,nobler,michaelmas,bildung,howled,blanched,allemand,unequalled,cicely,temperamental,dally,malays,nauseous,brandishing,wags,chronicler,allem,fais,disproved,justinian,lutte,dobbin,riz,coquette,menge,remarking,cobweb,punctually,unwillingly,cadeau,undoubted,formless,shipmates,englische,plaats,shorn,doubtfully,typhus,reticent,welter,lande,exertions,insel,sprachen,eins,retentive,gerda,plodding,deserter,rending,gaillard,consign,mantles,neatness,adornments,britannic,becher,unbeliever,parading,gamin,confederated,lume,overwhelms,embankments,quanto,speculator,madmen,listless,wheaten,deprecating,faggots,ducal,downcast,tedium,seamanship,gascoigne,pomegranates,sooth,knie,sportive,hewson,aout,turan,undeserved,principalities,aider,excelling,misadventure,meiner,rond,dramatists,servile,rickety,enchantments,fuori,secondo,figura,prosaic,diadem,pani,outa,bedeutung,sincerest,sagen,tittle,imprudent,keer,trou,nannie,laat,deliberated,snubbed,suffocate,applauding,epithets,toch,floundering,preserver,revolts,espy,deren,hallow,wharves,kunde,canvassed,chastisement,unmitigated,whined,sashes,assail,flirtation,unterhaltung,courtiers,carboniferous,brillant,equanimity,agitators,venerated,curs,neer,assimilating,proudest,subjunctive,harun,perishing,inaugurate,slavs,libres,noiseless,cayley,worshipful,geh,spurned,selim,chastised,zich,forethought,viscera,excitability,madder,exterminated,mette,bronzed,grimy,lascivious,ille,dispassionate,bonheur,charmingly,glimpsed,partaking,firebrand,deprecation,intimation,chequered,glimmering,alphonso,falla,disbelieve,brevet,darf,troyes,exterminating,revolted,bunched,besoin,scrutinised,allez,herded,athanasius,gemacht,deliberating,humaines,londoner,aeschylus,plantagenet,episcopalian,zwar,soldat,nisi,thucydides,tapa,repudiate,advisability,lope,festering,relinquishing,dessa,mercia,furies,piqued,jinks,biddy,compris,theophilus,crony,sambo,stellen,professes,wherewithal,shrieks,taas,ominously,caer,ablution,demure,athene,jist,ipse,parasols,munition,veered,jonge,serfdom,gossips,rawlinson,scuffle,uncritical,infatuated,rhythmically,gaat,riotous,tenga,embittered,unleavened,veces,stockade,parece,bushmen,babylonia,tempts,tempel,uur,devolve,satyr,fearlessly,ajar,pampas,altra,suppers,fluttered,untrustworthy,exhorted,ravines,yokes,howitzer,interjection,stocky,bazaars,himmel,greate,strenuously,wildness,compensations,laxity,deathly,unloved,balked,fairyland,balaam,hamar,rekindled,drams,entreat,brainless,souci,cessing,cocking,railed,abounding,fount,poacher,invisibly,lithe,intercede,tusks,hatten,ayrton,courtier,blotted,impetuous,grammes,shrouds,ambergris,hellen,clearness,embroider,hu
bbub,robed,unchangeable,wunsch,haya,magisterial,boor,recites,anguished,ailleurs,meteoric,jacopo,equalled,palabra,arithmetical,royally,molle,plantes,dishonorable,thwarting,venise,scurrying,subverted,urbino,effets,broadsword,blankly,auras,bonfires,allt,cloudless,conflagration,xenophon,bevis,dethroned,chapitre,vestige,courrier,cheerfulness,egoism,cataclysm,harried,transshipment,cuore,fatherless,puedo,groen,seers,cretan,roumania,blubber,appeased,coaxed,pageantry,disparage,triste,chimed,phraseology,verdienen,memoire,morass,intimes,righting,moder,tasse,dessus,striding,panelling,braving,prayerful,raad,transfixed,balle,leaven,lout,tucking,unwary,herrings,cubit,begets,groundless,prancing,amelioration,wark,beeld,bezahlen,mightier,enthroned,overburdened,dwindle,lindau,beter,sujets,acquiesce,alacrity,drawbridge,gude,overhauling,girle,pulverized,holier,mauer,everard,uncivil,nondescript,employes,temperaments,consulter,simpleton,brutes,howsoever,unsympathetic,jermyn,dico,rejoinder,condescension,dilate,rasch,tiber,bekanntschaft,feuer,secours,skilfully,abolitionists,flustered,compactly,lasses,fus,corsage,hym,laboured,enumerates,decir,relinquishment,ohg,sall,cession,liken,forfeits,heeding,fata,revenu,helder,verder,caesarea,naturelle,wordless,sleepily,prowling,harmonie,eludes,revelry,deface,propensities,mimicked,mete,algunas,uninjured,rivage,populaire,lief,toddy,disheartened,ruinous,spoor,upanishads,eigene,bewitching,mihi,individu,accusers,sunshade,cuir,hals,furrows,throngs,sarcophagus,dozing,siete,chink,likenesses,pervading,caxton,soames,fermenting,beiden,blithe,paralyze,kazi,tilling,hereunto,daad,languish,feathery,reasoner,adorning,gaily,weib,samt,jubilation,tels,storks,accoutrements,abeyance,ciudades,enfin,suivi,iniquities,nadie,purring,squinting,strolls,encuentra,gradations,conocer,vsed,molest,appetizing,encamped,trifles,sammlung,langage,importantes,suiting,hesitates,paralytic,eastwards,parsimonious,pinafore,alwyn,albertine,disposer,politische,foreknowledge,galleys,sunning,farcical,weel,toiled,incited,rhythmical,rippled,tresses,agitating,oriana,frankness,castilian,bunsen,buenas,susa,sulle,fuera,outlived,anny,repulse,basaltic,hinter,middling,minstrels,personae,wain,englander,gascoyne,knighted,torchlight,teniendo,emanated,southerner,persevered,hounded,butted,longings,galilean,ayant,dominicans,helmsman,meditated,shuddering,homesteads,abrogation,justicia,jutting,deliverer,knecht,aeneid,vehemence,befell,ette,klar,neige,sneered,chattels,brambles,disembark,secede,unmixed,grieves,prises,tumbles,sogenannten,parnassus,debarred,dandelions,abyssinian,maler,bulgarians,coaxing,marshy,terres,inne,preying,grasps,subsisting,freunde,bladders,avions,junto,bloemen,latium,shuttered,alchemists,morose,poore,regretfully,abbeys,dutchmen,agitate,vien,abdication,discontents,botanists,bohemians,blir,foreheads,narrating,gering,pedant,stubbornness,distantly,humaine,averting,pyre,faubourg,wooed,chalky,teamster,beached,fringing,glans,thousandth,sacrilege,demagogue,demean,changement,stipulating,propping,straighter,weirdly,broods,rejoices,limber,hablar,mahomet,telegraphy,lehre,doeth,verschiedenen,chrysostom,blackfeet,waistcoats,chalked,mightiest,marvelously,apse,bailiffs,infirmities,illum,aboot,jolted,manne,jacobite,viendo,freckled,plenipotentiary,philistine,gambled,chaleur,unimaginative,joyeux,gratify,meuse,certainties,zie,fittingly,gelatine,undid,quelque,publick,electioneering,nette,ressource,betel,moisten,demoralized,peopled,suffi,swooped,doctored,soured,quieted,albumen,encircle,carmelite,anges,exhort,voyagers,tendrils,thal,nullificati
erve,cherubims,toilettes,liebhaber,lenity,songe,respecte,sabots,podia,insolently,blik,dimpling,quiconque,ehre,littleness,homines,gammal,highnesses,awaked,upbraid,unsubstantial,muren,dezelfde,proselyte,authoress,fabel,grandee,pleasantry,setteth,chaldea,pensioned,yeardley,tiefe,considerately,gattung,denkt,poursuite,teuton,pestilent,sofern,bountifully,desisted,senecas,jollity,enrica,inexpressibly,sunshiny,dicitur,handeln,begint,oeufs,amanuensis,dreariness,animi,comprenant,smites,schlacht,schauspieler,bezeichnet,orisons,reposes,vart,hauses,geduld,fieri,mischance,koska,hospitably,metaphysician,vulgarly,construit,invectives,poitrine,perdus,blive,voulu,pompously,discourtesy,hazarded,curtsy,palpitating,marido,plaisirs,ennoble,dira,unsought,palsied,sartin,panegyric,profanation,unfitted,halfe,drinken,imprecations,virtuously,inconceivably,vouloir,assiduity,entstehen,abschied,asiatics,artificers,ohren,murderess,pouvons,radicle,volontaires,villany,forded,superintended,abominably,zweck,familier,enervating,tumults,philippus,pouces,forswear,astuteness,heiter,liebes,kenntnis,gehn,molte,lediglich,musst,hauberk,domestique,geluk,unspotted,altname,legt,bounden,declaimed,unexampled,todes,tearless,basely,vorstellung,labios,vond,hubiera,speakest,teemed,killeth,preternaturally,genommen,pauvres,negress,seien,haranguing,quaintness,verser,stoical,tyd,aptness,retrouve,mehreren,malediction,givest,discreditable,brilliants,unseeing,connived,connais,mourir,reicht,crabbed,obsequies,perverseness,latticed,pleadingly,besiegers,busying,brazo,cudgels,heisst,paroisse,befehl,machte,soldierly,musste,richten,exhalations,rapturously,forelock,luy,esteems,agonised,hirelings,hoogste,jauntily,erscheinen,declivity,vivants,reviling,sixe,altid,retrouver,ailed,garlanded,abjectly,vernunft,churl,vrijheid,guds,rendue,erden,erant,telegraphing,archly,statesmanlike,souverain,yeares,duft,gezegd,kust,woorden,quelconque,dunghill,declaim,bucklers,stouter,seuls,unpractical,sehe,reverenced,derfor,hominum,voeten,liveried,disfavour,genially,gezeigt,modish,plomb,gennem,prier,vorn,deigns,careering,thenceforward,trug,hasdrubal,kanssa,hempen,miltiades,growed,decrepitude,thinkest,effluvia,ordres,figurer,grimness,repassed,meditatively,sinecure,mettent,stopt,riseth,kanzler,invloed,verlust,figger,underrate,laune,jederzeit,pardonable,vnder,choleric,inclose,bided,beggary,desto,boeotia,pleasantest,deil,gashed,exordium,tocsin,alcun,spitefully,gehalten,tonnerre,abbia,brocaded,forwardness,drawling,testily,gebunden,ruhig,unfasten,tyran,precocity,resistless,wangen,spasmodically,mesdames,resignedly,festoons,aboute,varlet,viennent,threatenings,erkenntnis,prevision,dacht,squaws,cesse,mahomed,plunderers,navires,tremblement,comfortless,incautious,luxuriance,petto,creditably,jolies,impressiveness,cheyennes,finit,needeth,superabundance,precipitately,unceremonious,sidewise,anacreon,lisping,sonna,delante,rideaux,prig,gezicht,parfaite,vituperation,manifeste,cabman,fawned,oever,untaught,juley,einiger,voorkomen,gelijk,forsworn,imperilled,sichtbar,promptitude,indiaman,cantered,allurements,bataillon,lasst,omkring,juicio,noin,distressful,justifier,bestimmungen,verbinden,bestimmte,foremast,bestaan,stimmung,meeste,accorder,thirsted,irruption,professedly,geschwind,groweth,stupefaction,lanterne,larmes,harangues,remorselessly,appartient,naturall,stupide,dexterously,extempore,viscid,abaft,auraient,reproving,ottilie,waer,scandale,turnus,helpen,begonnen,pestilential,schaffen,merchantmen,flammen,atter,ensi,circumlocution,queenly,livest,grandees,devenue,adjure,allant,obstreperous,gnaden,olet,hee
dlessly,soif,lolled,flatterer,stube,sentimentally,gowned,tutelary,hindmost,furent,faibles,monkish,zouaves,ineffectually,contraste,duidelijk,turbaned,guillotined,conformably,meane,zugleich,disdaining,solcher,ouvrier,zieht,lowness,annoncer,unpleasing,disgracing,disant,begon,heartiness,recompence,petulantly,prinzip,casteth,rhetoricians,sulkily,minuteness,solemnities,vexes,tomando,impecunious,avond,menschlichen,loob,aliis,snaky,confessedly,slecht,wheedle,hushing,gxi,corpore,ungraceful,queerly,schwere,parfaitement,holdeth,straggled,picturesquely,mainmast,disquisition,tiefer,vorgestellt,dulness,pistoles,unexceptionable,finnes,soumission,liebt,maie,centaines,havde,mutinied,terwijl,palanquin,contenir,milesian,poursuivre,lacedaemonian,volgen,respire,gehad,untrammelled,stentorian,flatterers,tomber,cantering,minces,foible,questionings,choeur,kehrt,manacled,haud,thereabout,contenta,soone,hauptstadt,daheim,heedlessness,coquetry,wended,getan,leggen,onkel,barbadoes,wifely,tantas,cuius,rouler,expliquer,mortel,worthiest,pusillanimous,personnage,swaggered,accepte,forbore,gravelled,publikum,opportunely,odoriferous,insensate,showeth,causeless,partem,dennoch,imprudently,drollery,makt,uncongenial,feront,noght,philosophes,sententious,reconnoitre,doigts,eatables,intorno,quiera,sabines,catholiques,housetops,rostro,descry,zouden,dachte,drona,complaisance,tinkled,rappelle,bewailing,entrenchments,llegado,stilte,sternest,vijf,vaches,befitted,preeminently,enervated,profiter,ceremonials,sedately,choisis,trone,gabble,searchingly,somewheres,patriotes,tyrannous,wigwams,paysan,blevet,ooit,suffisamment,monosyllables,sluggard,gelegen,dissembled,verlieren,ieder,impudently,jotka,contrariety,unprovided,prinzen,ruhm,cerveau,inclosing,osaa,supping,anteil,diplomatist,barefaced,plighted,faudrait,unterschied,fermes,verborgen,ofttimes,neemt,steersman,caitiff,thebans,keek,aient,seyn,brumaire,embroil,pennon,athirst,gnashed,neighing,durchaus,glaces,magnanimously,compagnon,anchorite,boisterously,chancing,dagegen,tantos,prenez,momente,sterke,provinz,withall,lippen,donnent,consorted,miry,hollanders,perh,exactement,exacte,entend,gewonnen,moindre,humeur,souple,proserpina,fluss,conclure,dotter,effectivement,feelingly,noised,bondmen,unseres,bashfulness,vaunt,wollt,greatcoat,unmeaning,turcs,untrodden,nerveless,insurrectionary,ruisseau,refuser,quondam,zimmern,raillery,faciles,accordant,mixt,ruft,humide,sensibles,prudente,indissolubly,teils,treten,geschlossen,extenuation,favori,compagnons,merriest,loftily,pourrez,placidity,hicieron,gueule,regne,doffed,herodes,quatorze,tegenwoordig,usurer,voluntad,geniality,twopence,froide,rampe,hearkening,flippancy,breastworks,ruleth,pellucid,couvre,frighted,hearest,evadne,kreise,oublier,idees,irreligion,bruits,waarschijnlijk,prodigality,bessere,vuol,enveloppe,freshet,stoutest,takest,livelong,joyeuse,serez,citadelle,appeare,schaden,sublimes,verfassung,opprobrious,cnut,propitiatory,voyez,acquirements,drearily,grenze,estuvo,violences,hideousness,drawed,bewegen,satte,appartenant,paquets,synes,parecer,mechlin,diciendo,collines,cabals,scherz,disait,atli,superscription,lieue,filched,suffrages,darkies,maitres,swineherd,unworthily,disturber,foresaid,redoubts,boding,ouvriers,benumbed,wenigstens,carouse,habere,composedly,paleis,nilus,eenvoudig,heiresses,schien,pistolet,ambuscade,repine,thinges,geheel,amants,jingled,autrefois,breakfasting,noeud,regardez,zufall,drowsily,religieuses,voisins,verfasser,nogen,engraven,nahrung,gaoler,bancs,waarop,jolis,evasively,draps,weisheit,habitantes,brouillard,resentfully,acquaintanceship,decl
amatory,elate,juif,halb,geister,quiso,gleicher,supplicating,schlaf,zahlreichen,trembler,wickedest,bekannten,adroitness,bestir,helst,multitud,wachten,auxquels,dropt,schoolmistress,obloquy,profitless,mourant,wijze,saidst,flucht,unconcealed,mettant,coursers,disent,mohammedanism,finir,abstemious,krankheit,cannonade,otti,brume,grandmamma,fahrt,moeite,tediousness,verdadera,ongeveer,horreur,licet,ouvertes,warbled,genomen,vuestra,clamors,complaisant,votary,hesper,flossy,zufrieden,geloof,luxuriantly,loopt,haled,grizel,certainement,duquel,inharmonious,amatory,todavia,hindoos,warme,officiers,meaneth,videtur,knavery,dije,blivit,prennent,harrowed,appris,podido,stod,mussulman,unhesitating,sybarite,montrent,leaue,fulco,irresolution,geschickt,schlagen,proverbially,waywardness,maturer,nennen,treiben,servius,bepaald,daraus,faudra,caresse,bijzonder,benignant,appartiennent,domestiques,trifft,arraign,khoja,cawing,fragt,gilds,bottes,antipathies,afeard,bishoprics,marier,bewegt,teutons,whelps,bestehen,victual,healths,heutigen,kertaa,benignity,whitsuntide,gesund,coxcomb,shrewdest,couverts,hecha,jener,undistinguishable,satrap,haen,stateliness,copses,richesse,poursuit,adown,brokenly,coffre,gilberte,eddying,couvent,hawser,circumstanced,werry,muratori,heartlessness,foully,boors,quailed,esquimaux,peint,helas,broils,contenting,troublous,nulle,kinswoman,puissent,bunten,silencieux,gegend,quaffed,fervency,schuldig,sortes,courbe,bethink,eind,comen,serried,careworn,abstractedly,besitzen,unbent,frolicsome,foudre,overrate,directoire,jambes,betweene,stolidly,gerechtigkeit,throned,feind,gnade,saisir,farine,affably,lendemain,aristocracies,hexameter,volontaire,pracht,cravate,aikana,irgendwo,fanns,parricide,strewing,prosperously,allurement,curtsied,mither,recreant,expiated,bedienen,roula,blott,allait,reihen,tournant,entgegen,bareness,shamefaced,bords,perspicuity,gegenstand,visitant,mulle,organes,kriege,connue,annos,enow,jocund,unutterably,entdeckt,winna,brahmanism,appius,inextinguishable,batavian,remarquable,knaben,betokened,griechischen,braccia,merchantman,habited,betrachtet,sympathising,hvide,rejoicings,draga,entreats,conciliated,foeman,confute,voulait,unexpectedness,indispensably,gevoel,endearments,interj,wheedling,touchant,aliud,coyness,quarante,zuvor,tirant,teilnahme,dirige,mantling,extenuate,interessen,battre,quartiers,bracht,vormen,disinherit,restent,aufenthalt,calomel,ouverts,entsteht,disquietude,naething,enormities,kerchiefs,helft,remercie,beruht,genoux,artillerymen,hoeren,flatteries,unfading,gehabt,dight,jouir,waylay,benefactions,angenommen,pitilessly,pattered,varandra,assister,daies,cacha,moest,uncomplaining,tulee,pillowed,courtes,sayde,saisi,linien,temor,imploringly,unsuspicious,picturesqueness,kende,unresisting,besitzt,yez,tronc,begann,musingly,blieb,protract,connus,disconcert,argive,profond,choler,pinioned,tarrying,hatless,baith,epigrammatic,ilmarinen,usurers,boded,dallied,seekest,couverte,dettes,schoot,messire,vorschlag,semblent,geschehen,seelen,traversa,vassalage,offenen,manasses,zuster,breake,auxquelles,designedly,whensoever,conciliating,frucht,discouragements,gingen,semblable,gegensatz,inundations,gelegenheit,scandalised,cinquante,pudiera,algonquins,comported,bange,fasse,servian,stond,unterschiede,propitiated,hogsheads,contumely,ollut,connaitre,provoquer,herrschaft,erinnert,clamoured,lacedaemon,peines,meint,bourgeoise,nerfs,aiment,begge,possit,nomme,plis,piquancy,unpremeditated,desirest,declaiming,bestimmen,marchesa,dizzily,pauperism,samnites,schlief,livrer,sobald,nettled,allerede,odeur,comprends,peroration,preuves
,dahin,verbergen,aandacht,vertreter,daarna,lourd,wilfulness,betrekking,grunde,retenir,esteeming,fallait,ressemble,klage,hauing,prolixity,sonner,subterfuges,stof,zahlreiche,harer,expostulated,barbarities,prudery,bivouacked,fusil,langt,passagers,firesides,vicissitude,salido,allerlei,joyousness,vorsicht,behoved,porticoes,gebirge,tragedian,fastnesses,nebst,waarvan,ruminated,reprend,commonalty,lapset,guerres,indorse,suffisante,curst,flounces,upbraiding,revenging,feebler,venger,miteinander,chaffed,overstrained,consolatory,houre,einzigen,spreken,contemporains,heut,augured,verran,sanscrit,halfpence,cutlasses,cupfuls,tremulously,quavered,puir,governesses,besluit,hetzelfde,veracious,wesentlich,readiest,disconsolately,squally,captaine,demandez,inzwischen,seules,cumbrous,palings,satisfait,geschikt,devoirs,rappeler,croit,orten,habent,didna,demoniacal,voraus,distempers,execration,drest,colonnes,tabooed,retenue,guicciardini,gaed,vuestro,cierta,einfachen,hundra,belike,saltpetre,forborne,cuyas,tardily,satisfaire,dicere,verbrechen,zichzelf,superabundant,vilja,versteht,brengt,scudding,verschieden,destinee,deprecatory,larboard,keinem,manuscrit,shrubberies,volkes,pertinacity,amabel,parme,herrlich,hunc,flurried,avevano,deferentially,souviens,mazarine,infiniment,overborne,rempli,goeden,reinen,engager,jocose,shawnees,vaterland,blessure,restant,maist,ursache,oublie,eminences,obscur,afstand,kepe,cailloux,enemigo,toits,weite,pm,video,info,ebay,dvd,website,photos,forums,yahoo,server,pc,feedback,blog,options,audio,fax,rss,porn,faq,sep,powered,electronics,database,microsoft,url,update,downloads,apr,hosting,videos,tech,linux,jun,listings,sony,google,environmental,pics,sponsored,eur,pdf,usr,homepage,lesbian,logo,airport,phones,cnet,hp,eg,ip,cameras,ratings,paypal,thu,rentals,worldwide,anti,nokia,tx,anal,interface,technologies,gmt,xml,input,sexy,mb,multi,graphics,prev,ads,mini,usb,php,trademarks,phentermine,keywords,msn,programming,isbn,az,updates,desktop,pst,fucking,blogs,evaluation,implementation,angeles,networking,australian,kb,connect,dev,vegas,module,pricing,dvds,documentation,coverage,automotive,developing,milf,ringtones,xbox,www,settings,monitoring,nc,llc,hardcore,provider,techniques,rd,websites,servers,keyword,username,fuck,paperback,classifieds,providers,upgrade,auctions,therapy,samsung,affiliate,admin,designated,integrated,cds,ipod,porno,motorola,strategies,affiliates,multimedia,xp,tits,interactive,developer,sitemap,lab,cvs,gamma,weekend,lcd,dj,parking,ct,hentai,laser,icon,basketball,stats,hawaii,nj,clips,rw,vhs,criteria,pubmed,logged,laptop,checkout,tripadvisor,zoom,anime,spam,bytes,gb,bc,consulting,aa,lingerie,shemale,parameters,jazz,profiles,mom,singles,amounts,usd,mg,pharmacy,constitutes,collectibles,infrastructure,intel,soccer,math,healthcare,preview,devel,rs,voyeur,cisco,certification,bookmark,specials,bbc,avg,panasonic,permalink,viagra,src,faqs,trackback,revised,broadband,pda,dsl,webmaster,dna,diff,sql,specs,ss,yeah,sexo,javascript,gps,acc,euro,encyclopedia,interracial,tn,suppliers,playstation,annotation,gnu,lesbians,aol,modules,backup,personals,kevin,perl,bike,utc,albums,verzeichnis,hosted,developers,kits,variables,agenda,template,investor,wildlife,elementary,sponsors,unlimited,printable,hardcover,setup,booking,ericsson,supplier,bluetooth,tm,upcoming,scores,weblog,nh,alerts,mysql,offline,lifestyle,converter,blowjob,safari,pdt,parameter,adapter,processor,node,hockey,micro,laptops,regulatory,db,ph,epinions,affordable,databases,psp,ds,discounts,boobs,jennifer,demo,lg,gourmet,nfl,avatar,dildo,featuring,misc,c
alculator,holdem,awareness,spyware,packaging,wallpaper,biggest,alumni,hollywood,wikipedia,diabetes,ml,wow,mapping,indexed,grid,plasma,voip,consultants,implemented,sf,blogger,kg,textbooks,seminar,latina,nasa,sexcam,accessibility,templates,tab,router,concrete,folder,womens,css,upload,milfhunter,mc,metro,toshiba,qty,airline,uniprotkb,beastiality,lp,consultant,researchers,unsubscribe,bio,upskirt,exam,logos,milfs,sustainable,pcs,honda,cinema,ag,blowjobs,deluxe,monitors,sci,edt,pmid,recruitment,siemens,expertise,medline,innovative,tampa,ks,python,tutorial,cruises,moderator,tutorials,collectables,scripts,abc,stereo,operational,airlines,livecam,hobbies,telecommunications,bestiality,biz,voltage,nintendo,vinyl,highlights,designers,ongoing,imaging,blackjack,analyst,reliability,gcc,ringtone,oriented,desktops,semester,cumshot,applies,casinos,filters,nv,notebooks,algorithm,semi,proteins,exp,debian,epson,terrorism,cpu,allocated,anytime,nr,layout,initiatives,lol,mp,optimization,genetic,modem,mph,evaluate,toyota,nationwide,vector,limousines,destinations,pipeline,ethernet,postposted,nba,busty,coordinator,epa,coupons,cialis,bb,ron,modeling,memorabilia,alberta,org,okay,workplace,wallpapers,firefox,eligibility,clinic,involvement,placement,vbulletin,funded,motorcycle,presentations,wiki,radar,citysearch,nsw,pci,guestbook,pizza,rc,bmw,mpeg,shoppers,cst,ceo,twiki,counseling,medication,shareware,dicke,configure,institutional,metabolism,rm,pdas,outcomes,sri,thumbnail,api,acrobat,thermal,config,urw,regardless,wishlist,sms,shit,trailers,syndrome,iraqi,foto,tabs,gm,rt,shopper,nikon,customize,sensor,telecom,indicators,thai,emissions,dd,boost,spanking,supplements,icons,tranny,catering,aud,camcorder,implementing,labs,dynamics,crm,rf,cumshots,bukkake,shorts,td,amp,sm,usc,environments,trembl,blvd,amd,emails,wv,insider,seminars,ns,vitamin,processed,functionality,intermediate,billing,diesel,bs,promotional,chevrolet,compaq,authentication,showtimes,sectors,bandwidth,img,schedules,cached,rpm,florist,webcam,nutten,automated,pee,nipples,tvs,manga,mhz,orientation,analog,packard,payday,deadline,robot,assess,gnome,gadgets,automation,impacts,cl,ieee,corp,personalized,gt,conditioning,teenage,nyc,partnerships,slots,toolbar,basically,genes,firewall,scanner,occupational,hs,integer,treatments,camcorders,basics,rv,struct,genetics,punk,enrollment,interfaces,advertisers,deleted,rica,inkjet,peripherals,brochure,bestsellers,eminem,antenna,bikini,decor,lookup,harvard,podcast,interactions,nike,pissing,plugin,latinas,customized,dealtime,temp,intro,zus,fisting,tramadol,jeans,fonts,quiz,mx,sigma,xhtml,recordings,ext,minimal,polyphonic,outsourcing,adjustable,allocation,michelle,ts,demonstrated,handheld,florists,installing,ncaa,phd,blogging,cycling,messaging,pentium,aka,sampling,refinance,cookie,goto,calendars,compatibility,netscape,rankings,measuring,tcp,dv,israeli,medicare,skiing,hewlett,flickr,priorities,bookstore,timing,parenting,fotos,britney,freeware,fucked,pharmaceutical,workforce,nodes,ghz,targeted,organizational,skype,gamecube,rr,titten,excerpt,halloween,methodology,housewares,resistant,recycling,gbp,coding,slideshow,tracker,hiking,jelsoft,headset,distributor,archived,photoshop,jp,bt,diagnostic,rfc,downloaded,sl,seo,isp,nissan,acoustic,cassette,initially,hb,jpg,tc,sunglasses,planner,stadium,mins,sequences,coupon,ssl,gangbang,opt,flu,mlb,tagged,bikes,gp,submissions,oem,lycos,zdnet,broadcasting,artwork,cosmetic,terrorist,informational,ecommerce,dildos,coordination,connector,brad,combo,activation,mitsubishi,constraints,dimensional,mozilla,toner,l
atex,anymore,oclc,locator,pantyhose,plc,msg,nylon,palestinian,trim,pixels,hispanic,cv,cb,procurement,espn,untitled,totals,marriott,starring,referral,nhl,optimal,protocols,highlight,reuters,fc,gel,omega,evaluated,assignments,fw,doug,saver,grill,gs,aaa,wanna,macintosh,projector,std,herbal,retailer,vitamins,vid,panties,connectivity,algorithms,bbw,collaborative,fda,turbo,thats,hdtv,asin,spotlight,reset,expansys,connecting,logistics,kodak,danish,scenario,fs,approx,symposium,nn,weekends,screenshots,deviant,adapters,macro,mandatory,syndication,gym,kde,viewer,signup,cams,receptor,piss,autos,deployment,proc,directive,fx,dl,starter,upgrades,tapes,governing,retailers,ls,cbs,spec,realty,instructional,phpbb,permissions,biotechnology,outreach,lopez,upskirts,debug,boob,exclude,peeing,equations,bingo,spatial,respondents,lt,ceramic,scanners,atm,xanax,eq,unavailable,assessments,cms,footwear,beijing,utils,phys,sensitivity,calgary,dialog,wellness,antivirus,previews,pickup,nascar,mega,moms,addiction,chrome,ecology,botswana,nav,cyber,verizon,enhancement,clone,dicks,lambda,baseline,silicon,beatles,soundtrack,lc,cnn,lil,participant,scholarships,recreational,electron,motel,sys,solaris,icq,yamaha,medications,homework,advertiser,encryption,downloadable,scsi,focuses,toxic,dns,thumbnails,pty,ws,bizrate,sox,gamespot,wordpress,vulnerability,accountability,celebrate,zoophilia,univ,scheduling,therapeutic,travesti,relocation,np,competitions,tft,jvc,vibrator,cosmetics,concentrations,vibrators,estonia,dt,cgi,showcase,pixel,focusing,viruses,gc,stickers,leasing,lauren,macromedia,additionally,nano,copyrights,mastercard,updating,kijiji,conjunction,cfr,validation,cholesterol,slovenia,folders,routers,starsmerchant,arthritis,bios,pmc,myspace,theorem,nb,stylus,topless,structured,jeep,mba,reload,distributors,levitra,mono,particles,coordinate,widescreen,squirting,rx,apps,gsm,rebate,meetup,ddr,rec,forecasts,sluts,ciao,ampland,chem,shopzilla,payroll,cookbook,uploaded,americas,connectors,twinks,techno,elvis,latvia,jd,gpl,irc,dm,bangkok,photographers,infections,brisbane,configured,amino,clinics,mls,saddam,threesome,handjob,transexuales,technician,inline,executives,audi,staffing,cognitive,closure,ppc,volt,div,playlist,registrar,jc,cancellation,plugins,sensors,freebsd,acer,prostores,reseller,dist,intake,relevance,tucson,swingers,headers,geek,xnxx,hormone,childrens,thumbzilla,avi,pichunter,thehun,columnists,bdsm,ide,valium,rpg,cordless,pd,prot,trivia,adidas,tgp,retro,livesex,statewide,semiconductor,boolean,diy,interact,olympics,identifier,worldsex,jpeg,startup,suzuki,ati,calculators,abs,slovakia,flip,rna,chrysler,plumbing,nuke,projectors,pharmacies,ln,introducing,nicole,latino,uc,asthma,developmental,zope,regulated,gmbh,buf,ld,webshots,sprint,inputs,genome,documented,paperbacks,keyboards,eco,indie,detector,notifications,msgid,transexual,mainstream,evaluating,subcommittee,suse,mf,motels,msgstr,volleyball,mw,adipex,toolbox,ict,browsers,dp,surfing,creativity,oops,nipple,behavioral,bathrooms,sku,ht,insights,midwest,karaoke,nonprofit,hereby,containers,integrate,mobiles,screenshot,kelkoo,consortium,pts,seafood,rh,rrp,playboy,fg,mazda,roster,symantec,wichita,nasdaq,ooo,hz,timer,highs,ipaq,alignment,masturbating,comm,nhs,aye,visibility,reprints,accessing,midlands,analysts,dx,sk,locale,biol,oc,fujitsu,exams,aj,medicaid,treo,infrared,tex,cia,sublimedirectory,poly,dod,wp,naturals,neo,motivation,lenders,pharmacology,bloggers,powerpoint,surplus,sonic,obituaries,belarus,zoning,guitars,lightweight,tp,jm,dpi,scripting,gis,snapshot,caring,expo,dominant,specif
ics,itunes,cn,newbie,bali,sponsorship,headphones,volkswagen,marker,strengths,emirates,terrorists,airfare,distributions,vaccine,crap,viewpicture,volvo,bookings,minolta,gui,rn,abstracts,pharmaceuticals,andale,remix,thesaurus,ecological,cg,appraisal,maritime,href,benz,wifi,fwd,homeland,championships,disco,endif,lexmark,cleaners,hwy,cashiers,guam,preventing,compliant,hotmail,refurbished,activated,conferencing,trackbacks,marilyn,findlaw,programmer,vocals,yrs,foo,gba,bm,nightlife,footage,howto,entrepreneur,freelance,screensaver,metallica,headline,str,bahrain,academics,pubs,shemales,screensavers,vip,clicks,mardi,sustainability,formatting,nutritional,weblogs,timeline,rj,affiliation,nudist,ensures,sync,telephony,realtors,graphical,aerospace,meaningful,shortcuts,voyeurweb,specifies,logitech,briefing,belkin,accreditation,wav,modular,microphone,moderators,memo,kazakhstan,standings,gratuit,fbi,qatar,porsche,cayman,rp,tba,usgs,kathy,graphs,surround,lows,controllers,consultancy,hc,italiano,rca,fp,sticker,stakeholders,hydrocodone,gst,cornell,mailto,promo,jj,schema,catalogs,quizzes,obj,myanmar,metadata,floppy,handbags,ev,incurred,questionnaire,dept,euros,makeup,troubleshooting,uzbekistan,indexes,pac,rl,erp,gl,ui,dh,fragrances,vpn,fcc,markers,assessing,eds,roommate,webcams,webmasters,df,computational,acdbentity,handhelds,reggae,whats,rides,rehab,allergy,enzyme,zshops,condo,pokemon,amplifier,ambien,worldcat,titanium,contacted,cdt,recorders,casio,postings,postcards,dude,transsexual,pf,informative,girlfriend,bloomberg,beats,scuba,checklist,bangbus,lauderdale,scenarios,gazette,hitachi,divx,batman,hearings,calibration,eval,anaheim,ping,prerequisite,sao,pontiac,regression,trainers,muze,enhancements,renewable,passwords,celebs,gmc,hh,adsl,advisors,finals,fd,acrylic,tuner,asn,toddler,acne,listprice,libs,cadillac,malawi,pk,sagem,knowledgestorm,ppm,referenced,gays,exec,warcraft,catalyst,vcr,prepaid,electro,vietnamese,lexus,maui,handjobs,squirt,plastics,postcard,tsunami,internationally,psi,buses,expedia,pct,wb,smilies,vids,shakira,qld,dk,findarticles,routines,issn,podcasts,sas,ferrari,outputs,insulin,mysimon,ambient,oecd,prostate,adaptor,hyundai,xerox,merger,softball,referrals,quad,firewire,mods,nextel,rwanda,integrating,vsnet,msie,wn,liz,ccd,sv,burlington,researcher,kruger,viral,aruba,realtor,chassis,dubai,llp,pediatric,boc,dg,asus,techrepublic,vg,filme,craps,fuji,brochures,tmp,alot,benchmark,highlighted,antibody,wiring,ul,js,webpage,hostels,pn,wendy,diffs,mumbai,ozone,disciplines,nvidia,pasta,serum,motherboard,runtime,inbox,focal,bibliographic,incl,hq,propecia,nbc,samba,inspections,manually,wt,flex,mv,mpg,retrieval,cindy,lolita,carb,importantly,rb,upc,dui,mh,discrete,sexuality,polyester,kinase,televisions,specializing,pvc,blah,mime,motorcycles,thinkpad,cunt,feof,bunny,chevy,longest,tions,dentists,usda,workstation,flyer,dosage,urls,customise,marijuana,adaptive,enb,gg,fairfield,invision,emacs,jackie,cardiovascular,ww,sparc,cardiac,learners,gd,configuring,guru,convergence,numeric,kinda,malpractice,dylan,rebates,pix,mic,basename,kyle,obesity,vertex,bw,hepatitis,nationally,andorra,mj,waiver,specialties,cingular,bacterial,lf,ata,bufing,pam,dryer,nato,funky,secretariat,scary,mpegs,brunei,slovak,mixer,wc,sbjct,demographic,washer,springer,evaluations,helicopter,hk,powerseller,ratios,maximize,cj,workout,mtv,optimize,leu,namespace,align,peripheral,confidentiality,changelog,orgasm,condos,greensboro,tulsa,fridge,qc,simpsons,upgrading,pgp,frontpage,trauma,flashers,subaru,tf,programmers,pj,monitored,installations,spank,cw,motivated,
wr,fioricet,rg,bl,vc,wx,figured,currencies,positioning,heater,promoted,moldova,paxil,temporarily,ntsc,thriller,apnic,frequencies,mariah,usps,bg,planners,intranet,psychiatry,conf,wma,aquarium,cir,looksmart,modems,paintball,prozac,acm,glucose,norm,playback,supervisors,ips,dsc,neural,hometown,transcripts,collectible,handmade,entrepreneurs,robots,keno,gtk,mailman,sanyo,nested,biodiversity,movers,workflow,voyuer,subsidiaries,tamil,garmin,ru,fuzzy,indonesian,therapist,mrna,budgets,toolkit,erotica,dts,qt,airplane,istanbul,sega,viewers,cdna,harassment,barbie,soa,smtp,replication,receptors,optimum,neon,interventions,internship,snowboard,beastality,webcast,evanescence,coordinated,maldives,firmware,lm,canberra,mambo,bool,cho,jumping,antibodies,polymer,immunology,wiley,bbs,spas,convicted,indices,roommates,adware,intl,zoloft,activists,ultram,cursor,stuffed,restructuring,simulations,cz,cleanup,crossword,conceptual,hl,bhutan,liechtenstein,redhead,tractor,unwrap,telecharger,safer,instrumentation,ids,groundwater,gzip,ricky,ctrl,theta,lightbox,swaziland,mediawiki,configurations,ethnicity,lesotho,rfid,retailing,oscommerce,nonfiction,homeowners,racism,vaio,gamers,slr,licensee,bisexual,rel,ign,installer,powershot,bestselling,insure,packaged,behaviors,clarify,activate,tg,pv,sandisk,vitro,cosponsors,hyatt,burundi,demos,btw,psychiatric,tittens,teenagers,grading,valentines,vonage,wetlands,quicktime,underwater,pbs,vanuatu,erotik,supportive,vw,targeting,preschool,dw,hm,jl,hg,megapixel,booklet,cancun,reimbursement,turnover,cheryl,radeon,italicized,chromosome,optimized,ffl,upgraded,colorful,popup,mk,garnet,ppp,oceania,formulation,fresno,handbag,bypass,ies,logout,boyfriend,hogtied,wl,clipart,detectors,newsgroups,spectra,mailbox,athlon,iq,landscaping,mol,korn,directv,viable,deviantart,qa,hunks,appellant,xsl,lithium,ctr,planting,alphabetically,facials,calories,airways,refill,reagan,kazaa,einstein,pornstar,vcd,jumper,majors,headsets,toxicity,sz,denim,greenville,scat,neighborhoods,buick,slipknot,mst,residual,bf,bash,ngos,storesshop,postgraduate,daytona,wastewater,constructor,technicians,debbie,issuance,sj,mbps,nationals,ij,alito,waterfront,diagnosed,biotech,turkmenistan,woodland,iranian,unsecured,kyoto,cis,eb,barcode,xd,regulator,txt,postcode,makefile,ansi,vicodin,shawn,suv,lacrosse,crafted,eritrea,bbq,wh,debit,dmx,edits,unwanted,xr,bn,noaa,lemma,kyrgyzstan,sensing,postgresql,kbps,trac,dolby,ecosystem,pkg,dashboard,nikki,technorati,esl,alzheimer,jk,wk,handler,semantic,globalization,atv,vga,atari,sch,reebok,mfg,jb,blogthis,inspirational,wilmington,faso,sdram,motherboards,blk,inherent,jw,tailored,vodafone,romanian,xt,ucla,celeb,assoc,palo,usability,backyard,novell,refunds,newsroom,tina,kia,taxpayer,fb,cola,boise,bsd,saab,refinancing,cert,buffy,doctoral,backpack,npr,identities,tajikistan,sheraton,snacks,booster,taxable,imc,ufo,linksys,dentistry,renal,fedora,nyse,guideline,freezer,pcr,bnet,binoculars,demographics,enroll,daemon,buddies,kc,crashes,outlines,steroids,pogo,konica,hotline,amps,accountants,coefficient,transvestite,upstream,digg,ladyboy,hussein,biochemistry,duplication,scottsdale,ninja,tj,avalon,voucher,tw,wheelchair,gw,epidemiology,pentagon,diabetic,stressed,libdevel,dvi,biomedical,gameboy,subset,gucci,https,websphere,cheney,zombie,recycled,followup,nih,hdd,bidders,simulator,exporters,ninth,mutant,ssh,authoring,specializes,irvine,olds,ramp,jakarta,tl,pgsql,malls,jensen,impairment,scooter,wap,mcgraw,lr,cheerleader,edu,lotion,substrate,mmc,ashanti,homemade,ukrainian,freshwater,topical,rms,isdn,coded,alcatel,suriname,par
kway,femdom,palau,duff,ck,bonuses,scam,biking,microsystems,timeout,aerosmith,resellers,portfolios,ops,semantics,scarface,beige,auditing,rolex,amplifiers,coli,executable,pentax,restart,overstock,eps,hmm,explores,torque,memberships,renting,icann,ticketmaster,cdc,meridia,hsn,oncology,nf,woven,bloglines,audioslave,wikimedia,lipitor,remodeling,redhat,enom,haha,coordinating,holistic,salsa,encarta,childcare,dvr,cdn,soundtracks,napster,wong,debugging,rechargeable,engineered,jerseys,pw,superstore,hex,wg,blogroll,evite,micronesia,dreamweaver,diets,sauna,multiplayer,crt,caicos,qaeda,shareholder,kitts,tivo,deletion,ptr,macau,mudvayne,ceramics,freestyle,organizers,smartphone,cmd,hypertension,searchable,aguilera,servicing,counselling,ecards,acura,clit,cops,fedex,snowboarding,laserjet,cooker,lego,microbiology,internships,sgh,vectors,craigslist,hamas,shane,heaters,rdf,bj,visualization,newswire,hf,spermshack,brokerage,overtime,staind,wd,sourcing,filings,boeing,sizing,exceeded,presley,godsmack,labeling,whois,paradigm,msc,linguistics,snmp,standardized,liu,gta,nutrients,kosovo,barbuda,napa,abt,nickelback,lj,nazi,jenna,arrays,syllabus,rgb,rodriguez,animations,activism,fargo,chairperson,reged,leverage,sgt,anguilla,radisson,apc,hitler,handset,vulnerabilities,pga,activist,palestinians,ldap,prerequisites,maintainer,benq,lx,bv,knoxville,mentoring,pak,mos,didnt,classrooms,residency,deadlines,tk,bookshop,nonetheless,hifi,gf,forex,diagnostics,ew,dreamcast,tumors,vm,kyocera,nudes,rationale,hubs,pasadena,bissau,subway,hpa,fgets,citrus,cameltoe,reuse,sightseeing,therapies,widget,renault,comoros,suede,selector,gop,diaper,hotwire,ngo,pvt,atp,subtotal,coefficients,duplex,mvp,jh,analyzer,charset,clin,nutrient,zhang,underway,govt,cbc,excerpts,formatted,gorillaz,inhibitors,uu,prestigious,deploy,gameplay,autism,taxpayers,martinez,bombing,wwe,metrics,winxp,inability,goo,coronary,bldg,mediated,prom,scans,vaginal,isps,rookie,theatrical,interdisciplinary,kerala,enzymes,analytics,jacuzzi,lesbianas,parser,razr,jt,styling,snack,weezer,randomly,semiconductors,coca,acs,peugeot,bollywood,mentally,horoscopes,noun,xmas,silicone,cpa,dn,scoreboard,proliferation,squid,hw,customised,trilogy,hike,imdb,clic,ars,pharmacist,marley,typepad,xs,deliveries,recruiters,screaming,cygwin,gprs,png,pornography,robotics,chopped,contexts,init,svn,oslo,foreclosures,audits,pesticides,fave,residues,ashlee,viet,orbitz,invasive,helsinki,hardback,vuitton,nextag,inconsistent,narnia,alfa,twp,geoff,rename,atx,markup,breakthrough,ietf,beneficiaries,copier,uncategorized,xm,geforce,defaults,foreclosure,clarification,espresso,hendrix,homeowner,mib,tees,glu,winnt,tec,hydro,nonlinear,spokane,playa,gh,csi,radioactive,desserts,doi,socio,pcmcia,grooming,validate,nederlands,bst,filmography,outerwear,parse,dsp,implementations,attendees,toc,downstream,webcasts,accelerator,masterbating,flyers,tacoma,radiology,locals,mms,tungsten,typed,desc,datasheet,shutdown,xenical,computerworld,tattoos,peptide,sweatshirt,hassle,regents,gn,docket,dll,elsevier,nordic,privat,geometric,taxonomy,deli,intern,nsf,sata,xxxx,megan,allergies,bangalore,clutter,predator,xlibs,belgian,adolescents,djs,coventry,clamp,pricegrabber,cloning,args,madden,smugmug,visually,alright,laguna,limo,aligned,pesticide,transformers,avid,outpatient,lam,encrypted,wholesalers,coldfusion,dcr,shooter,switchboard,vince,fluorescent,cookware,lavigne,param,environmentally,gradient,ncbi,inserts,kvm,programmable,bibtex,chemotherapy,vr,dysfunction,livejournal,diazepam,rodeo,sampler,jovi,timetable,corrosion,positioned,checker,workstations,
cathy,darren,cmp,udp,sts,milfseeker,sbc,midland,synchronization,informatics,oakley,rants,tarot,didrex,brenda,purdue,figurines,footer,maternal,jedi,seamless,ghetto,thr,panty,subunit,aires,commercials,regulators,influential,carlson,yy,benchmarks,ug,emi,retrieving,reactor,kiribati,telnet,biker,parked,financials,peanut,converters,nauru,dishwasher,rcs,neurons,ios,feminist,yds,ive,ecosystems,gadget,cctv,leukemia,deco,ticker,habitats,remover,incorporates,brasil,unicode,prod,spreadsheet,lowering,discography,encoded,researching,pediatrics,sushi,asap,onsite,mapquest,deleting,compilations,therapists,appealing,lifestyles,dst,swimwear,applet,pricetool,threesomes,quinn,daewoo,antigen,ultrasound,mgmt,procedural,cern,macros,msa,aussie,advisories,lendingtree,belmont,acad,bilingual,barbecue,localization,customization,gigs,indexing,lori,spacecraft,ivoire,montserrat,telecommunication,coatings,eureka,pcb,sdk,preparedness,systemic,playoffs,adaptors,forecasting,specialize,drm,enya,masterbation,tubing,bloomington,conditioner,plaintiffs,vanessa,nucleotide,bronx,listmania,middot,netgear,panda,crc,symbian,emailed,chf,constants,clr,isuzu,webring,redirect,interoperability,msrp,tuvalu,shampoo,neoplasms,artifacts,vac,pseudo,dinar,carat,microphones,nobel,galaxies,verlag,scrapbook,dummies,magnesium,pagina,kenwood,roundup,imac,faxes,plump,uss,wwii,methyl,campuses,ramada,tesco,dba,architectures,acdbline,getty,cdr,msi,prog,firewalls,tester,polling,fifa,bins,consumables,highbeam,msdn,statistically,mps,agp,cont,adverts,programmed,lohan,unclear,aromatherapy,nederland,stockton,clearwater,trustpass,topology,airborne,antennas,sundance,lifecycle,dhcp,trucking,iraqis,shortcut,racist,profitability,unc,fairmont,globally,aaliyah,reboot,newsgroup,audiovox,phuket,jf,metabolic,sarasota,billed,lim,toons,danielle,exc,relied,mesothelioma,trafficking,eff,bizjournals,michele,kk,cutie,creampie,seoul,printf,columnist,transplantation,jerome,nwt,rammstein,scrapbooking,sequential,uniquely,goodies,auth,gina,sugababes,rsa,rcw,whistler,airfares,huntsville,ths,layouts,servicemagic,herpes,newsgator,contractual,akron,bh,rebounds,compressor,samantha,khz,webmail,carcinoma,taipei,stance,aps,kumar,gemini,kinky,supervisory,ostg,kl,chiropractic,throughput,netbsd,misplace,serviced,opener,vaccines,jigsaw,jumbo,unspecified,jsp,turbine,percentages,lett,maths,probes,frustration,americana,complexes,varsity,insurer,croatian,multicast,certifications,pradesh,px,proton,allegedly,kaplan,linens,roast,testers,debuginfo,complainant,inhibitor,knowledgeable,jimi,hummer,telefonsex,putative,hyperlink,presario,motorsports,getaway,robbins,kimberly,unsure,dinosaur,tac,ashland,dlp,royce,sophomore,antibiotics,landfill,warehousing,filesize,celebrex,verisign,registrations,wavelength,slashdot,transvestites,cheerleaders,friedman,coolpix,blocker,tawnee,hud,mov,entrepreneurship,percentile,linkage,lh,ripper,afp,kd,accomodation,mcafee,counselors,competitiveness,burger,microscopy,hyper,madthumbs,linkin,gmail,utf,scooters,reserveamerica,organisational,ezine,reactive,clipboard,gamer,alexa,pollutants,directorate,savvy,uploads,terri,norms,implants,alibaba,hormones,hype,addr,nfs,urinary,institut,condoms,directives,zelda,fetal,dong,reportedly,edi,kudoz,replay,flavors,ig,quickcheck,ziff,placebo,lotto,textures,pid,dep,seagate,nanotechnology,toggle,emc,spacing,frameworks,mergers,filtration,gpa,cpus,incremental,corr,sbin,scalable,ji,intra,wetland,olson,methodologies,fremont,someday,sha,exporter,mri,hum,ifdef,killers,multicultural,lasers,dataset,savers,powerpc,steelers,enhances,fucks,relational,graffiti
,cassettes,pussies,doesnt,tiff,cnc,refrigeration,houghton,countdown,decker,natl,extern,enron,codec,broadcasts,checksum,directional,breeders,lethal,decals,macs,archival,seismic,baccarat,mommy,teenager,smokers,declining,lineup,hotspot,bellevue,hj,req,gigabit,worksheet,allocate,aftermath,roach,continuum,feng,pep,nylons,chipset,msnbc,hillary,factual,carisoprodol,tutoring,spectroscopy,gemstone,psc,phonephone,unregistered,moto,gonzalez,dior,pops,osha,goldberg,preteen,bonding,insurers,prototypes,proactive,issuer,sponsoring,malaysian,easton,sentencing,bulldogs,worthwhile,ideology,cervical,tallahassee,userpic,attribution,acta,yep,iec,differs,starters,uml,bur,kris,sizeof,spi,regs,shinedown,standby,arin,unisex,wallets,identifiable,ethanol,cannabis,rsvp,dynamically,grenadines,constr,subtitle,librarians,manson,autocad,powerbook,swinger,infiniti,ppl,williamsburg,supp,snyder,budgeting,backpacks,resale,mikes,scalar,unresolved,hep,seiko,electromagnetic,arial,tos,zoofilia,hcl,validated,sco,annotate,joomla,helix,sx,env,biomass,phs,hierarchical,lesions,financed,surnames,reconditioned,allergic,rk,abn,eliminates,addict,matte,melanie,secunia,metering,genetically,zebra,runway,admits,chennai,ions,asshole,faroe,glendale,speedway,sweatshirts,yay,activex,logon,recruiter,popcorn,espanol,disadvantaged,trong,niue,ux,supermarket,mfr,boo,hmmm,genomic,helpdesk,refuses,afb,adhd,avian,exe,visas,matrices,anyways,xtreme,etiology,tcl,mellon,webmd,personalised,hospice,zerodegrees,qos,exhibitor,sportswear,recap,toddlers,astro,chanel,jabber,hgh,hx,rotate,fema,subwoofer,amortization,neurology,ack,radiator,competencies,hotspots,trainee,nielsen,podcasting,centennial,tuna,bluegrass,wipe,acronyms,autographed,loader,latency,themed,messy,dmc,ments,empowerment,replacements,subtitles,gcse,acupuncture,workload,highlighting,grassroots,gentoo,redevelopment,cellphone,sax,triggered,frontgate,routinely,asc,uploading,managerial,nsu,celine,finepix,wks,tonnes,hypermail,thunderbird,investigative,letras,bylaws,wmv,lao,facesitting,breastfeeding,mccartney,anglo,kathryn,randomized,motivational,gratuite,gerry,kappa,neuroscience,blender,blaster,remediation,decoder,genocide,heathrow,indy,pantera,sidebar,authored,snoop,winery,rbi,photon,overlay,rusty,pharma,fayetteville,champaign,fyi,xc,pakistani,ics,apa,bitches,urbana,diagnose,secsg,franco,announcing,trivium,amature,showroom,cx,swarovski,liter,akon,brendan,condosaver,amex,classicvacations,blackpool,fh,inuyasha,nominees,cuz,viewsonic,dryers,fujifilm,ams,hallmark,counterparts,paced,engl,asians,seether,milestones,parkinson,mclean,checkboxes,lobbying,mgm,cinemas,islander,encoder,importers,impressum,phe,maroon,kontakt,ers,kawasaki,licences,bose,fountains,clones,crossover,situ,specificity,runoff,osteoporosis,approvals,bea,jukebox,nexus,cancers,tango,melting,garner,aba,karate,qb,optimizing,switchfoot,coldplay,vioxx,tty,bsc,celexa,guitarist,symmetric,kuala,bbb,geeks,jg,repec,insightful,unrated,diva,adsense,exemptions,integrates,csa,bookstores,cimel,hvac,leica,agendas,nws,busch,armani,bipolar,menopause,inbound,shortlist,gainesville,tiava,eclectic,headphone,regimes,readme,binder,xemacs,helicopters,ngc,intercontinental,workspace,customizable,softcover,realtime,electrons,subsystem,appl,kinetic,caffeine,xf,nib,httpd,slac,calorie,graphite,stroller,bowel,sweaters,mafia,futuna,predictable,susceptible,insest,skyline,sulfur,scams,lipid,tao,quot,ritz,networked,localhost,cabling,stills,perimeter,biased,cardiology,playoff,sti,chiang,payload,merrill,oldsmobile,grilled,misty,conserved,searchsearch,rewrite,vending,keygen,janeiro,h
eh,transexuals,prentice,cumbria,diaz,vegan,congressman,recombinant,ubuntu,superstar,closeout,corel,kayaking,synergy,eta,backpacking,accidentally,bonded,sticking,dudley,osama,oprah,inflatable,beers,glassware,amc,kos,coursework,kayak,mayotte,repetitive,gears,orbital,musicals,lithuanian,amatuer,profiling,reps,hn,sequencing,panoramic,deskjet,rhino,polynomial,tau,nsa,stakeholder,signifies,stochastic,psu,santana,kidding,swansea,airmail,problematic,roadmap,ogg,lesbo,farrell,acknowledgements,tnt,skincare,heroin,mandated,workbook,xslt,hogan,omg,sulfate,timeshare,oldies,complaining,debra,cdrom,cle,thrillers,fortran,timeless,spouses,vv,ninety,tyr,cues,bioinformatics,chung,subpart,scheduler,hypnosis,kat,cornerstone,recycle,sos,lsu,gao,applicability,volatility,uid,hoteles,fav,disneyland,umd,gdb,bro,offs,listserv,fab,cond,tokelau,conformance,diecast,bittorrent,frankie,oa,iu,vf,alprazolam,collaborate,positives,hunk,allocations,lymphoma,rpc,freebies,frontline,thb,tele,imap,winamp,stoke,idg,polymers,grills,phat,zz,escrow,lumpur,dds,infospace,surfers,kauai,licensors,cpc,stresses,webhosting,peoria,peek,alr,ipsec,bournemouth,sudoku,undef,campground,sars,cme,predictive,vlan,aquaculture,sendmail,redesign,nitro,jackpot,cortex,entitlement,secs,mixers,accountancy,policing,michaels,ecc,kj,similarities,kv,hipaa,neutron,duluth,dogg,folklore,dimm,acoustics,pensacola,crs,condominium,wildcats,exhibitors,ssi,redwood,invoices,tyres,westwood,gly,estonian,bomber,songwriter,shania,coaster,typedef,strippers,macmillan,aac,woodworking,cbd,pricerunner,afl,catalytic,bethesda,privatization,sourceforge,sanford,membranes,testosterone,nunavut,biochemical,lennon,suitability,lara,kx,invitational,handcrafted,aftermarket,fellowships,freeway,digitally,hatchback,rfp,coa,subclass,rutgers,sampled,deploying,interacting,roanoke,treadmill,fiberglass,osaka,personalize,broncos,jorge,classifications,diggs,rafting,sle,jv,safaris,contaminants,scr,mitch,mailer,liners,asheville,quinta,kristin,bistro,lw,voodoo,caching,volts,excalibur,bots,sinatra,interpersonal,traumatic,ringer,zipper,meds,briefings,siblings,adversely,pitcairn,pdb,onboard,nucleic,telecoms,hehe,celeron,lynne,invariant,challenger,redistributed,uptake,newsweek,geared,svc,prada,tycoon,maxtor,plone,dcp,biochem,pte,ors,compactflash,antibiotic,vanderbilt,cps,overweight,metasearch,taliban,maureen,trekking,coordinators,digi,shoreline,westin,middleware,mips,roundtable,dementia,levine,ripencc,shoppy,filesystem,pow,docking,guidebook,atreyu,kylie,pilates,backstreet,packers,localized,lic,docume,xy,fte,stl,yd,archiving,disconnect,multilingual,gsa,immunization,ciara,cumming,interviewing,categorized,cmos,transmissions,receivable,ronnie,implant,playlists,thematic,brentwood,correctional,katz,jojo,buffers,talkback,servings,kobe,baylor,otc,frustrating,ssa,zeta,dinnerware,sclerosis,emotionally,carbohydrate,estrogen,odbc,ipods,openbsd,federated,shui,rockford,staging,statistic,torino,schizophrenia,predators,mpi,adhesives,inventories,uf,brokeback,dumping,ow,econ,footjob,warez,magenta,tagging,overly,triggers,constructs,impedance,dragonfly,underoath,refundable,hbo,billboard,huang,sportsbook,layered,neurological,subs,watchdog,starbucks,ibook,viability,kh,filler,smiley,genomics,yi,yum,researched,copiers,ovarian,airplanes,cello,wlan,sweepstakes,antigens,midtown,stabilization,kinetics,cocos,impacted,rumsfeld,beanie,thurs,spaced,freq,segmentation,soaps,courthouse,entrepreneurial,lebanese,psycho,maharashtra,ricoh,nrc,chavez,asst,overload,vikings,kanye,bootstrap,wtf,humane,scm,travelocity,fno,twink,nortel,koh,affiliation
s,pussycat,appropriated,escherichia,mallorca,reversible,spd,oj,unclassified,bookshelf,htdocs,fps,initialization,expat,raider,farmington,timers,enrolment,glibc,lawmakers,larson,photosmart,centrally,acl,luv,dealership,eyewear,bakersfield,decal,addictive,clarinet,fiona,vn,gigabyte,dbz,rainforest,federally,macos,multinational,pornstars,nope,evo,aspirin,spoilers,machining,malibu,gatwick,shaun,redundancy,emo,detox,skateboard,automate,drosophila,branson,ortho,appraisals,flashes,lakewood,drupal,prac,carers,kramer,usaid,idc,keypad,richland,microbial,adc,caregivers,quark,zyban,electronica,mitochondrial,grinder,angie,octet,wj,cre,dinosaurs,mccoy,vibe,snapshots,ubc,meth,trendy,inpatient,filming,fread,backend,cartier,ageing,containment,keynes,protections,aliases,maximizing,handsfree,tomcat,walmart,interestingly,jules,ernie,elem,organisers,pissed,nite,mckenzie,lenox,darussalam,genital,mcse,cajun,csu,algebraic,astm,kristen,fsa,sgd,chromatography,overdose,nad,gallagher,mueller,cao,ladyboys,orgasms,plantronics,ftd,freezers,ibiza,reese,digimon,gastrointestinal,inspiron,pagerank,asm,smb,contrib,blu,matlab,netware,bse,megapixels,retriever,svalbard,pixar,dhtml,winme,func,gamespy,standalone,antitrust,equine,bros,proto,jared,tehran,dal,anesthesia,filemaker,libtool,wrongful,signage,psy,encode,admins,moc,dau,alvin,accolades,raton,stefani,infertility,servlet,collage,aces,depeche,benchmarking,xxl,teleflora,bankruptcies,gauges,blueprint,mccain,spiderman,bridging,flick,datum,canceled,empowering,ymca,facilitator,bos,macworld,wwf,galveston,rockville,banff,smc,lq,serv,ipo,tek,ipc,timestamp,musica,bib,stevie,rivera,dermatology,sandbox,mdt,pinkworld,cambridgeshire,premiership,luton,conftest,recursive,registerregister,fluorescence,kosher,additives,marketed,mandrake,camper,cpr,liquidity,lasik,galactic,merchandising,ombudsman,registrant,firefighters,placements,ih,elec,levin,academia,amiga,descriptor,pimp,gimp,cyclic,swimsuit,morphology,versace,printprinter,condom,westerns,dodgers,litre,correlations,textual,handsets,gandhi,inks,diarrhea,seahawks,mondays,insertions,itk,kms,couture,ativan,summarize,savesave,laminated,citrix,backups,turismo,animalsex,mayhem,washers,grep,xeon,polymerase,optimisation,easyshare,cvsroot,joplin,dialup,nx,thn,afro,biosynthesis,prosecutors,alloys,getaways,miquelon,wonderland,zine,conn,truman,jin,asynchronous,carla,messageslog,clearinghouse,dwi,facilitates,specialised,ramones,everquest,bernstein,skis,calc,marketers,itc,lipstick,brennan,kpx,saturation,stamford,alamo,comcast,hyderabad,attn,spaghetti,tues,boogie,abramoff,ean,fla,utilizes,lesbos,fasteners,sakai,lk,rajasthan,committing,inlog,laminate,earring,aggregator,datatype,postnuke,ergonomic,dma,sme,kp,refills,ibis,yyyy,unidentified,atl,ims,tractors,vx,spp,coed,audiobooks,sheikh,gk,hernandez,kiwi,ohm,truste,acreage,mfc,fingerprint,sorority,audition,mca,plano,nmr,lortab,leveraging,psychotherapy,mso,htm,stokes,lakers,ats,saxophone,cocktails,steroid,communicator,horticulture,dhs,resets,util,ordinator,bono,acronym,veritas,breathtaking,streamline,crowne,brunch,pundit,figurine,mutants,cyberspace,expiry,exif,goldman,msu,inning,fries,initialize,tlc,sybase,foundry,toxicology,mpls,bodybuilding,fta,nostalgia,acetate,pls,bmx,saratoga,terminator,badminton,cyan,cory,stacey,serif,portability,fsb,yearbook,lubricants,cns,hv,alameda,aerosol,mlm,clemson,goin,philly,coolers,multilateral,costello,audited,galore,aloha,dehydrogenase,aq,gx,postfix,fj,altavista,exponential,shi,gev,secretarial,todays,toaster,cater,omb,bac,kart,cpl,sbs,putin,questionnaires,profileprofile,serials,eq
uivalence,vaughn,aviv,condominiums,schematic,liposuction,swf,apoptosis,pneumatic,sniper,vertices,additive,professionalism,libertarian,rus,washable,normalized,uninstall,scopes,fundraiser,troll,teamwork,auditions,refrigerators,redirected,middletown,widgets,ontology,timberland,mags,videogames,concluding,vallarta,chopper,pinball,pharmacists,surcharge,tbd,ipb,latvian,asu,installs,malware,tsn,nguyen,horsepower,algae,sarbanes,alcoholism,bdd,csc,maximal,prenatal,documenting,scooby,moby,leds,mcbride,scorecard,gln,beirut,conditioners,culturally,ilug,janitorial,propane,appendices,collagen,gj,nigerian,ect,sto,makeover,esc,dragonball,chow,stp,cookbooks,spoiler,ari,avr,lamborghini,polarized,baroque,ppt,jihad,sharepoint,cts,abit,abnormalities,qtr,blogshares,motorsport,septic,citroen,gz,predicts,palmone,expedited,curricula,wmd,pms,raped,configurable,denon,sloan,flawed,cfs,checkpoint,rosenberg,ffi,iriver,callaway,tcm,dorm,lakeside,marquette,interconnection,gilmore,prc,taxis,hates,gamefaqs,cookers,ultraviolet,afc,haitian,dialing,unicef,identifiers,mentors,steiner,licensure,tammy,tz,dcs,soybean,affirmed,posix,brewers,mci,retractable,quickbooks,townhouse,stormwater,sgi,coco,pipelines,rudy,tia,congrats,msds,arafat,srl,splitter,wai,standardization,lakeland,thiscategory,classy,acxiom,triathlon,kbytes,thx,textured,doppler,entropy,snooker,unleashed,lux,nairobi,importer,isl,orioles,rotor,theres,ttl,dreamy,backstage,qq,lubbock,suvs,bmp,gasket,firearm,dss,bam,closures,participatory,micron,budgetary,pcos,ssk,pantie,bombers,spongebob,markus,ideological,wellbutrin,rheumatoid,swindon,cabernet,sek,dsm,understandable,shea,doctorate,binaries,slovenian,showdown,simone,spc,potentials,tempe,hklm,cores,borrowers,osx,bouvet,multifunction,nifty,unveils,skeletal,dems,oahu,rollover,infos,lds,thanx,anthrax,shockwave,westlife,bpm,tamiflu,touchdown,planar,adequacy,iomega,xa,fetisch,eastman,franchising,coppermine,ged,ecard,ue,kn,ferries,faqfaq,muller,fudge,extractor,usergroupsusergroups,svenska,pcg,myocardial,everytime,callback,encompasses,sander,conductivity,atc,vicki,danville,sedona,skateboarding,lexisnexis,deepthroat,outback,reiki,biopsy,peptides,awakenings,pim,sediments,appraiser,smp,gaussian,hustler,tensions,linkages,separator,schultz,adr,concordia,recon,fileplanet,royals,globalisation,borland,pastel,nottinghamshire,strollers,uninsured,picasso,mcgill,discriminatory,headquartered,travelodge,empower,hurley,pedals,teak,bitmap,migraine,sli,enum,lamar,aes,methane,pager,snp,aclu,westchester,nimh,quilting,campgrounds,adm,densities,isd,tional,turnaround,navigational,stargate,saskatoon,cen,minh,fingertips,sba,rockwell,vl,pepsi,rea,oversized,snr,sibling,ecs,burberry,nrs,cfa,inhibit,pps,screenplay,unabridged,ntp,endpoint,labelling,synchronous,heartland,cafeteria,outfitters,opp,homelessness,opengl,efficiencies,blowout,tickboxes,oversee,thresholds,isnt,waveform,deficits,flair,applegate,whitewater,tableware,bernie,workgroup,clement,cli,robotic,mana,mississauga,dialysis,filmed,staten,carole,schwarzenegger,summarizes,sludge,crypto,christensen,heavyweight,lps,zach,pdp,phantomnode,comptroller,scalability,creatine,embl,minimizing,gpo,dq,relativity,mojo,econo,shapiro,rituals,pq,ub,epoxy,watercolor,uncensored,trainees,tori,effluent,infousa,storytelling,polarization,bombings,smes,ionamin,fuckin,charlottesville,xu,aniston,barred,equities,feeders,jboss,mobil,scrolling,diode,kaufman,aloe,buckinghamshire,medford,underlined,whores,gemstones,bmi,viewpoints,exim,appalachian,dealings,phillies,ramblings,janis,centric,optionally,nightclub,geophysical,fictional,go
lfing,rubin,handlers,topeka,openoffice,bugzilla,linus,taco,mcsg,humboldt,scarves,mla,repertoire,emeritus,macroeconomic,gundam,adaptec,tailed,voyer,hostname,excl,bx,arr,typo,merchantability,autodesk,jn,winged,attacker,catcher,haynes,siyabona,inverter,abi,motivate,mackay,bridgeport,assessor,fullerton,cpp,blockbuster,dz,amarillo,pixmania,pathfinder,bonsai,windshield,tomtom,spf,croydon,convection,jdbc,debugger,boing,ancillary,pointless,alibris,factoring,gyms,inhalation,faucet,bitpipe,arguably,techs,electives,walkman,midget,quan,commissioning,experimentation,saltwater,cpi,nis,wacky,sgml,anemia,biting,reits,savanna,crn,travestis,mmf,cancellations,paging,coe,nudists,fac,asean,airsoft,bontril,proliant,keeling,zh,accesses,jive,bullshit,casper,libstdc,xpress,datasets,webdesign,nicotine,comeback,gannett,curricular,downtime,takeover,lolitas,thessalonians,upto,joaquin,transistor,spotting,wagering,everest,disregard,hanger,outkast,pitbull,rtf,fairview,hires,alienware,mainframe,indo,compilers,guinness,heartbeat,blazer,timezone,merck,tanya,bmc,eia,colleen,bbbonline,participates,syndicated,lexicon,integers,zirconia,shortages,plumbers,jfk,raf,igor,hama,patton,pei,surfer,diapers,eas,waco,physiol,adp,outbound,breakout,fakes,stderr,kev,fomit,injections,remortgage,yogurt,complies,workaround,polytechnic,uber,shoppe,berlios,csr,penthouse,synthase,pistons,emule,sauvignon,bayer,carrera,dvb,cation,scientology,cdma,maxi,msm,rac,feminism,topps,webinar,dewalt,turnout,bruins,clamps,firefly,tabletop,monoclonal,wholesaler,typekey,partnering,mage,sqrt,israelis,cdp,headlights,monophonic,proquest,sergio,swapping,mev,particulate,bedfordshire,rockport,nist,negotiable,subcategories,quarterback,sudbury,hectares,upscale,scrabble,sdn,mta,docbook,kiosk,firstgov,hoodie,hoodia,payout,clinically,metacritic,obligated,decoding,presenters,teal,epstein,weblogic,ity,covington,esd,interconnect,chinatown,mindless,purifier,kz,greedy,rodgers,gloryhole,suppl,hotjobs,downing,gnd,libc,societal,astros,halogen,wyndham,osu,tuesdays,utp,superpages,coaxial,jpy,liam,sesso,arabidopsis,argv,hanoi,ccm,faucets,ballistic,payouts,rockin,supermarkets,bmg,nacional,csv,telstra,contraception,polaroid,underage,cardio,timeshares,atk,qi,logger,kool,oki,birding,detainees,indi,lymph,barrie,pollutant,closeouts,tolkien,undp,jbl,weekday,homecoming,increments,kurdish,chromium,mccormick,pcm,confrontation,shreveport,grower,frederic,unpredictable,dtd,capacitor,burnett,hilfiger,mda,litres,moroccan,nightwish,hess,wheaton,motorized,subgroup,chevelle,vets,assays,ramon,longhorn,backdrop,aerobic,vgroup,thursdays,dansk,tenerife,mayen,oldmedline,dunlop,caa,modernization,xe,fourier,businessman,watersports,lucent,commuter,orthopedic,hhs,tyrosine,shenzhen,initiating,grabs,erickson,marlin,casserole,canoeing,cca,ophthalmology,geile,clubhouse,licensees,evaluates,svg,protesters,fernandez,mvc,sleazydream,patti,mz,sennheiser,sheehan,maven,commute,staged,transgender,customizing,subroutine,pong,hertz,myr,bridgewater,firefighter,propulsion,westfield,catastrophic,fuckers,blower,tata,giclee,groovy,reusable,actuarial,helpline,erectile,timeliness,obstetrics,chaired,agri,repay,prognosis,colombian,pandemic,mpc,fob,dimage,fetus,determinants,durango,noncommercial,opteron,superannuation,ifs,haas,wimbledon,documentaries,mpa,rao,remake,arp,braille,physiopathology,seperate,econpapers,arxiv,pax,kalamazoo,taj,sinus,maverick,anabolic,allegra,lexar,videotape,educ,amplification,larsen,huron,snippets,conserv,dustin,wsop,composites,wolverhampton,banning,cpt,gauteng,ftc,watertown,pathogens,mft,uefa,jacking,radiohea
d,ooh,subsections,definately,bod,yin,tiki,homepages,handouts,cpm,marvelous,bop,asnblock,stretches,biloxi,indymedia,clapton,beyonce,smf,nabble,intracellular,infoworld,boyz,waltham,geisha,dblp,briefcase,mcmahon,cq,mcgregor,modal,marlboro,grafton,phishing,addendum,foia,kirsten,yorker,memberlistmemberlist,gam,intravenous,ashcroft,loren,newsfeed,carbs,yakima,realtones,xtc,vdata,interpro,engadget,tracey,wac,darfur,fragmentation,behavioural,kiev,paranormal,glossaries,sonyericsson,dex,emoticons,carbohydrates,hms,norwood,appetizers,webmin,stylesheet,goldstein,wnba,englewood,asf,hottie,stripper,pfc,adrenaline,mammalian,opted,meteorology,analyzes,pioneering,ctx,spreadsheets,regain,resize,medically,tweak,mmm,alicante,graders,shrek,universidad,tuners,slider,cymru,fprintf,irq,dads,sdl,ebusiness,hays,cyrus,courtroom,baht,relocating,synth,filthy,subchapter,ttf,optimizations,infocus,bellsouth,sweeney,aca,fpo,layup,laundering,fre,nazis,cumfiesta,newbies,mds,piles,vaginas,bezel,avatars,twiztid,facilitation,ncr,xb,voc,rts,applets,pdfs,cac,teh,undercover,substrates,evansville,joystick,knowledgebase,forrester,xoops,rican,uptime,dooyoo,spammers,nuclei,gupta,tummy,axial,aest,topographic,westport,majordomo,wednesdays,burgers,rai,watchlist,campers,phenotype,countrywide,affirm,directx,resistor,bhd,audubon,commentsblog,snowmobile,publ,cpg,subparagraph,weighting,rectal,mckinney,hershey,embryos,garages,sds,urology,aforementioned,rihanna,tackling,obese,melvin,collaborations,isolates,velcro,worksheets,avaya,srs,wigan,hua,abba,qd,orig,huskies,frey,iz,loyola,gartner,xda,strapon,chaser,astra,expasy,overdrive,ripley,phosphorylation,cfo,depletion,neonatal,qr,mclaren,rowling,vhf,flatbed,golfers,lira,technics,damien,clippers,spirited,gv,staa,recharge,openid,sassy,demux,ribosomal,tdk,filmmakers,transnational,paralegal,spokesperson,fha,teamed,preset,iptables,pocketpc,nox,jams,pancreatic,tran,manicures,sca,tls,prweb,holloway,cdrw,plz,nadu,underwriting,rulemaking,valentino,prolyte,millenium,collectable,stephan,aries,ramps,tackles,dsa,walden,catchment,targus,tactic,ess,partitioning,voicemail,acct,shimano,lingere,parentheses,contextual,qwest,jira,cerevisiae,dyson,toxins,camaro,cryptography,signalling,daycare,murakami,merriam,scorpio,attr,emp,ultrasonic,ashford,intergovernmental,paranoid,dino,xvid,dmoz,ivtools,barron,snorkeling,chilean,avs,suny,gifs,qualifier,hannover,fungal,ligand,aust,peoplesoft,freelists,coastline,omit,flamingo,deformation,orf,pfizer,assembler,renovations,genbank,broadcasters,employability,noodles,retardation,supervising,freeport,lyme,corning,prov,dishnetwork,amg,claremont,moo,cpe,childs,bizkit,blogosphere,endocrine,resp,carlsbad,ammo,bling,chars,mcguire,utilisation,rulings,sst,geophysics,slater,broccoli,foreach,oakwood,mcgee,kissimmee,linker,tetris,tds,synchronized,hsbc,shellfish,astoria,trajectory,epsilon,knowles,astrophysics,hansard,lai,authorisation,vampires,relocate,nerd,dac,glazing,provisioning,mnt,expandable,maserati,bender,reliably,fas,sendo,hasbro,corba,polski,multidisciplinary,ventricular,petersen,bans,macquarie,pta,poy,mao,transferable,yummy,momma,lehigh,concordance,greenberg,trish,electrodes,svcd,cron,darth,cramer,yup,ching,melanoma,thug,yugoslav,occ,cpan,bizjournalshire,tco,shaver,grammy,fibrosis,opel,hummingbird,ported,eeo,polyethylene,parametric,awarding,dkk,superbowl,sse,haskell,flatware,skid,eyeglasses,fenton,polaris,formulations,bgp,parenthood,latinos,artworks,doherty,dnc,bci,allegheny,arenas,aaaa,compressors,exclusives,lounges,consultative,lst,ais,conveyor,normative,surg,rst,longtime,ecm,mckay,spe
,solver,ani,lacie,solvents,kudos,jens,creams,poo,handbooks,agm,shawnee,crowley,butalbital,artifact,mdot,coldwell,qs,depts,veterinarian,merseyside,cso,krona,disseminate,puget,coasters,geologic,fleetwood,feldman,endocrinology,replicas,polygon,mcg,kwazulu,servo,riparian,guelph,tenuate,curator,jaime,mower,gamestats,lvl,faxing,meyers,testsuite,stressful,extranet,remastered,teac,neg,rma,eastwood,handspring,gerber,duran,aquarius,stencil,srp,scifi,redirection,showcases,hmv,refinery,abort,drs,schroeder,indent,chardonnay,removals,antrim,accelerating,guesthouse,bz,insiders,duvet,decode,looney,brigham,mts,jewelers,juneau,dilution,veterinarians,colourful,grids,sightings,binutils,spacer,microprocessor,deloitte,claiborne,clie,cdm,spills,assistive,chronograph,refunded,sunnyvale,spamcop,lovin,embracing,minimise,salinity,nbsp,specialising,handout,routledge,ramirez,haiku,paisley,telemarketing,cutoff,visuals,ccs,breads,seg,martina,mclaughlin,headlight,kemp,sla,pipermail,sonneries,clinicians,entertainers,tripp,peterthoeny,blockers,stash,jamaican,semen,endogenous,memorex,showtime,narcotics,oceanfront,flange,realplayer,mcc,mpaa,gogh,allentown,romero,bnwt,predefined,buzznet,melodic,isi,naics,transgenic,axim,brookfield,endorsements,viscosity,cve,bengals,estimator,cls,concurrently,leafs,electrician,mayfield,ftse,samui,bleach,unauthorised,wolverine,individualized,ecn,raffle,shredder,embedding,hydrology,mascot,lube,launcher,mech,primers,caregiver,lupus,sachs,qtek,oy,twn,keane,gator,memberlist,utd,nordstrom,roseville,dishwashers,walla,remixes,cozumel,replicate,taped,mcgrath,biometric,incubation,aggregates,wrangler,asymmetric,cytochrome,xfm,sps,shure,mcs,donating,antec,giveaway,cmc,alyssa,cnt,renter,vmware,patel,honeywell,nightclubs,barrington,luxor,caterers,capacitors,rockefeller,checkbox,itineraries,reagents,christoph,walkers,eek,ensembl,weekdays,computations,wineries,vdc,booker,mattel,diversification,wsdl,matic,xyz,antioxidant,esrb,archos,semesters,naruto,storyline,melrose,streamlined,analysing,airway,iconv,commas,vicky,helvetica,ssp,submitter,cambria,icp,manifestation,subsets,blazers,jupitermedia,merritt,triad,webpages,yp,clinique,fitch,charting,ugm,fixation,bsa,lenovo,alamos,leach,gravitational,cyrillic,prevacid,designee,sunni,netflix,monoxide,groupee,hardin,colorectal,outage,chunky,raptor,ima,coulter,iain,mtn,pbx,quantify,dmesg,elfwood,substitutions,lancome,galleria,inv,hillsborough,booklets,pln,cin,msp,gluten,spanked,orthopaedic,medi,nrt,obispo,minogue,turbines,notepad,crappy,golfer,afs,receivables,scripps,livermore,cirque,ost,marxism,escondido,diffraction,aha,outlining,subtract,bosnian,hydration,havent,preferential,dre,interns,quotas,methodological,aarp,gettysburg,iseries,menlo,walkthrough,bikinis,aopen,bookcrossing,addicts,epithelial,drastically,clarks,groupware,matchmaking,dict,descriptors,aeronautics,radiography,norsk,nps,afr,expr,ejb,refereed,afi,toxin,poynter,filmmaker,grounding,smartphones,calvert,fiduciary,bayesian,saccharomyces,cfp,humps,osi,zimmerman,javier,romantics,trimmer,bookkeeping,hmo,hikes,kickoff,magick,hillsboro,blm,fractal,mtg,guildford,twill,therapeutics,disruptive,kicker,protease,abrams,moreno,newsforge,timex,duffy,racers,cma,pairing,kirkland,gujarat,dkny,catfish,doubletree,brink,transex,tdd,hotpoint,anthologies,retirees,dcc,btu,investigates,chelmsford,anonymity,gotham,lyle,pinot,responsiveness,gazetteer,jacobson,kda,imitrex,monash,binghamton,connolly,homology,rpms,psychedelic,gyn,rhinestone,ely,quadratic,philharmonic,dynamical,cantonese,quran,turnovr,keychain,shakers,inhibited,lexical,opens
sl,ugg,mathematica,karachi,missoula,abilene,fdid,snes,swat,pune,trashy,expended,webct,pvr,handycam,zn,strategically,dms,anus,dnr,deputies,emergent,erika,authenticate,aligning,nautilus,doulton,rtp,dracula,umm,modding,eap,shaman,letra,mandriva,seti,extracellular,jaipur,stockport,eiffel,plywood,dnp,morbidity,wimax,effexor,binders,custodial,combi,integrator,sonnerie,teri,sectoral,trombone,postsecondary,rbd,ambulatory,lookin,xff,camouflage,beckham,dispensers,firebird,qu,showbiz,hbox,waikiki,lng,pds,antiqua,boxers,asics,barbeque,workouts,ini,mrc,seamlessly,ncc,girlfriends,songbook,hepatic,copeland,swanson,aquifer,ldl,pgs,xga,svensk,stereotypes,marlins,shelly,exiting,saginaw,polyurethane,seks,textus,johansson,spraying,hamburger,reactivity,lieberman,windchill,storefront,eof,codeine,tetex,cheerleading,wellbeing,pkwy,hairdryer,punitive,exon,outsource,thier,siebel,captions,kf,chromosomes,emailing,manic,novotel,ndp,transmitters,nicola,minidv,collaborating,tuxedo,receptus,michelin,bicycling,itt,blueberry,schumacher,socioeconomic,hamster,bushnell,ergonomics,finalize,lumens,sudanese,softpedia,iff,faceplate,packer,ibs,broward,globus,pir,reco,softcore,referencing,typ,guangzhou,nader,militants,resins,cougar,montrose,surreal,irradiation,redesigned,raster,credential,checklists,quirky,oscillator,finalists,encrypt,mgt,sneakers,incontinence,pajamas,murdoch,dali,lubricant,quests,mgr,outsourced,jody,plasmid,schiavo,unbeatable,upstate,lymphocytes,repayments,transsexuals,fueled,mex,xanga,sverige,extrait,pelvic,monochrome,activating,antioxidants,gynecology,mythtv,probabilistic,cooperating,calibrated,phased,godzilla,eweek,airbus,simplex,webhome,aerobics,sabrina,condor,gated,gaap,sasha,ebayer,hmc,bitrate,karnataka,amish,ffm,duh,hyperlinks,clitoris,hse,cribs,reliant,subcontractor,fendi,giveaways,wah,psych,hydrochloride,magnification,twelfth,proponents,priceline,ecco,backpackers,kohler,irb,initialized,ava,silverado,amr,ecu,psychiatrist,lauder,soldering,phono,crd,daryl,trp,lehman,daihatsu,grantee,enhancer,anglers,rottweiler,filefront,visualize,psd,adb,hoses,bidpay,ias,turntable,screenings,pivotal,pai,heuer,fic,nix,lineno,fdi,provo,checkins,plating,lycra,planck,yugioh,reactors,npc,kingsley,careerbuilder,gillette,fluoride,stacking,cochran,suomi,sissy,trang,calculates,thunderstorms,cip,transcriptional,finalized,referees,deerfield,lsc,cochrane,eldorado,esmtp,conservancy,otrs,omim,dielectric,anand,electrophoresis,sprinkler,imbalance,cine,scarlett,xen,novak,backcountry,artistdirect,outboard,pitches,scc,lockheed,raj,iana,elmo,unmatched,scranton,ixus,pinpoint,gabbana,neumann,outta,dieting,andhra,ralf,appraisers,xenon,hybridization,anh,abercrombie,trax,otherosfs,ssc,danbury,nofx,sharma,rockers,palliative,recieve,cufflinks,queues,relisted,beep,dunedin,remanufactured,staffed,lightspeed,grilling,stalin,kaye,bps,camo,shoutbox,toms,homeschool,ccg,lifehouse,windsurfing,pattaya,relocated,untreated,mkdir,riaa,divisional,chihuahua,mcconnell,resell,chandigarh,centrino,osbourne,burnout,classpath,designations,spl,microwaves,coliseum,ephedra,spawning,endothelial,citrate,eduardo,snowman,edmonds,potty,microbiol,shooters,norwalk,bacillus,fk,cla,spooky,belleville,venezuelan,cbr,colby,pab,hom,subpoena,hons,interpretive,bareback,extender,glucosamine,proj,modesto,designjet,typhoon,launchcast,referrer,zhejiang,ricci,superhero,tooling,tomography,berman,vocalist,tidbits,cystic,pacifica,kostenlos,anniversaries,infrastructures,littleton,commenters,cali,fairway,postdoctoral,prs,fairchild,ssb,spinner,evanston,homeopathic,ordinarily,hines,cpd,braking,ece,plat
elet,messageboard,setback,recipezaar,installers,subcategory,markov,factbook,tuple,fibromyalgia,rootsweb,culver,bratz,bucharest,ntl,lacoste,renters,timberlake,zack,markham,gels,iframes,thinkgeek,nafta,advertisment,mountaineering,screwdriver,hutch,beckett,homeschooling,dealerships,sakura,byu,jupiterweb,phosphatase,mahal,killings,robyn,adirondack,casablanca,sdp,pulaski,mantra,sourced,carousel,mpumalanga,thermostat,infarction,polypropylene,mailboxes,southend,maxell,tundra,vars,youngstown,farmland,skater,iep,imho,disrupt,rampage,fink,jurassic,gpg,gnupg,aliasing,comix,solves,hiroshima,jiang,oscars,boosting,knownsite,macarthur,powerhouse,deodorant,youre,compulsive,perky,reinforcing,extensible,mtb,catheter,practicum,photocopy,zipcode,mcpherson,saharan,pixma,hubbell,lesbienne,timeframe,disarmament,aed,actin,interviewer,vms,wno,dbi,waikato,syslog,orr,gastroenterology,travelmate,composting,mackie,choi,uva,fga,oceanography,vastly,stardust,radiological,commando,bathtub,urdu,aedst,greer,motorway,repositories,freaky,guangdong,merlot,civ,spielberg,lesley,thom,phoneid,salinas,legged,unilateral,dsn,shri,aegis,colloquium,matrox,vk,springsteen,uhf,fatalities,supplementation,embodied,altec,mohammad,verbose,marbella,sth,iterator,recieved,slc,cfl,deterministic,nci,predictor,salmonella,nga,nantucket,viewable,subnet,maximise,lotr,isn,chalets,reimbursed,lau,watermark,totes,mohamed,dyslexia,hubble,thugs,organics,dearborn,feds,yiddish,dopamine,multiplier,winzip,sacd,payoff,spv,sonar,monticello,flasher,subcontractors,evangelism,abortions,lesion,akira,progesterone,ethyl,earthlink,caramel,immunodeficiency,washburn,xtra,capitalized,ceos,maint,pancreas,octopus,xena,neuro,ara,receptionist,cessna,tru,zombies,cambodian,interagency,activision,synchronize,jenn,juegos,titties,tay,hornets,crossfire,ankara,spandex,hdmi,tamara,ctc,capcom,cato,peachtree,handyman,aeg,ethic,harlan,taxon,lcs,indefinite,slackware,cougars,earch,ambience,genet,photopost,uo,infor,neuronal,carrollton,checkers,torrance,yuma,spokeswoman,baccalaureate,tripods,logistic,middlesbrough,personalization,enema,easement,goalie,darkroom,hydrocarbons,gpm,hoh,hla,donaldson,tiscover,recor,mori,adi,rockland,uniqueness,hfs,cascading,metros,hangers,broadcaster,musculus,degraded,topo,viewcvs,eisenhower,flashlights,myyahoo,rosenthal,affordability,latham,jailed,depp,grapefruit,trna,motorbikes,verdana,bonita,nippon,decorators,dwl,jizz,pendleton,psoriasis,mavericks,dianne,earnhardt,amtrak,resid,tostring,lessee,goodyear,utica,overclocking,kitchenaid,cbt,peacekeeping,oti,interferon,aas,selectable,chechnya,rory,woodbridge,jas,intersections,sma,capitalization,epi,responder,qv,thoracic,phaser,forensics,infiltration,serine,bing,schemas,orthogonal,ohms,boosts,stabilized,wordperfect,msgs,zhou,selenium,grinders,mpn,cse,assn,punches,masturbate,parachute,glider,chesney,taos,tong,lotions,adrenal,sixties,booting,cunts,dri,ozzy,elearning,zx,valuations,kidman,jpn,postoperative,cytology,nye,biennial,ifndef,bq,circuitry,cdw,robb,kinja,tweaks,readership,northstar,dif,worthington,groundbreaking,transducer,serotonin,complements,isc,params,radiators,beagle,cadmium,bodoni,speedo,detachable,simplifies,sleeveless,motorists,tbsp,waivers,forsyth,ricerca,agilent,plumper,uterine,apartheid,bnc,businessweek,morphological,windham,ellington,ria,cdi,polio,clp,sharm,alvarez,regatta,chatroom,polarity,overrides,riff,widths,dest,attenuation,kluwer,martins,italiana,telford,shuman,grapevine,russo,daunting,topples,futuristic,autofocus,chai,obsessive,transplants,referrers,junkie,admitting,alsa,galactica,wkh,rotational,w
ithdrawals,pageviews,hartman,finalist,pornographic,armageddon,smallville,selectively,albans,fallout,brownsville,galeria,stalker,kathmandu,nyu,kristina,dps,icmp,sophistication,wrt,messed,oceanside,foxpro,taiwanese,officejet,helens,ppg,sym,combos,cloned,fulham,dahl,pla,nfc,mathews,bestseller,enrique,minidisc,downside,malvinas,honcode,reissue,striker,memos,tensor,whitehead,whoa,brookings,accomodations,integra,laredo,nntp,logiciel,jaguars,mga,tracer,frist,lsd,synthesizer,ejaculating,biodiesel,mcleod,waldorf,microfilm,lear,subsidized,simons,optimizer,zire,pituitary,sow,repeater,teamxbox,bytecode,mccall,wiz,autopsy,joltsearch,ym,itv,colo,ying,bce,inode,glenwood,allstate,horticultural,hahaha,spamming,ssn,wartime,mou,hpv,jain,geriatric,mayan,navman,futon,grannies,hairstyles,nays,webspace,rds,mellitus,multiples,cryptographic,disparate,boardwalk,ineligible,homeopathy,entrants,rallies,simplification,abb,insolvency,roleplaying,affective,wilma,compusa,histogram,wheelchairs,usaf,pennington,lesbiana,liberalization,insensitive,greenpeace,genotype,contaminant,informa,collaborators,malvern,proxies,rewind,issuers,sinh,kerberos,schoolgirls,hilo,stratton,idx,astronaut,instituto,lowry,constipation,aec,sheryl,nashua,ikea,oswego,gbr,koi,sues,cba,mckenna,eudora,candida,sildenafil,adjusts,sqft,pickups,squaretrade,chandra,cheesecake,oth,porting,lubrication,shootout,racine,webserver,vnu,fragmented,chevron,reinsurance,slated,tera,guantanamo,reina,energizer,clarksville,vandalism,acpi,acetaminophen,wolfram,ofthe,contraceptive,necrosis,iva,bonanza,lumbar,disparities,umass,flamenco,osprey,flammable,biometrics,buspar,wasnt,nds,softwares,dbm,alchemist,marr,ssw,mcdonalds,hormonal,vh,calender,distro,virgo,rink,jesolo,unrealistic,rhonda,pov,pings,pcp,inxs,desy,teaser,impairments,courageous,rho,promos,transceiver,warhammer,iterative,catered,callahan,neuron,xlibmesa,pulsar,enewsletter,dav,pedagogy,bcc,afrikaans,ecb,cinematic,ugh,malik,tshirts,fellowes,illus,telefon,maguire,nlm,numeracy,caviar,popups,sleepwear,quads,grady,kelsey,enforceable,bouncy,vcrs,retinal,sponsorships,textrm,screenwriter,vendio,otago,ducati,allele,sylvania,optio,purifiers,commuting,hiphop,kato,kama,bcs,keating,eczema,northland,icu,veg,roadster,confetti,fv,raptors,irda,veggie,dharma,chameleon,hooper,luciano,grp,abrasive,henti,koruna,edp,ensembles,backpacker,bainbridge,scs,comfy,assuring,gettext,registries,eradication,herefordshire,ectaco,doh,jodi,quintet,groupwise,ambiance,chun,damian,bakeries,dmr,fucker,polka,wiper,wrappers,giochi,iterations,svs,ntfs,namespaces,mismatch,fdic,icd,vj,oxides,qualifiers,battered,wellesley,smokey,passwd,vacuums,falun,precip,lagos,rapper,hooters,calligraphy,advantageous,mustek,monique,fearless,ortiz,pref,morningstar,recessed,fmt,palladium,totaled,levitt,vd,shipper,darryl,hobo,nys,merrell,cra,sly,reductase,raul,shenandoah,harnesses,wtc,loma,oshkosh,multivariate,geil,kitchenware,unigene,lans,immunoglobulin,silverstone,uniden,telechargement,remstats,unitary,getnetwise,hospitalization,clubbing,microelectronics,observational,waverly,crashers,schwab,deregulation,vba,carpentry,steinberg,sweetie,mideast,hispanics,podium,paranoia,faceted,sito,gecko,fullscreen,interchangeable,rollins,scp,hst,starship,miele,seeded,cyclists,fey,cmt,nurturing,enzymology,amadeus,usm,galapagos,uconn,picker,xls,mulder,lesbicas,dialer,mooney,syntactic,envision,jetta,downey,codex,lsb,userid,cosmology,noodle,gromit,sargent,bangle,humping,donnie,privatisation,tofu,rq,unhcr,battlestar,intuit,adoptive,cda,minimized,partnered,twat,filibuster,glamorgan,adwords,tulane,usp,
facet,behaviours,redneck,imax,xpath,synthesized,encapsulation,samsonite,accordion,rooney,minimally,webpreferences,skoda,matchups,ucc,mailings,ono,beachfront,cem,crosswords,pubchem,integrative,kelowna,embed,gurus,allotted,shutterfly,gerhard,watersheds,trimester,clickable,spyder,electricians,nexium,capricorn,dipped,perm,rte,spectrometry,snippet,pha,permeability,waukesha,igg,scart,wsu,normalization,skillet,neoprene,vlc,offeror,thermo,huber,jarrett,farechase,maintainers,maarten,ginseng,blackout,detergent,rosetta,grenade,occured,karin,lana,fontana,kang,crafting,ivillage,mowers,bratislava,policymakers,sienna,watford,misco,givenchy,reimburse,esperanto,modalities,pcc,lighters,shutting,endemic,spr,carly,hydrologic,stansted,nep,huddersfield,aimee,davey,csp,helpsearchmemberscalendar,ait,transduction,silverman,clarifying,aortic,drc,hoa,starcraft,martens,ficken,structuring,konami,lipids,jurisdictional,desi,cellphones,cordoba,xj,sheppard,dpkg,folsom,triggering,mapa,aip,rackmount,binocular,eda,specialise,rar,remortgages,mckinley,hanks,dosing,strobe,waffle,detectable,pmi,arrowhead,nigga,mcfarlane,paycheck,sweeper,freelancers,seinfeld,tdm,shen,responders,keepsake,birthdate,gettin,upbeat,ayes,amenity,donuts,salty,interacial,cuisinart,nautica,estradiol,hanes,noticias,gmp,schaefer,prototyping,mth,zeros,sporty,tumour,fpic,pdc,atpase,pooled,bora,shu,stabilize,subwoofers,tcs,clueless,sofitel,woodruff,southport,walkthroughs,radiotherapy,minifig,transfusion,sams,zend,newtown,mcmillan,csf,lyn,witt,mcd,unep,newsflash,recombination,messing,budgeted,slogans,flashback,photometry,sutter,inr,knicks,ingestion,mindset,banda,adulthood,inject,prolog,dunk,goofy,mcintyre,aga,guilford,raglan,photonics,cdf,celtics,heterosexual,mappings,jel,snip,fascism,galerias,audiovisual,diagnosing,neutrino,wouldnt,mq,codecs,certifying,dvp,traduzca,csb,subj,asymptotic,isotope,moblog,locales,preventative,brampton,temperate,lott,srv,meier,crore,deserving,banco,diagnoses,thermaltake,ultracet,cortical,itchy,glaucoma,homosexuals,mhc,estee,wysiwyg,oversees,odp,categorised,thelist,diss,cta,diamondbacks,nzd,subtype,psx,thessaloniki,dmv,leafstaff,literate,ayp,bikers,harcourt,bubba,mutt,orwell,mietwagen,bakeware,cleanser,lonsdale,velocities,renewals,tsx,dnl,mtu,salford,ephedrine,longview,closeup,venous,hereunder,ouch,teflon,cys,debadmin,cleans,fpga,everton,rosters,herbicide,marlene,futura,smd,cheddar,ql,tucows,regex,bukake,chs,mcclellan,gopher,distal,zar,frommer,joss,shortfall,harmonica,geothermal,texmf,atlases,kohl,lorazepam,hosp,lewiston,stowe,fluke,khi,estes,hdr,caches,stomp,acidic,anc,doin,tld,gangster,deliverables,censored,fascist,lido,matchbox,trl,businessmen,bpo,incubator,experiential,eraser,jordanian,jiwire,libra,rtl,iea,uniprot,statystyki,pkgsrc,nonprofits,desnudos,czk,ethylene,slows,opm,inhibits,exploratory,spectrometer,outsole,lista,tmc,inset,polynomials,elegans,openers,shasta,dob,inet,cov,fallon,sidekick,tcb,dmca,rewriting,bahama,idl,loretta,lingvosoft,dax,allocating,newell,juveniles,gamermetrics,lcds,ortholog,tasmanian,hydrocarbon,lobbyist,kelvin,secondhand,xo,cheatscodesguides,mdl,clientele,technica,gratuito,hts,arkon,hort,bureaucratic,cooperatives,raceway,sopranos,hotties,gq,terrell,yc,closings,registrars,strlen,faye,cto,lakeview,ospf,tunneling,methamphetamine,murals,bangs,asic,knockout,radon,avantgo,asl,obi,timelines,roget,cristina,visio,autoimmune,coder,replicated,pom,timetables,kline,anorexia,errno,workplaces,harpercollins,clk,heartburn,empathy,ica,motivating,clockwise,frisco,mitzvah,chong,bashing,boosters,cyl,grupo,mikhail,denominator,
changeset,cec,jovencitas,texttt,islamabad,freestanding,resilient,eyewitness,spartanburg,hippo,trung,tenancy,offsite,realaudio,clements,dogsex,ticketing,heterogeneity,bodied,dudes,maytag,norco,altos,sleeved,overs,watercraft,scully,cellulose,cathode,monographs,nra,digitized,rotated,gaia,motown,pryor,sato,greeley,ccr,agro,ramos,quizilla,citibank,scotty,pvp,meridien,taxa,brunettes,bic,irl,mfa,endo,unhelpful,microorganisms,twister,krakow,sequoia,emt,activator,incredibles,familial,marquee,resilience,thermodynamics,seton,makita,subgroups,catchy,aia,tig,synaptic,bobcats,zappa,eec,chicas,swahili,nlp,dzwonki,enrolling,commercialization,smt,cataloging,snowboards,sami,tesla,elan,csd,ingrid,longman,unleaded,mesquite,kroner,frm,javadoc,hotbot,denali,inhibitory,phonics,dbs,refs,smh,thaliana,meningitis,motivations,rees,asteroid,donegal,endings,mwf,unlisted,philippians,conductive,sooo,echostar,microscopes,kenmore,reagent,achievable,dla,glamorous,interacts,litchfield,lavoro,hobbynutten,chomsky,venezia,yamamoto,zhu,interleukin,flashcards,homologene,interception,voltages,assignee,kip,bla,algarve,valance,stc,pisces,cpanel,orc,hemingway,gti,hdl,rendition,danmark,yun,sourcebook,hui,matador,smut,nac,dang,bradenton,meetups,bilbao,ewan,cwa,akai,deletes,adjudication,autoconf,rasmussen,bibliographies,milne,fsc,unplugged,ttc,currie,torvalds,neff,tailgate,hollis,lanier,overseeing,escalation,polymorphism,semitism,sevenfold,colocation,woodbury,tshirt,epidemiological,medic,grail,espana,horne,nostalgic,aldrich,tabled,farsi,excelsior,rial,greenspan,dhabi,chobe,tafe,pz,andrei,frazier,criminology,jeanette,constel,talkin,dup,syd,permittee,hangover,capitalize,fsu,motocross,boomers,wedgwood,mcdermott,youngs,lep,grossman,pecan,freshmeat,fnal,benzene,mcp,topper,ittoolbox,manny,arse,osteoarthritis,westlake,czechoslovakia,addictions,taxonomic,judo,mizuno,palmetto,telco,ltc,microarray,electrolux,elephantlist,sparked,qualcomm,whitaker,opc,connelly,conner,hospitalized,fec,opml,cana,ation,entitlements,wingate,healey,jabra,qmail,soybeans,awd,electrostatic,topological,coz,oversize,westinghouse,unk,reb,rios,craftsmanship,cic,pyle,seuss,cheetah,ldp,competed,fridges,hatchery,judgements,msr,zr,corbett,asx,curr,fingerprints,conv,cheesy,ahmedabad,dimlist,winfield,pinto,gallerys,jana,martindale,webstatistics,dhl,mays,risc,hcv,oboe,tzu,hurd,geotrack,kolkata,imation,hematology,expressway,steelhead,ahh,turntables,lindholm,clooney,facilitators,mcnamara,shiva,toners,kenyan,wynn,hsa,motorbike,niles,zippo,sergei,upfront,battlefront,gosh,fansite,colossians,addicting,gerd,copa,gtp,zlib,whitespace,tektronix,doesn,mccullough,cnr,microfiber,mdc,tsa,deployments,stearns,insurgency,boyer,behringer,akg,ttm,perceptual,fz,midlothian,follando,instr,ott,bsn,rambler,drywall,suzy,dekalb,sumo,topsites,hsc,tse,refurbishment,pfam,tdi,grassland,jeffery,councilman,swaps,unbranded,astronauts,lockers,lookups,attackers,actuator,reston,sftp,reinstall,lander,coby,methanol,miscellany,simplifying,slowdown,bridesmaid,transistors,marys,colgate,lousy,pharm,foreseeable,nutritionists,techweb,berkley,resistors,blondie,drwxr,cfc,isu,stm,villanova,iw,tif,cbi,cesar,heuristic,archivist,gallup,valtrex,usn,antimicrobial,biologist,cobol,homolog,fruity,stratus,fips,urea,bumpers,lumix,wildcard,rvs,desnudas,plextor,oxidative,brits,healy,pliers,kayaks,ibanez,marxist,couldnt,naperville,diplomas,fieldwork,damping,immunol,regan,wwwroot,bootleg,intellectuals,winslow,minis,rhs,leftist,tequila,limoges,wildwood,oop,germantown,bergman,gmac,pulitzer,tapered,mollige,toothbrush,delegations,plutonium,factshe
et,squarepants,subsurface,guadalupe,halliburton,underscore,borg,glutamine,slutty,mcphee,doa,herbicides,usgenweb,inscribed,chainsaw,tablature,fertilization,glitch,gearbox,stang,alejandro,tensile,varchar,intercom,ase,osg,mckee,envisaged,splice,splicing,campfire,cardbus,hubby,graphing,biologists,improv,hempstead,exilim,xlr,debuts,esi,diskette,ubs,commend,contender,southland,spie,globals,diaspora,anu,moratorium,safes,goodnight,alcoholics,asme,gatlinburg,cai,pharmacol,swe,xorg,newsquest,wavelengths,unclaimed,racquet,cout,cytoplasmic,qaida,kpmg,lanarkshire,steakhouse,stubs,solarium,sedo,fillmore,shox,greenhouses,spotlights,perks,harlow,morrissey,igp,lutz,capacitance,birthstone,primitives,bong,lingual,unframed,iter,vibes,tmdl,programa,republication,zap,veneto,zhao,hippie,acyclovir,benoit,organizes,unaudited,rz,summertime,airbag,lal,sweetwater,bjc,cfm,internationale,krystal,expansions,gms,correlate,linkout,poc,pittsburg,bylaw,kenyon,trims,epiphany,pny,devin,viewfinder,homewood,mcrae,hind,renaming,plainfield,maxon,sprintf,armagh,livechat,pdr,bhp,lyman,notfound,pho,pathogen,zagreb,gayle,ust,overwrite,revitalization,camry,postmodern,jayne,hci,kuhn,typos,glutamate,melton,oneworld,realtone,mikey,telephoto,pooling,jy,drury,ctw,tbs,sct,custer,borderline,surgeries,lobbyists,sfo,zionist,gaskets,photoblog,cushing,nonstop,hummel,corgi,ellie,citigroup,seasonally,uci,bizwomen,dti,malkin,adbrite,psychosocial,butthole,ellsworth,cline,backlog,thema,filmmaking,wwi,townhomes,usf,instapundit,mcmaster,bayside,thinkcentre,cea,biophys,hodgkin,vhosts,laughlin,congresses,electrically,ophthalmic,yz,prong,unreleased,ipa,chaplin,dfw,histology,gilman,klamath,atrial,equalizer,vbscript,helmut,lynda,vax,yak,silt,councilmember,endorses,expos,cherish,aap,undead,pto,critters,blob,kurds,ela,ical,macleod,devry,rahman,fundamentalist,subtraction,superstars,chmod,leveling,piggy,stadiums,playable,uz,sunos,lancia,perf,interconnected,tunning,whitepaper,platt,lexis,virology,csm,purcell,vidal,svcs,subsystems,oxfam,johnstown,beading,robustness,ifn,interplay,ayurveda,mainline,folic,vallejo,ratchet,cee,yl,yee,wicca,cygnus,depiction,jpl,tiered,optima,seward,photons,transactional,lhc,doggy,anodized,exxon,hurdle,donnelly,metastatic,encyclopaedia,errata,divas,ong,trey,thankyou,alerting,insofar,smileys,surrogate,breathable,differed,dickies,gonzo,programmatic,trs,teammates,barrymore,ddd,barracuda,accesskey,appellants,usergroups,initiates,pwd,mation,aiwa,whiting,grizzlies,okidata,methadone,offsets,tryin,jodie,jdk,tallinn,descarga,monterrey,harrogate,lotteries,bozeman,coauthor,cybershot,airflow,thur,oper,stn,unattached,maher,karlsruhe,yuri,cheung,honeymooners,cheaptickets,howie,dieter,centerpiece,mplayer,unwind,outings,crotch,wavelet,nothin,pathogenesis,diodes,realestate,reinstatement,botox,nge,dipole,cleo,norge,kata,tangled,giga,walsall,burnaby,lilo,adf,majorca,agribusiness,validator,jax,pixie,proofing,clits,keyring,vehicular,workbench,deph,landscaped,aziz,lula,nucl,farber,impala,commenter,celsius,flicks,hardwear,prefixes,racquetball,endl,flavours,pundits,unset,murano,optimised,bariatric,hitchhiker,isotopes,entrez,erich,conduction,grabber,orch,peridot,produc,skechers,pacers,salvatore,nts,rbc,neurosci,parton,apec,centerville,mcl,ebuyer,dermatitis,roxio,nagoya,sfc,snowfall,sss,fundraisers,fecal,vorbis,hazzard,lbp,gorman,validating,healthday,newsstand,dossier,psion,tcc,corbin,songwriting,ecg,hinton,nighttime,fluxes,kombat,finders,dictated,darlene,westcott,dca,lua,lpg,opti,proximal,canciones,irix,qp,peroxide,bryn,erm,rfi,outages,complemented,finley,thanh,
backlash,gallo,agence,zs,kjv,jonny,biblio,qm,opacity,userland,townsville,turing,veggies,centenary,barclays,eid,drexel,pedagogical,lockhart,fishnet,combinatorial,unintended,raman,rochdale,prnewswire,sthn,smog,ucl,poa,mics,punjabi,prem,katalog,kettering,hayek,brookline,montpelier,titty,ntt,fart,oxidase,qw,caterer,pregnancies,fiori,dateline,stdout,unassigned,adriana,lyndon,groupings,mems,midterm,campsite,dropdown,marketer,huntingdon,jcpenney,gelatin,qvc,adenosine,milliseconds,swatch,redefine,backdoor,jazeera,envisioned,pws,extrem,automating,sempron,cursors,divert,phnom,tbc,kanji,vod,recreate,smackdown,dropout,jrst,fallujah,lockout,moron,tnf,townhouses,horrific,abacus,lifeline,gto,torquay,dao,conjugate,winch,elektra,webtrends,shes,sabotage,blueprints,limos,fraunhofer,warhol,suppressor,dogpile,birt,rensselaer,jocks,unzip,floss,sarge,endnote,leland,telugu,midwifery,huff,pornos,primates,rmi,tangerine,amoxicillin,graz,basingstoke,crawler,angled,comin,longhorns,doha,ebsco,lynchburg,overriding,wilshire,ard,wachovia,groff,ects,lok,invicta,dongle,ecumenical,tanaka,internacional,kwan,cdl,archiv,placid,lenin,marsha,gradients,ritalin,retrieves,ferrous,dhaka,zillion,chino,ltr,caveat,gangbangs,toiletries,bedrock,clio,zines,multipart,forklift,repurchase,orthopedics,wsw,vnc,nfpa,dnf,badgers,chp,kinh,appetizer,disbursement,weblinks,telemetry,consumable,winn,depressive,stabilizer,ovary,rune,accrual,creatively,amateure,abd,interfaith,cay,automata,northwood,payers,gritty,dewitt,rect,ipx,sebring,reborn,bia,lagrange,treadmills,bebop,streamlining,trainings,seeding,ulysses,industrialized,botanic,bronco,moodle,chased,cti,intermediaries,tei,rotations,knoppix,montessori,biomed,murine,entomology,rodent,paradigms,lms,putter,fonda,recursion,flops,initiator,hsu,pobox,zeiss,ferc,tanf,sunscreen,llvm,antidepressants,decentralized,freaking,whittier,elmira,bassist,oakville,skaters,luminosity,emulators,toefl,keychains,karat,modis,ginny,egan,posh,bangles,stereos,submittal,bnib,moh,mink,simulators,nagar,zorro,ecran,ealing,ozark,pfeiffer,miers,vickers,interactivity,corso,constructors,doj,ipm,rnd,jama,lsi,malfunction,magma,smithfield,gtr,canucks,hammersmith,sdi,cricos,blum,parkland,pcbs,werewolf,wnw,midwestern,ezboard,charisma,chilli,iac,suspensions,nss,smi,malnutrition,logcheck,layton,gaines,inbred,intercultural,skateboards,mainboard,goshen,functionally,rabies,catalysts,datetime,readability,dakar,dspace,cappuccino,modulus,krause,cuisines,maclean,tuscaloosa,boosted,sprayed,gearing,glutathione,adoptions,tweaking,angina,geeky,rnb,coupler,lexapro,aig,paisapay,zanussi,minimizes,hillsdale,balboa,penh,wainwright,agc,guadalajara,pinellas,umts,zappos,daimler,spo,tadalafil,everglades,chipping,montage,geelong,ionization,broome,biases,sprawl,marantz,alfredo,haunt,hedging,insulating,mcclure,vbr,qed,waterfowl,adress,reacting,virtualization,itat,collide,syst,mankato,segregated,ests,avengers,technologist,pigments,impacting,lamont,aquariums,rigs,arginine,moot,pleasanton,televised,giftshealth,acd,simplistic,hepa,amphibians,encapsulated,injector,kessler,gardenjewelrykids,leung,edo,impl,grained,relatos,newsday,gmat,dani,announcer,barnsley,cyclobenzaprine,polycarbonate,dvm,marlow,thq,osce,hackett,divider,cortez,associative,cmo,rsync,minivan,victorinox,chimp,flashcoders,giraffe,pia,stroud,lefty,cmg,westside,heres,azimuth,logistical,firenze,okavango,jansen,tween,payback,hydraulics,endpoints,perrin,quantification,coolant,nanaimo,yahooligans,prilosec,hutchison,parsed,shamrock,schmitt,korg,warmers,newt,frontend,itanium,alleles,weiner,ola,halftime,frye,albrig
ht,wmf,clemente,handwritten,whsle,launceston,wembley,sandman,mejores,scoops,dwg,truetype,eigenvalues,airbrush,ppb,comms,regexp,quickstart,beaverton,trucker,willamette,chiropractors,tyco,mirroring,massively,aeronautical,lasalle,pwr,wordlet,hanford,plac,exhibitionism,riser,redux,gaim,audiobook,compensatory,couplings,jeezy,monsanto,cleric,rfq,contactos,esri,equiv,macrophages,yao,npt,computes,pickett,oid,charismatic,lda,teleconference,mma,whitepapers,polycom,tux,asymmetry,xpass,cfd,barbour,tijuana,niv,hamiltonian,cdg,algebras,quotient,wildcat,inlay,peta,paco,avocado,octets,dubuque,evaluator,gid,jumpers,edmunds,lerner,manifolds,awg,napoli,kristy,variances,pki,objectivity,sistema,massager,incubated,feedster,federer,turnovers,bev,eai,changers,frs,hereto,osc,clinician,alltel,gss,curacao,rapporteur,arcserve,gump,powerline,aspell,avp,safeguarding,paxton,herbie,yabb,chromosomal,hickman,runescape,salesperson,superfamily,tupac,cassini,tobin,zoos,activates,hibernate,ning,extremists,montego,rohs,cyclical,cytokines,improvisation,mmorpg,toured,tpc,flatts,cmf,archiver,rainer,rsc,covariance,bobble,vargas,gulfport,airfield,flipping,disrupted,restocking,lgbt,extremetech,citrine,neoplasm,rethinking,xfn,orientations,calumet,pellet,doggie,inflow,msw,lymphocyte,weinberg,saigon,whiteboard,wic,brody,invertebrates,elliptic,ffa,agonist,hyperion,partypoker,rockingham,sandler,schweiz,grundig,rethink,musculoskeletal,aggies,prereq,nikita,aetna,truckers,giro,laserdisc,kaspersky,dor,determinant,morpheus,ayers,junkies,ccna,jacquard,assesses,okinawa,autoscan,quantified,pnp,uppsala,distortions,subclasses,glo,condolences,hitter,livelihoods,psf,cala,telluride,apnea,mkt,floodplain,valera,wenger,crusader,backlinks,alphabetic,delonghi,tailoring,shavers,mcdonnell,aborted,blenders,symphonic,asker,huffman,alistair,navarro,modernity,wep,uab,olp,booties,cancels,newsblog,gangsta,mgp,foodservice,teton,newline,prioritize,clashes,crohn,bao,quicklinks,ethos,hauppauge,solenoid,stis,underdog,fredericton,tep,bextra,copywriting,technol,mdr,asteroids,continous,hplc,ovulation,doggystyle,quasar,euthanasia,schulz,okanagan,liters,tarrant,blacklist,clermont,rooftop,ebert,goldfish,witherspoon,slimline,animator,barbra,irreversible,flanagan,encyclopedias,csiro,downtempo,campsites,graco,lighthouses,xg,adt,hemoglobin,tung,svga,postpartum,condi,yoda,jst,dalai,xn,nytimes,kenzo,alden,trampoline,zi,restricts,gees,intakes,dogfart,swearing,ith,montel,ubbcode,yw,ninemsn,lgpl,jsf,psychotic,allyn,higgs,pulsed,ignite,hornet,atypical,contraceptives,slimming,dispatcher,devoid,jms,maricopa,mbs,northfield,idf,elites,fifo,correlates,casters,heisse,easygals,mandalay,haircare,climbers,atty,madera,calibex,mailbag,smartmedia,vilnius,dbl,doping,postwar,strat,bsp,barebone,thrombosis,smarty,whitley,lse,windermere,curtin,dilemmas,cci,gwynedd,edwardian,hppa,saunas,horowitz,cna,undergrad,mocha,escada,knockers,jitter,supernova,loughborough,directtv,feminization,extremist,tuttle,aoc,medway,hobbit,hetatm,multipurpose,dword,herbalife,ocala,cohesive,bjorn,dutton,eich,tonne,lifebook,caster,critiquer,glycol,manicure,medial,neopets,accesories,faxed,bloomsbury,mccabe,ennis,colossal,karting,mcdaniel,aci,brio,baskerville,syndromes,kinney,northridge,acr,emea,trimble,webinars,triples,boutiques,freeview,gro,screener,janine,hanukkah,caf,adsorption,sro,underwriters,foxx,ppi,noc,brunton,mendocino,pima,actuators,internationalization,wht,pixies,pancake,transmembrane,photostream,guerrero,firth,hathaway,emf,beatty,andersson,lunchtime,miro,slams,looping,crates,undated,takahashi,ramadan,lowercase,technol
ogically,anaerobic,satelite,pioneered,tabloid,pred,solubility,troubleshoot,etf,hatcher,coders,insecticides,electrolyte,watanabe,firestone,writeshield,sph,descargar,letterhead,polypeptide,velour,bachelorette,nurs,geospatial,zoned,pubic,pizzeria,mirc,henning,acf,bae,nitrous,airspace,santorini,vdr,tms,convertor,brahms,genomes,workable,ordinate,seminal,rodents,ytd,xin,precursors,relevancy,koala,discus,giftware,realistically,hol,polska,loci,nanotech,subunits,awsome,hula,laramie,toothpaste,maxine,mennonite,subtitled,qms,maidstone,abr,sda,jcb,wpa,fastener,ctf,foxy,sexiest,jupiterimages,categorization,inclusions,fosters,conc,transsexuel,limbaugh,cassie,altman,lethbridge,peng,fillers,symposia,nia,templeton,stds,hav,typography,ebitda,eliminator,accu,saf,gardenjewelrykidsmore,gazebo,preprint,htc,naxos,bobbi,cocker,steph,protonix,systemax,retry,radford,implantation,telex,humberside,globalspec,gsi,kofi,musharraf,detoxification,ree,mcnally,pma,aureus,informationweek,chm,bonneville,hpc,beltway,epicor,arrl,iscsi,grosse,dfi,penang,zippered,simi,brownies,lessor,kinases,panelists,charlene,autistic,riu,equalization,corvallis,reused,volokh,vari,fordham,hydroxy,technologists,snd,dempsey,httpdocs,speakerphone,reissues,shalom,khmer,recordable,dlt,dredging,dtv,extrusion,rtn,preggo,defamation,theron,proteomics,spawned,cep,phendimetrazine,wiener,theorems,samplers,rfa,pasco,hilbert,tamworth,itmj,msd,etfs,cde,praha,zona,landry,crackdown,lifespan,maybach,cysteine,responsibly,slideshows,aceh,techtarget,geotechnical,fantasia,camisole,atoll,shredders,gags,rips,futurama,hari,ironman,ducts,marmot,remand,hawkes,spoof,spammer,presets,separations,penicillin,amman,davos,maturation,internals,bungalows,beckinsale,refractive,grader,ecd,transducers,ctxt,doxygen,rtd,akc,cgc,intercollegiate,zithromax,onkyo,niosh,rainier,furman,newsfeeds,larkin,biztalk,snapper,hefty,ipr,valdosta,ulead,delaney,hairless,lactation,innsbruck,offbeat,teenie,protons,machined,holman,eviction,dic,pio,regionally,thurman,canaria,showcasing,afa,certifies,primes,renton,lambeth,frappr,liturgical,easements,aida,openafs,assword,rving,exogenous,sram,sault,trolls,flor,rfe,oleg,smo,analyzers,scorer,swami,oilers,nik,mandela,listers,ordinated,arlene,dividers,recoverable,gators,intraday,cruces,hollister,enews,lactose,gifford,competitively,rockstar,hampstead,chrono,nahum,raja,nextlast,xinhua,ltl,lofts,feral,neurosurgery,ringgit,ukranian,parmesan,kiosks,pnt,hooking,wip,rawlings,physiotherapy,wrexham,billabong,prepayment,jonesboro,bangers,handgun,miscategorized,itp,desoto,innovator,mitochondria,mewn,sername,usmc,amicus,vijay,redirecting,gma,shih,cervix,biblia,cosby,lufthansa,msnshopping,sewerage,ele,mantis,alerted,lsp,intron,bri,remodel,carpal,natalia,cjk,specialises,condiments,adventist,eggplant,coun,ctv,wycombe,monaghan,blogarama,undocumented,esb,vaccinations,gutierrez,bernd,needham,inuit,wordnet,wedi,keyes,photocopying,tca,avn,dressage,cafepress,phylogenetic,kurtz,morbid,inno,refresher,freakonomics,impreza,cheeky,arco,proponent,brasileiro,kar,rojo,perscription,aic,streisand,eastside,bioethics,redo,piranha,rps,cmu,uncompressed,vps,pseudomonas,sotheby,avionics,minimization,ascot,linearly,dolan,titleist,genesee,grays,fdc,psychiatrists,bom,multiplex,srt,bradbury,babysitting,asd,beehive,aeon,livin,leblanc,shorty,injecting,discontinuity,littlewoods,enquirer,downturn,fission,modulator,spybot,hrc,worldview,choreography,sfx,nth,buffering,denison,killarney,scoping,srm,mammography,epc,nepalese,communicable,enzymatic,melanogaster,extravaganza,kamloops,spss,tftp,rotherham,underestimate
,hana,mycareer,pra,cooley,gratuitement,eriksson,schaumburg,exponentially,chechen,carribean,bunnies,choppers,psyc,pedersen,earphones,outflow,scarab,toasters,skiers,eax,jamal,raunchy,biologically,nbr,ptc,qe,zyrtec,riyadh,pell,quicksearch,coates,octane,mtl,krabi,funders,apj,kal,fai,ccp,environmentalists,fatah,ifa,ackerman,gbc,soooo,soapbox,newberry,deanna,bestellen,elongation,webcrawler,wanking,ofsted,yb,dortmund,boardroom,nico,taping,mro,atleast,somatic,fcs,niki,malloc,lanzarote,slump,nerds,laude,mec,simulating,enrol,bts,cflags,xps,datafieldname,wycliffe,dda,apts,aikido,slo,batches,dap,ssr,kournikova,moshe,fsbo,shippers,mtc,cav,rrr,wildflowers,polygons,delimited,noncompliance,upi,sna,vidsvidsvids,herts,bellagio,webapp,haryana,eeg,dlls,babysitter,linotype,produkte,lesbica,pes,mediators,hone,riggs,jockeys,seater,brightstor,deliverable,sanding,buffered,orton,indesign,lakeshore,ctl,aland,clarins,pelham,huf,ronin,comps,mgi,greco,kontakte,edema,leaderboard,mce,hsv,geocities,argc,palos,ori,carotid,citi,squish,cny,gorham,calphalon,blasen,midwives,nara,nab,netbeans,cyclones,tapety,snowflake,blackhawk,weinstein,sterilization,assessors,chenille,dehydration,haircut,fhwa,misconceptions,alternet,undeclared,bari,songwriters,tolerances,incarceration,hierarchies,redondo,lactating,aquamarine,yg,edm,sedimentation,optometry,mobilize,attendee,bmd,dialogs,rpt,viktor,trajectories,federico,openvms,ppo,pag,precio,leapfrog,thermoplastic,sexchat,kingman,deterrent,ghraib,duplicating,tuba,encodes,garamond,cirrus,alanis,kilometer,ballarat,wacom,nsta,actionscript,ivf,modifiers,hijack,thomasville,accorded,fryer,namco,xmms,dammit,produkter,motorhome,ade,mfrs,editable,greats,milosevic,marcy,boron,creighton,wolfenstein,bolivian,rowbox,pauls,phobia,superfund,vcc,sadler,piercings,riffs,briana,geronimo,tetra,freakin,alb,retrofit,cytokine,stylesheets,coalitions,tactile,cinematography,vivitar,wannabe,blogwise,amador,skier,storyteller,bpa,pelicula,ischemia,fms,comput,wristbands,livecams,hibiscus,rheumatology,edn,somers,cray,iol,waterbury,selectivity,carlow,maxx,haggai,demonstrators,raiser,sanger,mullen,periphery,predictors,woodwind,snl,modblog,repo,burnley,antispyware,sumter,rcd,woodside,tylenol,megabytes,backlight,naturist,zephaniah,airbags,plethora,cabriolet,yh,retiree,atol,sonet,anthropological,mikasa,iverson,cae,buckeye,dollhouse,stereotype,uship,ubisoft,escalade,breakaway,produkt,sealants,montclair,dinghy,gnus,melia,feedbacks,concurrency,healthgrades,hoya,revista,lrc,flied,tvr,joliet,ped,chappell,wollongong,peo,blowers,doubleday,guidant,remodeled,eea,bcp,situational,nasd,chakra,dfa,jammu,wetsuits,edc,birkenstock,vivendi,emulsion,fielder,sorta,courseware,biosphere,skb,plumpers,muschi,qcd,ollie,gurgaon,rwxr,federalism,gizmodo,laminating,coltrane,colitis,unincorporated,liang,blogged,cryogenic,antispam,homologous,hassles,symptomatic,rtc,trademanager,bipartisan,rhodium,exchanger,preseason,januar,bumble,intimidating,randi,placenta,abbotsford,upn,dulles,brainstorming,wea,dougherty,sarcoma,sniffer,rotorua,bahasa,iona,bioscience,tricia,residuals,gforge,copd,homie,leesburg,afm,xref,flashpoint,mobygames,cortland,mailers,tented,nicholls,skew,mahoney,infoplease,budd,acn,hollands,muni,modernism,elizabethtown,dunhill,eee,didn,guidebooks,scotts,wye,wsj,biosciences,macgregor,atms,habakkuk,depaul,binge,cyst,hexadecimal,scissor,progra,smyth,mott,jazzy,headboard,diflucan,bronson,standardised,cations,cics,ecole,centos,hysterectomy,housings,wrc,movado,mcdonough,krista,pharmacokinetics,chantal,morristown,riverview,loopback,torsion,ultrastructure,luci
da,leftover,sykes,anecdotal,rheims,integrators,unlv,arboretum,sharealike,lowepro,erc,ischemic,illustrators,plugging,macbook,bjp,arent,vignette,qf,homebrew,altoona,pheromone,fireball,decorator,franken,netpbm,antalya,harmonious,nne,recordkeeping,modernisation,myx,sdr,muskegon,daley,modality,liberalisation,utilise,arturo,appellee,granules,multidimensional,rollout,homegrown,datamonitor,reinforces,dirham,leahy,myc,esophageal,kira,approximations,forzieri,intermediates,kgs,albumin,grantees,loveland,maloney,sativa,paramedic,trademarked,edgewood,stressing,potable,limpopo,intensities,oncogene,antidepressant,ballpark,powys,orca,mascara,proline,molina,nema,wipers,snoopy,informationen,esf,riverdale,unleash,juelz,bls,noarch,koss,captioned,paq,summarizing,ucsd,gleason,baritone,independant,chlamydia,relativistic,rotors,driscoll,andalucia,mulher,bagels,subliminal,insecticide,segal,spline,undisclosed,noni,letterman,almeria,bryson,wtb,towson,htaccess,malayalam,crue,loo,pinoy,pallets,uplink,sheboygan,terrence,ghc,gateshead,probationary,abducted,warlock,breakup,fiche,juror,bowden,goggle,metabolites,brainstorm,smu,ahl,bateman,egcs,chirac,museo,coffeehouse,scitech,gcn,trolling,elmore,grads,lz,andi,localpref,kayla,ccl,smeg,donut,libido,fuselage,diabetics,ballerina,crp,morgantown,paseo,ptsd,redheads,curran,diam,ragnarok,hkd,summarised,jx,caitlin,conscientious,bandai,hobs,eft,endometriosis,cushioning,mcneil,belvedere,nar,acetyl,boomer,perinatal,idm,automake,multichannel,petr,daredevil,corcoran,mrp,holliday,daimlerchrysler,bowes,mcgowan,agfa,mep,goss,mulch,jvm,harwood,ranma,marinas,mobipocket,streptococcus,murcia,landfills,mcknight,edd,baud,mcfarland,designline,undies,prepay,kodiak,printout,nonresident,marysville,curso,palmos,dorsey,roo,soulful,websearch,infotrac,mpgs,fouls,openssh,bravenet,etsi,serendipity,tq,sequentially,yogi,landslide,howtos,skool,evolves,iberia,anakin,duffel,goodrich,subfamily,perennials,ary,matchmaker,sagittarius,locates,dysfunctional,maastricht,bulletproof,mcr,uga,stenosis,chg,recentchanges,abrasion,eindhoven,opportunistic,pcl,analogs,bba,hillcrest,cantor,econometric,trafford,opie,cro,elkhart,ringers,diced,fairgrounds,cuyahoga,plt,cartons,mustangs,enc,addons,wstrict,gow,pharmacological,headwear,paediatric,genitals,hendricks,ivr,telemedicine,judi,icom,academically,chilton,cbo,amaya,flickrblog,fulbright,foaf,cllr,xh,fulltext,centrum,tecra,kinks,unisys,preschools,mcallen,contoured,aberdeenshire,icm,schenectady,schematics,dojo,eserver,nin,interfacing,borrowings,hrt,heparin,universiteit,hardcopy,connective,nihon,oso,adkins,dunlap,nsc,irr,clonazepam,wikiname,gaithersburg,biophysics,chromatin,mathis,bulova,roxanne,fca,drg,refurb,wasteland,plotter,findlay,cymraeg,alc,meek,phonebook,doodle,arb,wabash,chronologically,wms,whitfield,mchenry,eide,assy,dusseldorf,mmol,shabbat,nclb,accommodates,cmi,stacker,msf,touchdowns,plasmas,barbell,awk,bibs,sneaky,smarts,lankan,synthetase,lightwave,alignments,coached,jac,framingham,opensource,restroom,videography,lcr,spatially,doanh,preprocessor,cohn,aon,marginally,ocs,bak,cavalli,ddc,grunge,invoicing,bigtits,carney,braintree,southside,vca,flipped,cabrera,mindy,surfaced,glam,cowgirl,loginlogin,mtr,nakamura,layoffs,matures,cty,apm,iggy,margarine,sneaker,glycoprotein,gcs,queued,sab,hydroxide,hanley,cellulite,hwang,mtd,mcqueen,passat,fluff,shifter,cartography,firstprevious,vito,predicates,bcl,douay,zeitgeist,nickelodeon,dru,apar,tending,hernia,preisvergleich,britton,stabilizing,socom,wsis,anil,midsize,pullover,lpn,hoodwinked,photoes,beastie,yucca,harvester,emmett,shay,obstr
uctive,pacman,retroactive,briefed,bebe,krusell,clickz,kermit,gizmo,atherosclerosis,demography,migraines,wallingford,newborns,ljubljana,restarted,rnc,meow,thayer,kilograms,packager,populate,pembrokeshire,arcane,impractical,tcg,decentralization,honeymoons,authoritarian,alu,judaica,tropicana,tyan,cardholder,peavey,gothenburg,geocaching,ident,fluoxetine,tipton,teva,lsa,effortlessly,failover,cysts,primetime,kenosha,kokomo,penney,snorkel,amin,iridium,dwyer,conserving,toppers,cfg,tvc,alternator,nysgrc,underwriter,springhill,panhandle,joann,isoform,borden,bombed,elt,halton,guaranteeing,fasta,gonzaga,boobies,nadine,breitling,nutr,ingersoll,sandia,pacs,azur,helms,beos,srcdir,sherpa,tuff,ligands,smalltalk,sorghum,nucleotides,mmv,ebi,sbd,lmao,enhancers,collaborated,produ,lila,slotted,nnw,fila,decking,boz,accelerators,howstuffworks,neighbourhoods,michal,rab,hideaway,dwayne,coda,cyanide,kostenlose,grotesk,marek,interlibrary,provenance,sra,sog,zinkle,fanfare,mapper,boyce,mlk,dystrophy,infomation,footballs,emailemail,bathurst,fof,duracell,feinstein,magnavox,evra,servlets,tss,neill,epithelium,thc,webbing,bef,jaya,mame,ppe,emusic,tso,epp,glencoe,untested,overviews,affleck,flinders,informationhide,hearst,verifies,reverb,kays,commuters,rcp,welivetogether,crit,sdm,durbin,riken,canceling,brookhaven,gauss,artistry,phpnuke,falkirk,pitts,dtp,kwon,rubric,headlamp,operand,kristi,yasmin,gnl,acdbvertex,illini,macho,ningbo,staphylococcus,busting,foss,gfp,yhoo,sloane,wooster,delong,mdi,nilsson,substring,gac,smelly,gallatin,hangar,ephemera,heli,choo,testicular,miramar,wearable,carling,buildup,weaponry,swann,lian,landline,entrees,corpora,priv,geeklog,antiviral,profiler,lodi,minimalist,wolverines,bbcode,protagonist,rata,freephone,plm,raytheon,refseq,kingfisher,numark,moline,esac,takers,gts,amana,worldcom,hiroyuki,procter,pragma,winkler,walleye,icf,bagel,asbury,alpharetta,syncmaster,wists,xfx,wicklow,tsr,baer,yf,cmr,chil,leftfield,lettings,walkway,coos,petrochemical,fia,chula,zalman,carer,humankind,cmms,hawley,inverters,mccormack,pdu,faceplates,yeats,motorhomes,cie,icts,mcmurray,zucchini,lanai,pwc,chiral,fermi,newsreader,multiculturalism,cuddly,listinfo,shp,primedia,chl,estrada,pricey,shekel,apn,diocesan,readout,clarifies,klm,dimes,revlon,dtr,cranky,paparazzi,zheng,merida,bambi,interceptor,rox,jamster,noritake,banding,nonstick,origami,marketwatch,yeti,arf,umbilical,linz,donates,foursome,lawrenceville,azul,springdale,moisturizing,loeb,isr,huston,gatos,disqualification,suunto,angiotensin,spitfire,wfp,realnetworks,summation,plame,querying,gpc,autonomic,fq,pathname,novartis,ufos,manatee,qh,restructure,larval,zeu,socal,resettlement,mistakenly,radiative,drapes,intimately,koreans,realy,womans,groin,greenway,spamassassin,mata,gigagalleries,algerian,frat,egullet,electrics,joni,stencils,reinventing,reqs,latte,shaolin,shopped,beattie,hrm,hypnotherapy,muppet,abp,checkpoints,tpa,derechos,pieter,timesselect,viacom,strcmp,kardon,sideshow,classifier,westbrook,repro,moser,studi,sdf,colonialism,supermicro,scorers,sitcom,pastries,aldo,azim,authorizations,holsters,neuropathy,backorder,humphreys,metroid,vcs,nikkor,mcf,jacobsen,conjugated,lcc,unethical,vacances,whos,asr,alphanumeric,grumpy,fixedhf,holm,sirens,lfs,benelux,caters,slp,prasad,kirkpatrick,jamahiriya,tol,coagulation,girly,bnp,archdiocese,orbiter,edgewater,lem,keyless,repatriation,tortilla,dissociation,industrie,watercolour,ucb,waite,madsen,mnh,opticians,nop,newmap,mse,bottleneck,regressions,linton,sio,buckeyes,bodywork,applique,jewell,gef,hornby,redefined,empowers,informix,tots,goalkee
per,startseite,blurb,feedburner,dominatrix,norcross,compiles,bancorp,encoders,pmp,boomerang,temecula,ghg,structurally,caveats,homeownership,birdie,disseminating,lanyard,horst,interlock,pagers,esophagus,ocz,sexshow,jackpots,optometrists,zak,krueger,hickey,erode,unlicensed,termite,ibuprofen,drugstore,audiology,gannon,integrals,fremantle,lysine,sizzling,macroeconomics,tors,thule,gtx,eeprom,kaleidoscope,dmitry,thawte,busters,officemax,absorber,nessus,imager,cebu,kannada,sailboat,hectare,netball,furl,holographic,defra,salaam,respirator,countertop,gla,installments,hogg,partying,weatherford,sav,exited,crispy,coffees,knowhere,sequin,bendigo,unis,bandwagon,janssen,myst,polymerization,byval,nozzles,labview,snitz,rpi,hcc,unbelievably,pasting,butyl,ppd,forested,unrivaled,roadways,varna,maidenhead,almanacs,gfx,randomness,middlebury,muon,ringo,svr,caliper,lmb,woolf,innovators,anode,microprocessors,tps,stk,siting,misinformation,aneurysm,closeups,kinsey,prp,cnbc,eroded,tris,lonnie,hartlepool,bol,alastair,agr,fafsa,javac,uclibc,fodor,afrikaanse,colognes,contestant,snell,prescreened,believable,anesthesiology,elmhurst,misha,melatonin,bongo,rmb,mdf,terr,xw,bloke,avc,oxnard,cess,cedex,electrochemical,brevard,brw,brenner,slalom,waterhouse,calif,acces,aquatics,cari,lurker,buffett,chews,hoodies,phony,vila,fsf,gmake,nikko,grasslands,monolithic,polifoniczne,bugtraq,cpage,engr,subcontract,prophylaxis,texinfo,ings,cotswold,guillermo,unstructured,boop,hitman,tla,mercier,restated,nukes,duplicator,mehta,macomb,fundamentalism,australasian,isk,rerun,moda,segmented,cranberries,leas,pleated,handshake,digests,innovate,goode,erisa,jeb,dismantling,ferrell,hellometro,leavenworth,snowmobiling,fora,fdr,gaba,vfs,dlc,byers,codon,webnotify,sfr,pylori,loomis,acidity,gershwin,formaldehyde,welder,cyp,kendra,switcher,ocaml,goldie,mab,gooshing,mockingbird,ponte,xlt,hogwarts,juicer,lloyds,echelon,gabba,arranger,umbro,metallurgy,baa,neq,liteon,queuing,vsize,shiite,valuing,argon,coheed,hooray,flightplan,carefree,souza,kershaw,millar,biotin,salter,testicles,morph,econometrics,remo,msec,marconi,ote,receiverdvb,expatriate,tantra,codified,ncs,overlays,thingy,comforters,conservatories,ruskin,dpf,cyndi,germination,lipoprotein,ayurvedic,planetarium,tribeca,bihar,keenan,discos,eastbourne,robles,gianni,dxf,homebuyers,nogroup,freescale,wiccan,sess,merrimack,groton,billboards,searcher,uttar,mailinglist,metacrawler,priser,osceola,bioterrorism,tourmaline,leatherman,microns,unifying,anaesthesia,videogame,aws,dtc,chc,intranets,escalating,bluebird,iucn,gls,mahjong,interstellar,kenton,underestimated,groupsex,loudspeakers,flexi,vst,junctions,redman,transferase,bvlgari,hampden,nls,selby,wausau,stoppers,snowshoeing,uppercase,cirrhosis,publib,metrology,connexion,stoneware,moncton,traci,krumble,pathogenic,rasmus,raritan,riverfront,humanist,usefull,pompano,skewed,cleary,nepa,ludacris,sequenced,xiao,teaming,flatshare,aromas,positional,alesis,glycine,vee,breakthroughs,cashback,throwback,charlestown,nexrad,gestation,powering,magee,osnews,logins,sadism,emb,muncie,panoramas,plenum,ato,aotearoa,foro,hydrolysis,flac,labia,immunizations,existential,umc,sweaty,segond,addis,beasley,breached,rounder,rectum,nha,perched,jah,dsr,lta,videoconferencing,cytoplasm,makin,sedimentary,laurier,aachen,wnd,olney,massimo,chlorophyll,scop,shipyard,centering,manley,sunroof,dvorak,etch,answerer,briefcases,gwent,bogart,amit,kaufen,untranslated,raffles,reconnect,teeny,benthic,mcmanus,infotech,carlin,lithograph,ure,stoner,repost,iras,resurfacing,kelli,spitzer,jae,dunne,hyperbolic,pstn,bisque,an
zeigen,standoff,westbury,solano,kailua,acoustical,photovoltaic,orchestras,redline,reggaeton,qstring,declan,tama,wank,virol,iy,solvers,linuxworld,canadiens,rockabilly,smokin,tumours,loudspeaker,handicapping,tatu,evangelion,excretion,breakage,negra,horsham,jing,petro,notations,midgets,comprar,homemaker,neverwinter,ddt,categorize,geophys,loa,tga,foreskin,jornada,inetpub,premierguide,reflexology,sophos,helphelp,foundries,registrants,sweats,atvs,capstone,adecco,sensei,publicized,transessuale,federalist,objectweb,portrays,postgres,fesseln,hidalgo,prosthetic,kristine,microfiche,dce,watergate,setbacks,karan,cdata,kfc,grandview,amerisuites,aural,gatekeeper,heinemann,decommissioning,nq,gestion,thermodynamic,patrice,profiled,disambiguation,mmmm,bittersweet,mul,gustavo,isolating,xine,bigfoot,nrw,mycobacterium,yamada,coldwater,whitehouse,cultivars,santorum,mugabe,margo,rundown,carbondale,gizmos,effingham,beastility,agus,ucd,dowling,mitac,steels,oakdale,nda,mystique,cortislim,oes,disp,loaders,trouser,oai,hoboken,sepia,differentials,sabi,dancehall,sarajevo,brava,underscores,roadshow,fbo,sabah,russel,nephrology,squamous,mvn,wz,malden,mita,orissa,ise,vfr,chianti,minsk,coffey,domestically,qantas,brandi,artefacts,solihull,tation,tchaikovsky,refineries,ronan,pricewaterhousecoopers,swimsuits,automates,wylie,whomever,sidelines,shaffer,toolbars,preservatives,wagga,kenai,bobs,mortensen,unplanned,characterisation,ppa,mip,peering,fopen,vgn,wmissing,csn,rudd,bourke,pelvis,goodmans,potluck,ioffer,cial,davidoff,creamer,tsc,gfs,contax,columbine,portables,fledged,aquinas,kidz,edonkey,hourglass,pagetop,paloma,gunmen,disables,ssangyong,antiretroviral,moschino,hoyt,okc,lockport,pittsfield,pollack,hoyle,arousal,inhibiting,reo,mammary,trampolines,hillman,trimmers,bridgestone,muvo,wcities,boi,diddy,conveyancing,apl,echinacea,rok,phish,frigidaire,oxo,hah,halibut,penrith,brno,silverware,teoma,rcra,mlo,ideologies,feminists,fff,sculpted,uq,rta,embo,rollin,contraindications,einai,ssrn,oup,rebuttal,underside,alumnus,archeology,preise,ontologies,fenders,frisbee,hmmmm,tipo,hyperactivity,seagull,nanotubes,polos,bonaire,hehehe,fim,reece,elsif,spinners,annealing,maximizes,pld,ctp,eurasia,dickey,ako,carpeting,yorkers,ltte,eukaryotic,bexley,sions,bremer,marisa,frustrations,delgado,resection,dioxin,islamist,brant,hss,kubrick,fft,touchscreen,layoff,facelift,decoded,gry,shitty,dodger,ihs,lessig,zaf,revell,sched,rpgs,euphoria,acuity,popper,lockdown,nsp,transmittal,heatsink,assholes,hayman,novi,equilibria,requester,allrecipes,serialized,hangzhou,bjork,stringer,nanjing,milligrams,jab,snohomish,strathclyde,yoko,intramural,curated,finalised,tania,cdd,gund,tascam,noam,hardstyle,arun,cga,waistband,fibroblasts,leandro,metastasis,userpics,greenbelt,leuven,printk,reachable,pss,radioactivity,caine,gyfer,boch,howdy,cocksucking,marlon,timmy,liga,gregorian,reorder,aerosols,archeological,logarithmic,sexape,robby,completions,yearning,transporters,sandalwood,megs,idp,rapidshare,tsb,omnibook,gamepro,bca,decontamination,tamiya,euclidean,salina,woodford,formalism,aching,nbs,audigy,libexec,eyepiece,bibl,bobcat,freehand,guo,ltsn,itil,nugent,esr,sce,killeen,jamming,applicator,icrc,mezzanine,meghan,cupertino,logfile,zed,humidifier,padilla,susanne,collapses,yung,longwood,krw,mainstay,descr,dtm,atcc,tasman,accessoires,mucosa,dachshund,zf,syringes,breakpoint,telus,stoney,nepali,regimens,wok,canola,slicing,reproducible,experi,skydiving,sof,bogota,discogs,datagram,videographers,cag,nicks,platelets,trannies,pamper,nineties,bracknell,disinfection,perfusion,postseason,tiger
direct,smoothie,punisher,tabbed,tcu,alene,lismore,coquitlam,auctioneers,somethin,daniela,dials,enhydra,kyrgyz,iia,bianchi,iata,zim,buscador,roadrunner,blackhawks,jsr,misfits,quiksilver,nwn,sqlite,siu,tarantino,addi,jkt,buyout,replays,wcs,adrenergic,bottling,caldera,baseman,botanicals,techie,farr,vtech,donde,beyer,versiontracker,pse,hashcode,tradeshow,lewisville,aster,transparencies,bloomingdale,northrop,revo,overkill,nlrb,lazio,enr,diag,chiapas,freedict,disponible,morissette,effortless,hydroelectric,cranial,hindsight,orientated,abrasives,fpc,brl,vpns,feingold,thunderbirds,dha,wot,geog,harrah,wxga,nmfs,boynton,cashing,spousal,abusers,twinlab,vick,aml,sodimm,copley,mallard,twikipreferences,airman,configurator,clc,neurobiology,diamante,dreamworks,corsets,dowd,escrituras,bureaucrats,songtext,wham,phpgroupware,cyclin,conyers,youll,kowloon,fairytale,pickens,bybel,mln,wres,barm,amplitudes,nmap,nvq,ocd,ryu,microcontroller,premiered,institutionalized,hamm,gyno,bhopal,circulatory,centerline,chairmen,guerlain,pedo,hussain,portlet,proscar,histone,opioid,totalling,pyobject,translational,lehmann,keaton,elkins,jamison,interstitial,inest,tanzanite,helical,redlands,sagradas,fondue,windscreen,adderall,othello,supersonic,pocatello,maniacs,sysadmin,foothill,earmarked,highspeed,uncheck,rapes,vlad,cif,photosynthesis,junit,remotes,epo,mcm,ucf,nacl,sfa,empirically,dfes,addon,pon,feelin,callmanager,deteriorating,statenvertaling,cypriot,entert,fascia,woburn,jalan,fryers,cally,layering,geriatrics,picky,conley,boces,barth,lvm,mooring,mcdonell,expats,bizarr,loadavg,perla,micheal,bok,friendster,endoscopy,msx,buzzwords,lumen,airwaves,jagger,setups,inman,schindler,limewire,drawstring,midrange,frodo,superpower,recliner,trisha,trium,utm,grimsby,wyeth,urs,kds,adjuster,impeccable,shari,marketplaces,tefl,sudo,technische,characterizing,gawker,gagging,cyclist,atg,generics,richey,magneto,crunchy,teletext,drwxrwxr,crabtree,underfull,hemscott,webmasterworld,objc,musicmatch,sealant,timberwolves,harriers,shangri,robo,roto,mnem,nnn,aidan,fidel,executables,concertos,vob,extracurricular,haverhill,squirters,hbp,tonal,atr,ashtray,gpu,payton,psychoanalysis,hesitant,poco,nedstat,rcmp,microchip,eroticos,fea,kors,susquehanna,userinfo,modulo,antler,bangladeshi,desking,nikolai,nuys,ludhiana,rdr,spankings,chatrooms,pretreatment,brittney,jer,tianjin,qj,winnebago,mcfadden,notecards,tix,murfreesboro,quaternary,subtracted,tropez,mcgovern,olivetti,hikers,vivaldi,cuties,lnb,gilchrist,preheat,bernadette,microdrive,rookies,overton,potpourri,neiman,seb,sigs,jarhead,momo,uzbek,ttt,dubya,signatory,cim,energized,brite,shs,minimums,needlepoint,deng,camargo,oems,bolle,webrings,ehrlich,azz,firefighting,icalendar,disallow,exch,mclachlan,zaragoza,brixton,efi,kilo,tcmseq,moisturizer,suonerie,remanded,empresa,shoebox,disagrees,lowdown,trove,filer,apologetics,englisch,texarkana,threonine,metart,siti,encephalitis,tomatometer,arias,kenner,anamorphic,subspace,cleats,ifp,circ,pressured,peppermill,sml,clarifications,zionism,pti,retin,klicken,disjoint,ema,openldap,koenig,carats,hijacked,tch,burlingame,checkbook,candice,coworkers,eno,karla,cus,gio,statm,haifa,reincarnation,budweiser,heuristics,tunisian,hologram,macular,eral,refinishing,chia,celestron,leyland,reloading,hombre,munch,basf,rolleyes,bidirectional,ahhh,chica,starfish,kurdistan,boro,heartbreak,preps,irina,mylar,congestive,dmd,schilling,twikivariables,battleground,tectonic,equate,corbis,inflatables,naacp,pathologist,minnetonka,langston,memoriam,underserved,rectifi,elmwood,fukuoka,glbt,rsi,parr,pob,ods,welles,guja
rati,sportsline,leno,healthwise,vrml,sida,azres,sapporo,jscript,predictability,pajama,paddlesports,adenocarcinoma,toning,gestational,kravitz,ptcldy,snowball,adl,travelogues,crl,zocor,ecotourism,leadtek,hkcu,morehead,niro,fueling,orthopaedics,crayons,tikes,revamped,olap,curfew,hamlin,brandeis,bree,stylistic,corneal,beckman,crusher,riva,prefs,militaria,marshfield,elo,swank,matisse,villeroy,proactively,mccarty,zas,acdbcircle,horney,modeler,progressives,grosvenor,linger,creationism,dork,claritin,psychosis,fei,firsthand,gigi,cranston,hayley,ags,muted,turbidity,mountable,kiki,vz,avondale,oceanographic,zzz,tsg,epl,nonzero,iwork,scavenger,touted,candace,kava,kronos,adjuvant,tyneside,travolta,sari,preventable,bumpy,aleph,lga,conroy,mastermind,vaccinated,coburn,rawk,acceptability,stryker,surcharges,noticeboard,chapin,permutation,colpo,ucsc,mulligan,fod,ketchup,alimony,tng,viscous,skk,cmm,unambiguous,emphysema,epistemology,grantham,avila,solana,toolkits,soloist,rejuvenation,chn,jse,anaconda,bsnl,carfax,leveraged,wega,scanjet,ibc,meng,burley,efa,freesex,plasmids,steffen,xz,woofer,lada,hinckley,millimeter,snape,rollercoaster,tdc,connery,newswatch,roundups,keylogger,parka,scouse,unists,timo,hea,spock,ffs,bmj,farrar,decompression,draco,mika,galena,msft,inactivation,metafilter,mbna,lymphatic,ofc,gian,berks,hdv,wirral,boxset,ashrae,ilford,allman,kroon,gmo,sdc,builtin,lisboa,coc,rollback,westgate,thd,bobo,crockpot,weaning,snowshoe,hijackthis,backside,fetchmail,candlewood,angelfire,ucsf,painkiller,nutty,fenway,restrooms,myeloma,scallops,osteopathic,vividly,rmit,countermeasures,ofertas,gwinnett,dirs,duvall,wildflower,stackable,greensburg,barebones,merino,stooges,chatsworth,jello,mtime,barium,toric,looting,kiefer,agg,mauro,shearer,decca,hydrophobic,unsw,millard,btn,terraserver,returnable,ohs,resuscitation,cancelling,rns,nrg,stratification,oliveira,cahill,grumman,webdav,adagio,sunburst,ayumi,sev,zt,bela,swt,startups,ranting,udaipur,tonya,erupted,ghostscript,meltdown,rainwater,gellar,alm,vy,cnrs,redefining,shar,vesicles,piccolo,scalia,resizing,showrooms,verifiable,lobo,nunn,boyds,havens,bacterium,zb,sideline,bushing,ligament,penpals,translocation,costco,serialization,wst,playgrounds,universidade,fong,hbs,zips,ntot,eigenvalue,conductance,albemarle,mudd,dvs,niels,explodes,lindy,coimbatore,panzer,audioscrobbler,keri,soviets,tweeter,poncho,sids,faerie,oooh,oceana,ayn,wakeboarding,stinger,yuba,chipsets,anastacia,collapsing,yaoi,gwyneth,kuwaiti,jalbum,storageworks,duplicators,cubicle,rana,winfrey,avanti,iop,blige,papaya,auger,macclesfield,mongoose,crossfade,instrumentals,iconic,sulfide,dawg,mahler,maurer,auschwitz,gambit,accom,stb,uxbridge,baan,baumatic,slt,landis,fredrick,jogger,occlusion,jz,charlize,covent,reinvestment,ssdasdas,chatterbox,neutrons,fss,silo,polystyrene,amon,jodhpur,intelligencer,dundas,netmag,molokai,pluralism,kobayashi,tetanus,bcd,neuromuscular,fkq,caribe,iit,nphase,multifamily,timres,nrcs,farnham,coors,execs,hauser,citeseer,hiker,manuf,strategist,electroclash,outlays,ktm,zloty,osmosis,mojave,renova,hsp,soothe,mariposa,bir,advancements,franck,bock,fsm,leary,slurry,ker,dte,soulmates,marissa,sga,beretta,chiropractor,vibrational,sandusky,obsidian,dressers,winger,endeavours,argonne,runnin,bfi,gaye,colfax,logics,camedia,ctd,optimise,ernesto,voeg,adamson,coeds,subdirectories,asain,guilder,comparator,sealer,sleazy,onstage,todas,waterproofing,devlin,riel,pinky,lewisham,mints,wdm,avocent,invertebrate,brea,rebellious,carnitine,trib,webex,pairings,guesthouses,yikes,exorcism,grilles,mim,cultivar,orson,teammate,i
dn,hrvatska,sequencer,grandparent,demonic,wonka,prezzo,opto,collaboratively,oberlin,nrl,gorda,newburgh,alcoa,mums,facs,lossless,mmp,beasteality,imbalances,andean,superconducting,spectroscopic,armpit,dect,mew,worsening,symp,igf,metalworking,groundhog,clomid,ginkgo,decedent,dimethyl,retval,openurl,baku,telescopic,vespa,phasing,lactate,poughkeepsie,dodson,monorail,bookworm,enero,sabbatical,ced,skeptic,backlit,smr,kentech,lamette,gita,itm,ath,hennepin,foucault,onshore,acls,pwm,florals,millimeters,krauss,asca,wicks,pathologists,fanfiction,pathol,toxics,ipcc,kinesiology,potions,tern,squirts,delmar,storybook,grenades,rls,etrex,contrasted,opting,hauled,taupe,renta,grd,odeo,jiangsu,osd,hookup,myron,atb,ctg,doreen,altima,keepsakes,seawater,ecko,zarqawi,contenders,conveyors,accenture,iagora,haier,crutchfield,fulfills,rota,kelso,petaluma,ifrs,servicios,printmaking,miata,julianne,dotnet,reconstructive,metcalf,vicksburg,gri,bookshelves,supermodels,glycerol,wiseman,sliders,carhartt,redford,itemized,rsp,defamatory,eir,matheson,amalfi,currentversion,renminbi,yap,mangas,bottlenecks,pyrex,huffington,sculpting,sedans,dpt,hoobastank,launchers,finishers,psychologically,ssm,schaeffer,northside,interdependence,microfinance,droplets,inducted,fos,uninitialized,conor,repercussions,woking,longmont,medion,monika,hydrological,runes,hobbyhuren,ents,ortega,breweries,landon,burrell,forecaster,quickie,stephane,parabolic,boreal,bankroll,bioassay,martinsville,ldem,interventional,teensex,tabulation,joop,creampies,trier,arbitrage,dogwood,convergent,enviar,hutt,majoring,techwr,glitches,dugg,qwerty,equivalency,rela,sedation,quik,rosemont,xk,harmonics,devi,highschool,orvis,centimeters,lavatory,destructor,accelerates,opts,relocations,wilco,tricare,beckley,ryde,januari,kee,blacksburg,anova,midfielder,tornadoes,nand,ladd,docklands,mgs,tanzanian,padi,msl,clamav,megastore,xander,eon,winelands,syllabi,elif,lorne,noida,visalia,mykonos,wcc,krieger,safeway,sheri,prosite,wikis,mozzarella,glenda,uta,dqg,waterville,yonkers,republish,endoscopic,dilbert,vfd,transen,konqueror,feliz,biscayne,sexocean,debconf,disproportionately,taskbar,libero,synchrotron,tet,memorize,marquez,williston,muppets,volumetric,umpires,shuttles,jumpstart,motogp,hyperplasia,nber,donahue,parodies,prado,legit,humax,scrapped,ingo,dillard,orphanage,disruptions,erasure,preamp,pde,mcallister,ziegler,loewe,dowload,msb,iptv,bondi,freelancer,felton,dpp,umax,radars,dmg,materiel,megadeth,cooperstown,sdh,staffers,mawr,daw,comptia,teddies,upsilon,sizable,coenzyme,enzo,afterlife,mather,ncurses,harddrive,cml,counterpoint,batesville,skywalker,franke,takashi,wristband,jimenez,esque,chiller,barra,ales,worthing,zna,jonathon,psr,sump,breadcrumb,sucrose,amro,portege,neogeo,renewables,filipina,sgs,mbas,ihop,cortisol,banshee,supersedes,bullseye,prezzi,rbs,pacino,cajon,downloader,seabrook,leif,jrr,iwc,taranaki,chronically,merkel,megaman,setq,preschoolers,vcl,unenforceable,lto,busi,noone,rotc,fisheye,oaxaca,gerontology,microsano,predation,gaas,kilimanjaro,exacerbated,emr,infestation,yarra,volker,linearity,huey,aerials,stylist,porosity,schofield,alam,sprayer,tirol,sfu,gliders,corby,wenatchee,prognostic,unregulated,mult,pittman,bbl,hadith,ots,kdelibs,jayhawks,teesside,rav,lobos,reportable,dickerson,carotene,filesystems,enrollees,cena,sanjay,compaction,juicers,gemm,methionine,lala,toplist,holyoke,dewpoint,rdiff,osp,delimiter,forsaken,richfield,hangout,striptease,jhi,amf,sonicwall,burgeoning,unicast,amnesia,cipro,cherie,klip,libxt,menswear,inthevip,wrenches,actuate,capote,cvd,flexeril,molar,databank,m
ontevideo,sunglass,lhs,kassel,followings,shipley,accretion,asha,bullpen,mamas,schreiber,gnc,dysplasia,freeroll,efl,igs,utopian,kota,iden,dil,wia,sosa,negril,hyped,epidermal,autopilot,garza,decrypt,batik,crain,subd,utilising,dsu,fermanagh,idr,interoperable,mam,delano,sonja,plex,compat,replaceable,forint,nudism,netcom,formulary,irvin,galery,hounslow,fosamax,striping,excavating,recoveries,mrsa,mainstreaming,awt,hola,hoody,dci,geri,seasonings,marcelo,pantech,fcp,scaricare,roxbury,clamping,whiplash,dildoes,takeoff,wiggle,truely,henna,cartesian,gamezone,yank,llewellyn,shag,asymmetrical,universitat,williamstown,trolleys,interlocking,doped,headband,internetweek,outperform,ncp,harmonization,hamid,differentiating,hitters,konrad,wickets,restarting,bcm,xilinx,wideband,tmobile,rocha,pbox,aea,stevenage,moorhead,directorio,restructured,aerodynamic,hopewell,evaluative,zuma,annuaire,subtracting,bram,kuna,logbook,xor,louth,pict,truetones,gabor,rotates,ezcontentobjecttreenode,leanne,bgcolor,rescues,wim,corsa,causality,tiling,ethnographic,waffles,doubly,fandango,powermac,catalysis,annexes,lisle,pushj,naylor,wrongdoing,paducah,gunter,iranians,aat,commandos,abcd,repeatable,deh,epiphone,scf,weekender,milner,schott,welders,semifinals,quantization,surfacing,vegetarians,hagerstown,polyclonal,transponder,gottlieb,withdrawl,geneid,tierney,glock,guatemalan,iguana,glaring,cifras,salman,choker,ecologically,scoreboards,mohr,dpa,spaceship,digimax,moremi,btc,technologie,tunica,powerbuilder,aorta,unconfirmed,dimitri,degenerative,delve,torrey,celica,beloit,nir,substr,lowrance,ballantine,crimp,bss,mousepad,umbria,oregano,rashid,microtek,geary,boaters,soyo,visualisation,brianna,handlebars,weightloss,interconnects,playtime,enrollments,gyllenhaal,criticality,geoscience,mhonarc,golive,deville,meh,moseley,spacers,unido,deferral,hersh,hilliard,vlsi,keegan,feces,uy,bute,activewear,transcriptions,metered,bugfixes,cami,interna,quintessential,babycenter,gardena,cultura,stockpile,psychics,pediatr,williamsport,westlaw,hetero,meteorite,extruded,lakh,starware,phage,laszlo,hernando,vogt,wolfpack,lags,eldridge,wray,hajj,edirectory,longstanding,knitwear,apocalyptic,fatties,darmstadt,mco,ucsb,fillings,marti,aberystwyth,infineon,fdd,inflows,tmpl,estuarine,lita,nubuck,socialization,estock,mbit,valign,caving,vec,alkyl,artichoke,leasehold,directgov,ubiquitin,fuerteventura,hairdressing,dhhs,fecha,nio,wsi,quigley,yellowpages,pretec,biomechanics,microcomputer,discipleship,hella,womack,magnifier,acdbtext,pitney,esters,haan,ofcom,ablation,nutcracker,dosages,prn,zm,dfs,multiplexing,indentation,hazmat,eac,dalhousie,ahem,retardant,shankar,overheads,southfield,iee,gnustep,spm,azkaban,dermal,metar,sizeable,aftershave,lahaina,earners,tenderloin,dji,ipp,chee,hamburgers,oliva,gaultier,cios,margie,nms,wandsworth,caltech,stapleton,gsc,francophone,sqm,xoxo,coord,mocking,nri,serengeti,raccoon,shrinkage,prd,uris,hamsters,codphentermine,thrashers,calibrate,gilmour,rambo,cleburne,serrano,niacin,strawberrynet,wesson,ormond,oxycontin,bibliographical,wynne,glyph,nagios,marinated,marko,sfas,genotypes,conde,alford,madurai,evacuees,urbanization,kilgore,unwired,elseif,pneumoniae,skyscraper,ebags,gnn,tooled,intermec,charlottetown,submersible,condensate,matchup,undefeated,krs,movin,kino,vidio,photographing,pocono,footjobs,trackers,kinkade,unify,dissident,sperry,iframe,tur,commu,xterm,swapped,stent,vermillion,angiography,areaconnect,brockton,daz,abcdefghijklmnopqrstuvwxyz,dunst,livonia,specialisation,nsi,walgreens,plasticity,crux,nhra,armband,leamington,mosley,iga,stemmed,apple
by,grayscale,labonte,lek,cartoonist,flotation,geol,deterrence,cardin,aardvark,cosmological,dothan,isotopic,hadleionov,langford,ssg,understated,obit,unt,randomised,amphetamine,shia,grout,reba,wrx,rsgi,bharat,sls,slg,kilometre,tristar,gippsland,pastels,stallions,paramedics,fishbase,rolla,curie,bootable,skit,sourcewatch,decimals,boe,catania,countertops,paola,elwood,hocking,prerelease,seqtype,femoral,anz,visceral,fructose,edta,silverstein,broderick,zooming,hamasaki,keswick,extinguisher,subpoenas,spiele,rincon,pll,donny,vitale,fledgling,boinc,traversal,bagder,erick,kcal,midfield,hypersensitivity,redshift,glaser,sado,cusco,imagemagick,uic,fernandes,prosthesis,jsc,omron,alberghi,electricals,kelp,taker,placeholder,moulton,yall,npdes,massages,catalist,metarating,tupelo,syriana,batt,dbms,asb,videotapes,backseat,kauffman,manipulations,accomodate,tioga,aylesbury,submenu,kwacha,chondroitin,sandpiper,vamp,overarching,janes,selectors,condoleezza,internationals,estuaries,schulze,osti,paleontology,emporio,stepper,reykjavik,waterskiing,renfrewshire,superheroes,marg,leftovers,mariano,bangboat,guestrooms,urethane,stoughton,paphos,sprinklers,accum,bms,datsun,sainsbury,chefmoz,helo,yvette,procmail,midsole,ayuda,geochemistry,reflectivity,moog,anth,durand,linea,butterworth,datagrid,metetra,rodrigues,apprenticeships,oncol,dop,asymptomatic,retails,offroad,simpletech,gandalf,minot,evidentiary,kpa,whelan,synthesize,doan,localisation,laparoscopic,pem,hotelguide,bayview,overridden,sorensen,hinds,managment,racially,stinky,riverton,expertly,mgc,langkawi,ftpd,colloidal,guarantor,imperialist,suc,veneers,reaffirmed,zambezi,tibia,raquel,wpt,kiddie,tulare,venturi,sundries,linebacker,danzig,neurol,beanies,irreducible,trixie,ridgeway,henckels,srb,verifier,dimensionname,eurasian,galbraith,pesky,underwire,salvia,aep,radioshack,sportstar,alana,upd,duma,osh,ddbj,stah,scripted,ated,mutagenesis,posada,vocalists,tiburon,lpc,geiger,cmyk,everlast,obits,jekyll,sportsbooks,andaman,hallam,spoofing,rockhampton,reauthorization,poolside,xiamen,trc,pita,chopard,skeptics,nast,motorist,kwik,peritoneal,jaffe,freebie,harare,tunbridge,spycam,lowes,lineto,ncaab,publicize,neohapsis,sanibel,bulimia,newquay,intros,ladybug,analyser,armando,conwy,algorithmic,rectifier,banknotes,aem,bookshot,bassoon,scrapbooks,hydropower,clearances,denominational,dominguez,meas,tamron,dfid,vlans,spreader,deu,otolaryngology,ezines,vbseo,snowmobiles,oca,phen,educa,lagrangian,dubrovnik,idt,eases,hippocampus,crim,repeaters,longoria,matsushita,reimbursements,kotor,encodings,yuen,eqs,eca,actionable,gangbangsquad,cornea,overfull,southgate,minibar,kitchenette,ols,liberian,tuc,hth,repairers,liczniki,rcc,numerology,armitage,brac,barware,corsi,normalize,gsp,bcr,krt,buffs,tamoxifen,phenotypes,kinross,kieran,informatie,mccallum,triplet,geosciences,sonics,timmins,django,pllc,lotta,upg,nhtsa,swissprot,archaeologists,voss,pussys,moveto,tentacle,stx,iaudio,prednisone,salespeople,motility,dengue,gaiman,incineration,dumont,shanks,bissell,organza,centralised,unbreakable,supersized,depictions,wml,sexcams,kaffe,karim,aww,gtc,pbl,cael,separators,informatique,resetting,indepth,funnies,cumin,chicagoland,keystrokes,setters,inertial,payless,ona,pec,payee,cinematographer,preorder,oig,teenies,ppv,ventilator,annonces,camelbak,klear,micrograms,pediatrician,cymbal,convective,haymarket,nosed,bre,shogun,rescheduled,bala,sidestep,readline,preemption,microbiological,corticosteroids,pseudoephedrine,stockholder,engnet,quanta,sturgis,synapse,cwd,innostream,airplay,uppers,sib,pitman,bodrum,leathers,embossing,red
irects,fuzz,roscommon,meryl,izmir,meticulous,multiplexer,menorca,dendritic,minima,wstnsand,naproxen,operands,mikael,conceptually,crichton,cct,nics,hardwoods,clarita,xfs,capping,parisian,humanism,hiroshi,hipster,accel,annualized,sandi,npa,becca,basildon,khoa,testis,uclinux,unusable,tigger,approximated,dhea,consulates,wonkette,versioning,breakdowns,dbh,periodontal,macmall,iphoto,uncredited,recordi,lacroix,rupiah,bullish,hippy,klik,northerner,xsd,mackintosh,kenney,fabricators,mutated,layne,moonstone,scilly,sheng,fsp,yk,strep,offical,hps,tampere,testo,synergies,fundamentalists,amyloid,emachines,understandably,icarus,appletalk,goff,dialed,geoxtrack,bemidji,harcore,intermodal,spx,catalunya,baymont,niall,mitts,rik,nappy,diario,khalid,fuchsia,chowhound,muscat,ffff,kmart,handover,knott,butterfield,hialeah,finney,salamander,driveways,ummm,ayres,lukas,cavan,aswell,skippy,marginalized,sooners,cityguide,maritimes,permanente,texaco,bookmakers,speci,hgtv,contacto,mbc,marston,newsline,coverages,bap,specialities,loca,systematics,renderer,matsui,rework,snowmass,deq,rosh,coffs,cleansers,acu,webby,footbed,inicio,moretrade,apogee,allergens,worsen,mlc,applica,tankers,whopping,issey,rtr,bes,cust,brookes,anim,tull,informatica,computeractive,finline,permissionrole,quickcam,shunt,rodeway,scrollbar,breen,voyuerweb,mbe,kenshin,dpm,clackamas,synch,patten,leppard,allis,estimators,functionalities,rmt,downes,koffice,evidences,mux,dbx,fetishes,isaacs,outrigger,enclave,fibrillation,licorice,statically,ipl,dixons,goldmine,lhasa,developmentally,ziggy,ingles,senders,steamy,atf,madhya,marinade,passwort,extinguishers,stratosphere,tbilisi,updater,geico,fld,cabos,companys,tinputimage,ggg,nicaraguan,icn,wanganui,sconces,insulator,endometrial,mohan,hegemony,focussing,gallerie,bioperl,eprint,tennant,ebp,tryptophan,checkin,gilroy,extensibility,aei,qg,mcculloch,thang,lorem,seng,bianco,salma,consortia,asimov,renato,bungee,murdock,hokkaido,alternates,brdrs,configures,multilevel,mvs,pce,albertson,renoir,getclass,perthshire,mucus,suspenders,realtek,morons,dismantle,pharos,obp,zovirax,twikiguest,reimplemented,eavesdropping,orgs,numerator,gds,nme,resurgence,metastases,gino,timings,mecha,carburetor,merges,lightboxes,icra,jeopardize,ltp,loews,fanlisting,flet,bds,hyland,experian,screenwriting,svp,keyrings,hca,hdc,hydrolase,koa,mobilized,accutane,zonealarm,sexkontakte,canaveral,flagler,someplace,vcard,antibacterial,rund,extremism,edgy,fluctuate,tasked,nagpur,funroll,tema,flips,petsmart,libuclibc,chaney,aventis,macrophage,palmas,useable,ferndale,saipan,councilor,tcr,myinfo,jellyfish,newington,reissued,mpv,noa,airconditioning,wiggles,bho,synths,kennesaw,rubbermaid,spector,medica,ayer,incumbents,ashok,vern,writable,usepa,reflectance,mobo,bunn,chiba,uint,tgb,yj,coliform,selena,olmsted,broomfield,darpa,nonpoint,realignment,undermines,ferreira,sasl,defibrillators,kraus,certs,nwa,jstor,aarhus,supercomputer,bouncer,phenol,jigs,loudoun,lifetimes,grundy,histamine,byline,mbox,mustafa,bedlam,ioexception,abdel,bothell,synergistic,aur,lippincott,maplewood,tillman,maints,rhp,handball,shandong,cch,stylized,folate,lenoir,manitou,cytometry,goofs,wokingham,connors,musc,ripon,nypd,plexus,systolic,hyman,unreachable,deepak,desarrollo,tian,jisc,merc,covina,noonan,ufc,modernist,waring,janie,fams,yasser,weathering,totalitarian,putters,waypoint,prx,interrelated,delray,lifedrive,santander,southbound,solidworks,cronin,averatec,huren,patios,firebox,synopses,venta,sadr,tuples,brdrnone,diarrhoea,sonatas,barbecues,walther,deadwood,mancini,rpmlib,milpitas,commonsense,bsi,piii,r
omford,emporia,digidesign,violators,phrasebook,reconfiguration,sledding,lakefront,excision,traceability,yangon,booktitle,lemony,recursively,ney,kilda,auctioned,hennessy,basset,antwerpen,paltrow,rda,limiter,imtoo,jmp,cornwell,dah,blueberries,notting,comprehensively,amar,deftones,apg,zyxel,kno,limelight,schmid,alg,bme,solis,cdx,mju,hoosiers,criss,glynn,aerotek,unmet,toa,competes,olathe,ciw,compositional,sez,trig,taylormade,catawba,mbytes,ordinal,tth,inglewood,gila,magnitudes,downed,firstname,metairie,polluting,wellcome,pedicure,duplexes,edgewall,webchanges,backplane,daschle,transceivers,disrupting,biodegradable,spore,meps,phpmyadmin,bloodrayne,tessa,unrealized,hei,artistas,roomate,acetone,alanine,elko,dvdrw,spt,ries,inthe,blitzkrieg,nickels,banbury,igm,snf,optra,choctaw,issaquah,interactively,fredrik,aventura,ewa,dpic,mufflers,quarks,refactoring,monrovia,forman,marrakech,optoma,walkways,heineken,shelbyville,oxidized,bugfix,sharif,bloodstream,yx,underpinning,resistivity,hollinger,conformal,racquets,sherri,dbd,nevermind,moa,tenchi,potters,detergents,cheri,bombardier,subsp,cytotoxic,frag,eseminars,colophon,morin,ico,tatum,unforgiven,thesauri,gaffney,harrell,toowoomba,friendfinder,uts,bootsnall,relais,allocates,freecom,yoo,kabbalah,dgs,punks,chorley,ivanov,unannotated,endian,dari,patchy,haters,mutex,worldnow,giuliani,hina,millennia,pathophysiology,frith,pao,doran,remixed,hypoxia,newyork,penile,hemi,positron,metallurgical,ordinating,caregiving,molybdenum,easley,plo,psn,hexagonal,throated,contravention,bacteriol,healers,superbike,biosafety,binomial,engels,staybridge,mullet,canfield,hardball,orem,scholl,renovate,dvdr,phenterminebuy,metformin,actuary,addressbook,xquery,csl,purdy,rattus,xian,latches,ardmore,cosmetology,emitter,wif,grils,yom,ralston,estados,begining,apartamentos,sassoon,tna,hotlog,duquesne,oclug,formatter,rhinestones,shootings,splitters,gdm,pizzas,contig,whittaker,trafic,winders,walkie,adorama,uucp,postmarked,devolution,avion,innes,reunification,izumi,caenorhabditis,moderating,gadsden,cthulhu,eurostar,dooley,diebold,unsaturated,hotsync,ryerson,bfd,nonexistent,liquidated,decoders,validates,dae,jackman,biophysical,mendes,lasagna,landers,belton,qing,docu,tapas,calla,curriculums,supermodel,rezoning,schumer,exclusivity,motivates,debuted,lifeguard,chrissy,havasu,kei,danforth,kilmarnock,bignaturals,hendersonville,poweredge,sequels,licensor,pantone,granby,laboratoire,headteacher,viajes,etosha,ndc,coexistence,leona,dpr,brownfield,aguilar,supervises,orthologs,pataki,redistricting,jil,amritsar,lpi,pram,acqua,mekong,anesthetic,dsi,maduras,pfi,paperless,perc,fansites,sherbrooke,egyptienne,hyn,anisotropy,heaton,rennie,sno,redox,cladding,seaworld,hotlist,trumbull,retransmission,luau,tiscali,overlaps,meticulously,sitka,ucs,lsr,hellboy,jakub,hanselman,rangemaster,interceptions,rrc,dyna,appt,nonviolent,evangelicals,cunny,goddamn,wolfowitz,epping,accra,bimbo,jamboree,multicolor,tritium,ptfe,leaching,sauer,cricinfo,isomorphism,lsat,estab,stockbridge,invariants,jillian,islip,egp,didier,capistrano,yardage,neve,enviro,gte,bodybuilders,ranchers,bremerton,wbc,radii,schwinn,expander,regt,referer,electrolysis,signatories,wetsuit,flatrate,vendita,nazionale,peroxidase,folkestone,angkor,delcampe,taylors,rahul,mmr,zp,vserver,neurologic,chd,opac,cmv,macabre,neurontin,popeye,gruber,excerpted,spotter,pyongyang,hmos,beltonen,chamonix,recycler,declarative,semaphore,dprk,carmarthenshire,tristate,standardize,recyclable,knickers,overloading,angioplasty,fanboy,sharapova,moen,irin,deseret,eastbay,bfa,androgen,parkes,kilogram,
pacemaker,duarte,evaluators,tarball,nears,kapoor,pah,allard,mog,tures,standout,lll,holley,ogs,ptt,sfs,transamerica,bdrm,comparability,buckhead,industrialization,cabana,mbr,yoshi,skokie,catwalk,homesite,pecos,stinson,blurry,etrust,minibus,coty,denby,openbook,unfunded,jobsite,dls,levinson,kasey,disbursed,cristian,ballooning,nats,antineoplastic,amplify,shitting,coden,congressmen,dft,xsp,strapless,qualitatively,struc,whitefish,flourished,ejection,puyallup,bonham,miu,cosplay,gazduire,dodgy,parasitology,thymus,handlebar,sanborn,beale,lesbianism,locators,belive,mnogosearch,aoa,childress,pppoe,phytoplankton,wireline,handpainted,suprise,neath,casseroles,generational,coppola,burrito,sandton,spylog,biltmore,coriander,edtv,chopra,streamflow,montoya,lesbien,manipulative,hypnotize,liaisons,backers,evocative,mcclelland,centerfold,burch,chesterton,warlord,guage,powerball,snider,creuset,wildland,oster,conti,sichuan,wrigley,bollinger,sensitivities,offshoring,uiq,bayes,vipix,amphibian,substation,optically,ceasefire,haag,alj,swartz,nanoparticles,affine,sitios,woot,obo,uname,employmentnew,sepa,asrock,hijacking,blurbs,downsizing,subcutaneous,creatinine,factorization,netbios,fleshlight,reliever,ender,indenture,arlen,trailblazer,coney,avenida,ern,shocker,barnstable,ioctl,bronte,refrigerant,caterham,bajar,movei,barkley,datacenter,presidio,transfection,fung,legg,moyer,roux,rectangles,caseload,catharines,pdx,wget,collaborator,cruzer,eeoc,tnc,cnw,sausalito,clas,xenopus,reflectors,endorsing,qingdao,kiwanis,onlinephentermine,replicator,assertive,aldershot,weirdness,oblast,townhall,sunnyside,datos,pham,glycogen,tain,selangor,detainee,brd,hoosier,balearic,toluene,jini,tubal,longford,johansen,photocopies,haccp,narconon,dyno,blakely,klonopin,photonic,kyiv,tami,hijackers,buell,informazioni,mccracken,ultrasonography,cale,alyson,taupo,possum,milligan,rosacea,transgendered,thos,toxicological,mackey,ristorante,obama,dvc,jermaine,platypus,breakbeat,karina,jang,thereunder,kink,winton,holla,multilayer,strcpy,xzibit,mohair,chore,agb,prt,abm,kgb,preemptive,guzman,subcontracting,counterterrorism,communicators,embodiments,sociedad,taskforce,gatineau,pertussis,concentrator,astrophysical,apap,pairwise,nagy,hofstra,kbs,filmstrip,shortcake,hsm,chilliwack,bidorbuy,tetracycline,lovett,motorhead,salam,hofmann,paramilitary,flipper,eyeball,outfitter,rsl,minden,hardwick,immunological,wifes,phenyl,telefax,giao,famously,hattiesburg,telematics,tsai,maier,lca,bossier,franchisees,falco,armin,ique,controllable,surfactant,telecommuting,culvert,prescriptive,wcag,hott,spanner,mchugh,firehouse,currys,diadora,laporte,wgbh,telekom,puri,factsheets,karts,orthodontic,visors,leste,lithography,bonobo,hamptons,proofreading,rmx,evokes,jdm,dehydrated,whyte,interop,initializing,manfrotto,waveguide,pnc,aussies,murtha,reinhard,permaculture,suburbia,kamal,catwoman,optimally,darko,windstar,polymorphisms,sexist,mdm,embryology,styrene,alumnae,inducible,riesling,triage,ees,krugman,mrt,mazatlan,silencer,foreclosed,chernobyl,rigby,allergen,crystallography,frosting,gallbladder,photogallery,nightwear,sconce,vgc,drivetrain,skelton,ovaries,mamob,phenterminecheap,daddies,impressionist,tourisme,hpi,clif,fairways,watercolors,klipsch,tekken,lactic,bydd,katana,ameriquest,boson,culo,milled,mcarthur,analgesic,mya,btec,geez,crocheted,acetylcholine,modblogs,pud,firsts,ferrets,enlight,wop,twas,menzies,agonists,eisner,staroffice,acg,photometric,fokus,ntc,buzzer,tok,trams,vickie,tinnitus,vectra,benidorm,gerrard,marketworks,libertarians,downers,kevlar,sequestration,yoshida,inositol,praia,fol
licle,itemsshow,brunner,indore,inspectorate,ultralight,toutputimage,saudis,octal,debilitating,twd,keypress,notifyall,hdf,corrs,turku,centrifuge,curators,multipoint,quang,marla,mths,caffe,projective,fandom,cws,kao,debacle,argh,tts,plantings,landmines,kes,sdd,khaled,kimmel,famc,tva,arbitrators,deakin,instock,gilligan,unh,unpossible,waldron,kihei,daq,bronchial,emg,nanoscale,hmong,brownfields,emmylou,antcn,unilaterally,hypoglycemia,sodomy,bukakke,bigpond,famosas,nsync,zd,revaluation,conditionally,moira,tenured,padd,amato,debentures,rfcs,acyl,rehoboth,lmc,dht,drucker,lmi,tham,cigna,dlr,nifl,sealy,axa,carrey,ige,dde,foy,evesham,mcneill,manitowoc,baguette,haves,erections,overpriced,grantor,sux,orbiting,soares,gsl,ihep,resubmit,bader,gymboree,kyo,yunnan,miyake,rah,saggy,subtypes,moultrie,vasquez,iogear,merch,uplinked,cognos,northbound,cardigans,ket,rasa,taglines,usernames,gpsmap,ngn,midweek,pirelli,rialto,tvw,durations,bustle,trawl,shredding,reiner,risers,taekwondo,ebxml,unedited,inhaler,granularity,albatross,pez,formalized,retraining,naa,nervosa,jit,catv,certificated,spicer,karsten,surfboard,scl,garfunkel,handguns,ideograph,papillon,dmn,citywide,stingray,bmo,toscana,analsex,larsson,franchisee,puente,epr,twikiusers,tustin,physik,savute,slinky,cubase,weatherproof,parkplatz,roadsidethoughts,oxy,pthread,postmenopausal,mixtape,tuxedos,fujian,batters,gogo,nca,minivans,yerevan,duffle,scraper,posner,bwv,technet,sdsu,decl,lombardi,musi,unger,gophers,brando,ksc,multifunctional,noes,relist,webjay,vtr,haworth,transfected,dockers,swg,screwdrivers,tir,guitarists,manta,christa,sff,moffat,surfboards,deteriorate,compo,roos,eesti,caulfield,midpoint,orland,malagasy,shoplocal,standardisation,matlock,nair,polymorphic,emd,phenomenology,substantiated,slk,phong,bandera,cred,lorry,recaps,fet,resolver,kagan,chiu,anthropologist,opcode,jugg,revamp,herbarium,grb,readonly,arista,barcelo,unknowns,kean,coq,cpo,brosnan,chamomile,tgf,mobilizing,anya,allo,geddes,wayland,cerro,methylation,ecol,clanlib,jayson,prostatic,uj,metcalfe,oppenheimer,mcclintock,android,primaries,converges,lation,anisotropic,voorraad,ucr,mxn,ambrosia,springboard,rubella,eisenberg,bif,constitutive,vesa,signoff,guggenheim,sapphic,killington,otr,intec,xem,instawares,kearns,showcased,summerfield,cooperatively,oshawa,targa,triplets,hec,billionaire,leucine,jobless,slingshot,cutout,disgruntled,coker,selinux,crosslinks,resurrected,skyscrapers,spamalot,sfp,noob,crb,moviefone,beecher,goog,mdgs,democratization,biostatistics,sakaiproject,cilantro,equ,xilisoft,zc,terracotta,garvey,harford,pcie,dartford,dicaprio,rosso,onlinebuy,gilliam,certiorari,walkin,contributory,applescript,esol,giggles,suture,jacobi,fark,autoblog,glaxosmithkline,dof,sextoys,tice,accor,buford,uspto,balfour,calipers,penalized,pyruvate,loggers,envi,kissinger,rmc,whew,orchestrated,conformational,choreographer,mcsa,impressionism,bucknell,martino,cranbrook,taz,ocp,subdomain,precios,simcoe,abnormality,varicose,newtonian,genova,libor,infomatics,hyannis,howland,federations,syed,urination,bewertung,broadcom,cautionary,escalate,spotters,kucinich,noosa,sider,mitral,dafa,verdes,inproceedings,crestwood,takingitglobal,dmz,antisocial,baz,gangsters,daemons,foundational,probs,huntley,kanpur,uah,elven,isotropic,adodb,enlaces,edelman,rubinstein,flier,griswold,ome,carcinogenic,micr,rrna,goverment,mercado,lum,dekker,supercharged,magicyellow,primavera,timescale,fico,overwritten,marcinho,kor,erb,keanu,edina,perle,lebron,terminally,bundaberg,lbo,breyer,kochi,pirated,leavers,vpl,pubsulike,aquifers,nittany,dakine,rescuers,amsoi
l,revitalize,messageboards,lakeville,apotheon,eukaryota,permeable,rsm,lastname,pxi,faxless,napalm,annuncio,usmle,racetrack,atenolol,riveting,cbbc,absorbers,xseries,biweekly,parkside,rez,hows,posi,derailed,shoebuy,ashworth,keira,meadville,skynyrd,threechannel,fid,rua,monologues,subroutines,subspecies,penton,eoc,figleaves,bab,ketchikan,immagini,shafer,qca,broiler,ctn,lickers,akbar,cbl,skimpy,fisa,reflexive,drool,godin,exchangers,interbase,sepsis,appli,boxdata,laing,oscillators,choline,doolittle,trikes,pdm,joerg,removers,grisham,diffuser,indesit,rouble,kamasutra,camila,belo,zac,postnatal,koizumi,tallied,ikezoe,niggas,lorain,tko,keying,ballpoint,kq,lupin,eidos,computerised,maf,rsv,munson,ftm,munoz,hbv,jeffersonville,willfully,orienteering,eoe,cavs,humphries,puss,ngs,podiatry,truffle,taka,beal,kalahari,blockage,hallo,abo,recv,obstet,bulma,chicos,cliche,sadc,tolar,screenname,chlorinated,hypothesized,upbringing,fmc,newry,zonal,defun,unsustainable,maas,ghostbusters,interdependent,rockwood,dbe,asda,civics,literals,unanticipated,seminoles,plist,tabulated,workloads,chemo,vhdl,pretrial,fermilab,hotplug,rotator,krups,myosin,mtx,carpool,honky,matsumoto,armpits,clug,gasolina,caruso,fsh,joysticks,visualized,bosworth,soic,clitoral,bers,carsten,riverwalk,convertibles,literotica,pgm,ringetoner,tpm,floorplan,oscilloscope,getz,mgd,dictators,levees,annandale,hillel,jeffries,pacheco,slacker,miva,sns,gca,xchange,kraftwerk,bandana,pentecostal,extrapolation,fennel,telemark,spg,quy,datasheets,smit,flywheel,futons,interviewees,mosfet,maryville,oskar,ital,quarkxpress,nondiscrimination,republika,icici,fixings,leith,kickboxing,deming,deactivated,caliente,oligonucleotide,crtc,golgi,channeling,stopwatch,maroc,lemieux,subscript,starfleet,odi,substandard,phenterminephentermine,phoned,ncl,gmtime,convener,becuase,dailies,dansguardian,miramax,busta,maury,cng,jizzshot,moya,nackt,commercialisation,cunni,cardinality,machado,insurances,qn,tinting,epidemiologic,isset,burnie,bushings,radionuclide,typeface,changeover,jian,termites,dotnetnuke,decryption,etnies,subsec,cxx,grinnell,alexei,helly,protestors,signings,parnell,gretna,guida,abl,farscape,hdtvs,sde,cyborg,yanks,hematopoietic,clot,imprints,opensolaris,inflationary,elie,traceroute,fgm,cuddle,workbooks,fallback,permutations,downer,abelian,cabela,transferee,quantitatively,sheepdog,cameraman,pinochet,replicating,tci,slashes,streetpilot,renovating,paralympic,dwarves,cakewalk,pyro,phenterminediscount,tye,bna,uwa,stinks,trx,behav,blackfoot,kuo,schaffer,kemper,glycemic,plesk,slicer,joshi,realtytrac,sandburg,dnb,nwi,reza,operable,wargames,guerrillas,saito,tce,fullsize,auc,anzac,kulkarni,rabbis,mendelssohn,investigational,photojournalism,anaal,christiansen,centaur,rubio,transando,rapist,ert,pratchett,climatology,baise,labtec,prioritization,pinhole,hdpe,bioengineering,dirac,mcu,alveolar,westmeath,lewinsky,webx,acco,soya,moz,exorcist,biofeedback,atrios,honduran,seaview,douche,rsh,soundcard,resistive,sylvain,chubb,snooper,atn,dbase,katja,icr,firepower,agu,ges,cissp,mangalore,laois,ime,unmodified,keystroke,zell,parkersburg,yoon,gillmor,joyner,vinnie,ccf,grocers,simulates,flathead,castellano,sigia,vesting,misspelled,prono,headcount,panache,inu,hallelujah,joes,cayuga,nob,tpb,glug,zodb,gubernatorial,goran,bauhaus,sarawak,sparky,sebastien,wirelessly,wpi,sysop,factored,eula,ohh,bsb,polymeric,salivary,mfi,ftaa,async,dnd,kristian,circadian,analgesics,flintshire,prakash,productos,phenotypic,pelagic,agronomy,vss,aironet,weightlifting,yugo,audiophile,unidos,motorcycling,raine,testbed,pediatricians,finge
rprinting,bunbury,tasking,gmd,emulated,tweaked,phonological,barco,gomes,osf,faridabad,aprs,snappy,opa,colonic,jeroen,qin,zircon,svt,dansko,caspase,encinitas,tuo,remoting,ploy,achat,freefind,spellings,canopus,dme,gaulle,maplin,dutchess,wattage,puke,distinfo,leia,expeditionary,amortized,truckee,albury,humanistic,travelogue,triglycerides,gstreamer,leavitt,shotguns,discounting,etoys,thirties,swipe,dionne,ebscohost,tns,geoquote,upkeep,truncation,gdi,bausch,pomeroy,harrods,downgrade,roomates,biliary,dumpster,universalist,acdbarc,ywca,oceanview,fazendo,shayne,tomy,resized,yorkie,qx,matteo,shanahan,japonica,froogle,rehnquist,megabyte,ginsberg,vivienne,penticton,inseam,csh,pressurized,sld,faves,edf,massagers,ente,timesheet,anniston,sigur,toughbook,histological,clays,pcx,suzie,honeycomb,denier,udo,etcetera,reopening,herrmann,ifr,quantifying,qigong,cbn,kurzweil,chanukah,programas,fumbles,jobseekers,nitrite,catchers,mouser,rrs,knysna,arti,andrey,textarea,weis,pesto,ilm,ponderosa,kroatien,transitioning,whoops,catamaran,preoperative,cbe,verilog,helios,qz,wheelbase,narayan,voyforums,csg,unctad,monomer,refueling,ilife,biennium,coho,pellepennan,quartile,anwar,infobank,hexagon,ceu,geodetic,anda,emporis,ahmadinejad,lubes,consensual,altimeter,nmi,psm,lawler,sharpener,stellenbosch,soundex,setenv,mpt,goldfinger,asahi,ascorbic,himachal,dichotomy,communigate,covalent,cantrell,tarpon,bluffton,radix,orthologous,taichi,borealis,nerf,rosedale,policyholders,nst,racecourse,extraterrestrial,kok,servicemen,starwood,asco,nui,phylogeny,jis,tiesto,ameri,plankton,pkt,seamus,sublets,unthreaded,microstrategy,cleanups,fitchburg,flowchart,tacky,sauk,supercomputing,antiwar,illawarra,benetton,menopausal,workgroups,relive,ketchum,nieuws,mirago,reproducibility,abalone,ashmore,ssx,eachother,gsx,juggs,ded,geometries,petzl,edie,quirks,sbe,bundy,pina,crayola,acceptor,iri,precondition,padova,indica,roddick,teasers,beveled,consumerism,flr,yeovil,boneless,intracranial,kbd,tatoo,gameday,solute,tupperware,ridgefield,gce,quadro,mumps,trucos,mopar,haggis,electromechanical,styli,whipple,fpm,arcata,perego,guwahati,loudon,legolas,rockaway,exhibitionist,woolley,msps,toolset,ferragamo,bott,godiva,nsn,vfw,masculinity,schrader,bld,lightfoot,capitalizing,rucker,browsed,hcg,freenet,bundling,cannondale,mcat,blt,mencken,commerical,dagenham,codename,nesgc,profess,rearrange,warfarin,stdin,rohan,overheating,condon,inflate,npd,gunnison,hhh,sfmt,devonport,copywriter,bodybuilder,poss,psigate,ecp,airforce,fleischer,atmel,rasta,ravel,jupiterresearch,flycatcher,cusack,jenni,gbps,bombshell,llbean,arnie,subdomains,kale,pcd,shemp,findtech,huck,vouyer,horrendous,complainants,addy,ehs,fabricating,mmo,verdate,cyberpunk,enotes,pecans,ababa,whitehorse,barak,juke,schnauzer,hairdressers,prioritized,rainforests,exo,rabin,workday,eared,earphone,passaic,vme,hypermedia,udb,jinx,illiteracy,carcinogens,offres,addressee,thefreedictionary,informants,tics,sublimation,harnessing,extenders,fishman,hmi,tsk,inj,wvu,zimmermann,dupage,belarusian,maia,lynyrd,messianic,mexicana,generalist,gastronomy,ugs,huckleberry,ridgewood,pii,dua,phan,lightsaber,vivanco,catheters,azerbaijani,whitmore,footy,joinery,wasatch,octagon,equates,sorenson,eames,tacos,misspellings,trivandrum,kingsville,magnetics,rce,halide,metabolite,clo,genders,headgear,gretzky,harming,insole,colvin,kano,thurrock,cardstock,journaling,univers,aragorn,principled,namibian,slacks,mcsd,wmp,fairmount,physica,subtropical,sager,trk,bowflex,subcommittees,jia,ramesh,sitepoint,prawn,phylum,mephisto,prf,mundial,waveforms,algal,schafer,riddel
l,gimmicks,reparations,injectable,sher,trondheim,mhs,libwww,phenix,tlv,rena,tcpdump,quinlan,ecampus,kaya,ethically,sity,fkk,freeradius,nmh,puffin,freeride,ahern,shaper,locksmiths,lichfield,cheater,tora,hsi,bootcamp,torus,mondeo,cotta,oac,evi,jre,vignettes,aculaser,waxman,raping,oryza,leashes,babydoll,srgb,practicality,winer,thon,battelle,inp,europcar,pancreatitis,americus,immunohistochemistry,woodlawn,filigree,forecasted,bypassing,chock,chocolat,messier,gravis,edson,nathalie,calendario,blenheim,clarksburg,trigonometry,virusscan,flanges,bowlers,tsi,ipos,harlingen,keypads,sosui,campanile,vassar,regress,ghosh,iab,hao,ntu,ivey,techdirt,pmt,minutemen,pias,celiac,hough,ingested,hypothyroidism,boyfriends,jeong,equifax,baroda,cybernetics,tissot,daf,prefered,rappers,discontinuation,mpe,elgar,cumulus,brltty,klan,goku,offsetting,airmen,halliwell,ionizing,angebote,morphy,bookmaker,curio,hookers,amalgam,notional,webactive,bechtel,zambian,reinhardt,bridgend,bendix,dists,magnetometer,populist,mimo,bsu,renfrew,hesperia,chautauqua,mnemonic,interviewers,garageband,invariance,meriden,aspartate,aramis,pleural,tsu,mediating,gabriele,resonator,provincetown,afx,surpluses,ertl,holger,castlevania,vaniqa,finisher,ead,quartets,heber,muschis,anthropogenic,thermos,macroscopic,torrington,gillingham,geopolitical,flaherty,varietal,assfucked,engle,gorillas,ihc,shatner,euc,juarez,helicobacter,epidural,luisa,teardrop,anion,glosspost,numeral,mdx,orthodontics,tabby,cyngor,onl,claddagh,abf,therm,myeloid,pugs,sprocket,roh,unilever,ctu,genomebrowser,sima,hants,maclaren,chairmans,yim,workflows,adn,ansel,dragostea,hrvatski,ayala,bfg,tonawanda,imovie,regionals,kami,jansport,fanfic,tasha,nikkei,snm,lynnwood,glucophage,bicentennial,arl,radiologic,kts,agosto,mineralogy,corsicana,harrier,sciencedirect,krugerpark,oireachtas,esposito,adjusters,olympiad,fname,iar,allende,ldc,sited,surry,strainer,paragliding,whitetail,pagemaker,astrid,tripled,gwar,atwater,overpayment,faeroe,wisenut,nagel,blatantly,chicano,chongqing,corporates,applicators,erasing,svetlana,fleer,bossa,deuces,fud,dalian,anycom,gunfire,mcnair,subtilis,hdi,percutaneous,cursos,cols,urth,northbrook,rmk,mgf,voli,leann,pixmaps,gigablast,metronome,blackman,fliers,rdbms,imprimir,grouper,negate,roessler,intrastate,manawatu,blass,ainsworth,denzel,tfl,moped,appointees,bunkers,refrigerate,ligase,otp,beleive,warlords,hatteras,symlink,almeida,blogcritics,cochlear,janelle,alphabets,atta,foldable,hydroponics,precast,univer,purest,fatboy,cei,westerners,camarillo,kelty,volunteerism,pdq,openacs,hor,newham,energie,radiographic,kinematics,errol,otabletest,isobaric,hba,gratuitos,innd,eads,personalise,tbl,fso,patenting,reciprocating,rto,subcellular,crosbie,harmonisation,dunfermline,janesville,egroupware,caritas,tsm,egf,roa,debhelper,nsaids,milt,burleson,pba,ragtime,adopters,impor,philo,backseatbangers,rushville,saitek,synthesizers,vulva,arapahoe,posey,minuteman,zinfandel,mayoral,fortis,medicina,gallary,honeys,pinus,interlink,greening,tesol,artnet,crw,bansko,brien,silvery,guevara,thinkin,sedu,automakers,igmp,overtake,semicolon,bubbly,edwardsville,ques,homebuyer,nodal,mpo,unbeaten,rawls,ocx,ork,sheeting,hallways,alzheimers,snooze,kestrel,nadh,americorps,prawns,nonpartisan,naps,domina,eldon,palomar,riedel,hoppers,onscreen,gdk,distillers,uploader,caltrans,tyra,cocksuckers,mtbe,hypertensive,xie,chinchilla,bucs,transformational,sailboats,heisman,grn,jct,exemplifies,arrhythmia,astrometric,workwear,tolstoy,asperger,koop,newydd,transpose,lpr,xray,ferrer,microeconomics,kafka,telly,grandstand,toyo,slurp,alloca
tor,islas,ila,westland,instantiated,lewisburg,stylists,blackwater,vivi,hippies,pul,larkspur,kea,lesben,motherwell,ahs,cappella,neocon,getname,coyle,rudi,departamento,winrar,mussel,britax,diwali,raines,dso,wyse,geourl,etheridge,docomo,webindex,accrediting,stapler,pheromones,woodson,imm,volcom,telewest,lcp,bisexuals,ozzie,kitsap,oic,cutest,hoon,mpp,cte,dymo,yolo,quinton,jorgensen,printouts,tempt,credentialing,scalloped,sealey,galvin,etudes,gurney,bluefly,schweitzer,jawa,geochemical,allegany,aldridge,digitizing,aki,organically,chatboard,lomb,uddi,yng,roleplay,pavillion,barstow,patna,rootkit,spearhead,leonid,sunnis,reticulum,dulcimer,unl,kalman,npl,coronal,rendell,transparently,mfs,freeform,gianfranco,tantric,reif,woodhouse,lifter,seymore,ogle,sayin,cpas,videographer,gpe,stallone,uams,pula,trudeau,buss,ouest,korner,fatherhood,debussy,qsl,reflexes,hlth,wyman,kingsport,gauthier,vadim,magnetization,trd,aitken,millers,titted,clerics,busses,trai,underpin,ajc,dumbledore,vinny,delicately,webroot,yip,producti,teksty,pullout,dmi,yellowcard,sbi,dmt,nce,birdhouse,bnd,neko,chillicothe,peacekeepers,schmitz,rimming,solent,propylene,supercross,zsh,multnomah,foxconn,fuelled,biohazard,horrifying,parque,toffee,fpl,riemann,horsesex,mahatma,mubarak,bachmann,caswell,chiron,hailey,pippin,nbp,ramallah,isoforms,dictyostelium,tauranga,hawkeyes,maxxum,eire,knowit,topanga,geller,parliamentarians,inadvertent,utes,boardman,denham,rofl,homophobia,winches,uptodate,centralia,eschaton,hoaxes,hillingdon,buble,hairspray,acdsee,offerte,urb,intellicast,minn,frc,antisense,pelosi,shader,gisborne,grafts,hillbilly,intifada,carina,fon,ehow,vpi,brunel,rtx,roald,externalities,metzger,balsamic,classically,calorimeter,necked,idiopathic,lileks,tahoma,ogc,unidirectional,westbound,layla,galeries,cabinetry,suarez,stipulates,towertalk,optimizes,serializable,universite,ald,ringsurf,toques,rayleigh,dropouts,fws,gamecocks,gazprom,braden,amet,sinusitis,rusk,fractals,depressants,clec,tryouts,rushmore,shel,adapts,farlex,emac,phl,remax,wizbang,endnotes,rodman,dissidents,iterate,conair,ember,vsa,neolithic,mgx,acuvue,vetoed,uruguayan,corrigan,libxml,etronics,simian,atmos,msk,iib,multimode,teensforcash,annu,sunbury,girardeau,dbg,morrisville,netmeeting,asso,estore,universes,ganglia,ghanaian,resonances,subjectivity,microarrays,easypic,abbeville,newsre,cobble,flightgear,spode,berea,mckinnon,bucky,plunger,xing,siggraph,bookends,klingon,moreland,lowery,histograms,moll,floorplans,netherland,frasier,rossignol,polyline,laroche,cytosol,disposals,xforms,mosul,motu,amersham,chordata,crafters,kingsbury,yoox,hyphen,dermalogica,moreton,glycoproteins,aristide,unsorted,rambus,ptf,scorsese,patricks,microwarehouse,bch,blyth,grampian,livedaily,nces,alizee,detain,andrzej,optimus,alfie,immunisation,pfaltzgraff,eyelets,swordfish,legals,hendry,homogeneity,hartland,recreated,leaded,hunan,supersonics,amstrad,vinaigrette,scd,mch,nintendogs,dvx,unreadable,plattsburgh,balsa,aya,brasserie,gcl,salton,paulson,dvdplayer,silverton,enduro,peepshow,givens,bristow,pecuniary,vintages,ozarks,johor,zia,mucosal,prehistory,histidine,mti,drape,tectonics,lorentz,distributive,sharps,seguridad,ghd,gilberto,doomsday,otters,gervais,mews,scarring,daydream,gooding,snicket,bicarbonate,boggs,wps,dietitian,itf,harriman,paprika,haviland,novato,dyn,hornsby,biden,disallowed,zahn,jordi,correo,frida,chappelle,resourcing,methuen,zoneinfo,adelphi,orbison,geffen,informatik,novella,brie,galeon,silos,lrwxrwxrwx,shortstop,cua,dordrecht,permissive,creston,prec,nco,nehru,bromwich,disposables,estrogens,mulholland,rui,h
az,eol,odometer,tooltip,ibb,mosby,druids,aggregators,herfirstbigcock,rti,arvada,fixme,rodger,tively,gizmondo,cucina,ivo,griddle,pricelist,juventus,conroe,multipliers,aparthotel,kitesurfing,couplers,aftershaves,rehabilitate,patina,scansoft,quadra,sousa,phonology,dunkin,deat,plasmodium,bums,undersea,aretha,lts,boxster,staf,bcg,overexpression,vanadium,wilkerson,riverboat,voa,kohn,bgl,jiu,ipi,contl,ottumwa,gynecologic,unstoppable,pedometer,shortfalls,ksa,bookmarking,ingham,yoder,esu,vbs,barbershop,drinkware,idiosyncratic,googlebot,floppies,tashkent,foxboro,allstar,hervey,fes,kilowatt,evga,nikos,tance,varian,mops,coughlin,commutative,lansdowne,bcbg,syrah,affx,angiogenesis,nicosia,nematode,kegg,pkr,enso,administratively,tma,capa,ronaldo,leverages,cco,cancerous,banderas,gmane,vq,gabriela,secretory,mmx,pinehurst,nro,reassessment,ippp,chillers,elbert,sunil,yuki,periodicity,trypsin,bursary,dependability,overdraft,deirdre,colonia,mycoplasma,lesbains,adelphia,scribner,aro,activites,uaw,frankel,cacti,bugaboo,palmdale,aeration,kita,muscletech,watersport,paf,nxt,uscg,yitp,gibb,gener,nak,unm,zhong,chowder,expatriates,centerpieces,freaked,curbs,tdp,gruppensex,triphosphate,acronis,wcw,prostaglandin,completo,darwinports,abiword,hippocampal,atlassian,technik,vineland,commentaires,ters,stuttering,forcefully,depo,edinburg,kwanzaa,kzsu,mascots,harrisonburg,cadbury,scoble,aor,conundrum,bullard,aiff,comedic,apical,synoptic,miyazaki,beryllium,disinfectant,sentra,joi,jokers,wci,piglet,wildcards,tresor,sketchbook,bbd,halliday,manolo,tifton,repre,hendrickson,windhoek,lomond,atapi,hbh,eccles,ofa,dcu,spatula,intergenerational,epub,cates,featurette,gotcha,kindersley,drifter,cvsnt,ogy,lagerfeld,lewin,youve,unaids,larue,stardom,assad,glenview,brantford,kelis,nola,lxr,toastmasters,appr,recs,ranchi,exotics,articulating,jiffy,goodall,gconf,verkaufen,scalextric,ryobi,qname,immerse,farris,joinwelcome,cce,wittenberg,capone,mtp,busines,rebounding,usborne,hirsute,prelim,prepress,rop,militias,ttd,commodores,ecnext,dbf,goldsboro,ashburn,roslyn,neverland,coolio,lindbergh,freeciv,indice,vertebral,ectopic,abcs,lge,bnl,coulomb,minton,oban,restatement,wakeboard,unscheduled,dbc,visser,clipland,thermocouple,masala,clt,drw,rosas,rdram,mcclain,maki,rosenbaum,eagan,slv,sunburn,pleistocene,nips,sfi,canisters,kas,waddell,solvency,lynette,plainview,fielded,blowfish,zyprexa,altrincham,workin,afton,topologies,touts,pino,xelibri,lora,mendez,undelete,samuels,rajesh,soros,unjustified,nfo,crf,digitale,sitcoms,analogues,leukaemia,ukulele,paperboard,fied,cobain,trillian,offaly,girlie,ilcs,friggin,wq,davinci,oxon,expressionengine,bains,rse,callbacks,cdv,hannity,replicates,sidewinder,queueing,slugger,humidifiers,desai,watermarks,hingis,vacanze,onenote,montebello,streetcar,stoker,fulcrum,sadistic,cassiopeia,corwin,qut,martingale,saucony,winslet,criticizes,baytown,synchronizing,reclassification,woohoo,htl,caithness,takeaway,timeouts,reit,dietz,devo,morgage,koo,ducky,bola,mdb,multimodal,recenter,hematite,hensley,asterix,hokies,blumenthal,multinationals,aag,debs,playin,emeril,mcalester,adria,shipman,burzi,incinerator,muenchen,convening,unorthodox,fibroblast,gloryholes,carrick,immersive,darmowe,catagory,glob,cisplatin,rpa,fertiliser,nuova,halstead,voids,vig,reinvent,pender,bellied,oilfield,afrique,ream,mila,roundtrip,mpl,kickin,hiatt,droid,addenda,restorations,boll,knightley,worksite,lcg,typename,aris,isv,doctype,balinese,sportster,dence,lesbi,saversoftware,bursaries,cuny,cardiopulmonary,biologic,wanadoo,shiatsu,homewares,dpc,qk,schizophrenic,unplug,albergo,pre
ssroom,gingrich,basra,greenbrier,superoxide,porcine,oldfield,wxdxh,luder,shim,manx,understatement,geda,tormented,immanuel,whistleblower,hopi,idd,gol,bayswater,lyne,epox,kennewick,subtree,inshore,ibd,hepnames,benn,kettler,clots,reducer,naturists,lvd,flonase,sympa,hinsdale,trav,spina,meatballs,underrepresented,bpl,etb,brane,tightness,tracklisting,horizonte,rgd,concatenation,suffixes,kilmer,cloverdale,barbera,seascape,amdt,linings,horseradish,telepharmacy,itasca,varbusiness,paulsen,cortina,ides,hazelnut,ashfield,chaco,reintegration,pampering,boland,airtime,surrealism,imi,eit,clamshell,tonk,luminance,ixtapa,gryphon,ecos,cair,rochas,farnsworth,synchronisation,suresh,minnow,bloor,gumbo,faqforum,kunal,jossey,rci,upa,melamine,wonwinglo,episodic,xcel,jurys,descendents,ezmlm,twikiaccesscontrol,tonos,lated,montero,divisive,soci,guia,gastonia,inappropriately,valentina,lubricating,itworld,deca,branford,kody,accruals,epitope,jdj,crenshaw,perlman,medallions,rokr,usg,microtel,rsx,graff,jcsg,fds,cooney,whittle,gmthttp,rayburn,etat,suppressant,hecht,sportsnation,sso,ccnp,reworked,etl,catapult,vries,procurve,cbot,elitist,convoluted,iberian,optoelectronics,mailscanner,kazakh,stimulator,schoolchildren,commweb,thornhill,tweezers,lani,ouvir,filetype,bearcats,fanclub,boehringer,brasileira,webservices,kinematic,chemie,inoue,unsupervised,norvegicus,copycat,orrin,snooping,hashem,telesyn,mcb,imple,dorms,elist,laminates,ingalls,checksums,tandberg,iirc,mackinnon,roddy,margolis,erotaste,pimps,mcdougall,smg,mpx,fhm,travelzoo,thermally,teleconferencing,albino,cargill,hyd,visualizing,mothercare,sprinter,isomorphic,pepperdine,cvc,mahon,conjugation,macally,anklets,impasse,disinformation,beavis,delicatessens,intensively,echocardiography,pav,amok,riddick,sexism,ordinates,gallaries,baldur,elon,beasty,arty,leukocyte,chau,cotter,peptidase,fsi,postmodernism,osm,squeaky,silicate,alcohols,zydeco,testi,trujillo,predictably,weider,shareholding,giordano,cardiomyopathy,aprilia,mcnabb,lenz,homeencarta,disconnection,scada,spacetime,trb,awol,espa,bionic,batista,bookshops,feynman,captioning,sibelius,obstetric,marigold,ostsee,martel,hcfa,ino,ctm,whi,typesetting,ervin,chroma,steinbeck,pusy,biblioteca,neutrophils,dunbartonshire,lollipop,brash,avl,opi,declaratory,corus,elph,naf,htp,hydrate,ubb,littlefield,neutrinos,aso,bric,subways,tui,leominster,ncsa,snipsnap,negativity,arcview,picasa,tortillas,awww,dara,ragga,innova,doorbell,ebc,sgl,unsettling,snps,explicito,phila,bugger,persson,embolism,iip,silverplate,lats,ovc,roebuck,sbp,lipton,starling,coreldraw,haney,globemedia,adrenalin,murphys,nicklaus,yardley,afghani,tst,hrd,haulers,energize,prohibitive,sydd,nida,barcodes,dlink,includ,orgie,macnn,danni,imaged,sprayers,lindberg,filesharing,calibrations,atorvastatin,teague,vantec,lattices,cucamonga,warne,derwent,hospitls,flintstones,rotisserie,orcs,scallop,biostar,computationally,jobseeker,siem,sunbathing,ronda,npg,cerritos,kaz,chard,pershing,clotting,zhi,programm,singlet,morningside,simm,egr,hackensack,taf,kinshasa,availablity,lrd,lugs,kiddies,cpsc,hebert,asta,gato,cimarron,crowell,fanart,nagin,gfi,collapsible,helsing,haringey,phu,stes,prophylactic,rosenfeld,cityscape,tradeoff,sask,instill,ypsilanti,lifes,imate,firestorm,homestay,inept,peet,shiseido,steves,sascha,reconstructing,okt,droplet,dhe,lakota,revises,ipt,macrae,parlay,bdt,woodville,xlarge,proform,gothamist,coexist,advisement,fulltime,macosx,metra,cyg,turtleneck,aquos,hcs,tsar,isbl,gigabytes,triangulation,burleigh,anarchism,stabilizers,gbic,ciba,activa,cgt,terrance,smoothies,orsay,belling,bnsf,op
ps,representational,kagome,snark,woodard,malignancy,makati,cbm,bwi,farah,sitewide,newfound,collider,candi,lgf,boylston,swi,rizzo,wristwatch,owensboro,papas,subscribes,lah,wining,cies,ganesh,castleton,zippers,decaf,emphasises,cbp,crx,shakur,rso,euroffice,roush,caloric,plaintext,ofm,daniele,nucleoside,xsi,buttercup,oakes,searle,shuppan,lanyards,cushman,admissibility,courtenay,aspartame,sleuth,trudy,neem,magix,cosh,aurangabad,golding,ethnography,yamaguchi,bhs,bulkhead,kain,abta,herzegowina,minas,paradiso,cityscapes,oit,replenishment,autobytel,kroger,dexamethasone,strunk,yoghurt,nationalists,tfs,definable,bruin,psychoanalytic,reserva,nasser,simp,zmailer,birthing,collinsville,dimer,powells,abebooks,stemware,landsat,peebles,dewar,docked,burp,radioisotopes,obstetricians,vinson,efx,naia,idb,fahey,multisync,worley,oms,kerri,arith,democratically,datasource,mcelroy,cze,shopgenie,udev,nicol,camara,degas,benassi,prefabricated,gastro,accessor,meteorites,notts,lipoproteins,attleboro,parenteral,biosystems,cerebrovascular,fsn,bahraini,actuaries,delicatessen,rng,marianna,creatas,kidderminster,waukegan,antifungal,promulgate,mvr,socorro,maximized,bde,dlx,erythromycin,dtg,nady,leibniz,flix,cusp,homers,crandall,holcomb,beaulieu,tct,abington,pointy,hamradio,meso,monmouthshire,danvers,tpl,baptisms,backprevious,carnaval,recompile,mainboards,fclose,melodias,cliquez,doberman,installshield,fasb,estas,htpc,stover,cerruti,brainerd,oxycodone,istituto,revs,maha,compressive,wombat,antenne,patek,zippy,neteller,odeon,sbir,backslash,townhome,victorville,amityville,arpa,trannys,goers,chipper,gulfstream,modulate,xserver,infosec,agt,underwired,ambiguities,khai,norepinephrine,kundalini,elkton,carcassonne,saygrace,appending,marathi,songbooks,islamists,recursos,newcomb,stampa,newscast,vtp,stockwell,nederlandse,outtakes,boos,lavie,fina,retinopathy,deportes,tremont,barrio,buggies,zacks,exercisable,speedup,holl,efc,cibc,ontological,thinkstock,flashbacks,kennett,dentures,eckerd,xetra,stg,reimbursable,informit,cdbg,yeltsin,nitrates,aeruginosa,rpath,archaeologist,mitotic,generalised,outliers,sug,frac,cowon,semifinal,deactivate,studie,kazakstan,sva,citesummary,kubota,chroot,falciparum,shifters,undetected,mepis,caries,microstructure,ringwood,pleaser,compuserve,disassembly,miter,propositional,javaworld,ssd,writeups,hoskins,buytop,frome,talkie,loy,exxonmobil,emeryville,gamepad,metazoa,kml,maul,taoiseach,siskiyou,censuses,offseason,scienze,shelved,etd,carryover,fagan,jada,wholeheartedly,polyps,avast,northport,inelastic,puebla,idps,warrenton,traffickers,neckline,aerodynamics,eto,satcodx,leviathan,dfg,classico,harvmac,wrinkled,minimising,bifurcation,kimi,npcs,astrazeneca,poetics,jef,miniseries,yesterdays,dcm,issa,toxicol,libdir,angolan,waynesboro,relayed,fcst,ulcerative,bgs,airlift,downlink,endothelium,suppresses,weinberger,appointee,darcs,hashes,nuff,anza,borehole,flt,htdig,hain,nodules,bowdoin,tunable,memcpy,ucp,panelist,opr,transsexuelle,mailroom,nijmegen,medalist,ryman,gmos,recessive,putas,abou,encrypting,enola,rippers,steyn,redefinition,infield,reformat,atchison,yangtze,zw,peels,preterm,mindfulness,hwnd,stances,synapses,hashing,gere,lrg,unmounted,armoires,archetypes,behemoth,stereophonics,obsessions,piosenek,mhp,thrower,prana,trike,bmps,distillery,estudios,ceredigion,funnier,rickard,disengagement,gratuita,gifting,lpga,esse,maglite,iodide,bakker,hariri,digitization,fistula,campaigners,kel,acca,lauri,rockwall,kellysearch,crawfish,tigi,symbolizes,liverishome,thay,ecuadorian,injectors,natick,mornington,booklist,centrist,inria,torbay,femur,meth
otrexate,landslides,separatist,jelinek,darwen,aung,outlooks,matrimonials,busybox,openview,lifeboat,hara,tuskegee,aly,ciprofloxacin,gul,reconfigure,ahn,instantiation,trw,spambayes,shelburne,programma,lbl,escalated,lucasarts,eastbound,grits,apoptotic,pulldown,redditch,trendnet,iupui,nsr,treehouse,payson,jaz,hedrick,lineman,streamlines,reengineering,cleaver,prodotti,inflight,tracksuit,polyphonics,skidmore,catia,overuse,mge,newsprint,visakhapatnam,miko,hemorrhoids,haulage,torrie,usergroup,poms,mostrar,convolution,endtime,maura,hefce,abbie,mfp,galician,golem,conifer,phenylalanine,wareham,nonpublic,henk,inversely,beebe,dancefloor,eyelet,immunologic,chengdu,beeswax,lanham,crosswalk,lecken,kitsch,scand,sweeteners,farnborough,jalandhar,publi,visioneer,sprints,reinhold,emptive,compa,hrk,faked,manilow,burnsville,banyan,opinionated,quirk,hnl,caterina,blinks,fiore,rationing,tellers,jrnl,waterborne,astron,nity,gree,tradeoffs,goldeneye,occuring,calientes,recomend,functor,trowbridge,niu,mmvi,obe,gyro,technews,shampoos,unfiltered,sabha,bundesliga,enix,communique,cantina,cafta,polyamide,selectmen,lncs,luge,necromancer,carcinomas,subcontinent,dodds,seaton,transcriptase,balmoral,specifier,subsidize,icl,galaxie,ldflags,hiya,nappies,crippling,xul,nti,aspherical,misheard,ecw,sundial,odom,flaky,schlesinger,kryptonite,typology,hydrangea,preamps,aesthetically,vrs,alvaro,htg,heston,ghia,sophomores,binh,allrefer,dcf,scarica,chorale,ooc,fredonia,tiaras,sdio,distr,dscp,cogeneration,flite,harddisk,kennedys,telefono,saleen,bosco,cyclase,dreamcatcher,csw,braddock,ethnically,wbt,morro,smurf,yeager,gelding,blurring,deva,fom,mastectomy,cassell,sarnia,jaundice,lastest,asterisks,nympho,jeffers,hyun,cooktop,fddi,aspergillus,agric,kdc,medics,mwh,photosite,gip,affirmations,variational,socializing,crankshaft,isls,mensaje,tagline,airframe,beater,preowned,dietetic,storedge,redacted,rittenhouse,stereotypical,klass,fpa,treks,victimization,parallax,zante,splices,imagenes,rete,akita,nonresidential,hellman,durex,robison,tof,lpd,seri,freetype,nexis,ldv,collegefuckfest,aiu,molloy,carcinogen,brs,catalyzed,heatwave,yv,spindles,herron,sita,watchtower,fabrizio,unmanaged,gtg,preteens,heme,renumbered,omr,cowell,hyip,crossbow,speciation,tfc,whidbey,betta,imt,emmet,jewelery,lumina,statistician,symmetries,observatories,bupropion,telligent,fungicide,aiptek,crosstalk,mello,deepsand,litas,haart,worx,coyne,adenovirus,hakim,countywide,gnucash,puree,stott,sdg,mandeville,portugese,maurizio,tachycardia,aja,eaa,warrick,cosine,veb,patong,ballina,summarise,accrington,rnas,haddon,xpc,swath,azeri,wta,ulf,kleen,cvm,meehan,jenifer,infiltrate,mapinfo,knightsbridge,renounce,jesper,blairsville,copilot,koontz,fma,northgate,phobias,metaframe,nutritionist,effector,bumsen,rcm,hairstyle,nesbitt,diuretics,cemetary,iap,discards,basie,discontinuous,iqbal,uncorrected,stillman,chloro,bighorn,heartbreaking,xxxvogue,leitrim,prg,justifications,gimmick,brasilia,recordin,abra,trn,zg,acrylics,recensione,fouled,wiretap,dvrs,vocs,moniker,scholes,sharpeners,calida,nse,calloway,tpicd,prods,hfc,ltda,snk,waypoints,nrm,underscored,herrick,starwars,smbs,unreported,phelan,guarani,tampon,easels,sxga,webform,artista,elkhorn,ventana,sublet,chiltern,antares,peaking,stichting,forall,menuitem,marshmallow,hawai,nfa,cals,seltzer,utep,homeostasis,swp,akamai,goodie,milkshake,thrasher,switchers,brussel,hartwell,aup,electrolytes,machu,unshaved,gor,ilya,maneuvering,gaby,softwood,ajay,croupier,hausa,compacts,similiar,elev,egos,rhinitis,dreamhack,aop,beastialty,whedon,microcontrollers,dreamhost,overcrowdin
g,retractions,pinging,catheterization,holton,smears,jmd,melo,exons,mariachi,igi,bday,reseal,compositing,oskaloosa,coopers,psone,versione,storys,escher,hotfix,rmp,gaynor,biota,dossiers,arpt,winsor,hairdryers,axon,morrowind,puter,chubbyland,deflation,pdo,dreyfus,worsened,darlin,treme,reconstituted,aveda,legge,kasper,mugler,yorks,ddi,badlands,deploys,pols,internets,backstroke,resultados,spooner,musicmoz,toothbrushes,bugatti,abrahams,comentarios,brandywine,callaghan,diskettes,resonate,intellivision,castelle,advertises,fives,titusville,plas,royston,nace,digitaladvisor,adesso,geekbuddy,lipoic,hazelwood,gravatar,outfield,carcinogenesis,gdr,phenolic,incrementally,pqi,lenght,acompanhante,orm,terrapins,daria,vander,ccie,mathml,legalization,allendale,modernize,orl,gert,restarts,juris,brookside,streamer,rollei,accumulator,picchu,abril,crocus,zl,citizenry,accountemps,swenson,unfpa,ewido,centreville,alisa,kingsway,erlangen,offtopic,laundromat,redeemable,maxillofacial,slutsfree,glp,baumann,revolutionaries,chillin,cardomain,creamed,tarp,schering,aten,bikaner,chimpanzee,petco,flurries,rau,miki,meson,parathyroid,cmb,analgesia,nqa,theyre,elp,altera,jeddah,nannies,pawtucket,bimonthly,senna,wardrobes,surgically,nongovernmental,inge,rmdir,miso,itx,hydrostatic,attrib,cheaters,hagan,canlii,leong,koehler,clostridium,nerdy,mcnulty,megastores,imperatives,bpd,archetype,kkk,oren,halsey,artic,techworld,vnd,shamanism,numara,csx,reiserfs,roussillon,cheadle,crea,alcorn,ences,bowser,fizz,rationalize,karoo,unearth,biopsies,inconclusive,hookups,herrin,thermostats,canoscan,moldovan,jamiroquai,xerces,subclause,classname,makefiles,bettie,sheesh,birdwatching,speakeasy,harpers,hayashi,epitopes,drivel,blandford,foci,toppings,cantilever,biloba,pth,tweety,initializes,keck,fisica,macromolecular,eic,skagit,kimura,baca,pareto,lymphoid,apacer,forklifts,pvs,refuges,jal,habana,stateless,virtua,cerebellum,vtk,breville,statehood,dct,palgrave,bledsoe,insanely,inglese,aidable,bubblegum,aphex,wroclaw,rajkot,taxidermy,esubscribe,cartagena,juergen,itravel,pashmina,gustafson,jacqui,salim,barnum,anthropologists,glues,undercut,eci,cstv,watsonville,roaster,redbridge,hypertrophy,raza,duron,xserve,wobble,fergie,bohr,boilermakers,counterstrike,hinterland,sufi,milfcruiser,afdc,niggaz,housewarming,regenerative,corre,liquidators,clegg,bagless,bleachers,deodorants,bacteriophage,sheena,prez,brasileiros,transect,thumbshots,soloists,borges,sinusoidal,manpage,lazer,babys,crossovers,parsers,lsl,chuan,hauler,cataloguing,oralsex,storia,fotosearch,usfs,leappad,interesdting,headroom,fortnightly,yerba,kuta,clearfield,huggins,washoe,srg,stabilisation,sayers,publis,intangibles,tameside,summerville,uvm,whalen,kusadasi,hcp,flak,ual,cubed,yuck,concacaf,textbox,erythrocytes,dinky,divo,injunctive,honed,coincidentally,kolb,kruse,microm,portugues,pil,tht,deathmatch,publica,mde,pollination,ews,synchro,etobicoke,midori,chutney,jrs,naturopathic,dermatologist,thumbnailpost,casein,chillout,stefanie,chewable,direc,quintana,normals,villeneuve,scrum,everyman,lopes,eastland,footballers,xviewg,metropole,swarthmore,multicenter,fett,sagebrush,convenor,pco,proteome,warheads,radiologist,liao,westview,optus,medicinenet,hitches,britten,palettes,vma,depauw,gunman,agassi,panoz,uwb,movi,scanlon,nutri,mitra,guilders,filmpje,indexer,ofdm,ullman,coachella,localised,recom,downgraded,ncep,lalique,weill,jeez,varadero,chicco,athabasca,redd,azusa,unbuffered,phoning,rtty,spacey,fmla,albatron,breakpoints,sperma,aran,ciencias,mortage,legato,agarose,avoca,reservados,russellville,oneonta,badass,cfi,pesca,ca
rvalho,nass,mainpage,mccord,kellie,allstars,darwinism,tariq,workarounds,omia,flannery,rediff,lecithin,okmulgee,lates,recertification,phosphorylated,fusing,nerc,avermedia,abuser,sevens,mukherjee,anatomic,watercooler,gatsby,litho,mischa,bangla,menard,rattling,artes,vacaville,teo,enermax,hypo,hadron,gosford,legalize,millbrook,epinephrine,transom,liebherr,mwc,biel,vcu,mils,oreal,picayune,rabanne,gorbachev,norelco,playset,massacration,frontman,garvin,autologous,wiretaps,duggan,jrc,chantelle,liddell,enraged,gir,adrien,blotter,jq,menubar,gagnon,sitters,rdc,jod,meteo,cept,bih,programing,humpback,fournier,alquiler,reprocessing,chaz,bartending,sshd,opodo,patiala,jaques,glc,fantastico,schiffer,preclinical,sfn,conklin,wheelers,deductive,cunard,pygmy,jewett,environnement,biddle,basu,tachometer,bks,nonproliferation,cacharel,elysees,orchestration,adipose,usu,freeservers,potting,uncomplicated,piaa,progs,ues,tobey,sife,wenzel,debi,baez,tana,gedcom,uvc,puccini,seca,ligation,deconstruction,inductance,topicparent,zanaflex,medicus,dmitri,reallocation,kalispell,haight,teleport,skylights,rehabilitative,swab,latimer,boombox,prorated,bbr,pansy,reassignment,hydrodynamic,confirmations,postulated,unlabeled,tosca,brentford,integrin,ranlib,differentiates,skelaxin,velo,multiprocessor,tabla,celluloid,identically,saddlery,whiteside,eurail,endicott,dingo,sessional,pagination,webtopiclist,infopop,accc,iie,burl,truncate,hightower,polygraph,allianz,digress,overseen,scg,thotlib,bluetake,cowes,mailorder,fetuses,lowndes,shr,childbearing,aaj,crayfish,minotaur,heist,mayne,repaint,asq,contr,zool,spastic,suprised,illuminati,piezoelectric,rfps,cutouts,ilc,vinton,enw,meir,tanita,tpr,subsidised,arcsec,wrestlemania,fhs,getter,mimics,watermarking,aftercare,coombs,wolfson,sefton,compu,bonaventure,appz,ecl,gview,temperatura,diastolic,defaulted,cesarean,dialling,rescinded,chitika,tsvn,discoloration,chelan,morel,iles,kashmiri,stacie,collages,enabler,ogo,mowbray,schuler,finlay,gezondheid,ylang,lufkin,tenge,acosta,turbotax,herbals,moderates,piotr,chairmanship,covad,comunidad,moores,hurghada,malformed,mks,seatbelt,dumbbell,chasers,hamer,sherwin,redissemination,stine,mcmullen,skopje,gpx,supplementing,lowrider,liaise,citric,opentype,jpmorgan,nitride,achievers,unbonded,cowen,subdir,rehearing,balmain,crissy,nake,wtp,scn,mendota,makoto,alloc,ultradev,viaggio,cig,scipy,depositary,redhill,caveman,nunez,starfire,whitlock,pelletier,lanark,yada,sandro,jervis,placemats,pathologic,darden,bunnyteens,gordo,otitis,ordinators,bma,leningrad,harkin,eatery,peony,economia,cytosolic,glycerin,tailings,shirtless,darla,rayman,boardhost,frontera,crumpler,hargreaves,mkportal,nucleon,pkc,dov,ndt,hideout,lrs,calcite,fpu,fts,spud,mang,nology,luiz,belden,lense,hendrick,publicati,unverified,untapped,vario,pmsa,recensioni,xq,tev,batty,briscoe,dwr,fingernails,ocarina,camus,mackinac,itis,saks,hahahaha,romenesko,croc,ftes,keyspan,aoe,reposted,cgs,moduli,mra,ery,payoffs,tpi,maywood,buchan,roberson,defrost,ecr,coleraine,arianna,biomarkers,consecutively,bongs,loox,idrc,pretzels,anmelden,vdd,underdeveloped,mktg,yancey,feta,peres,assemblyman,enforcer,suk,customarily,cillin,jett,bility,mingw,ltv,sarees,aaas,bloopers,framemaker,piscataway,cytoskeleton,wuhan,maximising,hoists,fichier,amitriptyline,sgr,scrubber,gratuites,reentry,playtex,communi,buisness,freepics,kbit,marmaris,logarithm,granola,inefficiencies,monocular,kankakee,tandy,ferrite,formato,gaysex,dbus,autorun,nivel,ayatollah,undifferentiated,flowershop,evp,vazquez,reaffirm,dynix,pictur,collette,oooo,dian,doxycycline,weblogging,cl
uttered,sportsmanship,relievers,hwa,vikram,booktopia,lampoon,airtight,firming,mrtg,shoreham,annular,hallmarks,sparking,anale,ikon,lanl,gfdl,commandline,usfws,adic,nns,pmd,rfd,ized,rsd,guardianfilms,gryffindor,ror,blogspot,thao,obsolescence,linguists,blogads,xinjiang,recode,onus,heinlein,oks,kimble,reservists,blaupunkt,statins,descendancy,obsoleted,phim,betacam,mlp,rearrangement,disulfide,myer,bypassed,onefit,interp,neutralizing,tirana,occupiers,kingpin,bnm,relaying,bga,amilo,overlord,daffodil,ukiah,devotionals,figueroa,imd,warenkorb,dfo,habib,archivos,lymphocytic,kala,deering,undetectable,infact,vermeil,silage,ejaculate,smithers,gaeilge,swr,goudy,inkl,bilge,texto,satb,prolactin,bejeweled,bastrop,sunbelt,chewy,paginas,decimation,coen,hypotension,stateful,pypy,busby,gaither,tta,patterning,rdp,cheep,ldr,denbighshire,wittgenstein,preexisting,coffeemaker,braveheart,pbr,ctt,ginsburg,superconductivity,eurostat,kyi,amygdala,corrie,lonestar,dueling,challengers,reshape,photoset,electrolytic,hasegawa,gainers,calidad,tinkerbell,aldara,poway,physiologic,optimality,riyal,hwn,dremel,cerebellar,dth,dancin,summarises,choy,heartwarming,unwin,strider,eastlake,hyp,cannonball,mathcad,skipton,patently,bitmaps,biopharmaceutical,analytically,sll,aramaic,bogged,incremented,homem,valorem,publicist,acb,muzik,tempera,recyclers,pillsbury,seach,intermediation,lacing,aggregating,soundboard,teapots,rif,neb,archivo,smartdisk,boho,titration,tschechien,sef,boney,oxidoreductase,lino,lcm,skimmer,mccullagh,gats,extrinsic,erlbaum,sketchy,gooseneck,bof,tiffin,pacer,battersea,noname,gung,asv,sasaki,outboards,owings,xue,tbi,interlaken,kampala,jcc,tentec,kilpatrick,pixmap,bitty,pge,dtmf,prosser,ojai,stethoscope,monotonic,ebookmall,perot,medien,kahuna,washroom,jacoby,neurotransmitter,intercity,broadview,micros,straus,flack,amortisation,pfu,tonite,vonnegut,distros,teething,subsector,mechanistic,orbis,flawlessly,lidar,frp,whatnot,tripartite,studebaker,cartographic,rwd,preconditions,gardenia,adland,miembro,irland,linwood,biotic,kowalski,marymount,zathura,highgate,fudforum,takeshi,taro,mpd,crowder,socialize,scunthorpe,deepwater,clickbank,ruleset,viscose,perso,novica,manhunt,pavers,elks,aalborg,occupier,lunchbox,euchre,proporta,mitosis,paychecks,bellaire,suitcases,postel,mdg,tutu,paisa,wbs,slidell,psb,vocab,mmhg,clocking,sks,hemorrhagic,plein,hitchens,fone,crores,classifiers,novosibirsk,greenwald,rtt,copacabana,videorecording,kickstart,biggie,neutralization,pvm,ksu,kph,pdl,preprocessing,particulates,skylark,llandudno,squirrelmail,oviedo,pauly,bromsgrove,starsky,prion,simfree,pennywise,grier,apd,diphosphate,lbj,interscan,pipers,tronic,surfside,tsunamis,dordogne,hotlinks,neely,jeri,proteasome,transl,goulburn,vtkusers,energizing,butane,stf,bluebonnet,htf,stmt,inked,novatech,iid,elektronik,maturities,nameserver,tomlin,jigsaws,distorting,kamikaze,quaid,juggernaut,gordonii,latrobe,bboard,consultancies,handley,gramercy,ccb,derrida,mgb,bioavailability,ucas,tdr,nochex,lilith,foreplay,waas,mccaffrey,privatized,uncovers,gargoyle,stockists,ostream,lenmar,mamiya,mildura,insn,bodega,hardworking,dockets,dedham,ered,stomping,kottayam,carle,eest,pondicherry,mpr,fiddling,panamanian,buyitnow,bungie,goya,superclass,categoria,buyback,uhh,gigolo,tmj,vangelis,kingwood,arn,dorling,maximization,wls,absenteeism,quantifiable,pion,sliver,leptin,sxsw,bummer,isometric,retraction,amboy,dunning,grinch,okeechobee,shouldnt,teeniefiles,gcj,whatcom,bbe,unb,sws,hydrocortisone,cerebrospinal,susana,rumba,bouchard,yesteryear,orthotics,spunk,superdrive,jolene,jalapeno,propellant
,touchpad,raisers,mdma,confocal,jochen,caddo,dcl,expatica,bitstream,igo,bartenders,refilling,modell,keighley,rangefinder,nostdinc,oficial,lanparty,monza,sportfishing,rlc,exacerbate,beckwith,anemone,equivalently,duxbury,zhen,cordele,ebel,ninjas,milla,incase,mva,zinn,comercial,segfault,wisden,maingate,costner,powerpuff,gsfc,lycoming,regula,lastminute,winbook,talladega,optiplex,syrups,chiles,estimations,jaxx,cercla,slb,absolutly,guesswork,tradeshows,javascripts,irritant,warcry,optura,combinatorics,graceland,encino,disconnects,castello,monolith,mct,geos,hls,intrusions,glories,prelims,kanawha,yglesias,squibb,memset,edirol,mandala,alexey,homecare,dugan,calmodulin,ameritech,umar,timepieces,nonfarm,anklet,wsp,byrnes,determinism,addams,moeller,normality,wiesbaden,deflect,taoism,ikeda,chakras,samara,unsung,gargoyles,massaging,ajmer,lossy,mitogen,hurwitz,gulliver,bul,aerodrome,darkside,intensification,raya,ruger,rba,gennaio,seaford,ungarn,vincenzo,warszawa,dillinger,bandon,odell,riddim,perforation,cida,annika,uart,tryout,proxima,fst,lladro,parameterized,assfucking,manageability,crystalspace,pandas,choiceshirts,taa,servertime,fmii,nepean,tracklist,indio,tino,bernal,hbr,homogenous,policyholder,distributional,tidewater,ngfl,erlang,starz,follicular,grupos,oq,gonorrhea,blaqboard,listeria,afaik,lawmaker,datatypes,arie,flavorful,apu,fyrom,refunding,subcontracts,moissanite,finchley,mediates,polyacrylamide,bizzare,standish,conus,competences,jtag,compatability,millville,coches,biathlon,mico,moxie,biff,paulette,chania,suu,backspace,aways,fugue,dissonance,medicated,initio,bestality,hypothermia,carman,timberline,defenselink,sunfire,mckean,smithville,mtf,rebooting,storytellers,lamisil,morphing,chua,sevenoaks,haplotypes,fiskars,speer,lathes,refillable,yearbooks,engin,kyushu,tricycle,penne,amphetamines,systemworks,keele,afficher,trillium,nena,bulfinch,transients,hil,concedes,swot,howarth,andante,farmingdale,bitching,overtly,rateitall,tubulin,gmx,bannister,omer,humanoid,infringements,stylebox,tiredness,branden,panning,wasabi,morecambe,hawkesbury,cocksucker,sak,kilobytes,breather,slu,adjudicated,methylene,wholeness,gnue,gynecol,uas,nacogdoches,simcity,hummingbirds,garnier,kath,cppflags,educause,cotswolds,heifers,sephora,joao,tremblay,gynaecology,vertebrata,blackcomb,ffxi,ottomans,rodin,ecac,actu,nde,lockable,dslr,evaporator,antihistamines,uninstaller,airliner,bibdate,unwrapped,dumbass,brc,arrhythmias,netweaver,sateen,rtos,eip,moteur,fotopage,uhm,birr,autosomal,protec,purim,rhododendron,canadienne,profes,pjm,ddl,underlay,granule,setfont,cookin,gillett,rocklin,welland,ageless,nuernberg,bleep,emedia,regensburg,gama,xfree,sills,berwyn,howler,hardtop,carded,lipo,zandt,reformatted,internment,dominick,mahmood,avent,swaying,igloo,ambler,voyeurism,bachman,referential,hydrating,adaware,dewpt,repressor,galego,neilson,scorecards,newlines,arcana,aau,transworld,nmc,discoideum,wairarapa,fogerty,beit,heidegger,backhoe,leftists,quinnipiac,mannequin,malloy,enviroment,mako,anl,noyes,eprom,trashed,ryanair,betsey,rath,lobbies,silvertone,cupcakes,artest,netfilter,voldemort,oldenburg,bazooka,gerbera,cient,psg,mittal,camellia,pronouncements,fonseca,rescind,asps,asheron,mance,viggo,qar,hepatocellular,styrofoam,malfunctions,lindner,linc,salida,dunwoody,dioxins,shaq,epmi,excavator,adolescente,redcar,urac,oncolink,cartoonstock,cwm,bibb,gymnast,inexpensively,isystem,evol,nmda,hazen,davide,forceps,motherfucker,ccw,mainframes,sapulpa,costas,searcy,labelle,adjoint,mclennan,killa,lipscomb,monocytes,requestor,cyn,splint,digitech,mrnas,llamas,multifacet
ed,gamez,voorhees,boas,solvay,thorsten,yeo,terk,privatevoyeur,coolmax,rebooted,toskana,unidiff,radionuclides,tilburg,decoys,pariah,offerors,wmi,darnell,meaty,gages,zapata,supt,bartleby,vermeer,pinstripe,hemodialysis,artis,tov,amateursex,dailey,egret,cornhuskers,fontconfig,jordans,guildhall,hasselblad,piney,unbundled,kusastro,onclick,functioned,toca,houseware,kdebase,ysgol,griggs,nicd,mdp,umi,fullmetal,pappas,aransas,tacacs,movem,abundances,oulu,fractionation,cdb,blitzer,ruc,karte,cashflow,retouching,brattleboro,eprops,cya,ubud,fmri,infosys,displacements,jerez,dhc,ielts,fellas,mno,picturemate,unicorns,playroom,dandruff,albers,discworld,leaved,existance,unionists,bloodlines,follett,irn,ramsar,woodburn,efs,auk,lockergnome,oocytes,armadillo,bsr,captiva,rinehart,brom,tlp,gensat,filers,lle,retrievers,pacifier,thurmond,stroudsburg,dominik,vivek,nla,inmarsat,unprofessional,hydrographic,mcadams,wailea,nforce,scones,paediatrics,nzdt,ilog,finkelstein,candylist,appalachia,marist,musgrave,vakantie,varanasi,yushchenko,relativism,jardine,schuylkill,ericson,schweizer,stravinsky,keds,ananda,nsx,jud,tripwire,aves,rediscovered,headstone,depleting,junkyard,perma,copthorne,multitasking,distrib,byob,tunstall,hager,spearheaded,nacho,underlining,heshe,jcr,catalogued,rawlins,springville,differentially,powwows,tsui,inductor,chalabi,encephalopathy,grote,ebs,raipur,custodians,guardia,jlo,khalil,overstated,webtv,insulators,kass,weds,servizi,quicklink,qso,dumbest,prowler,loadings,epos,sizzle,desalination,copolymer,duplo,lawnmower,skf,nontraditional,piet,ghaziabad,dredged,vct,marcasite,kamp,scoliosis,arwen,artie,fifths,austell,fernie,carport,dubbing,weblist,maximo,bax,searls,scuk,uiuc,crustaceans,yorkville,wayback,gcg,ural,calibur,girona,haig,perk,zander,samir,freee,avia,developement,pptp,beac,urbanized,trentino,marzo,dfl,lpa,jiri,mccollum,affymetrix,bevan,ichiro,dtt,cofe,loyalist,verma,daybed,rimes,quimby,barone,thomasnet,koeln,endocrinol,evaporative,gwybodaeth,preshrunk,hezbollah,naga,mmu,februar,finalizing,printhead,blanton,zellweger,manhole,eroding,emap,searchgals,typewriters,tabasco,cpb,coffman,lsm,rhodesia,halpern,purebred,netapp,masochism,millington,bergamot,shutout,willson,chown,prosthetics,proms,zk,karol,underlines,mosh,bakelite,kirkby,intermountain,holtz,prensa,vegf,galesburg,lba,klondike,webstat,reeder,neoplastic,applesauce,fibreglass,kenji,gluon,feisty,hynes,clogging,nonverbal,etoile,orangeburg,ladybird,concat,milliken,byproduct,specializations,chaintech,swa,porterville,kbyte,bizwiz,congruent,boehm,selva,rainey,aphis,rfs,tarantula,egovernment,udf,snuggle,shang,batten,inop,lough,vigrx,trios,bvi,unallocated,nau,condiciones,wss,modi,componentartscstamp,dyk,maldon,xantrex,dlg,edx,karzai,navi,brockport,cort,softgels,engravers,wether,hangin,handicaps,associazione,khu,nfb,dohc,clu,capps,vijayawada,griffon,biologics,bluescript,instantiate,paperweight,dilation,izzy,bedspread,knudsen,jabberwacky,kiowa,overtones,gsr,faithfull,quezon,pragmatism,rct,usi,wiretapping,fabricate,exabyte,pitty,kcl,pendragon,opment,kva,meeker,bootlegs,jimbo,jarrow,mullin,gridsphere,activesync,macwarehouse,vela,wikiusername,hessen,eyelash,gob,antifreeze,beamer,feedblitz,harvick,clicker,immobilized,dalmatian,hemodynamic,reshaping,contessa,elc,stagecoach,googling,maxpreps,jessup,faisal,ruddy,magazzino,jippii,academe,fjord,flybase,alpena,psl,junebug,grissom,shiki,knockoff,kommentar,westpac,gosling,novosti,mendel,adtran,wasserman,transexuais,aslan,hoge,fouling,macfarlane,hideshow,trailhead,edg,bayshore,preprints,grs,duction,anesthetics,nalgene,iaf,k
hao,berhad,savedrop,magnifiers,chitty,goldwater,lesbiens,jumpin,payables,victimized,tabu,inactivated,respirators,ataxia,mssql,storylines,camaraderie,carpark,internetworking,gawk,planing,termini,avaliable,scho,buysafe,hds,iad,pleasantville,fabrications,wtd,loh,jamshedpur,denture,gaudi,bluefield,telesales,vpc,ppr,jetsons,protagonists,fjd,anoka,boliviano,curtiss,wagoner,storyboard,trol,rajiv,xfce,axons,dmso,immunotherapy,namorada,neva,zakynthos,weitz,quercus,nhhs,amara,microcosm,raia,bizarro,mehmet,christos,categorically,autoresponder,aad,adolfo,welwyn,nzlug,vci,catnip,whittington,sorel,boned,vittorio,seta,tomasz,annes,tonka,nath,toth,tomaso,ascap,livedoor,schlampen,altamonte,scotweb,pillowcases,medlineplus,ambiente,masterson,nlc,fibonacci,bridgeton,wmds,tyrrell,junky,ballasts,jbuilder,cnf,nagano,hardman,roadmate,interleaved,peirce,pusher,egm,thetford,rtm,gnostic,coreutils,uninstalling,heft,ambivalent,startpage,difranco,mmi,typist,estudio,seiu,moisturizers,cardiol,lamination,bibi,mof,carpe,scottie,blackrock,pons,fistful,somethings,itl,staffer,rhiannon,linspire,cornucopia,newsfactor,countering,worldpay,catan,almaty,appraise,runny,braunfels,reorg,icg,javax,sema,albumlist,heraklion,stressors,shg,collocation,mccauley,vesicle,stuffers,prego,ichat,lubricated,sinha,pharmacia,aggiungi,shakin,cyr,vce,vigilante,gauging,lipase,constabulary,biochim,epcot,cricketer,defibrillator,rcn,drooling,stoll,staines,tnd,adversarial,tbn,softwa,pbc,ptp,demonstrator,boingo,voyeurs,aoki,banerjee,hondo,hysteresis,workspaces,campion,lugano,mobilisation,pruitt,foals,aciphex,sculpt,iskin,soledad,bagpipes,devaluation,beastyality,segway,mineralization,grc,trafficked,stedman,gurl,mcginnis,dvips,klee,garber,wizardry,fervent,headrest,dermatol,chaperone,huygens,eurythmics,transboundary,reclassified,delusional,tosh,pimpin,husqvarna,faxpress,tinkering,unneeded,babar,pago,hussey,officeconnect,mickelson,leukocytes,wesnoth,hydride,npp,zondervan,pele,opeth,kottke,hometwat,ogm,mauna,kilns,bpi,kst,harbin,assemblers,karst,wada,selfless,gynecologists,enewsletters,willi,bip,nami,guestbooks,sharjah,aguirre,krug,dongs,drv,schoolers,kidnappers,lemmon,ilan,gnutella,deutsches,liquidator,evers,uniross,grassley,stowaway,brainer,organiza,cellog,channeled,tastings,deccan,aiaa,neurosciences,factorial,librarianship,texmacs,vocabularies,blasters,livable,tifa,nant,libjava,ramblers,counterproductive,catskill,environmentalism,ufs,gwalior,ubl,kilts,balenciaga,alamitos,newsburst,septum,animators,signifi,neoclassical,mediaeval,piezo,escudo,pineville,botanica,petter,adenine,fren,lysis,pastas,helicase,dredd,efinancialcareers,diehl,kiley,kwd,ihousing,yoruba,malformations,embarassed,alexia,checkup,commited,nanotube,becta,trados,portofino,lifesaving,danh,sctp,tayside,rani,playmobil,tualatin,razorbacks,ionized,perodua,trg,subst,cpap,molex,vitara,fostex,zmk,placental,parses,saic,newsmakers,dshield,homocysteine,juego,metamorphic,cld,otcbb,moet,rado,watchguard,sugarland,singularities,trophic,ekg,dacia,reversi,insemination,houma,quetzal,shoshone,linder,homing,highbury,eizo,podiatrists,conch,crossref,hda,poppins,chaim,cytotoxicity,xugana,weevil,integrations,clarkston,ritek,morgue,unpatched,kickers,referers,kitt,servizio,biosecurity,leviton,twl,etx,electrification,peninsular,juggle,yeshiva,sociologist,wsc,sartre,finitely,spect,kathie,ards,corny,brazilians,lundy,histocompatibility,woolwich,irp,handango,cosgrove,sulfuric,renderings,msh,trt,ldcs,lect,kollam,edgerton,bulleted,acupressure,thotbool,hiawatha,nhfb,ahps,operon,ugandan,paton,suspends,categorie,stratigraphy,howes,s
urfed,steins,babu,andrade,agarwal,ncd,surefire,cori,planetside,snorkelling,waterworks,luk,headlamps,anaesthetic,isomerase,fdisk,dunstable,awb,hendon,accreditations,doral,nta,macadamia,takin,marriot,bfs,disqualify,ttp,sixt,beazley,rashes,najaf,hwg,bukit,antiaging,psychol,dfe,bedingfield,equated,swig,lightscribe,unionist,lytham,clocked,duced,complementing,keycode,pennants,camas,eamon,zaurus,qnx,srx,delux,uli,grrl,bookie,boggling,skewers,richman,photodisc,oto,uav,cnhi,umberto,bautista,zooms,newsdesk,roadblocks,klum,goh,goebel,pou,homophobic,diamondback,foosball,rept,spurgeon,lumberjack,marv,epidermis,mobley,oktoberfest,photoshoot,rhinoplasty,peptic,bauman,tannins,psychotropic,tilley,malaya,hypothalamus,shostakovich,scherer,tsh,manipulator,calabasas,coromandel,pliner,timestamps,pango,edexcel,snc,nim,gwaith,breaststroke,oroville,mitsumi,ichi,mobius,deductibles,nikola,berrien,peacemaker,ilia,bookmarked,letterbox,halal,agl,noor,noll,filenet,freeland,kirsch,roadhouse,charted,microtubule,cubicles,blau,ladysmith,gatti,ection,switchable,mcminnville,hcm,interactives,altus,phospholipase,transformative,samuelson,completly,anhydrous,germplasm,gradzone,gdansk,jenner,parkin,unmoderated,wagers,beliefnet,hotbar,canis,ravioli,enrolments,walling,marblehead,dvt,cameltoes,ribosome,carnivals,srf,speedman,instrume,moffett,augustana,topsoil,latifah,isomers,pettit,lemans,telescoping,gamedesire,koha,balancer,picton,underhill,dinghies,chooser,argentinian,ahrq,apparels,timescales,cef,athenian,mcewan,sexshop,zermatt,mha,geert,bugging,trento,lyndhurst,nex,wdc,symbiotic,wds,dyslexic,nomic,tecnica,mmap,wishbone,mcad,prm,bashir,licenced,larissa,collab,squirter,infecting,penetrations,protea,argento,polyvinyl,ganglion,ruud,bunt,solgar,lipper,chimpanzees,jdo,testcases,tda,hamza,meeks,athol,centimeter,excreted,paros,azzaro,nappa,sirna,sexvideos,nonprescription,lyd,firework,crlf,localize,tablatures,jndi,vigorish,dcd,schulte,gioco,chested,universit,thrivent,jie,hydrothermal,smalley,hoke,ramen,coleoptera,intensifying,copyleft,llb,outfitted,khtml,chatterjee,adoptee,augusto,resnick,intersects,grandmaster,nusa,deadball,cksum,historiography,amistad,bellacor,trcdsembl,campagnolo,downgrades,sexbilder,scrapping,pdoc,haskins,bullhead,rhett,mimosa,wildfires,ellyn,hryvnia,halved,cfml,vatu,ecademy,dolore,shauna,multilink,funchal,ximian,bergamo,quarterfinals,hobbyist,reardon,homozygous,glyn,popset,torsten,puller,mathworks,namm,dena,mdksa,dcom,danskin,bexar,dinning,pfd,misfit,hamden,hardie,redfield,scotus,quotable,cranfield,asides,beacuse,musicstrands,kla,unternehmen,teg,roseland,pgbuildfarm,volo,zirconium,noelle,httpwww,agement,guan,tcf,opencube,shao,mears,rectification,omc,duisburg,pows,hsphere,entertai,keeler,highpoint,stratospheric,newegg,preeminent,nonparametric,mistral,percocet,zeroes,kth,divisor,wanderlust,ugc,cleat,decentralisation,shite,verna,immediacy,trak,swingin,eckert,casco,olivet,resi,bergeron,felonies,gasification,vibrio,animale,leda,artesia,casebook,nhc,gruppo,fotokasten,yaw,searing,detonation,gse,approximating,hollingsworth,obasanjo,pinewood,tangential,ridgway,headhunter,ero,sharkey,clwyd,bretton,bustier,apologizes,manoj,muskogee,pismo,resortquest,diskeeper,lathrop,pala,glebe,xterra,pml,seahorse,geneve,wpointer,softener,breaching,maelstrom,prioritizing,jsa,annunci,modelos,seraphim,raymarine,dodgeball,munity,assfuck,alopecia,singaporean,nowak,keyboarding,beachside,sparco,robeson,navbar,fsr,contribs,lineages,sumitomo,dermatologists,marbled,probleme,irv,blackmore,bothersome,draconian,troup,approver,pcgs,saville,srinivasan,poldek,p
erfor,articular,gwynn,trackball,asis,mansell,unf,werewolves,magazin,sible,vla,autocorrelation,waltrip,mombasa,schroder,alachua,hks,duns,ornl,cabrio,guanine,bridgetown,rhsa,luka,cpf,roadstar,creditcard,frf,michaela,willett,brews,baskin,hamel,zoids,semantically,cagliari,eggert,valkyrie,airlie,salas,gnomemeeting,benji,nent,cashew,unproven,myocardium,kap,gini,prek,cypher,paraiso,nightline,cursive,organises,hydrated,csk,schwanz,martinsburg,liguria,hsieh,forties,pgc,sayre,photosynthetic,pips,tongued,lifetips,walcott,cname,unapproved,emm,nematodes,jaclyn,kell,gremlins,bolero,togethers,dicom,paroxetine,vivien,gpr,bru,ilt,lished,tortola,mav,powertrain,telkom,immunized,nuneaton,fica,trulia,ricochet,kurosawa,aberrant,nld,ukr,wyandotte,odpm,pgk,dumber,ruptured,insoles,starlet,earner,kem,radiologists,polydor,nutraceuticals,zoomed,groupie,brinkmann,thrombin,aco,laminar,immunoglobulins,jamnagar,camber,vxi,colliery,incubators,procimagem,sweeties,landfall,seanad,intramurals,kwok,borderless,methyltransferase,suwannee,lgs,cjd,hyperlinked,birkenhead,torrevieja,purposefully,gutted,serveur,grr,morrell,ouachita,imran,slat,freeways,multithreaded,newlyweds,documentum,ebm,xiang,burnin,reelection,hales,rutter,uunet,vitreous,noord,centrelink,lempicka,iru,countable,dolomite,salvaged,soyuz,frick,lwp,afterglow,ferent,maes,mandi,secunderabad,millwork,sampo,takedown,colostrum,cfnm,judeo,wisc,lata,sexi,homies,tarmac,customisation,conservator,pipettes,goon,artefact,expository,complementarity,cosco,mercosur,tfm,benzodiazepines,mii,netmask,stalling,molnar,hmso,huw,aliso,decors,oldman,nuevos,acis,somthing,zabasearch,steuben,minicom,hausfrau,goldfields,rickey,minichamps,usagi,bisexuales,rothman,shana,srivastava,oemig,beefy,senha,pica,pucci,skits,shenyang,mussolini,kootenay,ethnology,donohue,cyc,childers,mahjongg,davao,tajik,codemasters,mydd,charade,arnhem,bobbin,istudy,rugrats,dancewear,mechanized,ject,mayes,canmore,reassigned,nnnn,crema,bursa,cfu,svm,riccardo,realvideo,lites,krall,centrifugation,welds,braunschweig,coptic,securityfocus,reorganisation,conglomerates,dehumidifiers,dumper,hamill,halston,iau,wfc,spiny,arezzo,mbeki,invisionfree,dropkick,elastomer,wahoo,anagram,fogdog,finnegan,gof,newsworthy,defs,sensitization,hyperactive,sidi,antenatal,elektro,nordsee,yuna,pluggable,hemophilia,kola,revitalizing,seepage,alitalia,orale,wri,ory,bcf,wooten,nonviolence,baume,berkman,ashdown,diciembre,purports,fcuk,shillong,mondial,brushless,technicolor,narragansett,barenaked,pandagon,rehabilitated,outdoorliving,expendable,ponca,tigard,soulmate,kaine,maxis,poppers,allposters,commercio,dods,tsl,volusia,iic,thm,elibrary,datebook,rapists,ultrasparc,seabed,orly,complicating,suzi,texturing,correspondences,groomsmen,avo,latour,manipur,arnett,suzhou,headboards,cil,palomino,kol,pomeranian,diptera,gericom,steiff,cordis,erythrocyte,myelin,fragility,drucken,reso,hov,tsukuba,kustom,invoiced,hannigan,hangul,montauk,modulators,irvington,tsang,brownian,mousepads,saml,archivists,herringbone,bodom,harrahs,daiwa,juanes,nids,moorcock,ccu,eyeliner,totalled,syp,woken,aphids,cutthroat,coincidental,lepidoptera,buda,tarrytown,vaseline,bluewater,strontium,burdick,crustal,hackman,shopnbc,aicpa,psal,albicans,seduces,epps,kroll,unambiguously,staley,cutbacks,hemet,ariana,pch,cgmp,mcas,multimeter,anubis,htr,analyte,peseta,enh,glitz,kewl,bidi,winsock,lvs,moldings,peltier,iod,ior,trackmania,ballets,doylestown,spaceflight,quicklist,proportionality,overruns,yadav,sordid,qpf,mentorship,lyx,tained,oligonucleotides,bbci,spidey,videotaped,regnow,jukeboxes,xpdf,portishead,irt,sp
lunk,kommentare,citywire,crud,nev,febs,adu,ird,ribeiro,abrahamsson,epidemiol,coms,vdo,outro,pneumococcal,tilton,brookstone,apic,avenge,alleviating,sportif,inservice,punts,tives,sora,tgs,daugherty,yarrow,wakeup,meatloaf,mumford,datafile,buchen,zzzz,objectclass,polices,dogging,cursus,plasminogen,kinsella,lindgren,asymptotically,duce,wonderwall,crick,pvd,enveloped,mnfrs,caseiro,instabilities,muskoka,jeni,thalia,apac,reforestation,paradoxically,dren,dubbo,inductors,opin,symlinks,gamestracker,secam,gatorade,irm,cava,rupp,wacker,lanta,cres,yue,oligo,chairpersons,incesto,spca,zapper,materialized,accolade,memorized,squidoo,interpretative,roping,rauch,oxymoron,reciever,maryann,pentagram,viv,infusions,slvr,choppy,robotech,spb,servic,saya,univeristy,bahamian,gos,fwy,nocd,stipends,stirlingshire,caerphilly,riboflavin,fiu,kalb,ubiquity,vandal,romper,bitumen,nolo,shimizu,postpost,rummy,paleo,unrhyw,pinscher,constructively,sufjan,christiane,spliced,finca,gpf,iaa,iesg,brecon,kiran,trekearth,repeatability,gunning,byblos,tadpole,mitsui,storytime,berserk,wellman,cardiologist,jammin,leis,hirst,fellatio,ggc,terran,breadcrumbs,lorena,remaster,tpg,cifrada,curvy,envisage,boneca,basements,sharpton,crucially,lfn,imao,antonin,soundgarden,carrara,bron,decoupling,monroeville,environmentalist,msha,eastenders,adultfriendfinder,bein,stef,fpgas,mistreatment,rbl,qlogic,shona,sutcliffe,previousprevious,infective,estrella,gans,shards,vcds,acadian,kahului,phonetics,comittment,blix,biocompare,whimsy,frameset,kot,nyack,lolo,carboxylic,pkgconfig,dipartimento,traceback,svlug,microdermabrasion,waterbody,jeeps,tiverton,wundef,spay,gilmer,ceqa,bodog,followups,internat,biarritz,gurps,bessemer,iceman,pegged,liberator,rediscover,lovecraft,wavefront,bhangra,zuni,epm,meningococcal,ketone,glazer,yashica,geodesic,congruence,tenkaichi,omani,tenuous,reuter,surfactants,cohomology,epicenter,toke,dwf,santas,kutcher,christo,lucio,phenomenological,debriefing,miniskirts,ansmann,mfps,lentil,kannur,backer,albedo,flsa,pauli,mcewen,danner,angora,redstone,lxwxh,informacion,phyto,libpam,blo,cocky,pitchfork,stratocaster,mohegan,brazzaville,broussard,beano,interconnections,willa,toiletry,sats,beko,exchangeable,colm,arabe,stretchy,starburst,dzd,neurologist,leonards,kitties,dottie,rspb,fwrite,homicides,forde,ipf,travelpro,haemophilus,ronny,hubris,bottomline,kosova,neuropsychological,genitalia,waiving,swirls,dampers,comhairle,cheech,eigenvectors,extrapolated,chaining,defected,yurasov,gakkai,justia,campylobacter,northumbria,seidel,kenseth,pmr,kare,dumbo,holocene,jwin,superconductors,yeung,polygram,egon,distillate,unweighted,gramm,safeco,bentonville,ishikawa,vuv,strachan,bayard,escalator,periwinkle,breakin,rsmo,publishi,darmowy,outfile,choreographed,obrazki,accross,yag,gravesend,lovemaking,boucheron,farrow,annulment,kwai,tubbs,bartow,tonbridge,lesbico,panerai,spate,belladonna,lexi,sobering,carcinogenicity,djf,semis,pcv,suppressors,leachate,dingle,mbendi,celina,hydroponic,hoyer,xia,kovacs,recalculate,maltreatment,hitchin,medtronic,meerut,whsmith,fontsize,relaxes,kis,halos,cracow,saco,webcomics,ife,sauder,dioceses,uct,postdoc,biceps,leela,hydrant,hamstring,darrow,tinderbox,sify,naw,ganguly,streetwise,imprinting,dandenong,colecovision,gnuplot,nucleation,werbung,prb,blr,croce,deviance,goldfrapp,tetrahedron,materialize,homeworld,foodborne,baixar,stagg,fondness,ellicott,merchandiser,ler,djia,eastleigh,blacklisted,freetext,wxhxd,multiplicative,metis,urethra,dalrymple,retroactively,hartnett,gcd,kilos,multivitamin,vientiane,koji,scran,bwp,emoticon,mercator,lyricist,macro
molecules,fungicides,amines,karcher,cssa,freetown,beneficially,tugrik,monotype,ishii,kempinski,pigmented,mipsel,ridership,athenaeum,twikiweb,mpm,faking,clsid,kenobi,endoplasmic,motorised,lomax,geraldton,eck,cssrule,auerbach,metlife,apocalyptica,masa,risotto,follicles,ashtabula,sussman,exmouth,melua,cvss,pana,stimulators,gnf,uvic,asustek,dieta,famvir,conflicted,retirements,sixers,metab,gregoire,burris,creat,rajan,brainwashed,berenstain,crittenden,antoni,gbs,associ,yankovic,gnvq,rogaine,kek,gridlock,integrable,chalkboard,dopod,unranked,karlsson,anaemia,natur,permian,bartley,unaffiliated,slrs,montreux,partici,starbuck,infractions,karon,treviso,backdrops,turkmen,standups,sowell,aktuelle,gleeson,lss,globulin,woah,nte,midob,violator,boxcar,sagan,aviso,pounder,vieira,kronor,tocopherol,keiko,newsrx,lesbe,pharmacokinetic,intercepts,tirelessly,adsorbed,ksh,plunkett,guenther,penta,phospholipid,reiterates,wuc,oversaw,arraylist,qy,outsourcer,eyeshadow,pushbutton,doujinshi,catagories,pilar,paltz,viaduct,pugster,elastomers,evenflo,mmk,wadi,secularism,cellspacing,trekker,llm,pakistanis,glyphs,neuroblastoma,loftus,gigli,thorp,seeley,producten,glandular,aligns,rejuvenate,grt,northants,ifconfig,sherrill,wintasks,xenia,whangarei,hra,expres,nadir,recoup,rnai,fyr,franchised,batchelor,relocatable,warhead,backfill,fascists,kedar,adjacency,iberostar,mancha,gorton,insta,jni,cellpadding,larnaca,carmarthen,endgame,streamlight,golan,thomann,totten,curbside,samhsa,howrah,planer,hermaphrodite,gavel,bassinets,footjoy,fairtrade,gah,prestwick,paoli,alben,laconia,berkowitz,inputting,dimming,indiatimes,arcgis,goof,landmine,boracay,appro,notifier,wirth,valerian,bucher,wts,saad,weisz,enrollee,authenticating,wheatland,zildjian,revisor,faauto,profs,pheonix,seitz,administrivia,foams,leh,orbitals,hammerhead,dotcom,xof,klezmer,fosgate,walworth,niguel,quickfind,isakmp,facia,stalemate,multimediacard,motrin,glx,classifies,ischia,ankh,mohali,incurs,feist,ldb,netzero,rationalization,eef,brokering,viewport,isas,masterbate,geneseo,grammer,garantie,sanofi,malignancies,yaesu,jpegs,spitz,chea,limassol,lobbied,splat,nostradamus,gallium,mobb,mannered,dorada,nalin,sorbet,lunenburg,phc,tdma,bodycare,jobsearch,sharia,topiary,cataloged,camsex,avm,kimber,extendable,ager,pella,optometrist,tinh,bogey,kana,pipette,bln,coveralls,teng,stayz,isolator,wicking,cph,zany,umatilla,austral,applauds,taks,interferometer,barbican,ohana,rebs,cerf,criminally,mkv,adio,psychopathology,lkr,leyton,cartoonists,appellees,indira,redraw,pictbridge,mahesh,beng,ncar,gord,nanometer,faceless,moyers,oregonian,aftershock,gena,leggett,wsdot,classique,menon,spiro,whiteboards,strategists,dnv,loti,kaos,hydrotherapy,marionette,islay,myv,typeof,igt,nitty,ddb,quintile,freightliner,monkees,lindley,dehumidifier,industrials,bouncers,transfered,mages,dmb,roseanne,chk,trigraphs,rer,bettis,cyberlink,browsable,workhorse,iterated,mcfly,kyd,pooping,preferentially,fraternities,diuretic,octubre,castell,emerg,sampras,gephardt,zimbabwean,unexpired,westmorland,biscotti,mavica,everyones,shaikh,nampa,youngblood,plana,refractor,bouldering,flemington,dysphagia,redesigning,milken,xsel,zooplankton,gsd,philatelic,modularity,parkview,keto,marrone,wallmounting,tias,marengo,quiche,epoc,resales,maduro,murrieta,fairplay,ddp,woodinville,registro,transcriber,notarized,neocons,franchisor,diab,vying,morehouse,lauper,bedspreads,pooch,morphism,gripper,tavistock,negated,javabeans,nashik,atomki,musicianship,viaggi,bbn,cady,adios,purview,bosque,xxxl,dyfed,biomaterials,overpass,berners,goaltender,speedometer,ultrium,carte
ret,fatwa,bottomed,superscript,rwandan,proteinase,coolermaster,maca,haircuts,crewneck,discriminant,bayfield,mishra,morey,multiplexers,pcga,stade,carnivore,codingsequence,knowledgealert,egalitarian,pombe,yamato,jenson,mortgagee,middlefield,iiyama,schell,midler,nags,caplan,anyplace,haridwar,sternberg,ventilating,retreating,shopsafe,mohave,brion,immun,zapf,mingus,prolly,trichy,microform,olsson,jdc,dosimetry,smelter,rayovac,takeda,mbt,ied,dynamism,fileattachment,rabat,devs,mellor,manmade,somaliland,hashtable,sdb,conto,furtado,statics,saleh,puja,kamera,eport,killian,rucksack,janette,powerware,phenylephrine,cupcake,karp,bodum,celular,zamora,qian,dws,psig,polycystic,titts,krzysztof,parsippany,raggedy,eason,epg,bsg,payloads,alon,cebit,wedgewood,daten,pbi,annexe,cyclen,customizations,stunningly,hugger,junio,jtc,xcd,prequel,strathmore,champloo,billerica,talley,estoppel,ameritrade,torr,cytomegalovirus,bpel,domus,madigan,supercool,ysl,contaminate,rxlist,sailormoon,ubid,plovdiv,mcsweeney,govideo,bassinet,taillights,typhimurium,dez,fci,visionaries,salesmen,nicki,skagen,hibernation,ponders,rrsp,middleburg,innkeepers,mcauliffe,gardasee,pcn,asce,aromatics,interplanetary,landcare,towneplace,downloaden,discontinuing,bork,sealers,weybridge,wusthof,interbank,hullabaloo,erratum,contreras,sandwell,novgorod,earbud,jds,coastlines,echolist,guntur,lmp,trunking,foxtrot,rosanna,patchouli,inequities,testes,defaulting,alpert,securitization,nsfw,borer,originators,postid,phx,censoring,hashimoto,oriole,chipotle,slocum,ipeople,rdg,reusing,saeed,wetzel,mensa,shiner,chal,rhesus,streptomyces,datagrams,invalidated,shenanigans,mkii,sandford,lennart,pract,npi,travelguide,championed,biosolids,billable,givers,tmdls,cockroaches,testcase,faraway,cfengine,umbc,underwritten,biofuels,cyberhome,dinh,zegna,tarps,sociologists,ellesmere,ostomy,vso,sena,ingest,gazebos,sirloin,cyclophosphamide,bitdefender,catz,bpp,giancarlo,kategorie,arjan,valery,kmc,insp,recomended,dataport,pfaff,manuale,rog,niven,mahi,ghs,atsdr,rangeland,commonality,xid,midis,cwc,regrettably,navidad,yahoogroups,kaw,ston,ves,pulau,playbook,digipak,jetblue,kavanagh,exhibitionists,armidale,arquette,copland,namib,cne,cheapflights,wyvern,lucene,muffled,vincennes,inlays,lockets,whitey,brin,wharfedale,guyanese,laryngeal,outfielder,nonattainment,softimage,cellgroupdata,literatura,myoplex,yorba,bct,pva,slapstick,cottrell,dialers,subculture,cmx,modded,skids,roselle,klub,marathons,tgt,skeet,toucan,masterclass,nnp,calcio,oxidizing,alo,kennebec,zj,intergalactic,biomolecular,cii,powweb,mcwilliams,phosphorous,photocopiers,obligor,matcher,listbox,voigt,fdl,dawley,scribus,lessors,npn,luminaries,karats,bridger,slm,hadronic,fairport,piecewise,recharging,dmm,unionville,intermedia,goetz,urinal,joystiq,grosso,sobaka,payphone,rockfish,duodenal,uninstalled,leiter,coworker,escuela,cyclades,longterm,taber,screenplays,gpt,shiites,ntop,farcry,jitsu,lactobacillus,uniontown,cloner,otaku,hoyas,kandahar,kerrville,akers,neuropsychology,multimap,allston,femininity,trask,accuweather,deferment,wam,fmp,portlets,glsa,westmont,waders,cellulare,homehome,frogger,hass,rya,seqres,hellfire,havering,montfort,chokes,eharmony,knowsley,bordellchat,cvsweb,houdini,umr,canarias,babyshambles,bridgette,cinque,drezner,hsin,alcan,stas,outlier,naira,neverending,masson,khanna,systeme,hillsong,camshaft,exotica,milburn,bijou,destdir,innervation,gga,oqo,cunha,reefer,techspot,hibernia,alpina,iarc,constraining,nym,dard,estefan,fuser,lepton,pergamon,wiktionary,razer,poznan,netscreen,manda,npv,xmb,kingstown,topix,batsman,wavelets,cogs,bi
gtitsroundasses,barnhart,scofield,ebrd,desorption,bellflower,watertight,stevia,photocopier,haverford,talc,penises,gwendolyn,buynow,nairn,prolab,lundberg,backordered,coh,mononuclear,unocal,brunson,greenlee,emer,txdot,prichard,conferees,renata,ternary,footballer,sisyphus,directfb,foolproof,chastain,lakshmi,dsb,megane,cdo,someones,rebelde,morrigan,mymovies,tiananmen,immunosuppressive,mcveigh,stylin,brower,mpltext,aibo,pdd,depositor,ofcourse,ecdl,redenvelope,acidophilus,deci,defensively,analytica,cnd,hrp,tnr,tryon,forgo,barca,pahrump,foros,pickabook,hellraiser,lithographs,educates,ediets,gopal,signers,digext,netbackup,dimensionality,triax,rnase,aman,angell,bochum,eyepieces,earbuds,americablog,makeovers,unprocessed,pfa,widctlpar,clausen,punbb,centra,monson,infogrames,azt,xalan,hydroxyl,medpix,interacted,gpi,polishes,canoga,numismatic,avoidable,brantley,adenoma,aah,prostaglandins,powercolor,beaconsfield,lakhs,mhd,lesbisch,flammability,truancy,jharkhand,channelweb,givn,flatiron,midlife,guerin,indianola,unavailability,rooter,wanaka,lompoc,widener,cll,kmail,websense,vmi,residencies,cablevision,pye,disrupts,onetime,kenzie,gating,boingboing,sevier,eberhard,chek,edr,kharagpur,fotze,cvp,deflated,infestations,judgmental,meiji,antipsychotic,uwm,infn,slaughterhouse,stix,asg,bagging,brainwashing,dmp,disconnecting,thera,mclellan,rong,telcos,wilmer,sphincter,orgys,newsom,infill,fairhaven,etude,stereotyping,talib,dreamstime,rearranging,geographies,tipp,programmatically,handicapper,plantar,ogaming,xss,academie,quarrying,approachable,sweetener,braised,knut,tibco,fseek,vided,burk,spigot,skilling,hunterdon,nailer,roxette,hepatocytes,coupes,universitet,mauricio,lov,hnd,roseburg,berlusconi,chloroplast,charing,kansai,buzzword,nepad,pistachio,arv,lanvin,riverbank,lilypond,predominately,metalware,saugus,nmac,giza,lancs,culpepper,rohm,pretzel,warping,twc,raitt,iyer,connotations,iiia,wilber,yardstick,neutrophil,supernatant,solu,segmental,multitudes,imperium,radley,supercharger,imagen,thicknesses,brk,spew,vestibular,klausner,riba,witten,orth,calaveras,naep,deceleration,bcn,consignee,aldehyde,pronged,baring,jacked,bigalow,gyd,centerfolds,ortofon,cropland,wnt,nazism,kingswood,operationally,trix,testicle,rioja,bhi,technolo,lindstrom,pinter,minox,wofford,guaifenesin,hup,bifida,stratigraphic,dundalk,snipers,kshirsagar,ridgecrest,placerville,gosport,sjc,ircd,rubrics,kerouac,ebx,harken,foc,cooperated,nwo,cano,kearny,shopinfo,tlb,etp,obie,greaves,versity,amoco,inzest,msdos,gabby,dumbbells,ncaaf,ximage,homotopy,ironwood,adiabatic,pend,licznik,cck,sabian,saxton,patties,hopkinton,biotherm,ethno,videochat,cantwell,accelerometer,filip,whl,productio,milli,pdi,bedava,penobscot,grav,llcs,fmr,pimsleur,micky,setcl,johnathan,alisha,gambier,enterta,crosley,usace,byrds,sgm,darrel,isola,laminator,krazy,diaryland,bhubaneshwar,quadrature,summerland,alessandra,gsn,dentry,catskills,tablecloths,herder,gec,cinematical,outfall,unzipped,plcc,osb,interchangeably,concurs,wef,deformations,farting,nonspecific,mek,ohhh,atopic,harker,culling,limon,murata,zealot,arca,jmc,toot,rino,sisley,iveco,gooey,bielefeld,parrott,veillard,lisinopril,nprm,tookie,shanti,burkett,wemon,turmeric,carnelian,zea,geom,dorman,hmac,abstracting,parietal,glyphosate,underpants,appleseed,mandating,prequalification,macross,kondo,muzi,bidet,grubb,redif,oam,domenici,transdermal,abramson,recreating,snot,ductile,dimensionless,carex,contractually,kippur,fibroids,courtyards,calderon,dogster,flattening,sterilized,pkcs,unformatted,cvr,insulate,afd,tuolumne,cobblestone,showplace,stockpiles,mandi
r,autore,ashish,meijer,camberley,babson,fiennes,meteorologist,colonoscopy,lofi,tryp,duromine,alkaloids,quesnel,ake,initrd,centrality,pisses,campaigned,twinning,imag,taster,greenlight,musicbrainz,sourdough,warrantless,mzm,croat,arbors,canwest,homedics,anydvd,jnr,odm,dnn,ashtrays,punters,dropper,sarkar,szabo,wack,ecx,fette,axl,yoy,spyro,kendo,surinam,suze,xenophobia,krypton,heisenberg,dvcam,nary,ninn,csis,reconfigurable,smil,courchevel,kittie,lipman,doz,bsl,chucky,schlampe,webdev,doubleclick,bushman,pornofilm,ood,conexant,hydroxylase,rme,multipass,woodwinds,telefoon,ricotta,motorways,gandhinagar,nsg,edelweiss,frampton,humidor,vacationing,naturalizer,dinesh,techassist,airdrie,schiphol,bruner,tangy,cfe,gurnee,bogdan,farina,gant,cokin,tricity,cutaway,artsy,severability,transferor,cliches,nosferatu,indycar,klimt,onetouch,dooney,oconee,smartbargains,prl,sackville,camberwell,hotlines,hazelton,nlg,reaffirms,anleitung,webalizer,libboost,golds,pfs,imei,corante,recipesource,ranching,seguin,calderdale,anzeige,toothpick,volser,westcoast,forwarders,aab,likable,ashburton,natrol,sonstiges,shoestring,vsx,hosa,brads,winsite,whirling,doghouse,displaytime,bda,ranitidine,elit,grebe,standup,playgirl,flexion,ibex,geomagnetic,lowestoft,blobs,footers,reiss,lewistown,droppings,designator,causative,brt,woolrich,gwasanaethau,keefe,tfp,loveseat,diethylpropion,karyn,handedly,uncontested,fov,doxorubicin,nerja,cardiologists,militarily,fsus,inflating,sputnik,barometric,joburg,assertequals,gladwell,regrowth,lusaka,lampwork,adultos,cybersex,banca,doughnut,martz,cribbage,mela,rondo,tigr,personel,wcpo,activ,uiconstraints,typescript,inetd,scuola,piste,pppd,enos,ondemand,altamont,steubenville,rur,danielson,barfly,vegetarianism,extractors,dictaphone,callsign,martinis,envisions,flexibly,nakd,natwest,wilsons,ccn,reposition,msci,orginal,hobbyists,anat,fleshbot,weta,sindh,pcf,glick,obsoletes,mammogram,sani,webcasting,soggy,apha,ecologist,ararat,narrowband,bph,webstore,maus,reinstalling,gendered,relateddiagram,kingsland,ssid,rackets,litigants,shimon,ducted,ebsq,crisps,modelle,wristwatches,xenadrine,linac,identifications,dressy,authenticator,arash,cristobal,stewie,depositories,pcre,setpoint,rockdale,evita,ballmer,hemphill,taormina,plath,pickers,boardgamegeek,serbo,oci,noviembre,mappoint,surn,minisd,madmums,mosher,digitallife,grahame,forecasters,linoleum,shearling,stockster,firstcall,dorint,wmc,culverts,cuticle,codebase,rdfs,lter,pimples,hdb,shorted,loghi,spunky,razz,komatsu,bietet,madisonville,readies,jovenes,deuterium,totalitarianism,trigonometric,selmer,popcap,verbosity,aashto,pavarotti,syncing,vanden,majeure,beret,fallbrook,audiovideo,muay,longshot,rollaway,yor,nonstandard,tbr,manoa,laundries,whoo,tefal,tothe,crv,amx,falign,goleta,holst,ebola,redbook,rangel,consolidates,disaggregated,chromatographic,supersport,golly,flumotion,seagrass,congratulates,anais,grievant,reinstalled,entreprises,clemons,eurovision,airplus,panchkula,shahid,phospholipids,elsinore,opendocument,ankeny,canzoni,wakeman,moana,wobbly,seagulls,megawatts,denning,temas,illuminator,marylebone,symbolically,erotico,linx,randle,nhu,unsubstantiated,centroid,monogrammed,gambian,tailgating,colville,vpu,russische,sgp,soccernet,zing,downunder,snips,allawi,lockup,cholinergic,lhr,barthelemy,babymint,benning,implantable,ligo,haddad,univariate,katia,motorcross,sangha,shn,myfonts,usuarios,caml,resiliency,barossa,astrobiology,disinfectants,kawai,uktv,dreamtime,berkshires,inhumane,trobe,unlocks,auctex,pogues,panicked,developerworks,bullitt,toed,smartcard,kushner,hardcoresex,crump,gunde
rson,paramus,cepr,lma,politica,randomization,rinsing,reschedule,tob,hostal,preempt,resold,cyclo,phosphor,frontenac,wipeout,mambots,unscented,ipfw,ergonomically,roosters,homologues,loring,ionosphere,belvidere,trotsky,airworthiness,sistemas,devsource,retroviral,llnl,keyloggers,amgen,marci,willey,yau,groucho,foreshore,gusset,dissapointed,dtds,mibs,metalwork,refering,punting,triphasil,scab,bhavnagar,creedence,musee,wellstone,lleol,gpib,tidbit,allyson,teriyaki,impoundment,interrelationships,gres,coffeecup,maru,joon,josephus,ulong,maputo,chev,krispy,dogtown,abernathy,raz,fermion,weltweit,fluor,bergstrom,inoperable,esrc,asdf,gollum,ceus,macintyre,srd,cyclonic,cft,unsubscribing,shawna,pinyin,ipac,ramone,fethiye,multipath,hakusho,tein,treeview,atd,wonderswan,eugenics,dustjacket,emmanuelle,dlocaledir,molotov,sandpaper,hbc,fannin,interscope,eba,melayu,hardiness,liss,phew,furuno,moynihan,johnsons,heng,dro,carbonated,waives,wraparound,jfs,ejackulation,reboots,headliner,sqr,bustin,powernetworker,vul,superposition,supremes,insite,fanzine,laney,purportedly,antigenic,rurouni,dietetics,assembles,veracruz,hausfrauen,wsf,benzo,vietcong,chairwoman,petrochemicals,pata,cntr,nettime,techies,bentyxxo,xango,radish,gatto,checkmate,gantt,valli,tuv,starlets,plavix,roomba,aficionado,motivator,bijan,riv,storrs,tabula,reigate,emmons,sandstorm,laci,taoist,nameplate,axp,wcb,mothering,billard,chrysanthemum,reconstructions,innodb,sunspot,aisha,fluorine,healdsburg,retype,fishin,likud,cyberread,pme,rothwell,kmf,creationist,wth,setlist,scrollbars,bocelli,zuckerman,vtd,ampicillin,arcy,wasn,cowbell,rater,everson,angebot,cezanne,tamagotchi,earpiece,franca,thymidine,disa,gearlog,tranche,volum,prsp,openvpn,mcentire,londra,kaur,unconstrained,datadirect,souter,redfern,tulum,nyy,pagesize,osteopathy,stavanger,cated,autry,fip,rooftops,findpage,discourages,benitez,boater,shackleton,weirdo,congresswoman,dalek,tass,itrip,myob,helloween,reperfusion,fieldhouse,manukau,libname,eucharistic,mong,homeware,ckt,winmx,mobic,farts,rourke,lackawanna,villiers,comercio,huy,brooksville,falwell,gwb,donwload,wrth,attrs,knockoffs,esm,bionicle,hygienist,nichole,quidditch,dartmoor,rowlett,stapled,gardenweb,butternut,nummer,groban,asw,arora,yatsura,warr,hainan,esg,logoff,cockroach,xanadu,computable,occup,playgroup,tintin,ethnicities,webposition,crafter,roby,disassemble,boltzmann,caos,abidjan,anise,grainy,hospitalizations,notizie,zoek,sepultura,walkabout,pepperoni,optimising,cityreview,boathouse,katt,weissman,siri,herkimer,namecite,refreshingly,aph,ryland,sculptural,neurophysiology,gsk,hermanus,mocldy,ngage,annexure,ipchains,yosef,tlds,gozo,pso,helton,outflows,saas,asthmatic,guillemot,realizations,linguistically,jaco,mckinsey,dezember,hylafax,reconstitution,amateurwebcam,lumberton,interviewee,intereco,portola,hematologic,sgc,rebbe,pinup,transcendence,surah,brendon,farberware,statisticians,swatches,perioperative,maoist,henkel,lilangeni,trapeze,lemmings,extents,spams,omagh,workcentre,sunbird,cellophane,deland,blevins,sacha,cardholders,dddd,accessori,qo,araujo,mylist,pcu,kloczek,enet,seperated,clusty,rolfe,cuttack,provantage,dominio,hyperbaric,nannofossil,logansport,bulldozer,blacksonblondes,subprime,overpayments,sharpie,modutils,whitehaven,whaley,currier,taproot,topsite,delorme,rayner,aio,rossum,urbanism,colloquia,ewr,capillaries,mountainside,menthol,blackouts,starkey,eves,hpux,canby,dragonflies,montrail,findfont,aigner,urusei,soundblaster,beatle,webzine,propranolol,inescapable,swabs,absorbance,lbw,audiofile,simba,mohd,redgoldfish,cornbread,jcaho,appendixes,aod,cr
estview,keynotes,fotolia,subnets,cau,espanola,busnes,froggy,decarboxylase,elfman,throughs,prioritise,oreck,schottland,bagpipe,terns,erythematosus,ftrs,excitatory,mcevoy,fujita,niagra,yq,dribble,hardwired,hosta,grambling,exten,seeger,ringgold,sondheim,interconnecting,inkjets,ebv,underpinnings,lazar,laxatives,mythos,soname,colloid,hiked,defrag,zanesville,oxidant,umbra,poppin,trebuchet,pyrite,partido,drunks,submitters,branes,mahdi,agoura,manchesteronline,blunkett,lapd,kidder,hotkey,tirupur,parkville,crediting,tmo'
| mit |
allinpaybusiness/ACS | allinpay projects/creditscorekeras/classkeras.py | 1 | 11551 | # -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
import sys
sys.path.append("allinpay projects")
from importlib import reload
import creditscore.creditscore
reload(creditscore.creditscore)
from creditscore.creditscore import CreditScore
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.model_selection import KFold
from sklearn import preprocessing
from sklearn.svm import SVC
from sklearn.model_selection import GridSearchCV
from sklearn.decomposition import PCA
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.layers import Embedding
from keras.layers import LSTM
from keras.regularizers import l2
from keras.optimizers import SGD
from keras.callbacks import EarlyStopping
class CreditScoreKeras(CreditScore):
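    """Deep-learning credit scoring models built on Keras.

    Extends CreditScore with two feed-forward DNN architectures (dnn1/dnn2)
    and train/test wrappers that apply coarse binning plus WOE transformation,
    optional PCA, and either a holdout split or k-fold cross-validation.
    """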
def dnn1_model(self, X_train, y_train, X_test, nepoch, batches):
        # Build the DNN model
model = Sequential()
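        # Architecture: three hidden layers of 64 ReLU units, each with L2(0.01)
        # weight regularization and 20% dropout; a single sigmoid output unit
        # gives the predicted default probability.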
model.add(Dense(64, input_dim=X_train.shape[1], kernel_regularizer=l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(64, kernel_regularizer=l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(64, kernel_regularizer=l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(1))
model.add(Activation('sigmoid'))
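        # Binary cross-entropy matches the sigmoid output for a two-class target.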
model.compile(loss='binary_crossentropy', optimizer='rmsprop')
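        # Stop training once validation loss fails to improve for 2 consecutive epochs.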
early_stopping = EarlyStopping(monitor='val_loss', patience=2)
        # Train the model
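        # batch_size = n_samples / batches, so `batches` is roughly the number of
        # gradient updates per epoch; 20% of the training data is held out for validation.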
        model.fit(X_train.values, y_train.values, epochs=nepoch, batch_size=int(X_train.shape[0]/batches), validation_split=0.2, callbacks=[early_stopping])
        # Predict default probabilities on the test set
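        # Note: predict_proba on Sequential returns the sigmoid activations; in
        # newer Keras versions model.predict yields the same values.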
probability = model.predict_proba(X_test.values)
return probability
def dnn2_model(self, X_train, y_train, X_test, nepoch, batches):
        # Build the DNN model
model = Sequential()
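        # Architecture: a spindle-shaped stack (32-64-128-256-128-64-32) of ReLU
        # layers that first widens and then narrows, each with L2(0.01)
        # regularization and 20% dropout, ending in a sigmoid output unit.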
model.add(Dense(32, input_dim=X_train.shape[1], kernel_regularizer=l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(64, kernel_regularizer=l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(128, kernel_regularizer=l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(256, kernel_regularizer=l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(128, kernel_regularizer=l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(64, kernel_regularizer=l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(32, kernel_regularizer=l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(1))
model.add(Activation('sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='rmsprop')
early_stopping = EarlyStopping(monitor='val_loss', patience=2)
        # Train the model
        model.fit(X_train.values, y_train.values, epochs=nepoch, batch_size=int(X_train.shape[0]/batches), validation_split=0.2, callbacks=[early_stopping])
        # Predict default probabilities on the test set
probability = model.predict_proba(X_test.values)
return probability
def keras_dnn_trainandtest(self, testsize, cv, feature_sel, varthreshold, pca, nepoch, batches, nclusters, cmethod, resmethod, deepmodel):
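        """Single train/test-split evaluation: bin + WOE-transform the features,
        optionally apply PCA, then train a DNN ('dnn1' or 'dnn2') and return a
        DataFrame of targets and predicted probabilities. Note: the cv,
        feature_sel, varthreshold and resmethod arguments are accepted but
        unused in this implementation."""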
        # Split the dataset into training and test sets
        data_feature = self.data.loc[:, self.data.columns != 'default']
data_target = self.data['default']
X_train, X_test, y_train, y_test = train_test_split(data_feature, data_target, test_size=testsize, random_state=0)
        # Coarse-bin and WOE-transform the training-set variables, then apply the same binning and WOE transformation to the test set
X_train, X_test = self.binandwoe_traintest(X_train, y_train, X_test, nclusters, cmethod)
        # Optionally apply a PCA transformation to the features
        if pca:
            pca_model = PCA(n_components=0.95, svd_solver='full')
            pca_model.fit(X_train)
            # print(pca_model.explained_variance_ratio_)
            X_train = pd.DataFrame(pca_model.transform(X_train))
            X_test = pd.DataFrame(pca_model.transform(X_test))
        # Train the selected deep model and predict
if deepmodel == 'dnn1':
probability = self.dnn1_model(X_train, y_train, X_test, nepoch, batches)
elif deepmodel == 'dnn2':
probability = self.dnn2_model(X_train, y_train, X_test, nepoch, batches)
predresult = pd.DataFrame({'target' : y_test, 'probability' : probability[:,0]})
return predresult
def keras_SVC_dnn_trainandtest(self, testsize, cv, feature_sel, varthreshold, pca, nepoch, batches, nclusters, cmethod, resmethod, deepmodel):
        # Predict default probabilities with Keras
        # Split the dataset into training and test sets
        data_feature = self.data.loc[:, self.data.columns != 'default']
data_target = self.data['default']
X_train, X_test, y_train, y_test = train_test_split(data_feature, data_target, test_size=testsize, random_state=0)
        # Coarse-bin and WOE-transform the training-set variables, then apply the same binning and WOE transformation to the test set
X_train, X_test = self.binandwoe_traintest(X_train, y_train, X_test, nclusters, cmethod)
        # Optionally apply a PCA transformation to the features
        if pca:
            pca_model = PCA(n_components=0.95, svd_solver='full')
            pca_model.fit(X_train)
            # print(pca_model.explained_variance_ratio_)
            X_train = pd.DataFrame(pca_model.transform(X_train))
            X_test = pd.DataFrame(pca_model.transform(X_test))
        # Train the selected deep model and predict
if deepmodel == 'dnn1':
probability = self.dnn1_model(X_train, y_train, X_test, nepoch, batches)
elif deepmodel == 'dnn2':
probability = self.dnn2_model(X_train, y_train, X_test, nepoch, batches)
        # Train and predict with the SVC model
tuned_parameters = [{'kernel': ['rbf'], 'gamma': [1e-1, 1e-2, 1e-3, 1e-4], 'C': [1, 10, 100, 1000]},
{'kernel': ['linear'], 'C': [1, 10, 100, 1000]},
{'kernel': ['sigmoid'], 'gamma': [1e-1, 1e-2, 1e-3, 1e-4], 'C': [1, 10, 100, 1000]}]
classifier = GridSearchCV(SVC(probability=True), tuned_parameters, cv=5)
classifier.fit(X_train, y_train)
svcpred = classifier.predict(X_test)
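        # Where the SVC predicts the default class (1), override the DNN probability with 1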
probability[svcpred==1] = 1
predresult = pd.DataFrame({'target' : y_test, 'probability' : probability[:,0]})
return predresult
def keras_dnn_trainandtest_kfold(self, nsplit, cv, feature_sel, varthreshold, pca, nepoch, batches, nclusters, cmethod, resmethod, deepmodel):
        data_feature = self.data.loc[:, self.data.columns != 'default']
data_target = self.data['default']
        # Split the dataset into k folds; for each fold, use it as the test set and the remaining data as the training set
kf = KFold(n_splits=nsplit, shuffle=True)
predresult = pd.DataFrame()
for train_index, test_index in kf.split(data_feature):
X_train, X_test = data_feature.iloc[train_index, ], data_feature.iloc[test_index, ]
y_train, y_test = data_target.iloc[train_index, ], data_target.iloc[test_index, ]
            # If random sampling leaves only one class in train or test, skip this round
if (len(y_train.unique()) == 1) or (len(y_test.unique()) == 1):
continue
            # Coarse-bin and WOE-transform the training-set variables, then apply the same binning and WOE transformation to the test set
X_train, X_test = self.binandwoe_traintest(X_train, y_train, X_test, nclusters, cmethod)
            # Optionally apply a PCA transformation to the features
            if pca:
                pca_model = PCA(n_components=0.95, svd_solver='full')
                pca_model.fit(X_train)
                # print(pca_model.explained_variance_ratio_)
                X_train = pd.DataFrame(pca_model.transform(X_train))
                X_test = pd.DataFrame(pca_model.transform(X_test))
            # Train the selected deep model and predict
if deepmodel == 'dnn1':
probability = self.dnn1_model(X_train, y_train, X_test, nepoch, batches)
elif deepmodel == 'dnn2':
probability = self.dnn2_model(X_train, y_train, X_test, nepoch, batches)
temp = pd.DataFrame({'target' : y_test, 'probability' : probability[:,0]})
predresult = pd.concat([predresult, temp], ignore_index = True)
return predresult
def keras_SVC_dnn_trainandtest_kfold(self, nsplit, cv, feature_sel, varthreshold, pca, nepoch, batches, nclusters, cmethod, resmethod, deepmodel):
        data_feature = self.data.loc[:, self.data.columns != 'default']
data_target = self.data['default']
        # Split the dataset into k folds; for each fold, use it as the test set and the remaining data as the training set
kf = KFold(n_splits=nsplit, shuffle=True)
predresult = pd.DataFrame()
for train_index, test_index in kf.split(data_feature):
X_train, X_test = data_feature.iloc[train_index, ], data_feature.iloc[test_index, ]
y_train, y_test = data_target.iloc[train_index, ], data_target.iloc[test_index, ]
            # If random sampling leaves only one class in train or test, skip this round
if (len(y_train.unique()) == 1) or (len(y_test.unique()) == 1):
continue
            # Coarse-bin and WOE-transform the training-set variables, then apply the same binning and WOE transformation to the test set
X_train, X_test = self.binandwoe_traintest(X_train, y_train, X_test, nclusters, cmethod)
            # Optionally apply a PCA transformation to the features
            if pca:
                pca_model = PCA(n_components=0.95, svd_solver='full')
                pca_model.fit(X_train)
                # print(pca_model.explained_variance_ratio_)
                X_train = pd.DataFrame(pca_model.transform(X_train))
                X_test = pd.DataFrame(pca_model.transform(X_test))
            # Train the selected deep model and predict
if deepmodel == 'dnn1':
probability = self.dnn1_model(X_train, y_train, X_test, nepoch, batches)
elif deepmodel == 'dnn2':
probability = self.dnn2_model(X_train, y_train, X_test, nepoch, batches)
            # Train and predict with the SVC model
tuned_parameters = [{'kernel': ['rbf'], 'gamma': [1e-1, 1e-2, 1e-3, 1e-4], 'C': [1, 10, 100, 1000]},
{'kernel': ['linear'], 'C': [1, 10, 100, 1000]},
{'kernel': ['sigmoid'], 'gamma': [1e-1, 1e-2, 1e-3, 1e-4], 'C': [1, 10, 100, 1000]}]
classifier = GridSearchCV(SVC(probability=True), tuned_parameters, cv=5)
classifier.fit(X_train, y_train)
svcpred = classifier.predict(X_test)
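            # Where the SVC predicts the default class (1), override the DNN probability with 1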
probability[svcpred==1] = 1
temp = pd.DataFrame({'target' : y_test, 'probability' : probability[:,0]})
predresult = pd.concat([predresult, temp], ignore_index = True)
return predresult
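# A minimal usage sketch (hypothetical: the dataset label and the constructor
# signature of the CreditScore base class are assumptions, not part of this
# module):
#   scorer = CreditScoreKeras('HMEQ')
#   predresult = scorer.keras_dnn_trainandtest(
#       testsize=0.25, cv=10, feature_sel=None, varthreshold=0, pca=False,
#       nepoch=100, batches=5, nclusters=10, cmethod='quantile',
#       resmethod=None, deepmodel='dnn1')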
| apache-2.0 |
michellemorales/OpenMM | models/lfads/lfads.py | 4 | 85893 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ==============================================================================
"""
LFADS - Latent Factor Analysis via Dynamical Systems.
LFADS is an unsupervised method to decompose time series data into
various factors, such as an initial condition, a generative
dynamical system, control inputs to that generator, and a low
dimensional description of the observed data, called the factors.
Additionally, the observations have a noise model (in this case
Poisson), so a denoised version of the observations is also created
(e.g. underlying rates of a Poisson distribution given the observed
event counts).
The main data structure being passed around is a dataset. This is a dictionary
of data dictionaries.
DATASET: The top level dictionary is simply name (string -> dictionary).
The nested dictionary is the DATA DICTIONARY, which has the following keys:
'train_data' and 'valid_data', whose values are the corresponding training
and validation data with shape
ExTxD, E - # examples, T - # time steps, D - # dimensions in data.
The data dictionary also has a few more keys:
  'train_ext_input' and 'valid_ext_input', if there are known external inputs
to the system being modeled, these take on dimensions:
ExTxI, E - # examples, T - # time steps, I = # dimensions in input.
  'alignment_matrix_cxf' - If you are using multiple days of data, it's possible
that one can align the channels (see manuscript). If so each dataset will
contain this matrix, which will be used for both the input adapter and the
output adapter for each dataset. These matrices, if provided, must be of
size [data_dim x factors] where data_dim is the number of neurons recorded
on that day, and factors is chosen and set through the '--factors' flag.
  'alignment_bias_c' - See alignment_matrix_cxf. This bias will be used as
  the offset for the alignment transformation. It will *subtract* the
  bias from the data, so PCA-style inits can align factors across sessions.
If one runs LFADS on data where the true rates are known for some trials,
(say simulated, testing data, as in the example shipped with the paper), then
one can add three more fields for plotting purposes. These are 'train_truth'
and 'valid_truth', and 'conversion_factor'. These have the same dimensions as
'train_data', and 'valid_data' but represent the underlying rates of the
observations. Finally, if one needs to convert scale for plotting the true
underlying firing rates, there is the 'conversion_factor' key.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import os
import tensorflow as tf
from distributions import LearnableDiagonalGaussian, DiagonalGaussianFromInput
from distributions import diag_gaussian_log_likelihood
from distributions import KLCost_GaussianGaussian, Poisson
from distributions import LearnableAutoRegressive1Prior
from distributions import KLCost_GaussianGaussianProcessSampled
from utils import init_linear, linear, list_t_bxn_to_tensor_bxtxn, write_data
from utils import log_sum_exp, flatten
from plot_lfads import plot_lfads
class GRU(object):
"""Gated Recurrent Unit cell (cf. http://arxiv.org/abs/1406.1078).
"""
def __init__(self, num_units, forget_bias=1.0, weight_scale=1.0,
clip_value=np.inf, collections=None):
"""Create a GRU object.
Args:
num_units: Number of units in the GRU
forget_bias (optional): Hack to help learning.
weight_scale (optional): weights are scaled by ws/sqrt(#inputs), with
ws being the weight scale.
clip_value (optional): if the recurrent values grow above this value,
clip them.
      collections (optional): List of additional collections the variables
        should belong to.
"""
self._num_units = num_units
self._forget_bias = forget_bias
self._weight_scale = weight_scale
self._clip_value = clip_value
self._collections = collections
@property
def state_size(self):
return self._num_units
@property
def output_size(self):
return self._num_units
@property
def state_multiplier(self):
return 1
def output_from_state(self, state):
"""Return the output portion of the state."""
return state
def __call__(self, inputs, state, scope=None):
"""Gated recurrent unit (GRU) function.
Args:
inputs: A 2D batch x input_dim tensor of inputs.
state: The previous state from the last time step.
scope (optional): TF variable scope for defined GRU variables.
Returns:
A tuple (state, state), where state is the newly computed state at time t.
It is returned twice to respect an interface that works for LSTMs.
"""
x = inputs
h = state
if inputs is not None:
xh = tf.concat(axis=1, values=[x, h])
else:
xh = h
with tf.variable_scope(scope or type(self).__name__): # "GRU"
with tf.variable_scope("Gates"): # Reset gate and update gate.
# We start with bias of 1.0 to not reset and not update.
r, u = tf.split(axis=1, num_or_size_splits=2, value=linear(xh,
2 * self._num_units,
alpha=self._weight_scale,
name="xh_2_ru",
collections=self._collections))
r, u = tf.sigmoid(r), tf.sigmoid(u + self._forget_bias)
with tf.variable_scope("Candidate"):
xrh = tf.concat(axis=1, values=[x, r * h])
c = tf.tanh(linear(xrh, self._num_units, name="xrh_2_c",
collections=self._collections))
new_h = u * h + (1 - u) * c
new_h = tf.clip_by_value(new_h, -self._clip_value, self._clip_value)
return new_h, new_h
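  # For reference, the update above in equation form (a restatement of the
  # code, with [a; b] denoting concatenation along the feature axis):
  #   r = sigmoid((W_ru [x; h])_r)
  #   u = sigmoid((W_ru [x; h])_u + forget_bias)
  #   c = tanh(W_c [x; r * h])
  #   h_new = clip(u * h + (1 - u) * c, -clip_value, +clip_value)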
class GenGRU(object):
"""Gated Recurrent Unit cell (cf. http://arxiv.org/abs/1406.1078).
This version is specialized for the generator, but isn't as fast, so
  we have two. Note this allows for l2 regularization on the recurrent
  weights, and also implicitly rescales the inputs (via the 1/sqrt(#inputs)
  scaling in the linear helper routine) to be large in magnitude if there
  are fewer inputs than recurrent state dimensions.
"""
def __init__(self, num_units, forget_bias=1.0,
input_weight_scale=1.0, rec_weight_scale=1.0, clip_value=np.inf,
input_collections=None, recurrent_collections=None):
"""Create a GRU object.
Args:
num_units: Number of units in the GRU
forget_bias (optional): Hack to help learning.
input_weight_scale (optional): weights are scaled ws/sqrt(#inputs), with
ws being the weight scale.
rec_weight_scale (optional): weights are scaled ws/sqrt(#inputs),
with ws being the weight scale.
clip_value (optional): if the recurrent values grow above this value,
clip them.
      input_collections (optional): List of additional collections variables
        that input->rec weights should belong to.
      recurrent_collections (optional): List of additional collections variables
        that rec->rec weights should belong to.
"""
self._num_units = num_units
self._forget_bias = forget_bias
self._input_weight_scale = input_weight_scale
self._rec_weight_scale = rec_weight_scale
self._clip_value = clip_value
self._input_collections = input_collections
self._rec_collections = recurrent_collections
@property
def state_size(self):
return self._num_units
@property
def output_size(self):
return self._num_units
@property
def state_multiplier(self):
return 1
def output_from_state(self, state):
"""Return the output portion of the state."""
return state
def __call__(self, inputs, state, scope=None):
"""Gated recurrent unit (GRU) function.
Args:
inputs: A 2D batch x input_dim tensor of inputs.
state: The previous state from the last time step.
scope (optional): TF variable scope for defined GRU variables.
Returns:
A tuple (state, state), where state is the newly computed state at time t.
It is returned twice to respect an interface that works for LSTMs.
"""
x = inputs
h = state
with tf.variable_scope(scope or type(self).__name__): # "GRU"
with tf.variable_scope("Gates"): # Reset gate and update gate.
# We start with bias of 1.0 to not reset and not update.
r_x = u_x = 0.0
if x is not None:
r_x, u_x = tf.split(axis=1, num_or_size_splits=2, value=linear(x,
2 * self._num_units,
alpha=self._input_weight_scale,
do_bias=False,
name="x_2_ru",
normalized=False,
collections=self._input_collections))
r_h, u_h = tf.split(axis=1, num_or_size_splits=2, value=linear(h,
2 * self._num_units,
do_bias=True,
alpha=self._rec_weight_scale,
name="h_2_ru",
collections=self._rec_collections))
r = r_x + r_h
u = u_x + u_h
r, u = tf.sigmoid(r), tf.sigmoid(u + self._forget_bias)
with tf.variable_scope("Candidate"):
c_x = 0.0
if x is not None:
c_x = linear(x, self._num_units, name="x_2_c", do_bias=False,
alpha=self._input_weight_scale,
normalized=False,
collections=self._input_collections)
c_rh = linear(r*h, self._num_units, name="rh_2_c", do_bias=True,
alpha=self._rec_weight_scale,
collections=self._rec_collections)
c = tf.tanh(c_x + c_rh)
new_h = u * h + (1 - u) * c
new_h = tf.clip_by_value(new_h, -self._clip_value, self._clip_value)
return new_h, new_h
class LFADS(object):
"""LFADS - Latent Factor Analysis via Dynamical Systems.
LFADS is an unsupervised method to decompose time series data into
various factors, such as an initial condition, a generative
dynamical system, inferred inputs to that generator, and a low
dimensional description of the observed data, called the factors.
  Additionally, the observations have a noise model (in this case
Poisson), so a denoised version of the observations is also created
(e.g. underlying rates of a Poisson distribution given the observed
event counts).
"""
def __init__(self, hps, kind="train", datasets=None):
"""Create an LFADS model.
train - a model for training, sampling of posteriors is used
posterior_sample_and_average - sample from the posterior, this is used
for evaluating the expected value of the outputs of LFADS, given a
specific input, by averaging over multiple samples from the approx
posterior. Also used for the lower bound on the negative
log-likelihood using IWAE error (Importance Weighed Auto-encoder).
This is the denoising operation.
prior_sample - a model for generation - sampling from priors is used
Args:
hps: The dictionary of hyper parameters.
kind: the type of model to build (see above).
datasets: a dictionary of named data_dictionaries, see top of lfads.py
"""
print("Building graph...")
all_kinds = ['train', 'posterior_sample_and_average', 'prior_sample']
assert kind in all_kinds, 'Wrong kind'
if hps.feedback_factors_or_rates == "rates":
assert len(hps.dataset_names) == 1, \
"Multiple datasets not supported for rate feedback."
num_steps = hps.num_steps
ic_dim = hps.ic_dim
co_dim = hps.co_dim
ext_input_dim = hps.ext_input_dim
cell_class = GRU
gen_cell_class = GenGRU
def makelambda(v): # Used with tf.case
return lambda: v
# Define the data placeholder, and deal with all parts of the graph
# that are dataset dependent.
self.dataName = tf.placeholder(tf.string, shape=())
# The batch_size to be inferred from data, as normal.
# Additionally, the data_dim will be inferred as well, allowing for a
# single placeholder for all datasets, regardless of data dimension.
if hps.output_dist == 'poisson':
# Enforce correct dtype
assert np.issubdtype(
datasets[hps.dataset_names[0]]['train_data'].dtype, int), \
"Data dtype must be int for poisson output distribution"
data_dtype = tf.int32
elif hps.output_dist == 'gaussian':
assert np.issubdtype(
datasets[hps.dataset_names[0]]['train_data'].dtype, float), \
"Data dtype must be float for gaussian output dsitribution"
data_dtype = tf.float32
else:
assert False, "NIY"
self.dataset_ph = dataset_ph = tf.placeholder(data_dtype,
[None, num_steps, None],
name="data")
self.train_step = tf.get_variable("global_step", [], tf.int64,
tf.zeros_initializer(),
trainable=False)
self.hps = hps
ndatasets = hps.ndatasets
factors_dim = hps.factors_dim
self.preds = preds = [None] * ndatasets
self.fns_in_fac_Ws = fns_in_fac_Ws = [None] * ndatasets
    self.fns_in_fac_bs = fns_in_fac_bs = [None] * ndatasets
self.fns_out_fac_Ws = fns_out_fac_Ws = [None] * ndatasets
self.fns_out_fac_bs = fns_out_fac_bs = [None] * ndatasets
self.datasetNames = dataset_names = hps.dataset_names
self.ext_inputs = ext_inputs = None
if len(dataset_names) == 1: # single session
if 'alignment_matrix_cxf' in datasets[dataset_names[0]].keys():
used_in_factors_dim = factors_dim
in_identity_if_poss = False
else:
used_in_factors_dim = hps.dataset_dims[dataset_names[0]]
in_identity_if_poss = True
else: # multisession
used_in_factors_dim = factors_dim
in_identity_if_poss = False
for d, name in enumerate(dataset_names):
data_dim = hps.dataset_dims[name]
in_mat_cxf = None
in_bias_1xf = None
align_bias_1xc = None
if datasets and 'alignment_matrix_cxf' in datasets[name].keys():
dataset = datasets[name]
print("Using alignment matrix provided for dataset:", name)
in_mat_cxf = dataset['alignment_matrix_cxf'].astype(np.float32)
if in_mat_cxf.shape != (data_dim, factors_dim):
raise ValueError("""Alignment matrix must have dimensions %d x %d
(data_dim x factors_dim), but currently has %d x %d."""%
(data_dim, factors_dim, in_mat_cxf.shape[0],
in_mat_cxf.shape[1]))
if datasets and 'alignment_bias_c' in datasets[name].keys():
dataset = datasets[name]
print("Using alignment bias provided for dataset:", name)
align_bias_c = dataset['alignment_bias_c'].astype(np.float32)
align_bias_1xc = np.expand_dims(align_bias_c, axis=0)
if align_bias_1xc.shape[1] != data_dim:
raise ValueError("""Alignment bias must have dimensions %d
(data_dim), but currently has %d."""%
(data_dim, in_mat_cxf.shape[0]))
if in_mat_cxf is not None and align_bias_1xc is not None:
# (data - alignment_bias) * W_in
# data * W_in - alignment_bias * W_in
# So b = -alignment_bias * W_in to accommodate PCA style offset.
in_bias_1xf = -np.dot(align_bias_1xc, in_mat_cxf)
in_fac_lin = init_linear(data_dim, used_in_factors_dim, do_bias=True,
mat_init_value=in_mat_cxf,
bias_init_value=in_bias_1xf,
identity_if_possible=in_identity_if_poss,
normalized=False, name="x_2_infac_"+name,
collections=['IO_transformations'])
in_fac_W, in_fac_b = in_fac_lin
fns_in_fac_Ws[d] = makelambda(in_fac_W)
fns_in_fac_bs[d] = makelambda(in_fac_b)
with tf.variable_scope("glm"):
out_identity_if_poss = False
if len(dataset_names) == 1 and \
factors_dim == hps.dataset_dims[dataset_names[0]]:
out_identity_if_poss = True
for d, name in enumerate(dataset_names):
data_dim = hps.dataset_dims[name]
in_mat_cxf = None
if datasets and 'alignment_matrix_cxf' in datasets[name].keys():
dataset = datasets[name]
in_mat_cxf = dataset['alignment_matrix_cxf'].astype(np.float32)
if datasets and 'alignment_bias_c' in datasets[name].keys():
dataset = datasets[name]
align_bias_c = dataset['alignment_bias_c'].astype(np.float32)
align_bias_1xc = np.expand_dims(align_bias_c, axis=0)
out_mat_fxc = None
out_bias_1xc = None
if in_mat_cxf is not None:
out_mat_fxc = np.linalg.pinv(in_mat_cxf)
if align_bias_1xc is not None:
out_bias_1xc = align_bias_1xc
if hps.output_dist == 'poisson':
out_fac_lin = init_linear(factors_dim, data_dim, do_bias=True,
mat_init_value=out_mat_fxc,
bias_init_value=out_bias_1xc,
identity_if_possible=out_identity_if_poss,
normalized=False,
name="fac_2_logrates_"+name,
collections=['IO_transformations'])
out_fac_W, out_fac_b = out_fac_lin
elif hps.output_dist == 'gaussian':
out_fac_lin_mean = \
init_linear(factors_dim, data_dim, do_bias=True,
mat_init_value=out_mat_fxc,
bias_init_value=out_bias_1xc,
normalized=False,
name="fac_2_means_"+name,
collections=['IO_transformations'])
out_fac_W_mean, out_fac_b_mean = out_fac_lin_mean
mat_init_value = np.zeros([factors_dim, data_dim]).astype(np.float32)
bias_init_value = np.ones([1, data_dim]).astype(np.float32)
out_fac_lin_logvar = \
init_linear(factors_dim, data_dim, do_bias=True,
mat_init_value=mat_init_value,
bias_init_value=bias_init_value,
normalized=False,
name="fac_2_logvars_"+name,
collections=['IO_transformations'])
out_fac_W_logvar, out_fac_b_logvar = out_fac_lin_logvar
out_fac_W = tf.concat(
axis=1, values=[out_fac_W_mean, out_fac_W_logvar])
out_fac_b = tf.concat(
axis=1, values=[out_fac_b_mean, out_fac_b_logvar])
else:
assert False, "NIY"
preds[d] = tf.equal(tf.constant(name), self.dataName)
data_dim = hps.dataset_dims[name]
fns_out_fac_Ws[d] = makelambda(out_fac_W)
fns_out_fac_bs[d] = makelambda(out_fac_b)
pf_pairs_in_fac_Ws = zip(preds, fns_in_fac_Ws)
pf_pairs_in_fac_bs = zip(preds, fns_in_fac_bs)
pf_pairs_out_fac_Ws = zip(preds, fns_out_fac_Ws)
pf_pairs_out_fac_bs = zip(preds, fns_out_fac_bs)
def _case_with_no_default(pairs):
def _default_value_fn():
with tf.control_dependencies([tf.Assert(False, ["Reached default"])]):
return tf.identity(pairs[0][1]())
return tf.case(pairs, _default_value_fn, exclusive=True)
this_in_fac_W = _case_with_no_default(pf_pairs_in_fac_Ws)
this_in_fac_b = _case_with_no_default(pf_pairs_in_fac_bs)
this_out_fac_W = _case_with_no_default(pf_pairs_out_fac_Ws)
this_out_fac_b = _case_with_no_default(pf_pairs_out_fac_bs)
# External inputs (not changing by dataset, by definition).
if hps.ext_input_dim > 0:
self.ext_input = tf.placeholder(tf.float32,
[None, num_steps, ext_input_dim],
name="ext_input")
else:
self.ext_input = None
ext_input_bxtxi = self.ext_input
self.keep_prob = keep_prob = tf.placeholder(tf.float32, [], "keep_prob")
self.batch_size = batch_size = int(hps.batch_size)
self.learning_rate = tf.Variable(float(hps.learning_rate_init),
trainable=False, name="learning_rate")
self.learning_rate_decay_op = self.learning_rate.assign(
self.learning_rate * hps.learning_rate_decay_factor)
# Dropout the data.
dataset_do_bxtxd = tf.nn.dropout(tf.to_float(dataset_ph), keep_prob)
if hps.ext_input_dim > 0:
ext_input_do_bxtxi = tf.nn.dropout(ext_input_bxtxi, keep_prob)
else:
ext_input_do_bxtxi = None
# ENCODERS
def encode_data(dataset_bxtxd, enc_cell, name, forward_or_reverse,
num_steps_to_encode):
"""Encode data for LFADS
Args:
        dataset_bxtxd: the data to encode, as a 3-tensor, with dims
          batch x time x data dims.
enc_cell: encoder cell
name: name of encoder
forward_or_reverse: string, encode in forward or reverse direction
num_steps_to_encode: number of steps to encode, 0:num_steps_to_encode
Returns:
encoded data as a list with num_steps_to_encode items, in order
"""
if forward_or_reverse == "forward":
dstr = "_fwd"
time_fwd_or_rev = range(num_steps_to_encode)
else:
dstr = "_rev"
time_fwd_or_rev = reversed(range(num_steps_to_encode))
with tf.variable_scope(name+"_enc"+dstr, reuse=False):
enc_state = tf.tile(
tf.Variable(tf.zeros([1, enc_cell.state_size]),
name=name+"_enc_t0"+dstr), tf.stack([batch_size, 1]))
enc_state.set_shape([None, enc_cell.state_size]) # tile loses shape
enc_outs = [None] * num_steps_to_encode
for i, t in enumerate(time_fwd_or_rev):
with tf.variable_scope(name+"_enc"+dstr, reuse=True if i > 0 else None):
dataset_t_bxd = dataset_bxtxd[:,t,:]
in_fac_t_bxf = tf.matmul(dataset_t_bxd, this_in_fac_W) + this_in_fac_b
in_fac_t_bxf.set_shape([None, used_in_factors_dim])
if ext_input_dim > 0 and not hps.inject_ext_input_to_gen:
ext_input_t_bxi = ext_input_do_bxtxi[:,t,:]
enc_input_t_bxfpe = tf.concat(
axis=1, values=[in_fac_t_bxf, ext_input_t_bxi])
else:
enc_input_t_bxfpe = in_fac_t_bxf
enc_out, enc_state = enc_cell(enc_input_t_bxfpe, enc_state)
enc_outs[t] = enc_out
return enc_outs
# Encode initial condition means and variances
# ([x_T, x_T-1, ... x_0] and [x_0, x_1, ... x_T] -> g0/c0)
self.ic_enc_fwd = [None] * num_steps
self.ic_enc_rev = [None] * num_steps
if ic_dim > 0:
enc_ic_cell = cell_class(hps.ic_enc_dim,
weight_scale=hps.cell_weight_scale,
clip_value=hps.cell_clip_value)
ic_enc_fwd = encode_data(dataset_do_bxtxd, enc_ic_cell,
"ic", "forward",
hps.num_steps_for_gen_ic)
ic_enc_rev = encode_data(dataset_do_bxtxd, enc_ic_cell,
"ic", "reverse",
hps.num_steps_for_gen_ic)
self.ic_enc_fwd = ic_enc_fwd
self.ic_enc_rev = ic_enc_rev
# Encoder control input means and variances, bi-directional encoding so:
# ([x_T, x_T-1, ..., x_0] and [x_0, x_1 ... x_T] -> u_t)
self.ci_enc_fwd = [None] * num_steps
self.ci_enc_rev = [None] * num_steps
if co_dim > 0:
enc_ci_cell = cell_class(hps.ci_enc_dim,
weight_scale=hps.cell_weight_scale,
clip_value=hps.cell_clip_value)
ci_enc_fwd = encode_data(dataset_do_bxtxd, enc_ci_cell,
"ci", "forward",
hps.num_steps)
if hps.do_causal_controller:
ci_enc_rev = None
else:
ci_enc_rev = encode_data(dataset_do_bxtxd, enc_ci_cell,
"ci", "reverse",
hps.num_steps)
self.ci_enc_fwd = ci_enc_fwd
self.ci_enc_rev = ci_enc_rev
# STOCHASTIC LATENT VARIABLES, priors and posteriors
# (initial conditions g0, and control inputs, u_t)
# Note that zs represent all the stochastic latent variables.
with tf.variable_scope("z", reuse=False):
self.prior_zs_g0 = None
self.posterior_zs_g0 = None
self.g0s_val = None
if ic_dim > 0:
self.prior_zs_g0 = \
LearnableDiagonalGaussian(batch_size, ic_dim, name="prior_g0",
mean_init=0.0,
var_min=hps.ic_prior_var_min,
var_init=hps.ic_prior_var_scale,
var_max=hps.ic_prior_var_max)
ic_enc = tf.concat(axis=1, values=[ic_enc_fwd[-1], ic_enc_rev[0]])
ic_enc = tf.nn.dropout(ic_enc, keep_prob)
self.posterior_zs_g0 = \
DiagonalGaussianFromInput(ic_enc, ic_dim, "ic_enc_2_post_g0",
var_min=hps.ic_post_var_min)
if kind in ["train", "posterior_sample_and_average"]:
zs_g0 = self.posterior_zs_g0
else:
zs_g0 = self.prior_zs_g0
if kind in ["train", "posterior_sample_and_average", "prior_sample"]:
self.g0s_val = zs_g0.sample
else:
self.g0s_val = zs_g0.mean
# Priors for controller, 'co' for controller output
self.prior_zs_co = prior_zs_co = [None] * num_steps
self.posterior_zs_co = posterior_zs_co = [None] * num_steps
self.zs_co = zs_co = [None] * num_steps
self.prior_zs_ar_con = None
if co_dim > 0:
# Controller outputs
autocorrelation_taus = [hps.prior_ar_atau for x in range(hps.co_dim)]
noise_variances = [hps.prior_ar_nvar for x in range(hps.co_dim)]
self.prior_zs_ar_con = prior_zs_ar_con = \
LearnableAutoRegressive1Prior(batch_size, hps.co_dim,
autocorrelation_taus,
noise_variances,
hps.do_train_prior_ar_atau,
hps.do_train_prior_ar_nvar,
num_steps, "u_prior_ar1")
# CONTROLLER -> GENERATOR -> RATES
# (u(t) -> gen(t) -> factors(t) -> rates(t) -> p(x_t|z_t) )
self.controller_outputs = u_t = [None] * num_steps
self.con_ics = con_state = None
self.con_states = con_states = [None] * num_steps
self.con_outs = con_outs = [None] * num_steps
self.gen_inputs = gen_inputs = [None] * num_steps
if co_dim > 0:
# gen_cell_class here for l2 penalty recurrent weights
# didn't split the cell_weight scale here, because I doubt it matters
con_cell = gen_cell_class(hps.con_dim,
input_weight_scale=hps.cell_weight_scale,
rec_weight_scale=hps.cell_weight_scale,
clip_value=hps.cell_clip_value,
recurrent_collections=['l2_con_reg'])
with tf.variable_scope("con", reuse=False):
self.con_ics = tf.tile(
tf.Variable(tf.zeros([1, hps.con_dim*con_cell.state_multiplier]), \
name="c0"),
tf.stack([batch_size, 1]))
self.con_ics.set_shape([None, con_cell.state_size]) # tile loses shape
con_states[-1] = self.con_ics
gen_cell = gen_cell_class(hps.gen_dim,
input_weight_scale=hps.gen_cell_input_weight_scale,
rec_weight_scale=hps.gen_cell_rec_weight_scale,
clip_value=hps.cell_clip_value,
recurrent_collections=['l2_gen_reg'])
with tf.variable_scope("gen", reuse=False):
if ic_dim == 0:
self.gen_ics = tf.tile(
tf.Variable(tf.zeros([1, gen_cell.state_size]), name="g0"),
tf.stack([batch_size, 1]))
else:
self.gen_ics = linear(self.g0s_val, gen_cell.state_size,
identity_if_possible=True,
name="g0_2_gen_ic")
self.gen_states = gen_states = [None] * num_steps
self.gen_outs = gen_outs = [None] * num_steps
gen_states[-1] = self.gen_ics
gen_outs[-1] = gen_cell.output_from_state(gen_states[-1])
self.factors = factors = [None] * num_steps
factors[-1] = linear(gen_outs[-1], factors_dim, do_bias=False,
normalized=True, name="gen_2_fac")
self.rates = rates = [None] * num_steps
# rates[-1] is collected to potentially feed back to controller
with tf.variable_scope("glm", reuse=False):
if hps.output_dist == 'poisson':
log_rates_t0 = tf.matmul(factors[-1], this_out_fac_W) + this_out_fac_b
log_rates_t0.set_shape([None, None])
rates[-1] = tf.exp(log_rates_t0) # rate
rates[-1].set_shape([None, hps.dataset_dims[hps.dataset_names[0]]])
elif hps.output_dist == 'gaussian':
mean_n_logvars = tf.matmul(factors[-1],this_out_fac_W) + this_out_fac_b
mean_n_logvars.set_shape([None, None])
means_t_bxd, logvars_t_bxd = tf.split(axis=1, num_or_size_splits=2,
value=mean_n_logvars)
rates[-1] = means_t_bxd
else:
assert False, "NIY"
    # We support multiple output distributions, for example Poisson, and also
# Gaussian. In these two cases respectively, there are one and two
# parameters (rates vs. mean and variance). So the output_dist_params
    # tensor will have variable sizes via tf.concat and tf.split, along the 1st
# dimension. So in the case of gaussian, for example, it'll be
# batch x (D+D), where each D dims is the mean, and then variances,
# respectively. For a distribution with 3 parameters, it would be
# batch x (D+D+D).
self.output_dist_params = dist_params = [None] * num_steps
self.log_p_xgz_b = log_p_xgz_b = 0.0 # log P(x|z)
for t in range(num_steps):
# Controller
if co_dim > 0:
# Build inputs for controller
tlag = t - hps.controller_input_lag
if tlag < 0:
con_in_f_t = tf.zeros_like(ci_enc_fwd[0])
else:
con_in_f_t = ci_enc_fwd[tlag]
if hps.do_causal_controller:
# If controller is causal (wrt to data generation process), then it
# cannot see future data. Thus, excluding ci_enc_rev[t] is obvious.
# Less obvious is the need to exclude factors[t-1]. This arises
# because information flows from g0 through factors to the controller
# input. The g0 encoding is backwards, so we must necessarily exclude
# the factors in order to keep the controller input purely from a
# forward encoding (however unlikely it is that
# g0->factors->controller channel might actually be used in this way).
con_in_list_t = [con_in_f_t]
else:
tlag_rev = t + hps.controller_input_lag
if tlag_rev >= num_steps:
# better than zeros
con_in_r_t = tf.zeros_like(ci_enc_rev[0])
else:
con_in_r_t = ci_enc_rev[tlag_rev]
con_in_list_t = [con_in_f_t, con_in_r_t]
if hps.do_feed_factors_to_controller:
if hps.feedback_factors_or_rates == "factors":
con_in_list_t.append(factors[t-1])
elif hps.feedback_factors_or_rates == "rates":
con_in_list_t.append(rates[t-1])
else:
assert False, "NIY"
con_in_t = tf.concat(axis=1, values=con_in_list_t)
con_in_t = tf.nn.dropout(con_in_t, keep_prob)
with tf.variable_scope("con", reuse=True if t > 0 else None):
con_outs[t], con_states[t] = con_cell(con_in_t, con_states[t-1])
posterior_zs_co[t] = \
DiagonalGaussianFromInput(con_outs[t], co_dim,
name="con_to_post_co")
if kind == "train":
u_t[t] = posterior_zs_co[t].sample
elif kind == "posterior_sample_and_average":
u_t[t] = posterior_zs_co[t].sample
else:
u_t[t] = prior_zs_ar_con.samples_t[t]
# Inputs to the generator (controller output + external input)
if ext_input_dim > 0 and hps.inject_ext_input_to_gen:
ext_input_t_bxi = ext_input_do_bxtxi[:,t,:]
if co_dim > 0:
gen_inputs[t] = tf.concat(axis=1, values=[u_t[t], ext_input_t_bxi])
else:
gen_inputs[t] = ext_input_t_bxi
else:
gen_inputs[t] = u_t[t]
# Generator
data_t_bxd = dataset_ph[:,t,:]
with tf.variable_scope("gen", reuse=True if t > 0 else None):
gen_outs[t], gen_states[t] = gen_cell(gen_inputs[t], gen_states[t-1])
gen_outs[t] = tf.nn.dropout(gen_outs[t], keep_prob)
with tf.variable_scope("gen", reuse=True): # ic defined it above
factors[t] = linear(gen_outs[t], factors_dim, do_bias=False,
normalized=True, name="gen_2_fac")
with tf.variable_scope("glm", reuse=True if t > 0 else None):
if hps.output_dist == 'poisson':
log_rates_t = tf.matmul(factors[t], this_out_fac_W) + this_out_fac_b
log_rates_t.set_shape([None, None])
rates[t] = dist_params[t] = tf.exp(log_rates_t) # rates feed back
rates[t].set_shape([None, hps.dataset_dims[hps.dataset_names[0]]])
loglikelihood_t = Poisson(log_rates_t).logp(data_t_bxd)
elif hps.output_dist == 'gaussian':
mean_n_logvars = tf.matmul(factors[t],this_out_fac_W) + this_out_fac_b
mean_n_logvars.set_shape([None, None])
means_t_bxd, logvars_t_bxd = tf.split(axis=1, num_or_size_splits=2,
value=mean_n_logvars)
rates[t] = means_t_bxd # rates feed back to controller
dist_params[t] = tf.concat(
axis=1, values=[means_t_bxd, tf.exp(logvars_t_bxd)])
loglikelihood_t = \
diag_gaussian_log_likelihood(data_t_bxd,
means_t_bxd, logvars_t_bxd)
else:
assert False, "NIY"
log_p_xgz_b += tf.reduce_sum(loglikelihood_t, [1])
# Correlation of inferred inputs cost.
self.corr_cost = tf.constant(0.0)
if hps.co_mean_corr_scale > 0.0:
all_sum_corr = []
for i in range(hps.co_dim):
for j in range(i+1, hps.co_dim):
sum_corr_ij = tf.constant(0.0)
for t in range(num_steps):
u_mean_t = posterior_zs_co[t].mean
sum_corr_ij += u_mean_t[:,i]*u_mean_t[:,j]
all_sum_corr.append(0.5 * tf.square(sum_corr_ij))
self.corr_cost = tf.reduce_mean(all_sum_corr) # div by batch and by n*(n-1)/2 pairs
# Variational Lower Bound on posterior, p(z|x), plus reconstruction cost.
# KL and reconstruction costs are normalized only by batch size, not by
# dimension, or by time steps.
kl_cost_g0_b = tf.zeros_like(batch_size, dtype=tf.float32)
kl_cost_co_b = tf.zeros_like(batch_size, dtype=tf.float32)
self.kl_cost = tf.constant(0.0) # VAE KL cost
self.recon_cost = tf.constant(0.0) # VAE reconstruction cost
self.nll_bound_vae = tf.constant(0.0)
self.nll_bound_iwae = tf.constant(0.0) # for eval with IWAE cost.
if kind in ["train", "posterior_sample_and_average"]:
kl_cost_g0_b = 0.0
kl_cost_co_b = 0.0
if ic_dim > 0:
g0_priors = [self.prior_zs_g0]
g0_posts = [self.posterior_zs_g0]
kl_cost_g0_b = KLCost_GaussianGaussian(g0_posts, g0_priors).kl_cost_b
kl_cost_g0_b = hps.kl_ic_weight * kl_cost_g0_b
if co_dim > 0:
kl_cost_co_b = \
KLCost_GaussianGaussianProcessSampled(
posterior_zs_co, prior_zs_ar_con).kl_cost_b
kl_cost_co_b = hps.kl_co_weight * kl_cost_co_b
# L = -KL + log p(x|z), to maximize bound on likelihood
# -L = KL - log p(x|z), to minimize bound on NLL
# so 'reconstruction cost' is negative log likelihood
self.recon_cost = - tf.reduce_mean(log_p_xgz_b)
self.kl_cost = tf.reduce_mean(kl_cost_g0_b + kl_cost_co_b)
lb_on_ll_b = log_p_xgz_b - kl_cost_g0_b - kl_cost_co_b
# VAE error averages outside the log
self.nll_bound_vae = -tf.reduce_mean(lb_on_ll_b)
# IWAE error averages inside the log
k = tf.cast(tf.shape(log_p_xgz_b)[0], tf.float32)
iwae_lb_on_ll = -tf.log(k) + log_sum_exp(lb_on_ll_b)
self.nll_bound_iwae = -iwae_lb_on_ll
# L2 regularization on the generator, normalized by number of parameters.
self.l2_cost = tf.constant(0.0)
if self.hps.l2_gen_scale > 0.0 or self.hps.l2_con_scale > 0.0:
l2_costs = []
l2_numels = []
l2_reg_var_lists = [tf.get_collection('l2_gen_reg'),
tf.get_collection('l2_con_reg')]
l2_reg_scales = [self.hps.l2_gen_scale, self.hps.l2_con_scale]
for l2_reg_vars, l2_scale in zip(l2_reg_var_lists, l2_reg_scales):
for v in l2_reg_vars:
numel = tf.reduce_prod(tf.concat(axis=0, values=tf.shape(v)))
numel_f = tf.cast(numel, tf.float32)
l2_numels.append(numel_f)
v_l2 = tf.reduce_sum(v*v)
l2_costs.append(0.5 * l2_scale * v_l2)
self.l2_cost = tf.add_n(l2_costs) / tf.add_n(l2_numels)
# Compute the cost for training, part of the graph regardless.
# The KL cost can be problematic at the beginning of optimization,
    # so we allow a gradual linear ramp of the KL weight from 0
    # to 1 (computed via tf.minimum below).
self.kl_decay_step = tf.maximum(self.train_step - hps.kl_start_step, 0)
self.l2_decay_step = tf.maximum(self.train_step - hps.l2_start_step, 0)
kl_decay_step_f = tf.cast(self.kl_decay_step, tf.float32)
l2_decay_step_f = tf.cast(self.l2_decay_step, tf.float32)
kl_increase_steps_f = tf.cast(hps.kl_increase_steps, tf.float32)
l2_increase_steps_f = tf.cast(hps.l2_increase_steps, tf.float32)
self.kl_weight = kl_weight = \
tf.minimum(kl_decay_step_f / kl_increase_steps_f, 1.0)
self.l2_weight = l2_weight = \
tf.minimum(l2_decay_step_f / l2_increase_steps_f, 1.0)
self.timed_kl_cost = kl_weight * self.kl_cost
self.timed_l2_cost = l2_weight * self.l2_cost
self.weight_corr_cost = hps.co_mean_corr_scale * self.corr_cost
self.cost = self.recon_cost + self.timed_kl_cost + \
self.timed_l2_cost + self.weight_corr_cost
if kind != "train":
# save every so often
self.seso_saver = tf.train.Saver(tf.global_variables(),
max_to_keep=hps.max_ckpt_to_keep)
# lowest validation error
self.lve_saver = tf.train.Saver(tf.global_variables(),
max_to_keep=hps.max_ckpt_to_keep_lve)
return
# OPTIMIZATION
if not self.hps.do_train_io_only:
self.train_vars = tvars = \
tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
scope=tf.get_variable_scope().name)
else:
self.train_vars = tvars = \
tf.get_collection('IO_transformations',
scope=tf.get_variable_scope().name)
print("done.")
print("Model Variables (to be optimized): ")
total_params = 0
for i in range(len(tvars)):
shape = tvars[i].get_shape().as_list()
print(" ", i, tvars[i].name, shape)
total_params += np.prod(shape)
print("Total model parameters: ", total_params)
grads = tf.gradients(self.cost, tvars)
grads, grad_global_norm = tf.clip_by_global_norm(grads, hps.max_grad_norm)
opt = tf.train.AdamOptimizer(self.learning_rate, beta1=0.9, beta2=0.999,
epsilon=1e-01)
self.grads = grads
self.grad_global_norm = grad_global_norm
self.train_op = opt.apply_gradients(
zip(grads, tvars), global_step=self.train_step)
self.seso_saver = tf.train.Saver(tf.global_variables(),
max_to_keep=hps.max_ckpt_to_keep)
# lowest validation error
    self.lve_saver = tf.train.Saver(tf.global_variables(),
                                    max_to_keep=hps.max_ckpt_to_keep_lve)
# SUMMARIES, used only during training.
# example summary
self.example_image = tf.placeholder(tf.float32, shape=[1,None,None,3],
name='image_tensor')
self.example_summ = tf.summary.image("LFADS example", self.example_image,
collections=["example_summaries"])
# general training summaries
self.lr_summ = tf.summary.scalar("Learning rate", self.learning_rate)
self.kl_weight_summ = tf.summary.scalar("KL weight", self.kl_weight)
self.l2_weight_summ = tf.summary.scalar("L2 weight", self.l2_weight)
self.corr_cost_summ = tf.summary.scalar("Corr cost", self.weight_corr_cost)
self.grad_global_norm_summ = tf.summary.scalar("Gradient global norm",
self.grad_global_norm)
if hps.co_dim > 0:
self.atau_summ = [None] * hps.co_dim
self.pvar_summ = [None] * hps.co_dim
for c in range(hps.co_dim):
self.atau_summ[c] = \
tf.summary.scalar("AR Autocorrelation taus " + str(c),
tf.exp(self.prior_zs_ar_con.logataus_1xu[0,c]))
self.pvar_summ[c] = \
tf.summary.scalar("AR Variances " + str(c),
tf.exp(self.prior_zs_ar_con.logpvars_1xu[0,c]))
# cost summaries, separated into different collections for
# training vs validation. We make placeholders for these, because
# even though the graph computes these costs on a per-batch basis,
# we want to report the more reliable metric of per-epoch cost.
kl_cost_ph = tf.placeholder(tf.float32, shape=[], name='kl_cost_ph')
self.kl_t_cost_summ = tf.summary.scalar("KL cost (train)", kl_cost_ph,
collections=["train_summaries"])
self.kl_v_cost_summ = tf.summary.scalar("KL cost (valid)", kl_cost_ph,
collections=["valid_summaries"])
l2_cost_ph = tf.placeholder(tf.float32, shape=[], name='l2_cost_ph')
self.l2_cost_summ = tf.summary.scalar("L2 cost", l2_cost_ph,
collections=["train_summaries"])
recon_cost_ph = tf.placeholder(tf.float32, shape=[], name='recon_cost_ph')
self.recon_t_cost_summ = tf.summary.scalar("Reconstruction cost (train)",
recon_cost_ph,
collections=["train_summaries"])
self.recon_v_cost_summ = tf.summary.scalar("Reconstruction cost (valid)",
recon_cost_ph,
collections=["valid_summaries"])
total_cost_ph = tf.placeholder(tf.float32, shape=[], name='total_cost_ph')
self.cost_t_summ = tf.summary.scalar("Total cost (train)", total_cost_ph,
collections=["train_summaries"])
self.cost_v_summ = tf.summary.scalar("Total cost (valid)", total_cost_ph,
collections=["valid_summaries"])
self.kl_cost_ph = kl_cost_ph
self.l2_cost_ph = l2_cost_ph
self.recon_cost_ph = recon_cost_ph
self.total_cost_ph = total_cost_ph
# Merged summaries, for easy coding later.
self.merged_examples = tf.summary.merge_all(key="example_summaries")
self.merged_generic = tf.summary.merge_all() # default key is 'summaries'
self.merged_train = tf.summary.merge_all(key="train_summaries")
self.merged_valid = tf.summary.merge_all(key="valid_summaries")
session = tf.get_default_session()
self.logfile = os.path.join(hps.lfads_save_dir, "lfads_log")
self.writer = tf.summary.FileWriter(self.logfile)
def build_feed_dict(self, train_name, data_bxtxd, ext_input_bxtxi=None,
keep_prob=None):
"""Build the feed dictionary, handles cases where there is no value defined.
Args:
train_name: The key into the datasets, to set the tf.case statement for
the proper readin / readout matrices.
data_bxtxd: The data tensor
ext_input_bxtxi (optional): The external input tensor
keep_prob: The drop out keep probability.
Returns:
The feed dictionary with TF tensors as keys and data as values, for use
with tf.Session.run()
"""
feed_dict = {}
B, T, _ = data_bxtxd.shape
feed_dict[self.dataName] = train_name
feed_dict[self.dataset_ph] = data_bxtxd
if self.ext_input is not None and ext_input_bxtxi is not None:
feed_dict[self.ext_input] = ext_input_bxtxi
if keep_prob is None:
feed_dict[self.keep_prob] = self.hps.keep_prob
else:
feed_dict[self.keep_prob] = keep_prob
return feed_dict
@staticmethod
def get_batch(data_extxd, ext_input_extxi=None, batch_size=None,
example_idxs=None):
"""Get a batch of data, either randomly chosen, or specified directly.
Args:
data_extxd: The data to model, numpy tensors with shape:
# examples x # time steps x # dimensions
ext_input_extxi (optional): The external inputs, numpy tensor with shape:
# examples x # time steps x # external input dimensions
batch_size: The size of the batch to return
example_idxs (optional): The example indices used to select examples.
Returns:
A tuple with two parts:
1. Batched data numpy tensor with shape:
batch_size x # time steps x # dimensions
2. Batched external input numpy tensor with shape:
batch_size x # time steps x # external input dims
"""
assert batch_size is not None or example_idxs is not None, "Problems"
E, T, D = data_extxd.shape
if example_idxs is None:
example_idxs = np.random.choice(E, batch_size)
ext_input_bxtxi = None
if ext_input_extxi is not None:
ext_input_bxtxi = ext_input_extxi[example_idxs,:,:]
return data_extxd[example_idxs,:,:], ext_input_bxtxi
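  # Example (illustrative): draw a random batch of 128 trials with no
  # external inputs:
  #   data_bxtxd, _ = LFADS.get_batch(data_extxd, batch_size=128)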
@staticmethod
def example_idxs_mod_batch_size(nexamples, batch_size):
"""Given a number of examples, E, and a batch_size, B, generate indices
[0, 1, 2, ... B-1;
[B, B+1, ... 2*B-1;
...
]
returning those indices as a 2-dim tensor shaped like E/B x B. Note that
shape is only correct if E % B == 0. If not, then an extra row is generated
so that the remainder of examples is included. The extra examples are
    drawn randomly from the full set (see randomize_example_idxs_mod_batch_size)
    for fully randomized behavior.
Args:
nexamples: The number of examples to batch up.
batch_size: The size of the batch.
Returns:
2-dim tensor as described above.
"""
bmrem = batch_size - (nexamples % batch_size)
bmrem_examples = []
if bmrem < batch_size:
#bmrem_examples = np.zeros(bmrem, dtype=np.int32)
ridxs = np.random.permutation(nexamples)[0:bmrem].astype(np.int32)
bmrem_examples = np.sort(ridxs)
    example_idxs = list(range(nexamples)) + list(bmrem_examples)
example_idxs_e_x_edivb = np.reshape(example_idxs, [-1, batch_size])
return example_idxs_e_x_edivb, bmrem
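  # Worked example (illustrative): nexamples=10, batch_size=4 gives
  # bmrem = 4 - (10 % 4) = 2, so two randomly drawn indices are appended and
  # the returned index tensor has shape 3 x 4, e.g.
  #   [[0, 1, 2, 3],
  #    [4, 5, 6, 7],
  #    [8, 9, r0, r1]]   # r0 <= r1 drawn from 0..9
  # along with bmrem = 2.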
@staticmethod
def randomize_example_idxs_mod_batch_size(nexamples, batch_size):
"""Indices 1:nexamples, randomized, in 2D form of
shape = (nexamples / batch_size) x batch_size. The remainder
is managed by drawing randomly from 1:nexamples.
Args:
nexamples: number of examples to randomize
batch_size: number of elements in batch
Returns:
      The randomized, properly shaped indices.
"""
assert nexamples > batch_size, "Problems"
bmrem = batch_size - nexamples % batch_size
bmrem_examples = []
if bmrem < batch_size:
bmrem_examples = np.random.choice(range(nexamples),
size=bmrem, replace=False)
    example_idxs = list(range(nexamples)) + list(bmrem_examples)
mixed_example_idxs = np.random.permutation(example_idxs)
example_idxs_e_x_edivb = np.reshape(mixed_example_idxs, [-1, batch_size])
return example_idxs_e_x_edivb, bmrem
def shuffle_spikes_in_time(self, data_bxtxd):
"""Shuffle the spikes in the temporal dimension. This is useful to
help the LFADS system avoid overfitting to individual spikes or fast
oscillations found in the data that are irrelevant to behavior. A
pure 'tabula rasa' approach would avoid this, but LFADS is sensitive
enough to pick up dynamics that you may not want.
Args:
data_bxtxd: numpy array of spike count data to be shuffled.
Returns:
S_bxtxd, a numpy array with the same dimensions and contents as
data_bxtxd, but shuffled appropriately.
"""
B, T, N = data_bxtxd.shape
w = self.hps.temporal_spike_jitter_width
if w == 0:
return data_bxtxd
max_counts = np.max(data_bxtxd)
S_bxtxd = np.zeros([B,T,N])
    # Intuitively, shuffle spike occurrences (0 or 1); but since we have counts,
    # do it over and over again, up to the max count.
for mc in range(1,max_counts+1):
idxs = np.nonzero(data_bxtxd >= mc)
data_ones = np.zeros_like(data_bxtxd)
data_ones[data_bxtxd >= mc] = 1
nfound = len(idxs[0])
shuffles_incrs_in_time = np.random.randint(-w, w, size=nfound)
shuffle_tidxs = idxs[1].copy()
shuffle_tidxs += shuffles_incrs_in_time
# Reflect on the boundaries to not lose mass.
shuffle_tidxs[shuffle_tidxs < 0] = -shuffle_tidxs[shuffle_tidxs < 0]
shuffle_tidxs[shuffle_tidxs > T-1] = \
(T-1)-(shuffle_tidxs[shuffle_tidxs > T-1] -(T-1))
for iii in zip(idxs[0], shuffle_tidxs, idxs[2]):
S_bxtxd[iii] += 1
return S_bxtxd
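  # Boundary reflection example (illustrative): with T=100 and w=2, a spike
  # jittered to t=-1 reflects to t=1, and one jittered to t=100 reflects to
  # t=98, so no spike mass is lost at the edges.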
def shuffle_and_flatten_datasets(self, datasets, kind='train'):
"""Since LFADS supports multiple datasets in the same dynamical model,
we have to be careful to use all the data in a single training epoch. But
    since the datasets may have different data dimensionality, we cannot batch
examples from data dictionaries together. Instead, we generate random
batches within each data dictionary, and then randomize these batches
while holding onto the dataname, so that when it's time to feed
the graph, the correct in/out matrices can be selected, per batch.
Args:
datasets: A dict of data dicts. The dataset dict is simply a
name(string)-> data dictionary mapping (See top of lfads.py).
kind: 'train' or 'valid'
Returns:
A flat list, in which each element is a pair ('name', indices).
"""
batch_size = self.hps.batch_size
ndatasets = len(datasets)
random_example_idxs = {}
epoch_idxs = {}
all_name_example_idx_pairs = []
kind_data = kind + '_data'
for name, data_dict in datasets.items():
nexamples, ntime, data_dim = data_dict[kind_data].shape
epoch_idxs[name] = 0
random_example_idxs, _ = \
self.randomize_example_idxs_mod_batch_size(nexamples, batch_size)
epoch_size = random_example_idxs.shape[0]
names = [name] * epoch_size
all_name_example_idx_pairs += zip(names, random_example_idxs)
np.random.shuffle(all_name_example_idx_pairs) # shuffle in place
return all_name_example_idx_pairs
def train_epoch(self, datasets, batch_size=None, do_save_ckpt=True):
"""Train the model through the entire dataset once.
Args:
datasets: A dict of data dicts. The dataset dict is simply a
name(string)-> data dictionary mapping (See top of lfads.py).
batch_size (optional): The batch_size to use
do_save_ckpt (optional): Should the routine save a checkpoint on this
training epoch?
Returns:
A tuple with 6 float values:
(total cost of the epoch, epoch reconstruction cost,
epoch kl cost, KL weight used this training epoch,
total l2 cost on generator, and the corresponding weight).
"""
ops_to_eval = [self.cost, self.recon_cost,
self.kl_cost, self.kl_weight,
self.l2_cost, self.l2_weight,
self.train_op]
collected_op_values = self.run_epoch(datasets, ops_to_eval, kind="train")
total_cost = total_recon_cost = total_kl_cost = 0.0
# normalizing by batch done in distributions.py
epoch_size = len(collected_op_values)
for op_values in collected_op_values:
total_cost += op_values[0]
total_recon_cost += op_values[1]
total_kl_cost += op_values[2]
kl_weight = collected_op_values[-1][3]
l2_cost = collected_op_values[-1][4]
l2_weight = collected_op_values[-1][5]
epoch_total_cost = total_cost / epoch_size
epoch_recon_cost = total_recon_cost / epoch_size
epoch_kl_cost = total_kl_cost / epoch_size
if do_save_ckpt:
session = tf.get_default_session()
checkpoint_path = os.path.join(self.hps.lfads_save_dir,
self.hps.checkpoint_name + '.ckpt')
self.seso_saver.save(session, checkpoint_path,
global_step=self.train_step)
return epoch_total_cost, epoch_recon_cost, epoch_kl_cost, \
kl_weight, l2_cost, l2_weight
def run_epoch(self, datasets, ops_to_eval, kind="train", batch_size=None,
do_collect=True, keep_prob=None):
"""Run the model through the entire dataset once.
Args:
datasets: A dict of data dicts. The dataset dict is simply a
name(string)-> data dictionary mapping (See top of lfads.py).
ops_to_eval: A list of tensorflow operations that will be evaluated in
the tf.session.run() call.
batch_size (optional): The batch_size to use
do_collect (optional): Should the routine collect all session.run
output as a list, and return it?
keep_prob (optional): The dropout keep probability.
Returns:
A list of lists, the internal list is the return for the ops for each
session.run() call. The outer list collects over the epoch.
"""
hps = self.hps
all_name_example_idx_pairs = \
self.shuffle_and_flatten_datasets(datasets, kind)
kind_data = kind + '_data'
kind_ext_input = kind + '_ext_input'
total_cost = total_recon_cost = total_kl_cost = 0.0
session = tf.get_default_session()
epoch_size = len(all_name_example_idx_pairs)
evaled_ops_list = []
for name, example_idxs in all_name_example_idx_pairs:
data_dict = datasets[name]
data_extxd = data_dict[kind_data]
if hps.output_dist == 'poisson' and hps.temporal_spike_jitter_width > 0:
data_extxd = self.shuffle_spikes_in_time(data_extxd)
ext_input_extxi = data_dict[kind_ext_input]
data_bxtxd, ext_input_bxtxi = self.get_batch(data_extxd, ext_input_extxi,
example_idxs=example_idxs)
feed_dict = self.build_feed_dict(name, data_bxtxd, ext_input_bxtxi,
keep_prob=keep_prob)
evaled_ops_np = session.run(ops_to_eval, feed_dict=feed_dict)
if do_collect:
evaled_ops_list.append(evaled_ops_np)
return evaled_ops_list
def summarize_all(self, datasets, summary_values):
"""Plot and summarize stuff in tensorboard.
Note that everything done in the current function is otherwise done on
a single, randomly selected dataset (except for summary_values, which are
passed in.)
Args:
datasets, the dictionary of datasets used in the study.
summary_values: These summary values are created from the training loop,
and so summarize the entire set of datasets.
"""
hps = self.hps
tr_kl_cost = summary_values['tr_kl_cost']
tr_recon_cost = summary_values['tr_recon_cost']
tr_total_cost = summary_values['tr_total_cost']
kl_weight = summary_values['kl_weight']
l2_weight = summary_values['l2_weight']
l2_cost = summary_values['l2_cost']
has_any_valid_set = summary_values['has_any_valid_set']
i = summary_values['nepochs']
session = tf.get_default_session()
train_summ, train_step = session.run([self.merged_train,
self.train_step],
feed_dict={self.l2_cost_ph:l2_cost,
self.kl_cost_ph:tr_kl_cost,
self.recon_cost_ph:tr_recon_cost,
self.total_cost_ph:tr_total_cost})
self.writer.add_summary(train_summ, train_step)
if has_any_valid_set:
ev_kl_cost = summary_values['ev_kl_cost']
ev_recon_cost = summary_values['ev_recon_cost']
ev_total_cost = summary_values['ev_total_cost']
eval_summ = session.run(self.merged_valid,
feed_dict={self.kl_cost_ph:ev_kl_cost,
self.recon_cost_ph:ev_recon_cost,
self.total_cost_ph:ev_total_cost})
self.writer.add_summary(eval_summ, train_step)
print("Epoch:%d, step:%d (TRAIN, VALID): total: %.2f, %.2f\
recon: %.2f, %.2f, kl: %.2f, %.2f, l2: %.5f,\
kl weight: %.2f, l2 weight: %.2f" % \
(i, train_step, tr_total_cost, ev_total_cost,
tr_recon_cost, ev_recon_cost, tr_kl_cost, ev_kl_cost,
l2_cost, kl_weight, l2_weight))
csv_outstr = "epoch,%d, step,%d, total,%.2f,%.2f, \
recon,%.2f,%.2f, kl,%.2f,%.2f, l2,%.5f, \
klweight,%.2f, l2weight,%.2f\n"% \
(i, train_step, tr_total_cost, ev_total_cost,
tr_recon_cost, ev_recon_cost, tr_kl_cost, ev_kl_cost,
l2_cost, kl_weight, l2_weight)
else:
print("Epoch:%d, step:%d TRAIN: total: %.2f recon: %.2f, kl: %.2f,\
l2: %.5f, kl weight: %.2f, l2 weight: %.2f" % \
(i, train_step, tr_total_cost, tr_recon_cost, tr_kl_cost,
l2_cost, kl_weight, l2_weight))
csv_outstr = "epoch,%d, step,%d, total,%.2f, recon,%.2f, kl,%.2f, \
l2,%.5f, klweight,%.2f, l2weight,%.2f\n"% \
(i, train_step, tr_total_cost, tr_recon_cost,
tr_kl_cost, l2_cost, kl_weight, l2_weight)
if self.hps.csv_log:
csv_file = os.path.join(self.hps.lfads_save_dir, self.hps.csv_log+'.csv')
with open(csv_file, "a") as myfile:
myfile.write(csv_outstr)
def plot_single_example(self, datasets):
"""Plot an image relating to a randomly chosen, specific example. We use
    posterior sample and average: take one example, fill a whole
    batch with that example, sample from the posterior, and then average the
quantities.
"""
hps = self.hps
    all_data_names = list(datasets.keys())
data_name = np.random.permutation(all_data_names)[0]
data_dict = datasets[data_name]
has_valid_set = True if data_dict['valid_data'] is not None else False
cf = 1.0 # plotting concern
# posterior sample and average here
E, _, _ = data_dict['train_data'].shape
eidx = np.random.choice(E)
example_idxs = eidx * np.ones(hps.batch_size, dtype=np.int32)
train_data_bxtxd, train_ext_input_bxtxi = \
self.get_batch(data_dict['train_data'], data_dict['train_ext_input'],
example_idxs=example_idxs)
truth_train_data_bxtxd = None
if 'train_truth' in data_dict and data_dict['train_truth'] is not None:
truth_train_data_bxtxd, _ = self.get_batch(data_dict['train_truth'],
example_idxs=example_idxs)
cf = data_dict['conversion_factor']
# plotter does averaging
train_model_values = self.eval_model_runs_batch(data_name,
train_data_bxtxd,
train_ext_input_bxtxi,
do_average_batch=False)
train_step = train_model_values['train_steps']
feed_dict = self.build_feed_dict(data_name, train_data_bxtxd,
train_ext_input_bxtxi, keep_prob=1.0)
session = tf.get_default_session()
generic_summ = session.run(self.merged_generic, feed_dict=feed_dict)
self.writer.add_summary(generic_summ, train_step)
valid_data_bxtxd = valid_model_values = valid_ext_input_bxtxi = None
truth_valid_data_bxtxd = None
if has_valid_set:
E, _, _ = data_dict['valid_data'].shape
eidx = np.random.choice(E)
example_idxs = eidx * np.ones(hps.batch_size, dtype=np.int32)
valid_data_bxtxd, valid_ext_input_bxtxi = \
self.get_batch(data_dict['valid_data'],
data_dict['valid_ext_input'],
example_idxs=example_idxs)
if 'valid_truth' in data_dict and data_dict['valid_truth'] is not None:
truth_valid_data_bxtxd, _ = self.get_batch(data_dict['valid_truth'],
example_idxs=example_idxs)
else:
truth_valid_data_bxtxd = None
# plotter does averaging
valid_model_values = self.eval_model_runs_batch(data_name,
valid_data_bxtxd,
valid_ext_input_bxtxi,
do_average_batch=False)
example_image = plot_lfads(train_bxtxd=train_data_bxtxd,
train_model_vals=train_model_values,
train_ext_input_bxtxi=train_ext_input_bxtxi,
train_truth_bxtxd=truth_train_data_bxtxd,
valid_bxtxd=valid_data_bxtxd,
valid_model_vals=valid_model_values,
valid_ext_input_bxtxi=valid_ext_input_bxtxi,
valid_truth_bxtxd=truth_valid_data_bxtxd,
bidx=None, cf=cf, output_dist=hps.output_dist)
example_image = np.expand_dims(example_image, axis=0)
example_summ = session.run(self.merged_examples,
feed_dict={self.example_image : example_image})
self.writer.add_summary(example_summ)
def train_model(self, datasets):
"""Train the model, print per-epoch information, and save checkpoints.
Loop over training epochs. The function that actually does the
training is train_epoch. This function iterates over the training
data, one epoch at a time. The learning rate schedule is such
that it will stay the same until the cost goes up in comparison to
the last few values, then it will drop.
Args:
datasets: A dict of data dicts. The dataset dict is simply a
name(string)-> data dictionary mapping (See top of lfads.py).
"""
hps = self.hps
has_any_valid_set = False
for data_dict in datasets.values():
if data_dict['valid_data'] is not None:
has_any_valid_set = True
break
session = tf.get_default_session()
lr = session.run(self.learning_rate)
lr_stop = hps.learning_rate_stop
i = -1
train_costs = []
valid_costs = []
ev_total_cost = ev_recon_cost = ev_kl_cost = 0.0
lowest_ev_cost = np.Inf
while True:
i += 1
do_save_ckpt = i % 10 == 0
tr_total_cost, tr_recon_cost, tr_kl_cost, kl_weight, l2_cost, l2_weight = \
self.train_epoch(datasets, do_save_ckpt=do_save_ckpt)
# Evaluate the validation cost, and potentially save. Note that this
# routine will not save a validation checkpoint until the kl weight and
# l2 weights are equal to 1.0.
if has_any_valid_set:
ev_total_cost, ev_recon_cost, ev_kl_cost = \
self.eval_cost_epoch(datasets, kind='valid')
valid_costs.append(ev_total_cost)
# n_lve is the window size for the running average of validation cost.
# A window > 1 may give more consistent results, but not the actual lowest
# validation error (LVE); == 1 tracks the lowest LVE seen so far.
n_lve = 1
run_avg_lve = np.mean(valid_costs[-n_lve:])
# Conditions for saving a checkpoint:
#   the KL weight must have finished stepping (>= 1.0), AND
#   the L2 weight must have finished stepping OR L2 is not being used, AND
#   enough epochs have elapsed to fill the averaging window
#   (len(valid_costs) > n_lve), AND
#   the running average LVE is lower than any previously seen.
if kl_weight >= 1.0 and \
(l2_weight >= 1.0 or \
(self.hps.l2_gen_scale == 0.0 and self.hps.l2_con_scale == 0.0)) \
and (len(valid_costs) > n_lve and run_avg_lve < lowest_ev_cost):
lowest_ev_cost = run_avg_lve
checkpoint_path = os.path.join(self.hps.lfads_save_dir,
self.hps.checkpoint_name + '_lve.ckpt')
self.lve_saver.save(session, checkpoint_path,
global_step=self.train_step,
latest_filename='checkpoint_lve')
# Plot and summarize.
values = {'nepochs':i, 'has_any_valid_set': has_any_valid_set,
'tr_total_cost':tr_total_cost, 'ev_total_cost':ev_total_cost,
'tr_recon_cost':tr_recon_cost, 'ev_recon_cost':ev_recon_cost,
'tr_kl_cost':tr_kl_cost, 'ev_kl_cost':ev_kl_cost,
'l2_weight':l2_weight, 'kl_weight':kl_weight,
'l2_cost':l2_cost}
self.summarize_all(datasets, values)
self.plot_single_example(datasets)
# Manage learning rate.
train_res = tr_total_cost
n_lr = hps.learning_rate_n_to_compare
if len(train_costs) > n_lr and train_res > np.max(train_costs[-n_lr:]):
_ = session.run(self.learning_rate_decay_op)
lr = session.run(self.learning_rate)
print(" Decreasing learning rate to %f." % lr)
# Force the system to run n_lr times while at this lr.
train_costs.append(np.inf)
else:
train_costs.append(train_res)
if lr < lr_stop:
print("Stopping optimization based on learning rate criteria.")
break
def eval_cost_epoch(self, datasets, kind='train', ext_input_extxi=None,
batch_size=None):
"""Evaluate the cost of the epoch.
Args:
data_dict: The dictionary of data (training and validation) used for
training and evaluation of the model, respectively.
Returns:
a 3 tuple of costs:
(epoch total cost, epoch reconstruction cost, epoch KL cost)
"""
ops_to_eval = [self.cost, self.recon_cost, self.kl_cost]
collected_op_values = self.run_epoch(datasets, ops_to_eval, kind=kind,
keep_prob=1.0)
total_cost = total_recon_cost = total_kl_cost = 0.0
# normalizing by batch done in distributions.py
epoch_size = len(collected_op_values)
for op_values in collected_op_values:
total_cost += op_values[0]
total_recon_cost += op_values[1]
total_kl_cost += op_values[2]
epoch_total_cost = total_cost / epoch_size
epoch_recon_cost = total_recon_cost / epoch_size
epoch_kl_cost = total_kl_cost / epoch_size
return epoch_total_cost, epoch_recon_cost, epoch_kl_cost
def eval_model_runs_batch(self, data_name, data_bxtxd, ext_input_bxtxi=None,
do_eval_cost=False, do_average_batch=False):
"""Returns all the goodies for the entire model, per batch.
Args:
data_name: The name of the data dict, to select which in/out matrices
to use.
data_bxtxd: Numpy array training data with shape:
batch_size x # time steps x # dimensions
ext_input_bxtxi: Numpy array training external input with shape:
batch_size x # time steps x # external input dims
do_eval_cost (optional): If true, evaluate the IWAE (Importance Weighted
Autoencoder) log-likelihood bound instead of the VAE version.
do_average_batch (optional): average over the batch, useful for getting
good IWAE costs, and model outputs for a single data point.
Returns:
A dictionary with the outputs of the model decoder, namely:
prior g0 mean, prior g0 variance, approx. posterior mean, approx.
posterior variance, the generator initial conditions, the control inputs (if
enabled), the state of the generator, the factors, and the rates.
"""
session = tf.get_default_session()
feed_dict = self.build_feed_dict(data_name, data_bxtxd,
ext_input_bxtxi, keep_prob=1.0)
# Non-temporal signals will be batch x dim.
# Temporal signals are list length T with elements batch x dim.
tf_vals = [self.gen_ics, self.gen_states, self.factors,
self.output_dist_params]
tf_vals.append(self.cost)
tf_vals.append(self.nll_bound_vae)
tf_vals.append(self.nll_bound_iwae)
tf_vals.append(self.train_step) # not train_op!
if self.hps.ic_dim > 0:
tf_vals += [self.prior_zs_g0.mean, self.prior_zs_g0.logvar,
self.posterior_zs_g0.mean, self.posterior_zs_g0.logvar]
if self.hps.co_dim > 0:
tf_vals.append(self.controller_outputs)
tf_vals_flat, fidxs = flatten(tf_vals)
np_vals_flat = session.run(tf_vals_flat, feed_dict=feed_dict)
ff = 0
gen_ics = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
gen_states = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
factors = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
out_dist_params = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
costs = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
nll_bound_vaes = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
nll_bound_iwaes = [np_vals_flat[f] for f in fidxs[ff]]; ff +=1
train_steps = [np_vals_flat[f] for f in fidxs[ff]]; ff +=1
if self.hps.ic_dim > 0:
prior_g0_mean = [np_vals_flat[f] for f in fidxs[ff]]; ff +=1
prior_g0_logvar = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
post_g0_mean = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
post_g0_logvar = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
if self.hps.co_dim > 0:
controller_outputs = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
# [0] are to take out the non-temporal items from lists
gen_ics = gen_ics[0]
costs = costs[0]
nll_bound_vaes = nll_bound_vaes[0]
nll_bound_iwaes = nll_bound_iwaes[0]
train_steps = train_steps[0]
# Convert to full tensors, not lists of tensors in time dim.
gen_states = list_t_bxn_to_tensor_bxtxn(gen_states)
factors = list_t_bxn_to_tensor_bxtxn(factors)
out_dist_params = list_t_bxn_to_tensor_bxtxn(out_dist_params)
if self.hps.ic_dim > 0:
prior_g0_mean = prior_g0_mean[0]
prior_g0_logvar = prior_g0_logvar[0]
post_g0_mean = post_g0_mean[0]
post_g0_logvar = post_g0_logvar[0]
if self.hps.co_dim > 0:
controller_outputs = list_t_bxn_to_tensor_bxtxn(controller_outputs)
if do_average_batch:
gen_ics = np.mean(gen_ics, axis=0)
gen_states = np.mean(gen_states, axis=0)
factors = np.mean(factors, axis=0)
out_dist_params = np.mean(out_dist_params, axis=0)
if self.hps.ic_dim > 0:
prior_g0_mean = np.mean(prior_g0_mean, axis=0)
prior_g0_logvar = np.mean(prior_g0_logvar, axis=0)
post_g0_mean = np.mean(post_g0_mean, axis=0)
post_g0_logvar = np.mean(post_g0_logvar, axis=0)
if self.hps.co_dim > 0:
controller_outputs = np.mean(controller_outputs, axis=0)
model_vals = {}
model_vals['gen_ics'] = gen_ics
model_vals['gen_states'] = gen_states
model_vals['factors'] = factors
model_vals['output_dist_params'] = out_dist_params
model_vals['costs'] = costs
model_vals['nll_bound_vaes'] = nll_bound_vaes
model_vals['nll_bound_iwaes'] = nll_bound_iwaes
model_vals['train_steps'] = train_steps
if self.hps.ic_dim > 0:
model_vals['prior_g0_mean'] = prior_g0_mean
model_vals['prior_g0_logvar'] = prior_g0_logvar
model_vals['post_g0_mean'] = post_g0_mean
model_vals['post_g0_logvar'] = post_g0_logvar
if self.hps.co_dim > 0:
model_vals['controller_outputs'] = controller_outputs
return model_vals
def eval_model_runs_avg_epoch(self, data_name, data_extxd,
ext_input_extxi=None):
"""Returns all the expected value for goodies for the entire model.
The expected value is taken over hidden (z) variables, namely the initial
conditions and the control inputs. The expected value is approximate, and
accomplished via sampling (batch_size) samples for every examples.
Args:
data_name: The name of the data dict, to select which in/out matrices
to use.
data_extxd: Numpy array training data with shape:
# examples x # time steps x # dimensions
ext_input_extxi (optional): Numpy array training external input with
shape: # examples x # time steps x # external input dims
Returns:
A dictionary with the averaged outputs of the model decoder, namely:
prior g0 mean, prior g0 variance, approx. posterior mean, approx.
posterior variance, the generator initial conditions, the control inputs (if
enabled), the state of the generator, the factors, and the output
distribution parameters (e.g. rates, or means and variances).
"""
hps = self.hps
batch_size = hps.batch_size
E, T, D = data_extxd.shape
E_to_process = hps.ps_nexamples_to_process
if E_to_process > E:
print("Setting number of posterior samples to process to : ", E)
E_to_process = E
if hps.ic_dim > 0:
prior_g0_mean = np.zeros([E_to_process, hps.ic_dim])
prior_g0_logvar = np.zeros([E_to_process, hps.ic_dim])
post_g0_mean = np.zeros([E_to_process, hps.ic_dim])
post_g0_logvar = np.zeros([E_to_process, hps.ic_dim])
if hps.co_dim > 0:
controller_outputs = np.zeros([E_to_process, T, hps.co_dim])
gen_ics = np.zeros([E_to_process, hps.gen_dim])
gen_states = np.zeros([E_to_process, T, hps.gen_dim])
factors = np.zeros([E_to_process, T, hps.factors_dim])
if hps.output_dist == 'poisson':
out_dist_params = np.zeros([E_to_process, T, D])
elif hps.output_dist == 'gaussian':
out_dist_params = np.zeros([E_to_process, T, D+D])
else:
assert False, "NIY"
costs = np.zeros(E_to_process)
nll_bound_vaes = np.zeros(E_to_process)
nll_bound_iwaes = np.zeros(E_to_process)
train_steps = np.zeros(E_to_process)
for es_idx in range(E_to_process):
print("Running %d of %d." % (es_idx+1, E_to_process))
example_idxs = es_idx * np.ones(batch_size, dtype=np.int32)
data_bxtxd, ext_input_bxtxi = self.get_batch(data_extxd,
ext_input_extxi,
batch_size=batch_size,
example_idxs=example_idxs)
model_values = self.eval_model_runs_batch(data_name, data_bxtxd,
ext_input_bxtxi,
do_eval_cost=True,
do_average_batch=True)
if self.hps.ic_dim > 0:
prior_g0_mean[es_idx,:] = model_values['prior_g0_mean']
prior_g0_logvar[es_idx,:] = model_values['prior_g0_logvar']
post_g0_mean[es_idx,:] = model_values['post_g0_mean']
post_g0_logvar[es_idx,:] = model_values['post_g0_logvar']
gen_ics[es_idx,:] = model_values['gen_ics']
if self.hps.co_dim > 0:
controller_outputs[es_idx,:,:] = model_values['controller_outputs']
gen_states[es_idx,:,:] = model_values['gen_states']
factors[es_idx,:,:] = model_values['factors']
out_dist_params[es_idx,:,:] = model_values['output_dist_params']
costs[es_idx] = model_values['costs']
nll_bound_vaes[es_idx] = model_values['nll_bound_vaes']
nll_bound_iwaes[es_idx] = model_values['nll_bound_iwaes']
train_steps[es_idx] = model_values['train_steps']
print('bound nll(vae): %.3f, bound nll(iwae): %.3f' \
% (nll_bound_vaes[es_idx], nll_bound_iwaes[es_idx]))
model_runs = {}
if self.hps.ic_dim > 0:
model_runs['prior_g0_mean'] = prior_g0_mean
model_runs['prior_g0_logvar'] = prior_g0_logvar
model_runs['post_g0_mean'] = post_g0_mean
model_runs['post_g0_logvar'] = post_g0_logvar
model_runs['gen_ics'] = gen_ics
if self.hps.co_dim > 0:
model_runs['controller_outputs'] = controller_outputs
model_runs['gen_states'] = gen_states
model_runs['factors'] = factors
model_runs['output_dist_params'] = out_dist_params
model_runs['costs'] = costs
model_runs['nll_bound_vaes'] = nll_bound_vaes
model_runs['nll_bound_iwaes'] = nll_bound_iwaes
model_runs['train_steps'] = train_steps
return model_runs
def write_model_runs(self, datasets, output_fname=None):
"""Run the model on the data in data_dict, and save the computed values.
LFADS generates a number of outputs for each example, and these are all
saved. They are:
The mean and variance of the prior of g0.
The mean and variance of approximate posterior of g0.
The control inputs (if enabled)
The initial conditions, g0, for all examples.
The generator states for all time.
The factors for all time.
The output distribution parameters (e.g. rates) for all time.
Args:
datasets: a dictionary of named data_dictionaries, see top of lfads.py
output_fname: a file name stem for the output files.
"""
hps = self.hps
kind = hps.kind
for data_name, data_dict in datasets.items():
data_tuple = [('train', data_dict['train_data'],
data_dict['train_ext_input']),
('valid', data_dict['valid_data'],
data_dict['valid_ext_input'])]
for data_kind, data_extxd, ext_input_extxi in data_tuple:
if not output_fname:
fname = "model_runs_" + data_name + '_' + data_kind + '_' + kind
else:
fname = output_fname + data_name + '_' + data_kind + '_' + kind
print("Writing data for %s data and kind %s." % (data_name, data_kind))
model_runs = self.eval_model_runs_avg_epoch(data_name, data_extxd,
ext_input_extxi)
full_fname = os.path.join(hps.lfads_save_dir, fname)
write_data(full_fname, model_runs, compression='gzip')
print("Done.")
def write_model_samples(self, dataset_name, output_fname=None):
"""Use the prior distribution to generate batch_size number of samples
from the model.
LFADS generates a number of outputs for each sample, and these are all
saved. They are:
The mean and variance of the prior of g0.
The control inputs (if enabled)
The initial conditions, g0, for all examples.
The generator states for all time.
The factors for all time.
The output distribution parameters (e.g. rates) for all time.
Args:
dataset_name: The name of the dataset to grab the factors -> rates
alignment matrices from.
output_fname: The name of the file in which to save the generated
samples.
"""
hps = self.hps
batch_size = hps.batch_size
print("Generating %d samples" % (batch_size))
tf_vals = [self.factors, self.gen_states, self.gen_ics,
self.cost, self.output_dist_params]
if hps.ic_dim > 0:
tf_vals += [self.prior_zs_g0.mean, self.prior_zs_g0.logvar]
if hps.co_dim > 0:
tf_vals += [self.prior_zs_ar_con.samples_t]
tf_vals_flat, fidxs = flatten(tf_vals)
session = tf.get_default_session()
feed_dict = {}
feed_dict[self.dataName] = dataset_name
feed_dict[self.keep_prob] = 1.0
np_vals_flat = session.run(tf_vals_flat, feed_dict=feed_dict)
ff = 0
factors = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
gen_states = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
gen_ics = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
costs = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
output_dist_params = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
if hps.ic_dim > 0:
prior_g0_mean = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
prior_g0_logvar = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
if hps.co_dim > 0:
prior_zs_ar_con = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
# [0] are to take out the non-temporal items from lists
gen_ics = gen_ics[0]
costs = costs[0]
# Convert to full tensors, not lists of tensors in time dim.
gen_states = list_t_bxn_to_tensor_bxtxn(gen_states)
factors = list_t_bxn_to_tensor_bxtxn(factors)
output_dist_params = list_t_bxn_to_tensor_bxtxn(output_dist_params)
if hps.ic_dim > 0:
prior_g0_mean = prior_g0_mean[0]
prior_g0_logvar = prior_g0_logvar[0]
if hps.co_dim > 0:
prior_zs_ar_con = list_t_bxn_to_tensor_bxtxn(prior_zs_ar_con)
model_vals = {}
model_vals['gen_ics'] = gen_ics
model_vals['gen_states'] = gen_states
model_vals['factors'] = factors
model_vals['output_dist_params'] = output_dist_params
model_vals['costs'] = costs.reshape(1)
if hps.ic_dim > 0:
model_vals['prior_g0_mean'] = prior_g0_mean
model_vals['prior_g0_logvar'] = prior_g0_logvar
if hps.co_dim > 0:
model_vals['prior_zs_ar_con'] = prior_zs_ar_con
full_fname = os.path.join(hps.lfads_save_dir, output_fname)
write_data(full_fname, model_vals, compression='gzip')
print("Done.")
@staticmethod
def eval_model_parameters(use_nested=True, include_strs=None):
"""Evaluate and return all of the TF variables in the model.
Args:
use_nested (optional): If True, return values in a nested dictionary based
on variable scoping; otherwise return all variables in a flat dictionary.
include_strs (optional): A list of strings to use as a filter, to reduce the
number of variables returned. A variable name must contain at least one
string in include_strs as a sub-string in order to be returned.
Returns:
The parameters of the model. This can be in a flat
dictionary, or a nested dictionary, where the nesting is by variable
scope.
"""
all_tf_vars = tf.global_variables()
session = tf.get_default_session()
all_tf_vars_eval = session.run(all_tf_vars)
vars_dict = {}
strs = ["LFADS"]
if include_strs:
strs += include_strs
for var, var_eval in zip(all_tf_vars, all_tf_vars_eval):
if any(s in var.name for s in strs):
if not isinstance(var_eval, np.ndarray): # for H5PY
print(var.name, """ is not numpy array, saving as numpy array
with value: """, var_eval, type(var_eval))
e = np.array(var_eval)
print(e, type(e))
else:
e = var_eval
vars_dict[var.name] = e
if not use_nested:
return vars_dict
var_names = vars_dict.keys()
nested_vars_dict = {}
current_dict = nested_vars_dict
for var_name in var_names:
var_split_name_list = var_name.split('/')
split_name_list_len = len(var_split_name_list)
current_dict = nested_vars_dict
for p, part in enumerate(var_split_name_list):
if p < split_name_list_len - 1:
if part in current_dict:
current_dict = current_dict[part]
else:
current_dict[part] = {}
current_dict = current_dict[part]
else:
current_dict[part] = vars_dict[var_name]
return nested_vars_dict
@staticmethod
def spikify_rates(rates_bxtxd):
"""Randomly spikify underlying rates according a Poisson distribution
Args:
rates_bxtxd: a numpy tensor with shape:
Returns:
A numpy array with the same shape as rates_bxtxd, but with the event
counts.
"""
B,T,N = rates_bxtxd.shape
assert all([B > 0, N > 0]), "problems"
# Because the rates are changing, there is nesting
spikes_bxtxd = np.zeros([B,T,N], dtype=np.int32)
for b in range(B):
for t in range(T):
for n in range(N):
rate = rates_bxtxd[b,t,n]
count = np.random.poisson(rate)
spikes_bxtxd[b,t,n] = count
return spikes_bxtxd
| gpl-2.0 |
heli522/scikit-learn | examples/bicluster/plot_spectral_coclustering.py | 274 | 1736 | """
==============================================
A demo of the Spectral Co-Clustering algorithm
==============================================
This example demonstrates how to generate a dataset and bicluster it
using the Spectral Co-Clustering algorithm.
The dataset is generated using the ``make_biclusters`` function, which
creates a matrix of small values and implants biclusters with large
values. The rows and columns are then shuffled and passed to the
Spectral Co-Clustering algorithm. Rearranging the shuffled matrix to
make biclusters contiguous shows how accurately the algorithm found
the biclusters.
"""
print(__doc__)
# Author: Kemal Eren <kemal@kemaleren.com>
# License: BSD 3 clause
import numpy as np
from matplotlib import pyplot as plt
from sklearn.datasets import make_biclusters
from sklearn.datasets import samples_generator as sg
from sklearn.cluster.bicluster import SpectralCoclustering
from sklearn.metrics import consensus_score
data, rows, columns = make_biclusters(
shape=(300, 300), n_clusters=5, noise=5,
shuffle=False, random_state=0)
plt.matshow(data, cmap=plt.cm.Blues)
plt.title("Original dataset")
data, row_idx, col_idx = sg._shuffle(data, random_state=0)
plt.matshow(data, cmap=plt.cm.Blues)
plt.title("Shuffled dataset")
model = SpectralCoclustering(n_clusters=5, random_state=0)
model.fit(data)
score = consensus_score(model.biclusters_,
(rows[:, row_idx], columns[:, col_idx]))
print("consensus score: {:.3f}".format(score))
fit_data = data[np.argsort(model.row_labels_)]
fit_data = fit_data[:, np.argsort(model.column_labels_)]
plt.matshow(fit_data, cmap=plt.cm.Blues)
plt.title("After biclustering; rearranged to show biclusters")
plt.show()
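# Note: consensus_score ranges from 0 to 1; a score of 1.0 means the found
# biclusters match the planted ones exactly.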
| bsd-3-clause |
margulies/topography | sandbox/macaque/clustering_embedding_macaque.py | 2 | 1476 | #!/usr/bin/python
import sys, os, h5py, scipy, numpy as np
from sklearn.utils.arpack import eigsh
from sklearn.cluster import KMeans
from scipy.io.matlab import savemat
def main(argv):
# Set defaults:
n_components_embedding = 25
comp_min = 2
comp_max = 20 + 1
varname = 'data'
filename = './test'
# Import files
f = h5py.File(('%s.mat' % filename),'r')
dataCorr = np.array(f.get('%s' % varname))
# Prep matrix
K = (dataCorr + 1) / 2.
v = np.sqrt(np.sum(K, axis=1))
A = K/(v[:, None] * v[None, :])
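# This is the symmetric normalization K_ij / sqrt(d_i * d_j) used in
# diffusion-map / spectral embeddings, so A acts as a normalized affinity
# whose leading eigenvectors (computed below) give the embedding.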
del K
A = np.squeeze(A * [A > 0])
# Run embedding
lambdas, vectors = eigsh(A, k=n_components_embedding)
lambdas = lambdas[::-1]
vectors = vectors[:, ::-1]
psi = vectors/vectors[:, 0][:, None]
lambdas = lambdas[1:] / (1 - lambdas[1:])
embedding = psi[:, 1:(n_components_embedding + 1)] * lambdas[:n_components_embedding][None, :]
# Run kmeans clustering
def kmeans(embedding, n_components):
est = KMeans(n_clusters=n_components, n_jobs=-1, init='k-means++', n_init=300)
est.fit_transform(embedding)
labels = est.labels_
data = labels.astype(np.float)
return data
results = list()
for n_components in xrange(comp_min,comp_max):
results.append(kmeans(embedding, n_components))
savemat(('%s_results.mat' % filename), {'results':results})
if __name__ == "__main__":
main(sys.argv[1:])
| mit |
likelyzhao/mxnet | example/image-classification/fine-tune.py | 13 | 2426 | import os
import argparse
import logging
logging.basicConfig(level=logging.DEBUG)
from common import find_mxnet
from common import data, fit, modelzoo
import mxnet as mx
def get_fine_tune_model(symbol, arg_params, num_classes, layer_name):
"""
symbol: the pre-trained network symbol
arg_params: the argument parameters of the pre-trained model
num_classes: the number of classes for the fine-tune datasets
layer_name: the layer name before the last fully-connected layer
"""
all_layers = symbol.get_internals()
net = all_layers[layer_name+'_output']
net = mx.symbol.FullyConnected(data=net, num_hidden=num_classes, name='fc')
net = mx.symbol.SoftmaxOutput(data=net, name='softmax')
new_args = dict({k:arg_params[k] for k in arg_params if 'fc' not in k})
return (net, new_args)
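# Hypothetical usage sketch (names assumed): swap in a 10-way classifier
# on top of a pre-trained backbone:
#   new_sym, new_args = get_fine_tune_model(sym, arg_params, 10, 'flatten0')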
if __name__ == "__main__":
# parse args
parser = argparse.ArgumentParser(description="fine-tune a dataset",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
train = fit.add_fit_args(parser)
data.add_data_args(parser)
aug = data.add_data_aug_args(parser)
parser.add_argument('--pretrained-model', type=str,
help='the pre-trained model')
parser.add_argument('--layer-before-fullc', type=str, default='flatten0',
help='the name of the layer before the last fullc layer')
# use less augmentations for fine-tune
data.set_data_aug_level(parser, 1)
# use a small learning rate and less regularizations
parser.set_defaults(image_shape='3,224,224', num_epochs=30,
lr=.01, lr_step_epochs='20', wd=0, mom=0)
args = parser.parse_args()
# load pretrained model
dir_path = os.path.dirname(os.path.realpath(__file__))
(prefix, epoch) = modelzoo.download_model(
args.pretrained_model, os.path.join(dir_path, 'model'))
if prefix is None:
(prefix, epoch) = (args.pretrained_model, args.load_epoch)
sym, arg_params, aux_params = mx.model.load_checkpoint(prefix, epoch)
# remove the last fullc layer
(new_sym, new_args) = get_fine_tune_model(
sym, arg_params, args.num_classes, args.layer_before_fullc)
# train
fit.fit(args = args,
network = new_sym,
data_loader = data.get_rec_iter,
arg_params = new_args,
aux_params = aux_params)
| apache-2.0 |
DonBeo/scikit-learn | sklearn/datasets/tests/test_20news.py | 42 | 2416 | """Test the 20news downloader, if the data is available."""
import numpy as np
import scipy.sparse as sp
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import SkipTest
from sklearn import datasets
def test_20news():
try:
data = datasets.fetch_20newsgroups(
subset='all', download_if_missing=False, shuffle=False)
except IOError:
raise SkipTest("Download 20 newsgroups to run this test")
# Extract a reduced dataset
data2cats = datasets.fetch_20newsgroups(
subset='all', categories=data.target_names[-1:-3:-1], shuffle=False)
# Check that the ordering of the target_names is the same
# as the ordering in the full dataset
assert_equal(data2cats.target_names,
data.target_names[-2:])
# Assert that we have only 0 and 1 as labels
assert_equal(np.unique(data2cats.target).tolist(), [0, 1])
# Check that the number of filenames is consistent with data/target
assert_equal(len(data2cats.filenames), len(data2cats.target))
assert_equal(len(data2cats.filenames), len(data2cats.data))
# Check that the first entry of the reduced dataset corresponds to
# the first entry of the corresponding category in the full dataset
entry1 = data2cats.data[0]
category = data2cats.target_names[data2cats.target[0]]
label = data.target_names.index(category)
entry2 = data.data[np.where(data.target == label)[0][0]]
assert_equal(entry1, entry2)
def test_20news_vectorized():
# This test is slow.
raise SkipTest("Test too slow.")
bunch = datasets.fetch_20newsgroups_vectorized(subset="train")
assert_true(sp.isspmatrix_csr(bunch.data))
assert_equal(bunch.data.shape, (11314, 107428))
assert_equal(bunch.target.shape[0], 11314)
assert_equal(bunch.data.dtype, np.float64)
bunch = datasets.fetch_20newsgroups_vectorized(subset="test")
assert_true(sp.isspmatrix_csr(bunch.data))
assert_equal(bunch.data.shape, (7532, 107428))
assert_equal(bunch.target.shape[0], 7532)
assert_equal(bunch.data.dtype, np.float64)
bunch = datasets.fetch_20newsgroups_vectorized(subset="all")
assert_true(sp.isspmatrix_csr(bunch.data))
assert_equal(bunch.data.shape, (11314 + 7532, 107428))
assert_equal(bunch.target.shape[0], 11314 + 7532)
assert_equal(bunch.data.dtype, np.float64)
| bsd-3-clause |
iofun/colony | colony/system/kmeans.py | 1 | 19928 | # -*- coding: utf-8 -*-
'''
Colony k-means clustering system logic.
'''
# This file is part of colony.
__author__ = 'Team Machine'
__doc__ = '''
#k-means algorithm
k-means clustering is a method of vector quantization,
popular for cluster analysis in data mining.
k-means clustering aims to partition n observations
into k clusters in which each observation belongs to
the cluster with the nearest mean, serving as a prototype
of the cluster.
This results in a partitioning of the data space into Voronoi cells.
'''
# data wrangler
from sklearn.feature_extraction import DictVectorizer
# dimensionality reduction
from sklearn.decomposition import TruncatedSVD
# text data wrangling stuff
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
# scikit-learn parallel tools
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import Normalizer
# scikit-learn k-means clusters
from sklearn.cluster import KMeans, MiniBatchKMeans
# scikit-learn metrics
from sklearn import metrics
# seconds since the epoch
from time import time
import numpy as np
# standard python tools
import arrow
import motor
import msgpack
import uuid
# this colony code base!
from colony.messages import kmeans
from colony.messages import Feature, Forecast, Metric
from colony.messages import Unit  # used by new_kmeans_unit below; assumed to live with the other message types
from colony.messages import Resource, ClusterResource
from colony.tools import clean_structure
from colony.tools import clean_result
# tornado old gen
from tornado import gen
class Prediction(object):
'''
KMeans cluster prediction
'''
@gen.engine
def new_prediction(self, dataset, callback):
'''
# new prediction dataset
'''
try:
message = Forecast(dataset)
message.validate()
except Exception, e:
callback(None, e)
return
forecast = clean_structure(message)
result = yield gen.Task(self.db.predictions.insert, forecast)
_, error = result.args
if error:
callback(None, error)
return
callback(forecast.get('uuid'), None)
@gen.engine
def get_prediction(self, model_uuid, prediction_uuid, callback):
'''
Get a specific k-means prediction.
'''
model_type = 'k-means'
try:
message = yield motor.Op(
self.db.predictions.find_one, {
'uuid':prediction_uuid,
'model_type': model_type
},{'_id':0}
)
if message:
message = kmeans.Cluster(message)
message.validate()
except Exception, e:
callback(None, e)
return
callback(message, None)
@gen.engine
def get_predictions(self, model_uuid, page_num, callback):
'''
Get k-means predictions.
'''
model_type = 'k-means'
page_num = int(page_num)
page_size = self.settings['page_size']
result = []
query = self.db.predictions.find({'active':True},{'_id':0})
query = query.sort([
('uuid', -1)
]).skip(page_num * page_size).limit(page_size)
try:
for record in (yield motor.Op(query.to_list)):
result.append(kmeans.Cluster(record))
struct = {'result': result}
message = kmeans.ClusterResult(struct)
message.validate()
except Exception, e:
callback(None, e)
return
message = clean_result(message)
callback(message, None)
class Trainer(object):
'''
Cluster trainer
'''
@gen.engine
def get_feature(self, feature_uuid, callback):
'''
'''
pass
@gen.engine
def gen_features(self, feature_uuid, callback):
'''
'''
pass
@gen.engine
def new_feature_set(self, dataset, callback):
'''
# new feature dataset
'''
try:
features = Feature(dataset)
features.validate()
except Exception, e:
callback(None, e)
return
features = clean_structure(features)
message = yield gen.Task(self.db.features.insert, features)
message, error = message.args
if error:
callback(None, error)
return
message = {
'uuid': features.get('uuid'),
'features': np.asarray(features.get('dimensions'))
}
callback(message, None)
@gen.engine
def train_new_cluster(self, features, centroids, callback):
'''
Do the actual clustering.
'''
number_seeds = (centroids if centroids else self.settings['number_seeds'])
minibatch = self.settings['minibatch']
max_iter = self.settings['max_iter']
number_init = self.settings['number_init']
max_no_improvement = self.settings['max_no_improvement']
batch_size = self.settings['batch_size']
verbose = self.settings['verbose']
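# MiniBatchKMeans fits on small random batches, trading a little cluster
# quality for much lower time and memory than full-batch KMeans.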
try:
if minibatch:
km = MiniBatchKMeans(
n_clusters=number_seeds,
init='k-means++',
max_iter=max_iter,
max_no_improvement=max_no_improvement,
n_init=number_init,
batch_size=batch_size,
verbose=verbose
)
else:
km = KMeans(
n_clusters=number_seeds,
init='k-means++',
max_iter=max_iter,
n_init=number_init,
verbose=verbose
)
start = time()
km.fit(features)
cluster_time = time() - start
message = {
'cluster':km,
'cluster_time': cluster_time
}
except Exception, e:
callback(None, e)
return
callback(message, None)
class Wrangler(object):
'''
Data Wrangler
'''
@gen.engine
def dict_feature_extraction(self, dataset, callback):
'''
# dictionary feature extraction.
This transformer turns lists of mappings (dict-like objects)
of feature names to feature values into Numpy arrays or
scipy.sparse matrices for use with scikit-learn estimators.
'''
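# e.g. DictVectorizer(sparse=False).fit_transform(
#     [{'city': 'SF', 'temp': 18.0}, {'city': 'NY', 'temp': 12.0}])
# one-hot encodes 'city' and passes 'temp' through as a numeric column.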
use_sparse = self.settings['use_sparse']
start = time()
if use_sparse:
print("Extracting features from the training dataset using sparse vectorizer")
vectorizer = DictVectorizer(sparse=use_sparse)
else:
print("Extracting features from the training dataset")
vectorizer = DictVectorizer(sparse=False)
features = vectorizer.fit_transform(dataset)
print("done in %fs" % (time() - start))
print("n_samples: %d, n_features: %d" % features.shape)
return features
@gen.engine
def text_feature_extraction(self, callback):
'''
# text feature extraction.
'''
use_hashing = self.settings['use_hashing']
use_idf = self.settings['use_idf']
n_features = self.settings['n_features']
print('''Extracting features from the training \
dataset using a sparse vectorizer''')
# NOTE: this timer is started but its value is never reported below.
start = time()
if use_hashing:
if use_idf:
# Perform an IDF normalization
# on the output of HashingVectorizer
hasher = HashingVectorizer(
n_features=n_features,
stop_words='english',
non_negative=True,
norm=None,
binary=False
)
vectorizer = Pipeline((
('hasher', hasher),
('tf_idf', TfidfTransformer())
))
else:
vectorizer = HashingVectorizer(
n_features=n_features,
stop_words='english',
non_negative=False,
norm='l2',
binary=False
)
else:
vectorizer = TfidfVectorizer(
max_df=0.5,
max_features=n_features,
stop_words='english',
use_idf=use_idf
)
return vectorizer
@gen.engine
def dimensionality_reduction(self, feature, callback):
'''
Performing dimensionality reduction using LSA
'''
n_components = self.settings['n_components']
if n_components:
print("Performing dimensionality reduction using LSA")
start = time()
# Vectorizer results are normalized,
# which makes KMeans behave as spherical k-means for better results.
lsa = TruncatedSVD(n_components)
feature = lsa.fit_transform(feature)
# Since LSA/SVD results are not normalized,
# we have to redo the normalization.
feature = Normalizer(copy=False).fit_transform(feature)
print("done in %fs" % (time() - start))
return feature
class Cluster(object):
'''
k-means cluster logic
'''
# NOTE: these properties assume underscore-prefixed backing attributes
# (e.g. self._n_clusters) set elsewhere; returning self.<name> directly
# would recurse forever.
@property
def n_clusters(self):
'''
The number of clusters to form as well
as the number of centroids to generate.
'''
return self._n_clusters
@property
def max_iter(self):
'''
Maximum number of iterations
of the k-means algorithm for a single run.
'''
return self._max_iter
@property
def n_init(self):
'''
Number of times the k-means algorithm will be run
with different centroid seeds. The final results
will be the best output of n_init consecutive runs
in terms of inertia.
'''
return self._n_init
@property
def init(self):
'''
Method for initialization, defaults to
'k-means++', 'random' or an ndarray.
'''
return self._init
@property
def precompute_distances(self):
'''
Precompute distances (faster but takes more memory).
'''
return self._precompute_distances
@property
def tol(self):
'''
Relative tolerance w.r.t. inertia to declare convergence
float, optional default: 1e-4
'''
return self._tol
@property
def n_jobs(self):
'''
The number of jobs to use for the computation.
This works by breaking down the pairwise matrix
into n_jobs even slices and computing them in parallel.
'''
return self._n_jobs
@property
def random_state(self):
'''
The generator used to initialize the centers.
If an integer is given, it fixes the seed.
Defaults to the global numpy random number generator.
'''
return self._random_state
@gen.engine
def new_model(self, struct, callback):
'''
Create a new cluster model.
'''
try:
cluster = kmeans.Cluster(struct)
cluster.validate()
except Exception, e:
callback(None, e)
return
cluster = clean_structure(cluster)
message = yield gen.Task(self.db.models.insert, cluster)
message, error = message.args
if error:
callback(None, error)
return
# push message to the right channel
callback(cluster.get('uuid'), None)
@gen.engine
def delete_model(self, model_uuid, callback):
'''
Delete k-means model
'''
try:
result = yield motor.Op( self.db.models.remove,
{'uuid':model_uuid} )
except Exception, e:
callback(None, e)
return
callback(result, None)
@gen.engine
def replace_model(self, struct, model_uuid, callback):
'''
Replace k-means model
'''
try:
cluster = kmeans.Cluster(struct)
cluster.validate()
except Exception, e:
callback(None, e)
return
cluster = clean_structure(cluster)
message = yield gen.Task( self.db.models.update,
{'uuid': model_uuid},
cluster )
message, error = message.args
# missing crash_and_die
if error:
callback(None, error)
return
if not message.get('updatedExisting'):
error = {'uuid': model_uuid, 'replaced': False}
callback(None, error)
return
# push message to the right channel
callback(model_uuid, None)
@gen.engine
def get_model(self, model_type, model_uuid, callback):
'''
Get specific k-means model.
'''
if not model_type:
model_type = 'k-means'
try:
message = yield motor.Op(
self.db.models.find_one, {
'uuid':model_uuid,
'model_type': model_type
},{'_id':0}
)
if message:
message = kmeans.Cluster(message)
message.validate()
except Exception, e:
callback(None, e)
return
callback(message, None)
@gen.engine
def get_models(self, model_type, page_num, callback):
'''
Get k-means models.
'''
model_type = 'k-means'
page_num = int(page_num)
page_size = self.settings['page_size']
result = []
query = self.db.models.find({'active':True},{'_id':0})
query = query.sort([
('uuid', -1)
]).skip(page_num * page_size).limit(page_size)
try:
for record in (yield motor.Op(query.to_list)):
result.append(kmeans.Cluster(record))
struct = {'result': result}
message = kmeans.ClusterResult(struct)
message.validate()
except Exception, e:
callback(None, e)
return
message = clean_result(message)
callback(message, None)
@gen.engine
def new_resource(self, struct, callback):
'''
Create a new cluster resource
'''
try:
message = ClusterResource(struct)
message.validate()
message = message.to_primitive()
except Exception, e:
callback(None, e)
return
resource = ''.join(('resources.', message['resource']))
try:
message = yield motor.Op(
self.db.models.update,
{'uuid': message['model_uuid']},
{
'$addToSet': {
''.join((resource, '.contains')): message['uuid']
},
'$inc': {
'resources.total': 1,
''.join((resource, '.total')): 1
}
}
)
except Exception, e:
callback(None, e)
return
callback(message, None)
@gen.engine
def check_exist(self, model_uuid, callback):
'''
Check if cluster exist
'''
try:
exist = yield motor.Op(self.db.models.find_one,
{'uuid': model_uuid},
{'uuid':1, '_id':0})
exist = (True if exist else False)
except Exception, e:
callback(None, e)
return
callback(exist, None)
@gen.engine
def check_type(self, model_uuid, model_type, callback):
'''
Check cluster type
'''
try:
check_type = yield motor.Op(self.db.models.find_one,
{'uuid': model_uuid,
'model_type': model_type},
{'model_type':1,'_id':0})
check_type = (True if check_type else False)
except Exception, e:
callback(None, e)
return
callback(check_type, None)
@gen.engine
def get_centroids(self, model_uuid, callback):
'''
Get cluster centroid seeds
'''
try:
centroids = yield motor.Op(self.db.models.find_one,
{'uuid': model_uuid},
{'centroids':1,'_id':0})
centroids = (centroids if centroids else False)
except Exception, e:
callback(None, e)
return
callback(centroids, None)
@gen.engine
def new_kmeans_unit(self, cluster_labels, cluster_unique_labels, cluster_centers, callback):
'''
New kmeans cluster
The labels over the training data can be found in the labels attributes.
'''
try:
struct = {
'labels': cluster_labels,
'unique_labels': cluster_unique_labels,
'centers': cluster_centers
}
unit = Unit(struct)
unit.validate()
except Exception, e:
callback(None, e)
return
unit = clean_structure(unit)
message = yield gen.Task(self.db.units.insert, unit)
message, error = message.args
if error:
callback(None, error)
return
callback({'uuid': unit.get('uuid')}, None)
@gen.engine
def new_metrics(self, feature_uuid, feature_data, feature_labels, cluster_labels, callback):
'''
Create new metrics
'''
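# Metric ranges (per scikit-learn): homogeneity, completeness and
# v-measure lie in [0, 1]; adjusted Rand and silhouette lie in [-1, 1],
# with higher meaning better agreement / separation.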
try:
message = {
'homogeneity': metrics.homogeneity_score(feature_labels, cluster_labels),
'completeness': metrics.completeness_score(feature_labels, cluster_labels),
'v_measure': metrics.v_measure_score(feature_labels, cluster_labels),
'adjusted_rand': metrics.adjusted_rand_score(feature_labels, cluster_labels),
'silhouette': metrics.silhouette_score(feature_data, cluster_labels, sample_size=1000),
'feature_uuid': feature_uuid,
}
metric = Metric(message)
metric.validate()
except Exception, e:
callback(None, e)
return
metric = clean_structure(metric)
message = yield gen.Task(self.db.metrics.insert, metric)
message, error = message.args
if error:
callback(None, error)
return
callback({'uuid': metric.get('uuid')}, None)
@gen.engine
def set_cluster_time(self, model_uuid, cluster_time, callback):
'''
Set cluster time
'''
try:
message = yield motor.Op(self.db.models.update,
{'uuid': model_uuid},
{'$set': {'cluster_time': cluster_time}})
except Exception, e:
callback(None, e)
return
callback(message, None) | agpl-3.0 |
tensorflow/tensorflow-experimental_link_static_libraries_once | tensorflow/python/data/ops/options.py | 4 | 26308 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""API for specifying `tf.data` options."""
import enum
from absl import logging
from tensorflow.core.framework import dataset_options_pb2
from tensorflow.core.framework import model_pb2
from tensorflow.python.data.util import options as options_lib
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
@tf_export("data.experimental.AutotuneAlgorithm")
class AutotuneAlgorithm(enum.Enum):
"""Represents the type of autotuning algorithm to use.
DEFAULT: The default behavior is implementation specific and may change over
time.
HILL_CLIMB: In each optimization step, this algorithm chooses the optimal
parameter and increases its value by 1.
GRADIENT_DESCENT: In each optimization step, this algorithm updates the
parameter values in the optimal direction.
MAX_PARALLELISM: Similar to HILL_CLIMB but uses a relaxed stopping condition,
allowing the optimization to oversubscribe the CPU.
STAGE_BASED: In each optimization step, this algorithm chooses the worst
bottleneck parameter and increases its value by 1.
"""
DEFAULT = 0
HILL_CLIMB = 1
GRADIENT_DESCENT = 2
MAX_PARALLELISM = 3
STAGE_BASED = 4
@classmethod
def _to_proto(cls, obj):
if obj == cls.DEFAULT:
return model_pb2.AutotuneAlgorithm.DEFAULT
if obj == cls.HILL_CLIMB:
return model_pb2.AutotuneAlgorithm.HILL_CLIMB
if obj == cls.GRADIENT_DESCENT:
return model_pb2.AutotuneAlgorithm.GRADIENT_DESCENT
if obj == cls.MAX_PARALLELISM:
return model_pb2.AutotuneAlgorithm.MAX_PARALLELISM
if obj == cls.STAGE_BASED:
return model_pb2.AutotuneAlgorithm.STAGE_BASED
raise ValueError(
f"Invalid `obj.` Supported values include `DEFAULT`, `HILL_CLIMB`, "
f"`GRADIENT_DESCENT`, `MAX_PARALLELISM`, and `STAGE_BASED`. Got {obj.name}.")
@classmethod
def _from_proto(cls, pb):
if pb == model_pb2.AutotuneAlgorithm.DEFAULT:
return cls.DEFAULT
if pb == model_pb2.AutotuneAlgorithm.HILL_CLIMB:
return cls.HILL_CLIMB
if pb == model_pb2.AutotuneAlgorithm.GRADIENT_DESCENT:
return cls.GRADIENT_DESCENT
if pb == model_pb2.AutotuneAlgorithm.MAX_PARALLELISM:
return cls.MAX_PARALLELISM
if pb == model_pb2.AutotuneAlgorithm.STAGE_BASED:
return cls.STAGE_BASED
raise ValueError(
f"Invalid `pb.` Supported values include `DEFAULT`, `HILL_CLIMB`, "
f"`GRADIENT_DESCENT`, `MAX_PARALLELISM`, and `STAGE_BASED`. Got {pb}.")
@tf_export("data.experimental.AutoShardPolicy")
class AutoShardPolicy(enum.IntEnum):
"""Represents the type of auto-sharding to use.
OFF: No sharding will be performed.
AUTO: Attempts FILE-based sharding, falling back to DATA-based sharding.
FILE: Shards by input files (i.e. each worker will get a set of files to
process). When this option is selected, make sure that there are at least as
many files as workers. If there are fewer input files than workers, a runtime
error will be raised.
DATA: Shards by elements produced by the dataset. Each worker will process the
whole dataset and discard the portion that is not for itself. Note that for
this mode to correctly partition the dataset elements, the dataset needs to
produce elements in a deterministic order.
HINT: Looks for the presence of `shard(SHARD_HINT, ...)` which is treated as a
placeholder to replace with `shard(num_workers, worker_index)`.
"""
# LINT.IfChange
OFF = -1
AUTO = 0
FILE = 1
DATA = 2
HINT = 3
# LINT.ThenChange(//tensorflow/python/data/experimental/ops/data_service_ops.py:tf_data_service_sharding_policy)
@classmethod
def _to_proto(cls, obj):
"""Convert enum to proto."""
if obj == cls.OFF:
return dataset_options_pb2.AutoShardPolicy.OFF
if obj == cls.FILE:
return dataset_options_pb2.AutoShardPolicy.FILE
if obj == cls.DATA:
return dataset_options_pb2.AutoShardPolicy.DATA
if obj == cls.AUTO:
return dataset_options_pb2.AutoShardPolicy.AUTO
if obj == cls.HINT:
return dataset_options_pb2.AutoShardPolicy.HINT
raise ValueError(
f"Invalid `obj.` Supported values include `OFF`, `FILE`, `DATA`,"
f"`AUTO`, and `HINT`. Got {obj.name}."
)
@classmethod
def _from_proto(cls, pb):
"""Convert proto to enum."""
if pb == dataset_options_pb2.AutoShardPolicy.OFF:
return cls.OFF
if pb == dataset_options_pb2.AutoShardPolicy.FILE:
return cls.FILE
if pb == dataset_options_pb2.AutoShardPolicy.DATA:
return cls.DATA
if pb == dataset_options_pb2.AutoShardPolicy.AUTO:
return cls.AUTO
if pb == dataset_options_pb2.AutoShardPolicy.HINT:
return cls.HINT
raise ValueError(
f"Invalid `pb.` Supported values include `OFF`, `FILE`, `DATA`,"
f"`AUTO`, and `HINT`. Got {pb}."
)
@tf_export("data.experimental.ExternalStatePolicy")
class ExternalStatePolicy(enum.Enum):
"""Represents how to handle external state during serialization.
See the `tf.data.Options.experimental_external_state_policy` documentation
for more information.
"""
WARN = 0
IGNORE = 1
FAIL = 2
@classmethod
def _to_proto(cls, obj):
"""Convert enum to proto."""
if obj == cls.IGNORE:
return dataset_options_pb2.ExternalStatePolicy.POLICY_IGNORE
if obj == cls.FAIL:
return dataset_options_pb2.ExternalStatePolicy.POLICY_FAIL
if obj == cls.WARN:
return dataset_options_pb2.ExternalStatePolicy.POLICY_WARN
raise ValueError(
f"Invalid `obj.` Supported values include `POLICY_IGNORE`,"
f"`POLICY_FAIL`, `POLICY_WARN`. Got {obj.name}.")
@classmethod
def _from_proto(cls, pb):
"""Convert proto to enum."""
if pb == dataset_options_pb2.ExternalStatePolicy.POLICY_IGNORE:
return cls.IGNORE
if pb == dataset_options_pb2.ExternalStatePolicy.POLICY_FAIL:
return cls.FAIL
if pb == dataset_options_pb2.ExternalStatePolicy.POLICY_WARN:
return cls.WARN
raise ValueError(
f"Invalid `pb.` Supported values include `POLICY_IGNORE`,"
f"`POLICY_FAIL`, `POLICY_WARN`. Got {pb}.")
@tf_export("data.experimental.AutotuneOptions")
class AutotuneOptions(options_lib.OptionsBase):
"""Represents options for autotuning dataset performance.
```python
options = tf.data.Options()
options.autotune.enabled = False
dataset = dataset.with_options(options)
```
"""
enabled = options_lib.create_option(
name="enabled",
ty=bool,
docstring="Whether to automatically tune performance knobs. If None, "
"defaults to True.")
cpu_budget = options_lib.create_option(
name="cpu_budget",
ty=int,
docstring="When autotuning is enabled (through `autotune`), determines "
"the CPU budget to use. Values greater than the number of schedulable "
"CPU cores are allowed but may result in CPU contention. If None, "
"defaults to the number of schedulable CPU cores.")
ram_budget = options_lib.create_option(
name="ram_budget",
ty=int,
docstring="When autotuning is enabled (through `autotune`), determines "
"the RAM budget to use. Values greater than the available RAM in bytes "
"may result in OOM. If None, defaults to half of the available RAM in "
"bytes.")
autotune_algorithm = options_lib.create_option(
name="autotune_algorithm",
ty=AutotuneAlgorithm,
docstring="When autotuning is enabled (through `autotune`), determines "
"the algorithm to use.")
def _to_proto(self):
pb = dataset_options_pb2.AutotuneOptions()
if self.enabled is not None:
pb.enabled = self.enabled
if self.cpu_budget is not None:
pb.cpu_budget = self.cpu_budget
if self.ram_budget is not None:
pb.ram_budget = self.ram_budget
if self.autotune_algorithm is not None:
pb.autotune_algorithm = AutotuneAlgorithm._to_proto( # pylint: disable=protected-access
self.autotune_algorithm)
return pb
def _from_proto(self, pb):
if pb.WhichOneof("optional_enabled") is not None:
self.enabled = pb.enabled
if pb.WhichOneof("optional_cpu_budget") is not None:
self.cpu_budget = pb.cpu_budget
if pb.WhichOneof("optional_ram_budget") is not None:
self.ram_budget = pb.ram_budget
if pb.WhichOneof("optional_autotune_algorithm") is not None:
self.autotune_algorithm = AutotuneAlgorithm._from_proto( # pylint: disable=protected-access
pb.autotune_algorithm)
def _set_mutable(self, mutable):
"""Change the mutability value to `mutable` on this options and children."""
# pylint: disable=protected-access
object.__setattr__(self, "_mutable", mutable)
@tf_export("data.experimental.DistributeOptions")
class DistributeOptions(options_lib.OptionsBase):
"""Represents options for distributed data processing.
You can set the distribution options of a dataset through the
`experimental_distribute` property of `tf.data.Options`; the property is
an instance of `tf.data.experimental.DistributeOptions`.
```python
options = tf.data.Options()
options.experimental_distribute.auto_shard_policy = tf.data.experimental.AutoShardPolicy.OFF
dataset = dataset.with_options(options)
```
"""
auto_shard_policy = options_lib.create_option(
name="auto_shard_policy",
ty=AutoShardPolicy,
docstring="The type of sharding to use. See "
"`tf.data.experimental.AutoShardPolicy` for additional information.",
default_factory=lambda: AutoShardPolicy.AUTO)
num_devices = options_lib.create_option(
name="num_devices",
ty=int,
docstring=
"The number of devices attached to this input pipeline. This will be "
"automatically set by `MultiDeviceIterator`.")
def _to_proto(self):
pb = dataset_options_pb2.DistributeOptions()
pb.auto_shard_policy = AutoShardPolicy._to_proto(self.auto_shard_policy) # pylint: disable=protected-access
if self.num_devices is not None:
pb.num_devices = self.num_devices
return pb
def _from_proto(self, pb):
self.auto_shard_policy = AutoShardPolicy._from_proto(pb.auto_shard_policy) # pylint: disable=protected-access
if pb.WhichOneof("optional_num_devices") is not None:
self.num_devices = pb.num_devices
@tf_export("data.experimental.OptimizationOptions")
class OptimizationOptions(options_lib.OptionsBase):
"""Represents options for dataset optimizations.
You can set the optimization options of a dataset through the
`experimental_optimization` property of `tf.data.Options`; the property is
an instance of `tf.data.experimental.OptimizationOptions`.
```python
options = tf.data.Options()
options.experimental_optimization.noop_elimination = True
options.experimental_optimization.apply_default_optimizations = False
dataset = dataset.with_options(options)
```
"""
apply_default_optimizations = options_lib.create_option(
name="apply_default_optimizations",
ty=bool,
docstring=
"Whether to apply default graph optimizations. If False, only graph "
"optimizations that have been explicitly enabled will be applied.")
filter_fusion = options_lib.create_option(
name="filter_fusion",
ty=bool,
docstring=
"Whether to fuse filter transformations. If None, defaults to False.")
filter_parallelization = options_lib.create_option(
name="filter_parallelization",
ty=bool,
docstring=
"Whether to parallelize stateless filter transformations. If None, "
"defaults to False.")
inject_prefetch = options_lib.create_option(
name="inject_prefetch",
ty=bool,
docstring=
"Whether to inject prefetch transformation as the last transformation "
"when the last transformation is a synchronous transformation. If None, "
"defaults to True.")
map_and_batch_fusion = options_lib.create_option(
name="map_and_batch_fusion",
ty=bool,
docstring=
"Whether to fuse map and batch transformations. If None, defaults to "
"True.")
map_and_filter_fusion = options_lib.create_option(
name="map_and_filter_fusion",
ty=bool,
docstring=
"Whether to fuse map and filter transformations. If None, defaults to "
"False.")
map_fusion = options_lib.create_option(
name="map_fusion",
ty=bool,
docstring="Whether to fuse map transformations. If None, defaults to "
"False.")
map_parallelization = options_lib.create_option(
name="map_parallelization",
ty=bool,
docstring=
"Whether to parallelize stateless map transformations. If None, defaults "
"to True.")
noop_elimination = options_lib.create_option(
name="noop_elimination",
ty=bool,
docstring=
"Whether to eliminate no-op transformations. If None, defaults to True.")
parallel_batch = options_lib.create_option(
name="parallel_batch",
ty=bool,
docstring="Whether to parallelize copying of batch elements. If None, "
"defaults to True.")
shuffle_and_repeat_fusion = options_lib.create_option(
name="shuffle_and_repeat_fusion",
ty=bool,
docstring="Whether to fuse shuffle and repeat transformations. If None, "
"defaults to True.")
def _to_proto(self):
pb = dataset_options_pb2.OptimizationOptions()
if self.apply_default_optimizations is not None:
pb.apply_default_optimizations = self.apply_default_optimizations
if self.filter_fusion is not None:
pb.filter_fusion = self.filter_fusion
if self.filter_parallelization is not None:
pb.filter_parallelization = self.filter_parallelization
if self.inject_prefetch is not None:
pb.inject_prefetch = self.inject_prefetch
if self.map_and_batch_fusion is not None:
pb.map_and_batch_fusion = self.map_and_batch_fusion
if self.map_and_filter_fusion is not None:
pb.map_and_filter_fusion = self.map_and_filter_fusion
if self.map_fusion is not None:
pb.map_fusion = self.map_fusion
if self.map_parallelization is not None:
pb.map_parallelization = self.map_parallelization
if self.noop_elimination is not None:
pb.noop_elimination = self.noop_elimination
if self.parallel_batch is not None:
pb.parallel_batch = self.parallel_batch
if self.shuffle_and_repeat_fusion is not None:
pb.shuffle_and_repeat_fusion = self.shuffle_and_repeat_fusion
return pb
def _from_proto(self, pb):
if pb.WhichOneof("optional_apply_default_optimizations") is not None:
self.apply_default_optimizations = pb.apply_default_optimizations
if pb.WhichOneof("optional_filter_fusion") is not None:
self.filter_fusion = pb.filter_fusion
if pb.WhichOneof("optional_filter_parallelization") is not None:
self.filter_parallelization = pb.filter_parallelization
if pb.WhichOneof("optional_inject_prefetch") is not None:
self.inject_prefetch = pb.inject_prefetch
if pb.WhichOneof("optional_map_and_batch_fusion") is not None:
self.map_and_batch_fusion = pb.map_and_batch_fusion
if pb.WhichOneof("optional_map_and_filter_fusion") is not None:
self.map_and_filter_fusion = pb.map_and_filter_fusion
if pb.WhichOneof("optional_map_fusion") is not None:
self.map_fusion = pb.map_fusion
if pb.WhichOneof("optional_map_parallelization") is not None:
self.map_parallelization = pb.map_parallelization
if pb.WhichOneof("optional_noop_elimination") is not None:
self.noop_elimination = pb.noop_elimination
if pb.WhichOneof("optional_parallel_batch") is not None:
self.parallel_batch = pb.parallel_batch
if pb.WhichOneof("optional_shuffle_and_repeat_fusion") is not None:
self.shuffle_and_repeat_fusion = pb.shuffle_and_repeat_fusion
def _set_mutable(self, mutable):
"""Change the mutability value to `mutable` on this options and children."""
# pylint: disable=protected-access
object.__setattr__(self, "_mutable", mutable)
@deprecation.deprecated_endpoints("data.experimental.ThreadingOptions")
@tf_export("data.experimental.ThreadingOptions", "data.ThreadingOptions")
class ThreadingOptions(options_lib.OptionsBase):
"""Represents options for dataset threading.
You can set the threading options of a dataset through the
`threading` property of `tf.data.Options`; the property is
an instance of `tf.data.ThreadingOptions`.
```python
options = tf.data.Options()
options.threading.private_threadpool_size = 10
dataset = dataset.with_options(options)
```
"""
max_intra_op_parallelism = options_lib.create_option(
name="max_intra_op_parallelism",
ty=int,
docstring=
"If set, it overrides the maximum degree of intra-op parallelism.")
private_threadpool_size = options_lib.create_option(
name="private_threadpool_size",
ty=int,
docstring=
"If set, the dataset will use a private threadpool of the given size. "
"The value 0 can be used to indicate that the threadpool size should be "
"determined at runtime based on the number of available CPU cores.")
def _to_proto(self):
pb = dataset_options_pb2.ThreadingOptions()
if self.max_intra_op_parallelism is not None:
pb.max_intra_op_parallelism = self.max_intra_op_parallelism
if self.private_threadpool_size is not None:
pb.private_threadpool_size = self.private_threadpool_size
return pb
def _from_proto(self, pb):
if pb.WhichOneof("optional_max_intra_op_parallelism") is not None:
self.max_intra_op_parallelism = pb.max_intra_op_parallelism
if pb.WhichOneof("optional_private_threadpool_size") is not None:
self.private_threadpool_size = pb.private_threadpool_size
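# Illustrative sketch (not in the original source): the ``WhichOneof`` guards
# above keep unset fields unset across a round-trip, so protobuf defaults are
# never mistaken for user-provided values.
def _example_threading_unset_fields():
  pb = ThreadingOptions()._to_proto()  # pylint: disable=protected-access
  restored = ThreadingOptions()
  restored._from_proto(pb)  # pylint: disable=protected-access
  return restored.max_intra_op_parallelism  # still None, not 0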
@tf_export("data.Options")
class Options(options_lib.OptionsBase):
"""Represents options for `tf.data.Dataset`.
  A `tf.data.Options` object can be used, for instance, to control which static
optimizations to apply to the input pipeline graph or whether to use
performance modeling to dynamically tune the parallelism of operations such as
`tf.data.Dataset.map` or `tf.data.Dataset.interleave`.
The options are set for the entire dataset and are carried over to datasets
created through tf.data transformations.
The options can be set by constructing an `Options` object and using the
`tf.data.Dataset.with_options(options)` transformation, which returns a
dataset with the options set.
>>> dataset = tf.data.Dataset.range(42)
>>> options = tf.data.Options()
>>> options.deterministic = False
>>> dataset = dataset.with_options(options)
>>> print(dataset.options().deterministic)
False
Note: A known limitation of the `tf.data.Options` implementation is that the
options are not preserved across tf.function boundaries. In particular, to
set options for a dataset that is iterated within a tf.function, the options
need to be set within the same tf.function.
"""
autotune = options_lib.create_option(
name="autotune",
ty=AutotuneOptions,
docstring="The autotuning options associated with the dataset. See "
"`tf.data.experimental.AutotuneOptions` for more details.",
default_factory=AutotuneOptions)
deterministic = options_lib.create_option(
name="deterministic",
ty=bool,
docstring=
"Whether the outputs need to be produced in deterministic order. If None,"
" defaults to True.")
experimental_deterministic = options_lib.create_option(
name="experimental_deterministic",
ty=bool,
docstring="DEPRECATED. Use `deterministic` instead.")
experimental_distribute = options_lib.create_option(
name="experimental_distribute",
ty=DistributeOptions,
docstring=
"The distribution strategy options associated with the dataset. See "
"`tf.data.experimental.DistributeOptions` for more details.",
default_factory=DistributeOptions)
experimental_external_state_policy = options_lib.create_option(
name="experimental_external_state_policy",
ty=ExternalStatePolicy,
docstring="This option can be used to override the default policy for "
"how to handle external state when serializing a dataset or "
"checkpointing its iterator. There are three settings available - "
"IGNORE: External state is ignored without a warning; WARN: External "
"state is ignored and a warning is logged; FAIL: External state results "
"in an error.")
experimental_optimization = options_lib.create_option(
name="experimental_optimization",
ty=OptimizationOptions,
docstring=
"The optimization options associated with the dataset. See "
"`tf.data.experimental.OptimizationOptions` for more details.",
default_factory=OptimizationOptions)
experimental_slack = options_lib.create_option(
name="experimental_slack",
ty=bool,
docstring="Whether to introduce 'slack' in the last `prefetch` of the "
"input pipeline, if it exists. This may reduce CPU contention with "
"accelerator host-side activity at the start of a step. The slack "
"frequency is determined by the number of devices attached to this "
"input pipeline. If None, defaults to False.")
experimental_threading = options_lib.create_option(
name="experimental_threading",
ty=ThreadingOptions,
docstring="DEPRECATED. Use `threading` instead.")
threading = options_lib.create_option(
name="threading",
ty=ThreadingOptions,
docstring="The threading options associated with the dataset. See "
"`tf.data.ThreadingOptions` for more details.",
default_factory=ThreadingOptions)
def __getattribute__(self, name):
if name == "experimental_threading":
logging.warning("options.experimental_threading is deprecated. "
"Use options.threading instead.")
return getattr(self, "threading")
if name == "experimental_deterministic":
# TODO(aaudibert): Uncomment after internal uses have been updated.
# logging.warning("options.experimental_deterministic is deprecated. "
# "Use options.deterministic instead.")
return getattr(self, "deterministic")
return super(Options, self).__getattribute__(name)
def __setattr__(self, name, value):
if name == "experimental_threading":
logging.warning("options.experimental_threading is deprecated. "
"Use options.threading instead.")
super(Options, self).__setattr__("threading", value)
return
if name == "experimental_deterministic":
# TODO(aaudibert): Uncomment after internal uses have been updated.
# logging.warning("options.experimental_deterministic is deprecated. "
# "Use options.deterministic instead.")
super(Options, self).__setattr__("deterministic", value)
return
super(Options, self).__setattr__(name, value)
def _to_proto(self):
pb = dataset_options_pb2.Options()
if self.deterministic is not None:
pb.deterministic = self.deterministic
pb.autotune_options.CopyFrom(self.autotune._to_proto()) # pylint: disable=protected-access
pb.distribute_options.CopyFrom(self.experimental_distribute._to_proto()) # pylint: disable=protected-access
if self.experimental_external_state_policy is not None:
pb.external_state_policy = (
ExternalStatePolicy._to_proto( # pylint: disable=protected-access
self.experimental_external_state_policy))
pb.optimization_options.CopyFrom(self.experimental_optimization._to_proto()) # pylint: disable=protected-access
if self.experimental_slack is not None:
pb.slack = self.experimental_slack
pb.threading_options.CopyFrom(self.threading._to_proto()) # pylint: disable=protected-access
return pb
def _from_proto(self, pb):
if pb.WhichOneof("optional_deterministic") is not None:
self.deterministic = pb.deterministic
self.autotune._from_proto(pb.autotune_options) # pylint: disable=protected-access
self.experimental_distribute._from_proto(pb.distribute_options) # pylint: disable=protected-access
if pb.WhichOneof("optional_external_state_policy") is not None:
self.experimental_external_state_policy = (
ExternalStatePolicy._from_proto( # pylint: disable=protected-access
pb.external_state_policy))
self.experimental_optimization._from_proto(pb.optimization_options) # pylint: disable=protected-access
if pb.WhichOneof("optional_slack") is not None:
self.experimental_slack = pb.slack
self.threading._from_proto(pb.threading_options) # pylint: disable=protected-access
def _set_mutable(self, mutable):
"""Change the mutability value to `mutable` on this options and children."""
# pylint: disable=protected-access
object.__setattr__(self, "_mutable", mutable)
self.autotune._set_mutable(mutable)
self.experimental_distribute._set_mutable(mutable)
self.experimental_optimization._set_mutable(mutable)
self.threading._set_mutable(mutable)
def merge(self, options):
"""Merges itself with the given `tf.data.Options`.
If this object and the `options` to merge set an option differently, a
warning is generated and this object's value is updated with the `options`
object's value.
Args:
options: The `tf.data.Options` to merge with.
Returns:
New `tf.data.Options` object which is the result of merging self with
the input `tf.data.Options`.
"""
return options_lib.merge_options(self, options)
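# Illustrative sketch (not part of the original source): merging two Options
# objects as described in ``merge`` above; on a conflict a warning is emitted
# and the argument's value wins.
def _example_merge_options():
  a = Options()
  a.deterministic = True
  b = Options()
  b.deterministic = False
  merged = a.merge(b)  # warns about the conflicting values
  return merged.deterministic  # -> False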
| apache-2.0 |
arabenjamin/scikit-learn | examples/linear_model/plot_omp.py | 379 | 2263 | """
===========================
Orthogonal Matching Pursuit
===========================
Using orthogonal matching pursuit to recover a sparse signal from a noisy
measurement encoded with a dictionary.
"""
print(__doc__)
import matplotlib.pyplot as plt
import numpy as np
from sklearn.linear_model import OrthogonalMatchingPursuit
from sklearn.linear_model import OrthogonalMatchingPursuitCV
from sklearn.datasets import make_sparse_coded_signal
n_components, n_features = 512, 100
n_nonzero_coefs = 17
# generate the data
###################
# y = Xw
# |w|_0 = n_nonzero_coefs
y, X, w = make_sparse_coded_signal(n_samples=1,
n_components=n_components,
n_features=n_features,
n_nonzero_coefs=n_nonzero_coefs,
random_state=0)
idx, = w.nonzero()
# distort the clean signal
##########################
y_noisy = y + 0.05 * np.random.randn(len(y))
# plot the sparse signal
########################
plt.figure(figsize=(7, 7))
plt.subplot(4, 1, 1)
plt.xlim(0, 512)
plt.title("Sparse signal")
plt.stem(idx, w[idx])
# plot the noise-free reconstruction
####################################
omp = OrthogonalMatchingPursuit(n_nonzero_coefs=n_nonzero_coefs)
omp.fit(X, y)
coef = omp.coef_
idx_r, = coef.nonzero()
plt.subplot(4, 1, 2)
plt.xlim(0, 512)
plt.title("Recovered signal from noise-free measurements")
plt.stem(idx_r, coef[idx_r])
# plot the noisy reconstruction
###############################
omp.fit(X, y_noisy)
coef = omp.coef_
idx_r, = coef.nonzero()
plt.subplot(4, 1, 3)
plt.xlim(0, 512)
plt.title("Recovered signal from noisy measurements")
plt.stem(idx_r, coef[idx_r])
# plot the noisy reconstruction with number of non-zeros set by CV
##################################################################
omp_cv = OrthogonalMatchingPursuitCV()
omp_cv.fit(X, y_noisy)
coef = omp_cv.coef_
idx_r, = coef.nonzero()
plt.subplot(4, 1, 4)
plt.xlim(0, 512)
plt.title("Recovered signal from noisy measurements with CV")
plt.stem(idx_r, coef[idx_r])
plt.subplots_adjust(0.06, 0.04, 0.94, 0.90, 0.20, 0.38)
plt.suptitle('Sparse signal recovery with Orthogonal Matching Pursuit',
fontsize=16)
plt.show()
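# Optional sanity check (illustrative addition, not in the original example):
# the overlap between the true and recovered supports shows how many of the
# n_nonzero_coefs atoms the CV-tuned OMP identified.
print("recovered %d of %d atoms" % (len(np.intersect1d(idx, idx_r)),
                                    n_nonzero_coefs))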
| bsd-3-clause |
heli522/scikit-learn | sklearn/setup.py | 224 | 2856 | import os
from os.path import join
import warnings
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info, BlasNotFoundError
import numpy
libraries = []
if os.name == 'posix':
libraries.append('m')
config = Configuration('sklearn', parent_package, top_path)
config.add_subpackage('__check_build')
config.add_subpackage('svm')
config.add_subpackage('datasets')
config.add_subpackage('datasets/tests')
config.add_subpackage('feature_extraction')
config.add_subpackage('feature_extraction/tests')
config.add_subpackage('cluster')
config.add_subpackage('cluster/tests')
config.add_subpackage('covariance')
config.add_subpackage('covariance/tests')
config.add_subpackage('cross_decomposition')
config.add_subpackage('decomposition')
config.add_subpackage('decomposition/tests')
config.add_subpackage("ensemble")
config.add_subpackage("ensemble/tests")
config.add_subpackage('feature_selection')
config.add_subpackage('feature_selection/tests')
config.add_subpackage('utils')
config.add_subpackage('utils/tests')
config.add_subpackage('externals')
config.add_subpackage('mixture')
config.add_subpackage('mixture/tests')
config.add_subpackage('gaussian_process')
config.add_subpackage('gaussian_process/tests')
config.add_subpackage('neighbors')
config.add_subpackage('neural_network')
config.add_subpackage('preprocessing')
config.add_subpackage('manifold')
config.add_subpackage('metrics')
config.add_subpackage('semi_supervised')
config.add_subpackage("tree")
config.add_subpackage("tree/tests")
config.add_subpackage('metrics/tests')
config.add_subpackage('metrics/cluster')
config.add_subpackage('metrics/cluster/tests')
# add cython extension module for isotonic regression
config.add_extension(
'_isotonic',
sources=['_isotonic.c'],
include_dirs=[numpy.get_include()],
libraries=libraries,
)
    # some libs need cblas; fortran-compiled BLAS will not be sufficient
blas_info = get_info('blas_opt', 0)
if (not blas_info) or (
('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', [])):
config.add_library('cblas',
sources=[join('src', 'cblas', '*.c')])
warnings.warn(BlasNotFoundError.__doc__)
    # the following packages depend on cblas, so they have to be built
    # after the above.
config.add_subpackage('linear_model')
config.add_subpackage('utils')
# add the test directory
config.add_subpackage('tests')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
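# Typical invocation (illustrative note, not part of the original file):
#   python setup.py build_ext --inplace
# compiles the Cython/C extensions declared above in the source tree so the
# package can be imported and tested without a full install.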
| bsd-3-clause |
likelyzhao/mxnet | docs/conf.py | 11 | 6195 | # -*- coding: utf-8 -*-
import sys, os, re, subprocess
import mock
from recommonmark import parser
curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
libpath = os.path.join(curr_path, '../python/')
sys.path.insert(0, libpath)
sys.path.insert(0, curr_path)
# -- mock out modules
MOCK_MODULES = ['numpy', 'numpy.testing', 'scipy', 'scipy.sparse', 'sklearn', 'matplotlib']
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = mock.Mock()
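# With the mocks installed (illustrative note, not in the original config),
# Sphinx autodoc can import mxnet's Python modules even when these heavy
# dependencies are absent: attribute access on a Mock yields another Mock
# instead of raising ImportError, e.g.
#   import numpy          # resolves to the Mock registered above
#   numpy.random.seed(0)  # returns a Mock; no real numpy is required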
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.2'
# General information about the project.
project = u'mxnet'
author = u'%s developers' % project
copyright = u'2015-2017, %s' % author
github_doc_root = 'https://github.com/dmlc/mxnet/tree/master/docs/'
doc_root = 'http://mxnet.io/'
# add markdown parser
source_parsers = {
'.md': parser.CommonMarkParser,
'.Rmd': parser.CommonMarkParser
}
# Version information.
# from mxnet import libinfo
# version = libinfo.__version__
# release = libinfo.__version__
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.napoleon',
'sphinx.ext.mathjax',
'breathe',
'mxdoc'
]
# Use breathe to include doxygen documents
breathe_projects = {'mxnet' : 'doxygen/xml/'}
breathe_default_project = 'mxnet'
autodoc_member_order = 'bysource'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
# source_suffix = '.rst'
source_suffix = ['.rst', '.md', '.Rmd']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# Version and release are passed from CMake.
#version = None
# The full version, including alpha/beta/rc tags.
#release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['virtualenv']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
suppress_warnings = [
'image.nonlocal_uri',
]
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'mxnet-theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_static']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': 'relations.html'
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'formatdoc'
| apache-2.0 |
arabenjamin/scikit-learn | sklearn/linear_model/stochastic_gradient.py | 129 | 50966 | # Authors: Peter Prettenhofer <peter.prettenhofer@gmail.com> (main author)
# Mathieu Blondel (partial_fit support)
#
# License: BSD 3 clause
"""Classification and regression using Stochastic Gradient Descent (SGD)."""
import numpy as np
import scipy.sparse as sp
from abc import ABCMeta, abstractmethod
from ..externals.joblib import Parallel, delayed
from .base import LinearClassifierMixin, SparseCoefMixin
from ..base import BaseEstimator, RegressorMixin
from ..feature_selection.from_model import _LearntSelectorMixin
from ..utils import (check_array, check_random_state, check_X_y,
deprecated)
from ..utils.extmath import safe_sparse_dot
from ..utils.multiclass import _check_partial_fit_first_call
from ..utils.validation import check_is_fitted
from ..externals import six
from .sgd_fast import plain_sgd, average_sgd
from ..utils.fixes import astype
from ..utils.seq_dataset import ArrayDataset, CSRDataset
from ..utils import compute_class_weight
from .sgd_fast import Hinge
from .sgd_fast import SquaredHinge
from .sgd_fast import Log
from .sgd_fast import ModifiedHuber
from .sgd_fast import SquaredLoss
from .sgd_fast import Huber
from .sgd_fast import EpsilonInsensitive
from .sgd_fast import SquaredEpsilonInsensitive
LEARNING_RATE_TYPES = {"constant": 1, "optimal": 2, "invscaling": 3,
"pa1": 4, "pa2": 5}
PENALTY_TYPES = {"none": 0, "l2": 2, "l1": 1, "elasticnet": 3}
SPARSE_INTERCEPT_DECAY = 0.01
"""For sparse data intercept updates are scaled by this decay factor to avoid
intercept oscillation."""
DEFAULT_EPSILON = 0.1
"""Default value of ``epsilon`` parameter. """
class BaseSGD(six.with_metaclass(ABCMeta, BaseEstimator, SparseCoefMixin)):
"""Base class for SGD classification and regression."""
def __init__(self, loss, penalty='l2', alpha=0.0001, C=1.0,
l1_ratio=0.15, fit_intercept=True, n_iter=5, shuffle=True,
verbose=0, epsilon=0.1, random_state=None,
learning_rate="optimal", eta0=0.0, power_t=0.5,
warm_start=False, average=False):
self.loss = loss
self.penalty = penalty
self.learning_rate = learning_rate
self.epsilon = epsilon
self.alpha = alpha
self.C = C
self.l1_ratio = l1_ratio
self.fit_intercept = fit_intercept
self.n_iter = n_iter
self.shuffle = shuffle
self.random_state = random_state
self.verbose = verbose
self.eta0 = eta0
self.power_t = power_t
self.warm_start = warm_start
self.average = average
self._validate_params()
self.coef_ = None
if self.average > 0:
self.standard_coef_ = None
self.average_coef_ = None
# iteration count for learning rate schedule
# must not be int (e.g. if ``learning_rate=='optimal'``)
self.t_ = None
def set_params(self, *args, **kwargs):
super(BaseSGD, self).set_params(*args, **kwargs)
self._validate_params()
return self
@abstractmethod
def fit(self, X, y):
"""Fit model."""
def _validate_params(self):
"""Validate input params. """
if not isinstance(self.shuffle, bool):
raise ValueError("shuffle must be either True or False")
if self.n_iter <= 0:
raise ValueError("n_iter must be > zero")
if not (0.0 <= self.l1_ratio <= 1.0):
raise ValueError("l1_ratio must be in [0, 1]")
if self.alpha < 0.0:
raise ValueError("alpha must be >= 0")
if self.learning_rate in ("constant", "invscaling"):
if self.eta0 <= 0.0:
raise ValueError("eta0 must be > 0")
# raises ValueError if not registered
self._get_penalty_type(self.penalty)
self._get_learning_rate_type(self.learning_rate)
if self.loss not in self.loss_functions:
raise ValueError("The loss %s is not supported. " % self.loss)
def _get_loss_function(self, loss):
"""Get concrete ``LossFunction`` object for str ``loss``. """
try:
loss_ = self.loss_functions[loss]
loss_class, args = loss_[0], loss_[1:]
if loss in ('huber', 'epsilon_insensitive',
'squared_epsilon_insensitive'):
args = (self.epsilon, )
return loss_class(*args)
except KeyError:
raise ValueError("The loss %s is not supported. " % loss)
def _get_learning_rate_type(self, learning_rate):
try:
return LEARNING_RATE_TYPES[learning_rate]
except KeyError:
raise ValueError("learning rate %s "
"is not supported. " % learning_rate)
def _get_penalty_type(self, penalty):
penalty = str(penalty).lower()
try:
return PENALTY_TYPES[penalty]
except KeyError:
raise ValueError("Penalty %s is not supported. " % penalty)
def _validate_sample_weight(self, sample_weight, n_samples):
"""Set the sample weight array."""
if sample_weight is None:
# uniform sample weights
sample_weight = np.ones(n_samples, dtype=np.float64, order='C')
else:
# user-provided array
sample_weight = np.asarray(sample_weight, dtype=np.float64,
order="C")
if sample_weight.shape[0] != n_samples:
raise ValueError("Shapes of X and sample_weight do not match.")
return sample_weight
def _allocate_parameter_mem(self, n_classes, n_features, coef_init=None,
intercept_init=None):
"""Allocate mem for parameters; initialize if provided."""
if n_classes > 2:
# allocate coef_ for multi-class
if coef_init is not None:
coef_init = np.asarray(coef_init, order="C")
if coef_init.shape != (n_classes, n_features):
raise ValueError("Provided ``coef_`` does not match dataset. ")
self.coef_ = coef_init
else:
self.coef_ = np.zeros((n_classes, n_features),
dtype=np.float64, order="C")
# allocate intercept_ for multi-class
if intercept_init is not None:
intercept_init = np.asarray(intercept_init, order="C")
if intercept_init.shape != (n_classes, ):
raise ValueError("Provided intercept_init "
"does not match dataset.")
self.intercept_ = intercept_init
else:
self.intercept_ = np.zeros(n_classes, dtype=np.float64,
order="C")
else:
# allocate coef_ for binary problem
if coef_init is not None:
coef_init = np.asarray(coef_init, dtype=np.float64,
order="C")
coef_init = coef_init.ravel()
if coef_init.shape != (n_features,):
raise ValueError("Provided coef_init does not "
"match dataset.")
self.coef_ = coef_init
else:
self.coef_ = np.zeros(n_features,
dtype=np.float64,
order="C")
# allocate intercept_ for binary problem
if intercept_init is not None:
intercept_init = np.asarray(intercept_init, dtype=np.float64)
if intercept_init.shape != (1,) and intercept_init.shape != ():
raise ValueError("Provided intercept_init "
"does not match dataset.")
self.intercept_ = intercept_init.reshape(1,)
else:
self.intercept_ = np.zeros(1, dtype=np.float64, order="C")
# initialize average parameters
if self.average > 0:
self.standard_coef_ = self.coef_
self.standard_intercept_ = self.intercept_
self.average_coef_ = np.zeros(self.coef_.shape,
dtype=np.float64,
order="C")
self.average_intercept_ = np.zeros(self.standard_intercept_.shape,
dtype=np.float64,
order="C")
def _make_dataset(X, y_i, sample_weight):
"""Create ``Dataset`` abstraction for sparse and dense inputs.
This also returns the ``intercept_decay`` which is different
for sparse datasets.
"""
if sp.issparse(X):
dataset = CSRDataset(X.data, X.indptr, X.indices, y_i, sample_weight)
intercept_decay = SPARSE_INTERCEPT_DECAY
else:
dataset = ArrayDataset(X, y_i, sample_weight)
intercept_decay = 1.0
return dataset, intercept_decay
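# Illustrative sketch (not in the original source): ``_make_dataset``
# dispatches on sparsity. The exact dtype requirements (float64, C order)
# follow the seq_dataset implementations and are assumed here.
def _example_make_dataset():
    X = np.ones((4, 2), dtype=np.float64)
    y = np.array([1.0, -1.0, 1.0, -1.0])
    weights = np.ones(4)
    _, dense_decay = _make_dataset(X, y, weights)  # ArrayDataset
    _, sparse_decay = _make_dataset(sp.csr_matrix(X), y, weights)  # CSRDataset
    return dense_decay, sparse_decay  # -> (1.0, SPARSE_INTERCEPT_DECAY)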
def _prepare_fit_binary(est, y, i):
"""Initialization for fit_binary.
Returns y, coef, intercept.
"""
y_i = np.ones(y.shape, dtype=np.float64, order="C")
y_i[y != est.classes_[i]] = -1.0
average_intercept = 0
average_coef = None
if len(est.classes_) == 2:
if not est.average:
coef = est.coef_.ravel()
intercept = est.intercept_[0]
else:
coef = est.standard_coef_.ravel()
intercept = est.standard_intercept_[0]
average_coef = est.average_coef_.ravel()
average_intercept = est.average_intercept_[0]
else:
if not est.average:
coef = est.coef_[i]
intercept = est.intercept_[i]
else:
coef = est.standard_coef_[i]
intercept = est.standard_intercept_[i]
average_coef = est.average_coef_[i]
average_intercept = est.average_intercept_[i]
return y_i, coef, intercept, average_coef, average_intercept
def fit_binary(est, i, X, y, alpha, C, learning_rate, n_iter,
pos_weight, neg_weight, sample_weight):
"""Fit a single binary classifier.
The i'th class is considered the "positive" class.
"""
# if average is not true, average_coef, and average_intercept will be
# unused
y_i, coef, intercept, average_coef, average_intercept = \
_prepare_fit_binary(est, y, i)
assert y_i.shape[0] == y.shape[0] == sample_weight.shape[0]
dataset, intercept_decay = _make_dataset(X, y_i, sample_weight)
penalty_type = est._get_penalty_type(est.penalty)
learning_rate_type = est._get_learning_rate_type(learning_rate)
# XXX should have random_state_!
random_state = check_random_state(est.random_state)
# numpy mtrand expects a C long which is a signed 32 bit integer under
# Windows
seed = random_state.randint(0, np.iinfo(np.int32).max)
if not est.average:
return plain_sgd(coef, intercept, est.loss_function,
penalty_type, alpha, C, est.l1_ratio,
dataset, n_iter, int(est.fit_intercept),
int(est.verbose), int(est.shuffle), seed,
pos_weight, neg_weight,
learning_rate_type, est.eta0,
est.power_t, est.t_, intercept_decay)
else:
standard_coef, standard_intercept, average_coef, \
average_intercept = average_sgd(coef, intercept, average_coef,
average_intercept,
est.loss_function, penalty_type,
alpha, C, est.l1_ratio, dataset,
n_iter, int(est.fit_intercept),
int(est.verbose), int(est.shuffle),
seed, pos_weight, neg_weight,
learning_rate_type, est.eta0,
est.power_t, est.t_,
intercept_decay,
est.average)
if len(est.classes_) == 2:
est.average_intercept_[0] = average_intercept
else:
est.average_intercept_[i] = average_intercept
return standard_coef, standard_intercept
class BaseSGDClassifier(six.with_metaclass(ABCMeta, BaseSGD,
LinearClassifierMixin)):
loss_functions = {
"hinge": (Hinge, 1.0),
"squared_hinge": (SquaredHinge, 1.0),
"perceptron": (Hinge, 0.0),
"log": (Log, ),
"modified_huber": (ModifiedHuber, ),
"squared_loss": (SquaredLoss, ),
"huber": (Huber, DEFAULT_EPSILON),
"epsilon_insensitive": (EpsilonInsensitive, DEFAULT_EPSILON),
"squared_epsilon_insensitive": (SquaredEpsilonInsensitive,
DEFAULT_EPSILON),
}
@abstractmethod
def __init__(self, loss="hinge", penalty='l2', alpha=0.0001, l1_ratio=0.15,
fit_intercept=True, n_iter=5, shuffle=True, verbose=0,
epsilon=DEFAULT_EPSILON, n_jobs=1, random_state=None,
learning_rate="optimal", eta0=0.0, power_t=0.5,
class_weight=None, warm_start=False, average=False):
super(BaseSGDClassifier, self).__init__(loss=loss, penalty=penalty,
alpha=alpha, l1_ratio=l1_ratio,
fit_intercept=fit_intercept,
n_iter=n_iter, shuffle=shuffle,
verbose=verbose,
epsilon=epsilon,
random_state=random_state,
learning_rate=learning_rate,
eta0=eta0, power_t=power_t,
warm_start=warm_start,
average=average)
self.class_weight = class_weight
self.classes_ = None
self.n_jobs = int(n_jobs)
def _partial_fit(self, X, y, alpha, C,
loss, learning_rate, n_iter,
classes, sample_weight,
coef_init, intercept_init):
X, y = check_X_y(X, y, 'csr', dtype=np.float64, order="C")
n_samples, n_features = X.shape
self._validate_params()
_check_partial_fit_first_call(self, classes)
n_classes = self.classes_.shape[0]
# Allocate datastructures from input arguments
self._expanded_class_weight = compute_class_weight(self.class_weight,
self.classes_, y)
sample_weight = self._validate_sample_weight(sample_weight, n_samples)
if self.coef_ is None or coef_init is not None:
self._allocate_parameter_mem(n_classes, n_features,
coef_init, intercept_init)
elif n_features != self.coef_.shape[-1]:
raise ValueError("Number of features %d does not match previous data %d."
% (n_features, self.coef_.shape[-1]))
self.loss_function = self._get_loss_function(loss)
if self.t_ is None:
self.t_ = 1.0
# delegate to concrete training procedure
if n_classes > 2:
self._fit_multiclass(X, y, alpha=alpha, C=C,
learning_rate=learning_rate,
sample_weight=sample_weight, n_iter=n_iter)
elif n_classes == 2:
self._fit_binary(X, y, alpha=alpha, C=C,
learning_rate=learning_rate,
sample_weight=sample_weight, n_iter=n_iter)
else:
raise ValueError("The number of class labels must be "
"greater than one.")
return self
def _fit(self, X, y, alpha, C, loss, learning_rate, coef_init=None,
intercept_init=None, sample_weight=None):
if hasattr(self, "classes_"):
self.classes_ = None
X, y = check_X_y(X, y, 'csr', dtype=np.float64, order="C")
n_samples, n_features = X.shape
# labels can be encoded as float, int, or string literals
        # np.unique sorts in ascending order; the largest class id is the
        # positive class
classes = np.unique(y)
if self.warm_start and self.coef_ is not None:
if coef_init is None:
coef_init = self.coef_
if intercept_init is None:
intercept_init = self.intercept_
else:
self.coef_ = None
self.intercept_ = None
if self.average > 0:
self.standard_coef_ = self.coef_
self.standard_intercept_ = self.intercept_
self.average_coef_ = None
self.average_intercept_ = None
# Clear iteration count for multiple call to fit.
self.t_ = None
self._partial_fit(X, y, alpha, C, loss, learning_rate, self.n_iter,
classes, sample_weight, coef_init, intercept_init)
return self
def _fit_binary(self, X, y, alpha, C, sample_weight,
learning_rate, n_iter):
"""Fit a binary classifier on X and y. """
coef, intercept = fit_binary(self, 1, X, y, alpha, C,
learning_rate, n_iter,
self._expanded_class_weight[1],
self._expanded_class_weight[0],
sample_weight)
self.t_ += n_iter * X.shape[0]
# need to be 2d
if self.average > 0:
if self.average <= self.t_ - 1:
self.coef_ = self.average_coef_.reshape(1, -1)
self.intercept_ = self.average_intercept_
else:
self.coef_ = self.standard_coef_.reshape(1, -1)
self.standard_intercept_ = np.atleast_1d(intercept)
self.intercept_ = self.standard_intercept_
else:
self.coef_ = coef.reshape(1, -1)
# intercept is a float, need to convert it to an array of length 1
self.intercept_ = np.atleast_1d(intercept)
def _fit_multiclass(self, X, y, alpha, C, learning_rate,
sample_weight, n_iter):
"""Fit a multi-class classifier by combining binary classifiers
Each binary classifier predicts one class versus all others. This
strategy is called OVA: One Versus All.
"""
# Use joblib to fit OvA in parallel.
result = Parallel(n_jobs=self.n_jobs, backend="threading",
verbose=self.verbose)(
delayed(fit_binary)(self, i, X, y, alpha, C, learning_rate,
n_iter, self._expanded_class_weight[i], 1.,
sample_weight)
for i in range(len(self.classes_)))
for i, (_, intercept) in enumerate(result):
self.intercept_[i] = intercept
self.t_ += n_iter * X.shape[0]
if self.average > 0:
if self.average <= self.t_ - 1.0:
self.coef_ = self.average_coef_
self.intercept_ = self.average_intercept_
else:
self.coef_ = self.standard_coef_
self.standard_intercept_ = np.atleast_1d(intercept)
self.intercept_ = self.standard_intercept_
def partial_fit(self, X, y, classes=None, sample_weight=None):
"""Fit linear model with Stochastic Gradient Descent.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Subset of the training data
y : numpy array, shape (n_samples,)
Subset of the target values
classes : array, shape (n_classes,)
Classes across all calls to partial_fit.
            Can be obtained via `np.unique(y_all)`, where y_all is the
target vector of the entire dataset.
This argument is required for the first call to partial_fit
and can be omitted in the subsequent calls.
Note that y doesn't need to contain all labels in `classes`.
sample_weight : array-like, shape (n_samples,), optional
Weights applied to individual samples.
If not provided, uniform weights are assumed.
Returns
-------
self : returns an instance of self.
"""
if self.class_weight in ['balanced', 'auto']:
raise ValueError("class_weight '{0}' is not supported for "
"partial_fit. In order to use 'balanced' weights, "
"use compute_class_weight('{0}', classes, y). "
"In place of y you can us a large enough sample "
"of the full training set target to properly "
"estimate the class frequency distributions. "
"Pass the resulting weights as the class_weight "
"parameter.".format(self.class_weight))
return self._partial_fit(X, y, alpha=self.alpha, C=1.0, loss=self.loss,
learning_rate=self.learning_rate, n_iter=1,
classes=classes, sample_weight=sample_weight,
coef_init=None, intercept_init=None)
def fit(self, X, y, coef_init=None, intercept_init=None, sample_weight=None):
"""Fit linear model with Stochastic Gradient Descent.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Training data
y : numpy array, shape (n_samples,)
Target values
coef_init : array, shape (n_classes, n_features)
The initial coefficients to warm-start the optimization.
intercept_init : array, shape (n_classes,)
The initial intercept to warm-start the optimization.
sample_weight : array-like, shape (n_samples,), optional
Weights applied to individual samples.
If not provided, uniform weights are assumed. These weights will
be multiplied with class_weight (passed through the
            constructor) if class_weight is specified
Returns
-------
self : returns an instance of self.
"""
return self._fit(X, y, alpha=self.alpha, C=1.0,
loss=self.loss, learning_rate=self.learning_rate,
coef_init=coef_init, intercept_init=intercept_init,
sample_weight=sample_weight)
class SGDClassifier(BaseSGDClassifier, _LearntSelectorMixin):
"""Linear classifiers (SVM, logistic regression, a.o.) with SGD training.
This estimator implements regularized linear models with stochastic
gradient descent (SGD) learning: the gradient of the loss is estimated
each sample at a time and the model is updated along the way with a
decreasing strength schedule (aka learning rate). SGD allows minibatch
(online/out-of-core) learning, see the partial_fit method.
For best results using the default learning rate schedule, the data should
have zero mean and unit variance.
This implementation works with data represented as dense or sparse arrays
of floating point values for the features. The model it fits can be
controlled with the loss parameter; by default, it fits a linear support
vector machine (SVM).
The regularizer is a penalty added to the loss function that shrinks model
    parameters towards the zero vector using either the squared Euclidean norm
L2 or the absolute norm L1 or a combination of both (Elastic Net). If the
parameter update crosses the 0.0 value because of the regularizer, the
update is truncated to 0.0 to allow for learning sparse models and achieve
online feature selection.
Read more in the :ref:`User Guide <sgd>`.
Parameters
----------
loss : str, 'hinge', 'log', 'modified_huber', 'squared_hinge',\
'perceptron', or a regression loss: 'squared_loss', 'huber',\
'epsilon_insensitive', or 'squared_epsilon_insensitive'
The loss function to be used. Defaults to 'hinge', which gives a
linear SVM.
The 'log' loss gives logistic regression, a probabilistic classifier.
'modified_huber' is another smooth loss that brings tolerance to
outliers as well as probability estimates.
'squared_hinge' is like hinge but is quadratically penalized.
'perceptron' is the linear loss used by the perceptron algorithm.
The other losses are designed for regression but can be useful in
classification as well; see SGDRegressor for a description.
penalty : str, 'none', 'l2', 'l1', or 'elasticnet'
The penalty (aka regularization term) to be used. Defaults to 'l2'
which is the standard regularizer for linear SVM models. 'l1' and
'elasticnet' might bring sparsity to the model (feature selection)
not achievable with 'l2'.
alpha : float
Constant that multiplies the regularization term. Defaults to 0.0001
l1_ratio : float
The Elastic Net mixing parameter, with 0 <= l1_ratio <= 1.
l1_ratio=0 corresponds to L2 penalty, l1_ratio=1 to L1.
Defaults to 0.15.
fit_intercept : bool
Whether the intercept should be estimated or not. If False, the
data is assumed to be already centered. Defaults to True.
n_iter : int, optional
The number of passes over the training data (aka epochs). The number
of iterations is set to 1 if using partial_fit.
Defaults to 5.
shuffle : bool, optional
Whether or not the training data should be shuffled after each epoch.
Defaults to True.
random_state : int seed, RandomState instance, or None (default)
The seed of the pseudo random number generator to use when
shuffling the data.
verbose : integer, optional
The verbosity level
epsilon : float
Epsilon in the epsilon-insensitive loss functions; only if `loss` is
'huber', 'epsilon_insensitive', or 'squared_epsilon_insensitive'.
For 'huber', determines the threshold at which it becomes less
important to get the prediction exactly right.
For epsilon-insensitive, any differences between the current prediction
and the correct label are ignored if they are less than this threshold.
n_jobs : integer, optional
The number of CPUs to use to do the OVA (One Versus All, for
multi-class problems) computation. -1 means 'all CPUs'. Defaults
to 1.
learning_rate : string, optional
The learning rate schedule:
constant: eta = eta0
optimal: eta = 1.0 / (t + t0) [default]
invscaling: eta = eta0 / pow(t, power_t)
where t0 is chosen by a heuristic proposed by Leon Bottou.
eta0 : double
The initial learning rate for the 'constant' or 'invscaling'
schedules. The default value is 0.0 as eta0 is not used by the
default schedule 'optimal'.
power_t : double
The exponent for inverse scaling learning rate [default 0.5].
class_weight : dict, {class_label: weight} or "balanced" or None, optional
Preset for the class_weight fit parameter.
Weights associated with classes. If not given, all classes
are supposed to have weight one.
The "balanced" mode uses the values of y to automatically adjust
weights inversely proportional to class frequencies in the input data
as ``n_samples / (n_classes * np.bincount(y))``
warm_start : bool, optional
When set to True, reuse the solution of the previous call to fit as
initialization, otherwise, just erase the previous solution.
average : bool or int, optional
When set to True, computes the averaged SGD weights and stores the
result in the ``coef_`` attribute. If set to an int greater than 1,
averaging will begin once the total number of samples seen reaches
average. So average=10 will begin averaging after seeing 10 samples.
Attributes
----------
coef_ : array, shape (1, n_features) if n_classes == 2 else (n_classes,\
n_features)
Weights assigned to the features.
intercept_ : array, shape (1,) if n_classes == 2 else (n_classes,)
Constants in decision function.
Examples
--------
>>> import numpy as np
>>> from sklearn import linear_model
>>> X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
>>> Y = np.array([1, 1, 2, 2])
>>> clf = linear_model.SGDClassifier()
>>> clf.fit(X, Y)
... #doctest: +NORMALIZE_WHITESPACE
SGDClassifier(alpha=0.0001, average=False, class_weight=None, epsilon=0.1,
eta0=0.0, fit_intercept=True, l1_ratio=0.15,
learning_rate='optimal', loss='hinge', n_iter=5, n_jobs=1,
penalty='l2', power_t=0.5, random_state=None, shuffle=True,
verbose=0, warm_start=False)
>>> print(clf.predict([[-0.8, -1]]))
[1]
See also
--------
LinearSVC, LogisticRegression, Perceptron
"""
def __init__(self, loss="hinge", penalty='l2', alpha=0.0001, l1_ratio=0.15,
fit_intercept=True, n_iter=5, shuffle=True, verbose=0,
epsilon=DEFAULT_EPSILON, n_jobs=1, random_state=None,
learning_rate="optimal", eta0=0.0, power_t=0.5,
class_weight=None, warm_start=False, average=False):
super(SGDClassifier, self).__init__(
loss=loss, penalty=penalty, alpha=alpha, l1_ratio=l1_ratio,
fit_intercept=fit_intercept, n_iter=n_iter, shuffle=shuffle,
verbose=verbose, epsilon=epsilon, n_jobs=n_jobs,
random_state=random_state, learning_rate=learning_rate, eta0=eta0,
power_t=power_t, class_weight=class_weight, warm_start=warm_start,
average=average)
def _check_proba(self):
check_is_fitted(self, "t_")
if self.loss not in ("log", "modified_huber"):
raise AttributeError("probability estimates are not available for"
" loss=%r" % self.loss)
@property
def predict_proba(self):
"""Probability estimates.
This method is only available for log loss and modified Huber loss.
Multiclass probability estimates are derived from binary (one-vs.-rest)
estimates by simple normalization, as recommended by Zadrozny and
Elkan.
Binary probability estimates for loss="modified_huber" are given by
(clip(decision_function(X), -1, 1) + 1) / 2.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Returns
-------
array, shape (n_samples, n_classes)
Returns the probability of the sample for each class in the model,
where classes are ordered as they are in `self.classes_`.
References
----------
Zadrozny and Elkan, "Transforming classifier scores into multiclass
probability estimates", SIGKDD'02,
http://www.research.ibm.com/people/z/zadrozny/kdd2002-Transf.pdf
The justification for the formula in the loss="modified_huber"
        case is in appendix B of:
http://jmlr.csail.mit.edu/papers/volume2/zhang02c/zhang02c.pdf
"""
self._check_proba()
return self._predict_proba
def _predict_proba(self, X):
if self.loss == "log":
return self._predict_proba_lr(X)
elif self.loss == "modified_huber":
binary = (len(self.classes_) == 2)
scores = self.decision_function(X)
if binary:
prob2 = np.ones((scores.shape[0], 2))
prob = prob2[:, 1]
else:
prob = scores
np.clip(scores, -1, 1, prob)
prob += 1.
prob /= 2.
if binary:
prob2[:, 0] -= prob
prob = prob2
else:
# the above might assign zero to all classes, which doesn't
# normalize neatly; work around this to produce uniform
# probabilities
prob_sum = prob.sum(axis=1)
all_zero = (prob_sum == 0)
if np.any(all_zero):
prob[all_zero, :] = 1
prob_sum[all_zero] = len(self.classes_)
# normalize
prob /= prob_sum.reshape((prob.shape[0], -1))
return prob
else:
raise NotImplementedError("predict_(log_)proba only supported when"
" loss='log' or loss='modified_huber' "
"(%r given)" % self.loss)
@property
def predict_log_proba(self):
"""Log of probability estimates.
This method is only available for log loss and modified Huber loss.
When loss="modified_huber", probability estimates may be hard zeros
and ones, so taking the logarithm is not possible.
See ``predict_proba`` for details.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Returns
-------
T : array-like, shape (n_samples, n_classes)
Returns the log-probability of the sample for each class in the
model, where classes are ordered as they are in
`self.classes_`.
"""
self._check_proba()
return self._predict_log_proba
def _predict_log_proba(self, X):
return np.log(self.predict_proba(X))
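# Illustrative sketch (not in the original source): probability estimates are
# exposed only for the probabilistic losses, as documented above.
def _example_predict_proba():
    X = np.array([[-1.0, -1.0], [1.0, 1.0]])
    y = np.array([0, 1])
    clf = SGDClassifier(loss="log", random_state=0).fit(X, y)
    # Rows of the returned array sum to one; with loss="hinge" the same
    # attribute access would raise AttributeError instead.
    return clf.predict_proba(X)  # shape (2, 2)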
class BaseSGDRegressor(BaseSGD, RegressorMixin):
loss_functions = {
"squared_loss": (SquaredLoss, ),
"huber": (Huber, DEFAULT_EPSILON),
"epsilon_insensitive": (EpsilonInsensitive, DEFAULT_EPSILON),
"squared_epsilon_insensitive": (SquaredEpsilonInsensitive,
DEFAULT_EPSILON),
}
@abstractmethod
def __init__(self, loss="squared_loss", penalty="l2", alpha=0.0001,
l1_ratio=0.15, fit_intercept=True, n_iter=5, shuffle=True,
verbose=0, epsilon=DEFAULT_EPSILON, random_state=None,
learning_rate="invscaling", eta0=0.01, power_t=0.25,
warm_start=False, average=False):
super(BaseSGDRegressor, self).__init__(loss=loss, penalty=penalty,
alpha=alpha, l1_ratio=l1_ratio,
fit_intercept=fit_intercept,
n_iter=n_iter, shuffle=shuffle,
verbose=verbose,
epsilon=epsilon,
random_state=random_state,
learning_rate=learning_rate,
eta0=eta0, power_t=power_t,
warm_start=warm_start,
average=average)
def _partial_fit(self, X, y, alpha, C, loss, learning_rate,
n_iter, sample_weight,
coef_init, intercept_init):
X, y = check_X_y(X, y, "csr", copy=False, order='C', dtype=np.float64)
y = astype(y, np.float64, copy=False)
n_samples, n_features = X.shape
self._validate_params()
# Allocate datastructures from input arguments
sample_weight = self._validate_sample_weight(sample_weight, n_samples)
if self.coef_ is None:
self._allocate_parameter_mem(1, n_features,
coef_init, intercept_init)
elif n_features != self.coef_.shape[-1]:
raise ValueError("Number of features %d does not match previous data %d."
% (n_features, self.coef_.shape[-1]))
if self.average > 0 and self.average_coef_ is None:
self.average_coef_ = np.zeros(n_features,
dtype=np.float64,
order="C")
self.average_intercept_ = np.zeros(1,
dtype=np.float64,
order="C")
self._fit_regressor(X, y, alpha, C, loss, learning_rate,
sample_weight, n_iter)
return self
def partial_fit(self, X, y, sample_weight=None):
"""Fit linear model with Stochastic Gradient Descent.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Subset of training data
y : numpy array of shape (n_samples,)
Subset of target values
sample_weight : array-like, shape (n_samples,), optional
Weights applied to individual samples.
If not provided, uniform weights are assumed.
Returns
-------
self : returns an instance of self.
"""
return self._partial_fit(X, y, self.alpha, C=1.0,
loss=self.loss,
learning_rate=self.learning_rate, n_iter=1,
sample_weight=sample_weight,
coef_init=None, intercept_init=None)
def _fit(self, X, y, alpha, C, loss, learning_rate, coef_init=None,
intercept_init=None, sample_weight=None):
if self.warm_start and self.coef_ is not None:
if coef_init is None:
coef_init = self.coef_
if intercept_init is None:
intercept_init = self.intercept_
else:
self.coef_ = None
self.intercept_ = None
if self.average > 0:
self.standard_intercept_ = self.intercept_
self.standard_coef_ = self.coef_
self.average_coef_ = None
self.average_intercept_ = None
# Clear iteration count for multiple call to fit.
self.t_ = None
return self._partial_fit(X, y, alpha, C, loss, learning_rate,
self.n_iter, sample_weight,
coef_init, intercept_init)
def fit(self, X, y, coef_init=None, intercept_init=None,
sample_weight=None):
"""Fit linear model with Stochastic Gradient Descent.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Training data
y : numpy array, shape (n_samples,)
Target values
coef_init : array, shape (n_features,)
The initial coefficients to warm-start the optimization.
intercept_init : array, shape (1,)
The initial intercept to warm-start the optimization.
sample_weight : array-like, shape (n_samples,), optional
Weights applied to individual samples (1. for unweighted).
Returns
-------
self : returns an instance of self.
"""
return self._fit(X, y, alpha=self.alpha, C=1.0,
loss=self.loss, learning_rate=self.learning_rate,
coef_init=coef_init,
intercept_init=intercept_init,
sample_weight=sample_weight)
@deprecated(" and will be removed in 0.19.")
def decision_function(self, X):
"""Predict using the linear model
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Returns
-------
array, shape (n_samples,)
Predicted target values per element in X.
"""
return self._decision_function(X)
def _decision_function(self, X):
"""Predict using the linear model
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Returns
-------
array, shape (n_samples,)
Predicted target values per element in X.
"""
check_is_fitted(self, ["t_", "coef_", "intercept_"], all_or_any=all)
X = check_array(X, accept_sparse='csr')
scores = safe_sparse_dot(X, self.coef_.T,
dense_output=True) + self.intercept_
return scores.ravel()
def predict(self, X):
"""Predict using the linear model
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Returns
-------
array, shape (n_samples,)
Predicted target values per element in X.
"""
return self._decision_function(X)
def _fit_regressor(self, X, y, alpha, C, loss, learning_rate,
sample_weight, n_iter):
dataset, intercept_decay = _make_dataset(X, y, sample_weight)
loss_function = self._get_loss_function(loss)
penalty_type = self._get_penalty_type(self.penalty)
learning_rate_type = self._get_learning_rate_type(learning_rate)
if self.t_ is None:
self.t_ = 1.0
random_state = check_random_state(self.random_state)
# numpy mtrand expects a C long which is a signed 32 bit integer under
# Windows
seed = random_state.randint(0, np.iinfo(np.int32).max)
if self.average > 0:
self.standard_coef_, self.standard_intercept_, \
self.average_coef_, self.average_intercept_ =\
average_sgd(self.standard_coef_,
self.standard_intercept_[0],
self.average_coef_,
self.average_intercept_[0],
loss_function,
penalty_type,
alpha, C,
self.l1_ratio,
dataset,
n_iter,
int(self.fit_intercept),
int(self.verbose),
int(self.shuffle),
seed,
1.0, 1.0,
learning_rate_type,
self.eta0, self.power_t, self.t_,
intercept_decay, self.average)
self.average_intercept_ = np.atleast_1d(self.average_intercept_)
self.standard_intercept_ = np.atleast_1d(self.standard_intercept_)
self.t_ += n_iter * X.shape[0]
if self.average <= self.t_ - 1.0:
self.coef_ = self.average_coef_
self.intercept_ = self.average_intercept_
else:
self.coef_ = self.standard_coef_
self.intercept_ = self.standard_intercept_
else:
self.coef_, self.intercept_ = \
plain_sgd(self.coef_,
self.intercept_[0],
loss_function,
penalty_type,
alpha, C,
self.l1_ratio,
dataset,
n_iter,
int(self.fit_intercept),
int(self.verbose),
int(self.shuffle),
seed,
1.0, 1.0,
learning_rate_type,
self.eta0, self.power_t, self.t_,
intercept_decay)
self.t_ += n_iter * X.shape[0]
self.intercept_ = np.atleast_1d(self.intercept_)
class SGDRegressor(BaseSGDRegressor, _LearntSelectorMixin):
"""Linear model fitted by minimizing a regularized empirical loss with SGD
SGD stands for Stochastic Gradient Descent: the gradient of the loss is
    estimated one sample at a time and the model is updated along the way with
a decreasing strength schedule (aka learning rate).
The regularizer is a penalty added to the loss function that shrinks model
    parameters towards the zero vector using either the squared Euclidean norm
L2 or the absolute norm L1 or a combination of both (Elastic Net). If the
parameter update crosses the 0.0 value because of the regularizer, the
update is truncated to 0.0 to allow for learning sparse models and achieve
online feature selection.
This implementation works with data represented as dense numpy arrays of
floating point values for the features.
Read more in the :ref:`User Guide <sgd>`.
Parameters
----------
loss : str, 'squared_loss', 'huber', 'epsilon_insensitive', \
or 'squared_epsilon_insensitive'
The loss function to be used. Defaults to 'squared_loss' which refers
to the ordinary least squares fit. 'huber' modifies 'squared_loss' to
focus less on getting outliers correct by switching from squared to
linear loss past a distance of epsilon. 'epsilon_insensitive' ignores
errors less than epsilon and is linear past that; this is the loss
function used in SVR. 'squared_epsilon_insensitive' is the same but
becomes squared loss past a tolerance of epsilon.
penalty : str, 'none', 'l2', 'l1', or 'elasticnet'
The penalty (aka regularization term) to be used. Defaults to 'l2'
which is the standard regularizer for linear SVM models. 'l1' and
'elasticnet' might bring sparsity to the model (feature selection)
not achievable with 'l2'.
alpha : float
Constant that multiplies the regularization term. Defaults to 0.0001
l1_ratio : float
The Elastic Net mixing parameter, with 0 <= l1_ratio <= 1.
l1_ratio=0 corresponds to L2 penalty, l1_ratio=1 to L1.
Defaults to 0.15.
fit_intercept : bool
Whether the intercept should be estimated or not. If False, the
data is assumed to be already centered. Defaults to True.
n_iter : int, optional
The number of passes over the training data (aka epochs). The number
of iterations is set to 1 if using partial_fit.
Defaults to 5.
shuffle : bool, optional
Whether or not the training data should be shuffled after each epoch.
Defaults to True.
random_state : int seed, RandomState instance, or None (default)
The seed of the pseudo random number generator to use when
shuffling the data.
verbose : integer, optional
The verbosity level.
epsilon : float
Epsilon in the epsilon-insensitive loss functions; only if `loss` is
'huber', 'epsilon_insensitive', or 'squared_epsilon_insensitive'.
For 'huber', determines the threshold at which it becomes less
important to get the prediction exactly right.
For epsilon-insensitive, any differences between the current prediction
and the correct label are ignored if they are less than this threshold.
learning_rate : string, optional
The learning rate:
constant: eta = eta0
optimal: eta = 1.0/(alpha * t)
invscaling: eta = eta0 / pow(t, power_t) [default]
eta0 : double, optional
The initial learning rate [default 0.01].
power_t : double, optional
The exponent for inverse scaling learning rate [default 0.25].
warm_start : bool, optional
When set to True, reuse the solution of the previous call to fit as
initialization, otherwise, just erase the previous solution.
average : bool or int, optional
When set to True, computes the averaged SGD weights and stores the
result in the ``coef_`` attribute. If set to an int greater than 1,
averaging will begin once the total number of samples seen reaches
        average. So ``average=10`` will begin averaging after seeing 10 samples.
Attributes
----------
coef_ : array, shape (n_features,)
Weights assigned to the features.
intercept_ : array, shape (1,)
The intercept term.
average_coef_ : array, shape (n_features,)
Averaged weights assigned to the features.
average_intercept_ : array, shape (1,)
The averaged intercept term.
Examples
--------
>>> import numpy as np
>>> from sklearn import linear_model
>>> n_samples, n_features = 10, 5
>>> np.random.seed(0)
>>> y = np.random.randn(n_samples)
>>> X = np.random.randn(n_samples, n_features)
>>> clf = linear_model.SGDRegressor()
>>> clf.fit(X, y)
... #doctest: +NORMALIZE_WHITESPACE
SGDRegressor(alpha=0.0001, average=False, epsilon=0.1, eta0=0.01,
fit_intercept=True, l1_ratio=0.15, learning_rate='invscaling',
loss='squared_loss', n_iter=5, penalty='l2', power_t=0.25,
random_state=None, shuffle=True, verbose=0, warm_start=False)
See also
--------
Ridge, ElasticNet, Lasso, SVR
"""
def __init__(self, loss="squared_loss", penalty="l2", alpha=0.0001,
l1_ratio=0.15, fit_intercept=True, n_iter=5, shuffle=True,
verbose=0, epsilon=DEFAULT_EPSILON, random_state=None,
learning_rate="invscaling", eta0=0.01, power_t=0.25,
warm_start=False, average=False):
super(SGDRegressor, self).__init__(loss=loss, penalty=penalty,
alpha=alpha, l1_ratio=l1_ratio,
fit_intercept=fit_intercept,
n_iter=n_iter, shuffle=shuffle,
verbose=verbose,
epsilon=epsilon,
random_state=random_state,
learning_rate=learning_rate,
eta0=eta0, power_t=power_t,
warm_start=warm_start,
average=average)
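    # Hedged addendum (not part of the original source): as documented above,
    # ``average`` accepts either a bool or an int, e.g.
    # >>> SGDRegressor(average=True)  # average weights from the first sample
    # >>> SGDRegressor(average=10)    # begin averaging after 10 samples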
| bsd-3-clause |
negrinho/deep_architect | examples/benchmarks/main.py | 1 | 3186 | # Run configs to make it easier to run the code.
import deep_architect.search_logging as sl
import deep_architect.utils as ut
# Make sure that only one GPU is visible.
if __name__ == '__main__':
cfg = ut.get_config()
if cfg['use_gpu']:
import deep_architect.contrib.misc.gpu_utils as gpu_utils
gpu_id = gpu_utils.get_available_gpu(0.1, 5.0)
print("Using GPU %d" % gpu_id)
assert gpu_id is not None
gpu_utils.set_visible_gpus([gpu_id])
from deep_architect.contrib.misc.datasets.loaders import load_mnist
from deep_architect.contrib.misc.evaluators.tensorflow.classification import SimpleClassifierEvaluator
from deep_architect.contrib.misc.datasets.dataset import InMemoryDataset
import deep_architect.visualization as vi
import searchers as local_se
import search_spaces as local_ss
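# Hedged sketch (not part of the original script): ut.get_config() is assumed
# to load a config mapping; the keys referenced below are illustrated here
# with hypothetical values.
EXAMPLE_CONFIG = {
    "use_gpu": False,
    "search_name": "benchmark",
    "max_eval_time_in_minutes": 10.0,
    "num_repetitions": 3,
    "search_space_name_lst": ["dnn"],
    "searcher_name_lst": ["random"],
    "logs_folderpath": "logs",
    "delete_if_exists": False,
    "num_samples": 16,
}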
def run_searcher(searcher, evaluator, num_samples, get_evaluation_logger):
for idx in range(num_samples):
evaluation_logger = get_evaluation_logger(idx)
(inputs, outputs, hyperp_value_lst,
searcher_eval_token) = searcher.sample()
results = evaluator.eval(inputs, outputs)
evaluation_logger.log_config(hyperp_value_lst, searcher_eval_token)
evaluation_logger.log_results(results)
vi.draw_graph(
outputs,
True,
True,
print_to_screen=False,
out_folderpath=evaluation_logger.get_user_data_folderpath())
searcher.update(results['validation_accuracy'], searcher_eval_token)
def main():
num_classes = 10
(Xtrain, ytrain, Xval, yval, Xtest, ytest) = load_mnist('data/mnist')
train_dataset = InMemoryDataset(Xtrain, ytrain, True)
val_dataset = InMemoryDataset(Xval, yval, False)
test_dataset = InMemoryDataset(Xtest, ytest, False)
evaluator = SimpleClassifierEvaluator(
train_dataset,
val_dataset,
num_classes,
ut.join_paths(['temp', 'benchmarks', cfg['search_name']]),
max_eval_time_in_minutes=cfg['max_eval_time_in_minutes'],
log_output_to_terminal=True,
test_dataset=test_dataset)
for rep_i in range(cfg['num_repetitions']):
for search_space_name in cfg['search_space_name_lst']:
for searcher_name in cfg['searcher_name_lst']:
folderpath = ut.join_paths([
cfg['logs_folderpath'], cfg['search_name'],
search_space_name, searcher_name
])
                sl.create_search_folderpath(
                    folderpath,
                    'rep%d' % rep_i,
                    abort_if_exists=True,
                    delete_if_exists=cfg["delete_if_exists"],
                    create_parent_folders=True)
                # The original snippet referenced `search_logger` below without
                # defining it; a SearchLogger from deep_architect's
                # search_logging module is assumed here.
                search_logger = sl.SearchLogger(folderpath, 'rep%d' % rep_i)
search_space_fn = local_ss.name_to_search_space_fn[
search_space_name](num_classes)
searcher = local_se.name_to_get_searcher_fn[searcher_name](
search_space_fn)
                run_searcher(
                    searcher, evaluator,
                    cfg['num_samples'] - search_logger.get_current_evaluation_id(),
                    search_logger.get_evaluation_logger)
if __name__ == '__main__':
main() | mit |
lmcinnes/umap | umap/tests/conftest.py | 1 | 5868 | # ===========================
# Testing (session) Fixture
# ==========================
import pytest
import numpy as np
from scipy import sparse
from sklearn.datasets import load_iris
from umap import UMAP, AlignedUMAP
# Globals, used for all the tests
SEED = 189212 # 0b101110001100011100
np.random.seed(SEED)
# Spatial and Binary Data
# -----------------------
@pytest.fixture(scope="session")
def spatial_data():
# - Spatial Data
spatial_data = np.random.randn(10, 20)
# Add some all zero data for corner case test
return np.vstack([spatial_data, np.zeros((2, 20))])
@pytest.fixture(scope="session")
def binary_data():
binary_data = np.random.choice(a=[False, True], size=(10, 20), p=[0.66, 1 - 0.66])
# Add some all zero data for corner case test
binary_data = np.vstack([binary_data, np.zeros((2, 20), dtype="bool")])
return binary_data
# Sparse Spatial and Binary Data
# ------------------------------
@pytest.fixture(scope="session")
def sparse_spatial_data(spatial_data, binary_data):
return sparse.csr_matrix(spatial_data * binary_data)
@pytest.fixture(scope="session")
def sparse_binary_data(binary_data):
return sparse.csr_matrix(binary_data)
# Nearest Neighbour Data
# -----------------------
@pytest.fixture(scope="session")
def nn_data():
nn_data = np.random.uniform(0, 1, size=(1000, 5))
nn_data = np.vstack(
[nn_data, np.zeros((2, 5))]
) # Add some all zero data for corner case test
return nn_data
@pytest.fixture(scope="session")
def binary_nn_data():
binary_nn_data = np.random.choice(
a=[False, True], size=(1000, 5), p=[0.66, 1 - 0.66]
)
binary_nn_data = np.vstack(
[binary_nn_data, np.zeros((2, 5), dtype="bool")]
) # Add some all zero data for corner case test
return binary_nn_data
@pytest.fixture(scope="session")
def sparse_nn_data():
return sparse.random(1000, 50, density=0.5, format="csr")
# Data With Repetitions
# ---------------------
@pytest.fixture(scope="session")
def repetition_dense():
# Dense data for testing small n
return np.array(
[
[5, 6, 7, 8],
[5, 6, 7, 8],
[5, 6, 7, 8],
[5, 6, 7, 8],
[5, 6, 7, 8],
[5, 6, 7, 8],
[1, 1, 1, 1],
[1, 2, 3, 4],
[1, 1, 2, 1],
]
)
@pytest.fixture(scope="session")
def spatial_repeats(spatial_data):
# spatial data repeats
spatial_repeats = np.vstack(
[np.repeat(spatial_data[0:2], [2, 0], axis=0), spatial_data, np.zeros((2, 20))]
)
    # Add some all zero data for corner case test; the construction above also
    # makes the first three rows identical.
    return spatial_repeats
@pytest.fixture(scope="session")
def binary_repeats(binary_data):
binary_repeats = np.vstack(
[
np.repeat(binary_data[0:2], [2, 0], axis=0),
binary_data,
np.zeros((2, 20), dtype="bool"),
]
)
# Add some all zero data for corner case test. Make the first three rows identical
return binary_repeats
@pytest.fixture(scope="session")
def sparse_spatial_data_repeats(spatial_repeats, binary_repeats):
return sparse.csr_matrix(spatial_repeats * binary_repeats)
@pytest.fixture(scope="session")
def sparse_binary_data_repeats(binary_repeats):
return sparse.csr_matrix(binary_repeats)
@pytest.fixture(scope="session")
def sparse_test_data(nn_data, binary_nn_data):
return sparse.csr_matrix(nn_data * binary_nn_data)
@pytest.fixture(scope="session")
def iris():
return load_iris()
@pytest.fixture(scope="session")
def iris_selection():
return np.random.choice([True, False], 150, replace=True, p=[0.75, 0.25])
@pytest.fixture(scope="session")
def aligned_iris(iris):
slices = [iris.data[i : i + 50] for i in range(0, 125, 25)]
target = [iris.target[i : i + 50] for i in range(0, 125, 25)]
return slices, target
@pytest.fixture(scope="session")
def aligned_iris_relations():
return [{a: a + 25 for a in range(25)} for i in range(4)]
@pytest.fixture(scope="session")
def iris_model(iris):
return UMAP(n_neighbors=10, min_dist=0.01, random_state=42).fit(iris.data)
@pytest.fixture(scope="session")
def iris_model_large(iris):
return UMAP(
n_neighbors=10,
min_dist=0.01,
random_state=42,
force_approximation_algorithm=True,
).fit(iris.data)
@pytest.fixture(scope="session")
def iris_subset_model(iris, iris_selection):
return UMAP(n_neighbors=10, min_dist=0.01, random_state=42).fit(
iris.data[iris_selection]
)
@pytest.fixture(scope="session")
def iris_subset_model_large(iris, iris_selection):
return UMAP(
n_neighbors=10,
min_dist=0.01,
random_state=42,
force_approximation_algorithm=True,
).fit(iris.data[iris_selection])
@pytest.fixture(scope="session")
def supervised_iris_model(iris):
return UMAP(n_neighbors=10, min_dist=0.01, n_epochs=200, random_state=42).fit(
iris.data, iris.target
)
@pytest.fixture(scope="session")
def aligned_iris_model(aligned_iris, aligned_iris_relations):
data, target = aligned_iris
model = AlignedUMAP()
model.fit(data, relations=aligned_iris_relations)
return model
# UMAP Distance Metrics
# ---------------------
@pytest.fixture(scope="session")
def spatial_distances():
return (
"euclidean",
"manhattan",
"chebyshev",
"minkowski",
"hamming",
"canberra",
"braycurtis",
"cosine",
"correlation",
)
@pytest.fixture(scope="session")
def binary_distances():
return (
"jaccard",
"matching",
"dice",
"kulsinski",
"rogerstanimoto",
"russellrao",
"sokalmichener",
"sokalsneath",
"yule",
)
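# Hedged usage sketch (not part of the original conftest): test modules in
# this package consume the session fixtures above by naming them as arguments;
# the function below is underscore-prefixed so pytest will not collect it.
def _example_fixture_usage(iris):
    embedding = UMAP(n_neighbors=10, random_state=42).fit_transform(iris.data)
    assert embedding.shape == (iris.data.shape[0], 2)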
| bsd-3-clause |
arabenjamin/scikit-learn | sklearn/ensemble/tests/test_voting_classifier.py | 140 | 6926 | """Testing for the VotingClassifier (sklearn.ensemble.voting_classifier)."""
import numpy as np
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import VotingClassifier
from sklearn.grid_search import GridSearchCV
from sklearn import datasets
from sklearn import cross_validation
from sklearn.datasets import make_multilabel_classification
from sklearn.svm import SVC
from sklearn.multiclass import OneVsRestClassifier
# Load the iris dataset and randomly permute it
iris = datasets.load_iris()
X, y = iris.data[:, 1:3], iris.target
def test_majority_label_iris():
"""Check classification by majority label on dataset iris."""
clf1 = LogisticRegression(random_state=123)
clf2 = RandomForestClassifier(random_state=123)
clf3 = GaussianNB()
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='hard')
scores = cross_validation.cross_val_score(eclf,
X,
y,
cv=5,
scoring='accuracy')
assert_almost_equal(scores.mean(), 0.95, decimal=2)
def test_tie_situation():
"""Check voting classifier selects smaller class label in tie situation."""
clf1 = LogisticRegression(random_state=123)
clf2 = RandomForestClassifier(random_state=123)
eclf = VotingClassifier(estimators=[('lr', clf1), ('rf', clf2)],
voting='hard')
assert_equal(clf1.fit(X, y).predict(X)[73], 2)
assert_equal(clf2.fit(X, y).predict(X)[73], 1)
assert_equal(eclf.fit(X, y).predict(X)[73], 1)
def test_weights_iris():
"""Check classification by average probabilities on dataset iris."""
clf1 = LogisticRegression(random_state=123)
clf2 = RandomForestClassifier(random_state=123)
clf3 = GaussianNB()
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='soft',
weights=[1, 2, 10])
scores = cross_validation.cross_val_score(eclf,
X,
y,
cv=5,
scoring='accuracy')
assert_almost_equal(scores.mean(), 0.93, decimal=2)
def test_predict_on_toy_problem():
"""Manually check predicted class labels for toy dataset."""
clf1 = LogisticRegression(random_state=123)
clf2 = RandomForestClassifier(random_state=123)
clf3 = GaussianNB()
X = np.array([[-1.1, -1.5],
[-1.2, -1.4],
[-3.4, -2.2],
[1.1, 1.2],
[2.1, 1.4],
[3.1, 2.3]])
y = np.array([1, 1, 1, 2, 2, 2])
    assert_array_equal(clf1.fit(X, y).predict(X), [1, 1, 1, 2, 2, 2])
    assert_array_equal(clf2.fit(X, y).predict(X), [1, 1, 1, 2, 2, 2])
    assert_array_equal(clf3.fit(X, y).predict(X), [1, 1, 1, 2, 2, 2])
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='hard',
weights=[1, 1, 1])
    assert_array_equal(eclf.fit(X, y).predict(X), [1, 1, 1, 2, 2, 2])
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='soft',
weights=[1, 1, 1])
    assert_array_equal(eclf.fit(X, y).predict(X), [1, 1, 1, 2, 2, 2])
def test_predict_proba_on_toy_problem():
"""Calculate predicted probabilities on toy dataset."""
clf1 = LogisticRegression(random_state=123)
clf2 = RandomForestClassifier(random_state=123)
clf3 = GaussianNB()
X = np.array([[-1.1, -1.5], [-1.2, -1.4], [-3.4, -2.2], [1.1, 1.2]])
y = np.array([1, 1, 2, 2])
clf1_res = np.array([[0.59790391, 0.40209609],
[0.57622162, 0.42377838],
[0.50728456, 0.49271544],
[0.40241774, 0.59758226]])
clf2_res = np.array([[0.8, 0.2],
[0.8, 0.2],
[0.2, 0.8],
[0.3, 0.7]])
clf3_res = np.array([[0.9985082, 0.0014918],
[0.99845843, 0.00154157],
[0., 1.],
[0., 1.]])
t00 = (2*clf1_res[0][0] + clf2_res[0][0] + clf3_res[0][0]) / 4
t11 = (2*clf1_res[1][1] + clf2_res[1][1] + clf3_res[1][1]) / 4
t21 = (2*clf1_res[2][1] + clf2_res[2][1] + clf3_res[2][1]) / 4
t31 = (2*clf1_res[3][1] + clf2_res[3][1] + clf3_res[3][1]) / 4
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='soft',
weights=[2, 1, 1])
eclf_res = eclf.fit(X, y).predict_proba(X)
assert_almost_equal(t00, eclf_res[0][0], decimal=1)
assert_almost_equal(t11, eclf_res[1][1], decimal=1)
assert_almost_equal(t21, eclf_res[2][1], decimal=1)
assert_almost_equal(t31, eclf_res[3][1], decimal=1)
try:
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='hard')
eclf.fit(X, y).predict_proba(X)
except AttributeError:
pass
else:
raise AssertionError('AttributeError for voting == "hard"'
' and with predict_proba not raised')
def test_multilabel():
"""Check if error is raised for multilabel classification."""
X, y = make_multilabel_classification(n_classes=2, n_labels=1,
allow_unlabeled=False,
random_state=123)
clf = OneVsRestClassifier(SVC(kernel='linear'))
eclf = VotingClassifier(estimators=[('ovr', clf)], voting='hard')
try:
eclf.fit(X, y)
except NotImplementedError:
return
def test_gridsearch():
"""Check GridSearch support."""
clf1 = LogisticRegression(random_state=1)
clf2 = RandomForestClassifier(random_state=1)
clf3 = GaussianNB()
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='soft')
params = {'lr__C': [1.0, 100.0],
'voting': ['soft', 'hard'],
'weights': [[0.5, 0.5, 0.5], [1.0, 0.5, 0.5]]}
grid = GridSearchCV(estimator=eclf, param_grid=params, cv=5)
grid.fit(iris.data, iris.target)
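# Hedged sketch (not part of the original tests): soft voting, as exercised
# above, is a weighted average of the estimators' predict_proba rows; the
# probabilities below are hypothetical.
def _manual_soft_vote():
    probas = np.array([[0.6, 0.4],   # clf1, weight 2
                       [0.8, 0.2],   # clf2, weight 1
                       [0.9, 0.1]])  # clf3, weight 1
    weights = np.array([2.0, 1.0, 1.0])
    # weighted average over estimators; the resulting row sums to 1
    return (weights[:, None] * probas).sum(axis=0) / weights.sum()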
| bsd-3-clause |
arabenjamin/scikit-learn | sklearn/tests/test_learning_curve.py | 224 | 10791 | # Author: Alexander Fabisch <afabisch@informatik.uni-bremen.de>
#
# License: BSD 3 clause
import sys
from sklearn.externals.six.moves import cStringIO as StringIO
import numpy as np
import warnings
from sklearn.base import BaseEstimator
from sklearn.learning_curve import learning_curve, validation_curve
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.datasets import make_classification
from sklearn.cross_validation import KFold
from sklearn.linear_model import PassiveAggressiveClassifier
class MockImprovingEstimator(BaseEstimator):
"""Dummy classifier to test the learning curve"""
def __init__(self, n_max_train_sizes):
self.n_max_train_sizes = n_max_train_sizes
self.train_sizes = 0
self.X_subset = None
def fit(self, X_subset, y_subset=None):
self.X_subset = X_subset
self.train_sizes = X_subset.shape[0]
return self
def predict(self, X):
raise NotImplementedError
def score(self, X=None, Y=None):
        # training score becomes worse (2 -> 1), test score becomes better (0 -> 1)
if self._is_training_data(X):
return 2. - float(self.train_sizes) / self.n_max_train_sizes
else:
return float(self.train_sizes) / self.n_max_train_sizes
def _is_training_data(self, X):
return X is self.X_subset
class MockIncrementalImprovingEstimator(MockImprovingEstimator):
"""Dummy classifier that provides partial_fit"""
def __init__(self, n_max_train_sizes):
super(MockIncrementalImprovingEstimator,
self).__init__(n_max_train_sizes)
self.x = None
def _is_training_data(self, X):
return self.x in X
def partial_fit(self, X, y=None, **params):
self.train_sizes += X.shape[0]
self.x = X[0]
class MockEstimatorWithParameter(BaseEstimator):
"""Dummy classifier to test the validation curve"""
def __init__(self, param=0.5):
self.X_subset = None
self.param = param
def fit(self, X_subset, y_subset):
self.X_subset = X_subset
self.train_sizes = X_subset.shape[0]
return self
def predict(self, X):
raise NotImplementedError
def score(self, X=None, y=None):
return self.param if self._is_training_data(X) else 1 - self.param
def _is_training_data(self, X):
return X is self.X_subset
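# Hedged illustration (not part of the original tests): the mocks above encode
# deterministic learning curves that the linspace assertions below rely on.
# >>> est = MockImprovingEstimator(20).fit(np.zeros((10, 1)))
# >>> est.score(est.X_subset)      # training data: 2 - 10/20
# 1.5
# >>> est.score(np.zeros((3, 1)))  # anything else counts as test data: 10/20
# 0.5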
def test_learning_curve():
X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockImprovingEstimator(20)
with warnings.catch_warnings(record=True) as w:
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y, cv=3, train_sizes=np.linspace(0.1, 1.0, 10))
if len(w) > 0:
raise RuntimeError("Unexpected warning: %r" % w[0].message)
assert_equal(train_scores.shape, (10, 3))
assert_equal(test_scores.shape, (10, 3))
assert_array_equal(train_sizes, np.linspace(2, 20, 10))
assert_array_almost_equal(train_scores.mean(axis=1),
np.linspace(1.9, 1.0, 10))
assert_array_almost_equal(test_scores.mean(axis=1),
np.linspace(0.1, 1.0, 10))
def test_learning_curve_unsupervised():
X, _ = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockImprovingEstimator(20)
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y=None, cv=3, train_sizes=np.linspace(0.1, 1.0, 10))
assert_array_equal(train_sizes, np.linspace(2, 20, 10))
assert_array_almost_equal(train_scores.mean(axis=1),
np.linspace(1.9, 1.0, 10))
assert_array_almost_equal(test_scores.mean(axis=1),
np.linspace(0.1, 1.0, 10))
def test_learning_curve_verbose():
X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockImprovingEstimator(20)
old_stdout = sys.stdout
sys.stdout = StringIO()
try:
train_sizes, train_scores, test_scores = \
learning_curve(estimator, X, y, cv=3, verbose=1)
finally:
out = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = old_stdout
assert("[learning_curve]" in out)
def test_learning_curve_incremental_learning_not_possible():
X, y = make_classification(n_samples=2, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
# The mockup does not have partial_fit()
estimator = MockImprovingEstimator(1)
assert_raises(ValueError, learning_curve, estimator, X, y,
exploit_incremental_learning=True)
def test_learning_curve_incremental_learning():
X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockIncrementalImprovingEstimator(20)
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y, cv=3, exploit_incremental_learning=True,
train_sizes=np.linspace(0.1, 1.0, 10))
assert_array_equal(train_sizes, np.linspace(2, 20, 10))
assert_array_almost_equal(train_scores.mean(axis=1),
np.linspace(1.9, 1.0, 10))
assert_array_almost_equal(test_scores.mean(axis=1),
np.linspace(0.1, 1.0, 10))
def test_learning_curve_incremental_learning_unsupervised():
X, _ = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockIncrementalImprovingEstimator(20)
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y=None, cv=3, exploit_incremental_learning=True,
train_sizes=np.linspace(0.1, 1.0, 10))
assert_array_equal(train_sizes, np.linspace(2, 20, 10))
assert_array_almost_equal(train_scores.mean(axis=1),
np.linspace(1.9, 1.0, 10))
assert_array_almost_equal(test_scores.mean(axis=1),
np.linspace(0.1, 1.0, 10))
def test_learning_curve_batch_and_incremental_learning_are_equal():
X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
train_sizes = np.linspace(0.2, 1.0, 5)
estimator = PassiveAggressiveClassifier(n_iter=1, shuffle=False)
train_sizes_inc, train_scores_inc, test_scores_inc = \
learning_curve(
estimator, X, y, train_sizes=train_sizes,
cv=3, exploit_incremental_learning=True)
train_sizes_batch, train_scores_batch, test_scores_batch = \
learning_curve(
estimator, X, y, cv=3, train_sizes=train_sizes,
exploit_incremental_learning=False)
assert_array_equal(train_sizes_inc, train_sizes_batch)
assert_array_almost_equal(train_scores_inc.mean(axis=1),
train_scores_batch.mean(axis=1))
assert_array_almost_equal(test_scores_inc.mean(axis=1),
test_scores_batch.mean(axis=1))
def test_learning_curve_n_sample_range_out_of_bounds():
X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockImprovingEstimator(20)
assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
train_sizes=[0, 1])
assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
train_sizes=[0.0, 1.0])
assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
train_sizes=[0.1, 1.1])
assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
train_sizes=[0, 20])
assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
train_sizes=[1, 21])
def test_learning_curve_remove_duplicate_sample_sizes():
X, y = make_classification(n_samples=3, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockImprovingEstimator(2)
train_sizes, _, _ = assert_warns(
RuntimeWarning, learning_curve, estimator, X, y, cv=3,
train_sizes=np.linspace(0.33, 1.0, 3))
assert_array_equal(train_sizes, [1, 2])
def test_learning_curve_with_boolean_indices():
X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockImprovingEstimator(20)
cv = KFold(n=30, n_folds=3)
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y, cv=cv, train_sizes=np.linspace(0.1, 1.0, 10))
assert_array_equal(train_sizes, np.linspace(2, 20, 10))
assert_array_almost_equal(train_scores.mean(axis=1),
np.linspace(1.9, 1.0, 10))
assert_array_almost_equal(test_scores.mean(axis=1),
np.linspace(0.1, 1.0, 10))
def test_validation_curve():
X, y = make_classification(n_samples=2, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
param_range = np.linspace(0, 1, 10)
with warnings.catch_warnings(record=True) as w:
train_scores, test_scores = validation_curve(
MockEstimatorWithParameter(), X, y, param_name="param",
param_range=param_range, cv=2
)
if len(w) > 0:
raise RuntimeError("Unexpected warning: %r" % w[0].message)
assert_array_almost_equal(train_scores.mean(axis=1), param_range)
assert_array_almost_equal(test_scores.mean(axis=1), 1 - param_range)
| bsd-3-clause |
margulies/topography | utils/network_eigenvector_centrality.py | 6 | 2062 | import os, h5py
from time import time
import numpy as np
from scipy import sparse
from sklearn.utils import extmath
"""Import data:
"""
def importData(sub):
f = h5py.File(('/scr/litauen1/%s.hcp.lh.mat' % sub),'r')
data = np.array(f.get('connData'))
cortex = np.array(f.get('cortex')) - 1
return data, cortex
data, cortex = importData('')
print("Computing the principal singular vectors using randomized_svd")
t0 = time()
U, s, V = extmath.randomized_svd(data, 5, n_iter=3)
print("done in %0.3fs" % (time() - t0))
def centrality_scores(X, alpha=0.85, max_iter=100, tol=1e-10):
"""Power iteration computation of the principal eigenvector
This method is also known as Google PageRank and the implementation
is based on the one from the NetworkX project (BSD licensed too)
with copyrights by:
Aric Hagberg <hagberg@lanl.gov>
Dan Schult <dschult@colgate.edu>
Pieter Swart <swart@lanl.gov>
"""
n = X.shape[0]
X = X.copy()
incoming_counts = np.asarray(X.sum(axis=1)).ravel()
print("Normalizing the graph")
for i in incoming_counts.nonzero()[0]:
X.data[X.indptr[i]:X.indptr[i + 1]] *= 1.0 / incoming_counts[i]
dangle = np.asarray(np.where(X.sum(axis=1) == 0, 1.0 / n, 0)).ravel()
scores = np.ones(n, dtype=np.float32) / n # initial guess
for i in range(max_iter):
print("power iteration #%d" % i)
prev_scores = scores
scores = (alpha * (scores * X + np.dot(dangle, prev_scores))
+ (1 - alpha) * prev_scores.sum() / n)
# check convergence: normalized l_inf norm
scores_max = np.abs(scores).max()
if scores_max == 0.0:
scores_max = 1.0
err = np.abs(scores - prev_scores).max() / scores_max
print("error: %0.6f" % err)
if err < n * tol:
return scores
return scores
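# Hedged sanity check (not part of the original script): on a tiny symmetric
# graph the power iteration above should score the hub node (node 0), which
# receives links from both other nodes, highest.
_demo_graph = sparse.csr_matrix(np.array([[0., 1., 1.],
                                          [1., 0., 0.],
                                          [1., 0., 0.]]))
_demo_scores = centrality_scores(_demo_graph, max_iter=50)
assert _demo_scores[0] == _demo_scores.max()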
print("Computing principal eigenvector score using a power iteration method")
t0 = time()
# The original called centrality_scores(X) without defining X; the function
# expects a sparse CSR matrix, so the dense array is converted first (assumed
# intent).
X = sparse.csr_matrix(data)
scores = centrality_scores(X, max_iter=100, tol=1e-10)
print("done in %0.3fs" % (time() - t0)) | mit |
margulies/topography | utils/eigenvector_centrality.py | 6 | 2062 | import os, h5py
from time import time
import numpy as np
from scipy import sparse
from sklearn.utils import extmath
"""Import data:
"""
def importData(sub):
f = h5py.File(('/scr/litauen1/%s.hcp.lh.mat' % sub),'r')
data = np.array(f.get('connData'))
cortex = np.array(f.get('cortex')) - 1
return data, cortex
data, cortex = importData('')
print("Computing the principal singular vectors using randomized_svd")
t0 = time()
U, s, V = extmath.randomized_svd(data, 5, n_iter=3)
print("done in %0.3fs" % (time() - t0))
def centrality_scores(X, alpha=0.85, max_iter=100, tol=1e-10):
"""Power iteration computation of the principal eigenvector
This method is also known as Google PageRank and the implementation
is based on the one from the NetworkX project (BSD licensed too)
with copyrights by:
Aric Hagberg <hagberg@lanl.gov>
Dan Schult <dschult@colgate.edu>
Pieter Swart <swart@lanl.gov>
"""
n = X.shape[0]
X = X.copy()
incoming_counts = np.asarray(X.sum(axis=1)).ravel()
print("Normalizing the graph")
for i in incoming_counts.nonzero()[0]:
X.data[X.indptr[i]:X.indptr[i + 1]] *= 1.0 / incoming_counts[i]
dangle = np.asarray(np.where(X.sum(axis=1) == 0, 1.0 / n, 0)).ravel()
scores = np.ones(n, dtype=np.float32) / n # initial guess
for i in range(max_iter):
print("power iteration #%d" % i)
prev_scores = scores
scores = (alpha * (scores * X + np.dot(dangle, prev_scores))
+ (1 - alpha) * prev_scores.sum() / n)
# check convergence: normalized l_inf norm
scores_max = np.abs(scores).max()
if scores_max == 0.0:
scores_max = 1.0
err = np.abs(scores - prev_scores).max() / scores_max
print("error: %0.6f" % err)
if err < n * tol:
return scores
return scores
print("Computing principal eigenvector score using a power iteration method")
t0 = time()
# The original called centrality_scores(X) without defining X; the function
# expects a sparse CSR matrix, so the dense array is converted first (assumed
# intent).
X = sparse.csr_matrix(data)
scores = centrality_scores(X, max_iter=100, tol=1e-10)
print("done in %0.3fs" % (time() - t0)) | mit |
ChadFulton/statsmodels | statsmodels/graphics/tests/test_tsaplots.py | 1 | 6606 | from statsmodels.compat.python import lmap, BytesIO
from distutils.version import LooseVersion
import numpy as np
import pandas as pd
from numpy.testing import assert_equal, assert_
import pytest
import statsmodels.api as sm
from statsmodels.graphics.tsaplots import (plot_acf, plot_pacf, month_plot,
quarter_plot, seasonal_plot)
import statsmodels.tsa.arima_process as tsp
try:
import matplotlib.pyplot as plt
except ImportError:
pass
pandas_lt_0_19_2 = LooseVersion(pd.__version__) < '0.19.2'
@pytest.mark.matplotlib
def test_plot_acf(close_figures):
# Just test that it runs.
fig = plt.figure()
ax = fig.add_subplot(111)
ar = np.r_[1., -0.9]
ma = np.r_[1., 0.9]
armaprocess = tsp.ArmaProcess(ar, ma)
rs = np.random.RandomState(1234)
acf = armaprocess.generate_sample(100, distrvs=rs.standard_normal)
plot_acf(acf, ax=ax, lags=10)
plot_acf(acf, ax=ax)
plot_acf(acf, ax=ax, alpha=None)
@pytest.mark.matplotlib
def test_plot_acf_irregular(close_figures):
# Just test that it runs.
fig = plt.figure()
ax = fig.add_subplot(111)
ar = np.r_[1., -0.9]
ma = np.r_[1., 0.9]
armaprocess = tsp.ArmaProcess(ar, ma)
rs = np.random.RandomState(1234)
acf = armaprocess.generate_sample(100, distrvs=rs.standard_normal)
plot_acf(acf, ax=ax, lags=np.arange(1, 11))
plot_acf(acf, ax=ax, lags=10, zero=False)
plot_acf(acf, ax=ax, alpha=None, zero=False)
@pytest.mark.matplotlib
def test_plot_pacf(close_figures):
# Just test that it runs.
fig = plt.figure()
ax = fig.add_subplot(111)
ar = np.r_[1., -0.9]
ma = np.r_[1., 0.9]
armaprocess = tsp.ArmaProcess(ar, ma)
rs = np.random.RandomState(1234)
pacf = armaprocess.generate_sample(100, distrvs=rs.standard_normal)
plot_pacf(pacf, ax=ax)
plot_pacf(pacf, ax=ax, alpha=None)
@pytest.mark.matplotlib
def test_plot_pacf_kwargs(close_figures):
# Just test that it runs.
fig = plt.figure()
ax = fig.add_subplot(111)
ar = np.r_[1., -0.9]
ma = np.r_[1., 0.9]
armaprocess = tsp.ArmaProcess(ar, ma)
rs = np.random.RandomState(1234)
pacf = armaprocess.generate_sample(100, distrvs=rs.standard_normal)
buff = BytesIO()
plot_pacf(pacf, ax=ax)
fig.savefig(buff, format='rgba')
buff_linestyle = BytesIO()
fig_linestyle = plt.figure()
ax = fig_linestyle.add_subplot(111)
plot_pacf(pacf, ax=ax, ls='-')
fig_linestyle.savefig(buff_linestyle, format='rgba')
buff_with_vlines = BytesIO()
fig_with_vlines = plt.figure()
ax = fig_with_vlines.add_subplot(111)
vlines_kwargs = {'linestyles': 'dashdot'}
plot_pacf(pacf, ax=ax, vlines_kwargs=vlines_kwargs)
fig_with_vlines.savefig(buff_with_vlines, format='rgba')
buff.seek(0)
buff_linestyle.seek(0)
buff_with_vlines.seek(0)
plain = buff.read()
linestyle = buff_linestyle.read()
with_vlines = buff_with_vlines.read()
assert_(plain != linestyle)
assert_(with_vlines != plain)
assert_(linestyle != with_vlines)
@pytest.mark.matplotlib
def test_plot_acf_kwargs(close_figures):
# Just test that it runs.
fig = plt.figure()
ax = fig.add_subplot(111)
ar = np.r_[1., -0.9]
ma = np.r_[1., 0.9]
armaprocess = tsp.ArmaProcess(ar, ma)
rs = np.random.RandomState(1234)
acf = armaprocess.generate_sample(100, distrvs=rs.standard_normal)
buff = BytesIO()
plot_acf(acf, ax=ax)
fig.savefig(buff, format='rgba')
buff_with_vlines = BytesIO()
fig_with_vlines = plt.figure()
ax = fig_with_vlines.add_subplot(111)
vlines_kwargs = {'linestyles': 'dashdot'}
plot_acf(acf, ax=ax, vlines_kwargs=vlines_kwargs)
fig_with_vlines.savefig(buff_with_vlines, format='rgba')
buff.seek(0)
buff_with_vlines.seek(0)
plain = buff.read()
with_vlines = buff_with_vlines.read()
assert_(with_vlines != plain)
@pytest.mark.matplotlib
def test_plot_pacf_irregular(close_figures):
# Just test that it runs.
fig = plt.figure()
ax = fig.add_subplot(111)
ar = np.r_[1., -0.9]
ma = np.r_[1., 0.9]
armaprocess = tsp.ArmaProcess(ar, ma)
rs = np.random.RandomState(1234)
pacf = armaprocess.generate_sample(100, distrvs=rs.standard_normal)
plot_pacf(pacf, ax=ax, lags=np.arange(1, 11))
plot_pacf(pacf, ax=ax, lags=10, zero=False)
plot_pacf(pacf, ax=ax, alpha=None, zero=False)
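# Hedged usage sketch (not part of the original test module): outside the test
# suite the entry points exercised above are typically called on a series as
# >>> series = np.random.RandomState(0).standard_normal(100)
# >>> fig = plot_acf(series, lags=20)
# >>> fig = plot_pacf(series, lags=20)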
@pytest.mark.skipif(pandas_lt_0_19_2, reason='pandas too old')
@pytest.mark.matplotlib
def test_plot_month(close_figures):
dta = sm.datasets.elnino.load_pandas().data
dta['YEAR'] = dta.YEAR.astype(int).apply(str)
dta = dta.set_index('YEAR').T.unstack()
dates = pd.to_datetime(['-'.join([x[1], x[0]]) for x in dta.index.values])
# test dates argument
fig = month_plot(dta.values, dates=dates, ylabel='el nino')
# test with a TimeSeries DatetimeIndex with no freq
dta.index = pd.DatetimeIndex(dates)
fig = month_plot(dta)
# w freq
dta.index = pd.DatetimeIndex(dates, freq='MS')
fig = month_plot(dta)
# test with a TimeSeries PeriodIndex
dta.index = pd.PeriodIndex(dates, freq='M')
fig = month_plot(dta)
@pytest.mark.skipif(pandas_lt_0_19_2, reason='pandas too old')
@pytest.mark.matplotlib
def test_plot_quarter(close_figures):
dta = sm.datasets.macrodata.load_pandas().data
dates = lmap('Q'.join, zip(dta.year.astype(int).apply(str),
dta.quarter.astype(int).apply(str)))
# test dates argument
quarter_plot(dta.unemp.values, dates)
# test with a DatetimeIndex with no freq
dta.set_index(pd.to_datetime(dates), inplace=True)
quarter_plot(dta.unemp)
# w freq
# see pandas #6631
dta.index = pd.DatetimeIndex(pd.to_datetime(dates), freq='QS-Oct')
quarter_plot(dta.unemp)
# w PeriodIndex
dta.index = pd.PeriodIndex(pd.to_datetime(dates), freq='Q')
quarter_plot(dta.unemp)
@pytest.mark.matplotlib
def test_seasonal_plot(close_figures):
rs = np.random.RandomState(1234)
data = rs.randn(20,12)
data += 6*np.sin(np.arange(12.0)/11*np.pi)[None,:]
data = data.ravel()
months = np.tile(np.arange(1,13),(20,1))
months = months.ravel()
df = pd.DataFrame([data,months],index=['data','months']).T
grouped = df.groupby('months')['data']
labels = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
fig = seasonal_plot(grouped, labels)
ax = fig.get_axes()[0]
output = [tl.get_text() for tl in ax.get_xticklabels()]
assert_equal(labels, output)
| bsd-3-clause |
heli522/scikit-learn | examples/manifold/plot_manifold_sphere.py | 257 | 5101 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
=============================================
Manifold Learning methods on a severed sphere
=============================================
An application of the different :ref:`manifold` techniques
on a spherical data-set. Here one can see the use of
dimensionality reduction in order to gain some intuition
regarding the manifold learning methods. Regarding the dataset,
the poles are cut from the sphere, as well as a thin slice down its
side. This enables the manifold learning techniques to
'spread it open' whilst projecting it onto two dimensions.
For a similar example, where the methods are applied to the
S-curve dataset, see :ref:`example_manifold_plot_compare_methods.py`
Note that the purpose of :ref:`MDS <multidimensional_scaling>` is
to find a low-dimensional representation of the data (here 2D) in
which the distances respect well the distances in the original
high-dimensional space; unlike other manifold-learning algorithms,
it does not seek an isotropic representation of the data in
the low-dimensional space. Here the manifold problem matches fairly
well that of representing a flat map of the Earth, as with
`map projection <http://en.wikipedia.org/wiki/Map_projection>`_
"""
# Author: Jaques Grobler <jaques.grobler@inria.fr>
# License: BSD 3 clause
print(__doc__)
from time import time
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import NullFormatter
from sklearn import manifold
from sklearn.utils import check_random_state
# Next line to silence pyflakes.
Axes3D
# Variables for manifold learning.
n_neighbors = 10
n_samples = 1000
# Create our sphere.
random_state = check_random_state(0)
p = random_state.rand(n_samples) * (2 * np.pi - 0.55)
t = random_state.rand(n_samples) * np.pi
# Sever the poles from the sphere.
indices = ((t < (np.pi - (np.pi / 8))) & (t > ((np.pi / 8))))
colors = p[indices]
x, y, z = np.sin(t[indices]) * np.cos(p[indices]), \
np.sin(t[indices]) * np.sin(p[indices]), \
np.cos(t[indices])
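# Hedged sanity check (not in the original example): every severed-sphere
# point should lie on the unit sphere, up to floating point error.
assert np.allclose(x ** 2 + y ** 2 + z ** 2, 1.0)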
# Plot our dataset.
fig = plt.figure(figsize=(15, 8))
plt.suptitle("Manifold Learning with %i points, %i neighbors"
% (1000, n_neighbors), fontsize=14)
ax = fig.add_subplot(251, projection='3d')
ax.scatter(x, y, z, c=p[indices], cmap=plt.cm.rainbow)
try:
# compatibility matplotlib < 1.0
ax.view_init(40, -10)
except:
pass
sphere_data = np.array([x, y, z]).T
# Perform Locally Linear Embedding Manifold learning
methods = ['standard', 'ltsa', 'hessian', 'modified']
labels = ['LLE', 'LTSA', 'Hessian LLE', 'Modified LLE']
for i, method in enumerate(methods):
t0 = time()
trans_data = manifold\
.LocallyLinearEmbedding(n_neighbors, 2,
method=method).fit_transform(sphere_data).T
t1 = time()
print("%s: %.2g sec" % (methods[i], t1 - t0))
ax = fig.add_subplot(252 + i)
plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
plt.title("%s (%.2g sec)" % (labels[i], t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')
# Perform Isomap Manifold learning.
t0 = time()
trans_data = manifold.Isomap(n_neighbors, n_components=2)\
.fit_transform(sphere_data).T
t1 = time()
print("%s: %.2g sec" % ('ISO', t1 - t0))
ax = fig.add_subplot(257)
plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
plt.title("%s (%.2g sec)" % ('Isomap', t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')
# Perform Multi-dimensional scaling.
t0 = time()
mds = manifold.MDS(2, max_iter=100, n_init=1)
trans_data = mds.fit_transform(sphere_data).T
t1 = time()
print("MDS: %.2g sec" % (t1 - t0))
ax = fig.add_subplot(258)
plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
plt.title("MDS (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')
# Perform Spectral Embedding.
t0 = time()
se = manifold.SpectralEmbedding(n_components=2,
n_neighbors=n_neighbors)
trans_data = se.fit_transform(sphere_data).T
t1 = time()
print("Spectral Embedding: %.2g sec" % (t1 - t0))
ax = fig.add_subplot(259)
plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
plt.title("Spectral Embedding (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')
# Perform t-distributed stochastic neighbor embedding.
t0 = time()
tsne = manifold.TSNE(n_components=2, init='pca', random_state=0)
trans_data = tsne.fit_transform(sphere_data).T
t1 = time()
print("t-SNE: %.2g sec" % (t1 - t0))
ax = fig.add_subplot(2, 5, 10)
plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
plt.title("t-SNE (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')
plt.show()
| bsd-3-clause |
melizalab/arfx | arfx/collect.py | 1 | 8811 | # -*- coding: utf-8 -*-
# -*- mode: python -*-
"""
Specialized script to collect data across channels and entries
Copyright (C) 2018 Dan Meliza <dan // AT // meliza.org>
"""
import os
import operator
import numpy as np
import logging
import arf
from .core import __version__, setup_log
from . import io
log = logging.getLogger("arfx-collect")
def any_type(dset):
return True
def first(dict, fun):
"""For a nested dict, return the first value of fun(subdict)"""
for v in dict.values():
return fun(v)
def all_items_equal(dict, fun):
"""For a nested dict, returns True iff all values of subdict[key] are equal"""
ss = set(fun(v) for v in dict.values())
return len(ss) <= 1
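# Hedged illustration (not part of the original module) of the two dict
# helpers above (`operator` is imported at the top of this file):
# >>> first({"ch0": {"sampling_rate": 30000}}, operator.itemgetter("sampling_rate"))
# 30000
# >>> all_items_equal({"ch0": {"units": "mV"}, "ch1": {"units": "mV"}},
# ...                 operator.itemgetter("units"))
# True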
def channel_properties(entry, channels=None, predicate=any_type):
"""Returns a dict with channel names and required channel properties"""
return {
name: {
"sampling_rate": dset.attrs.get("sampling_rate", None),
"units": dset.attrs.get("units", None),
"dtype": dset.dtype,
"samples": dset.shape[0],
"channels": arf.count_channels(dset),
"chunksize": dset.chunks[0],
}
for name, dset in entry.items()
if predicate(dset) and (channels is None or name in channels)
}
def check_entry_consistency(arfp, entries=None, channels=None, predicate=any_type):
"""Check whether all entries in arfp have the required channels
Raises a warning if units and sampling rates do not match across channels.
entries - if not None, restrict to entries with supplied names
channels - if not None, only check datasets with supplied names
    predicate - a predicate on datasets (e.g. arf.is_time_series)
    If consistent, returns
    [ [included entry names in order of creation],
      {
        channel_name: {'sampling_rate', 'units', 'dtype', 'channels',
                       'chunksize'},
        ...
      }
]
If not consistent across entries, logs an error and returns None. If
sampling rate and units are not consistent within an entry, logs a warning.
"""
import h5py as h5
log.info("checking entry consistency")
# FIXME catch error when file does not track creation order
entry_names = []
channel_props = None
for entry_name in arf.keys_by_creation(arfp):
        if entries is not None and entry_name not in entries:
continue
entry = arfp[entry_name]
if not isinstance(entry, h5.Group):
continue
props = channel_properties(entry, channels, predicate)
sample_counts = set(v.pop("samples") for v in props.values())
if len(sample_counts) > 1:
log.error("sample count differs across channels in entry %s", entry_name)
return
if channel_props is None:
channel_props = props
elif props != channel_props:
log.error("channels in entry %s do not match", entry_name)
return
entry_names.append(entry_name)
return entry_names, channel_props
def iter_entry_chunks(entry, channels, predicate):
"""Iterate through the datasets in entry (that match predicate), yielding chunks"""
from tqdm import tqdm
props = channel_properties(entry, channels, predicate)
nchannels = len(props)
nsamples = first(props, operator.itemgetter("samples"))
dtype = first(props, operator.itemgetter("dtype"))
size = first(props, operator.itemgetter("chunksize"))
log.info(" - '%s' -> %d samples", entry.name, nsamples)
for i in tqdm(range(0, nsamples, size), unit="chunk"):
n = min(size, nsamples - i)
out = np.empty((n, nchannels), dtype=dtype)
log.debug(" - %d - %d", i, i + n)
for j, chan_name in enumerate(props):
dset = entry[chan_name]
out[:, j] = dset[i : i + n]
yield out
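# Hedged CLI sketch (not part of the original module): given the argparse
# options defined below, a typical invocation of the console script would be
#   arfx-collect-sampled -v --channels ch0 ch1 --dtype int16 input.arf out.dat
# where the channel names and file names are hypothetical.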
def collect_sampled_script(argv=None):
from natsort import natsorted
import argparse
p = argparse.ArgumentParser(
prog="arfx-collect-sampled",
description="Collect sampled data from arf files across channels and entries"
"into a flat binary array. The output file can be any format that supports multiple channels; "
"for example, wav or dat (raw binary)",
)
p.add_argument("--version", action="version", version="%(prog)s " + __version__)
p.add_argument(
"-v", "--verbose", help="show verbose log messages", action="store_true"
)
p.add_argument(
"--dry-run",
action="store_true",
help="check entry consistency but don't write file",
)
p.add_argument(
"-d",
"--dtype",
help="convert data to specified type (default is to use as stored)",
)
p.add_argument(
"-c",
"--channels",
metavar="CHANNEL",
nargs="+",
help="list of channels to unpack (default all)",
)
p.add_argument(
"-C",
"--channel-file",
help="file with list of channels to unpack, one per line",
)
p.add_argument(
"-e",
"--entries",
help="list of entries to unpack (default all)",
metavar="ENTRY",
nargs="+",
)
p.add_argument(
"--start",
type=int,
help="if set, only collect data after this time point (in samples)",
)
p.add_argument(
"--stop",
type=int,
help="if set, only collect data before this time point (in samples)",
)
p.add_argument(
"--mountain-params",
action="store_true",
help="create mountainlab params.json file",
)
p.add_argument("arffile", help="the ARF file to unpack")
p.add_argument("outfile", help="the output file (will be overwritten)")
args = p.parse_args(argv)
setup_log(log, args.verbose)
if args.channel_file is not None:
with open(args.channel_file, "rt") as fp:
if args.channels is None:
args.channels = []
for line in fp:
if line.startswith("#"):
continue
else:
args.channels.append(line.strip())
with arf.open_file(args.arffile, "r") as arfp:
log.info("unpacking '%s'", args.arffile)
arf.check_file_version(arfp)
entry_names, channel_props = check_entry_consistency(
arfp, args.entries, args.channels, predicate=arf.is_time_series
)
if not all_items_equal(channel_props, operator.itemgetter("sampling_rate")):
log.warn(" - warning: not all datasets have the same sampling rate")
if not all_items_equal(channel_props, operator.itemgetter("units")):
log.warn(" - warning: not all datasets have the same units")
nentries = len(entry_names)
nchannels = sum(channel_props[c]["channels"] for c in channel_props)
sampling_rate = first(channel_props, operator.itemgetter("sampling_rate"))
if args.dtype is None:
dtype = first(channel_props, operator.itemgetter("dtype"))
else:
dtype = args.dtype
log.info(" - channels (%d):", nchannels)
for cname in natsorted(channel_props):
log.info(" - %s", cname)
if args.mountain_params:
import json
path = os.path.join(os.path.dirname(args.outfile), "params.json")
log.info("writing mountainlab metadata to '%s'", path)
data = {"samplerate": int(sampling_rate), "spike_sign": -1}
with open(path, "wt") as jfp:
json.dump(data, jfp)
log.info("opening '%s' for output", args.outfile)
log.info(" - sampling rate = %f", sampling_rate)
log.info(" - dtype = '%s'", dtype)
log.info(" - entries (%d):", nentries)
if args.dry_run:
log.info(" - dry run, ending script")
return
sample_count = 0
with io.open(
args.outfile,
mode="w",
sampling_rate=sampling_rate,
dtype=dtype,
nchannels=nchannels,
) as ofp:
for entry_name in natsorted(entry_names):
entry = arfp[entry_name]
for chunk in iter_entry_chunks(
entry, args.channels, arf.is_time_series
):
if args.start is None or sample_count > args.start:
ofp.write(chunk.astype(dtype))
sample_count += chunk.shape[0]
if args.stop is not None and sample_count > args.stop:
log.info(
" - stopping as requested after writing %d samples",
sample_count - (args.start or 0),
)
return
| gpl-2.0 |
anntzer/scipy | scipy/spatial/_procrustes.py | 10 | 4427 | """
This module provides functions to perform full Procrustes analysis.
This code was originally written by Justin Kuczynski and ported over from
scikit-bio by Yoshiki Vazquez-Baeza.
"""
import numpy as np
from scipy.linalg import orthogonal_procrustes
__all__ = ['procrustes']
def procrustes(data1, data2):
r"""Procrustes analysis, a similarity test for two data sets.
Each input matrix is a set of points or vectors (the rows of the matrix).
The dimension of the space is the number of columns of each matrix. Given
two identically sized matrices, procrustes standardizes both such that:
- :math:`tr(AA^{T}) = 1`.
- Both sets of points are centered around the origin.
Procrustes ([1]_, [2]_) then applies the optimal transform to the second
matrix (including scaling/dilation, rotations, and reflections) to minimize
:math:`M^{2}=\sum(data1-data2)^{2}`, or the sum of the squares of the
pointwise differences between the two input datasets.
This function was not designed to handle datasets with different numbers of
datapoints (rows). If two data sets have different dimensionality
(different number of columns), simply add columns of zeros to the smaller
of the two.
Parameters
----------
data1 : array_like
        Matrix, n rows represent points in k (columns) space. `data1` is the
        reference data; after it is standardized, the data from `data2` will
        be transformed to fit the pattern in `data1` (must have >1 unique
        points).
data2 : array_like
n rows of data in k space to be fit to `data1`. Must be the same
shape ``(numrows, numcols)`` as data1 (must have >1 unique points).
Returns
-------
mtx1 : array_like
A standardized version of `data1`.
mtx2 : array_like
The orientation of `data2` that best fits `data1`. Centered, but not
necessarily :math:`tr(AA^{T}) = 1`.
disparity : float
:math:`M^{2}` as defined above.
Raises
------
ValueError
If the input arrays are not two-dimensional.
If the shape of the input arrays is different.
If the input arrays have zero columns or zero rows.
See Also
--------
scipy.linalg.orthogonal_procrustes
scipy.spatial.distance.directed_hausdorff : Another similarity test
for two data sets
Notes
-----
- The disparity should not depend on the order of the input matrices, but
the output matrices will, as only the first output matrix is guaranteed
to be scaled such that :math:`tr(AA^{T}) = 1`.
    - Duplicate data points are generally ok; duplicating a data point will
      increase its effect on the procrustes fit.
- The disparity scales as the number of points per input matrix.
References
----------
.. [1] Krzanowski, W. J. (2000). "Principles of Multivariate analysis".
.. [2] Gower, J. C. (1975). "Generalized procrustes analysis".
Examples
--------
>>> import numpy as np
>>> from scipy.spatial import procrustes
The matrix ``b`` is a rotated, shifted, scaled and mirrored version of
``a`` here:
>>> a = np.array([[1, 3], [1, 2], [1, 1], [2, 1]], 'd')
>>> b = np.array([[4, -2], [4, -4], [4, -6], [2, -6]], 'd')
>>> mtx1, mtx2, disparity = procrustes(a, b)
>>> round(disparity)
0.0
"""
mtx1 = np.array(data1, dtype=np.double, copy=True)
mtx2 = np.array(data2, dtype=np.double, copy=True)
if mtx1.ndim != 2 or mtx2.ndim != 2:
raise ValueError("Input matrices must be two-dimensional")
if mtx1.shape != mtx2.shape:
raise ValueError("Input matrices must be of same shape")
if mtx1.size == 0:
raise ValueError("Input matrices must be >0 rows and >0 cols")
# translate all the data to the origin
mtx1 -= np.mean(mtx1, 0)
mtx2 -= np.mean(mtx2, 0)
norm1 = np.linalg.norm(mtx1)
norm2 = np.linalg.norm(mtx2)
if norm1 == 0 or norm2 == 0:
raise ValueError("Input matrices must contain >1 unique points")
# change scaling of data (in rows) such that trace(mtx*mtx') = 1
mtx1 /= norm1
mtx2 /= norm2
# transform mtx2 to minimize disparity
R, s = orthogonal_procrustes(mtx1, mtx2)
mtx2 = np.dot(mtx2, R.T) * s
# measure the dissimilarity between the two datasets
disparity = np.sum(np.square(mtx1 - mtx2))
return mtx1, mtx2, disparity
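# Hedged numerical check (not part of the original module): as noted in the
# docstring, the disparity does not depend on the order of the inputs.
# >>> import numpy as np
# >>> a = np.array([[1, 3], [1, 2], [1, 1], [2, 1]], 'd')
# >>> b = np.array([[4, -2], [4, -4], [4, -6], [2, -6]], 'd')
# >>> bool(np.isclose(procrustes(a, b)[2], procrustes(b, a)[2]))
# True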
| bsd-3-clause |
heli522/scikit-learn | sklearn/ensemble/forest.py | 175 | 62555 | """Forest of trees-based ensemble methods
Those methods include random forests and extremely randomized trees.
The module structure is the following:
- The ``BaseForest`` base class implements a common ``fit`` method for all
the estimators in the module. The ``fit`` method of the base ``Forest``
class calls the ``fit`` method of each sub-estimator on random samples
(with replacement, a.k.a. bootstrap) of the training set.
The init of the sub-estimator is further delegated to the
``BaseEnsemble`` constructor.
- The ``ForestClassifier`` and ``ForestRegressor`` base classes further
implement the prediction logic by computing an average of the predicted
outcomes of the sub-estimators.
- The ``RandomForestClassifier`` and ``RandomForestRegressor`` derived
classes provide the user with concrete implementations of
the forest ensemble method using classical, deterministic
``DecisionTreeClassifier`` and ``DecisionTreeRegressor`` as
sub-estimator implementations.
- The ``ExtraTreesClassifier`` and ``ExtraTreesRegressor`` derived
classes provide the user with concrete implementations of the
forest ensemble method using the extremely randomized trees
``ExtraTreeClassifier`` and ``ExtraTreeRegressor`` as
sub-estimator implementations.
Single and multi-output problems are both handled.
"""
# Authors: Gilles Louppe <g.louppe@gmail.com>
# Brian Holt <bdholt1@gmail.com>
# Joly Arnaud <arnaud.v.joly@gmail.com>
# Fares Hedayati <fares.hedayati@gmail.com>
#
# License: BSD 3 clause
from __future__ import division
import warnings
from warnings import warn
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy.sparse import issparse
from ..base import ClassifierMixin, RegressorMixin
from ..externals.joblib import Parallel, delayed
from ..externals import six
from ..feature_selection.from_model import _LearntSelectorMixin
from ..metrics import r2_score
from ..preprocessing import OneHotEncoder
from ..tree import (DecisionTreeClassifier, DecisionTreeRegressor,
ExtraTreeClassifier, ExtraTreeRegressor)
from ..tree._tree import DTYPE, DOUBLE
from ..utils import check_random_state, check_array, compute_sample_weight
from ..utils.validation import DataConversionWarning, NotFittedError
from .base import BaseEnsemble, _partition_estimators
from ..utils.fixes import bincount
__all__ = ["RandomForestClassifier",
"RandomForestRegressor",
"ExtraTreesClassifier",
"ExtraTreesRegressor",
"RandomTreesEmbedding"]
MAX_INT = np.iinfo(np.int32).max
def _generate_sample_indices(random_state, n_samples):
"""Private function used to _parallel_build_trees function."""
random_instance = check_random_state(random_state)
sample_indices = random_instance.randint(0, n_samples, n_samples)
return sample_indices
def _generate_unsampled_indices(random_state, n_samples):
"""Private function used to forest._set_oob_score fuction."""
sample_indices = _generate_sample_indices(random_state, n_samples)
sample_counts = bincount(sample_indices, minlength=n_samples)
unsampled_mask = sample_counts == 0
indices_range = np.arange(n_samples)
unsampled_indices = indices_range[unsampled_mask]
return unsampled_indices
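# Hedged illustration (not part of the original module): for a fixed seed the
# bootstrap indices and the out-of-bag indices partition range(n_samples).
# >>> in_bag = _generate_sample_indices(0, 8)
# >>> out_of_bag = _generate_unsampled_indices(0, 8)
# >>> sorted(set(in_bag) | set(out_of_bag)) == list(range(8))
# True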
def _parallel_build_trees(tree, forest, X, y, sample_weight, tree_idx, n_trees,
verbose=0, class_weight=None):
"""Private function used to fit a single tree in parallel."""
if verbose > 1:
print("building tree %d of %d" % (tree_idx + 1, n_trees))
if forest.bootstrap:
n_samples = X.shape[0]
if sample_weight is None:
curr_sample_weight = np.ones((n_samples,), dtype=np.float64)
else:
curr_sample_weight = sample_weight.copy()
indices = _generate_sample_indices(tree.random_state, n_samples)
sample_counts = bincount(indices, minlength=n_samples)
curr_sample_weight *= sample_counts
if class_weight == 'subsample':
with warnings.catch_warnings():
warnings.simplefilter('ignore', DeprecationWarning)
curr_sample_weight *= compute_sample_weight('auto', y, indices)
elif class_weight == 'balanced_subsample':
curr_sample_weight *= compute_sample_weight('balanced', y, indices)
tree.fit(X, y, sample_weight=curr_sample_weight, check_input=False)
else:
tree.fit(X, y, sample_weight=sample_weight, check_input=False)
return tree
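# Hedged illustration (not part of the original module): the bootstrap branch
# above weights each sample by its draw count, computed with bincount, e.g.
# >>> bincount(np.array([0, 0, 2]), minlength=4)
# array([2, 0, 1, 0])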
def _parallel_helper(obj, methodname, *args, **kwargs):
"""Private helper to workaround Python 2 pickle limitations"""
return getattr(obj, methodname)(*args, **kwargs)
class BaseForest(six.with_metaclass(ABCMeta, BaseEnsemble,
_LearntSelectorMixin)):
"""Base class for forests of trees.
Warning: This class should not be used directly. Use derived classes
instead.
"""
@abstractmethod
def __init__(self,
base_estimator,
n_estimators=10,
estimator_params=tuple(),
bootstrap=False,
oob_score=False,
n_jobs=1,
random_state=None,
verbose=0,
warm_start=False,
class_weight=None):
super(BaseForest, self).__init__(
base_estimator=base_estimator,
n_estimators=n_estimators,
estimator_params=estimator_params)
self.bootstrap = bootstrap
self.oob_score = oob_score
self.n_jobs = n_jobs
self.random_state = random_state
self.verbose = verbose
self.warm_start = warm_start
self.class_weight = class_weight
def apply(self, X):
"""Apply trees in the forest to X, return leaf indices.
Parameters
----------
X : array-like or sparse matrix, shape = [n_samples, n_features]
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
X_leaves : array_like, shape = [n_samples, n_estimators]
For each datapoint x in X and for each tree in the forest,
return the index of the leaf x ends up in.
"""
X = self._validate_X_predict(X)
results = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
backend="threading")(
delayed(_parallel_helper)(tree, 'apply', X, check_input=False)
for tree in self.estimators_)
return np.array(results).T
def fit(self, X, y, sample_weight=None):
"""Build a forest of trees from the training set (X, y).
Parameters
----------
X : array-like or sparse matrix of shape = [n_samples, n_features]
The training input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csc_matrix``.
y : array-like, shape = [n_samples] or [n_samples, n_outputs]
The target values (class labels in classification, real numbers in
regression).
sample_weight : array-like, shape = [n_samples] or None
Sample weights. If None, then samples are equally weighted. Splits
that would create child nodes with net zero or negative weight are
ignored while searching for a split in each node. In the case of
classification, splits are also ignored if they would result in any
single class carrying a negative weight in either child node.
Returns
-------
self : object
Returns self.
"""
# Validate or convert input data
X = check_array(X, dtype=DTYPE, accept_sparse="csc")
if issparse(X):
# Pre-sort indices to avoid that each individual tree of the
# ensemble sorts the indices.
X.sort_indices()
# Remap output
n_samples, self.n_features_ = X.shape
y = np.atleast_1d(y)
if y.ndim == 2 and y.shape[1] == 1:
warn("A column-vector y was passed when a 1d array was"
" expected. Please change the shape of y to "
"(n_samples,), for example using ravel().",
DataConversionWarning, stacklevel=2)
if y.ndim == 1:
# reshape is necessary to preserve the data contiguity against vs
# [:, np.newaxis] that does not.
y = np.reshape(y, (-1, 1))
self.n_outputs_ = y.shape[1]
y, expanded_class_weight = self._validate_y_class_weight(y)
if getattr(y, "dtype", None) != DOUBLE or not y.flags.contiguous:
y = np.ascontiguousarray(y, dtype=DOUBLE)
if expanded_class_weight is not None:
if sample_weight is not None:
sample_weight = sample_weight * expanded_class_weight
else:
sample_weight = expanded_class_weight
# Check parameters
self._validate_estimator()
if not self.bootstrap and self.oob_score:
raise ValueError("Out of bag estimation only available"
" if bootstrap=True")
random_state = check_random_state(self.random_state)
if not self.warm_start:
# Free allocated memory, if any
self.estimators_ = []
n_more_estimators = self.n_estimators - len(self.estimators_)
if n_more_estimators < 0:
raise ValueError('n_estimators=%d must be larger or equal to '
'len(estimators_)=%d when warm_start==True'
% (self.n_estimators, len(self.estimators_)))
elif n_more_estimators == 0:
warn("Warm-start fitting without increasing n_estimators does not "
"fit new trees.")
else:
if self.warm_start and len(self.estimators_) > 0:
# We draw from the random state to get the random state we
# would have got if we hadn't used a warm_start.
random_state.randint(MAX_INT, size=len(self.estimators_))
trees = []
for i in range(n_more_estimators):
tree = self._make_estimator(append=False)
tree.set_params(random_state=random_state.randint(MAX_INT))
trees.append(tree)
# Parallel loop: we use the threading backend as the Cython code
# for fitting the trees is internally releasing the Python GIL
# making threading always more efficient than multiprocessing in
# that case.
trees = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
backend="threading")(
delayed(_parallel_build_trees)(
t, self, X, y, sample_weight, i, len(trees),
verbose=self.verbose, class_weight=self.class_weight)
for i, t in enumerate(trees))
# Collect newly grown trees
self.estimators_.extend(trees)
if self.oob_score:
self._set_oob_score(X, y)
# Decapsulate classes_ attributes
if hasattr(self, "classes_") and self.n_outputs_ == 1:
self.n_classes_ = self.n_classes_[0]
self.classes_ = self.classes_[0]
return self
@abstractmethod
def _set_oob_score(self, X, y):
"""Calculate out of bag predictions and score."""
def _validate_y_class_weight(self, y):
# Default implementation
return y, None
def _validate_X_predict(self, X):
"""Validate X whenever one tries to predict, apply, predict_proba"""
if self.estimators_ is None or len(self.estimators_) == 0:
raise NotFittedError("Estimator not fitted, "
"call `fit` before exploiting the model.")
return self.estimators_[0]._validate_X_predict(X, check_input=True)
@property
def feature_importances_(self):
"""Return the feature importances (the higher, the more important the
feature).
Returns
-------
feature_importances_ : array, shape = [n_features]
"""
if self.estimators_ is None or len(self.estimators_) == 0:
raise NotFittedError("Estimator not fitted, "
"call `fit` before `feature_importances_`.")
all_importances = Parallel(n_jobs=self.n_jobs,
backend="threading")(
delayed(getattr)(tree, 'feature_importances_')
for tree in self.estimators_)
return sum(all_importances) / len(self.estimators_)
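# Illustrative sketch (editorial addition, not part of the original source):
# for a fitted forest ``est``, the ``feature_importances_`` property above is
# equivalent to averaging the per-tree importances by hand:
#
#     per_tree = [t.feature_importances_ for t in est.estimators_]
#     averaged = sum(per_tree) / len(est.estimators_)
#
# ``est`` here is a placeholder name, not defined in this module.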
class ForestClassifier(six.with_metaclass(ABCMeta, BaseForest,
ClassifierMixin)):
"""Base class for forest of trees-based classifiers.
Warning: This class should not be used directly. Use derived classes
instead.
"""
@abstractmethod
def __init__(self,
base_estimator,
n_estimators=10,
estimator_params=tuple(),
bootstrap=False,
oob_score=False,
n_jobs=1,
random_state=None,
verbose=0,
warm_start=False,
class_weight=None):
super(ForestClassifier, self).__init__(
base_estimator,
n_estimators=n_estimators,
estimator_params=estimator_params,
bootstrap=bootstrap,
oob_score=oob_score,
n_jobs=n_jobs,
random_state=random_state,
verbose=verbose,
warm_start=warm_start,
class_weight=class_weight)
def _set_oob_score(self, X, y):
"""Compute out-of-bag score"""
X = check_array(X, dtype=DTYPE, accept_sparse='csr')
n_classes_ = self.n_classes_
n_samples = y.shape[0]
oob_decision_function = []
oob_score = 0.0
predictions = []
for k in range(self.n_outputs_):
predictions.append(np.zeros((n_samples, n_classes_[k])))
for estimator in self.estimators_:
unsampled_indices = _generate_unsampled_indices(
estimator.random_state, n_samples)
p_estimator = estimator.predict_proba(X[unsampled_indices, :],
check_input=False)
if self.n_outputs_ == 1:
p_estimator = [p_estimator]
for k in range(self.n_outputs_):
predictions[k][unsampled_indices, :] += p_estimator[k]
for k in range(self.n_outputs_):
if (predictions[k].sum(axis=1) == 0).any():
warn("Some inputs do not have OOB scores. "
"This probably means too few trees were used "
"to compute any reliable oob estimates.")
decision = (predictions[k] /
predictions[k].sum(axis=1)[:, np.newaxis])
oob_decision_function.append(decision)
oob_score += np.mean(y[:, k] ==
np.argmax(predictions[k], axis=1), axis=0)
if self.n_outputs_ == 1:
self.oob_decision_function_ = oob_decision_function[0]
else:
self.oob_decision_function_ = oob_decision_function
self.oob_score_ = oob_score / self.n_outputs_
def _validate_y_class_weight(self, y):
y = np.copy(y)
expanded_class_weight = None
if self.class_weight is not None:
y_original = np.copy(y)
self.classes_ = []
self.n_classes_ = []
y_store_unique_indices = np.zeros(y.shape, dtype=np.int)
for k in range(self.n_outputs_):
classes_k, y_store_unique_indices[:, k] = np.unique(y[:, k], return_inverse=True)
self.classes_.append(classes_k)
self.n_classes_.append(classes_k.shape[0])
y = y_store_unique_indices
if self.class_weight is not None:
valid_presets = ('auto', 'balanced', 'subsample', 'balanced_subsample')
if isinstance(self.class_weight, six.string_types):
if self.class_weight not in valid_presets:
raise ValueError('Valid presets for class_weight include '
'"balanced" and "balanced_subsample". Given "%s".'
% self.class_weight)
if self.class_weight == "subsample":
warn("class_weight='subsample' is deprecated and will be removed in 0.18."
" It was replaced by class_weight='balanced_subsample' "
"using the balanced strategy.", DeprecationWarning)
if self.warm_start:
warn('class_weight presets "balanced" or "balanced_subsample" are '
'not recommended for warm_start if the fitted data '
'differs from the full dataset. In order to use '
'"balanced" weights, use compute_class_weight("balanced", '
'classes, y). In place of y you can use a large '
'enough sample of the full training set target to '
'properly estimate the class frequency '
'distributions. Pass the resulting weights as the '
'class_weight parameter.')
if (self.class_weight not in ['subsample', 'balanced_subsample'] or
not self.bootstrap):
if self.class_weight == 'subsample':
class_weight = 'auto'
elif self.class_weight == "balanced_subsample":
class_weight = "balanced"
else:
class_weight = self.class_weight
with warnings.catch_warnings():
if class_weight == "auto":
warnings.simplefilter('ignore', DeprecationWarning)
expanded_class_weight = compute_sample_weight(class_weight,
y_original)
return y, expanded_class_weight
def predict(self, X):
"""Predict class for X.
The predicted class of an input sample is a vote by the trees in
the forest, weighted by their probability estimates. That is,
the predicted class is the one with highest mean probability
estimate across the trees.
Parameters
----------
X : array-like or sparse matrix of shape = [n_samples, n_features]
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
y : array of shape = [n_samples] or [n_samples, n_outputs]
The predicted classes.
"""
proba = self.predict_proba(X)
if self.n_outputs_ == 1:
return self.classes_.take(np.argmax(proba, axis=1), axis=0)
else:
n_samples = proba[0].shape[0]
predictions = np.zeros((n_samples, self.n_outputs_))
for k in range(self.n_outputs_):
predictions[:, k] = self.classes_[k].take(np.argmax(proba[k],
axis=1),
axis=0)
return predictions
def predict_proba(self, X):
"""Predict class probabilities for X.
The predicted class probabilities of an input sample are computed as
the mean predicted class probabilities of the trees in the forest. The
class probability of a single tree is the fraction of samples of the same
class in a leaf.
Parameters
----------
X : array-like or sparse matrix of shape = [n_samples, n_features]
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
p : array of shape = [n_samples, n_classes], or a list of n_outputs
such arrays if n_outputs > 1.
The class probabilities of the input samples. The order of the
classes corresponds to that in the attribute `classes_`.
"""
# Check data
X = self._validate_X_predict(X)
# Assign chunk of trees to jobs
n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs)
# Parallel loop
all_proba = Parallel(n_jobs=n_jobs, verbose=self.verbose,
backend="threading")(
delayed(_parallel_helper)(e, 'predict_proba', X,
check_input=False)
for e in self.estimators_)
# Reduce
proba = all_proba[0]
if self.n_outputs_ == 1:
for j in range(1, len(all_proba)):
proba += all_proba[j]
proba /= len(self.estimators_)
else:
for j in range(1, len(all_proba)):
for k in range(self.n_outputs_):
proba[k] += all_proba[j][k]
for k in range(self.n_outputs_):
proba[k] /= self.n_estimators
return proba
def predict_log_proba(self, X):
"""Predict class log-probabilities for X.
The predicted class log-probabilities of an input sample are computed as
the log of the mean predicted class probabilities of the trees in the
forest.
Parameters
----------
X : array-like or sparse matrix of shape = [n_samples, n_features]
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
p : array of shape = [n_samples, n_classes], or a list of n_outputs
such arrays if n_outputs > 1.
The class probabilities of the input samples. The order of the
classes corresponds to that in the attribute `classes_`.
"""
proba = self.predict_proba(X)
if self.n_outputs_ == 1:
return np.log(proba)
else:
for k in range(self.n_outputs_):
proba[k] = np.log(proba[k])
return proba
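# Illustrative sketch (editorial addition, not part of the original source):
# for a single output, the soft-voting reduction in ``predict_proba`` above
# followed by ``predict`` is just an argmax over tree-averaged probabilities.
# With two hypothetical per-tree outputs ``p1`` and ``p2``:
#
#     import numpy as np
#     p1 = np.array([[0.8, 0.2]])
#     p2 = np.array([[0.4, 0.6]])
#     proba = (p1 + p2) / 2.0           # mean probability across trees
#     pred = np.argmax(proba, axis=1)   # array([0]); classes_.take(pred)
#                                       # maps back to the original labels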
class ForestRegressor(six.with_metaclass(ABCMeta, BaseForest, RegressorMixin)):
"""Base class for forest of trees-based regressors.
Warning: This class should not be used directly. Use derived classes
instead.
"""
@abstractmethod
def __init__(self,
base_estimator,
n_estimators=10,
estimator_params=tuple(),
bootstrap=False,
oob_score=False,
n_jobs=1,
random_state=None,
verbose=0,
warm_start=False):
super(ForestRegressor, self).__init__(
base_estimator,
n_estimators=n_estimators,
estimator_params=estimator_params,
bootstrap=bootstrap,
oob_score=oob_score,
n_jobs=n_jobs,
random_state=random_state,
verbose=verbose,
warm_start=warm_start)
def predict(self, X):
"""Predict regression target for X.
The predicted regression target of an input sample is computed as the
mean predicted regression targets of the trees in the forest.
Parameters
----------
X : array-like or sparse matrix of shape = [n_samples, n_features]
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
y : array of shape = [n_samples] or [n_samples, n_outputs]
The predicted values.
"""
# Check data
X = self._validate_X_predict(X)
# Assign chunk of trees to jobs
n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs)
# Parallel loop
all_y_hat = Parallel(n_jobs=n_jobs, verbose=self.verbose,
backend="threading")(
delayed(_parallel_helper)(e, 'predict', X, check_input=False)
for e in self.estimators_)
# Reduce
y_hat = sum(all_y_hat) / len(self.estimators_)
return y_hat
def _set_oob_score(self, X, y):
"""Compute out-of-bag scores"""
X = check_array(X, dtype=DTYPE, accept_sparse='csr')
n_samples = y.shape[0]
predictions = np.zeros((n_samples, self.n_outputs_))
n_predictions = np.zeros((n_samples, self.n_outputs_))
for estimator in self.estimators_:
unsampled_indices = _generate_unsampled_indices(
estimator.random_state, n_samples)
p_estimator = estimator.predict(
X[unsampled_indices, :], check_input=False)
if self.n_outputs_ == 1:
p_estimator = p_estimator[:, np.newaxis]
predictions[unsampled_indices, :] += p_estimator
n_predictions[unsampled_indices, :] += 1
if (n_predictions == 0).any():
warn("Some inputs do not have OOB scores. "
"This probably means too few trees were used "
"to compute any reliable oob estimates.")
n_predictions[n_predictions == 0] = 1
predictions /= n_predictions
self.oob_prediction_ = predictions
if self.n_outputs_ == 1:
self.oob_prediction_ = \
self.oob_prediction_.reshape((n_samples, ))
self.oob_score_ = 0.0
for k in range(self.n_outputs_):
self.oob_score_ += r2_score(y[:, k],
predictions[:, k])
self.oob_score_ /= self.n_outputs_
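# Illustrative sketch (editorial addition, not part of the original source):
# ``ForestRegressor.predict`` above reduces to a plain mean of the per-tree
# predictions, e.g. with two hypothetical trees:
#
#     import numpy as np
#     y_hat = (np.array([1.0, 3.0]) + np.array([2.0, 5.0])) / 2.0
#     # -> array([1.5, 4.0])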
class RandomForestClassifier(ForestClassifier):
"""A random forest classifier.
A random forest is a meta estimator that fits a number of decision tree
classifiers on various sub-samples of the dataset and uses averaging to
improve the predictive accuracy and control over-fitting.
The sub-sample size is always the same as the original
input sample size but the samples are drawn with replacement if
`bootstrap=True` (default).
Read more in the :ref:`User Guide <forest>`.
Parameters
----------
n_estimators : integer, optional (default=10)
The number of trees in the forest.
criterion : string, optional (default="gini")
The function to measure the quality of a split. Supported criteria are
"gini" for the Gini impurity and "entropy" for the information gain.
Note: this parameter is tree-specific.
max_features : int, float, string or None, optional (default="auto")
The number of features to consider when looking for the best split:
- If int, then consider `max_features` features at each split.
- If float, then `max_features` is a percentage and
`int(max_features * n_features)` features are considered at each
split.
- If "auto", then `max_features=sqrt(n_features)`.
- If "sqrt", then `max_features=sqrt(n_features)` (same as "auto").
- If "log2", then `max_features=log2(n_features)`.
- If None, then `max_features=n_features`.
Note: the search for a split does not stop until at least one
valid partition of the node samples is found, even if it requires to
effectively inspect more than ``max_features`` features.
Note: this parameter is tree-specific.
max_depth : integer or None, optional (default=None)
The maximum depth of the tree. If None, then nodes are expanded until
all leaves are pure or until all leaves contain less than
min_samples_split samples.
Ignored if ``max_leaf_nodes`` is not None.
Note: this parameter is tree-specific.
min_samples_split : integer, optional (default=2)
The minimum number of samples required to split an internal node.
Note: this parameter is tree-specific.
min_samples_leaf : integer, optional (default=1)
The minimum number of samples in newly created leaves. A split is
discarded if, after the split, one of the leaves would contain fewer than
``min_samples_leaf`` samples.
Note: this parameter is tree-specific.
min_weight_fraction_leaf : float, optional (default=0.)
The minimum weighted fraction of the input samples required to be at a
leaf node.
Note: this parameter is tree-specific.
max_leaf_nodes : int or None, optional (default=None)
Grow trees with ``max_leaf_nodes`` in best-first fashion.
Best nodes are defined as relative reduction in impurity.
If None then unlimited number of leaf nodes.
If not None then ``max_depth`` will be ignored.
Note: this parameter is tree-specific.
bootstrap : boolean, optional (default=True)
Whether bootstrap samples are used when building trees.
oob_score : bool
Whether to use out-of-bag samples to estimate
the generalization error.
n_jobs : integer, optional (default=1)
The number of jobs to run in parallel for both `fit` and `predict`.
If -1, then the number of jobs is set to the number of cores.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
verbose : int, optional (default=0)
Controls the verbosity of the tree building process.
warm_start : bool, optional (default=False)
When set to ``True``, reuse the solution of the previous call to fit
and add more estimators to the ensemble, otherwise, just fit a whole
new forest.
class_weight : dict, list of dicts, "balanced", "balanced_subsample" or None, optional
Weights associated with classes in the form ``{class_label: weight}``.
If not given, all classes are supposed to have weight one. For
multi-output problems, a list of dicts can be provided in the same
order as the columns of y.
The "balanced" mode uses the values of y to automatically adjust
weights inversely proportional to class frequencies in the input data
as ``n_samples / (n_classes * np.bincount(y))``
The "balanced_subsample" mode is the same as "balanced" except that weights are
computed based on the bootstrap sample for every tree grown.
For multi-output, the weights of each column of y will be multiplied.
Note that these weights will be multiplied with sample_weight (passed
through the fit method) if sample_weight is specified.
Attributes
----------
estimators_ : list of DecisionTreeClassifier
The collection of fitted sub-estimators.
classes_ : array of shape = [n_classes] or a list of such arrays
The classes labels (single output problem), or a list of arrays of
class labels (multi-output problem).
n_classes_ : int or list
The number of classes (single output problem), or a list containing the
number of classes for each output (multi-output problem).
n_features_ : int
The number of features when ``fit`` is performed.
n_outputs_ : int
The number of outputs when ``fit`` is performed.
feature_importances_ : array of shape = [n_features]
The feature importances (the higher, the more important the feature).
oob_score_ : float
Score of the training dataset obtained using an out-of-bag estimate.
oob_decision_function_ : array of shape = [n_samples, n_classes]
Decision function computed with out-of-bag estimate on the training
set. If n_estimators is small it might be possible that a data point
was never left out during the bootstrap. In this case,
`oob_decision_function_` might contain NaN.
References
----------
.. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.
See also
--------
DecisionTreeClassifier, ExtraTreesClassifier
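Examples
--------
A minimal, illustrative round-trip on synthetic toy data (this example is
an editorial addition, not from the original source):

>>> from sklearn.ensemble import RandomForestClassifier
>>> X = [[0, 0], [1, 1], [1, 0], [0, 1]]
>>> y = [0, 1, 1, 0]
>>> clf = RandomForestClassifier(n_estimators=10, random_state=0).fit(X, y)
>>> clf.predict([[1, 1]]).shape
(1,)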
"""
def __init__(self,
n_estimators=10,
criterion="gini",
max_depth=None,
min_samples_split=2,
min_samples_leaf=1,
min_weight_fraction_leaf=0.,
max_features="auto",
max_leaf_nodes=None,
bootstrap=True,
oob_score=False,
n_jobs=1,
random_state=None,
verbose=0,
warm_start=False,
class_weight=None):
super(RandomForestClassifier, self).__init__(
base_estimator=DecisionTreeClassifier(),
n_estimators=n_estimators,
estimator_params=("criterion", "max_depth", "min_samples_split",
"min_samples_leaf", "min_weight_fraction_leaf",
"max_features", "max_leaf_nodes",
"random_state"),
bootstrap=bootstrap,
oob_score=oob_score,
n_jobs=n_jobs,
random_state=random_state,
verbose=verbose,
warm_start=warm_start,
class_weight=class_weight)
self.criterion = criterion
self.max_depth = max_depth
self.min_samples_split = min_samples_split
self.min_samples_leaf = min_samples_leaf
self.min_weight_fraction_leaf = min_weight_fraction_leaf
self.max_features = max_features
self.max_leaf_nodes = max_leaf_nodes
class RandomForestRegressor(ForestRegressor):
"""A random forest regressor.
A random forest is a meta estimator that fits a number of decision tree
regressors on various sub-samples of the dataset and uses averaging
to improve the predictive accuracy and control over-fitting.
The sub-sample size is always the same as the original
input sample size but the samples are drawn with replacement if
`bootstrap=True` (default).
Read more in the :ref:`User Guide <forest>`.
Parameters
----------
n_estimators : integer, optional (default=10)
The number of trees in the forest.
criterion : string, optional (default="mse")
The function to measure the quality of a split. The only supported
criterion is "mse" for the mean squared error.
Note: this parameter is tree-specific.
max_features : int, float, string or None, optional (default="auto")
The number of features to consider when looking for the best split:
- If int, then consider `max_features` features at each split.
- If float, then `max_features` is a percentage and
`int(max_features * n_features)` features are considered at each
split.
- If "auto", then `max_features=n_features`.
- If "sqrt", then `max_features=sqrt(n_features)`.
- If "log2", then `max_features=log2(n_features)`.
- If None, then `max_features=n_features`.
Note: the search for a split does not stop until at least one
valid partition of the node samples is found, even if it requires to
effectively inspect more than ``max_features`` features.
Note: this parameter is tree-specific.
max_depth : integer or None, optional (default=None)
The maximum depth of the tree. If None, then nodes are expanded until
all leaves are pure or until all leaves contain less than
min_samples_split samples.
Ignored if ``max_leaf_nodes`` is not None.
Note: this parameter is tree-specific.
min_samples_split : integer, optional (default=2)
The minimum number of samples required to split an internal node.
Note: this parameter is tree-specific.
min_samples_leaf : integer, optional (default=1)
The minimum number of samples in newly created leaves. A split is
discarded if, after the split, one of the leaves would contain fewer than
``min_samples_leaf`` samples.
Note: this parameter is tree-specific.
min_weight_fraction_leaf : float, optional (default=0.)
The minimum weighted fraction of the input samples required to be at a
leaf node.
Note: this parameter is tree-specific.
max_leaf_nodes : int or None, optional (default=None)
Grow trees with ``max_leaf_nodes`` in best-first fashion.
Best nodes are defined as relative reduction in impurity.
If None then unlimited number of leaf nodes.
If not None then ``max_depth`` will be ignored.
Note: this parameter is tree-specific.
bootstrap : boolean, optional (default=True)
Whether bootstrap samples are used when building trees.
oob_score : bool
Whether to use out-of-bag samples to estimate
the generalization error.
n_jobs : integer, optional (default=1)
The number of jobs to run in parallel for both `fit` and `predict`.
If -1, then the number of jobs is set to the number of cores.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
verbose : int, optional (default=0)
Controls the verbosity of the tree building process.
warm_start : bool, optional (default=False)
When set to ``True``, reuse the solution of the previous call to fit
and add more estimators to the ensemble, otherwise, just fit a whole
new forest.
Attributes
----------
estimators_ : list of DecisionTreeRegressor
The collection of fitted sub-estimators.
feature_importances_ : array of shape = [n_features]
The feature importances (the higher, the more important the feature).
n_features_ : int
The number of features when ``fit`` is performed.
n_outputs_ : int
The number of outputs when ``fit`` is performed.
oob_score_ : float
Score of the training dataset obtained using an out-of-bag estimate.
oob_prediction_ : array of shape = [n_samples]
Prediction computed with out-of-bag estimate on the training set.
References
----------
.. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.
See also
--------
DecisionTreeRegressor, ExtraTreesRegressor
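Examples
--------
A minimal, illustrative fit/predict on synthetic toy data (this example is
an editorial addition, not from the original source):

>>> from sklearn.ensemble import RandomForestRegressor
>>> X = [[0], [1], [2], [3]]
>>> y = [0.0, 1.0, 2.0, 3.0]
>>> reg = RandomForestRegressor(n_estimators=10, random_state=0).fit(X, y)
>>> reg.predict([[1.5]]).shape
(1,)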
"""
def __init__(self,
n_estimators=10,
criterion="mse",
max_depth=None,
min_samples_split=2,
min_samples_leaf=1,
min_weight_fraction_leaf=0.,
max_features="auto",
max_leaf_nodes=None,
bootstrap=True,
oob_score=False,
n_jobs=1,
random_state=None,
verbose=0,
warm_start=False):
super(RandomForestRegressor, self).__init__(
base_estimator=DecisionTreeRegressor(),
n_estimators=n_estimators,
estimator_params=("criterion", "max_depth", "min_samples_split",
"min_samples_leaf", "min_weight_fraction_leaf",
"max_features", "max_leaf_nodes",
"random_state"),
bootstrap=bootstrap,
oob_score=oob_score,
n_jobs=n_jobs,
random_state=random_state,
verbose=verbose,
warm_start=warm_start)
self.criterion = criterion
self.max_depth = max_depth
self.min_samples_split = min_samples_split
self.min_samples_leaf = min_samples_leaf
self.min_weight_fraction_leaf = min_weight_fraction_leaf
self.max_features = max_features
self.max_leaf_nodes = max_leaf_nodes
class ExtraTreesClassifier(ForestClassifier):
"""An extra-trees classifier.
This class implements a meta estimator that fits a number of
randomized decision trees (a.k.a. extra-trees) on various sub-samples
of the dataset and uses averaging to improve the predictive accuracy
and control over-fitting.
Read more in the :ref:`User Guide <forest>`.
Parameters
----------
n_estimators : integer, optional (default=10)
The number of trees in the forest.
criterion : string, optional (default="gini")
The function to measure the quality of a split. Supported criteria are
"gini" for the Gini impurity and "entropy" for the information gain.
Note: this parameter is tree-specific.
max_features : int, float, string or None, optional (default="auto")
The number of features to consider when looking for the best split:
- If int, then consider `max_features` features at each split.
- If float, then `max_features` is a percentage and
`int(max_features * n_features)` features are considered at each
split.
- If "auto", then `max_features=sqrt(n_features)`.
- If "sqrt", then `max_features=sqrt(n_features)`.
- If "log2", then `max_features=log2(n_features)`.
- If None, then `max_features=n_features`.
Note: the search for a split does not stop until at least one
valid partition of the node samples is found, even if it requires to
effectively inspect more than ``max_features`` features.
Note: this parameter is tree-specific.
max_depth : integer or None, optional (default=None)
The maximum depth of the tree. If None, then nodes are expanded until
all leaves are pure or until all leaves contain less than
min_samples_split samples.
Ignored if ``max_leaf_nodes`` is not None.
Note: this parameter is tree-specific.
min_samples_split : integer, optional (default=2)
The minimum number of samples required to split an internal node.
Note: this parameter is tree-specific.
min_samples_leaf : integer, optional (default=1)
The minimum number of samples in newly created leaves. A split is
discarded if, after the split, one of the leaves would contain fewer than
``min_samples_leaf`` samples.
Note: this parameter is tree-specific.
min_weight_fraction_leaf : float, optional (default=0.)
The minimum weighted fraction of the input samples required to be at a
leaf node.
Note: this parameter is tree-specific.
max_leaf_nodes : int or None, optional (default=None)
Grow trees with ``max_leaf_nodes`` in best-first fashion.
Best nodes are defined as relative reduction in impurity.
If None then unlimited number of leaf nodes.
If not None then ``max_depth`` will be ignored.
Note: this parameter is tree-specific.
bootstrap : boolean, optional (default=False)
Whether bootstrap samples are used when building trees.
oob_score : bool
Whether to use out-of-bag samples to estimate
the generalization error.
n_jobs : integer, optional (default=1)
The number of jobs to run in parallel for both `fit` and `predict`.
If -1, then the number of jobs is set to the number of cores.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
verbose : int, optional (default=0)
Controls the verbosity of the tree building process.
warm_start : bool, optional (default=False)
When set to ``True``, reuse the solution of the previous call to fit
and add more estimators to the ensemble, otherwise, just fit a whole
new forest.
class_weight : dict, list of dicts, "balanced", "balanced_subsample" or None, optional
Weights associated with classes in the form ``{class_label: weight}``.
If not given, all classes are supposed to have weight one. For
multi-output problems, a list of dicts can be provided in the same
order as the columns of y.
The "balanced" mode uses the values of y to automatically adjust
weights inversely proportional to class frequencies in the input data
as ``n_samples / (n_classes * np.bincount(y))``
The "balanced_subsample" mode is the same as "balanced" except that weights are
computed based on the bootstrap sample for every tree grown.
For multi-output, the weights of each column of y will be multiplied.
Note that these weights will be multiplied with sample_weight (passed
through the fit method) if sample_weight is specified.
Attributes
----------
estimators_ : list of DecisionTreeClassifier
The collection of fitted sub-estimators.
classes_ : array of shape = [n_classes] or a list of such arrays
The classes labels (single output problem), or a list of arrays of
class labels (multi-output problem).
n_classes_ : int or list
The number of classes (single output problem), or a list containing the
number of classes for each output (multi-output problem).
feature_importances_ : array of shape = [n_features]
The feature importances (the higher, the more important the feature).
n_features_ : int
The number of features when ``fit`` is performed.
n_outputs_ : int
The number of outputs when ``fit`` is performed.
oob_score_ : float
Score of the training dataset obtained using an out-of-bag estimate.
oob_decision_function_ : array of shape = [n_samples, n_classes]
Decision function computed with out-of-bag estimate on the training
set. If n_estimators is small it might be possible that a data point
was never left out during the bootstrap. In this case,
`oob_decision_function_` might contain NaN.
References
----------
.. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
Machine Learning, 63(1), 3-42, 2006.
See also
--------
sklearn.tree.ExtraTreeClassifier : Base classifier for this ensemble.
RandomForestClassifier : Ensemble classifier based on trees with optimal
splits.
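Examples
--------
A minimal, illustrative usage sketch on synthetic toy data (this example
is an editorial addition, not from the original source):

>>> from sklearn.ensemble import ExtraTreesClassifier
>>> X = [[0, 0], [1, 1], [1, 0], [0, 1]]
>>> y = [0, 1, 1, 0]
>>> clf = ExtraTreesClassifier(n_estimators=10, random_state=0).fit(X, y)
>>> clf.predict_proba([[0, 1]]).shape
(1, 2)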
"""
def __init__(self,
n_estimators=10,
criterion="gini",
max_depth=None,
min_samples_split=2,
min_samples_leaf=1,
min_weight_fraction_leaf=0.,
max_features="auto",
max_leaf_nodes=None,
bootstrap=False,
oob_score=False,
n_jobs=1,
random_state=None,
verbose=0,
warm_start=False,
class_weight=None):
super(ExtraTreesClassifier, self).__init__(
base_estimator=ExtraTreeClassifier(),
n_estimators=n_estimators,
estimator_params=("criterion", "max_depth", "min_samples_split",
"min_samples_leaf", "min_weight_fraction_leaf",
"max_features", "max_leaf_nodes", "random_state"),
bootstrap=bootstrap,
oob_score=oob_score,
n_jobs=n_jobs,
random_state=random_state,
verbose=verbose,
warm_start=warm_start,
class_weight=class_weight)
self.criterion = criterion
self.max_depth = max_depth
self.min_samples_split = min_samples_split
self.min_samples_leaf = min_samples_leaf
self.min_weight_fraction_leaf = min_weight_fraction_leaf
self.max_features = max_features
self.max_leaf_nodes = max_leaf_nodes
class ExtraTreesRegressor(ForestRegressor):
"""An extra-trees regressor.
This class implements a meta estimator that fits a number of
randomized decision trees (a.k.a. extra-trees) on various sub-samples
of the dataset and uses averaging to improve the predictive accuracy
and control over-fitting.
Read more in the :ref:`User Guide <forest>`.
Parameters
----------
n_estimators : integer, optional (default=10)
The number of trees in the forest.
criterion : string, optional (default="mse")
The function to measure the quality of a split. The only supported
criterion is "mse" for the mean squared error.
Note: this parameter is tree-specific.
max_features : int, float, string or None, optional (default="auto")
The number of features to consider when looking for the best split:
- If int, then consider `max_features` features at each split.
- If float, then `max_features` is a percentage and
`int(max_features * n_features)` features are considered at each
split.
- If "auto", then `max_features=n_features`.
- If "sqrt", then `max_features=sqrt(n_features)`.
- If "log2", then `max_features=log2(n_features)`.
- If None, then `max_features=n_features`.
Note: the search for a split does not stop until at least one
valid partition of the node samples is found, even if it requires to
effectively inspect more than ``max_features`` features.
Note: this parameter is tree-specific.
max_depth : integer or None, optional (default=None)
The maximum depth of the tree. If None, then nodes are expanded until
all leaves are pure or until all leaves contain less than
min_samples_split samples.
Ignored if ``max_leaf_nodes`` is not None.
Note: this parameter is tree-specific.
min_samples_split : integer, optional (default=2)
The minimum number of samples required to split an internal node.
Note: this parameter is tree-specific.
min_samples_leaf : integer, optional (default=1)
The minimum number of samples in newly created leaves. A split is
discarded if, after the split, one of the leaves would contain fewer than
``min_samples_leaf`` samples.
Note: this parameter is tree-specific.
min_weight_fraction_leaf : float, optional (default=0.)
The minimum weighted fraction of the input samples required to be at a
leaf node.
Note: this parameter is tree-specific.
max_leaf_nodes : int or None, optional (default=None)
Grow trees with ``max_leaf_nodes`` in best-first fashion.
Best nodes are defined as relative reduction in impurity.
If None then unlimited number of leaf nodes.
If not None then ``max_depth`` will be ignored.
Note: this parameter is tree-specific.
bootstrap : boolean, optional (default=False)
Whether bootstrap samples are used when building trees.
Note: this parameter is tree-specific.
oob_score : bool
Whether to use out-of-bag samples to estimate
the generalization error.
n_jobs : integer, optional (default=1)
The number of jobs to run in parallel for both `fit` and `predict`.
If -1, then the number of jobs is set to the number of cores.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
verbose : int, optional (default=0)
Controls the verbosity of the tree building process.
warm_start : bool, optional (default=False)
When set to ``True``, reuse the solution of the previous call to fit
and add more estimators to the ensemble, otherwise, just fit a whole
new forest.
Attributes
----------
estimators_ : list of DecisionTreeRegressor
The collection of fitted sub-estimators.
feature_importances_ : array of shape = [n_features]
The feature importances (the higher, the more important the feature).
n_features_ : int
The number of features.
n_outputs_ : int
The number of outputs.
oob_score_ : float
Score of the training dataset obtained using an out-of-bag estimate.
oob_prediction_ : array of shape = [n_samples]
Prediction computed with out-of-bag estimate on the training set.
References
----------
.. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
Machine Learning, 63(1), 3-42, 2006.
See also
--------
sklearn.tree.ExtraTreeRegressor : Base estimator for this ensemble.
RandomForestRegressor : Ensemble regressor using trees with optimal splits.
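Examples
--------
A minimal, illustrative usage sketch on synthetic toy data (this example
is an editorial addition, not from the original source):

>>> from sklearn.ensemble import ExtraTreesRegressor
>>> X = [[0], [1], [2], [3]]
>>> y = [0.0, 1.0, 2.0, 3.0]
>>> reg = ExtraTreesRegressor(n_estimators=10, random_state=0).fit(X, y)
>>> reg.predict([[2.5]]).shape
(1,)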
"""
def __init__(self,
n_estimators=10,
criterion="mse",
max_depth=None,
min_samples_split=2,
min_samples_leaf=1,
min_weight_fraction_leaf=0.,
max_features="auto",
max_leaf_nodes=None,
bootstrap=False,
oob_score=False,
n_jobs=1,
random_state=None,
verbose=0,
warm_start=False):
super(ExtraTreesRegressor, self).__init__(
base_estimator=ExtraTreeRegressor(),
n_estimators=n_estimators,
estimator_params=("criterion", "max_depth", "min_samples_split",
"min_samples_leaf", "min_weight_fraction_leaf",
"max_features", "max_leaf_nodes",
"random_state"),
bootstrap=bootstrap,
oob_score=oob_score,
n_jobs=n_jobs,
random_state=random_state,
verbose=verbose,
warm_start=warm_start)
self.criterion = criterion
self.max_depth = max_depth
self.min_samples_split = min_samples_split
self.min_samples_leaf = min_samples_leaf
self.min_weight_fraction_leaf = min_weight_fraction_leaf
self.max_features = max_features
self.max_leaf_nodes = max_leaf_nodes
class RandomTreesEmbedding(BaseForest):
"""An ensemble of totally random trees.
An unsupervised transformation of a dataset to a high-dimensional
sparse representation. A datapoint is coded according to which leaf of
each tree it is sorted into. Using a one-hot encoding of the leaves,
this leads to a binary coding with as many ones as there are trees in
the forest.
The dimensionality of the resulting representation is
``n_out <= n_estimators * max_leaf_nodes``. If ``max_leaf_nodes == None``,
the number of leaf nodes is at most ``n_estimators * 2 ** max_depth``.
Read more in the :ref:`User Guide <random_trees_embedding>`.
Parameters
----------
n_estimators : int
Number of trees in the forest.
max_depth : int
The maximum depth of each tree. If None, then nodes are expanded until
all leaves are pure or until all leaves contain less than
min_samples_split samples.
Ignored if ``max_leaf_nodes`` is not None.
min_samples_split : integer, optional (default=2)
The minimum number of samples required to split an internal node.
min_samples_leaf : integer, optional (default=1)
The minimum number of samples in newly created leaves. A split is
discarded if, after the split, one of the leaves would contain fewer than
``min_samples_leaf`` samples.
min_weight_fraction_leaf : float, optional (default=0.)
The minimum weighted fraction of the input samples required to be at a
leaf node.
max_leaf_nodes : int or None, optional (default=None)
Grow trees with ``max_leaf_nodes`` in best-first fashion.
Best nodes are defined as relative reduction in impurity.
If None then unlimited number of leaf nodes.
If not None then ``max_depth`` will be ignored.
sparse_output : bool, optional (default=True)
Whether to return a sparse CSR matrix (the default behavior) or a
dense array compatible with dense pipeline operators.
n_jobs : integer, optional (default=1)
The number of jobs to run in parallel for both `fit` and `predict`.
If -1, then the number of jobs is set to the number of cores.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
verbose : int, optional (default=0)
Controls the verbosity of the tree building process.
warm_start : bool, optional (default=False)
When set to ``True``, reuse the solution of the previous call to fit
and add more estimators to the ensemble, otherwise, just fit a whole
new forest.
Attributes
----------
estimators_ : list of DecisionTreeClassifier
The collection of fitted sub-estimators.
References
----------
.. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
Machine Learning, 63(1), 3-42, 2006.
.. [2] Moosmann, F. and Triggs, B. and Jurie, F. "Fast discriminative
visual codebooks using randomized clustering forests"
NIPS 2007
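Examples
--------
A minimal, illustrative transform on synthetic toy data (this example is
an editorial addition, not from the original source); the output is a
sparse one-hot encoding of leaf memberships:

>>> from sklearn.ensemble import RandomTreesEmbedding
>>> X = [[0, 0], [1, 1], [0, 1], [1, 0]]
>>> hasher = RandomTreesEmbedding(n_estimators=5, random_state=0)
>>> X_transformed = hasher.fit_transform(X)
>>> X_transformed.shape[0]
4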
"""
def __init__(self,
n_estimators=10,
max_depth=5,
min_samples_split=2,
min_samples_leaf=1,
min_weight_fraction_leaf=0.,
max_leaf_nodes=None,
sparse_output=True,
n_jobs=1,
random_state=None,
verbose=0,
warm_start=False):
super(RandomTreesEmbedding, self).__init__(
base_estimator=ExtraTreeRegressor(),
n_estimators=n_estimators,
estimator_params=("criterion", "max_depth", "min_samples_split",
"min_samples_leaf", "min_weight_fraction_leaf",
"max_features", "max_leaf_nodes",
"random_state"),
bootstrap=False,
oob_score=False,
n_jobs=n_jobs,
random_state=random_state,
verbose=verbose,
warm_start=warm_start)
self.criterion = 'mse'
self.max_depth = max_depth
self.min_samples_split = min_samples_split
self.min_samples_leaf = min_samples_leaf
self.min_weight_fraction_leaf = min_weight_fraction_leaf
self.max_features = 1
self.max_leaf_nodes = max_leaf_nodes
self.sparse_output = sparse_output
def _set_oob_score(self, X, y):
raise NotImplementedError("OOB score not supported by tree embedding")
def fit(self, X, y=None, sample_weight=None):
"""Fit estimator.
Parameters
----------
X : array-like or sparse matrix, shape=(n_samples, n_features)
The input samples. Use ``dtype=np.float32`` for maximum
efficiency. Sparse matrices are also supported; use a sparse
``csc_matrix`` for best performance.
Returns
-------
self : object
Returns self.
"""
self.fit_transform(X, y, sample_weight=sample_weight)
return self
def fit_transform(self, X, y=None, sample_weight=None):
"""Fit estimator and transform dataset.
Parameters
----------
X : array-like or sparse matrix, shape=(n_samples, n_features)
Input data used to build forests. Use ``dtype=np.float32`` for
maximum efficiency.
Returns
-------
X_transformed : sparse matrix, shape=(n_samples, n_out)
Transformed dataset.
"""
# ensure_2d=False because there are unit tests checking that we fail
# for 1d input.
X = check_array(X, accept_sparse=['csc'], ensure_2d=False)
if issparse(X):
# Pre-sort indices to avoid that each individual tree of the
# ensemble sorts the indices.
X.sort_indices()
rnd = check_random_state(self.random_state)
y = rnd.uniform(size=X.shape[0])
super(RandomTreesEmbedding, self).fit(X, y,
sample_weight=sample_weight)
self.one_hot_encoder_ = OneHotEncoder(sparse=self.sparse_output)
return self.one_hot_encoder_.fit_transform(self.apply(X))
def transform(self, X):
"""Transform dataset.
Parameters
----------
X : array-like or sparse matrix, shape=(n_samples, n_features)
Input data to be transformed. Use ``dtype=np.float32`` for maximum
efficiency. Sparse matrices are also supported; use a sparse
``csr_matrix`` for best performance.
Returns
-------
X_transformed : sparse matrix, shape=(n_samples, n_out)
Transformed dataset.
"""
return self.one_hot_encoder_.transform(self.apply(X))
| bsd-3-clause |
YihaoLu/statsmodels | statsmodels/examples/example_functional_plots.py | 32 | 1367 | '''Functional boxplots and rainbow plots
see docstrings for an explanation
Author: Ralf Gommers
'''
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
import statsmodels.api as sm
#Load the El Nino dataset. It consists of 60 years' worth of Pacific Ocean
#sea surface temperature data.
data = sm.datasets.elnino.load()
#Create a functional boxplot:
#We see that the years 1982-83 and 1997-98 are outliers; these are
#the years where El Nino (a climate pattern characterized by warming
#up of the sea surface and higher air pressures) occurred with unusual
#intensity.
fig = plt.figure()
ax = fig.add_subplot(111)
res = sm.graphics.fboxplot(data.raw_data[:, 1:], wfactor=2.58,
labels=data.raw_data[:, 0].astype(int),
ax=ax)
ax.set_xlabel("Month of the year")
ax.set_ylabel("Sea surface temperature (C)")
ax.set_xticks(np.arange(13, step=3) - 1)
ax.set_xticklabels(["", "Mar", "Jun", "Sep", "Dec"])
ax.set_xlim([-0.2, 11.2])
#Create a rainbow plot:
fig = plt.figure()
ax = fig.add_subplot(111)
res = sm.graphics.rainbowplot(data.raw_data[:, 1:], ax=ax)
ax.set_xlabel("Month of the year")
ax.set_ylabel("Sea surface temperature (C)")
ax.set_xticks(np.arange(13, step=3) - 1)
ax.set_xticklabels(["", "Mar", "Jun", "Sep", "Dec"])
ax.set_xlim([-0.2, 11.2])
plt.show()
| bsd-3-clause |
khushhallchandra/Deep-Learning | run.py | 1 | 3707 | """
import library
"""
import numpy
import matplotlib.pyplot as plt
import pandas
import math
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error
# fix random seed for reproducibility
numpy.random.seed(7)
# load the dataset
dataframe = pandas.read_csv('international-airline-passengers.csv', usecols=[1], engine='python', skipfooter=3)
dataset = dataframe.values
dataset = dataset.astype('float32')
# normalize the dataset
scaler = MinMaxScaler(feature_range=(0, 1))
dataset = scaler.fit_transform(dataset)
"""
A simple method that we can use is to split the ordered dataset into train and test datasets. The code below
calculates the index of the split point and separates the data into the training datasets with 67% of the
observations that we can use to train our model, leaving the remaining 33% for testing the model.
"""
# split into train and test sets
train_size = int(len(dataset) * 0.67)
test_size = len(dataset) - train_size
train, test = dataset[0:train_size,:], dataset[train_size:len(dataset),:]
print "train_data_size: "+str(len(train)), " test_data_size: "+str(len(test))
# convert an array of values into a dataset matrix
def create_dataset(dataset, look_back=1):
dataX, dataY = [], []
for i in range(len(dataset)-look_back-1):
a = dataset[i:(i+look_back), 0]
dataX.append(a)
dataY.append(dataset[i + look_back, 0])
return numpy.array(dataX), numpy.array(dataY)
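# Illustrative note (editorial addition, not in the original script): with
# look_back=1, a toy series [[10.], [20.], [30.], [40.]] yields
# X = [[10.], [20.]] and Y = [20., 30.] -- each window of look_back values
# is paired with the value that immediately follows it.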
# reshape into X=t and Y=t+1
look_back = 1
trainX, trainY = create_dataset(train, look_back)
testX, testY = create_dataset(test, look_back)
# reshape input to be [samples, time steps, features]
trainX = numpy.reshape(trainX, (trainX.shape[0], 1, trainX.shape[1]))
testX = numpy.reshape(testX, (testX.shape[0], 1, testX.shape[1]))
""" The network has a visible layer with 1 input, a hidden layer with
4 LSTM blocks or neurons and an output layer that makes a single value
prediction. The default sigmoid activation function is used for the
LSTM blocks. The network is trained for 100 epochs and a batch size of
1 is used."""
# create and fit the LSTM network
model = Sequential()
model.add(LSTM(4, input_dim=look_back))
model.add(Dense(1))
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(trainX, trainY, nb_epoch=100, batch_size=1, verbose=2)
# make predictions
trainPredict = model.predict(trainX)
testPredict = model.predict(testX)
# invert predictions
trainPredict = scaler.inverse_transform(trainPredict)
trainY = scaler.inverse_transform([trainY])
testPredict = scaler.inverse_transform(testPredict)
testY = scaler.inverse_transform([testY])
# calculate root mean squared error
trainScore = math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0]))
print(trainY[0])
print(trainPredict[:,0])
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0]))
print(testY[0])
print(testPredict[:,0])
print('Test Score: %.2f RMSE' % (testScore))
# shift train predictions for plotting
trainPredictPlot = numpy.empty_like(dataset)
trainPredictPlot[:, :] = numpy.nan
trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict
# shift test predictions for plotting
testPredictPlot = numpy.empty_like(dataset)
testPredictPlot[:, :] = numpy.nan
testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict
# plot baseline and predictions
plt.plot(scaler.inverse_transform(dataset))
plt.plot(trainPredictPlot)
plt.plot(testPredictPlot)
plt.show()
| mit |
anntzer/scipy | scipy/_lib/tests/test_warnings.py | 10 | 4295 | """
Tests which scan for certain occurrences in the code, they may not find
all of these occurrences but should catch almost all. This file was adapted
from NumPy.
"""
import os
from pathlib import Path
import ast
import tokenize
import scipy
import pytest
class ParseCall(ast.NodeVisitor):
def __init__(self):
self.ls = []
def visit_Attribute(self, node):
ast.NodeVisitor.generic_visit(self, node)
self.ls.append(node.attr)
def visit_Name(self, node):
self.ls.append(node.id)
class FindFuncs(ast.NodeVisitor):
def __init__(self, filename):
super().__init__()
self.__filename = filename
self.bad_filters = []
self.bad_stacklevels = []
def visit_Call(self, node):
p = ParseCall()
p.visit(node.func)
ast.NodeVisitor.generic_visit(self, node)
if p.ls[-1] == 'simplefilter' or p.ls[-1] == 'filterwarnings':
if node.args[0].s == "ignore":
self.bad_filters.append(
"{}:{}".format(self.__filename, node.lineno))
if p.ls[-1] == 'warn' and (
len(p.ls) == 1 or p.ls[-2] == 'warnings'):
if self.__filename == "_lib/tests/test_warnings.py":
# This file
return
# See if stacklevel exists:
if len(node.args) == 3:
return
args = {kw.arg for kw in node.keywords}
if "stacklevel" not in args:
self.bad_stacklevels.append(
"{}:{}".format(self.__filename, node.lineno))
@pytest.fixture(scope="session")
def warning_calls():
# combined "ignore" and stacklevel error
base = Path(scipy.__file__).parent
bad_filters = []
bad_stacklevels = []
for path in base.rglob("*.py"):
# use tokenize to auto-detect encoding on systems where no
# default encoding is defined (e.g., LANG='C')
with tokenize.open(str(path)) as file:
tree = ast.parse(file.read(), filename=str(path))
finder = FindFuncs(path.relative_to(base))
finder.visit(tree)
bad_filters.extend(finder.bad_filters)
bad_stacklevels.extend(finder.bad_stacklevels)
return bad_filters, bad_stacklevels
@pytest.mark.slow
def test_warning_calls_filters(warning_calls):
bad_filters, bad_stacklevels = warning_calls
# We try not to add filters in the code base, because those filters aren't
# thread-safe. We aim to only filter in tests with
# np.testing.suppress_warnings. However, in some cases it may prove
# necessary to filter out warnings, because we can't (easily) fix the root
# cause for them and we don't want users to see some warnings when they use
# SciPy correctly. So we list exceptions here. Add new entries only if
# there's a good reason.
allowed_filters = (
os.path.join('datasets', '_fetchers.py'),
os.path.join('datasets', '__init__.py'),
os.path.join('optimize', '_optimize.py'),
os.path.join('sparse', '__init__.py'), # np.matrix pending-deprecation
os.path.join('stats', '_discrete_distns.py'), # gh-14901
os.path.join('stats', '_continuous_distns.py'),
)
bad_filters = [item for item in bad_filters if item.split(':')[0] not in
allowed_filters]
if bad_filters:
raise AssertionError(
"warning ignore filter should not be used, instead, use\n"
"numpy.testing.suppress_warnings (in tests only);\n"
"found in:\n {}".format(
"\n ".join(bad_filters)))
@pytest.mark.slow
@pytest.mark.xfail(reason="stacklevels currently missing")
def test_warning_calls_stacklevels(warning_calls):
bad_filters, bad_stacklevels = warning_calls
msg = ""
if bad_filters:
msg += ("warning ignore filter should not be used, instead, use\n"
"numpy.testing.suppress_warnings (in tests only);\n"
"found in:\n {}".format("\n ".join(bad_filters)))
msg += "\n\n"
if bad_stacklevels:
msg += "warnings should have an appropriate stacklevel:\n {}".format(
"\n ".join(bad_stacklevels))
if msg:
raise AssertionError(msg)
| bsd-3-clause |
heli522/scikit-learn | sklearn/tests/test_isotonic.py | 228 | 11087 | import numpy as np
import pickle
from sklearn.isotonic import (check_increasing, isotonic_regression,
IsotonicRegression)
from sklearn.utils.testing import (assert_raises, assert_array_equal,
assert_true, assert_false, assert_equal,
assert_array_almost_equal,
assert_warns_message, assert_no_warnings)
from sklearn.utils import shuffle
def test_permutation_invariance():
# check that fit is permutation invariant.
# regression test for missing sorting of sample weights
ir = IsotonicRegression()
x = [1, 2, 3, 4, 5, 6, 7]
y = [1, 41, 51, 1, 2, 5, 24]
sample_weight = [1, 2, 3, 4, 5, 6, 7]
x_s, y_s, sample_weight_s = shuffle(x, y, sample_weight, random_state=0)
y_transformed = ir.fit_transform(x, y, sample_weight=sample_weight)
y_transformed_s = ir.fit(x_s, y_s, sample_weight=sample_weight_s).transform(x)
assert_array_equal(y_transformed, y_transformed_s)
def test_check_increasing_up():
x = [0, 1, 2, 3, 4, 5]
y = [0, 1.5, 2.77, 8.99, 8.99, 50]
# Check that we got increasing=True and no warnings
is_increasing = assert_no_warnings(check_increasing, x, y)
assert_true(is_increasing)
def test_check_increasing_up_extreme():
x = [0, 1, 2, 3, 4, 5]
y = [0, 1, 2, 3, 4, 5]
# Check that we got increasing=True and no warnings
is_increasing = assert_no_warnings(check_increasing, x, y)
assert_true(is_increasing)
def test_check_increasing_down():
x = [0, 1, 2, 3, 4, 5]
y = [0, -1.5, -2.77, -8.99, -8.99, -50]
# Check that we got increasing=False and no warnings
is_increasing = assert_no_warnings(check_increasing, x, y)
assert_false(is_increasing)
def test_check_increasing_down_extreme():
x = [0, 1, 2, 3, 4, 5]
y = [0, -1, -2, -3, -4, -5]
# Check that we got increasing=False and no warnings
is_increasing = assert_no_warnings(check_increasing, x, y)
assert_false(is_increasing)
def test_check_ci_warn():
x = [0, 1, 2, 3, 4, 5]
y = [0, -1, 2, -3, 4, -5]
# Check that we got increasing=False and CI interval warning
is_increasing = assert_warns_message(UserWarning, "interval",
check_increasing,
x, y)
assert_false(is_increasing)
def test_isotonic_regression():
y = np.array([3, 7, 5, 9, 8, 7, 10])
y_ = np.array([3, 6, 6, 8, 8, 8, 10])
assert_array_equal(y_, isotonic_regression(y))
x = np.arange(len(y))
ir = IsotonicRegression(y_min=0., y_max=1.)
ir.fit(x, y)
assert_array_equal(ir.fit(x, y).transform(x), ir.fit_transform(x, y))
assert_array_equal(ir.transform(x), ir.predict(x))
# check that it is immune to permutation
perm = np.random.permutation(len(y))
ir = IsotonicRegression(y_min=0., y_max=1.)
assert_array_equal(ir.fit_transform(x[perm], y[perm]),
ir.fit_transform(x, y)[perm])
assert_array_equal(ir.transform(x[perm]), ir.transform(x)[perm])
# check we don't crash when all x are equal:
ir = IsotonicRegression()
assert_array_equal(ir.fit_transform(np.ones(len(x)), y), np.mean(y))
def test_isotonic_regression_ties_min():
# Setup examples with ties on minimum
x = [0, 1, 1, 2, 3, 4, 5]
y = [0, 1, 2, 3, 4, 5, 6]
y_true = [0, 1.5, 1.5, 3, 4, 5, 6]
# Check that we get identical results for fit/transform and fit_transform
ir = IsotonicRegression()
ir.fit(x, y)
assert_array_equal(ir.fit(x, y).transform(x), ir.fit_transform(x, y))
assert_array_equal(y_true, ir.fit_transform(x, y))
def test_isotonic_regression_ties_max():
# Setup examples with ties on maximum
x = [1, 2, 3, 4, 5, 5]
y = [1, 2, 3, 4, 5, 6]
y_true = [1, 2, 3, 4, 5.5, 5.5]
# Check that we get identical results for fit/transform and fit_transform
ir = IsotonicRegression()
ir.fit(x, y)
assert_array_equal(ir.fit(x, y).transform(x), ir.fit_transform(x, y))
assert_array_equal(y_true, ir.fit_transform(x, y))
def test_isotonic_regression_ties_secondary_():
"""
Test isotonic regression fit, transform and fit_transform
against the "secondary" ties method and "pituitary" data from R
"isotone" package, as detailed in: J. d. Leeuw, K. Hornik, P. Mair,
Isotone Optimization in R: Pool-Adjacent-Violators Algorithm
(PAVA) and Active Set Methods
Set values based on pituitary example and
the following R command detailed in the paper above:
> library("isotone")
> data("pituitary")
> res1 <- gpava(pituitary$age, pituitary$size, ties="secondary")
> res1$x
`isotone` version: 1.0-2, 2014-09-07
R version: R version 3.1.1 (2014-07-10)
"""
x = [8, 8, 8, 10, 10, 10, 12, 12, 12, 14, 14]
y = [21, 23.5, 23, 24, 21, 25, 21.5, 22, 19, 23.5, 25]
y_true = [22.22222, 22.22222, 22.22222, 22.22222, 22.22222, 22.22222,
22.22222, 22.22222, 22.22222, 24.25, 24.25]
# Check fit, transform and fit_transform
ir = IsotonicRegression()
ir.fit(x, y)
assert_array_almost_equal(ir.transform(x), y_true, 4)
assert_array_almost_equal(ir.fit_transform(x, y), y_true, 4)
def test_isotonic_regression_reversed():
y = np.array([10, 9, 10, 7, 6, 6.1, 5])
y_ = IsotonicRegression(increasing=False).fit_transform(
np.arange(len(y)), y)
assert_array_equal(np.ones(y_[:-1].shape), ((y_[:-1] - y_[1:]) >= 0))
def test_isotonic_regression_auto_decreasing():
# Set y and x for decreasing
y = np.array([10, 9, 10, 7, 6, 6.1, 5])
x = np.arange(len(y))
# Create model and fit_transform
ir = IsotonicRegression(increasing='auto')
y_ = assert_no_warnings(ir.fit_transform, x, y)
# Check that relationship decreases
is_increasing = y_[0] < y_[-1]
assert_false(is_increasing)
def test_isotonic_regression_auto_increasing():
# Set y and x for decreasing
y = np.array([5, 6.1, 6, 7, 10, 9, 10])
x = np.arange(len(y))
# Create model and fit_transform
ir = IsotonicRegression(increasing='auto')
y_ = assert_no_warnings(ir.fit_transform, x, y)
# Check that relationship increases
is_increasing = y_[0] < y_[-1]
assert_true(is_increasing)
def test_assert_raises_exceptions():
ir = IsotonicRegression()
rng = np.random.RandomState(42)
assert_raises(ValueError, ir.fit, [0, 1, 2], [5, 7, 3], [0.1, 0.6])
assert_raises(ValueError, ir.fit, [0, 1, 2], [5, 7])
assert_raises(ValueError, ir.fit, rng.randn(3, 10), [0, 1, 2])
assert_raises(ValueError, ir.transform, rng.randn(3, 10))
def test_isotonic_sample_weight_parameter_default_value():
# check if default value of sample_weight parameter is one
ir = IsotonicRegression()
# random test data
rng = np.random.RandomState(42)
n = 100
x = np.arange(n)
y = rng.randint(-50, 50, size=(n,)) + 50. * np.log(1 + np.arange(n))
# check if value is correctly used
weights = np.ones(n)
y_set_value = ir.fit_transform(x, y, sample_weight=weights)
y_default_value = ir.fit_transform(x, y)
assert_array_equal(y_set_value, y_default_value)
def test_isotonic_min_max_boundaries():
# check that y_min and y_max bounds are applied correctly
ir = IsotonicRegression(y_min=2, y_max=4)
n = 6
x = np.arange(n)
y = np.arange(n)
y_test = [2, 2, 2, 3, 4, 4]
y_result = np.round(ir.fit_transform(x, y))
assert_array_equal(y_result, y_test)
def test_isotonic_sample_weight():
ir = IsotonicRegression()
x = [1, 2, 3, 4, 5, 6, 7]
y = [1, 41, 51, 1, 2, 5, 24]
sample_weight = [1, 2, 3, 4, 5, 6, 7]
expected_y = [1, 13.95, 13.95, 13.95, 13.95, 13.95, 24]
received_y = ir.fit_transform(x, y, sample_weight=sample_weight)
assert_array_equal(expected_y, received_y)
def test_isotonic_regression_oob_raise():
# Set y and x
y = np.array([3, 7, 5, 9, 8, 7, 10])
x = np.arange(len(y))
# Create model and fit
ir = IsotonicRegression(increasing='auto', out_of_bounds="raise")
ir.fit(x, y)
# Check that an exception is thrown
assert_raises(ValueError, ir.predict, [min(x) - 10, max(x) + 10])
def test_isotonic_regression_oob_clip():
# Set y and x
y = np.array([3, 7, 5, 9, 8, 7, 10])
x = np.arange(len(y))
# Create model and fit
ir = IsotonicRegression(increasing='auto', out_of_bounds="clip")
ir.fit(x, y)
# Predict from training and test x and check that min/max match.
y1 = ir.predict([min(x) - 10, max(x) + 10])
y2 = ir.predict(x)
assert_equal(max(y1), max(y2))
assert_equal(min(y1), min(y2))
def test_isotonic_regression_oob_nan():
# Set y and x
y = np.array([3, 7, 5, 9, 8, 7, 10])
x = np.arange(len(y))
# Create model and fit
ir = IsotonicRegression(increasing='auto', out_of_bounds="nan")
ir.fit(x, y)
# Predict from training and test x and check that we have two NaNs.
y1 = ir.predict([min(x) - 10, max(x) + 10])
assert_equal(sum(np.isnan(y1)), 2)
def test_isotonic_regression_oob_bad():
# Set y and x
y = np.array([3, 7, 5, 9, 8, 7, 10])
x = np.arange(len(y))
# Create model and fit
ir = IsotonicRegression(increasing='auto', out_of_bounds="xyz")
# Make sure that we throw an error for bad out_of_bounds value
assert_raises(ValueError, ir.fit, x, y)
def test_isotonic_regression_oob_bad_after():
# Set y and x
y = np.array([3, 7, 5, 9, 8, 7, 10])
x = np.arange(len(y))
# Create model and fit
ir = IsotonicRegression(increasing='auto', out_of_bounds="raise")
# Make sure that we throw an error for bad out_of_bounds value in transform
ir.fit(x, y)
ir.out_of_bounds = "xyz"
assert_raises(ValueError, ir.transform, x)
def test_isotonic_regression_pickle():
y = np.array([3, 7, 5, 9, 8, 7, 10])
x = np.arange(len(y))
# Create model and fit
ir = IsotonicRegression(increasing='auto', out_of_bounds="clip")
ir.fit(x, y)
ir_ser = pickle.dumps(ir, pickle.HIGHEST_PROTOCOL)
ir2 = pickle.loads(ir_ser)
np.testing.assert_array_equal(ir.predict(x), ir2.predict(x))
def test_isotonic_duplicate_min_entry():
x = [0, 0, 1]
y = [0, 0, 1]
ir = IsotonicRegression(increasing=True, out_of_bounds="clip")
ir.fit(x, y)
all_predictions_finite = np.all(np.isfinite(ir.predict(x)))
assert_true(all_predictions_finite)
def test_isotonic_zero_weight_loop():
# Test from @ogrisel's issue:
# https://github.com/scikit-learn/scikit-learn/issues/4297
# Get deterministic RNG with seed
rng = np.random.RandomState(42)
# Create regression and samples
regression = IsotonicRegression()
n_samples = 50
x = np.linspace(-3, 3, n_samples)
y = x + rng.uniform(size=n_samples)
# Get some random weights and zero out
w = rng.uniform(size=n_samples)
w[5:8] = 0
regression.fit(x, y, sample_weight=w)
# This will hang in failure case.
regression.fit(x, y, sample_weight=w)
| bsd-3-clause |
codezakh/plotutil | plotutil.py | 1 | 6575 | """A group of utility functions for validating data and graphing them, as well as automating pulling
from created dictionaries."""
# This library contains utility functions for visualizing the results of clustering algorithms
# from scikit learn. It relies on matplotlib, seaborn, and pylab. This exists because the natural
# input to most machine learning algorithms is an array of vectors. The resulting predictions along
# with their tags can be represented succinctly as a tuple containing a vector and the label given
# to it. However, most graphing functions require a list of the coordinates in each dimension;
# this necessitates splitting the list of tuples vertically before passing it to the graphing function.
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import pylab as pyl
import seaborn as sns
from mpl_toolkits.mplot3d import Axes3D
import bisect
import datetime
from datetime import timedelta # used unqualified by timeTrack_recordnew
import warnings
from sklearn import cluster
from hmmlearn import hmm
def tuple_check(NateTuple):
"""Takes in a tuple, returns true only if every member of the tuple is a number."""
filtered_tuple = np.isnan(NateTuple)
return not any(filtered_tuple)
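# Example (illustrative): np.isnan marks NaN entries, so
# tuple_check((1.0, 2.0)) -> True, while
# tuple_check((1.0, float('nan'))) -> False.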
def pull_from_tag(tag_to_pull,whichpair,list_to_pull_from):
"""Returns all items with tag_to_pull from iterable list_to_pull_from using whichpair to
determine which element to take out"""
if whichpair == 1:
return [x for x,y in list_to_pull_from if y == tag_to_pull] #decides whether to return first element or second
else:
return [y for x,y in list_to_pull_from if x == tag_to_pull]
def tuple_list_creator(list_to_generate_from):
"""Takes in a list of lists of tuples, and then slices them vertically to return a lists of lists of x-
dimensions the same as that of the tuple represented as a vector."""
list_to_return = []
for x in list_to_generate_from:
list_to_return.append(zip(*x)) #this is the part doing the slicing
return list_to_return
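# Example (illustrative): zip(*x) slices row tuples into per-dimension tuples:
# tuple_list_creator([[(1, 4), (2, 5), (3, 6)]]) -> [[(1, 2, 3), (4, 5, 6)]]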
colormap = ['#66FF66','#008000','#000066','#8080FF','#660000','#FF4D4D','#990099','#FF33FF','#808000','#FFFF4D','#B26B00','#FFAD33','#476B6B','#A3C2C2','#6B2400','#D6AD99','#FFFFFF','#000000']
#colormap is a list that provides HTML color codes for makePlot to use. It can represent up to
#eighteen different data sets.
def makePlot_3d(coordinate_list):
"""Creates a 3d plot of objects with multiple tags from coordinate list.
coordinate_list is a list of tuples of lists, where each tuple element is a set of
coordinates for that particular list. Ex: [([x,x,x,x],[y,y,y,y],[z,z,z,z]),...]"""
plotObjectBox = pyl.figure() #creates a figure
plotObjectBox_ax = plotObjectBox.add_subplot(111, projection='3d') #adds a subplot
togetherlist = zip(coordinate_list,colormap[:len(coordinate_list)]) #pairs each data set with an html color
for x,y in togetherlist: #associates each set of coordinates with an html color tag
plotObjectBox_ax.scatter(x[0], x[1],x[2],c=y)
def index(a, x):
"""Locate the leftmost value exactly equal to x, arg a is list, x=key
Returns item if found, returns False if item not found,"""
i = bisect.bisect_left(a, x)
if i != len(a) and a[i] == x:
return i
else:
return False
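# Example (illustrative): index([1, 3, 5, 7], 5) -> 2, while index([1, 3, 5, 7], 4) -> False.
# Caveat: a match at position 0 also yields a falsy value (0), so callers that
# truth-test the result, as SliceMaker does, cannot distinguish it from "not found".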
def timeTrack_recordnew(datetimeseries):
"""Takes in a datetimeseries, returns list of skips [(skiplength, index)...]"""
breaklist = []
mylen = range(0,len(datetimeseries)-1)
for x in mylen:
if datetimeseries[x+1] != datetimeseries[x]+timedelta(seconds=1):
nextstep = x+1
breaklist.append([datetimeseries[nextstep]-datetimeseries[x],x])
else:
continue
return breaklist
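# Example (illustrative): for stamps [t, t+1s, t+4s] the gap between
# positions 1 and 2 is recorded as [timedelta(seconds=3), 1].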
def access_DFrow(indextopull,dataFrameToPullFrom):
"""access_DFrow(indextopull,dataFrameToPullFrom)-> return row"""
listToReturn =[] #list to return
for x in dataFrameToPullFrom.keys():
TEMP_chainvar = dataFrameToPullFrom[x]
listToReturn.append(TEMP_chainvar[indextopull])
return listToReturn
def PullDate(date,framename):
"""Return the position of midnight on day-of-month `date` in framename's
time column, or False if that day is not present."""
timeseries = pd.to_datetime(framename['time'])
startdate = timeseries[0]
return index(timeseries, startdate.replace(day=date,hour=0,second=0,minute=0))
def sliceDF(tupleIndex, frameInUse):
"""Creates a dataframe bookended by a tuple"""
myframe = pd.DataFrame()
for x in frameInUse.keys():
myframe[x]=frameInUse[x][tupleIndex[0]:tupleIndex[1]:1]
return myframe
def SliceMaker(framename,colname):
zippedDateSlices = [] #will hold the tuples of start and end indices
fullDateIndexList = [] #will hold the list of day indexes
for x in range(1,32):
fullDateIndexList.append(PullDate(x,framename))
for x in range(len(fullDateIndexList)):
if x==len(fullDateIndexList)-1:
break
elif fullDateIndexList[x]==False :
continue
else:
mytuple = (fullDateIndexList[x],fullDateIndexList[x+1])
zippedDateSlices.append(mytuple)
listofDayFrames = []
for x in zippedDateSlices:
listofDayFrames.append(sliceDF(x,framename))
return listofDayFrames
def makeKDE(series,clusnum):
""""Series is a series and clusnum is the number of clusters.
Returns a (dataframe,kmeans object)"""
stouse = np.array(series.dropna())
artouse = np.resize(stouse,(len(stouse),1))
kmetouse = cluster.MiniBatchKMeans(n_clusters = clusnum)
kmetouse.fit(artouse)
predtouse = kmetouse.predict(artouse)
frametoret = pd.DataFrame()
ziplist = zip(predtouse,stouse)
for x in range(clusnum):
frametoret[str(x)] = pd.Series([z for y,z in ziplist if y ==x])
return frametoret,kmetouse
def HMMmaker(kclus,DFlist,statenum,s_name):
"""Takes in a kmeans object and a list of dataframes containing days."""
detlist = []
warnings.filterwarnings("ignore", category=DeprecationWarning)
for x in DFlist:
benchHMM=hmm.GaussianHMM(n_components=statenum)
x['pred'+s_name] = kclus.predict(np.resize(x[s_name],(len(x[s_name]),1)))
benchHMM.fit([np.reshape(x['pred'+s_name],(len(x),1))])
print np.linalg.det(benchHMM.transmat_)
detlist.append(np.linalg.det(benchHMM.transmat_))
return detlist
def proper_convert(nanDaylist):
trashlist = []
for x in nanDaylist:
trashlist.append(x.dropna(subset=['hr','accel_magnitude','skin_temp']))
validatedList = []
for x in trashlist:
if len(x)==0 :
print 'Dropped'
else:
validatedList.append(x)
print 'Total dropped:'+str(len(trashlist)-len(validatedList))
return validatedList | mit |
raticate/AS-RANK | 3_store_cust_cone_size.py | 1 | 3256 | import os, sys, MySQLdb
from datetime import datetime, timedelta
import time
import datetime as dt
db = MySQLdb.connect(host="localhost",
user="root",
passwd="",
db="ASRank")
cur = db.cursor()
def process_file(date, ext):
current_timestamp = int(date)
print 'I am parsing ', date + ext
current = date + ext
file = 'data.caida.org/datasets/as-relationships/serial-1/' + current
check = "alert.txt"
## decompress the file if needed
try:
if '.ppdc-ases.txt.gz' in file and os.path.isfile(file):
command = 'gzip -d ' + file
check = file[:-3]
os.system(command)
elif '.ppdc-ases.txt.bz2' in file and os.path.isfile(file) :
command = 'bunzip2 ' + file
check = file[:-4]
os.system(command)
except:
print 'no need to decompress'
if os.path.isfile(check):
#open file, read each line
with open (check, 'r') as fh:
for line1 in fh:
# skip headers
if not line1.startswith("#"):
# split line to obtain as and customers
tokens = line1.split()
as1 = tokens[0]
customers = tokens[1:]
print as1
sql_command = """ INSERT IGNORE INTO CustomerConeSize (IPversion, AS1, Size, startdate) VALUES (%s, %s, %s, %s); """
cur.execute(sql_command, (4, as1, len(customers), current_timestamp))
db.commit()
else:
print "didn't find", check
## function ends
## download all the relationship if needed
link = """data.caida.org/datasets/as-relationships/"""
command = """wget --no-parent -r """ +link
print '\n download list of files :', command
os.system(command)
## Load the list of treated files :
list_treated_files = []
try:
with open('list_of_treated_files_rel3.txt', 'r') as fg:
for line in fg:
line= str(line).strip()
if line not in list_treated_files:
list_treated_files.append(str(line).strip())
except:
# the tracking file does not exist yet; opening in append mode creates it empty
with open('list_of_treated_files_rel3.txt', 'a') as fk:
print
## Yesterday's date (now minus one day)
A = str(datetime.now() + timedelta(days=-1))
table = A.split(' ')
date_info = table[0].split('-')
date_info_end = [int(date_info[0]), int(date_info[1]) ]
#print date_info_end
date_info_start = [1998, 01]
k_year = date_info_start[0]
k_month = date_info_start[1]
while (k_year <= date_info_end[0]) :
if k_month >9:
elmt = str(k_year) + str(k_month) + '01'
else:
elmt = str(k_year) + '0' + str(k_month) + '01'
if k_month == 12:
k_month = 1
k_year +=1
elif k_month<12:
k_month +=1
#output = ['.as-rel.txt.gz', '.ppdc-ases.txt.gz']
output = ['.ppdc-ases.txt.gz', '.ppdc-ases.txt.bz2']
for ext in output:
current = elmt + ext
if str(current).strip() not in list_treated_files:
process_file(elmt, ext)
with open('list_of_treated_files_rel3.txt', 'a') as fh:
fh.write('%s \n' %(elmt+ext))
else:
print 'do not treat ', elmt + ext
| mit |
ifuding/Kaggle | SVPC/Code/philly/main.py | 1 | 33932 | """
This version has improvements based on new feature engineering techniques observed in different kernels. Below are a few of them:
- https://www.kaggle.com/graf10a/lightgbm-lb-0-9675
- https://www.kaggle.com/rteja1113/lightgbm-with-count-features?scriptVersionId=2815638
- https://www.kaggle.com/nuhsikander/lgbm-new-features-corrected?scriptVersionId=2852561
- https://www.kaggle.com/aloisiodn/lgbm-starter-early-stopping-0-9539 (Original script)
"""
import pandas as pd
import time
import numpy as np
import gc
from feature_engineer import gen_features
from feature_engineer import timer
import keras_train
from nfold_train import nfold_train, models_eval
import tensorflow as tf
import os
import shutil
from lcc_sample import neg_sample
from tensorflow.python.keras.models import load_model,Model
from sklearn import preprocessing
from tensorflow.python.keras.preprocessing.text import Tokenizer, text_to_word_sequence
from tensorflow.python.keras.preprocessing.sequence import pad_sequences
from CNN_Keras import get_word2vec_embedding
import lightgbm as lgb
import pickle
from RankGauss import rank_INT, rank_INT_DF
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from multiprocessing import Pool
import concurrent.futures
import glob
from leak_cols import LEAK_LIST
flags = tf.app.flags
flags.DEFINE_string('input-training-data-path', "../../Data/", 'data dir override')
flags.DEFINE_string('output-model-path', ".", 'model dir override')
flags.DEFINE_string('model_type', "k", 'model type')
flags.DEFINE_integer('nfold', 10, 'number of folds')
flags.DEFINE_integer('ensemble_nfold', 5, 'number of ensemble models')
flags.DEFINE_string('emb_dim', '5', 'term embedding dim')
flags.DEFINE_integer('epochs', 1, 'number of Epochs')
flags.DEFINE_integer('batch_size', 128, 'Batch size')
flags.DEFINE_integer('batch_interval', 1000, 'batch print interval')
flags.DEFINE_float("emb_dropout", 0, "embedding dropout")
flags.DEFINE_string('full_connect_hn', "64, 32", 'full connect hidden units')
flags.DEFINE_float("full_connect_dropout", 0, "full connect drop out")
flags.DEFINE_bool("stacking", False, "Whether to stacking")
flags.DEFINE_bool("load_stacking_data", False, "Whether to load stacking data")
flags.DEFINE_bool("debug", False, "Whether to load small data for debuging")
flags.DEFINE_bool("neg_sample", False, "Whether to do negative sample")
flags.DEFINE_bool("lcc_sample", False, "Whether to do lcc sample")
flags.DEFINE_integer("sample_C", 1, "sample rate")
flags.DEFINE_bool("load_only_singleCnt", False, "Whether to load only singleCnt")
flags.DEFINE_bool("log_transform", False, "Whether to do log transform")
flags.DEFINE_bool("split_train_val", False, "Whether to split train and validate")
flags.DEFINE_integer("train_eval_len", 25000000, "train_eval_len")
flags.DEFINE_integer("eval_len", 2500000, "eval_len")
flags.DEFINE_bool("test_for_train", False, "Whether to use test data for train")
flags.DEFINE_bool("search_best_iteration", True, "Whether to search best iteration")
flags.DEFINE_integer("best_iteration", 1, "best iteration")
flags.DEFINE_string('search_iterations', "100,1500,100", 'search iterations')
flags.DEFINE_string('input-previous-model-path', "../../Data/", 'data dir override')
flags.DEFINE_bool("blend_tune", False, "Whether to tune the blen")
flags.DEFINE_integer('vocab_size', 300000, 'vocab size')
flags.DEFINE_integer('max_len', 100, 'max description sequence length')
# flags.DEFINE_integer('max_title_len', 100, 'max title sequence length')
flags.DEFINE_bool("load_wv_model", True, "Whether to load word2vec model")
flags.DEFINE_string('wv_model_type', "fast_text", 'word2vec model type')
flags.DEFINE_string('wv_model_file', "wiki.en.vec.indata", 'word2vec model file')
flags.DEFINE_integer('gram_embedding_dim', 300, 'gram embedding dim')
flags.DEFINE_string('kernel_size_list', "1,2,3", 'kernel size list')
flags.DEFINE_string('filter_size', "32", 'cnn filter size list')
flags.DEFINE_string('rnn_units', "0", 'rnn_units')
flags.DEFINE_bool("uniform_init_emb", False, "Whether to uniform init the embedding")
flags.DEFINE_bool("fix_wv_model", True, "Whether to fix word2vec model")
flags.DEFINE_bool("lgb_boost_dnn", True, "Whether to fix word2vec model")
flags.DEFINE_integer('lgb_ensemble_nfold', 5, 'number of lgb ensemble models')
flags.DEFINE_bool("load_from_pickle", True, "Whether to load from pickle")
flags.DEFINE_bool("vae_mse", True, "vae_mse")
flags.DEFINE_integer('vae_intermediate_dim', 100, 'vae_intermediate_dim')
flags.DEFINE_integer('vae_latent_dim', 100, 'vae_latent_dim')
flags.DEFINE_bool("load_from_vae", False, "load_from_vae")
flags.DEFINE_bool("predict_feature", False, "predict_feature")
flags.DEFINE_bool("aug_data", False, "aug_data")
flags.DEFINE_bool("leak_test_for_train", False, "leak_test_for_train")
FLAGS = flags.FLAGS
path = FLAGS.input_training_data_path
HIST_SIZE = 1000
SORT_LEN = 1000
top_cols = ['f190486d6', '58e2e02e6', 'eeb9cd3aa', '9fd594eec', '6eef030c1',
'15ace8c9f', 'fb0f5dbfe', '58e056e12', '20aa07010', '024c577b9',
'd6bb78916', 'b43a7cfd5', '58232a6fb', '1702b5bf0', '324921c7b',
'62e59a501', '2ec5b290f', '241f0f867', 'fb49e4212', '66ace2992',
'f74e8f13d', '5c6487af1', '963a49cdc', '26fc93eb7', '1931ccfdd',
'703885424', '70feb1494', '491b9ee45', '23310aa6f', 'e176a204a',
'6619d81fc', '1db387535', 'fc99f9426', '91f701ba2', '0572565c2',
'190db8488', 'adb64ff71', 'c47340d97', 'c5a231d81', '0ff32eb98']
def select_pred(df, col):
print ('Append column: ', col)
file_name = glob.glob(path + '_' + col + '_2018_07_30_*.csv')[0]
print(file_name)
pred_col = pd.read_csv(file_name, index_col = 'ID')
# exit(0)
pred_col[col + '_p'] = pred_col['target']
pred_col[col + '_new'] = df[col]
select_s = (df[col] == 0) #& (df[col + '_p'] >= 319) & (df[col + '_p'] <= 319612000)
pred_col[col + '_new'][select_s] = pred_col[col + '_p'][select_s]
return pred_col
# def select_pred(s):
# col = s.name
# print ('Append column: ', col)
# pred_col = pd.read_csv(path + '_' + col + '_2018_07_25_07.csv', index_col = 'ID')
# pred_col[col + '_p'] = pred_col['target']
# pred_col[col + '_new'] = s
# select_s = (s == 0) #& (df[col + '_p'] >= 319) & (df[col + '_p'] <= 319612000)
# pred_col[col + '_new'][select_s] = pred_col[col + '_p'][select_s]
# return pred_col
def append_pred_columns(df):
# select_pred(df, top_cols[0])
MAX_WORKERS = 8
cols = top_cols #[:5]
print(cols)
col_ind_begin = 0
col_len = len(cols)
while col_ind_begin < col_len:
col_ind_end = min(col_ind_begin + MAX_WORKERS, col_len)
with concurrent.futures.ThreadPoolExecutor(max_workers = MAX_WORKERS) as executor:
future_predict = {executor.submit(select_pred, df, cols[ind]): ind for ind in range(col_ind_begin, col_ind_end)}
for future in concurrent.futures.as_completed(future_predict):
ind = future_predict[future]
try:
pred_cols = future.result()
df[[cols[ind] + '_p', cols[ind] + '_new']] = pred_cols[[cols[ind] + '_p', cols[ind] + '_new']]
except Exception as exc:
print('%dth feature normalize generate an exception: %s' % (ind, exc))
col_ind_begin = col_ind_end
if col_ind_begin % 100 == 0:
print('Gen %d normalized features' % col_ind_begin)
def Min_Max_Normalize(s):
return (s - s.min()) / (s.max() - s.min() + 1e-9)
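# Example (illustrative): Min_Max_Normalize(pd.Series([0., 5., 10.]))
# -> approximately [0.0, 0.5, 1.0]; the 1e-9 term guards against a zero range.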
def Avg_Std_Normalize(s):
return (s - s.mean()) / s.std() # pandas Series has no .avg(); mean() is intended
def Normalize(df, func):
MAX_WORKERS = 8
cols = list(df.columns.values)
# print(cols)
col_ind_begin = 0
col_len = len(cols)
while col_ind_begin < col_len:
col_ind_end = min(col_ind_begin + MAX_WORKERS, col_len)
with concurrent.futures.ThreadPoolExecutor(max_workers = MAX_WORKERS) as executor:
future_predict = {executor.submit(func, df[cols[ind]]): ind for ind in range(col_ind_begin, col_ind_end)}
for future in concurrent.futures.as_completed(future_predict):
ind = future_predict[future]
try:
df[cols[ind]] = future.result()
except Exception as exc:
print('%dth feature normalize generate an exception: %s' % (ind, exc))
col_ind_begin = col_ind_end
if col_ind_begin % 10000 == 0:
print('Gen %d normalized features' % col_ind_begin)
def SortColumn(s, r):
"""Sort row r's values in descending order; returns (row index, sorted
values, count of positive entries)."""
temp = np.array(sorted(s,reverse=True))
if r % 10000 == 0:
print ("sort rows: ", r)
return r, temp, np.sum(temp > 0)
def SortData(df, sort_len):
CPU_CORES = 8
sort_array = df.copy().values
res = [SortColumn(sort_array[r, :], r) for r in range(sort_array.shape[0])]
# with Pool(processes=CPU_CORES) as p:
# res = [p.apply_async(SortColumn, args=(sort_array[r, :], r)) for r in range(sort_array.shape[0])]
max_valid_column = 0
for r in res:
r, temp, valid_column = r #.get()
sort_array[r] = temp
if valid_column > max_valid_column:
max_valid_column = valid_column
print ("max_valid_column: ", max_valid_column)
sort_len = min(sort_len, sort_array.shape[1])
return pd.DataFrame(sort_array[:, :sort_len], index = df.index, columns = ['sort_' + str(i) for i in range(sort_len)])
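# Example (illustrative): a row [0, 7, 0, 3] becomes [7, 3, 0, 0] with 2 valid
# (non-zero) columns; only the first sort_len sorted columns are kept as features.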
def CalcHistMeta(r, HistSize):
hist = np.zeros(HistSize)
# print (r)
for d in r:
# if d != 0:
vid = int(d * (HistSize - 1.0));
hist[vid] += 1
return hist
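# Example (illustrative): with HistSize=1000 a normalized value d lands in bin
# int(d * 999), e.g. d=0.0 -> bin 0, d=0.5 -> bin 499, d=1.0 -> bin 999.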
def HistProcess(df):
df = df.copy()
df = df.apply(np.log1p)
df_max = df.max().max()
df_min = df.min().min()
print ("df min: ", df_min, "max: ", df_max)
df_local = (df - df_min) / (df_max - df_min)
return df_local
def CalcHist(df, HistSize):
df_local = df #HistProcess(df)
hist_list = []
for i in range(df_local.shape[0]):
hist_list.append(CalcHistMeta(df_local.iloc[i], HistSize))
# print (hist_list)
# exit(0)
if i % 10000 == 0:
print ("Calc Hist Rows: ", i)
return pd.DataFrame(np.array(hist_list), index = df.index, columns = ['hist_' + str(i) for i in range(HistSize)])
def norm_cal_hist(df, hist_size):
return CalcHist(HistProcess(df), hist_size)
def gen_statistic_features(df):
statistic_features = pd.DataFrame(index = df.index)
statistic_features["nz_mean"] = df.apply(lambda x: x[x!=0].mean(), axis=1)
statistic_features["nz_max"] = df.apply(lambda x: x[x!=0].max(), axis=1)
statistic_features["nz_min"] = df.apply(lambda x: x[x!=0].min(), axis=1)
statistic_features["mean"] = df.apply(lambda x: x.mean(), axis=1)
statistic_features["ez"] = df.apply(lambda x: len(x[x==0]), axis=1)
return statistic_features
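# Example (illustrative): for a row [0, 2, 4] this yields nz_mean=3, nz_max=4,
# nz_min=2, mean=2 and ez=1 (the count of zero entries).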
def gen_features(df, prefix, hist_size = HIST_SIZE, sort_len = SORT_LEN):
print ('Gen features for cols: ', prefix, hist_size, sort_len)
statistic_features = gen_statistic_features(df)
sort_features = SortData(df, sort_len)
hist_features = norm_cal_hist(df, hist_size)
features = pd.concat([sort_features, hist_features, statistic_features], axis = 1, sort = False)
features_rename_dict = dict([(f, prefix + f) for f in features.columns.values])
features.rename(columns = features_rename_dict, inplace = True)
return features
def find_top_nz_cols(df, cols_list, k):
cols_nz_dict = {}
for i in range(len(cols_list)):
cols = cols_list[i]
nz_num = df[cols].apply(lambda x: len(x[x != 0])).sum()
cols_nz_dict[i] = nz_num
sort_cols = pd.Series(cols_nz_dict).sort_values(ascending = False).index.values[:k]
sort_cols = [cols_list[i] for i in sort_cols]
print ('Top nz cols: ', sort_cols)
return sort_cols
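# Example (illustrative): with cols_list=[['a'], ['b']], column 'a' holding 10
# non-zeros and 'b' holding 3, find_top_nz_cols(df, cols_list, 1) -> [['a']].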
def gen_group_features(df, all_hist_size, all_sort_len, group_hist_size, group_sort_len, group_num):
all_cols_features = gen_features(df, 'all_cols_', all_hist_size, all_sort_len)
cols_group_features = []
for cols in find_top_nz_cols(df, LEAK_LIST, group_num):
single_group_features = gen_features(df[cols], cols[0] + '_', group_hist_size, group_sort_len)
cols_group_features.append(single_group_features)
return pd.concat([all_cols_features] + cols_group_features, axis = 1, sort = False)
def AugData(df, df_local, col_select_rate):
print ('Aug data using col_select_rate: ', col_select_rate)
df_shape = df.shape
hist_array = np.zeros((df_shape[0], HIST_SIZE))
# sort_len = int(df_shape[1] * col_select_rate / 2)
sort_array = np.zeros((df_shape[0], SORT_LEN))
max_array = np.zeros(df_shape[0])
min_array = np.zeros(df_shape[0])
mean_array = np.zeros(df_shape[0])
nz_mean_array = np.zeros(df_shape[0])
nz_min_array = np.zeros(df_shape[0])
pred_max_array = np.zeros(df_shape[0])
pred_min_array = np.zeros(df_shape[0])
pred_mean_array = np.zeros(df_shape[0])
pred_nz_mean_array = np.zeros(df_shape[0])
pred_nz_min_array = np.zeros(df_shape[0])
rest_empty_num = 0
select_array = np.random.choice([True, False], df_shape, p = [col_select_rate, 1 - col_select_rate])
for i in range(df.shape[0]):
r_select = df_local.iloc[i].values[select_array[i]]
r_pred = df.iloc[i].values[~select_array[i]]
hist_array[i] = CalcHistMeta(r_select, HIST_SIZE)
sort_array[i] = np.array(sorted(r_select,reverse=True)[:SORT_LEN])
max_array[i] = r_select.max()
min_array[i] = r_select.min()
mean_array[i] = r_select.mean()
if r_select[r_select != 0].size > 0:
nz_mean_array[i] = r_select[r_select != 0].mean()
nz_min_array[i] = r_select[r_select != 0].min()
if r_pred[r_pred != 0].size > 0:
pred_max_array[i] = r_pred.max()
pred_min_array[i] = r_pred.min()
pred_mean_array[i] = r_pred.mean()
pred_nz_mean_array[i] = r_pred[r_pred != 0].mean()
pred_nz_min_array[i] = r_pred[r_pred != 0].min()
else:
rest_empty_num += 1
if i % 1000 == 0:
print ("Aug Data Rows: ", i)
print ('Rest empty number when doing data augmentation data: ', rest_empty_num)
df_array = np.c_[hist_array, sort_array, max_array, min_array, mean_array, nz_mean_array, nz_min_array,
pred_max_array, pred_min_array, pred_mean_array, pred_nz_mean_array, pred_nz_min_array]
statistic_cols = ['max', 'min', 'mean', 'nz_mean', 'nz_min', 'pred_max', 'pred_min', 'pred_mean', 'pred_nz_mean', 'pred_nz_min']
train_cols = ['hist_' + str(i) for i in range(HIST_SIZE)] + ['sort_' + str(i) for i in range(SORT_LEN)] + ['max', 'min', 'mean', 'nz_mean', 'nz_min']
cols = ['hist_' + str(i) for i in range(HIST_SIZE)] + ['sort_' + str(i) for i in range(SORT_LEN)] + statistic_cols
return pd.DataFrame(df_array, index = df.index, columns = cols), train_cols
def LoadAugDdata(target):
print("\nData Load Stage")
# if FLAGS.load_from_pickle:
with open(path + 'train_test_nonormalize.pickle', 'rb') as handle:
df, test_ID, y_train, train_row = pickle.load(handle)
# target = 'pred_nz_mean'
aug_data_list = []
# df = df[:2000]
# df_local = HistProcess(df)
train_cols = ['hist_' + str(i) for i in range(HIST_SIZE)] + ['sort_' + str(i) for i in range(SORT_LEN)] + ['max', 'min', 'mean', 'nz_mean', 'nz_min']
# train_cols = ['sort_' + str(i) for i in range(SORT_LEN)] + ['max', 'min', 'mean', 'nz_mean', 'nz_min']
select_rates = np.arange(0.6, 0.85, 0.025)
# with Pool(processes=8) as p:
# res = [p.apply_async(AugData, args=(df, df_local, select_rates[i])) for i in range(len(select_rates))]
# res = [r.get() for r in res]
# for aug_data, train_cols in res:
# # with open(path + 'aug_data_sort_hist_' + str(i) + '.pickle', 'wb+') as handle:
# # pickle.dump(aug_data, handle, protocol=pickle.HIGHEST_PROTOCOL)
# aug_data = aug_data[aug_data[target] != 0]
# aug_data = aug_data.apply(np.log1p)
# aug_data_list.append(aug_data)
for i in select_rates:
# if os.path.isfile(path + 'aug_data_sort_hist_' + str(i) + '.pickle'):
# continue
# aug_data, train_cols = AugData(df, df_local, i)
# with open(path + 'aug_data_sort_hist_' + str(i) + '.pickle', 'wb+') as handle:
# pickle.dump(aug_data, handle, protocol=pickle.HIGHEST_PROTOCOL)
with open(path + 'aug_data_sort_hist_' + str(i) + '.pickle', 'rb') as handle:
aug_data = pickle.load(handle)
aug_data = aug_data[aug_data[target] != 0]
# aug_data = aug_data.apply(np.log1p).astype(np.float32)
aug_data_list.append(aug_data)
# Load Test Data
with open(path + 'sort_df_log1p_minmaxnorm.pickle', 'rb+') as handle:
sort_df = pickle.load(handle)
with open(path + 'hist_df_log1p_minmaxnorm.pickle', 'rb+') as handle:
hist_df = pickle.load(handle)
with open(path + 'statistic_features_use_full_cols_log1p_minmaxnorm.pickle', 'rb+') as handle:
statistic_features = pickle.load(handle)
test_data = pd.concat([hist_df, sort_df, statistic_features], axis = 1, sort = False)
print ("test_data: \n", test_data.head())
test_id = test_data.index
df = pd.concat(aug_data_list, axis = 0, sort = False)
print ("df: \n", df.head())
train_data = df[train_cols]
train_label = df[target].apply(np.log1p)
# test_data = None
# test_id = None
leak_target = None
valide_data = None
valide_label = None
weight = None
keras_train.USED_FEATURE_LIST = list(train_data.columns.values)
return train_data, train_label, test_data, test_id, valide_data, valide_label, weight, leak_target
def load_data(col):
print("\nData Load Stage")
if FLAGS.load_from_pickle:
with open(path + 'train_test_nonormalize.pickle', 'rb') as handle:
df, test_ID, y_train, train_row = pickle.load(handle)
train_leak_target = pd.read_csv(path + 'train_target_leaktarget_38_1_2018_08_20_03_07_22.csv', index_col = 'ID')
test_leak_target = pd.read_csv(path + 'test_target_leaktarget_38_1_2018_08_19_17_15_26.csv', index_col = 'ID')
leak_target = train_leak_target.append(test_leak_target)['leak_target'].apply(np.log1p)
# leak_target = leak_target.loc[df[train_row:].index, 'leak_target']
# leak_target = leak_target[leak_target != 0]
print ('leak_target shape: ', leak_target.shape)
print("Shape before append columns: ", df.shape)
origin_cols = df.columns
# df = HistProcess(df)
# hist_df = CalcHist(df, HIST_SIZE)
# with open(path + 'hist_df_log1p_minmaxnorm.pickle', 'wb+') as handle:
# pickle.dump(hist_df, handle, protocol=pickle.HIGHEST_PROTOCOL)
# # Sort every row by column value
# sort_df = SortData(df)
# with open(path + 'sort_df_log1p_minmaxnorm.pickle', 'wb+') as handle:
# pickle.dump(sort_df, handle, protocol=pickle.HIGHEST_PROTOCOL)
# exit(0)
with open(path + 'sort_df.pickle', 'rb+') as handle:
sort_df = pickle.load(handle)
with open(path + 'hist_df.pickle', 'rb+') as handle:
hist_df = pickle.load(handle)
# append_pred_columns(df)
top_cols_pred = [col + '_p' for col in top_cols]
top_cols_new = [col + '_new' for col in top_cols]
# df[top_cols_pred + top_cols_new].to_csv('new_cols.csv')
if FLAGS.model_type == 'r':
df = df[top_cols_new[::-1]]
# rnn_pred = pd.read_csv(path + 'sub_2018_07_31_11_15_04.csv', index_col = 'ID')
# df['rnn_pred'] = rnn_pred['target']
# df["nz_mean"] = df[top_cols].apply(lambda x: x[x!=0].mean(), axis=1)
# df["nz_max"] = df[top_cols].apply(lambda x: x[x!=0].max(), axis=1)
# df["nz_min"] = df[top_cols].apply(lambda x: x[x!=0].min(), axis=1)
# df["ez"] = df[top_cols].apply(lambda x: len(x[x==0]), axis=1)
# df["mean"] = df[top_cols].apply(lambda x: x.mean(), axis=1)
# df["max"] = df[top_cols].apply(lambda x: x.max(), axis=1)
# df["min"] = df[top_cols].apply(lambda x: x.min(), axis=1)
# df["nz_mean"] = df.apply(lambda x: x[x!=0].mean(), axis=1)
# df["nz_min"] = df.apply(lambda x: x[x!=0].min(), axis=1)
# df["mean"] = df.apply(lambda x: x.mean(), axis=1)
# df["max"] = df.apply(lambda x: x.max(), axis=1)
# df["min"] = df.apply(lambda x: x.min(), axis=1)
# statistic_features_cols = ['max', 'min', 'mean', 'nz_mean', 'nz_min']
# with open(path + 'statistic_features_use_full_cols_log1p_minmaxnorm.pickle', 'wb+') as handle:
# pickle.dump(df[statistic_features_cols], handle, protocol=pickle.HIGHEST_PROTOCOL)
# exit(0)
# df["pred_mean"] = df[top_cols_pred].apply(lambda x: x.mean(), axis=1)
# df["pred_max"] = df[top_cols_pred].apply(lambda x: x.max(), axis=1)
# df["pred_min"] = df[top_cols_pred].apply(lambda x: x.min(), axis=1)
# statistic_features_cols = ["nz_mean", "nz_max", "nz_min", "ez", "mean", "max", "min", "pred_mean", "pred_max", "pred_min"]
# with open(path + 'statistic_features.pickle', 'wb+') as handle:
# pickle.dump(df[statistic_features_cols], handle, protocol=pickle.HIGHEST_PROTOCOL)
# with open(path + 'statistic_features.pickle', 'rb+') as handle:
# statistic_features = pickle.load(handle)
# pred_mean = pd.read_csv(path + '_pred_mean_2018_08_14_14.csv', index_col = 'ID').rename(columns = {'target': 'aug_pred_mean'})
# pred_max = pd.read_csv(path + '_pred_max_2018_08_14_14.csv', index_col = 'ID').rename(columns = {'target': 'aug_pred_max'})
# pred_nz_mean = pd.read_csv(path + '_pred_nz_mean_2018_08_14_16.csv', index_col = 'ID').rename(columns = {'target': 'aug_pred_nz_mean'})
# pred_nz_min = pd.read_csv(path + '_pred_nz_min_2018_08_14_17.csv', index_col = 'ID').rename(columns = {'target': 'aug_pred_nz_min'})
# df.drop(columns = origin_cols, inplace = True)
# df = pd.concat([sort_df, hist_df, statistic_features, pred_mean, pred_max, pred_nz_mean, pred_nz_min], axis = 1, sort = False).fillna(0)
all_hist_size = 100
all_sort_len = 100
group_hist_size = 100
group_sort_len = 20
group_num = 4
df = gen_group_features(df, all_hist_size, all_sort_len, group_hist_size, group_sort_len, group_num).fillna(0)
# y_train = y_train[:1000]
feature_label = '_'.join([str(all_hist_size), str(all_sort_len), str(group_hist_size), str(group_sort_len)])
with open(path + 'group_features_' + feature_label + '.pickle', 'wb+') as handle:
pickle.dump(df, handle, protocol=pickle.HIGHEST_PROTOCOL)
# with open(path + 'group_features.pickle', 'rb+') as handle:
# df = pickle.load(handle)
print("Shape after append columns: ", df.shape)
# with Pool(processes=8) as p:
# res = [p.apply_async(select_pred, args=(df, col)) for col in top_cols[:5]]
# res = [r.get() for r in res]
# exit(0)
# pred_col1_filter = pred_col1[(pred_col1['target'] >= 319) & (pred_col1['target'] <= 319612000)]
# exit(0)
# df[cols].to_csv(path + 'df.csv')
# df = df.apply(np.log1p) #df[cols].apply(np.log1p)
# print(df.head)
# exit(0)
# print(df)
print("Do normalize...")
df = df.apply(np.log1p)
# df = (df - df.mean())/ df.std()
# Normalize(df, Min_Max_Normalize)
print(df.head())
# df = rank_INT_DF(df) #df.apply(rank_INT)
# with open(path + 'train_test_rank_int_rand_tie.pickle', 'wb+') as handle:
# pickle.dump([df, test_ID, y_train, train_row], handle, protocol=pickle.HIGHEST_PROTOCOL)
# exit(0)
else:
if FLAGS.debug:
nrow = 100
else:
nrow = None
train = pd.read_csv(path + '/train.csv', nrows = nrow, index_col = 'ID')
test = pd.read_csv(path + '/test.csv', nrows = nrow, index_col = 'ID')
test_ID = test.index #['ID']
y_train = train['target']
y_train = np.log1p(y_train)
# train.drop("ID", axis = 1, inplace = True)
train.drop("target", axis = 1, inplace = True)
# test.drop("ID", axis = 1, inplace = True)
# cols_with_onlyone_val = train.columns[train.nunique() == 1]
# with open('cols_with_onlyone_val.pickle', 'wb+') as handle:
# pickle.dump(cols_with_onlyone_val, handle, protocol=pickle.HIGHEST_PROTOCOL)
with open('cols_with_onlyone_val.pickle', 'rb') as handle:
cols_with_onlyone_val = pickle.load(handle)
print ("cols_with_onlyone_val: ", cols_with_onlyone_val)
df = train.append(test)
train_row = train.shape[0]
df.drop(cols_with_onlyone_val.values, axis=1, inplace=True)
# test.drop(cols_with_onlyone_val.values, axis=1, inplace=True)
NUM_OF_DECIMALS = 32
df = df.round(NUM_OF_DECIMALS)
# test = test.round(NUM_OF_DECIMALS)
# colsToRemove = []
# columns = train.columns
# for i in range(len(columns)-1):
# v = train[columns[i]].values
# for j in range(i + 1,len(columns)):
# if np.array_equal(v, train[columns[j]].values):
# colsToRemove.append(columns[j])
# with open('colsToRemove.pickle', 'wb+') as handle:
# pickle.dump(colsToRemove, handle, protocol=pickle.HIGHEST_PROTOCOL)
with open('colsToRemove.pickle', 'rb') as handle:
colsToRemove = pickle.load(handle)
print ("dupCols: ", colsToRemove)
df.drop(colsToRemove, axis=1, inplace=True)
with open(path + 'train_test_nonormalize.pickle', 'wb+') as handle:
pickle.dump([df, test_ID, y_train, train_row], handle, protocol=pickle.HIGHEST_PROTOCOL)
exit(0)
for col in df.columns.values:
figure = df[col].hist(histtype = 'step')
plt.savefig(path + "/figures/df_" + col + ".png")
exit(0)
print("Do normalize...")
# df = (df - df.mean())/ df.std()
df = (df - df.min())/ (df.max() - df.min())
# df = rank_INT_DF(df) #df.apply(rank_INT)
with open(path + 'train_test_minmax.pickle', 'wb+') as handle:
pickle.dump([df, test_ID, y_train, train_row], handle, protocol=pickle.HIGHEST_PROTOCOL)
# exit(0)
if FLAGS.load_from_vae:
vae_data = np.load(path + 'vae_data.npy')
cols = ["vae_" + str(i) for i in range(vae_data.shape[1])]
vae_df = pd.DataFrame(vae_data, columns = cols, index = df.index)
df[cols] = vae_df
print("after add vae shape: ", df.shape)
# print(df.head)
if FLAGS.lgb_boost_dnn:
keras_train.USED_FEATURE_LIST += ['lgb_pred']
if FLAGS.model_type == 'v' or FLAGS.model_type == 'r':
train_data = df
else:
train_data = df.iloc[:train_row, :]
# Append leak rows
if FLAGS.leak_test_for_train:
valid_leak_test_data = leak_target[df[train_row:].index]
valid_leak_test_data = valid_leak_test_data[valid_leak_test_data != 0]
print ('Leak test lenght: ', valid_leak_test_data.shape[0])
train_data = train_data.append(df.loc[valid_leak_test_data.index])
y_train = y_train.append(valid_leak_test_data)
if FLAGS.predict_feature:
valid_idx = (df[col] != 0)
# print(valid_idx)
train_data = df.loc[valid_idx, df.columns != col]
test_data = df.loc[:, df.columns != col]
train_label = df.loc[valid_idx, col]
test_id = df.index
else:
if FLAGS.model_type == 'r':
test_data = df
test_id = df.index
else:
test_data = df.iloc[train_row:, :]
test_id = test_ID
train_label = y_train.values
valide_data = None
valide_label = None
weight = None
keras_train.USED_FEATURE_LIST = list(train_data.columns.values)
return train_data, train_label, test_data, test_id, valide_data, valide_label, weight, leak_target
def sub(models, stacking_data = None, stacking_label = None, stacking_test_data = None, test = None, \
scores_text = None, tid = None, sub_re = None, col = None, leak_target = None, aug_data_target = None):
tmp_model_dir = "./model_dir/"
if not os.path.isdir(tmp_model_dir):
os.makedirs(tmp_model_dir, exist_ok=True)
if FLAGS.stacking:
np.save(os.path.join(tmp_model_dir, "stacking_train_data.npy"), stacking_data)
np.save(os.path.join(tmp_model_dir, "stacking_train_label.npy"), stacking_label)
np.save(os.path.join(tmp_model_dir, "stacking_test_data.npy"), stacking_test_data)
elif FLAGS.model_type == 'v':
np.save(os.path.join(tmp_model_dir, "vae_data.npy"), stacking_data)
else:
# if FLAGS.load_stacking_data:
# sub2[coly] = sub_re
# else:
sub_re = pd.DataFrame(models_eval(models, test),columns=["target"],index=tid)
sub_re["target"] = np.expm1(sub_re["target"].values)
# sub_re["target"][leak_target.index] = leak_target
# blend = sub2 #blend[sub2.columns]
if FLAGS.predict_feature:
time_label = "_" + col + time.strftime('_%Y_%m_%d_%H', time.gmtime())
sub_name = tmp_model_dir + time_label + ".csv"
elif FLAGS.aug_data:
time_label = "_" + aug_data_target + time.strftime('_%Y_%m_%d_%H', time.gmtime())
sub_name = tmp_model_dir + time_label + ".csv"
else:
time_label = time.strftime('_%Y_%m_%d_%H_%M_%S', time.gmtime())
sub_name = tmp_model_dir + "sub" + time_label + ".csv"
sub_re.to_csv(sub_name)
# save model to file
for i, model in enumerate(models):
if (model[1] == 'l'):
model_name = tmp_model_dir + "model_" + str(i) + time_label + ".txt"
model[0].save_model(model_name)
elif (model[1] == 'k' or model[1] == 'r'):
model_name = tmp_model_dir + "model_" + str(i) + time_label + ".h5"
model[0].model.save(model_name)
scores_text_frame = pd.DataFrame(scores_text, columns = ["score_text"])
score_text_file = tmp_model_dir + "score_text" + time_label + ".csv"
scores_text_frame.to_csv(score_text_file, index=False)
scores = scores_text_frame["score_text"]
for i in range(FLAGS.epochs):
scores_epoch = scores.loc[scores.str.startswith('epoch:{0}'.format(i + 1))].map(lambda s: float(s.split()[1]))
print ("Epoch{0} mean:{1} std:{2} min:{3} max:{4} median:{5}".format(i + 1, \
scores_epoch.mean(), scores_epoch.std(), scores_epoch.min(), scores_epoch.max(), scores_epoch.median()))
if not os.path.isdir(FLAGS.output_model_path):
os.makedirs(FLAGS.output_model_path, exist_ok=True)
for fileName in os.listdir(tmp_model_dir):
dst_file = os.path.join(FLAGS.output_model_path, fileName)
if os.path.exists(dst_file):
os.remove(dst_file)
shutil.move(os.path.join(tmp_model_dir, fileName), FLAGS.output_model_path)
if __name__ == "__main__":
def train_sub(col):
scores_text = []
aug_data_target = None
if FLAGS.aug_data:
aug_data_target = 'pred_nz_min'
train_data, train_label, test_data, tid, valide_data, valide_label, weight, leak_target = LoadAugDdata(aug_data_target)
else:
train_data, train_label, test_data, tid, valide_data, valide_label, weight, leak_target = load_data(col)
if not FLAGS.load_stacking_data:
models, stacking_data, stacking_label, stacking_test_data = nfold_train(train_data, train_label, flags = FLAGS, \
model_types = [FLAGS.model_type], scores = scores_text, test_data = test_data, \
valide_data = valide_data, valide_label = valide_label, cat_max = None, emb_weight = None, leak_target = leak_target)
else:
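# NOTE: sub_re and emb_weight are used below but are not defined anywhere in
# this branch; initialize them before running this path (e.g.
# sub_re = np.zeros((len(test_data), train_label.shape[1])), emb_weight = None).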
for i in range(train_label.shape[1]):
models, stacking_data, stacking_label, stacking_test_data = nfold_train(train_data, train_label[:, i], flags = FLAGS, \
model_types = [FLAGS.model_type], scores = scores_text, emb_weight = emb_weight, test_data = test_data \
# , valide_data = train_data[:100], valide_label = train_label[:100, i]
)
sub_re[:, i] = models_eval(models, test_data)
sub(models, stacking_data = stacking_data, stacking_label = stacking_label, stacking_test_data = stacking_test_data, \
test = test_data, scores_text = scores_text, tid = tid, col = col, leak_target = leak_target, aug_data_target = aug_data_target)
if FLAGS.predict_feature:
col_num = 0
for col in top_cols:
train_sub(col)
col_num += 1
# if col_num == 5:
# break
else:
train_sub(None) | apache-2.0 |
ChadFulton/statsmodels | statsmodels/tools/tests/test_grouputils.py | 1 | 11793 | from statsmodels.compat.python import PY37
import numpy as np
import pandas as pd
from statsmodels.tools.grouputils import Grouping
from statsmodels.tools.tools import categorical
from statsmodels.datasets import grunfeld, anes96
from pandas.util import testing as ptesting
import pytest
class CheckGrouping(object):
def test_reindex(self):
# smoke test
self.grouping.reindex(self.grouping.index)
def test_count_categories(self):
self.grouping.count_categories(level=0)
np.testing.assert_equal(self.grouping.counts, self.expected_counts)
def test_sort(self):
# data frame
sorted_data, index = self.grouping.sort(self.data)
expected_sorted_data = self.data.sort_index()
ptesting.assert_frame_equal(sorted_data, expected_sorted_data)
np.testing.assert_(isinstance(sorted_data, pd.DataFrame))
np.testing.assert_(not index.equals(self.grouping.index))
# make sure it copied
if hasattr(sorted_data, 'equals'): # newer pandas
np.testing.assert_(not sorted_data.equals(self.data))
# 2d arrays
sorted_data, index = self.grouping.sort(self.data.values)
np.testing.assert_array_equal(sorted_data,
expected_sorted_data.values)
np.testing.assert_(isinstance(sorted_data, np.ndarray))
# 1d series
series = self.data[self.data.columns[0]]
sorted_data, index = self.grouping.sort(series)
expected_sorted_data = series.sort_index()
ptesting.assert_series_equal(sorted_data, expected_sorted_data)
np.testing.assert_(isinstance(sorted_data, pd.Series))
if hasattr(sorted_data, 'equals'):
np.testing.assert_(not sorted_data.equals(series))
# 1d array
array = series.values
sorted_data, index = self.grouping.sort(array)
expected_sorted_data = series.sort_index().values
np.testing.assert_array_equal(sorted_data, expected_sorted_data)
np.testing.assert_(isinstance(sorted_data, np.ndarray))
@pytest.mark.xfail(condition=PY37,
reason='Unexplained conversion to complex on Python 3.7')
def test_transform_dataframe(self):
names = self.data.index.names
transformed_dataframe = self.grouping.transform_dataframe(
self.data,
lambda x : x.mean(),
level=0)
expected = self.data.reset_index().groupby(names[0]
).apply(lambda x : x.mean())[
self.data.columns]
np.testing.assert_array_equal(transformed_dataframe,
expected.values)
if len(names) > 1:
transformed_dataframe = self.grouping.transform_dataframe(
self.data, lambda x : x.mean(),
level=1)
expected = self.data.reset_index().groupby(names[1]
).apply(lambda x :
x.mean())[
self.data.columns]
np.testing.assert_array_equal(transformed_dataframe,
expected.values)
@pytest.mark.xfail(condition=PY37,
reason='Unexplained conversion to complex on Python 3.7')
def test_transform_array(self):
names = self.data.index.names
transformed_array = self.grouping.transform_array(
self.data.values,
lambda x : x.mean(),
level=0)
expected = self.data.reset_index().groupby(names[0]
).apply(lambda x : x.mean())[
self.data.columns]
np.testing.assert_array_equal(transformed_array,
expected.values)
if len(names) > 1:
transformed_array = self.grouping.transform_array(
self.data.values,
lambda x : x.mean(), level=1)
expected = self.data.reset_index().groupby(names[1]
).apply(lambda x :
x.mean())[
self.data.columns]
np.testing.assert_array_equal(transformed_array,
expected.values)
def test_transform_slices(self):
names = self.data.index.names
transformed_slices = self.grouping.transform_slices(
self.data.values,
lambda x, idx : x.mean(0),
level=0)
expected = self.data.reset_index().groupby(names[0]).mean()[
self.data.columns]
np.testing.assert_allclose(transformed_slices, expected.values,
rtol=1e-12, atol=1e-25)
if len(names) > 1:
transformed_slices = self.grouping.transform_slices(
self.data.values,
lambda x, idx : x.mean(0),
level=1)
expected = self.data.reset_index().groupby(names[1]
).mean()[
self.data.columns]
np.testing.assert_allclose(transformed_slices, expected.values,
rtol=1e-12, atol=1e-25)
def test_dummies_groups(self):
# smoke test, calls dummy_sparse under the hood
self.grouping.dummies_groups()
if len(self.grouping.group_names) > 1:
self.grouping.dummies_groups(level=1)
def test_dummy_sparse(self):
data = self.data
self.grouping.dummy_sparse()
expected = categorical(data.index.get_level_values(0).values,
drop=True)
np.testing.assert_equal(self.grouping._dummies.toarray(), expected)
if len(self.grouping.group_names) > 1:
self.grouping.dummy_sparse(level=1)
expected = categorical(data.index.get_level_values(1).values,
drop=True)
np.testing.assert_equal(self.grouping._dummies.toarray(),
expected)
class TestMultiIndexGrouping(CheckGrouping):
@classmethod
def setup_class(cls):
grun_data = grunfeld.load_pandas().data
multi_index_data = grun_data.set_index(['firm', 'year'])
multi_index_panel = multi_index_data.index
cls.grouping = Grouping(multi_index_panel)
cls.data = multi_index_data
cls.expected_counts = [20] * 11
class TestIndexGrouping(CheckGrouping):
@classmethod
def setup_class(cls):
grun_data = grunfeld.load_pandas().data
index_data = grun_data.set_index(['firm'])
index_group = index_data.index
cls.grouping = Grouping(index_group)
cls.data = index_data
cls.expected_counts = [20] * 11
def test_init_api():
# make a multi-index panel
grun_data = grunfeld.load_pandas().data
multi_index_panel = grun_data.set_index(['firm', 'year']).index
grouping = Grouping(multi_index_panel)
# check group_names
np.testing.assert_array_equal(grouping.group_names, ['firm', 'year'])
# check shape
np.testing.assert_array_equal(grouping.index_shape, (11, 20))
# check index_int
np.testing.assert_array_equal(grouping.labels,
[[ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
8, 8, 8, 8, 8, 8, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
10, 10, 10, 10, 10, 10, 10, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
17, 18, 19, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
14, 15, 16, 17, 18, 19, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 16, 17, 18, 19, 0, 1, 2, 3, 4, 5, 6, 7,
8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 0, 1, 2, 3, 4,
5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 0, 1,
2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
19, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
13, 14, 15, 16, 17, 18, 19, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 0, 1, 2, 3, 4, 5, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 0, 1, 2, 3,
4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]])
grouping = Grouping(multi_index_panel, names=['firms', 'year'])
np.testing.assert_array_equal(grouping.group_names, ['firms', 'year'])
# make a multi-index grouping
anes_data = anes96.load_pandas().data
multi_index_groups = anes_data.set_index(['educ', 'income',
'TVnews']).index
grouping = Grouping(multi_index_groups)
np.testing.assert_array_equal(grouping.group_names,
['educ', 'income', 'TVnews'])
np.testing.assert_array_equal(grouping.index_shape, (7, 24, 8))
# make a list multi-index panel
list_panel = multi_index_panel.tolist()
grouping = Grouping(list_panel, names=['firms', 'year'])
np.testing.assert_array_equal(grouping.group_names, ['firms', 'year'])
np.testing.assert_array_equal(grouping.index_shape, (11, 20))
# make a list multi-index grouping
list_groups = multi_index_groups.tolist()
grouping = Grouping(list_groups, names=['educ', 'income', 'TVnews'])
np.testing.assert_array_equal(grouping.group_names,
['educ', 'income', 'TVnews'])
np.testing.assert_array_equal(grouping.index_shape, (7, 24, 8))
# single-variable index grouping
index_group = multi_index_panel.get_level_values(0)
grouping = Grouping(index_group)
# the original multi_index_panel had its name changed inplace above
np.testing.assert_array_equal(grouping.group_names, ['firms'])
np.testing.assert_array_equal(grouping.index_shape, (220,))
# single variable list grouping
list_group = multi_index_panel.get_level_values(0).tolist()
grouping = Grouping(list_group)
np.testing.assert_array_equal(grouping.group_names, ["group0"])
np.testing.assert_array_equal(grouping.index_shape, 11*20)
# test generic group names
grouping = Grouping(list_groups)
np.testing.assert_array_equal(grouping.group_names,
['group0', 'group1', 'group2'])
| bsd-3-clause |
openai/triton | python/bench/bench_matmul.py | 1 | 2403 | import torch
import triton
def rounded_linspace(low, high, steps, div):
ret = torch.linspace(low, high, steps)
ret = torch.div(ret.int() + div - 1, div, rounding_mode='trunc') * div
ret = torch.unique(ret)
return list(map(int, ret))
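# Example (illustrative): rounded_linspace(512, 8192, 32, 128) yields unique,
# increasing ints rounded up to multiples of 128, e.g. 512, 768, 1024, ...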
# Square benchmarks
nt = {False: "n", True: "t"}
square_confs = [
triton.testing.Benchmark(
x_names=["M", "N", "K"],
x_vals=rounded_linspace(512, 8192, 32, 128),
line_arg="provider",
line_vals=["cublas", "triton", "cutlass"],
line_names=["cuBLAS", "Triton", "CUTLASS"],
ylabel="TFLOPS",
plot_name=f"matmul-square-{nt[AT]}{nt[BT]}",
args={"AT": AT, "BT": BT, "dtype": torch.float16},
) for AT in [False] for BT in [False]
]
# Transformer training benchmarks
transformer_confs = [
triton.testing.Benchmark(
x_names=[x],
x_vals=rounded_linspace(NK // 16, NK, 32, 128),
line_arg="provider",
line_vals=["cublas", "triton", "cutlass"],
line_names=["cuBLAS", "Triton", "CUTLASS"],
ylabel="TFLOPS",
plot_name=f"matmul-M{M}-{'NK'.replace(x, '')}{NK}",
args={"M": M, 'NK'.replace(x, ''): NK, "AT": False, "BT": False, "dtype": torch.float16}
) for NK in [12288]
for i, x in enumerate(["N", "K"])
for M in [2048]
]
@triton.testing.perf_report(square_confs)
def bench_op(M, N, K, AT, BT, dtype, provider, warmup=25, rep=75):
a = torch.rand((K, M) if AT else (M, K), device="cuda", dtype=dtype)
b = torch.rand((N, K) if BT else (K, N), device="cuda", dtype=dtype)
if AT:
a = a.t()
if BT:
b = b.t()
tflops = lambda ms: 2. * M * N * K / ms * 1e-9
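# A GEMM performs M*N*K multiply-adds, i.e. 2*M*N*K flops; with ms in
# milliseconds, flops / (ms * 1e-3) / 1e12 simplifies to 2*M*N*K / ms * 1e-9.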
if provider == "cublas":
ms, min_ms, max_ms = triton.testing.do_bench(lambda: torch.matmul(a, b), warmup=warmup, rep=rep)
return tflops(ms), tflops(max_ms), tflops(min_ms)
if provider == "triton":
ms, min_ms, max_ms = triton.testing.do_bench(lambda: triton.ops.matmul(a, b), warmup=warmup, rep=rep)
return tflops(ms), tflops(max_ms), tflops(min_ms)
if provider == "cutlass":
cutlass_matmul = triton.testing.cutlass_matmul
try:
ms, min_ms, max_ms = triton.testing.do_bench(lambda: cutlass_matmul(a, b), warmup=warmup, rep=rep)
return tflops(ms), tflops(max_ms), tflops(min_ms)
except Exception:
return None
return None
| mit |
maberyick/RPi-EPOC | EpocArmData/Features/feat_extraction_original.py | 1 | 47980 | from scipy import mean
from copy import copy
import matplotlib.mlab as mlab
from pylab import psd
import csv
import os.path
from itertools import islice
import time
from sklearn import preprocessing
import numpy as np
from scipy.fftpack import fft, rfftfreq, ifft, rfft, irfft
from scipy.signal import hanning, hann, hamming, blackman
from scipy.signal import butter, lfilter, filtfilt, welch, resample
import matplotlib.pyplot as plt
from scipy import signal
from numpy.linalg import inv
import time as timpo
from os import getcwd as getdir
from os import path as pth
from Library.pyeeg import pfd,hjorth_com
from scipy.io import savemat
import sys
from PyQt4 import QtCore, QtGui
from os.path import exists
from matplotlib.patches import Polygon
import matplotlib.patches as mpatches
from scipy.io import savemat, whosmat, loadmat
from Library import loadmat_new
import random as rndm
from itertools import chain
import collections
from matplotlib.colors import Normalize
from sklearn.preprocessing import StandardScaler
from sklearn.cross_validation import StratifiedShuffleSplit
from sklearn.cross_validation import train_test_split
from sklearn.grid_search import GridSearchCV
from sklearn.metrics import classification_report
from sklearn.svm import SVC
from sklearn.externals import joblib
"""---------------------------------------------------------------------
Alpha
|EEG|-|remove_DC|-|uV_convert|-|Filter|-|epoching|-|Scale|-|Extract Features|-|Save|
Alpha Ratio [Active(High) vs Standard(Low)]
Hjorth complexity [Active(High) vs Standard(Low)]
Beta
|EEG|-|remove_DC|-|uV_convert|-|Filter|-|epoching|-|Scale|-|Extract Features|-|Save|
Beta+Gamma Ratio [Active(High) vs Standard(Low)]
Petrosian Fractal Dimension [Active(High) vs Standard(Low)]
EMG
|EMG|-|remove_DC|-|uV_convert|-|Filter|-|epoching|-|Extract Features|-|Save|
Frobenius Norm [Active(High) vs Standard(Low)]
Hjorth complexity [Active(High) vs Standard(Low)]
---------------------------------------------------------------------"""
#-------------------------- Constants ----------------------------------
dir_wrk = getdir()
dir_fol = ['1','2','3','4','5','6','7','8','9','10']
dir_fol_emg = ['1','2','3','4','5']
cs0 = [0,3,4,7,8,11,12,15,16,19,20,23,24,27,28]
cs1 = [1,2,5,6,9,10,13,14,17,18,21,22,25,26,29]
cs0_b = [0,1,4,5,8,9,12,13,16,17,20,21,24,25,28,29]
cs1_b = [2,3,6,7,10,11,14,15,18,19,22,23,26,27,30,31]
emg_channel = ['AF3','AF4'];emg_folder = ['Left_Wink','Right_Wink']
classes = {'active','nonactive','standard','offline'}
classes_emg = {'active','standard','offline','eog'}
classes_ext = {'eog','emg'}
train_n = 100; total_n = 150;data_n = 100; test_n = 50
train_n_alfa = 50; total_n_alfa = 75;data_n_alfa = 50; test_n_alfa = 25
train_n_emg = 50; total_n_emg = 75;data_n_emg = 50; test_n_emg = 25
train_n_beta = 128; total_n_beta = 192;data_n_beta = 128; test_n_beta = 64
numClss = 4;
Dists = ['Active',' non-Active', 'Standard', 'Offline']
Dists_b = ['Active',' non-Active', 'Standard', 'Offline', 'EOG', 'EMG-Wink']
Dists_emg = ['Active', 'Standard', 'Offline', 'EOG']
seq_ = [];seq_b = []; seq_m = []
for x in range(total_n):exec "seq_.append('part_%s')" % (x)
for x in range(total_n_beta):exec "seq_b.append('part_%s')" % (x)
for x in range(total_n_emg):exec "seq_m.append('part_%s')" % (x)
#---------------------------- Plot Normalizer --------------------------
class MidpointNormalize(Normalize):
def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
self.midpoint = midpoint
Normalize.__init__(self, vmin, vmax, clip)
def __call__(self, value, clip=None):
x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
return np.ma.masked_array(np.interp(value, x, y))
#--------------------------- HPF ---------------------------------------
def filtering(data):
samprate = 128
cutlow = 2.0
nyq = samprate/2.0
low = cutlow / nyq
b,a = butter(5,low,btype='highpass',analog=0)
data_f = filtfilt(b,a,data)
return data_f
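# Note (illustrative): with samprate=128 Hz this is a 5th-order Butterworth
# high-pass at 2 Hz; filtfilt runs it forward and backward, so slow DC drift
# is removed with zero phase distortion.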
#--------------------------- Alpha Parameters --------------------------
def parametrs_alpha(val_):
temp_val1 = []; temp_val2 = []
fq_, px_ = welch(val_, nperseg=256, nfft=1023, fs = 128,
noverlap=100, scaling='density'
)
fq1_up = 12.0; fq1_dwn = 8.0
fq2_up = 30.0; fq2_dwn = 4.0
for i in range(len(px_)):
if fq_[i]<=fq1_up and fq_[i]>=fq1_dwn:temp_val1.append(px_[i])
elif fq_[i]<=fq2_up and fq_[i]>=fq2_dwn:temp_val2.append(px_[i])
vaf_eng1 = sum(temp_val1)
vaf_eng2 = sum(temp_val2)
vaf_r = vaf_eng1 / vaf_eng2
vaf_hjorth_com = (hjorth_com(val_))/100.0
vaf_tt = {'a1_ratio':vaf_r,'hjr_com':vaf_hjorth_com}
return vaf_tt
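# Note (illustrative): the elif keeps the two sums disjoint, so a1_ratio
# compares 8-12 Hz alpha power against the surrounding 4-8 and 12-30 Hz
# power; a dominant alpha rhythm (e.g. relaxed, eyes closed) drives it up.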
#--------------------------- Beta Parameters --------------------------
def parametrs_beta(val_):
temp_val1 = []; temp_val2 = []
    fq_, px_ = welch(val_, fs=128, nperseg=256, nfft=1023,
                     noverlap=100, scaling='density')
# BETA 3 + GAMMA
fq1_up = 38.0; fq1_dwn = 20.0
# DIV RANGE
fq2_up = 42.0; fq2_dwn = 14.0
for i in range(len(px_)):
if fq_[i]<=fq1_up and fq_[i]>=fq1_dwn:temp_val1.append(px_[i])
elif fq_[i]<=fq2_up and fq_[i]>=fq2_dwn:temp_val2.append(px_[i])
vaf_eng1 = sum(temp_val1)
vaf_eng2 = sum(temp_val2)
vaf_r = vaf_eng1 / vaf_eng2
vaf_pfd = pfd(val_)
vaf_tt = {'a1_ratio':vaf_r,'pfd':vaf_pfd}
return vaf_tt
#--------------------------- EMG Parameters ----------------------------
def parametrs_emg(val_):
vaf_froben = np.linalg.norm(val_)
vaf_hjorth_com = hjorth_com(val_)
vaf_tt = {'a1_froben':vaf_froben, 'a2_hjorth':vaf_hjorth_com}
return vaf_tt
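#-------------------------- Editorial note ------------------------------
# For a 1-D epoch, np.linalg.norm() above is the plain Euclidean norm,
# i.e. sqrt(N) times the RMS amplitude, so wink-sized EMG bursts score far
# higher on 'a1_froben' than resting-level signal. A sketch (never called;
# assumes hjorth_com(), used above, is defined earlier in this file):
def _demo_emg_norm():
    quiet = 5.0 * np.random.randn(640)    # resting-scale amplitude (a.u.)
    burst = 50.0 * np.random.randn(640)   # wink-scale burst (a.u.)
    print(parametrs_emg(quiet)['a1_froben'],
          parametrs_emg(burst)['a1_froben'])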
#--------------------------- Preprocess (remove DC, convert to uV) ------
def dc2uV(val_):
vaf_ = ( val_ - np.average(val_) )*0.51
return vaf_
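# Editorial note: the 0.51 factor in dc2uV() is presumably the headset's
# ADC resolution in microvolts per bit (the Emotiv EPOC is commonly quoted
# at ~0.51 uV/LSB for its 14-bit, 128 Hz front end); treat it as a
# device-specific assumption rather than a universal constant.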
#--------------------------- Normalization -----------------------------
def normalz(val_):
vaf_ = preprocessing.scale(val_)
return vaf_
#-------------------------- Dictionaries --------------------------------
def dic_values_alfa():
dic_fol={'folder_1':0,'folder_2':0,'folder_3':0,'folder_4':0,
'folder_5':0}
dic_fol_st = {'folder_1':0,'folder_2':0,'folder_3':0,'folder_4':0,
'folder_5':0}
dic_fol_of = {'folder_1':0,'folder_2':0,'folder_3':0,'folder_4':0,
'folder_5':0}
seq_ = []
for x in range(75):exec "seq_.append('part_%s')" % (x)
dic_active = dict.fromkeys(seq_,0)
dic_nonactive = dict.fromkeys(seq_,0)
dic_standard = dict.fromkeys(seq_,0)
dic_offline = dict.fromkeys(seq_,0)
return dic_fol,dic_fol_st,dic_fol_of,dic_active,dic_nonactive,dic_standard,dic_offline
def dic_values_emg():
seq_ = []
for x in range(75):exec "seq_.append('part_%s')" % (x)
dic_active = dict.fromkeys(seq_,0)
dic_standard = dict.fromkeys(seq_,0)
dic_offline = dict.fromkeys(seq_,0)
dic_eog = dict.fromkeys(seq_,0)
return dic_active,dic_standard,dic_offline,dic_eog
def dic_values_beta():
seq_ = [];seq_a = []
for x in range(192):exec "seq_.append('part_%s')" % (x)
for x in range(75):exec "seq_a.append('part_%s')" % (x)
dic_active = dict.fromkeys(seq_,0)
dic_nonactive = dict.fromkeys(seq_,0)
dic_standard = dict.fromkeys(seq_,0)
dic_offline = dict.fromkeys(seq_,0)
dic_eog = dict.fromkeys(seq_a,0)
dic_emg = dict.fromkeys(seq_a,0)
return dic_active,dic_nonactive,dic_standard,dic_offline,dic_eog,dic_emg
#-------------------------- Save alpha parameters ----------------------
def alpha_extra_():
active_ch=[];active_st=[];active_of=[];non_active_ch=[]
dic_fol,dic_fol_st,dic_fol_of,dic_active,dic_nonactive,dic_standard,dic_offline = dic_values_alfa()
for j, dir_name in enumerate(dic_fol):
exec "ch_ = np.load('%s/channels/Alpha/Test_1_A/%s/O2.py')" %(dir_wrk,dir_fol_emg[j])
len_st = [0,len(ch_),2*len(ch_),3*len(ch_),4*len(ch_),5*len(ch_)]
exec "st_ = np.load('%s/channels/Beta/Standard/O2.py')" %(dir_wrk)
st_ = st_[len_st[j]:len_st[j+1]]
exec "of_ = np.load('%s/channels/Beta/Offline/O2.py')" %(dir_wrk)
of_ = of_[len_st[j]:len_st[j+1]]
#standard --
st_ = dc2uV(st_)
st_ = filtering(st_)
st_ = np.split(st_,32)
st_ = np.delete(st_,[0,31],0)
#Offline --
of_ = dc2uV(of_)
of_ = filtering(of_)
of_ = np.split(of_,32)
of_ = np.delete(of_,[0,31],0)
#channel --
ch_ = dc2uV(ch_)
ch_ = filtering(ch_)
ch_ = np.split(ch_,32)
ch_ = np.delete(ch_,[0,31],0)
#-- normalize --
for i in range(len(ch_)):
ch_[i] = normalz(ch_[i])
st_[i] = normalz(st_[i])
of_[i] = normalz(of_[i])
dic_fol[dir_name] = ch_
dic_fol_st[dir_name] = st_
dic_fol_of[dir_name] = of_
#-- parameters --
for k,dir_name in enumerate(dic_fol):
for l in range(len(dic_fol[dir_name])):
temp_val1 = parametrs_alpha(dic_fol[dir_name][l])
temp_val2 = parametrs_alpha(dic_fol_st[dir_name][l])
temp_val3 = parametrs_alpha(dic_fol_of[dir_name][l])
if l in cs0:
active_ch.append(temp_val1)
active_st.append(temp_val2)
active_of.append(temp_val3)
elif l in cs1:
non_active_ch.append(temp_val1)
#-- archives --
for x,seq_name in enumerate(dic_active):
dic_active[seq_name] = active_ch[x]
dic_nonactive[seq_name] = non_active_ch[x]
dic_standard[seq_name] = active_st[x]
dic_offline[seq_name] = active_of[x]
exec "savemat('%s/Features/Parameters_files/Test_1_A.mat', {'active':dic_active,'nonactive':dic_nonactive,'standard':dic_standard,'offline':dic_offline}, do_compression=1)" %(dir_wrk)
#-------------------------- Save Beta parameters -----------------------
def beta_extra_():
active_ch=[];active_st=[];active_of=[];active_eo=[];active_em=[];non_active_ch=[]
active_tmp=[];non_active_tmp=[];active_tmp_st=[];active_tmp_of=[]
active_tmp_eo=[];active_tmp_em=[]
dic_active,dic_nonactive,dic_standard,dic_offline,dic_eog,dic_emg = dic_values_beta()
exec "ch_ = np.load('%s/channels/Beta/Test_1_B/AF3.py')" %(dir_wrk)
exec "st_ = np.load('%s/channels/Beta/Standard/AF3.py')" %(dir_wrk)
exec "of_ = np.load('%s/channels/Beta/Offline/AF3.py')" %(dir_wrk)
exec "eo_ = np.load('%s/channels/EMG/EOG/AF3.py')" %(dir_wrk)
exec "em_ = np.load('%s/channels/EMG/Left_Wink/AF3.py')" %(dir_wrk)
#channel --
ch_ = dc2uV(ch_)
ch_ = filtering(ch_)
ch_ = np.split(ch_,32)
for i in range(len(ch_)):
ch_n = np.delete(ch_[i],range(6143,6399),0)
ch_n = np.split(ch_n,12)
if i in cs0_b:
for x in range(len(ch_n)):
active_tmp.append(ch_n[x])
elif i in cs1_b:
for x in range(len(ch_n)):
non_active_tmp.append(ch_n[x])
#standard --
st_ = dc2uV(st_)
st_ = filtering(st_)
st_ = np.split(st_,32)
for i in range(len(st_)):
st_n = np.delete(st_[i],range(6143,6399),0)
st_n = np.split(st_n,12)
if i in cs0_b:
for x in range(len(st_n)):
active_tmp_st.append(st_n[x])
#Offline --
of_ = dc2uV(of_)
of_ = filtering(of_)
of_ = np.split(of_,32)
for i in range(len(of_)):
of_n = np.delete(of_[i],range(6143,6399),0)
of_n = np.split(of_n,12)
if i in cs0_b:
for x in range(len(of_n)):
active_tmp_of.append(of_n[x])
#EOG --
eo_ = dc2uV(eo_)
eo_ = filtering(eo_)
eo_ = np.split(eo_,75)
#EMG --
em_ = dc2uV(em_)
em_ = filtering(em_)
em_ = np.split(em_,75)
#-- normalize --
for i in range(len(active_tmp)):
active_tmp[i] = normalz(active_tmp[i])
non_active_tmp[i] = normalz(non_active_tmp[i])
active_tmp_st[i] = normalz(active_tmp_st[i])
active_tmp_of[i] = normalz(active_tmp_of[i])
for i in range(len(eo_)):
eo_[i] = normalz(eo_[i])
em_[i] = normalz(em_[i])
#-- parameters --
for l in range(len(active_tmp)):
active_ch.append(parametrs_beta(active_tmp[l]))
non_active_ch.append(parametrs_beta(non_active_tmp[l]))
active_st.append(parametrs_beta(active_tmp_st[l]))
active_of.append(parametrs_beta(active_tmp_of[l]))
for l in range(len(eo_)):
active_eo.append(parametrs_beta(eo_[l]))
active_em.append(parametrs_beta(em_[l]))
#-- archives --
for x,seq_name in enumerate(dic_active):
dic_active[seq_name] = active_ch[x]
dic_nonactive[seq_name] = non_active_ch[x]
dic_standard[seq_name] = active_st[x]
dic_offline[seq_name] = active_of[x]
for x,seq_name in enumerate(dic_eog):
dic_eog[seq_name] = active_eo[x]
dic_emg[seq_name] = active_em[x]
exec "savemat('%s/Features/Parameters_files/Test_1_B.mat', {'active':dic_active,'nonactive':dic_nonactive,'standard':dic_standard,'offline':dic_offline,'eog':dic_eog,'emg':dic_emg}, do_compression=1)" %(dir_wrk)
#-------------------------- Save EMG parameters ------------------------
def emg_extra_():
for xx,yy in enumerate(emg_folder):
active_ch=[];active_st=[];active_of=[];active_eo=[]
dic_active,dic_standard,dic_offline,dic_eog = dic_values_emg()
exec "ch_ = np.load('%s/channels/EMG/%s/%s.py')" %(dir_wrk,emg_folder[xx],emg_channel[xx])
exec "eo_ = np.load('%s/channels/EMG/EOG/%s.py')" %(dir_wrk,emg_channel[xx])
exec "st_ = np.load('%s/channels/Beta/Standard/%s.py')" %(dir_wrk,emg_channel[xx])
exec "of_ = np.load('%s/channels/Beta/Offline/%s.py')" %(dir_wrk,emg_channel[xx])
st_ = st_[0:len(ch_)]
of_ = of_[0:len(ch_)]
#standard --
st_ = dc2uV(st_)
st_ = filtering(st_)
st_ = np.split(st_,75)
#offline --
of_ = dc2uV(of_)
of_ = filtering(of_)
of_ = np.split(of_,75)
#eog --
eo_ = dc2uV(eo_)
eo_ = filtering(eo_)
eo_ = np.split(eo_,75)
#channel --
ch_ = dc2uV(ch_)
ch_ = filtering(ch_)
ch_ = np.split(ch_,75)
#-- parameters --
for l in range(len(ch_)):
temp_val1 = parametrs_emg(ch_[l])
temp_val2 = parametrs_emg(st_[l])
temp_val3 = parametrs_emg(of_[l])
temp_val4 = parametrs_emg(eo_[l])
# if l % 2 == 0:
active_ch.append(temp_val1)
active_st.append(temp_val2)
active_of.append(temp_val3)
active_eo.append(temp_val4)
# else:
# non_active_ch.append(temp_val1)
#-- archives --
for x,seq_name in enumerate(dic_active):
dic_active[seq_name] = active_ch[x]
dic_standard[seq_name] = active_st[x]
dic_offline[seq_name] = active_of[x]
dic_eog[seq_name] = active_eo[x]
exec "savemat('%s/Features/Parameters_files/%s.mat', {'active':dic_active,'standard':dic_standard,'offline':dic_offline,'eog':dic_eog}, do_compression=1)" %(dir_wrk,emg_folder[xx])
#-------------------------- Plotting of Alpha --------------------------
def plot_alpha_():
for x,cls in enumerate(classes):
exec "%s_ratio = []" % (cls)
exec "%s_hjr = []" % (cls)
#-------------------------------------------------------------------
exec "mat_contents = loadmat_new.loadmat('%s/Features/Parameters_files/Test_1_A.mat')" %(dir_wrk)
# 4 classes
active = mat_contents['active'];
nonactive = mat_contents['nonactive']
standard = mat_contents['standard']
offline = mat_contents['offline']
#-------------------------------------------------------------------
for y,cls in enumerate(classes):
for x in range(total_n_alfa):
exec "%s_ratio.append(%s[seq_[%s]]['a1_ratio'])" % (cls,cls,x)
exec "%s_hjr.append(%s[seq_[%s]]['hjr_com'])" % (cls,cls,x)
#-------------------------------------------------------------------
data1 = [active_ratio, nonactive_ratio, standard_ratio, offline_ratio]
data2 = [active_hjr, nonactive_hjr, standard_hjr, offline_hjr]
#-------------------------------------------------------------------
fig1, (ax1,ax2) = plt.subplots(2)
for x in range(1,3):
exec "bp%s = ax%s.boxplot(data%s, notch=1, sym='k+', vert=1)" %(x,x,x)
#-------------------------------------------------------------------
for x in range(1,3):
exec "ax%s.yaxis.grid(True, linestyle='-', which='major', color='lightgrey',alpha=0.5)" %(x)
exec "ax%s.xaxis.grid(True, linestyle='-', which='major', color='lightgrey',alpha=0.5)" %(x)
    fig1.canvas.set_window_title('Alpha Parameters')
ax1.set_axisbelow(True)
ax1.set_xlabel('Class')
ax1.set_title('Alpha Ratio')
ax2.set_title('Hjorth complexity')
for x in range(1,3):
exec "ax%s.set_ylabel('Value')" %(x)
plt.setp((ax1,ax2), xticklabels=Dists)
#-------------------------------------------------------------------
fig1.tight_layout()
#-------------------------- Plotting of Beta --------------------------
def plot_beta_():
for x,cls in enumerate(classes):
exec "%s_ratio = []" % (cls)
exec "%s_pfd = []" % (cls)
for x,cls in enumerate(classes_ext):
exec "%s_ratio = []" % (cls)
exec "%s_pfd = []" % (cls)
#-------------------------------------------------------------------
exec "mat_contents = loadmat_new.loadmat('%s/Features/Parameters_files/Test_1_B.mat')" %(dir_wrk)
# 4 classes
active = mat_contents['active'];
nonactive = mat_contents['nonactive']
standard = mat_contents['standard']
offline = mat_contents['offline']
eog = mat_contents['eog']
emg = mat_contents['emg']
#-------------------------------------------------------------------
for y,cls in enumerate(classes):
for x in range(total_n_beta):
exec "%s_ratio.append(%s[seq_b[%s]]['a1_ratio'])" % (cls,cls,x)
exec "%s_pfd.append(%s[seq_b[%s]]['pfd'])" % (cls,cls,x)
for y,cls in enumerate(classes_ext):
for x in range(total_n_emg):
exec "%s_ratio.append(%s[seq_b[%s]]['a1_ratio'])" % (cls,cls,x)
exec "%s_pfd.append(%s[seq_b[%s]]['pfd'])" % (cls,cls,x)
#-------------------------------------------------------------------
data1 = [active_ratio, nonactive_ratio, standard_ratio, offline_ratio, eog_ratio, emg_ratio ]
data2 = [active_pfd, nonactive_pfd, standard_pfd, offline_pfd, eog_pfd, emg_pfd]
#-------------------------------------------------------------------
fig1, (ax1,ax2) = plt.subplots(2)
for x in range(1,3):
exec "bp%s = ax%s.boxplot(data%s, notch=1, sym='k+', vert=1)" %(x,x,x)
#-------------------------------------------------------------------
for x in range(1,3):
exec "ax%s.yaxis.grid(True, linestyle='-', which='major', color='lightgrey',alpha=0.5)" %(x)
exec "ax%s.xaxis.grid(True, linestyle='-', which='major', color='lightgrey',alpha=0.5)" %(x)
    fig1.canvas.set_window_title('Beta Parameters')
ax1.set_axisbelow(True)
ax1.set_xlabel('Class')
ax1.set_title('Beta Ratio')
ax2.set_title('Petrosian Fractal Dimension')
for x in range(1,3):
exec "ax%s.set_ylabel('Value')" %(x)
plt.setp((ax1,ax2), xticklabels=Dists_b)
#-------------------------------------------------------------------
fig1.tight_layout()
#-------------------------- Plotting of Wleft --------------------------
def plot_wleft_():
for x,cls in enumerate(classes_emg):
exec "%s_froben = []" % (cls)
exec "%s_hjorth = []" % (cls)
#-------------------------------------------------------------------
exec "mat_contents = loadmat_new.loadmat('%s/Features/Parameters_files/Left_Wink.mat')" %(dir_wrk)
# 4 classes
active = mat_contents['active']
standard = mat_contents['standard']
offline = mat_contents['offline']
eog = mat_contents['eog']
#-------------------------------------------------------------------
for y,cls in enumerate(classes_emg):
for x in range(total_n_emg):
exec "%s_froben.append(%s[seq_[%s]]['a1_froben'])" % (cls,cls,x)
exec "%s_hjorth.append(%s[seq_[%s]]['a2_hjorth'])" % (cls,cls,x)
#-------------------------------------------------------------------
data1 = [active_froben, standard_froben, offline_froben, eog_froben]
data2 = [active_hjorth, standard_hjorth, offline_hjorth, eog_hjorth]
#-------------------------------------------------------------------
fig1, (ax1,ax2) = plt.subplots(2)
for x in range(1,3):
exec "bp%s = ax%s.boxplot(data%s, notch=1, sym='k+', vert=1)" %(x,x,x)
#-------------------------------------------------------------------
for x in range(1,3):
exec "ax%s.yaxis.grid(True, linestyle='-', which='major', color='lightgrey',alpha=0.5)" %(x)
exec "ax%s.xaxis.grid(True, linestyle='-', which='major', color='lightgrey',alpha=0.5)" %(x)
    fig1.canvas.set_window_title('Left Wink Parameters')
ax1.set_axisbelow(True)
ax1.set_xlabel('Class')
ax1.set_title('Frobenius Norm')
ax2.set_title('Hjorth complexity')
for x in range(1,3):
exec "ax%s.set_ylabel('Value')" %(x)
plt.setp((ax1,ax2), xticklabels=Dists_emg)
#-------------------------------------------------------------------
fig1.tight_layout()
#-------------------------- Plotting of Wright -------------------------
def plot_wright_():
for x,cls in enumerate(classes_emg):
exec "%s_froben = []" % (cls)
exec "%s_hjorth = []" % (cls)
#-------------------------------------------------------------------
exec "mat_contents = loadmat_new.loadmat('%s/Features/Parameters_files/Right_Wink.mat')" %(dir_wrk)
# 4 classes
active = mat_contents['active'];
standard = mat_contents['standard']
offline = mat_contents['offline']
eog = mat_contents['eog']
#-------------------------------------------------------------------
for y,cls in enumerate(classes_emg):
for x in range(total_n_emg):
exec "%s_froben.append(%s[seq_[%s]]['a1_froben'])" % (cls,cls,x)
exec "%s_hjorth.append(%s[seq_[%s]]['a2_hjorth'])" % (cls,cls,x)
#-------------------------------------------------------------------
data1 = [active_froben, standard_froben, offline_froben, eog_froben]
data2 = [active_hjorth, standard_hjorth, offline_hjorth, eog_hjorth]
#-------------------------------------------------------------------
fig1, (ax1,ax2) = plt.subplots(2)
for x in range(1,3):
exec "bp%s = ax%s.boxplot(data%s, notch=1, sym='k+', vert=1)" %(x,x,x)
#-------------------------------------------------------------------
for x in range(1,3):
exec "ax%s.yaxis.grid(True, linestyle='-', which='major', color='lightgrey',alpha=0.5)" %(x)
exec "ax%s.xaxis.grid(True, linestyle='-', which='major', color='lightgrey',alpha=0.5)" %(x)
    fig1.canvas.set_window_title('Right Wink Parameters')
ax1.set_axisbelow(True)
ax1.set_xlabel('Class')
ax1.set_title('Frobenius Norm')
ax2.set_title('Hjorth complexity')
for x in range(1,3):
exec "ax%s.set_ylabel('Value')" %(x)
plt.setp((ax1,ax2), xticklabels=Dists_emg)
#-------------------------------------------------------------------
fig1.tight_layout()
#-------------------------- SVM Alpha ----------------------------------
def svm_alpha_():
#------------------------------ labels -----------------------------
# data full
target_names = np.array(['active','nonactive','standard','offline'])
# data 2D
target_names_2d = np.array(['active','standard'])
feature_names_2d = ['a1_ratio','hjr_com']
#------------------------------ Variables ------------------------------
seq_ = []
for x in range(total_n_emg):exec "seq_.append('part_%s')" % (x)
svm_data = [];svm_target = [];svm_target_test = [];svm_test = []
for x,cls in enumerate(target_names):
exec "%s_ratio = []" % (cls)
exec "%s_hjr = []" % (cls)
#------------------------------ Extract Feats ----------------------
exec "mat_contents = loadmat_new.loadmat('%s/Features/Parameters_files/Test_1_A.mat')" %(dir_wrk)
active = mat_contents['active'];
nonactive = mat_contents['nonactive']
standard = mat_contents['standard']
offline = mat_contents['offline']
for y,cls in enumerate(target_names):
exec "%s_class = []" %(cls)
exec "%s_class_data = []" %(cls)
exec "%s_class_test = []" %(cls)
exec "%s_class_target = []" %(cls)
#------------------------------ Create classes ---------------------
for y,cls in enumerate(target_names):
for x in range(total_n_emg):
exec "%s_class.append(np.asarray(collections.OrderedDict(sorted(%s[seq_[%s]].items())).values()))" % (cls,cls,x)
exec "rndm.shuffle(%s_class)" %(cls)
#------------------------------ test and data extract --------------
for y,cls in enumerate(target_names):
for x in range(total_n_emg):
exec "if %s < test_n_emg:%s_class_test.append(%s_class[x])" %(x,cls,cls)
exec "if %s >= test_n_emg:%s_class_data.append(%s_class[x])" %(x,cls,cls)
for y,cls in enumerate(target_names):
exec "%s_class_test = np.asarray(%s_class_test)" %(cls,cls)
exec "%s_class_data = np.asarray(%s_class_data)" %(cls,cls)
#------------------------------ Create targets ---------------------
for x in range(numClss):
exec "class_%s = [x]*50" %(x)
exec "class_test_%s = [x]*25" %(x)
exec "svm_target.append(class_%s)" %(x)
exec "svm_target_test.append(class_test_%s)" %(x)
#------------------------------ test and data file ---------------------
for y,cls in enumerate(target_names):
exec "svm_data.append(%s_class_data)" %(cls)
exec "svm_test.append(%s_class_test)" %(cls)
for y,cls in enumerate(['target_test','target','data','test']):
exec "svm_%s = list(chain.from_iterable(svm_%s))" %(cls,cls)
exec "svm_%s = np.asarray(svm_%s)" %(cls,cls)
#------------------------------ Train SVM --------------------------
X = svm_data; y = svm_target; T = svm_test;yy = svm_target_test
#------------------------------ Data Binary Class and Feats --------
# Train part
X_2d = np.delete(X,range(50,85)+range(100,130)+range(150,185),axis=0)
y_2d = y[y < 2]
# Test part
T_2d = np.delete(T,range(25,43)+range(50,64)+range(75,93),axis=0)
yy_2d = yy[yy < 2]
#------------------------------ Standardize data ------------
scaler = StandardScaler()
X_2d = scaler.fit_transform(X_2d)
T_2d_scaled = scaler.transform(T_2d)
#------------------------------ Create Classifier ------------------
manual_param = {'C':100,'gamma':0.1}
clf = SVC(gamma=manual_param['gamma'], C=manual_param['C'])
clf.fit(X_2d, y_2d)
    #------------------------------ Grid-Search Classifier -------------
C_range = np.logspace(1, 3, 3)
gamma_range = np.logspace(-3, -1, 3)
param_grid = dict(gamma=gamma_range, C=C_range)
cv = StratifiedShuffleSplit(y_2d, n_iter=100, test_size=0.2, random_state=42)
grid = GridSearchCV(SVC(), param_grid=param_grid, cv=cv)
    #------------------------------ Best Parameter Approx. -------------
grid.fit(X_2d, y_2d)
best_param = grid.best_params_
best_score = grid.best_score_
manual_score = clf.score(T_2d_scaled,yy_2d)
C_2d_range = [1e1, 1e2, 1e3];
gamma_2d_range = [1e-3, 1e-2, 0.1]
classifiers = []
for C in C_2d_range:
for gamma in gamma_2d_range:
clf_auto = SVC(C=C, gamma=gamma)
clf_auto.fit(X_2d, y_2d)
classifiers.append((C, gamma, clf_auto))
#------------------------------ Parameters Visualization -----------
fig = plt.figure(figsize=(8, 6))
max_x = max(X_2d[:, 0]); min_x = min(X_2d[:, 0])
max_y = max(X_2d[:, 1]); min_y = min(X_2d[:, 1])
xx, yy = np.meshgrid(np.linspace((min_x-abs(min_x*(0.15))),
(max_x+max_x*(0.15)), 200),
np.linspace((min_y-abs(min_y*(0.15))),
(max_y+max_y*(0.15)), 200))
for (k, (C, gamma, clf_auto)) in enumerate(classifiers):
# evaluate decision function in a grid
Z = clf_auto.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# visualize decision function for these parameters
plt.subplot(len(C_2d_range), len(gamma_2d_range), k + 1)
plt.title("gamma=10^%d, C=10^%d" % (np.log10(gamma), np.log10(C)),
size='medium')
# visualize parameter's effect on decision function
plt.pcolormesh(xx, yy, -Z, cmap=plt.cm.RdBu)
plt.scatter(X_2d[:, 0], X_2d[:, 1], c=y_2d, cmap=plt.cm.RdBu_r)
plt.xticks(())
plt.yticks(())
plt.axis('tight')
#------------------------------ Save SVM and Scaler to a File ------
joblib.dump(clf, dir_wrk+'/Features/Dump_Files/alphaSVM.pkl',compress=3)
joblib.dump(scaler,dir_wrk+'/Features/Dump_Files/alphaScaler.pkl',compress=3)
#------------------------------ Save SVM data files to a File ------
exec "savemat('%s/Features/SVM_DataFile/Test_1_A_SVM_2D.mat', {'svm data':X_2d,'svm target':y_2d,'svm test':T_2d, 'svm target test':yy_2d, 'features names':feature_names_2d,'target names':target_names_2d}, do_compression=1)" %(dir_wrk)
    fig.canvas.set_window_title('SVM RBF Parameters for "Alpha"')
return manual_param, manual_score, best_param, best_score
#-------------------------- SVM Beta ----------------------------------
def svm_beta_():
#------------------------------ labels -----------------------------
# data full
target_names_all = np.array(['active','nonactive','standard','offline','eog','emg'])
target_names = np.array(['active','nonactive','standard','offline'])
target_names_extra = np.array(['eog','emg'])
# data 2D
target_names_2d = np.array(['active','standard'])
feature_names_2d = ['a1_ratio','pfd']
#------------------------------ Variables ------------------------------
seq_ = []
for x in range(total_n_beta):exec "seq_.append('part_%s')" % (x)
for x in range(total_n_emg):exec "seq_b.append('part_%s')" % (x)
svm_data = [];svm_target = [];svm_target_test = [];svm_test = []
for x,cls in enumerate(target_names_all):
exec "%s_ratio = []" % (cls)
exec "%s_hjr = []" % (cls)
#------------------------------ Extract Feats ----------------------
exec "mat_contents = loadmat_new.loadmat('%s/Features/Parameters_files/Test_1_B.mat')" %(dir_wrk)
active = mat_contents['active'];
nonactive = mat_contents['nonactive']
standard = mat_contents['standard']
offline = mat_contents['offline']
eog = mat_contents['eog']
emg = mat_contents['emg']
for y,cls in enumerate(target_names_all):
exec "%s_class = []" %(cls)
exec "%s_class_data = []" %(cls)
exec "%s_class_test = []" %(cls)
exec "%s_class_target = []" %(cls)
#------------------------------ Create classes ---------------------
for y,cls in enumerate(target_names):
for x in range(total_n_beta):
exec "%s_class.append(np.asarray(collections.OrderedDict(sorted(%s[seq_[%s]].items())).values()))" % (cls,cls,x)
exec "rndm.shuffle(%s_class)" %(cls)
for y,cls in enumerate(target_names_extra):
for x in range(total_n_emg):
exec "%s_class.append(np.asarray(collections.OrderedDict(sorted(%s[seq_[%s]].items())).values()))" % (cls,cls,x)
exec "rndm.shuffle(%s_class)" %(cls)
#------------------------------ test and data extract --------------
for y,cls in enumerate(target_names):
for x in range(total_n_beta):
exec "if %s < test_n_beta:%s_class_test.append(%s_class[x])" %(x,cls,cls)
exec "if %s >= test_n_beta:%s_class_data.append(%s_class[x])" %(x,cls,cls)
for y,cls in enumerate(target_names):
exec "%s_class_test = np.asarray(%s_class_test)" %(cls,cls)
exec "%s_class_data = np.asarray(%s_class_data)" %(cls,cls)
for y,cls in enumerate(target_names_extra):
for x in range(total_n_emg):
exec "if %s < test_n_emg:%s_class_test.append(%s_class[x])" %(x,cls,cls)
exec "if %s >= test_n_emg:%s_class_data.append(%s_class[x])" %(x,cls,cls)
for y,cls in enumerate(target_names_extra):
exec "%s_class_test = np.asarray(%s_class_test)" %(cls,cls)
exec "%s_class_data = np.asarray(%s_class_data)" %(cls,cls)
#------------------------------ Create targets ---------------------
for x in range(numClss):
exec "class_%s = [x]*128" %(x)
exec "class_test_%s = [x]*64" %(x)
exec "svm_target.append(class_%s)" %(x)
exec "svm_target_test.append(class_test_%s)" %(x)
#------------------------------ test and data file ---------------------
for y,cls in enumerate(target_names_all):
exec "svm_data.append(%s_class_data)" %(cls)
exec "svm_test.append(%s_class_test)" %(cls)
for y,cls in enumerate(['target_test','target','data','test']):
exec "svm_%s = list(chain.from_iterable(svm_%s))" %(cls,cls)
exec "svm_%s = np.asarray(svm_%s)" %(cls,cls)
#------------------------------ Train SVM --------------------------
X = svm_data; y = svm_target; T = svm_test;yy = svm_target_test
#------------------------------ Data Binary Class and Feats --------
#train_n_emg = 50; total_n_emg = 75;data_n_emg = 50; test_n_emg = 25
#train_n_beta = 128; total_n_beta = 192;data_n_beta = 128; test_n_beta = 64
#['active','nonactive','standard','offline', 'eog', 'emg']
# TRAIN PART
#( [0,127] [128,255] [256,383] [384,511] [512,561] [562,611])
#( [128] [24] [32] [24] [24] [24] )
# TEST PART
#( [0,63] [64,127] [128,191] [192,255] [256,280] [281,305])
#( [64] [12] [16] [12] [12] [12] )
# Train part
X_2d = np.delete(X,range(128,232)+range(256,352)+range(384,488)+range(512,538)+range(562,588),axis=0)
# X_2d = np.delete(X,range(128,218)+range(256,332)+range(384,474),axis=0)
y_2d = y[y < 2]
# Test part
T_2d = np.delete(T,range(64,116)+range(128,176)+range(192,244)+range(256,269)+range(281,294),axis=0)
# T_2d = np.delete(T,range(64,109)+range(128,166)+range(192,237),axis=0)
yy_2d = yy[yy < 2]
#------------------------------ Standardize data ------------
scaler = StandardScaler()
X_2d = scaler.fit_transform(X_2d)
T_2d_scaled = scaler.transform(T_2d)
#------------------------------ Create Classifier ------------------
manual_param = {'C':100,'gamma':0.001}
clf = SVC(gamma=manual_param['gamma'], C=manual_param['C'])
clf.fit(X_2d, y_2d)
    #------------------------------ Grid-Search Classifier -------------
C_range = np.logspace(1, 3, 3)
gamma_range = np.logspace(-3, -1, 3)
param_grid = dict(gamma=gamma_range, C=C_range)
cv = StratifiedShuffleSplit(y_2d, n_iter=100, test_size=0.2, random_state=42)
grid = GridSearchCV(SVC(), param_grid=param_grid, cv=cv)
    #------------------------------ Best Parameter Approx. -------------
grid.fit(X_2d, y_2d)
best_param = grid.best_params_
best_score = grid.best_score_
manual_score = clf.score(T_2d_scaled,yy_2d)
C_2d_range = [1e1, 1e2, 1e3];
gamma_2d_range = [1e-3, 1e-2, 0.1]
classifiers = []
for C in C_2d_range:
for gamma in gamma_2d_range:
clf_auto = SVC(C=C, gamma=gamma)
clf_auto.fit(X_2d, y_2d)
classifiers.append((C, gamma, clf_auto))
#------------------------------ Parameters Visualization -----------
fig = plt.figure(figsize=(8, 6))
max_x = max(X_2d[:, 0]); min_x = min(X_2d[:, 0])
max_y = max(X_2d[:, 1]); min_y = min(X_2d[:, 1])
xx, yy = np.meshgrid(np.linspace((min_x-abs(min_x*(0.15))),
(max_x+max_x*(0.15)), 200),
np.linspace((min_y-abs(min_y*(0.15))),
(max_y+max_y*(0.15)), 200))
for (k, (C, gamma, clf_auto)) in enumerate(classifiers):
# evaluate decision function in a grid
Z = clf_auto.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# visualize decision function for these parameters
plt.subplot(len(C_2d_range), len(gamma_2d_range), k + 1)
plt.title("gamma=10^%d, C=10^%d" % (np.log10(gamma), np.log10(C)),
size='medium')
# visualize parameter's effect on decision function
plt.pcolormesh(xx, yy, -Z, cmap=plt.cm.RdBu)
plt.scatter(X_2d[:, 0], X_2d[:, 1], c=y_2d, cmap=plt.cm.RdBu_r)
plt.xticks(())
plt.yticks(())
plt.axis('tight')
#------------------------------ Save SVM and Scaler to a File ------
joblib.dump(clf, dir_wrk+'/Features/Dump_Files/betaSVM.pkl',compress=3)
joblib.dump(scaler,dir_wrk+'/Features/Dump_Files/betaScaler.pkl',compress=3)
#------------------------------ Save SVM data files to a File ------
exec "savemat('%s/Features/SVM_DataFile/Test_1_B_SVM_2D.mat', {'svm data':X_2d,'svm target':y_2d,'svm test':T_2d, 'svm target test':yy_2d, 'features names':feature_names_2d,'target names':target_names_2d}, do_compression=1)" %(dir_wrk)
    fig.canvas.set_window_title('SVM RBF Parameters for "Beta"')
return manual_param, manual_score, best_param, best_score
#-------------------------- SVM Wleft ----------------------------------
def svm_wleft_():
#------------------------------ labels -----------------------------
# data full
target_names = np.array(['active','standard','offline','eog'])
# data 2D
target_names_2d = np.array(['active','standard'])
feature_names_2d = ['a1_froben','a2_hjorth']
#------------------------------ Variables --------------------------
seq_ = []
for x in range(total_n_emg):exec "seq_.append('part_%s')" % (x)
svm_data = [];svm_target = [];svm_target_test = [];svm_test = []
for x,cls in enumerate(target_names):
exec "%s_froben = []" % (cls)
exec "%s_hjr = []" % (cls)
#------------------------------ Extract Feats ----------------------
exec "mat_contents = loadmat_new.loadmat('%s/Features/Parameters_files/Left_Wink.mat')" %(dir_wrk)
active = mat_contents['active'];
standard = mat_contents['standard']
offline = mat_contents['offline']
eog = mat_contents['eog']
for y,cls in enumerate(target_names):
exec "%s_class = []" %(cls)
exec "%s_class_data = []" %(cls)
exec "%s_class_test = []" %(cls)
exec "%s_class_target = []" %(cls)
#------------------------------ Create classes ---------------------
for y,cls in enumerate(target_names):
for x in range(total_n_emg):
exec "%s_class.append(np.asarray(collections.OrderedDict(sorted(%s[seq_[%s]].items())).values()))" % (cls,cls,x)
exec "rndm.shuffle(%s_class)" %(cls)
#------------------------------ test and data extract --------------
for y,cls in enumerate(target_names):
for x in range(total_n_emg):
exec "if %s < test_n_emg:%s_class_test.append(%s_class[x])" %(x,cls,cls)
exec "if %s >= test_n_emg:%s_class_data.append(%s_class[x])" %(x,cls,cls)
for y,cls in enumerate(target_names):
exec "%s_class_test = np.asarray(%s_class_test)" %(cls,cls)
exec "%s_class_data = np.asarray(%s_class_data)" %(cls,cls)
#------------------------------ Create targets ---------------------
for x in range(numClss):
exec "class_%s = [x]*50" %(x)
exec "class_test_%s = [x]*25" %(x)
exec "svm_target.append(class_%s)" %(x)
exec "svm_target_test.append(class_test_%s)" %(x)
#------------------------------ test and data file ---------------------
for y,cls in enumerate(target_names):
exec "svm_data.append(%s_class_data)" %(cls)
exec "svm_test.append(%s_class_test)" %(cls)
for y,cls in enumerate(['target_test','target','data','test']):
exec "svm_%s = list(chain.from_iterable(svm_%s))" %(cls,cls)
exec "svm_%s = np.asarray(svm_%s)" %(cls,cls)
#------------------------------ Train SVM --------------------------
X = svm_data; y = svm_target; T = svm_test;yy = svm_target_test
#------------------------------ Data Binary Class and Feats --------
#0,49 50,99 100,149 150,199
#50 20 15 15
#0,24 25,49 50,74 75,99
#25 11 7 7
# Train part
X_2d = np.delete(X,range(50,80)+range(100,135)+range(150,185),axis=0)
y_2d = y[y < 2]
# Test part
T_2d = np.delete(T,range(25,39)+range(50,68)+range(75,93),axis=0)
yy_2d = yy[yy < 2]
#------------------------------ Standardize data ------------
scaler = StandardScaler()
X_2d = scaler.fit_transform(X_2d)
T_2d_scaled = scaler.transform(T_2d)
#------------------------------ Create Classifier ------------------
manual_param = {'C':100,'gamma':0.1}
clf = SVC(gamma=manual_param['gamma'], C=manual_param['C'])
clf.fit(X_2d, y_2d)
    #------------------------------ Grid-Search Classifier -------------
C_range = np.logspace(1, 3, 3)
gamma_range = np.logspace(-3, -1, 3)
param_grid = dict(gamma=gamma_range, C=C_range)
cv = StratifiedShuffleSplit(y_2d, n_iter=100, test_size=0.2, random_state=42)
grid = GridSearchCV(SVC(), param_grid=param_grid, cv=cv)
    #------------------------------ Best Parameter Approx. -------------
grid.fit(X_2d, y_2d)
best_param = grid.best_params_
best_score = grid.best_score_
manual_score = clf.score(T_2d_scaled,yy_2d)
C_2d_range = [1e1, 1e2, 1e3];
gamma_2d_range = [1e-3, 1e-2, 0.1]
classifiers = []
for C in C_2d_range:
for gamma in gamma_2d_range:
clf_auto = SVC(C=C, gamma=gamma)
clf_auto.fit(X_2d, y_2d)
classifiers.append((C, gamma, clf_auto))
#------------------------------ Parameters Visualization -----------
fig = plt.figure(figsize=(8, 6))
max_x = max(X_2d[:, 0]); min_x = min(X_2d[:, 0])
max_y = max(X_2d[:, 1]); min_y = min(X_2d[:, 1])
xx, yy = np.meshgrid(np.linspace((min_x-abs(min_x*(0.15))),
(max_x+max_x*(0.15)), 200),
np.linspace((min_y-abs(min_y*(0.15))),
(max_y+max_y*(0.15)), 200))
for (k, (C, gamma, clf_auto)) in enumerate(classifiers):
# evaluate decision function in a grid
Z = clf_auto.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# visualize decision function for these parameters
plt.subplot(len(C_2d_range), len(gamma_2d_range), k + 1)
plt.title("gamma=10^%d, C=10^%d" % (np.log10(gamma), np.log10(C)),
size='medium')
# visualize parameter's effect on decision function
plt.pcolormesh(xx, yy, -Z, cmap=plt.cm.RdBu)
plt.scatter(X_2d[:, 0], X_2d[:, 1], c=y_2d, cmap=plt.cm.RdBu_r)
plt.xticks(())
plt.yticks(())
plt.axis('tight')
#------------------------------ Save SVM and Scaler to a File ------
joblib.dump(clf, dir_wrk+'/Features/Dump_Files/wleftSVM.pkl',compress=3)
joblib.dump(scaler,dir_wrk+'/Features/Dump_Files/wleftScaler.pkl',compress=3)
#------------------------------ Save SVM data files to a File ------
exec "savemat('%s/Features/SVM_DataFile/Wink_Left_SVM_2D.mat', {'svm data':X_2d,'svm target':y_2d,'svm test':T_2d, 'svm target test':yy_2d, 'features names':feature_names_2d,'target names':target_names_2d}, do_compression=1)" %(dir_wrk)
    fig.canvas.set_window_title('SVM RBF Parameters for "Left Wink"')
return manual_param, manual_score, best_param, best_score
#-------------------------- SVM wright ---------------------------------
def svm_wright_():
#------------------------------ labels -----------------------------
# data full
target_names = np.array(['active','standard','offline','eog'])
# data 2D
target_names_2d = np.array(['active','standard'])
feature_names_2d = ['a1_froben','a2_hjorth']
#------------------------------ Variables ------------------------------
seq_ = []
for x in range(total_n_emg):exec "seq_.append('part_%s')" % (x)
svm_data = [];svm_target = [];svm_target_test = [];svm_test = []
for x,cls in enumerate(target_names):
exec "%s_froben = []" % (cls)
exec "%s_hjr = []" % (cls)
#------------------------------ Extract Feats ----------------------
exec "mat_contents = loadmat_new.loadmat('%s/Features/Parameters_files/Right_Wink.mat')" %(dir_wrk)
active = mat_contents['active'];
standard = mat_contents['standard']
offline = mat_contents['offline']
eog = mat_contents['eog']
for y,cls in enumerate(target_names):
exec "%s_class = []" %(cls)
exec "%s_class_data = []" %(cls)
exec "%s_class_test = []" %(cls)
exec "%s_class_target = []" %(cls)
#------------------------------ Create classes ---------------------
for y,cls in enumerate(target_names):
for x in range(total_n_emg):
exec "%s_class.append(np.asarray(collections.OrderedDict(sorted(%s[seq_[%s]].items())).values()))" % (cls,cls,x)
exec "rndm.shuffle(%s_class)" %(cls)
#------------------------------ test and data extract --------------
for y,cls in enumerate(target_names):
for x in range(total_n_emg):
exec "if %s < test_n_emg:%s_class_test.append(%s_class[x])" %(x,cls,cls)
exec "if %s >= test_n_emg:%s_class_data.append(%s_class[x])" %(x,cls,cls)
for y,cls in enumerate(target_names):
exec "%s_class_test = np.asarray(%s_class_test)" %(cls,cls)
exec "%s_class_data = np.asarray(%s_class_data)" %(cls,cls)
#------------------------------ Create targets ---------------------
for x in range(numClss):
exec "class_%s = [x]*50" %(x)
exec "class_test_%s = [x]*25" %(x)
exec "svm_target.append(class_%s)" %(x)
exec "svm_target_test.append(class_test_%s)" %(x)
#------------------------------ test and data file ---------------------
for y,cls in enumerate(target_names):
exec "svm_data.append(%s_class_data)" %(cls)
exec "svm_test.append(%s_class_test)" %(cls)
for y,cls in enumerate(['target_test','target','data','test']):
exec "svm_%s = list(chain.from_iterable(svm_%s))" %(cls,cls)
exec "svm_%s = np.asarray(svm_%s)" %(cls,cls)
#------------------------------ Train SVM --------------------------
X = svm_data; y = svm_target; T = svm_test;yy = svm_target_test
#------------------------------ Data Binary Class and Feats --------
#0,49 50,99 100,149 150,199
#50 20 15 15
#0,24 25,49 50,74 75,99
#25 11 7 7
# Train part
X_2d = np.delete(X,range(50,80)+range(100,135)+range(150,185),axis=0)
y_2d = y[y < 2]
# Test part
T_2d = np.delete(T,range(25,39)+range(50,68)+range(75,93),axis=0)
yy_2d = yy[yy < 2]
#------------------------------ Standardize data ------------
scaler = StandardScaler()
X_2d = scaler.fit_transform(X_2d)
T_2d_scaled = scaler.transform(T_2d)
#------------------------------ Create Classifier ------------------
manual_param = {'C':100,'gamma':0.1}
clf = SVC(gamma=manual_param['gamma'], C=manual_param['C'])
clf.fit(X_2d, y_2d)
    #------------------------------ Grid-Search Classifier -------------
C_range = np.logspace(1, 3, 3)
gamma_range = np.logspace(-3, -1, 3)
param_grid = dict(gamma=gamma_range, C=C_range)
cv = StratifiedShuffleSplit(y_2d, n_iter=100, test_size=0.2, random_state=42)
grid = GridSearchCV(SVC(), param_grid=param_grid, cv=cv)
    #------------------------------ Best Parameter Approx. -------------
grid.fit(X_2d, y_2d)
best_param = grid.best_params_
best_score = grid.best_score_
manual_score = clf.score(T_2d_scaled,yy_2d)
C_2d_range = [1e1, 1e2, 1e3];
gamma_2d_range = [1e-3, 1e-2, 0.1]
classifiers = []
for C in C_2d_range:
for gamma in gamma_2d_range:
clf_auto = SVC(C=C, gamma=gamma)
clf_auto.fit(X_2d, y_2d)
classifiers.append((C, gamma, clf_auto))
#------------------------------ Parameters Visualization -----------
fig = plt.figure(figsize=(8, 6))
max_x = max(X_2d[:, 0]); min_x = min(X_2d[:, 0])
max_y = max(X_2d[:, 1]); min_y = min(X_2d[:, 1])
xx, yy = np.meshgrid(np.linspace((min_x-abs(min_x*(0.15))),
(max_x+max_x*(0.15)), 200),
np.linspace((min_y-abs(min_y*(0.15))),
(max_y+max_y*(0.15)), 200))
for (k, (C, gamma, clf_auto)) in enumerate(classifiers):
# evaluate decision function in a grid
Z = clf_auto.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# visualize decision function for these parameters
plt.subplot(len(C_2d_range), len(gamma_2d_range), k + 1)
plt.title("gamma=10^%d, C=10^%d" % (np.log10(gamma), np.log10(C)),
size='medium')
# visualize parameter's effect on decision function
plt.pcolormesh(xx, yy, -Z, cmap=plt.cm.RdBu)
plt.scatter(X_2d[:, 0], X_2d[:, 1], c=y_2d, cmap=plt.cm.RdBu_r)
plt.xticks(())
plt.yticks(())
plt.axis('tight')
#------------------------------ Save SVM and Scaler to a File ------
joblib.dump(clf, dir_wrk+'/Features/Dump_Files/wrightSVM.pkl',compress=3)
joblib.dump(scaler,dir_wrk+'/Features/Dump_Files/wrightScaler.pkl',compress=3)
#------------------------------ Save SVM data files to a File ------
exec "savemat('%s/Features/SVM_DataFile/Wink_Right_SVM_2D.mat', {'svm data':X_2d,'svm target':y_2d,'svm test':T_2d, 'svm target test':yy_2d, 'features names':feature_names_2d,'target names':target_names_2d}, do_compression=1)" %(dir_wrk)
    fig.canvas.set_window_title('SVM RBF Parameters for "Right Wink"')
plt.show()
return manual_param, manual_score, best_param, best_score
#-------------------------- SVM Main -----------------------------------
def feat_extract_(self,name):
if exists(dir_wrk+'/User_Data/'+name):
self.label_results.setText("Procesando...")
alpha_extra_()
beta_extra_()
emg_extra_()
plot_alpha_()
plot_beta_()
plot_wleft_()
plot_wright_()
man_param1, man_score1, auto_param1, auto_score1 = svm_alpha_()
man_param2, man_score2, auto_param2, auto_score2 = svm_beta_()
man_param3, man_score3, auto_param3, auto_score3 = svm_wleft_()
man_param4, man_score4, auto_param4, auto_score4 = svm_wright_()
self.label_results.setText("The best manual parameters for Alpha are\n{:<20}\nwith a score of {:.0%}\nThe best automatic parameters for Alpha are\n{:<20}\nwith a score of {:.0%}\n \nThe best manual parameters for Beta are\n{:<20}\nwith a score of {:.0%}\nThe best automatic parameters for Beta are\n{:<20}\nwith a score of {:.0%}\n \nThe best manual parameters for Parpadeo Izq. are\n{:<20}\nwith a score of {:.0%}\nThe best automatic parameters for Parpadeo Izq. are\n{:<20}\nwith a score of {:.0%}\n \nThe best manual parameters for Parpadeo Der. are\n{:<20}\nwith a score of {:.0%}\nThe best automatic parameters for Parpadeo Der. are\n{:<20}\nwith a score of {:.0%}\n \n".
format(man_param1, man_score1, auto_param1, auto_score1,man_param2, man_score2, auto_param2, auto_score2, man_param3, man_score3, auto_param3, auto_score3, man_param4, man_score4, auto_param4, auto_score4))
else:
self.label_results.setText("Usuario no encontrado")
| gpl-3.0 |
anntzer/scipy | scipy/linalg/_basic.py | 7 | 64381 | #
# Author: Pearu Peterson, March 2002
#
# w/ additions by Travis Oliphant, March 2002
# and Jake Vanderplas, August 2012
from warnings import warn
import numpy as np
from numpy import atleast_1d, atleast_2d
from ._flinalg_py import get_flinalg_funcs
from .lapack import get_lapack_funcs, _compute_lwork
from ._misc import LinAlgError, _datacopied, LinAlgWarning
from ._decomp import _asarray_validated
from . import _decomp, _decomp_svd
from ._solve_toeplitz import levinson
__all__ = ['solve', 'solve_triangular', 'solveh_banded', 'solve_banded',
'solve_toeplitz', 'solve_circulant', 'inv', 'det', 'lstsq',
'pinv', 'pinvh', 'matrix_balance', 'matmul_toeplitz']
# Linear equations
def _solve_check(n, info, lamch=None, rcond=None):
""" Check arguments during the different steps of the solution phase """
if info < 0:
raise ValueError('LAPACK reported an illegal value in {}-th argument'
'.'.format(-info))
elif 0 < info:
raise LinAlgError('Matrix is singular.')
if lamch is None:
return
E = lamch('E')
if rcond < E:
warn('Ill-conditioned matrix (rcond={:.6g}): '
'result may not be accurate.'.format(rcond),
LinAlgWarning, stacklevel=3)
def solve(a, b, sym_pos=False, lower=False, overwrite_a=False,
overwrite_b=False, check_finite=True, assume_a='gen',
transposed=False):
"""
Solves the linear equation set ``a @ x == b`` for the unknown ``x``
for square `a` matrix.
If the data matrix is known to be a particular type then supplying the
corresponding string to ``assume_a`` key chooses the dedicated solver.
The available options are
=================== ========
generic matrix 'gen'
symmetric 'sym'
hermitian 'her'
positive definite 'pos'
=================== ========
If omitted, ``'gen'`` is the default structure.
The datatype of the arrays define which solver is called regardless
of the values. In other words, even when the complex array entries have
precisely zero imaginary parts, the complex solver will be called based
on the data type of the array.
Parameters
----------
a : (N, N) array_like
Square input data
b : (N, NRHS) array_like
Input data for the right hand side.
sym_pos : bool, default: False, deprecated
Assume `a` is symmetric and positive definite.
.. deprecated:: 0.19.0
This keyword is deprecated and should be replaced by using
``assume_a = 'pos'``. `sym_pos` will be removed in SciPy 1.11.0.
lower : bool, default: False
Ignored if ``assume_a == 'gen'`` (the default). If True, the
calculation uses only the data in the lower triangle of `a`;
entries above the diagonal are ignored. If False (default), the
calculation uses only the data in the upper triangle of `a`; entries
below the diagonal are ignored.
overwrite_a : bool, default: False
Allow overwriting data in `a` (may enhance performance).
overwrite_b : bool, default: False
Allow overwriting data in `b` (may enhance performance).
check_finite : bool, default: True
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
assume_a : str, {'gen', 'sym', 'her', 'pos'}
Valid entries are explained above.
transposed : bool, default: False
If True, solve ``a.T @ x == b``. Raises `NotImplementedError`
for complex `a`.
Returns
-------
x : (N, NRHS) ndarray
The solution array.
Raises
------
ValueError
If size mismatches detected or input a is not square.
LinAlgError
If the matrix is singular.
LinAlgWarning
If an ill-conditioned input a is detected.
NotImplementedError
If transposed is True and input a is a complex matrix.
Notes
-----
If the input b matrix is a 1-D array with N elements, when supplied
together with an NxN input a, it is assumed as a valid column vector
despite the apparent size mismatch. This is compatible with the
numpy.dot() behavior and the returned result is still 1-D array.
The generic, symmetric, Hermitian and positive definite solutions are
obtained via calling ?GESV, ?SYSV, ?HESV, and ?POSV routines of
LAPACK respectively.
Examples
--------
Given `a` and `b`, solve for `x`:
>>> import numpy as np
>>> a = np.array([[3, 2, 0], [1, -1, 0], [0, 5, 1]])
>>> b = np.array([2, 4, -1])
>>> from scipy import linalg
>>> x = linalg.solve(a, b)
>>> x
array([ 2., -2., 9.])
>>> np.dot(a, x) == b
array([ True, True, True], dtype=bool)
"""
# Flags for 1-D or N-D right-hand side
b_is_1D = False
a1 = atleast_2d(_asarray_validated(a, check_finite=check_finite))
b1 = atleast_1d(_asarray_validated(b, check_finite=check_finite))
n = a1.shape[0]
overwrite_a = overwrite_a or _datacopied(a1, a)
overwrite_b = overwrite_b or _datacopied(b1, b)
if a1.shape[0] != a1.shape[1]:
raise ValueError('Input a needs to be a square matrix.')
if n != b1.shape[0]:
# Last chance to catch 1x1 scalar a and 1-D b arrays
if not (n == 1 and b1.size != 0):
raise ValueError('Input b has to have same number of rows as '
'input a')
# accommodate empty arrays
if b1.size == 0:
return np.asfortranarray(b1.copy())
# regularize 1-D b arrays to 2D
if b1.ndim == 1:
if n == 1:
b1 = b1[None, :]
else:
b1 = b1[:, None]
b_is_1D = True
# Backwards compatibility - old keyword.
if sym_pos:
message = ("The 'sym_pos' keyword is deprecated and should be "
"replaced by using 'assume_a = \"pos\"'. 'sym_pos' will be"
" removed in SciPy 1.11.0.")
warn(message, DeprecationWarning, stacklevel=2)
assume_a = 'pos'
if assume_a not in ('gen', 'sym', 'her', 'pos'):
raise ValueError('{} is not a recognized matrix structure'
''.format(assume_a))
# for a real matrix, describe it as "symmetric", not "hermitian"
# (lapack doesn't know what to do with real hermitian matrices)
if assume_a == 'her' and not np.iscomplexobj(a1):
assume_a = 'sym'
# Get the correct lamch function.
# The LAMCH functions only exists for S and D
# So for complex values we have to convert to real/double.
if a1.dtype.char in 'fF': # single precision
lamch = get_lapack_funcs('lamch', dtype='f')
else:
lamch = get_lapack_funcs('lamch', dtype='d')
# Currently we do not have the other forms of the norm calculators
# lansy, lanpo, lanhe.
# However, in any case they only reduce computations slightly...
lange = get_lapack_funcs('lange', (a1,))
# Since the I-norm and 1-norm are the same for symmetric matrices
# we can collect them all in this one call
# Note however, that when issuing 'gen' and form!='none', then
# the I-norm should be used
if transposed:
trans = 1
norm = 'I'
if np.iscomplexobj(a1):
raise NotImplementedError('scipy.linalg.solve can currently '
'not solve a^T x = b or a^H x = b '
'for complex matrices.')
else:
trans = 0
norm = '1'
anorm = lange(norm, a1)
# Generalized case 'gesv'
if assume_a == 'gen':
gecon, getrf, getrs = get_lapack_funcs(('gecon', 'getrf', 'getrs'),
(a1, b1))
lu, ipvt, info = getrf(a1, overwrite_a=overwrite_a)
_solve_check(n, info)
x, info = getrs(lu, ipvt, b1,
trans=trans, overwrite_b=overwrite_b)
_solve_check(n, info)
rcond, info = gecon(lu, anorm, norm=norm)
# Hermitian case 'hesv'
elif assume_a == 'her':
hecon, hesv, hesv_lw = get_lapack_funcs(('hecon', 'hesv',
'hesv_lwork'), (a1, b1))
lwork = _compute_lwork(hesv_lw, n, lower)
lu, ipvt, x, info = hesv(a1, b1, lwork=lwork,
lower=lower,
overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
_solve_check(n, info)
rcond, info = hecon(lu, ipvt, anorm)
# Symmetric case 'sysv'
elif assume_a == 'sym':
sycon, sysv, sysv_lw = get_lapack_funcs(('sycon', 'sysv',
'sysv_lwork'), (a1, b1))
lwork = _compute_lwork(sysv_lw, n, lower)
lu, ipvt, x, info = sysv(a1, b1, lwork=lwork,
lower=lower,
overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
_solve_check(n, info)
rcond, info = sycon(lu, ipvt, anorm)
# Positive definite case 'posv'
else:
pocon, posv = get_lapack_funcs(('pocon', 'posv'),
(a1, b1))
lu, x, info = posv(a1, b1, lower=lower,
overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
_solve_check(n, info)
rcond, info = pocon(lu, anorm)
_solve_check(n, info, lamch, rcond)
if b_is_1D:
x = x.ravel()
return x
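# Editorial usage sketch (not part of SciPy): for a symmetric positive
# definite system, ``assume_a='pos'`` routes `solve` to the Cholesky-based
# ?POSV driver instead of the generic ?GESV LU path described above, e.g.::
#
#     a = np.array([[4., 2.], [2., 3.]])   # symmetric positive definite
#     b = np.array([1., 2.])
#     x = solve(a, b, assume_a='pos')      # same x as solve(a, b)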
def solve_triangular(a, b, trans=0, lower=False, unit_diagonal=False,
overwrite_b=False, check_finite=True):
"""
Solve the equation `a x = b` for `x`, assuming a is a triangular matrix.
Parameters
----------
a : (M, M) array_like
A triangular matrix
b : (M,) or (M, N) array_like
Right-hand side matrix in `a x = b`
lower : bool, optional
Use only data contained in the lower triangle of `a`.
Default is to use upper triangle.
trans : {0, 1, 2, 'N', 'T', 'C'}, optional
Type of system to solve:
======== =========
trans system
======== =========
0 or 'N' a x = b
1 or 'T' a^T x = b
2 or 'C' a^H x = b
======== =========
unit_diagonal : bool, optional
If True, diagonal elements of `a` are assumed to be 1 and
will not be referenced.
overwrite_b : bool, optional
Allow overwriting data in `b` (may enhance performance)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, N) ndarray
Solution to the system `a x = b`. Shape of return matches `b`.
Raises
------
LinAlgError
If `a` is singular
Notes
-----
.. versionadded:: 0.9.0
Examples
--------
Solve the lower triangular system a x = b, where::
[3 0 0 0] [4]
a = [2 1 0 0] b = [2]
[1 0 1 0] [4]
[1 1 1 1] [2]
>>> import numpy as np
>>> from scipy.linalg import solve_triangular
>>> a = np.array([[3, 0, 0, 0], [2, 1, 0, 0], [1, 0, 1, 0], [1, 1, 1, 1]])
>>> b = np.array([4, 2, 4, 2])
>>> x = solve_triangular(a, b, lower=True)
>>> x
array([ 1.33333333, -0.66666667, 2.66666667, -1.33333333])
>>> a.dot(x) # Check the result
array([ 4., 2., 4., 2.])
"""
a1 = _asarray_validated(a, check_finite=check_finite)
b1 = _asarray_validated(b, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
if a1.shape[0] != b1.shape[0]:
raise ValueError('shapes of a {} and b {} are incompatible'
.format(a1.shape, b1.shape))
overwrite_b = overwrite_b or _datacopied(b1, b)
trans = {'N': 0, 'T': 1, 'C': 2}.get(trans, trans)
trtrs, = get_lapack_funcs(('trtrs',), (a1, b1))
if a1.flags.f_contiguous or trans == 2:
x, info = trtrs(a1, b1, overwrite_b=overwrite_b, lower=lower,
trans=trans, unitdiag=unit_diagonal)
else:
# transposed system is solved since trtrs expects Fortran ordering
x, info = trtrs(a1.T, b1, overwrite_b=overwrite_b, lower=not lower,
trans=not trans, unitdiag=unit_diagonal)
if info == 0:
return x
if info > 0:
raise LinAlgError("singular matrix: resolution failed at diagonal %d" %
(info-1))
raise ValueError('illegal value in %dth argument of internal trtrs' %
(-info))
def solve_banded(l_and_u, ab, b, overwrite_ab=False, overwrite_b=False,
check_finite=True):
"""
Solve the equation a x = b for x, assuming a is banded matrix.
The matrix a is stored in `ab` using the matrix diagonal ordered form::
ab[u + i - j, j] == a[i,j]
Example of `ab` (shape of a is (6,6), `u` =1, `l` =2)::
* a01 a12 a23 a34 a45
a00 a11 a22 a33 a44 a55
a10 a21 a32 a43 a54 *
a20 a31 a42 a53 * *
Parameters
----------
(l, u) : (integer, integer)
Number of non-zero lower and upper diagonals
ab : (`l` + `u` + 1, M) array_like
Banded matrix
b : (M,) or (M, K) array_like
Right-hand side
overwrite_ab : bool, optional
Discard data in `ab` (may enhance performance)
overwrite_b : bool, optional
Discard data in `b` (may enhance performance)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, K) ndarray
The solution to the system a x = b. Returned shape depends on the
shape of `b`.
Examples
--------
Solve the banded system a x = b, where::
[5 2 -1 0 0] [0]
[1 4 2 -1 0] [1]
a = [0 1 3 2 -1] b = [2]
[0 0 1 2 2] [2]
[0 0 0 1 1] [3]
There is one nonzero diagonal below the main diagonal (l = 1), and
two above (u = 2). The diagonal banded form of the matrix is::
[* * -1 -1 -1]
ab = [* 2 2 2 2]
[5 4 3 2 1]
[1 1 1 1 *]
>>> import numpy as np
>>> from scipy.linalg import solve_banded
>>> ab = np.array([[0, 0, -1, -1, -1],
... [0, 2, 2, 2, 2],
... [5, 4, 3, 2, 1],
... [1, 1, 1, 1, 0]])
>>> b = np.array([0, 1, 2, 2, 3])
>>> x = solve_banded((1, 2), ab, b)
>>> x
array([-2.37288136, 3.93220339, -4. , 4.3559322 , -1.3559322 ])
"""
a1 = _asarray_validated(ab, check_finite=check_finite, as_inexact=True)
b1 = _asarray_validated(b, check_finite=check_finite, as_inexact=True)
# Validate shapes.
if a1.shape[-1] != b1.shape[0]:
raise ValueError("shapes of ab and b are not compatible.")
(nlower, nupper) = l_and_u
if nlower + nupper + 1 != a1.shape[0]:
raise ValueError("invalid values for the number of lower and upper "
"diagonals: l+u+1 (%d) does not equal ab.shape[0] "
"(%d)" % (nlower + nupper + 1, ab.shape[0]))
overwrite_b = overwrite_b or _datacopied(b1, b)
if a1.shape[-1] == 1:
b2 = np.array(b1, copy=(not overwrite_b))
b2 /= a1[1, 0]
return b2
if nlower == nupper == 1:
overwrite_ab = overwrite_ab or _datacopied(a1, ab)
gtsv, = get_lapack_funcs(('gtsv',), (a1, b1))
du = a1[0, 1:]
d = a1[1, :]
dl = a1[2, :-1]
du2, d, du, x, info = gtsv(dl, d, du, b1, overwrite_ab, overwrite_ab,
overwrite_ab, overwrite_b)
else:
gbsv, = get_lapack_funcs(('gbsv',), (a1, b1))
a2 = np.zeros((2*nlower + nupper + 1, a1.shape[1]), dtype=gbsv.dtype)
a2[nlower:, :] = a1
lu, piv, x, info = gbsv(nlower, nupper, a2, b1, overwrite_ab=True,
overwrite_b=overwrite_b)
if info == 0:
return x
if info > 0:
raise LinAlgError("singular matrix")
raise ValueError('illegal value in %d-th argument of internal '
'gbsv/gtsv' % -info)
def solveh_banded(ab, b, overwrite_ab=False, overwrite_b=False, lower=False,
check_finite=True):
"""
Solve equation a x = b. a is Hermitian positive-definite banded matrix.
Uses Thomas' Algorithm, which is more efficient than standard LU
factorization, but should only be used for Hermitian positive-definite
matrices.
The matrix ``a`` is stored in `ab` either in lower diagonal or upper
diagonal ordered form:
ab[u + i - j, j] == a[i,j] (if upper form; i <= j)
ab[ i - j, j] == a[i,j] (if lower form; i >= j)
Example of `ab` (shape of ``a`` is (6, 6), number of upper diagonals,
``u`` =2)::
upper form:
* * a02 a13 a24 a35
* a01 a12 a23 a34 a45
a00 a11 a22 a33 a44 a55
lower form:
a00 a11 a22 a33 a44 a55
a10 a21 a32 a43 a54 *
a20 a31 a42 a53 * *
Cells marked with * are not used.
Parameters
----------
ab : (``u`` + 1, M) array_like
Banded matrix
b : (M,) or (M, K) array_like
Right-hand side
overwrite_ab : bool, optional
Discard data in `ab` (may enhance performance)
overwrite_b : bool, optional
Discard data in `b` (may enhance performance)
lower : bool, optional
Is the matrix in the lower form. (Default is upper form)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, K) ndarray
The solution to the system ``a x = b``. Shape of return matches shape
of `b`.
Notes
-----
In the case of a non-positive definite matrix ``a``, the solver
`solve_banded` may be used.
Examples
--------
Solve the banded system ``A x = b``, where::
[ 4 2 -1 0 0 0] [1]
[ 2 5 2 -1 0 0] [2]
A = [-1 2 6 2 -1 0] b = [2]
[ 0 -1 2 7 2 -1] [3]
[ 0 0 -1 2 8 2] [3]
[ 0 0 0 -1 2 9] [3]
>>> import numpy as np
>>> from scipy.linalg import solveh_banded
``ab`` contains the main diagonal and the nonzero diagonals below the
main diagonal. That is, we use the lower form:
>>> ab = np.array([[ 4, 5, 6, 7, 8, 9],
... [ 2, 2, 2, 2, 2, 0],
... [-1, -1, -1, -1, 0, 0]])
>>> b = np.array([1, 2, 2, 3, 3, 3])
>>> x = solveh_banded(ab, b, lower=True)
>>> x
array([ 0.03431373, 0.45938375, 0.05602241, 0.47759104, 0.17577031,
0.34733894])
Solve the Hermitian banded system ``H x = b``, where::
[ 8 2-1j 0 0 ] [ 1 ]
H = [2+1j 5 1j 0 ] b = [1+1j]
[ 0 -1j 9 -2-1j] [1-2j]
[ 0 0 -2+1j 6 ] [ 0 ]
In this example, we put the upper diagonals in the array ``hb``:
>>> hb = np.array([[0, 2-1j, 1j, -2-1j],
... [8, 5, 9, 6 ]])
>>> b = np.array([1, 1+1j, 1-2j, 0])
>>> x = solveh_banded(hb, b)
>>> x
array([ 0.07318536-0.02939412j, 0.11877624+0.17696461j,
0.10077984-0.23035393j, -0.00479904-0.09358128j])
"""
a1 = _asarray_validated(ab, check_finite=check_finite)
b1 = _asarray_validated(b, check_finite=check_finite)
# Validate shapes.
if a1.shape[-1] != b1.shape[0]:
raise ValueError("shapes of ab and b are not compatible.")
overwrite_b = overwrite_b or _datacopied(b1, b)
overwrite_ab = overwrite_ab or _datacopied(a1, ab)
if a1.shape[0] == 2:
ptsv, = get_lapack_funcs(('ptsv',), (a1, b1))
if lower:
d = a1[0, :].real
e = a1[1, :-1]
else:
d = a1[1, :].real
e = a1[0, 1:].conj()
d, du, x, info = ptsv(d, e, b1, overwrite_ab, overwrite_ab,
overwrite_b)
else:
pbsv, = get_lapack_funcs(('pbsv',), (a1, b1))
c, x, info = pbsv(a1, b1, lower=lower, overwrite_ab=overwrite_ab,
overwrite_b=overwrite_b)
if info > 0:
raise LinAlgError("%dth leading minor not positive definite" % info)
if info < 0:
raise ValueError('illegal value in %dth argument of internal '
'pbsv' % -info)
return x
def solve_toeplitz(c_or_cr, b, check_finite=True):
"""Solve a Toeplitz system using Levinson Recursion
The Toeplitz matrix has constant diagonals, with c as its first column
and r as its first row. If r is not given, ``r == conjugate(c)`` is
assumed.
Parameters
----------
c_or_cr : array_like or tuple of (array_like, array_like)
The vector ``c``, or a tuple of arrays (``c``, ``r``). Whatever the
actual shape of ``c``, it will be converted to a 1-D array. If not
supplied, ``r = conjugate(c)`` is assumed; in this case, if c[0] is
real, the Toeplitz matrix is Hermitian. r[0] is ignored; the first row
of the Toeplitz matrix is ``[c[0], r[1:]]``. Whatever the actual shape
of ``r``, it will be converted to a 1-D array.
b : (M,) or (M, K) array_like
Right-hand side in ``T x = b``.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(result entirely NaNs) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, K) ndarray
The solution to the system ``T x = b``. Shape of return matches shape
of `b`.
See Also
--------
toeplitz : Toeplitz matrix
Notes
-----
The solution is computed using Levinson-Durbin recursion, which is faster
than generic least-squares methods, but can be less numerically stable.
Examples
--------
Solve the Toeplitz system T x = b, where::
[ 1 -1 -2 -3] [1]
T = [ 3 1 -1 -2] b = [2]
[ 6 3 1 -1] [2]
[10 6 3 1] [5]
To specify the Toeplitz matrix, only the first column and the first
row are needed.
>>> import numpy as np
>>> c = np.array([1, 3, 6, 10]) # First column of T
>>> r = np.array([1, -1, -2, -3]) # First row of T
>>> b = np.array([1, 2, 2, 5])
>>> from scipy.linalg import solve_toeplitz, toeplitz
>>> x = solve_toeplitz((c, r), b)
>>> x
array([ 1.66666667, -1. , -2.66666667, 2.33333333])
Check the result by creating the full Toeplitz matrix and
multiplying it by `x`. We should get `b`.
>>> T = toeplitz(c, r)
>>> T.dot(x)
array([ 1., 2., 2., 5.])
"""
# If numerical stability of this algorithm is a problem, a future
# developer might consider implementing other O(N^2) Toeplitz solvers,
# such as GKO (https://www.jstor.org/stable/2153371) or Bareiss.
r, c, b, dtype, b_shape = _validate_args_for_toeplitz_ops(
c_or_cr, b, check_finite, keep_b_shape=True)
# Form a 1-D array of values to be used in the matrix, containing a
# reversed copy of r[1:], followed by c.
vals = np.concatenate((r[-1:0:-1], c))
if b is None:
raise ValueError('illegal value, `b` is a required argument')
if b.ndim == 1:
x, _ = levinson(vals, np.ascontiguousarray(b))
else:
x = np.column_stack([levinson(vals, np.ascontiguousarray(b[:, i]))[0]
for i in range(b.shape[1])])
x = x.reshape(*b_shape)
return x
def _get_axis_len(aname, a, axis):
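    # Length of array `a` along `axis`, with negative axes normalized the
    # NumPy way; `aname` only labels the ValueError raised for an
    # out-of-bounds axis (e.g. 'c' -> "'caxis' entry is out of bounds").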
ax = axis
if ax < 0:
ax += a.ndim
if 0 <= ax < a.ndim:
return a.shape[ax]
raise ValueError("'%saxis' entry is out of bounds" % (aname,))
def solve_circulant(c, b, singular='raise', tol=None,
caxis=-1, baxis=0, outaxis=0):
"""Solve C x = b for x, where C is a circulant matrix.
`C` is the circulant matrix associated with the vector `c`.
The system is solved by doing division in Fourier space. The
calculation is::
x = ifft(fft(b) / fft(c))
where `fft` and `ifft` are the fast Fourier transform and its inverse,
respectively. For a large vector `c`, this is *much* faster than
solving the system with the full circulant matrix.
Parameters
----------
c : array_like
The coefficients of the circulant matrix.
b : array_like
Right-hand side matrix in ``a x = b``.
singular : str, optional
This argument controls how a near singular circulant matrix is
handled. If `singular` is "raise" and the circulant matrix is
near singular, a `LinAlgError` is raised. If `singular` is
"lstsq", the least squares solution is returned. Default is "raise".
tol : float, optional
If any eigenvalue of the circulant matrix has an absolute value
that is less than or equal to `tol`, the matrix is considered to be
near singular. If not given, `tol` is set to::
tol = abs_eigs.max() * abs_eigs.size * np.finfo(np.float64).eps
where `abs_eigs` is the array of absolute values of the eigenvalues
of the circulant matrix.
caxis : int
When `c` has dimension greater than 1, it is viewed as a collection
of circulant vectors. In this case, `caxis` is the axis of `c` that
holds the vectors of circulant coefficients.
baxis : int
When `b` has dimension greater than 1, it is viewed as a collection
of vectors. In this case, `baxis` is the axis of `b` that holds the
right-hand side vectors.
outaxis : int
When `c` or `b` are multidimensional, the value returned by
`solve_circulant` is multidimensional. In this case, `outaxis` is
the axis of the result that holds the solution vectors.
Returns
-------
x : ndarray
Solution to the system ``C x = b``.
Raises
------
LinAlgError
If the circulant matrix associated with `c` is near singular.
See Also
--------
circulant : circulant matrix
Notes
-----
For a 1-D vector `c` with length `m`, and an array `b`
with shape ``(m, ...)``,
solve_circulant(c, b)
returns the same result as
solve(circulant(c), b)
where `solve` and `circulant` are from `scipy.linalg`.
.. versionadded:: 0.16.0
Examples
--------
>>> import numpy as np
>>> from scipy.linalg import solve_circulant, solve, circulant, lstsq
>>> c = np.array([2, 2, 4])
>>> b = np.array([1, 2, 3])
>>> solve_circulant(c, b)
array([ 0.75, -0.25, 0.25])
Compare that result to solving the system with `scipy.linalg.solve`:
>>> solve(circulant(c), b)
array([ 0.75, -0.25, 0.25])
A singular example:
>>> c = np.array([1, 1, 0, 0])
>>> b = np.array([1, 2, 3, 4])
Calling ``solve_circulant(c, b)`` will raise a `LinAlgError`. For the
least square solution, use the option ``singular='lstsq'``:
>>> solve_circulant(c, b, singular='lstsq')
array([ 0.25, 1.25, 2.25, 1.25])
Compare to `scipy.linalg.lstsq`:
>>> x, resid, rnk, s = lstsq(circulant(c), b)
>>> x
array([ 0.25, 1.25, 2.25, 1.25])
A broadcasting example:
Suppose we have the vectors of two circulant matrices stored in an array
with shape (2, 5), and three `b` vectors stored in an array with shape
(3, 5). For example,
>>> c = np.array([[1.5, 2, 3, 0, 0], [1, 1, 4, 3, 2]])
>>> b = np.arange(15).reshape(-1, 5)
We want to solve all combinations of circulant matrices and `b` vectors,
with the result stored in an array with shape (2, 3, 5). When we
disregard the axes of `c` and `b` that hold the vectors of coefficients,
the shapes of the collections are (2,) and (3,), respectively, which are
not compatible for broadcasting. To have a broadcast result with shape
(2, 3), we add a trivial dimension to `c`: ``c[:, np.newaxis, :]`` has
shape (2, 1, 5). The last dimension holds the coefficients of the
circulant matrices, so when we call `solve_circulant`, we can use the
default ``caxis=-1``. The coefficients of the `b` vectors are in the last
dimension of the array `b`, so we use ``baxis=-1``. If we use the
default `outaxis`, the result will have shape (5, 2, 3), so we'll use
``outaxis=-1`` to put the solution vectors in the last dimension.
>>> x = solve_circulant(c[:, np.newaxis, :], b, baxis=-1, outaxis=-1)
>>> x.shape
(2, 3, 5)
>>> np.set_printoptions(precision=3) # For compact output of numbers.
>>> x
array([[[-0.118, 0.22 , 1.277, -0.142, 0.302],
[ 0.651, 0.989, 2.046, 0.627, 1.072],
[ 1.42 , 1.758, 2.816, 1.396, 1.841]],
[[ 0.401, 0.304, 0.694, -0.867, 0.377],
[ 0.856, 0.758, 1.149, -0.412, 0.831],
[ 1.31 , 1.213, 1.603, 0.042, 1.286]]])
Check by solving one pair of `c` and `b` vectors (cf. ``x[1, 1, :]``):
>>> solve_circulant(c[1], b[1, :])
array([ 0.856, 0.758, 1.149, -0.412, 0.831])
"""
c = np.atleast_1d(c)
nc = _get_axis_len("c", c, caxis)
b = np.atleast_1d(b)
nb = _get_axis_len("b", b, baxis)
if nc != nb:
raise ValueError('Shapes of c {} and b {} are incompatible'
.format(c.shape, b.shape))
fc = np.fft.fft(np.moveaxis(c, caxis, -1), axis=-1)
abs_fc = np.abs(fc)
if tol is None:
# This is the same tolerance as used in np.linalg.matrix_rank.
tol = abs_fc.max(axis=-1) * nc * np.finfo(np.float64).eps
if tol.shape != ():
tol.shape = tol.shape + (1,)
else:
tol = np.atleast_1d(tol)
near_zeros = abs_fc <= tol
is_near_singular = np.any(near_zeros)
if is_near_singular:
if singular == 'raise':
raise LinAlgError("near singular circulant matrix.")
else:
# Replace the small values with 1 to avoid errors in the
# division fb/fc below.
fc[near_zeros] = 1
fb = np.fft.fft(np.moveaxis(b, baxis, -1), axis=-1)
q = fb / fc
if is_near_singular:
# `near_zeros` is a boolean array, same shape as `c`, that is
# True where `fc` is (near) zero. `q` is the broadcasted result
# of fb / fc, so to set the values of `q` to 0 where `fc` is near
# zero, we use a mask that is the broadcast result of an array
# of True values shaped like `b` with `near_zeros`.
mask = np.ones_like(b, dtype=bool) & near_zeros
q[mask] = 0
x = np.fft.ifft(q, axis=-1)
if not (np.iscomplexobj(c) or np.iscomplexobj(b)):
x = x.real
if outaxis != -1:
x = np.moveaxis(x, -1, outaxis)
return x
# matrix inversion
def inv(a, overwrite_a=False, check_finite=True):
"""
Compute the inverse of a matrix.
Parameters
----------
a : array_like
Square matrix to be inverted.
overwrite_a : bool, optional
Discard data in `a` (may improve performance). Default is False.
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
ainv : ndarray
Inverse of the matrix `a`.
Raises
------
LinAlgError
If `a` is singular.
ValueError
If `a` is not square, or not 2D.
Examples
--------
>>> import numpy as np
>>> from scipy import linalg
>>> a = np.array([[1., 2.], [3., 4.]])
>>> linalg.inv(a)
array([[-2. , 1. ],
[ 1.5, -0.5]])
>>> np.dot(a, linalg.inv(a))
array([[ 1., 0.],
[ 0., 1.]])
"""
a1 = _asarray_validated(a, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
overwrite_a = overwrite_a or _datacopied(a1, a)
# XXX: I found no advantage or disadvantage of using finv.
# finv, = get_flinalg_funcs(('inv',),(a1,))
# if finv is not None:
# a_inv,info = finv(a1,overwrite_a=overwrite_a)
# if info==0:
# return a_inv
# if info>0: raise LinAlgError, "singular matrix"
# if info<0: raise ValueError('illegal value in %d-th argument of '
# 'internal inv.getrf|getri'%(-info))
getrf, getri, getri_lwork = get_lapack_funcs(('getrf', 'getri',
'getri_lwork'),
(a1,))
lu, piv, info = getrf(a1, overwrite_a=overwrite_a)
if info == 0:
lwork = _compute_lwork(getri_lwork, a1.shape[0])
# XXX: the following line fixes curious SEGFAULT when
# benchmarking 500x500 matrix inverse. This seems to
# be a bug in LAPACK ?getri routine because if lwork is
# minimal (when using lwork[0] instead of lwork[1]) then
# all tests pass. Further investigation is required if
# more such SEGFAULTs occur.
lwork = int(1.01 * lwork)
inv_a, info = getri(lu, piv, lwork=lwork, overwrite_lu=1)
if info > 0:
raise LinAlgError("singular matrix")
if info < 0:
raise ValueError('illegal value in %d-th argument of internal '
'getrf|getri' % -info)
return inv_a
# Determinant
def det(a, overwrite_a=False, check_finite=True):
"""
Compute the determinant of a matrix
The determinant of a square matrix is a value derived arithmetically
from the coefficients of the matrix.
The determinant for a 3x3 matrix, for example, is computed as follows::
a b c
d e f = A
g h i
det(A) = a*e*i + b*f*g + c*d*h - c*e*g - b*d*i - a*f*h
Parameters
----------
a : (M, M) array_like
A square matrix.
overwrite_a : bool, optional
Allow overwriting data in a (may enhance performance).
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
det : float or complex
Determinant of `a`.
Notes
-----
The determinant is computed via LU factorization, LAPACK routine z/dgetrf.
Examples
--------
>>> import numpy as np
>>> from scipy import linalg
>>> a = np.array([[1,2,3], [4,5,6], [7,8,9]])
>>> linalg.det(a)
0.0
>>> a = np.array([[0,2,3], [4,5,6], [7,8,9]])
>>> linalg.det(a)
3.0
"""
a1 = _asarray_validated(a, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
overwrite_a = overwrite_a or _datacopied(a1, a)
fdet, = get_flinalg_funcs(('det',), (a1,))
a_det, info = fdet(a1, overwrite_a=overwrite_a)
if info < 0:
raise ValueError('illegal value in %d-th argument of internal '
'det.getrf' % -info)
return a_det
# Linear Least Squares
def lstsq(a, b, cond=None, overwrite_a=False, overwrite_b=False,
check_finite=True, lapack_driver=None):
"""
Compute least-squares solution to equation Ax = b.
Compute a vector x such that the 2-norm ``|b - A x|`` is minimized.
Parameters
----------
a : (M, N) array_like
Left-hand side array
b : (M,) or (M, K) array_like
Right hand side array
cond : float, optional
Cutoff for 'small' singular values; used to determine effective
rank of a. Singular values smaller than
``cond * largest_singular_value`` are considered zero.
overwrite_a : bool, optional
Discard data in `a` (may enhance performance). Default is False.
overwrite_b : bool, optional
Discard data in `b` (may enhance performance). Default is False.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
lapack_driver : str, optional
Which LAPACK driver is used to solve the least-squares problem.
Options are ``'gelsd'``, ``'gelsy'``, ``'gelss'``. Default
(``'gelsd'``) is a good choice. However, ``'gelsy'`` can be slightly
faster on many problems. ``'gelss'`` was used historically. It is
generally slow but uses less memory.
.. versionadded:: 0.17.0
Returns
-------
x : (N,) or (N, K) ndarray
Least-squares solution.
residues : (K,) ndarray or float
Square of the 2-norm for each column in ``b - a x``, if ``M > N`` and
        ``rank(A) == n`` (returns a scalar if ``b`` is 1-D). Otherwise a
(0,)-shaped array is returned.
rank : int
Effective rank of `a`.
s : (min(M, N),) ndarray or None
Singular values of `a`. The condition number of ``a`` is
``s[0] / s[-1]``.
Raises
------
LinAlgError
If computation does not converge.
ValueError
When parameters are not compatible.
See Also
--------
scipy.optimize.nnls : linear least squares with non-negativity constraint
Notes
-----
When ``'gelsy'`` is used as a driver, `residues` is set to a (0,)-shaped
array and `s` is always ``None``.
Examples
--------
>>> import numpy as np
>>> from scipy.linalg import lstsq
>>> import matplotlib.pyplot as plt
Suppose we have the following data:
>>> x = np.array([1, 2.5, 3.5, 4, 5, 7, 8.5])
>>> y = np.array([0.3, 1.1, 1.5, 2.0, 3.2, 6.6, 8.6])
We want to fit a quadratic polynomial of the form ``y = a + b*x**2``
to this data. We first form the "design matrix" M, with a constant
column of 1s and a column containing ``x**2``:
>>> M = x[:, np.newaxis]**[0, 2]
>>> M
array([[ 1. , 1. ],
[ 1. , 6.25],
[ 1. , 12.25],
[ 1. , 16. ],
[ 1. , 25. ],
[ 1. , 49. ],
[ 1. , 72.25]])
We want to find the least-squares solution to ``M.dot(p) = y``,
where ``p`` is a vector with length 2 that holds the parameters
``a`` and ``b``.
>>> p, res, rnk, s = lstsq(M, y)
>>> p
array([ 0.20925829, 0.12013861])
Plot the data and the fitted curve.
>>> plt.plot(x, y, 'o', label='data')
>>> xx = np.linspace(0, 9, 101)
>>> yy = p[0] + p[1]*xx**2
>>> plt.plot(xx, yy, label='least squares fit, $y = a + bx^2$')
>>> plt.xlabel('x')
>>> plt.ylabel('y')
>>> plt.legend(framealpha=1, shadow=True)
>>> plt.grid(alpha=0.25)
>>> plt.show()
"""
a1 = _asarray_validated(a, check_finite=check_finite)
b1 = _asarray_validated(b, check_finite=check_finite)
if len(a1.shape) != 2:
raise ValueError('Input array a should be 2D')
m, n = a1.shape
if len(b1.shape) == 2:
nrhs = b1.shape[1]
else:
nrhs = 1
if m != b1.shape[0]:
raise ValueError('Shape mismatch: a and b should have the same number'
' of rows ({} != {}).'.format(m, b1.shape[0]))
if m == 0 or n == 0: # Zero-sized problem, confuses LAPACK
x = np.zeros((n,) + b1.shape[1:], dtype=np.common_type(a1, b1))
if n == 0:
residues = np.linalg.norm(b1, axis=0)**2
else:
residues = np.empty((0,))
return x, residues, 0, np.empty((0,))
driver = lapack_driver
if driver is None:
driver = lstsq.default_lapack_driver
if driver not in ('gelsd', 'gelsy', 'gelss'):
raise ValueError('LAPACK driver "%s" is not found' % driver)
lapack_func, lapack_lwork = get_lapack_funcs((driver,
'%s_lwork' % driver),
(a1, b1))
real_data = True if (lapack_func.dtype.kind == 'f') else False
if m < n:
# need to extend b matrix as it will be filled with
# a larger solution matrix
if len(b1.shape) == 2:
b2 = np.zeros((n, nrhs), dtype=lapack_func.dtype)
b2[:m, :] = b1
else:
b2 = np.zeros(n, dtype=lapack_func.dtype)
b2[:m] = b1
b1 = b2
overwrite_a = overwrite_a or _datacopied(a1, a)
overwrite_b = overwrite_b or _datacopied(b1, b)
if cond is None:
cond = np.finfo(lapack_func.dtype).eps
if driver in ('gelss', 'gelsd'):
if driver == 'gelss':
lwork = _compute_lwork(lapack_lwork, m, n, nrhs, cond)
v, x, s, rank, work, info = lapack_func(a1, b1, cond, lwork,
overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
elif driver == 'gelsd':
if real_data:
lwork, iwork = _compute_lwork(lapack_lwork, m, n, nrhs, cond)
x, s, rank, info = lapack_func(a1, b1, lwork,
iwork, cond, False, False)
else: # complex data
lwork, rwork, iwork = _compute_lwork(lapack_lwork, m, n,
nrhs, cond)
x, s, rank, info = lapack_func(a1, b1, lwork, rwork, iwork,
cond, False, False)
if info > 0:
raise LinAlgError("SVD did not converge in Linear Least Squares")
if info < 0:
            raise ValueError('illegal value in %d-th argument of internal %s'
                             % (-info, driver))
resids = np.asarray([], dtype=x.dtype)
if m > n:
x1 = x[:n]
if rank == n:
resids = np.sum(np.abs(x[n:])**2, axis=0)
x = x1
return x, resids, rank, s
elif driver == 'gelsy':
lwork = _compute_lwork(lapack_lwork, m, n, nrhs, cond)
jptv = np.zeros((a1.shape[1], 1), dtype=np.int32)
v, x, j, rank, info = lapack_func(a1, b1, jptv, cond,
lwork, False, False)
if info < 0:
raise ValueError("illegal value in %d-th argument of internal "
"gelsy" % -info)
if m > n:
x1 = x[:n]
x = x1
return x, np.array([], x.dtype), rank, None
lstsq.default_lapack_driver = 'gelsd'
def pinv(a, atol=None, rtol=None, return_rank=False, check_finite=True,
cond=None, rcond=None):
"""
Compute the (Moore-Penrose) pseudo-inverse of a matrix.
Calculate a generalized inverse of a matrix using its
singular-value decomposition ``U @ S @ V`` in the economy mode and picking
up only the columns/rows that are associated with significant singular
values.
If ``s`` is the maximum singular value of ``a``, then the
significance cut-off value is determined by ``atol + rtol * s``. Any
singular value below this value is assumed insignificant.
Parameters
----------
a : (M, N) array_like
Matrix to be pseudo-inverted.
atol : float, optional
Absolute threshold term, default value is 0.
.. versionadded:: 1.7.0
rtol : float, optional
Relative threshold term, default value is ``max(M, N) * eps`` where
``eps`` is the machine precision value of the datatype of ``a``.
.. versionadded:: 1.7.0
return_rank : bool, optional
If True, return the effective rank of the matrix.
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
    cond, rcond : float, optional
        In older versions, these values were meant to be used as ``atol`` with
        ``rtol=0``. If both were given, ``rcond`` overwrote ``cond``, so the
        code was not correct. Using these is therefore strongly discouraged;
        the tolerances above are recommended instead. In fact, if provided,
        ``atol`` and ``rtol`` take precedence over these keywords.
.. versionchanged:: 1.7.0
Deprecated in favor of ``rtol`` and ``atol`` parameters above and
will be removed in future versions of SciPy.
.. versionchanged:: 1.3.0
Previously the default cutoff value was just ``eps*f`` where ``f``
was ``1e3`` for single precision and ``1e6`` for double precision.
Returns
-------
B : (N, M) ndarray
The pseudo-inverse of matrix `a`.
rank : int
The effective rank of the matrix. Returned if `return_rank` is True.
Raises
------
LinAlgError
If SVD computation does not converge.
Examples
--------
>>> import numpy as np
>>> from scipy import linalg
>>> rng = np.random.default_rng()
>>> a = rng.standard_normal((9, 6))
>>> B = linalg.pinv(a)
>>> np.allclose(a, a @ B @ a)
True
>>> np.allclose(B, B @ a @ B)
True
"""
a = _asarray_validated(a, check_finite=check_finite)
u, s, vh = _decomp_svd.svd(a, full_matrices=False, check_finite=False)
t = u.dtype.char.lower()
maxS = np.max(s)
if rcond or cond:
warn('Use of the "cond" and "rcond" keywords are deprecated and '
'will be removed in future versions of SciPy. Use "atol" and '
'"rtol" keywords instead', DeprecationWarning, stacklevel=2)
    # Backwards compatible only when atol and rtol are both missing
if (rcond or cond) and (atol is None) and (rtol is None):
atol = rcond or cond
rtol = 0.
atol = 0. if atol is None else atol
rtol = max(a.shape) * np.finfo(t).eps if (rtol is None) else rtol
if (atol < 0.) or (rtol < 0.):
raise ValueError("atol and rtol values must be positive.")
val = atol + maxS * rtol
rank = np.sum(s > val)
u = u[:, :rank]
u /= s[:rank]
B = (u @ vh[:rank]).conj().T
if return_rank:
return B, rank
else:
return B
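# Illustrative note (not part of the library API): how the cutoff above picks
# the rank. With singular values s = [10, 1, 1e-12], atol=0 and rtol=1e-9,
# val = 0 + 10 * 1e-9 = 1e-8, so rank = np.sum(s > val) == 2 and the smallest
# singular value is treated as numerical noise.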
def pinvh(a, atol=None, rtol=None, lower=True, return_rank=False,
check_finite=True):
"""
Compute the (Moore-Penrose) pseudo-inverse of a Hermitian matrix.
Calculate a generalized inverse of a complex Hermitian/real symmetric
matrix using its eigenvalue decomposition and including all eigenvalues
with 'large' absolute value.
Parameters
----------
a : (N, N) array_like
        Real symmetric or complex Hermitian matrix to be pseudo-inverted
atol : float, optional
Absolute threshold term, default value is 0.
.. versionadded:: 1.7.0
rtol : float, optional
Relative threshold term, default value is ``N * eps`` where
``eps`` is the machine precision value of the datatype of ``a``.
.. versionadded:: 1.7.0
lower : bool, optional
Whether the pertinent array data is taken from the lower or upper
triangle of `a`. (Default: lower)
return_rank : bool, optional
If True, return the effective rank of the matrix.
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
B : (N, N) ndarray
The pseudo-inverse of matrix `a`.
rank : int
The effective rank of the matrix. Returned if `return_rank` is True.
Raises
------
LinAlgError
If eigenvalue algorithm does not converge.
Examples
--------
>>> import numpy as np
>>> from scipy.linalg import pinvh
>>> rng = np.random.default_rng()
>>> a = rng.standard_normal((9, 6))
>>> a = np.dot(a, a.T)
>>> B = pinvh(a)
>>> np.allclose(a, a @ B @ a)
True
>>> np.allclose(B, B @ a @ B)
True
"""
a = _asarray_validated(a, check_finite=check_finite)
s, u = _decomp.eigh(a, lower=lower, check_finite=False)
t = u.dtype.char.lower()
maxS = np.max(np.abs(s))
atol = 0. if atol is None else atol
rtol = max(a.shape) * np.finfo(t).eps if (rtol is None) else rtol
if (atol < 0.) or (rtol < 0.):
raise ValueError("atol and rtol values must be positive.")
val = atol + maxS * rtol
above_cutoff = (abs(s) > val)
psigma_diag = 1.0 / s[above_cutoff]
u = u[:, above_cutoff]
B = (u * psigma_diag) @ u.conj().T
if return_rank:
return B, len(psigma_diag)
else:
return B
def matrix_balance(A, permute=True, scale=True, separate=False,
overwrite_a=False):
"""
Compute a diagonal similarity transformation for row/column balancing.
The balancing tries to equalize the row and column 1-norms by applying
a similarity transformation such that the magnitude variation of the
matrix entries is reflected to the scaling matrices.
Moreover, if enabled, the matrix is first permuted to isolate the upper
triangular parts of the matrix and, again if scaling is also enabled,
only the remaining subblocks are subjected to scaling.
The balanced matrix satisfies the following equality
.. math::
B = T^{-1} A T
The scaling coefficients are approximated to the nearest power of 2
to avoid round-off errors.
Parameters
----------
A : (n, n) array_like
Square data matrix for the balancing.
permute : bool, optional
The selector to define whether permutation of A is also performed
prior to scaling.
scale : bool, optional
The selector to turn on and off the scaling. If False, the matrix
will not be scaled.
separate : bool, optional
This switches from returning a full matrix of the transformation
to a tuple of two separate 1-D permutation and scaling arrays.
overwrite_a : bool, optional
This is passed to xGEBAL directly. Essentially, overwrites the result
to the data. It might increase the space efficiency. See LAPACK manual
for details. This is False by default.
Returns
-------
B : (n, n) ndarray
Balanced matrix
T : (n, n) ndarray
A possibly permuted diagonal matrix whose nonzero entries are
integer powers of 2 to avoid numerical truncation errors.
scale, perm : (n,) ndarray
If ``separate`` keyword is set to True then instead of the array
``T`` above, the scaling and the permutation vectors are given
separately as a tuple without allocating the full array ``T``.
Notes
-----
This algorithm is particularly useful for eigenvalue and matrix
decompositions and in many cases it is already called by various
LAPACK routines.
The algorithm is based on the well-known technique of [1]_ and has
been modified to account for special cases. See [2]_ for details
which have been implemented since LAPACK v3.5.0. Before this version
there are corner cases where balancing can actually worsen the
conditioning. See [3]_ for such examples.
The code is a wrapper around LAPACK's xGEBAL routine family for matrix
balancing.
.. versionadded:: 0.19.0
References
----------
.. [1] B.N. Parlett and C. Reinsch, "Balancing a Matrix for
Calculation of Eigenvalues and Eigenvectors", Numerische Mathematik,
Vol.13(4), 1969, :doi:`10.1007/BF02165404`
.. [2] R. James, J. Langou, B.R. Lowery, "On matrix balancing and
eigenvector computation", 2014, :arxiv:`1401.5766`
.. [3] D.S. Watkins. A case where balancing is harmful.
Electron. Trans. Numer. Anal, Vol.23, 2006.
Examples
--------
>>> import numpy as np
>>> from scipy import linalg
>>> x = np.array([[1,2,0], [9,1,0.01], [1,2,10*np.pi]])
>>> y, permscale = linalg.matrix_balance(x)
>>> np.abs(x).sum(axis=0) / np.abs(x).sum(axis=1)
array([ 3.66666667, 0.4995005 , 0.91312162])
>>> np.abs(y).sum(axis=0) / np.abs(y).sum(axis=1)
array([ 1.2 , 1.27041742, 0.92658316]) # may vary
>>> permscale # only powers of 2 (0.5 == 2^(-1))
array([[ 0.5, 0. , 0. ], # may vary
[ 0. , 1. , 0. ],
[ 0. , 0. , 1. ]])
"""
A = np.atleast_2d(_asarray_validated(A, check_finite=True))
if not np.equal(*A.shape):
raise ValueError('The data matrix for balancing should be square.')
gebal = get_lapack_funcs(('gebal'), (A,))
B, lo, hi, ps, info = gebal(A, scale=scale, permute=permute,
overwrite_a=overwrite_a)
if info < 0:
raise ValueError('xGEBAL exited with the internal error '
'"illegal value in argument number {}.". See '
'LAPACK documentation for the xGEBAL error codes.'
''.format(-info))
# Separate the permutations from the scalings and then convert to int
scaling = np.ones_like(ps, dtype=float)
scaling[lo:hi+1] = ps[lo:hi+1]
# gebal uses 1-indexing
ps = ps.astype(int, copy=False) - 1
n = A.shape[0]
perm = np.arange(n)
# LAPACK permutes with the ordering n --> hi, then 0--> lo
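    # Replay those recorded swaps: gebal stores, for each moved row/column,
    # the index it was exchanged with, so walking ps[hi+1:] backwards and
    # ps[:lo] forwards reconstructs the permutation vector.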
if hi < n:
for ind, x in enumerate(ps[hi+1:][::-1], 1):
if n-ind == x:
continue
perm[[x, n-ind]] = perm[[n-ind, x]]
if lo > 0:
for ind, x in enumerate(ps[:lo]):
if ind == x:
continue
perm[[x, ind]] = perm[[ind, x]]
if separate:
return B, (scaling, perm)
# get the inverse permutation
iperm = np.empty_like(perm)
iperm[perm] = np.arange(n)
return B, np.diag(scaling)[iperm, :]
def _validate_args_for_toeplitz_ops(c_or_cr, b, check_finite, keep_b_shape,
enforce_square=True):
"""Validate arguments and format inputs for toeplitz functions
Parameters
----------
c_or_cr : array_like or tuple of (array_like, array_like)
The vector ``c``, or a tuple of arrays (``c``, ``r``). Whatever the
actual shape of ``c``, it will be converted to a 1-D array. If not
supplied, ``r = conjugate(c)`` is assumed; in this case, if c[0] is
real, the Toeplitz matrix is Hermitian. r[0] is ignored; the first row
of the Toeplitz matrix is ``[c[0], r[1:]]``. Whatever the actual shape
of ``r``, it will be converted to a 1-D array.
b : (M,) or (M, K) array_like
Right-hand side in ``T x = b``.
check_finite : bool
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(result entirely NaNs) if the inputs do contain infinities or NaNs.
keep_b_shape : bool
Whether to convert a (M,) dimensional b into a (M, 1) dimensional
matrix.
enforce_square : bool, optional
If True (default), this verifies that the Toeplitz matrix is square.
Returns
-------
r : array
1d array corresponding to the first row of the Toeplitz matrix.
c: array
1d array corresponding to the first column of the Toeplitz matrix.
b: array
(M,), (M, 1) or (M, K) dimensional array, post validation,
corresponding to ``b``.
dtype: numpy datatype
``dtype`` stores the datatype of ``r``, ``c`` and ``b``. If any of
``r``, ``c`` or ``b`` are complex, ``dtype`` is ``np.complex128``,
        otherwise, it is ``np.float64``.
b_shape: tuple
Shape of ``b`` after passing it through ``_asarray_validated``.
"""
if isinstance(c_or_cr, tuple):
c, r = c_or_cr
c = _asarray_validated(c, check_finite=check_finite).ravel()
r = _asarray_validated(r, check_finite=check_finite).ravel()
else:
c = _asarray_validated(c_or_cr, check_finite=check_finite).ravel()
r = c.conjugate()
if b is None:
raise ValueError('`b` must be an array, not None.')
b = _asarray_validated(b, check_finite=check_finite)
b_shape = b.shape
is_not_square = r.shape[0] != c.shape[0]
if (enforce_square and is_not_square) or b.shape[0] != r.shape[0]:
raise ValueError('Incompatible dimensions.')
is_cmplx = np.iscomplexobj(r) or np.iscomplexobj(c) or np.iscomplexobj(b)
dtype = np.complex128 if is_cmplx else np.double
r, c, b = (np.asarray(i, dtype=dtype) for i in (r, c, b))
if b.ndim == 1 and not keep_b_shape:
b = b.reshape(-1, 1)
elif b.ndim != 1:
b = b.reshape(b.shape[0], -1)
return r, c, b, dtype, b_shape
def matmul_toeplitz(c_or_cr, x, check_finite=False, workers=None):
"""Efficient Toeplitz Matrix-Matrix Multiplication using FFT
This function returns the matrix multiplication between a Toeplitz
matrix and a dense matrix.
The Toeplitz matrix has constant diagonals, with c as its first column
and r as its first row. If r is not given, ``r == conjugate(c)`` is
assumed.
Parameters
----------
c_or_cr : array_like or tuple of (array_like, array_like)
The vector ``c``, or a tuple of arrays (``c``, ``r``). Whatever the
actual shape of ``c``, it will be converted to a 1-D array. If not
supplied, ``r = conjugate(c)`` is assumed; in this case, if c[0] is
real, the Toeplitz matrix is Hermitian. r[0] is ignored; the first row
of the Toeplitz matrix is ``[c[0], r[1:]]``. Whatever the actual shape
of ``r``, it will be converted to a 1-D array.
x : (M,) or (M, K) array_like
Matrix with which to multiply.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(result entirely NaNs) if the inputs do contain infinities or NaNs.
workers : int, optional
To pass to scipy.fft.fft and ifft. Maximum number of workers to use
for parallel computation. If negative, the value wraps around from
``os.cpu_count()``. See scipy.fft.fft for more details.
Returns
-------
T @ x : (M,) or (M, K) ndarray
The result of the matrix multiplication ``T @ x``. Shape of return
matches shape of `x`.
See Also
--------
toeplitz : Toeplitz matrix
solve_toeplitz : Solve a Toeplitz system using Levinson Recursion
Notes
-----
The Toeplitz matrix is embedded in a circulant matrix and the FFT is used
to efficiently calculate the matrix-matrix product.
Because the computation is based on the FFT, integer inputs will
result in floating point outputs. This is unlike NumPy's `matmul`,
which preserves the data type of the input.
This is partly based on the implementation that can be found in [1]_,
licensed under the MIT license. More information about the method can be
found in reference [2]_. References [3]_ and [4]_ have more reference
implementations in Python.
.. versionadded:: 1.6.0
References
----------
.. [1] Jacob R Gardner, Geoff Pleiss, David Bindel, Kilian
Q Weinberger, Andrew Gordon Wilson, "GPyTorch: Blackbox Matrix-Matrix
Gaussian Process Inference with GPU Acceleration" with contributions
from Max Balandat and Ruihan Wu. Available online:
https://github.com/cornellius-gp/gpytorch
.. [2] J. Demmel, P. Koev, and X. Li, "A Brief Survey of Direct Linear
Solvers". In Z. Bai, J. Demmel, J. Dongarra, A. Ruhe, and H. van der
Vorst, editors. Templates for the Solution of Algebraic Eigenvalue
Problems: A Practical Guide. SIAM, Philadelphia, 2000. Available at:
http://www.netlib.org/utk/people/JackDongarra/etemplates/node384.html
.. [3] R. Scheibler, E. Bezzam, I. Dokmanic, Pyroomacoustics: A Python
package for audio room simulations and array processing algorithms,
Proc. IEEE ICASSP, Calgary, CA, 2018.
https://github.com/LCAV/pyroomacoustics/blob/pypi-release/
pyroomacoustics/adaptive/util.py
.. [4] Marano S, Edwards B, Ferrari G and Fah D (2017), "Fitting
Earthquake Spectra: Colored Noise and Incomplete Data", Bulletin of
the Seismological Society of America., January, 2017. Vol. 107(1),
pp. 276-291.
Examples
--------
Multiply the Toeplitz matrix T with matrix x::
[ 1 -1 -2 -3] [1 10]
T = [ 3 1 -1 -2] x = [2 11]
[ 6 3 1 -1] [2 11]
[10 6 3 1] [5 19]
To specify the Toeplitz matrix, only the first column and the first
row are needed.
>>> import numpy as np
>>> c = np.array([1, 3, 6, 10]) # First column of T
>>> r = np.array([1, -1, -2, -3]) # First row of T
>>> x = np.array([[1, 10], [2, 11], [2, 11], [5, 19]])
>>> from scipy.linalg import toeplitz, matmul_toeplitz
>>> matmul_toeplitz((c, r), x)
array([[-20., -80.],
[ -7., -8.],
[ 9., 85.],
[ 33., 218.]])
Check the result by creating the full Toeplitz matrix and
multiplying it by ``x``.
>>> toeplitz(c, r) @ x
array([[-20, -80],
[ -7, -8],
[ 9, 85],
[ 33, 218]])
The full matrix is never formed explicitly, so this routine
is suitable for very large Toeplitz matrices.
>>> n = 1000000
>>> matmul_toeplitz([1] + [0]*(n-1), np.ones(n))
array([1., 1., 1., ..., 1., 1., 1.])
"""
from ..fft import fft, ifft, rfft, irfft
r, c, x, dtype, x_shape = _validate_args_for_toeplitz_ops(
c_or_cr, x, check_finite, keep_b_shape=False, enforce_square=False)
n, m = x.shape
T_nrows = len(c)
T_ncols = len(r)
p = T_nrows + T_ncols - 1 # equivalent to len(embedded_col)
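    # First column of the circulant embedding of T: c stacked on the reversed
    # tail of r. Multiplying by this circulant is a cyclic convolution, so it
    # diagonalizes under the FFT; the first T_nrows entries of the product
    # recover T @ x.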
embedded_col = np.concatenate((c, r[-1:0:-1]))
if np.iscomplexobj(embedded_col) or np.iscomplexobj(x):
fft_mat = fft(embedded_col, axis=0, workers=workers).reshape(-1, 1)
fft_x = fft(x, n=p, axis=0, workers=workers)
mat_times_x = ifft(fft_mat*fft_x, axis=0,
workers=workers)[:T_nrows, :]
else:
# Real inputs; using rfft is faster
fft_mat = rfft(embedded_col, axis=0, workers=workers).reshape(-1, 1)
fft_x = rfft(x, n=p, axis=0, workers=workers)
mat_times_x = irfft(fft_mat*fft_x, axis=0,
workers=workers, n=p)[:T_nrows, :]
return_shape = (T_nrows,) if len(x_shape) == 1 else (T_nrows, m)
return mat_times_x.reshape(*return_shape)
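# Illustrative sketch (not part of the library API): the circulant-embedding
# trick used above, written out with plain NumPy for a concrete 4x4 Toeplitz
# matrix. The helper name below is hypothetical.
def _toeplitz_matvec_demo():
    import numpy as np
    from scipy.linalg import toeplitz
    c = np.array([1., 3., 6., 10.])    # first column of T
    r = np.array([1., -1., -2., -3.])  # first row of T
    x = np.array([1., 2., 2., 5.])
    # Embed T in a circulant matrix whose first column is c followed by the
    # reversed tail of r; T @ x is then the head of a cyclic convolution.
    col = np.concatenate((c, r[-1:0:-1]))
    p = len(col)
    y = np.fft.ifft(np.fft.fft(col) * np.fft.fft(x, n=p))[:len(c)].real
    assert np.allclose(y, toeplitz(c, r) @ x)  # matches the dense product
    return y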
| bsd-3-clause |
anntzer/scipy | benchmarks/benchmarks/peak_finding.py | 10 | 1523 | """Benchmarks for peak finding related functions."""
from .common import Benchmark, safe_import
with safe_import():
from scipy.signal import find_peaks, peak_prominences, peak_widths
from scipy.datasets import electrocardiogram
class FindPeaks(Benchmark):
"""Benchmark `scipy.signal.find_peaks`.
Notes
-----
    The first value of `distance` is None, in which case the benchmark shows
    the actual speed of the underlying maxima finding function.
"""
param_names = ['distance']
params = [[None, 8, 64, 512, 4096]]
def setup(self, distance):
self.x = electrocardiogram()
def time_find_peaks(self, distance):
find_peaks(self.x, distance=distance)
class PeakProminences(Benchmark):
"""Benchmark `scipy.signal.peak_prominences`."""
param_names = ['wlen']
params = [[None, 8, 64, 512, 4096]]
def setup(self, wlen):
self.x = electrocardiogram()
self.peaks = find_peaks(self.x)[0]
def time_peak_prominences(self, wlen):
peak_prominences(self.x, self.peaks, wlen)
class PeakWidths(Benchmark):
"""Benchmark `scipy.signal.peak_widths`."""
param_names = ['rel_height']
params = [[0, 0.25, 0.5, 0.75, 1]]
def setup(self, rel_height):
self.x = electrocardiogram()
self.peaks = find_peaks(self.x)[0]
self.prominence_data = peak_prominences(self.x, self.peaks)
def time_peak_widths(self, rel_height):
peak_widths(self.x, self.peaks, rel_height, self.prominence_data)
| bsd-3-clause |
jhaux/tensorflow | tensorflow/contrib/learn/python/learn/preprocessing/categorical.py | 151 | 4269 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implements preprocessing transformers for categorical variables."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
# pylint: disable=g-bad-import-order
from . import categorical_vocabulary
from ..learn_io.data_feeder import setup_processor_data_feeder
# pylint: enable=g-bad-import-order
class CategoricalProcessor(object):
"""Maps documents to sequences of word ids.
  As a common convention, NaN values are handled as unknown tokens.
Both float('nan') and np.nan are accepted.
"""
def __init__(self, min_frequency=0, share=False, vocabularies=None):
"""Initializes a CategoricalProcessor instance.
Args:
min_frequency: Minimum frequency of categories in the vocabulary.
share: Share vocabulary between variables.
vocabularies: list of CategoricalVocabulary objects for each variable in
the input dataset.
Attributes:
vocabularies_: list of CategoricalVocabulary objects.
"""
self.min_frequency = min_frequency
self.share = share
self.vocabularies_ = vocabularies
def freeze(self, freeze=True):
"""Freeze or unfreeze all vocabularies.
Args:
freeze: Boolean, indicate if vocabularies should be frozen.
"""
for vocab in self.vocabularies_:
vocab.freeze(freeze)
def fit(self, x, unused_y=None):
"""Learn a vocabulary dictionary of all categories in `x`.
Args:
x: numpy matrix or iterable of lists/numpy arrays.
unused_y: to match fit format signature of estimators.
Returns:
self
"""
x = setup_processor_data_feeder(x)
for row in x:
# Create vocabularies if not given.
if self.vocabularies_ is None:
# If not share, one per column, else one shared across.
if not self.share:
self.vocabularies_ = [
categorical_vocabulary.CategoricalVocabulary() for _ in row
]
else:
vocab = categorical_vocabulary.CategoricalVocabulary()
self.vocabularies_ = [vocab for _ in row]
for idx, value in enumerate(row):
        # NaNs are handled as unknowns (np.nan is a float, so the isinstance
        # check covers it; comparing with ``== np.nan`` is always False).
        if isinstance(value, float) and math.isnan(value):
continue
self.vocabularies_[idx].add(value)
if self.min_frequency > 0:
for vocab in self.vocabularies_:
vocab.trim(self.min_frequency)
self.freeze()
return self
def fit_transform(self, x, unused_y=None):
"""Learn the vocabulary dictionary and return indexies of categories.
Args:
x: numpy matrix or iterable of lists/numpy arrays.
unused_y: to match fit_transform signature of estimators.
Returns:
x: iterable, [n_samples]. Category-id matrix.
"""
self.fit(x)
return self.transform(x)
def transform(self, x):
"""Transform documents to category-id matrix.
    Converts categories to ids given the fitted vocabulary from `fit` or
    one provided in the constructor.
Args:
x: numpy matrix or iterable of lists/numpy arrays.
Yields:
x: iterable, [n_samples]. Category-id matrix.
"""
self.freeze()
x = setup_processor_data_feeder(x)
for row in x:
output_row = []
for idx, value in enumerate(row):
# Return <UNK> when it's Nan.
if (isinstance(value, float) and math.isnan(value)) or value == np.nan:
output_row.append(0)
continue
output_row.append(self.vocabularies_[idx].get(value))
yield np.array(output_row, dtype=np.int64)
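# Illustrative usage sketch (not part of TensorFlow; the helper name is
# hypothetical). Assumes list-of-lists input is accepted by the data feeder.
# Id 0 is reserved for unknown/NaN tokens, so real categories start at 1.
def _categorical_processor_demo():
  processor = CategoricalProcessor()
  rows = [['red', 'small'], ['blue', 'large'], ['red', 'large']]
  # fit_transform yields one int64 id array per input row, with a separate
  # per-column vocabulary since share=False by default.
  return list(processor.fit_transform(rows))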
| apache-2.0 |
margulies/topography | sandbox/individual_distance/individual_dist_label.py | 1 | 4395 | #!/usr/bin/python
import os, numpy as np, scipy as sp
import nibabel as nib
import nibabel.freesurfer as fs  # fs.io.read_label / fs.io.read_annot are used below
from surfer import Brain
import h5py
from sklearn import preprocessing
from sklearn.utils.arpack import eigsh
# Set defaults:
dataDir = '/afs/cbs.mpg.de/projects/mar005_lsd-lemon-surf/probands'
fsDir = '/afs/cbs.mpg.de/projects/mar004_lsd-lemon-preproc/freesurfer'
output_base_dir = '/scr/liberia1'
subjects = ['26410']
def vizBrain(data, subject_id='fsaverage5', hemi='lh', surface='pial', filename='brain.png'):
brain = Brain(subject_id, hemi, surface)
dmin = data.min()#+(data.std()/2)
dmax = data.max()#-(data.std()/2)
brain.add_data(data, dmin, dmax, colormap="hot", alpha=0.7)
brain.save_montage(filename, order=['lat', 'med'], orientation='h', border_size=10)
def DoFiedler(conn):
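    # A sketch of the standard diffusion-map recipe: shift correlations from
    # [-1, 1] into [0, 1], symmetrically normalize the affinity
    # (A = D^-1/2 K D^-1/2), take the leading eigenvectors, and rescale them
    # by lambda / (1 - lambda); component 1 is the Fiedler-like gradient.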
# prep for embedding
K = (conn + 1) / 2.
v = np.sqrt(np.sum(K, axis=1))
A = K/(v[:, None] * v[None, :])
del K
A = np.squeeze(A * [A > 0])
# diffusion embedding
n_components_embedding = 2
lambdas, vectors = eigsh(A, k=n_components_embedding+1)
del A
lambdas = lambdas[::-1]
vectors = vectors[:, ::-1]
psi = vectors/vectors[:, 0][:, None]
lambdas = lambdas[1:] / (1 - lambdas[1:])
embedding = psi[:, 1:(n_components_embedding + 1 + 1)] * lambdas[:n_components_embedding+1][None, :]
return embedding
for subject in subjects:
for hemi in ['lh', 'rh']:
# read in data
cort = np.sort(fs.io.read_label('%s/fsaverage5/label/%s.cortex.label' % (fsDir, hemi)))
dataCorr = np.load('%s/%s/correlation_maps/%s_lsd_corr_1ab_fsa5_%s.npy' % (dataDir, subject, subject, hemi))
fullsize = len(dataCorr)
distFile = '%s/%s/distance_maps/%s_%s_geoDist_fsa5.mat' % (dataDir, subject, subject, hemi)
with h5py.File(distFile, 'r') as f:
dist = f['dataAll'][()]
dist = dist[cort, :][:, cort]
min_max_scaler = preprocessing.MinMaxScaler()
dist_scaled = min_max_scaler.fit_transform(dist)
del dist
distmat = np.zeros((fullsize, fullsize))
distmat[np.ix_(cort, cort)] = dist_scaled
del dist_scaled
# embedding
embedding = DoFiedler(dataCorr[cort, :][:, cort]) # see below for details
del dataCorr
# reinsert zeros:
fiedler = np.zeros(fullsize)
        fiedler[cort] = embedding[:, 1]  # the Fiedler vector is the eigenvector
        # belonging to the second smallest eigenvalue (hence column 1, not 0)
# TODO: use individual-specific annot file, need to be transformed from individual space to fsa5 space
fs_annot = fs.io.read_annot('/afs/cbs.mpg.de/projects/mar004_lsd-lemon-preproc/freesurfer/fsaverage5/label/lh.aparc.a2009s.annot')#'%s/%s/label/%s.aparc.a2009s.annot' % (fsDir, subject, hemi))
index = [i for i, s in enumerate(list(fs_annot[2])) if 'G_pariet_inf-Angular' in s]
label_parietal = fs_annot[0] == index
masked_fiedler = fiedler * label_parietal
parietal_index = np.where(masked_fiedler == max(masked_fiedler))
        # compare means over nonzero entries (np.nonzero returns indices, not
        # values, so index into the arrays before averaging)
        if np.mean(masked_fiedler[masked_fiedler != 0]) > np.mean(fiedler[fiedler != 0]):
parietal_index = np.where(masked_fiedler == max(masked_fiedler))
else:
parietal_index = np.where(masked_fiedler == min(masked_fiedler))
# TODO: add this as second overlay to surface instead of changing value in masked_fiedler
masked_fiedler[parietal_index] = 1
vizBrain(masked_fiedler) # TODO: save to disc for qc
label_list = ['S_calcarine'] # TODO: add other labels
g = 0
indices = [i for i, s in enumerate(list(fs_annot[2])) if label_list[g] in s]
label_dist = np.min(distmat[np.where(fs_annot[0] == indices),:], axis=1).squeeze()
# TODO: add this as second overlay to surface instead of changing value in label_dist
label_dist[parietal_index] = 1
vizBrain(label_dist) # TODO: save to disc
# save out anat_dist for subject / hemi / anat label
# also create images for quality control: fiedler, masked_fiedler
| mit |
heli522/scikit-learn | sklearn/metrics/cluster/tests/test_unsupervised.py | 228 | 2823 | import numpy as np
from scipy.sparse import csr_matrix
from sklearn import datasets
from sklearn.metrics.cluster.unsupervised import silhouette_score
from sklearn.metrics import pairwise_distances
from sklearn.utils.testing import assert_false, assert_almost_equal
from sklearn.utils.testing import assert_raises_regexp
def test_silhouette():
# Tests the Silhouette Coefficient.
dataset = datasets.load_iris()
X = dataset.data
y = dataset.target
D = pairwise_distances(X, metric='euclidean')
# Given that the actual labels are used, we can assume that S would be
# positive.
silhouette = silhouette_score(D, y, metric='precomputed')
assert(silhouette > 0)
# Test without calculating D
silhouette_metric = silhouette_score(X, y, metric='euclidean')
assert_almost_equal(silhouette, silhouette_metric)
# Test with sampling
silhouette = silhouette_score(D, y, metric='precomputed',
sample_size=int(X.shape[0] / 2),
random_state=0)
silhouette_metric = silhouette_score(X, y, metric='euclidean',
sample_size=int(X.shape[0] / 2),
random_state=0)
assert(silhouette > 0)
assert(silhouette_metric > 0)
assert_almost_equal(silhouette_metric, silhouette)
# Test with sparse X
X_sparse = csr_matrix(X)
D = pairwise_distances(X_sparse, metric='euclidean')
silhouette = silhouette_score(D, y, metric='precomputed')
assert(silhouette > 0)
def test_no_nan():
# Assert Silhouette Coefficient != nan when there is 1 sample in a class.
# This tests for the condition that caused issue 960.
# Note that there is only one sample in cluster 0. This used to cause the
# silhouette_score to return nan (see bug #960).
labels = np.array([1, 0, 1, 1, 1])
# The distance matrix doesn't actually matter.
D = np.random.RandomState(0).rand(len(labels), len(labels))
silhouette = silhouette_score(D, labels, metric='precomputed')
assert_false(np.isnan(silhouette))
def test_correct_labelsize():
# Assert 1 < n_labels < n_samples
dataset = datasets.load_iris()
X = dataset.data
# n_labels = n_samples
y = np.arange(X.shape[0])
assert_raises_regexp(ValueError,
'Number of labels is %d\. Valid values are 2 '
'to n_samples - 1 \(inclusive\)' % len(np.unique(y)),
silhouette_score, X, y)
# n_labels = 1
y = np.zeros(X.shape[0])
assert_raises_regexp(ValueError,
'Number of labels is %d\. Valid values are 2 '
'to n_samples - 1 \(inclusive\)' % len(np.unique(y)),
silhouette_score, X, y)
| bsd-3-clause |
arabenjamin/scikit-learn | examples/text/document_clustering.py | 228 | 8356 | """
=======================================
Clustering text documents using k-means
=======================================
This is an example showing how scikit-learn can be used to cluster
documents by topics using a bag-of-words approach. This example uses
a scipy.sparse matrix to store the features instead of standard numpy arrays.
Two feature extraction methods can be used in this example:
  - TfidfVectorizer uses an in-memory vocabulary (a Python dict) to map the most
frequent words to features indices and hence compute a word occurrence
frequency (sparse) matrix. The word frequencies are then reweighted using
the Inverse Document Frequency (IDF) vector collected feature-wise over
the corpus.
- HashingVectorizer hashes word occurrences to a fixed dimensional space,
possibly with collisions. The word count vectors are then normalized to
each have l2-norm equal to one (projected to the euclidean unit-ball) which
seems to be important for k-means to work in high dimensional space.
HashingVectorizer does not provide IDF weighting as this is a stateless
model (the fit method does nothing). When IDF weighting is needed it can
be added by pipelining its output to a TfidfTransformer instance.
Two algorithms are demoed: ordinary k-means and its more scalable cousin
minibatch k-means.
Additionally, latent semantic analysis can also be used to reduce dimensionality
and discover latent patterns in the data.
It can be noted that k-means (and minibatch k-means) are very sensitive to
feature scaling and that in this case the IDF weighting helps improve the
quality of the clustering by quite a lot as measured against the "ground truth"
provided by the class label assignments of the 20 newsgroups dataset.
This improvement is not visible in the Silhouette Coefficient, which is small
for both, as this measure seems to suffer from the phenomenon called
"Concentration of Measure" or "Curse of Dimensionality" on high-dimensional
datasets such as text data. Other measures, such as V-measure and Adjusted Rand
Index, are information-theoretic evaluation scores: since they are only based
on cluster assignments rather than distances, they are not affected by the
curse of dimensionality.
Note: as k-means is optimizing a non-convex objective function, it will likely
end up in a local optimum. Several runs with independent random init might be
necessary to get a good convergence.
"""
# Author: Peter Prettenhofer <peter.prettenhofer@gmail.com>
# Lars Buitinck <L.J.Buitinck@uva.nl>
# License: BSD 3 clause
from __future__ import print_function
from sklearn.datasets import fetch_20newsgroups
from sklearn.decomposition import TruncatedSVD
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import Normalizer
from sklearn import metrics
from sklearn.cluster import KMeans, MiniBatchKMeans
import logging
from optparse import OptionParser
import sys
from time import time
import numpy as np
# Display progress logs on stdout
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
# parse commandline arguments
op = OptionParser()
op.add_option("--lsa",
dest="n_components", type="int",
help="Preprocess documents with latent semantic analysis.")
op.add_option("--no-minibatch",
action="store_false", dest="minibatch", default=True,
help="Use ordinary k-means algorithm (in batch mode).")
op.add_option("--no-idf",
action="store_false", dest="use_idf", default=True,
help="Disable Inverse Document Frequency feature weighting.")
op.add_option("--use-hashing",
action="store_true", default=False,
help="Use a hashing feature vectorizer")
op.add_option("--n-features", type=int, default=10000,
help="Maximum number of features (dimensions)"
" to extract from text.")
op.add_option("--verbose",
action="store_true", dest="verbose", default=False,
help="Print progress reports inside k-means algorithm.")
print(__doc__)
op.print_help()
(opts, args) = op.parse_args()
if len(args) > 0:
op.error("this script takes no arguments.")
sys.exit(1)
###############################################################################
# Load some categories from the training set
categories = [
'alt.atheism',
'talk.religion.misc',
'comp.graphics',
'sci.space',
]
# Uncomment the following to do the analysis on all the categories
#categories = None
print("Loading 20 newsgroups dataset for categories:")
print(categories)
dataset = fetch_20newsgroups(subset='all', categories=categories,
shuffle=True, random_state=42)
print("%d documents" % len(dataset.data))
print("%d categories" % len(dataset.target_names))
print()
labels = dataset.target
true_k = np.unique(labels).shape[0]
print("Extracting features from the training dataset using a sparse vectorizer")
t0 = time()
if opts.use_hashing:
if opts.use_idf:
# Perform an IDF normalization on the output of HashingVectorizer
hasher = HashingVectorizer(n_features=opts.n_features,
stop_words='english', non_negative=True,
norm=None, binary=False)
vectorizer = make_pipeline(hasher, TfidfTransformer())
else:
vectorizer = HashingVectorizer(n_features=opts.n_features,
stop_words='english',
non_negative=False, norm='l2',
binary=False)
else:
vectorizer = TfidfVectorizer(max_df=0.5, max_features=opts.n_features,
min_df=2, stop_words='english',
use_idf=opts.use_idf)
X = vectorizer.fit_transform(dataset.data)
print("done in %fs" % (time() - t0))
print("n_samples: %d, n_features: %d" % X.shape)
print()
if opts.n_components:
print("Performing dimensionality reduction using LSA")
t0 = time()
# Vectorizer results are normalized, which makes KMeans behave as
# spherical k-means for better results. Since LSA/SVD results are
# not normalized, we have to redo the normalization.
svd = TruncatedSVD(opts.n_components)
normalizer = Normalizer(copy=False)
lsa = make_pipeline(svd, normalizer)
X = lsa.fit_transform(X)
print("done in %fs" % (time() - t0))
explained_variance = svd.explained_variance_ratio_.sum()
print("Explained variance of the SVD step: {}%".format(
int(explained_variance * 100)))
print()
###############################################################################
# Do the actual clustering
if opts.minibatch:
km = MiniBatchKMeans(n_clusters=true_k, init='k-means++', n_init=1,
init_size=1000, batch_size=1000, verbose=opts.verbose)
else:
km = KMeans(n_clusters=true_k, init='k-means++', max_iter=100, n_init=1,
verbose=opts.verbose)
print("Clustering sparse data with %s" % km)
t0 = time()
km.fit(X)
print("done in %0.3fs" % (time() - t0))
print()
print("Homogeneity: %0.3f" % metrics.homogeneity_score(labels, km.labels_))
print("Completeness: %0.3f" % metrics.completeness_score(labels, km.labels_))
print("V-measure: %0.3f" % metrics.v_measure_score(labels, km.labels_))
print("Adjusted Rand-Index: %.3f"
% metrics.adjusted_rand_score(labels, km.labels_))
print("Silhouette Coefficient: %0.3f"
% metrics.silhouette_score(X, km.labels_, sample_size=1000))
print()
if not opts.use_hashing:
print("Top terms per cluster:")
if opts.n_components:
original_space_centroids = svd.inverse_transform(km.cluster_centers_)
order_centroids = original_space_centroids.argsort()[:, ::-1]
else:
order_centroids = km.cluster_centers_.argsort()[:, ::-1]
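    # In both branches, argsort()[:, ::-1] sorts each centroid's feature
    # weights in descending order, so the leading indices point at the most
    # representative terms for that cluster.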
terms = vectorizer.get_feature_names()
for i in range(true_k):
print("Cluster %d:" % i, end='')
for ind in order_centroids[i, :10]:
print(' %s' % terms[ind], end='')
print()
| bsd-3-clause |
michellemorales/OpenMM | models/cognitive_mapping_and_planning/cfgs/config_common.py | 14 | 9658 | # Copyright 2016 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
import numpy as np
import logging
import src.utils as utils
import datasets.nav_env_config as nec
from datasets import factory
def adjust_args_for_mode(args, mode):
if mode == 'train':
args.control.train = True
elif mode == 'val1':
# Same settings as for training, to make sure nothing wonky is happening
# there.
args.control.test = True
args.control.test_mode = 'val'
args.navtask.task_params.batch_size = 32
elif mode == 'val2':
# No data augmentation, not sampling but taking the argmax action, not
# sampling from the ground truth at all.
args.control.test = True
args.arch.action_sample_type = 'argmax'
args.arch.sample_gt_prob_type = 'zero'
args.navtask.task_params.data_augment = \
utils.Foo(lr_flip=0, delta_angle=0, delta_xy=0, relight=False,
relight_fast=False, structured=False)
args.control.test_mode = 'val'
args.navtask.task_params.batch_size = 32
elif mode == 'bench':
# Actually testing the agent in settings that are kept same between
# different runs.
args.navtask.task_params.batch_size = 16
args.control.test = True
args.arch.action_sample_type = 'argmax'
args.arch.sample_gt_prob_type = 'zero'
args.navtask.task_params.data_augment = \
utils.Foo(lr_flip=0, delta_angle=0, delta_xy=0, relight=False,
relight_fast=False, structured=False)
args.summary.test_iters = 250
args.control.only_eval_when_done = True
args.control.reset_rng_seed = True
args.control.test_mode = 'test'
else:
logging.fatal('Unknown mode: %s.', mode)
assert(False)
return args
def get_solver_vars(solver_str):
if solver_str == '': vals = [];
else: vals = solver_str.split('_')
  # 'rlw' must be listed here as well: the defaults appended below fill 8
  # slots, so an explicit solver string with five or more components would
  # otherwise map its values to the wrong keys.
  ks = ['clip', 'dlw', 'long', 'typ', 'rlw', 'isdk', 'adam_eps', 'init_lr'];
ks = ks[:len(vals)]
# Gradient clipping or not.
if len(vals) == 0: ks.append('clip'); vals.append('noclip');
# data loss weight.
if len(vals) == 1: ks.append('dlw'); vals.append('dlw20')
# how long to train for.
if len(vals) == 2: ks.append('long'); vals.append('nolong')
# Adam
if len(vals) == 3: ks.append('typ'); vals.append('adam2')
# reg loss wt
if len(vals) == 4: ks.append('rlw'); vals.append('rlw1')
# isd_k
if len(vals) == 5: ks.append('isdk'); vals.append('isdk415') # 415, inflexion at 2.5k.
# adam eps
if len(vals) == 6: ks.append('adam_eps'); vals.append('aeps1en8')
# init lr
if len(vals) == 7: ks.append('init_lr'); vals.append('lr1en3')
assert(len(vals) == 8)
vars = utils.Foo()
for k, v in zip(ks, vals):
setattr(vars, k, v)
logging.error('solver_vars: %s', vars)
return vars
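# Illustration (hypothetical string, not part of the original configs): for
# solver_str = 'clip2_dlw20_long2_adam2' the parser yields clip='clip2',
# dlw='dlw20', long='long2', typ='adam2', and fills the remaining slots with
# the defaults rlw='rlw1', isdk='isdk415', adam_eps='aeps1en8',
# init_lr='lr1en3'.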
def process_solver_str(solver_str):
solver = utils.Foo(
seed=0, learning_rate_decay=None, clip_gradient_norm=None, max_steps=None,
initial_learning_rate=None, momentum=None, steps_per_decay=None,
logdir=None, sync=False, adjust_lr_sync=True, wt_decay=0.0001,
data_loss_wt=None, reg_loss_wt=None, freeze_conv=True, num_workers=1,
task=0, ps_tasks=0, master='local', typ=None, momentum2=None,
adam_eps=None)
# Clobber with overrides from solver str.
solver_vars = get_solver_vars(solver_str)
solver.data_loss_wt = float(solver_vars.dlw[3:].replace('x', '.'))
solver.adam_eps = float(solver_vars.adam_eps[4:].replace('x', '.').replace('n', '-'))
solver.initial_learning_rate = float(solver_vars.init_lr[2:].replace('x', '.').replace('n', '-'))
solver.reg_loss_wt = float(solver_vars.rlw[3:].replace('x', '.'))
solver.isd_k = float(solver_vars.isdk[4:].replace('x', '.'))
long = solver_vars.long
if long == 'long':
solver.steps_per_decay = 40000
solver.max_steps = 120000
elif long == 'long2':
solver.steps_per_decay = 80000
solver.max_steps = 120000
elif long == 'nolong' or long == 'nol':
solver.steps_per_decay = 20000
solver.max_steps = 60000
else:
logging.fatal('solver_vars.long should be long, long2, nolong or nol.')
assert(False)
clip = solver_vars.clip
if clip == 'noclip' or clip == 'nocl':
solver.clip_gradient_norm = 0
elif clip[:4] == 'clip':
solver.clip_gradient_norm = float(clip[4:].replace('x', '.'))
else:
logging.fatal('Unknown solver_vars.clip: %s', clip)
assert(False)
typ = solver_vars.typ
if typ == 'adam':
solver.typ = 'adam'
solver.momentum = 0.9
solver.momentum2 = 0.999
solver.learning_rate_decay = 1.0
elif typ == 'adam2':
solver.typ = 'adam'
solver.momentum = 0.9
solver.momentum2 = 0.999
solver.learning_rate_decay = 0.1
elif typ == 'sgd':
solver.typ = 'sgd'
solver.momentum = 0.99
solver.momentum2 = None
solver.learning_rate_decay = 0.1
else:
logging.fatal('Unknown solver_vars.typ: %s', typ)
assert(False)
logging.error('solver: %s', solver)
return solver
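# Continuing the hypothetical example above, 'clip2_dlw20_long2_adam2' gives
# clip_gradient_norm=2.0, data_loss_wt=20.0, steps_per_decay=80000,
# max_steps=120000, Adam with learning_rate_decay=0.1, reg_loss_wt=1.0,
# isd_k=415.0, adam_eps=1e-8 and initial_learning_rate=1e-3.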
def get_navtask_vars(navtask_str):
if navtask_str == '': vals = []
else: vals = navtask_str.split('_')
ks_all = ['dataset_name', 'modality', 'task', 'history', 'max_dist',
'num_steps', 'step_size', 'n_ori', 'aux_views', 'data_aug']
ks = ks_all[:len(vals)]
# All data or not.
if len(vals) == 0: ks.append('dataset_name'); vals.append('sbpd')
# modality
if len(vals) == 1: ks.append('modality'); vals.append('rgb')
# semantic task?
if len(vals) == 2: ks.append('task'); vals.append('r2r')
# number of history frames.
if len(vals) == 3: ks.append('history'); vals.append('h0')
# max steps
if len(vals) == 4: ks.append('max_dist'); vals.append('32')
# num steps
if len(vals) == 5: ks.append('num_steps'); vals.append('40')
# step size
if len(vals) == 6: ks.append('step_size'); vals.append('8')
# n_ori
if len(vals) == 7: ks.append('n_ori'); vals.append('4')
# Auxiliary views.
if len(vals) == 8: ks.append('aux_views'); vals.append('nv0')
# Normal data augmentation as opposed to structured data augmentation (if set
# to straug.
if len(vals) == 9: ks.append('data_aug'); vals.append('straug')
assert(len(vals) == 10)
for i in range(len(ks)):
assert(ks[i] == ks_all[i])
vars = utils.Foo()
for k, v in zip(ks, vals):
setattr(vars, k, v)
  logging.error('navtask_vars: %s', vars)
return vars
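# Illustration (hypothetical string): navtask_str = 'sbpd_d_ST' sets
# dataset_name='sbpd', modality='d' (depth) and task='ST', with the remaining
# fields taken from the defaults above (h0 history, max_dist 32, 40 steps,
# step size 8, 4 orientations, no aux views, structured augmentation).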
def process_navtask_str(navtask_str):
navtask = nec.nav_env_base_config()
# Clobber with overrides from strings.
navtask_vars = get_navtask_vars(navtask_str)
navtask.task_params.n_ori = int(navtask_vars.n_ori)
navtask.task_params.max_dist = int(navtask_vars.max_dist)
navtask.task_params.num_steps = int(navtask_vars.num_steps)
navtask.task_params.step_size = int(navtask_vars.step_size)
navtask.task_params.data_augment.delta_xy = int(navtask_vars.step_size)/2.
n_aux_views_each = int(navtask_vars.aux_views[2])
aux_delta_thetas = np.concatenate((np.arange(n_aux_views_each) + 1,
-1 -np.arange(n_aux_views_each)))
aux_delta_thetas = aux_delta_thetas*np.deg2rad(navtask.camera_param.fov)
navtask.task_params.aux_delta_thetas = aux_delta_thetas
if navtask_vars.data_aug == 'aug':
navtask.task_params.data_augment.structured = False
elif navtask_vars.data_aug == 'straug':
navtask.task_params.data_augment.structured = True
else:
logging.fatal('Unknown navtask_vars.data_aug %s.', navtask_vars.data_aug)
assert(False)
navtask.task_params.num_history_frames = int(navtask_vars.history[1:])
navtask.task_params.n_views = 1+navtask.task_params.num_history_frames
navtask.task_params.goal_channels = int(navtask_vars.n_ori)
if navtask_vars.task == 'hard':
navtask.task_params.type = 'rng_rejection_sampling_many'
navtask.task_params.rejection_sampling_M = 2000
navtask.task_params.min_dist = 10
elif navtask_vars.task == 'r2r':
navtask.task_params.type = 'room_to_room_many'
elif navtask_vars.task == 'ST':
# Semantic task at hand.
navtask.task_params.goal_channels = \
len(navtask.task_params.semantic_task.class_map_names)
navtask.task_params.rel_goal_loc_dim = \
len(navtask.task_params.semantic_task.class_map_names)
navtask.task_params.type = 'to_nearest_obj_acc'
else:
    logging.fatal('navtask_vars.task should be hard, r2r or ST.')
assert(False)
if navtask_vars.modality == 'rgb':
navtask.camera_param.modalities = ['rgb']
navtask.camera_param.img_channels = 3
elif navtask_vars.modality == 'd':
navtask.camera_param.modalities = ['depth']
navtask.camera_param.img_channels = 2
navtask.task_params.img_height = navtask.camera_param.height
navtask.task_params.img_width = navtask.camera_param.width
navtask.task_params.modalities = navtask.camera_param.modalities
navtask.task_params.img_channels = navtask.camera_param.img_channels
navtask.task_params.img_fov = navtask.camera_param.fov
navtask.dataset = factory.get_dataset(navtask_vars.dataset_name)
return navtask
| gpl-2.0 |
ChadFulton/statsmodels | statsmodels/stats/tests/test_diagnostic.py | 1 | 43366 | # -*- coding: utf-8 -*-
"""Tests for Regression Diagnostics and Specification Tests
Created on Thu Feb 09 13:19:47 2012
Author: Josef Perktold
License: BSD-3
currently all tests are against R
"""
import os
import numpy as np
import pandas as pd
from numpy.testing import (assert_, assert_almost_equal, assert_equal,
assert_allclose, assert_array_equal)
import pytest
from statsmodels.regression.linear_model import OLS
from statsmodels.tools.tools import add_constant
from statsmodels.datasets import macrodata
import statsmodels.stats.sandwich_covariance as sw
import statsmodels.stats.diagnostic as smsdia
import json
import statsmodels.stats.outliers_influence as oi
cur_dir = os.path.abspath(os.path.dirname(__file__))
def compare_t_est(sp, sp_dict, decimal=(14, 14)):
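    # decimal=(d0, d1) sets the absolute comparison tolerance to 10**-d0 for
    # the test statistic and 10**-d1 for the p-value when checking against the
    # R reference values.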
assert_allclose(sp[0], sp_dict['statistic'], atol=10 ** -decimal[0],
rtol=10 ** -decimal[0])
assert_allclose(sp[1], sp_dict['pvalue'], atol=10 ** -decimal[1],
rtol=10 ** -decimal[0])
def notyet_atst():
d = macrodata.load(as_pandas=False).data
realinv = d['realinv']
realgdp = d['realgdp']
realint = d['realint']
endog = realinv
exog = add_constant(np.c_[realgdp, realint])
res_ols1 = OLS(endog, exog).fit()
#growth rates
gs_l_realinv = 400 * np.diff(np.log(d['realinv']))
gs_l_realgdp = 400 * np.diff(np.log(d['realgdp']))
lint = d['realint'][:-1]
tbilrate = d['tbilrate'][:-1]
endogg = gs_l_realinv
exogg = add_constant(np.c_[gs_l_realgdp, lint])
exogg2 = add_constant(np.c_[gs_l_realgdp, tbilrate])
res_ols = OLS(endogg, exogg).fit()
res_ols2 = OLS(endogg, exogg2).fit()
#the following were done accidentally with res_ols1 in R,
#with original Greene data
params = np.array([-272.3986041341653, 0.1779455206941112,
0.2149432424658157])
cov_hac_4 = np.array([1321.569466333051, -0.2318836566017612,
37.01280466875694, -0.2318836566017614, 4.602339488102263e-05,
-0.0104687835998635, 37.012804668757, -0.0104687835998635,
21.16037144168061]).reshape(3,3, order='F')
cov_hac_10 = np.array([2027.356101193361, -0.3507514463299015,
54.81079621448568, -0.350751446329901, 6.953380432635583e-05,
-0.01268990195095196, 54.81079621448564, -0.01268990195095195,
22.92512402151113]).reshape(3,3, order='F')
#goldfeld-quandt
het_gq_greater = dict(statistic=13.20512768685082, df1=99, df2=98,
pvalue=1.246141976112324e-30, distr='f')
het_gq_less = dict(statistic=13.20512768685082, df1=99, df2=98, pvalue=1.)
het_gq_2sided = dict(statistic=13.20512768685082, df1=99, df2=98,
pvalue=1.246141976112324e-30, distr='f')
#goldfeld-quandt, fraction = 0.5
het_gq_greater_2 = dict(statistic=87.1328934692124, df1=48, df2=47,
pvalue=2.154956842194898e-33, distr='f')
gq = smsdia.het_goldfeldquandt(endog, exog, split=0.5)
compare_t_est(gq, het_gq_greater, decimal=(13, 14))
assert_equal(gq[-1], 'increasing')
harvey_collier = dict(stat=2.28042114041313, df=199,
pvalue=0.02364236161988260, distr='t')
#hc = harvtest(fm, order.by=ggdp , data = list())
harvey_collier_2 = dict(stat=0.7516918462158783, df=199,
pvalue=0.4531244858006127, distr='t')
##################################
class TestDiagnosticG(object):
@classmethod
def setup_class(cls):
d = macrodata.load_pandas().data
#growth rates
gs_l_realinv = 400 * np.diff(np.log(d['realinv'].values))
gs_l_realgdp = 400 * np.diff(np.log(d['realgdp'].values))
lint = d['realint'][:-1].values
tbilrate = d['tbilrate'][:-1].values
endogg = gs_l_realinv
exogg = add_constant(np.c_[gs_l_realgdp, lint])
exogg2 = add_constant(np.c_[gs_l_realgdp, tbilrate])
exogg3 = add_constant(np.c_[gs_l_realgdp])
res_ols = OLS(endogg, exogg).fit()
res_ols2 = OLS(endogg, exogg2).fit()
res_ols3 = OLS(endogg, exogg3).fit()
cls.res = res_ols
cls.res2 = res_ols2
cls.res3 = res_ols3
cls.endog = cls.res.model.endog
cls.exog = cls.res.model.exog
def test_basic(self):
#mainly to check I got the right regression
#> mkarray(fm$coefficients, "params")
params = np.array([-9.48167277465485, 4.3742216647032,
-0.613996969478989])
assert_almost_equal(self.res.params, params, decimal=12)
def test_hac(self):
res = self.res
#> nw = NeweyWest(fm, lag = 4, prewhite = FALSE, verbose=TRUE)
#> nw2 = NeweyWest(fm, lag=10, prewhite = FALSE, verbose=TRUE)
#> mkarray(nw, "cov_hac_4")
cov_hac_4 = np.array([1.385551290884014, -0.3133096102522685,
-0.0597207976835705, -0.3133096102522685, 0.1081011690351306,
0.000389440793564336, -0.0597207976835705, 0.000389440793564339,
0.0862118527405036]).reshape(3,3, order='F')
#> mkarray(nw2, "cov_hac_10")
cov_hac_10 = np.array([1.257386180080192, -0.2871560199899846,
-0.03958300024627573, -0.2871560199899845, 0.1049107028987101,
0.0003896205316866944, -0.03958300024627578, 0.0003896205316866961,
0.0985539340694839]).reshape(3,3, order='F')
cov = sw.cov_hac_simple(res, nlags=4, use_correction=False)
bse_hac = sw.se_cov(cov)
assert_almost_equal(cov, cov_hac_4, decimal=14)
assert_almost_equal(bse_hac, np.sqrt(np.diag(cov)), decimal=14)
cov = sw.cov_hac_simple(res, nlags=10, use_correction=False)
bse_hac = sw.se_cov(cov)
assert_almost_equal(cov, cov_hac_10, decimal=14)
assert_almost_equal(bse_hac, np.sqrt(np.diag(cov)), decimal=14)
def test_het_goldfeldquandt(self):
#TODO: test options missing
#> gq = gqtest(fm, alternative='greater')
#> mkhtest_f(gq, 'het_gq_greater', 'f')
het_gq_greater = dict(statistic=0.5313259064778423,
pvalue=0.9990217851193723,
parameters=(98, 98), distr='f')
#> gq = gqtest(fm, alternative='less')
#> mkhtest_f(gq, 'het_gq_less', 'f')
het_gq_less = dict(statistic=0.5313259064778423,
pvalue=0.000978214880627621,
parameters=(98, 98), distr='f')
#> gq = gqtest(fm, alternative='two.sided')
#> mkhtest_f(gq, 'het_gq_two_sided', 'f')
het_gq_two_sided = dict(statistic=0.5313259064778423,
pvalue=0.001956429761255241,
parameters=(98, 98), distr='f')
#> gq = gqtest(fm, fraction=0.1, alternative='two.sided')
#> mkhtest_f(gq, 'het_gq_two_sided_01', 'f')
het_gq_two_sided_01 = dict(statistic=0.5006976835928314,
pvalue=0.001387126702579789,
parameters=(88, 87), distr='f')
#> gq = gqtest(fm, fraction=0.5, alternative='two.sided')
#> mkhtest_f(gq, 'het_gq_two_sided_05', 'f')
het_gq_two_sided_05 = dict(statistic=0.434815645134117,
pvalue=0.004799321242905568,
parameters=(48, 47), distr='f')
endogg, exogg = self.endog, self.exog
#tests
gq = smsdia.het_goldfeldquandt(endogg, exogg, split=0.5)
compare_t_est(gq, het_gq_greater, decimal=(14, 14))
assert_equal(gq[-1], 'increasing')
gq = smsdia.het_goldfeldquandt(endogg, exogg, split=0.5,
alternative='decreasing')
compare_t_est(gq, het_gq_less, decimal=(14, 14))
assert_equal(gq[-1], 'decreasing')
gq = smsdia.het_goldfeldquandt(endogg, exogg, split=0.5,
alternative='two-sided')
compare_t_est(gq, het_gq_two_sided, decimal=(14, 14))
assert_equal(gq[-1], 'two-sided')
#TODO: forcing the same split as R 202-90-90-1=21
gq = smsdia.het_goldfeldquandt(endogg, exogg, split=90, drop=21,
alternative='two-sided')
compare_t_est(gq, het_gq_two_sided_01, decimal=(14, 14))
assert_equal(gq[-1], 'two-sided')
#TODO other options ???
def test_het_breusch_pagan(self):
res = self.res
bptest = dict(statistic=0.709924388395087, pvalue=0.701199952134347,
parameters=(2,), distr='f')
bp = smsdia.het_breuschpagan(res.resid, res.model.exog)
compare_t_est(bp, bptest, decimal=(12, 12))
def test_het_white(self):
res = self.res
#TODO: regressiontest, compare with Greene or Gretl or Stata
hw = smsdia.het_white(res.resid, res.model.exog)
hw_values = (33.503722896538441, 2.9887960597830259e-06,
7.7945101228430946, 1.0354575277704231e-06)
assert_almost_equal(hw, hw_values)
def test_het_arch(self):
#test het_arch and indirectly het_lm against R
#> library(FinTS)
#> at = ArchTest(residuals(fm), lags=4)
#> mkhtest(at, 'archtest_4', 'chi2')
archtest_4 = dict(statistic=3.43473400836259,
pvalue=0.487871315392619, parameters=(4,),
distr='chi2')
#> at = ArchTest(residuals(fm), lags=12)
#> mkhtest(at, 'archtest_12', 'chi2')
archtest_12 = dict(statistic=8.648320999014171,
pvalue=0.732638635007718, parameters=(12,),
distr='chi2')
at4 = smsdia.het_arch(self.res.resid, maxlag=4)
at12 = smsdia.het_arch(self.res.resid, maxlag=12)
compare_t_est(at4[:2], archtest_4, decimal=(12, 13))
compare_t_est(at12[:2], archtest_12, decimal=(12, 13))
def test_het_arch2(self):
#test autolag options, this also test het_lm
#unfortunately optimal lag=1 for this data
resid = self.res.resid
res1 = smsdia.het_arch(resid, maxlag=1, autolag=None, store=True)
rs1 = res1[-1]
res2 = smsdia.het_arch(resid, maxlag=5, autolag='aic', store=True)
rs2 = res2[-1]
assert_almost_equal(rs2.resols.params, rs1.resols.params, decimal=13)
assert_almost_equal(res2[:4], res1[:4], decimal=13)
#test that smallest lag, maxlag=1 works
res3 = smsdia.het_arch(resid, maxlag=1, autolag='aic')
assert_almost_equal(res3[:4], res1[:4], decimal=13)
def test_acorr_breusch_godfrey(self):
res = self.res
#bgf = bgtest(fm, order = 4, type="F")
breuschgodfrey_f = dict(statistic=1.179280833676792,
pvalue=0.321197487261203,
parameters=(4,195,), distr='f')
#> bgc = bgtest(fm, order = 4, type="Chisq")
#> mkhtest(bgc, "breuschpagan_c", "chi2")
breuschgodfrey_c = dict(statistic=4.771042651230007,
pvalue=0.3116067133066697,
parameters=(4,), distr='chi2')
bg = smsdia.acorr_breusch_godfrey(res, nlags=4)
bg_r = [breuschgodfrey_c['statistic'], breuschgodfrey_c['pvalue'],
breuschgodfrey_f['statistic'], breuschgodfrey_f['pvalue']]
assert_almost_equal(bg, bg_r, decimal=13)
# check that lag choice works
bg2 = smsdia.acorr_breusch_godfrey(res, nlags=None)
bg3 = smsdia.acorr_breusch_godfrey(res, nlags=14)
assert_almost_equal(bg2, bg3, decimal=13)
def test_acorr_ljung_box(self):
#unit-test which may be useful later
#ddof correction for fitted parameters in ARMA(p,q) fitdf=p+q
#> bt = Box.test(residuals(fm), lag=4, type = "Ljung-Box", fitdf=2)
#> mkhtest(bt, "ljung_box_4df2", "chi2")
# ljung_box_4df2 = dict(statistic=5.23587172795227,
# pvalue=0.0729532930400377,
# parameters=(2,), distr='chi2')
#> bt = Box.test(residuals(fm), lag=4, type = "Box-Pierce", fitdf=2)
#> mkhtest(bt, "ljung_box_bp_4df2", "chi2")
# ljung_box_bp_4df2 = dict(statistic=5.12462932741681,
# pvalue=0.0771260128929921,
# parameters=(2,), distr='chi2')
res = self.res
#general test
#> bt = Box.test(residuals(fm), lag=4, type = "Ljung-Box")
#> mkhtest(bt, "ljung_box_4", "chi2")
ljung_box_4 = dict(statistic=5.23587172795227, pvalue=0.263940335284713,
parameters=(4,), distr='chi2')
#> bt = Box.test(residuals(fm), lag=4, type = "Box-Pierce")
#> mkhtest(bt, "ljung_box_bp_4", "chi2")
ljung_box_bp_4 = dict(statistic=5.12462932741681,
pvalue=0.2747471266820692,
parameters=(4,), distr='chi2')
lb, lbpval, bp, bppval = smsdia.acorr_ljungbox(res.resid, 4,
boxpierce=True)
compare_t_est([lb[-1], lbpval[-1]], ljung_box_4, decimal=(13, 13))
compare_t_est([bp[-1], bppval[-1]], ljung_box_bp_4, decimal=(13, 13))
def test_acorr_ljung_box_big_default(self):
res = self.res
#test with big dataset and default lag
#> bt = Box.test(residuals(fm), type = "Ljung-Box")
#> mkhtest(bt, "ljung_box_none", "chi2")
ljung_box_none = dict(statistic=51.03724531797195, pvalue=0.11334744923390,
distr='chi2')
#> bt = Box.test(residuals(fm), type = "Box-Pierce")
#> mkhtest(bt, "ljung_box_bp_none", "chi2")
ljung_box_bp_none = dict(statistic=45.12238537034000,
pvalue=0.26638168491464,
distr='chi2')
lb, lbpval, bp, bppval = smsdia.acorr_ljungbox(res.resid, boxpierce=True)
compare_t_est([lb[-1], lbpval[-1]], ljung_box_none, decimal=(13, 13))
compare_t_est([bp[-1], bppval[-1]], ljung_box_bp_none, decimal=(13, 13))
def test_acorr_ljung_box_small_default(self):
res = self.res
#test with small dataset and default lag
#> bt = Box.test(residuals(fm), type = "Ljung-Box")
#> mkhtest(bt, "ljung_box_small", "chi2")
ljung_box_small = dict(statistic=9.61503968281915, pvalue=0.72507000996945,
parameters=(0,), distr='chi2')
#> bt = Box.test(residuals(fm), type = "Box-Pierce")
#> mkhtest(bt, "ljung_box_bp_small", "chi2")
ljung_box_bp_small = dict(statistic=7.41692150864936,
pvalue=0.87940785887006,
parameters=(0,), distr='chi2')
lb, lbpval, bp, bppval = smsdia.acorr_ljungbox(res.resid[:30], boxpierce=True)
compare_t_est([lb[-1], lbpval[-1]], ljung_box_small, decimal=(13, 13))
compare_t_est([bp[-1], bppval[-1]], ljung_box_bp_small, decimal=(13, 13))
def test_harvey_collier(self):
#> hc = harvtest(fm, order.by = NULL, data = list())
#> mkhtest_f(hc, 'harvey_collier', 't')
harvey_collier = dict(statistic=0.494432160939874,
pvalue=0.6215491310408242,
parameters=(198), distr='t')
#> hc2 = harvtest(fm, order.by=ggdp , data = list())
#> mkhtest_f(hc2, 'harvey_collier_2', 't')
harvey_collier_2 = dict(statistic=1.42104628340473,
pvalue=0.1568762892441689,
parameters=(198), distr='t')
hc = smsdia.linear_harvey_collier(self.res)
compare_t_est(hc, harvey_collier, decimal=(12, 12))
def test_rainbow(self):
#rainbow test
#> rt = raintest(fm)
#> mkhtest_f(rt, 'raintest', 'f')
raintest = dict(statistic=0.6809600116739604, pvalue=0.971832843583418,
parameters=(101, 98), distr='f')
#> rt = raintest(fm, center=0.4)
#> mkhtest_f(rt, 'raintest_center_04', 'f')
raintest_center_04 = dict(statistic=0.682635074191527,
pvalue=0.971040230422121,
parameters=(101, 98), distr='f')
#> rt = raintest(fm, fraction=0.4)
#> mkhtest_f(rt, 'raintest_fraction_04', 'f')
raintest_fraction_04 = dict(statistic=0.565551237772662,
pvalue=0.997592305968473,
parameters=(122, 77), distr='f')
#> rt = raintest(fm, order.by=ggdp)
#Warning message:
#In if (order.by == "mahalanobis") { :
# the condition has length > 1 and only the first element will be used
#> mkhtest_f(rt, 'raintest_order_gdp', 'f')
raintest_order_gdp = dict(statistic=1.749346160513353,
pvalue=0.002896131042494884,
parameters=(101, 98), distr='f')
rb = smsdia.linear_rainbow(self.res)
compare_t_est(rb, raintest, decimal=(13, 14))
rb = smsdia.linear_rainbow(self.res, frac=0.4)
compare_t_est(rb, raintest_fraction_04, decimal=(13, 14))
def test_compare_lr(self):
res = self.res
res3 = self.res3 #nested within res
#lrtest
#lrt = lrtest(fm, fm2)
#Model 1: ginv ~ ggdp + lint
#Model 2: ginv ~ ggdp
lrtest = dict(loglike1=-763.9752181602237, loglike2=-766.3091902020184,
chi2value=4.66794408358942, pvalue=0.03073069384028677,
df=(4,3,1))
lrt = res.compare_lr_test(res3)
assert_almost_equal(lrt[0], lrtest['chi2value'], decimal=11)
assert_almost_equal(lrt[1], lrtest['pvalue'], decimal=11)
waldtest = dict(fvalue=4.65216373312492, pvalue=0.03221346195239025,
df=(199,200,1))
wt = res.compare_f_test(res3)
assert_almost_equal(wt[0], waldtest['fvalue'], decimal=11)
assert_almost_equal(wt[1], waldtest['pvalue'], decimal=11)
def test_compare_nonnested(self):
res = self.res
res2 = self.res2
#jt = jtest(fm, lm(ginv ~ ggdp + tbilrate))
#Estimate Std. Error t value Pr(>|t|)
jtest = [('M1 + fitted(M2)', 1.591505670785873, 0.7384552861695823,
2.155182176352370, 0.032354572525314450, '*'),
('M2 + fitted(M1)', 1.305687653016899, 0.4808385176653064,
2.715438978051544, 0.007203854534057954, '**')]
jt1 = smsdia.compare_j(res2, res)
assert_almost_equal(jt1, jtest[0][3:5], decimal=13)
jt2 = smsdia.compare_j(res, res2)
assert_almost_equal(jt2, jtest[1][3:5], decimal=14)
#Estimate Std. Error z value Pr(>|z|)
coxtest = [('fitted(M1) ~ M2', -0.782030488930356, 0.599696502782265,
-1.304043770977755, 1.922186587840554e-01, ' '),
('fitted(M2) ~ M1', -2.248817107408537, 0.392656854330139,
-5.727181590258883, 1.021128495098556e-08, '***')]
ct1 = smsdia.compare_cox(res, res2)
assert_almost_equal(ct1, coxtest[0][3:5], decimal=13)
ct2 = smsdia.compare_cox(res2, res)
assert_almost_equal(ct2, coxtest[1][3:5], decimal=12)
#TODO should be approx
# Res.Df Df F Pr(>F)
encomptest = [('M1 vs. ME', 198, -1, 4.644810213266983,
0.032354572525313666, '*'),
('M2 vs. ME', 198, -1, 7.373608843521585,
0.007203854534058054, '**')]
# Estimate Std. Error t value
petest = [('M1 + log(fit(M1))-fit(M2)', -229.281878354594596,
44.5087822087058598, -5.15139, 6.201281252449979e-07),
('M2 + fit(M1)-exp(fit(M2))', 0.000634664704814,
0.0000462387010349, 13.72583, 1.319536115230356e-30)]
def test_cusum_ols(self):
#R library(strucchange)
#> sc = sctest(ginv ~ ggdp + lint, type="OLS-CUSUM")
#> mkhtest(sc, 'cusum_ols', 'BB')
cusum_ols = dict(statistic=1.055750610401214, pvalue=0.2149567397376543,
parameters=(), distr='BB') #Brownian Bridge
k_vars=3
cs_ols = smsdia.breaks_cusumolsresid(self.res.resid, ddof=k_vars) #
compare_t_est(cs_ols, cusum_ols, decimal=(12, 12))
def test_breaks_hansen(self):
#> sc = sctest(ginv ~ ggdp + lint, type="Nyblom-Hansen")
#> mkhtest(sc, 'breaks_nyblom_hansen', 'BB')
breaks_nyblom_hansen = dict(statistic=1.0300792740544484,
pvalue=0.1136087530212015,
parameters=(), distr='BB')
bh = smsdia.breaks_hansen(self.res)
assert_almost_equal(bh[0], breaks_nyblom_hansen['statistic'],
decimal=13)
#TODO: breaks_hansen doesn't return pvalues
def test_recursive_residuals(self):
reccumres_standardize = np.array([-2.151, -3.748, -3.114, -3.096,
-1.865, -2.230, -1.194, -3.500, -3.638, -4.447, -4.602, -4.631, -3.999,
-4.830, -5.429, -5.435, -6.554, -8.093, -8.567, -7.532, -7.079, -8.468,
-9.320, -12.256, -11.932, -11.454, -11.690, -11.318, -12.665, -12.842,
-11.693, -10.803, -12.113, -12.109, -13.002, -11.897, -10.787, -10.159,
-9.038, -9.007, -8.634, -7.552, -7.153, -6.447, -5.183, -3.794, -3.511,
-3.979, -3.236, -3.793, -3.699, -5.056, -5.724, -4.888, -4.309, -3.688,
-3.918, -3.735, -3.452, -2.086, -6.520, -7.959, -6.760, -6.855, -6.032,
-4.405, -4.123, -4.075, -3.235, -3.115, -3.131, -2.986, -1.813, -4.824,
-4.424, -4.796, -4.000, -3.390, -4.485, -4.669, -4.560, -3.834, -5.507,
-3.792, -2.427, -1.756, -0.354, 1.150, 0.586, 0.643, 1.773, -0.830,
-0.388, 0.517, 0.819, 2.240, 3.791, 3.187, 3.409, 2.431, 0.668, 0.957,
-0.928, 0.327, -0.285, -0.625, -2.316, -1.986, -0.744, -1.396, -1.728,
-0.646, -2.602, -2.741, -2.289, -2.897, -1.934, -2.532, -3.175, -2.806,
-3.099, -2.658, -2.487, -2.515, -2.224, -2.416, -1.141, 0.650, -0.947,
0.725, 0.439, 0.885, 2.419, 2.642, 2.745, 3.506, 4.491, 5.377, 4.624,
5.523, 6.488, 6.097, 5.390, 6.299, 6.656, 6.735, 8.151, 7.260, 7.846,
8.771, 8.400, 8.717, 9.916, 9.008, 8.910, 8.294, 8.982, 8.540, 8.395,
7.782, 7.794, 8.142, 8.362, 8.400, 7.850, 7.643, 8.228, 6.408, 7.218,
7.699, 7.895, 8.725, 8.938, 8.781, 8.350, 9.136, 9.056, 10.365, 10.495,
10.704, 10.784, 10.275, 10.389, 11.586, 11.033, 11.335, 11.661, 10.522,
10.392, 10.521, 10.126, 9.428, 9.734, 8.954, 9.949, 10.595, 8.016,
6.636, 6.975])
rr = smsdia.recursive_olsresiduals(self.res, skip=3, alpha=0.95)
assert_equal(np.round(rr[5][1:], 3), reccumres_standardize) #extra zero in front
#assert_equal(np.round(rr[3][4:], 3), np.diff(reccumres_standardize))
assert_almost_equal(rr[3][4:], np.diff(reccumres_standardize),3)
assert_almost_equal(rr[4][3:].std(ddof=1), 10.7242, decimal=4)
#regression number, visually checked with graph from gretl
ub0 = np.array([ 13.37318571, 13.50758959, 13.64199346, 13.77639734,
13.91080121])
ub1 = np.array([ 39.44753774, 39.58194162, 39.7163455 , 39.85074937,
39.98515325])
lb, ub = rr[6]
assert_almost_equal(ub[:5], ub0, decimal=7)
assert_almost_equal(lb[:5], -ub0, decimal=7)
assert_almost_equal(ub[-5:], ub1, decimal=7)
assert_almost_equal(lb[-5:], -ub1, decimal=7)
#test a few values with explicit OLS
endog = self.res.model.endog
exog = self.res.model.exog
params = []
ypred = []
for i in range(3,10):
resi = OLS(endog[:i], exog[:i]).fit()
ypred.append(resi.model.predict(resi.params, exog[i]))
params.append(resi.params)
assert_almost_equal(rr[2][3:10], ypred, decimal=12)
assert_almost_equal(rr[0][3:10], endog[3:10] - ypred, decimal=12)
assert_almost_equal(rr[1][2:9], params, decimal=12)
def test_normality(self):
res = self.res
#> library(nortest) #Lilliefors (Kolmogorov-Smirnov) normality test
#> lt = lillie.test(residuals(fm))
#> mkhtest(lt, "lilliefors", "-")
lilliefors1 = dict(statistic=0.0723390908786589,
pvalue=0.01204113540102896, parameters=(), distr='-')
#> lt = lillie.test(residuals(fm)**2)
#> mkhtest(lt, "lilliefors", "-")
lilliefors2 = dict(statistic=0.301311621898024,
pvalue=1.004305736618051e-51,
parameters=(), distr='-')
#> lt = lillie.test(residuals(fm)[1:20])
#> mkhtest(lt, "lilliefors", "-")
lilliefors3 = dict(statistic=0.1333956004203103,
pvalue=0.20, parameters=(), distr='-')
lf1 = smsdia.lilliefors(res.resid)
lf2 = smsdia.lilliefors(res.resid**2)
lf3 = smsdia.lilliefors(res.resid[:20])
compare_t_est(lf1, lilliefors1, decimal=(14, 14))
compare_t_est(lf2, lilliefors2, decimal=(14, 14)) # pvalue very small
assert_allclose(lf2[1], lilliefors2['pvalue'], rtol=1e-10)
compare_t_est(lf3, lilliefors3, decimal=(14, 1))
# R uses different approximation for pvalue in last case
#> ad = ad.test(residuals(fm))
#> mkhtest(ad, "ad3", "-")
adr1 = dict(statistic=1.602209621518313, pvalue=0.0003937979149362316,
parameters=(), distr='-')
#> ad = ad.test(residuals(fm)**2)
#> mkhtest(ad, "ad3", "-")
adr2 = dict(statistic=np.inf, pvalue=np.nan, parameters=(), distr='-')
#> ad = ad.test(residuals(fm)[1:20])
#> mkhtest(ad, "ad3", "-")
adr3 = dict(statistic=0.3017073732210775, pvalue=0.5443499281265933,
parameters=(), distr='-')
ad1 = smsdia.normal_ad(res.resid)
compare_t_est(ad1, adr1, decimal=(11, 13))
ad2 = smsdia.normal_ad(res.resid**2)
assert_(np.isinf(ad2[0]))
ad3 = smsdia.normal_ad(res.resid[:20])
compare_t_est(ad3, adr3, decimal=(11, 12))
def test_influence(self):
res = self.res
#this test is slow
infl = oi.OLSInfluence(res)
path = os.path.join(cur_dir, "results", "influence_lsdiag_R.json")
with open(path, 'r') as fp:
lsdiag = json.load(fp)
#basic
assert_almost_equal(np.array(lsdiag['cov.scaled']).reshape(3, 3),
res.cov_params(), decimal=14)
assert_almost_equal(np.array(lsdiag['cov.unscaled']).reshape(3, 3),
res.normalized_cov_params, decimal=14)
c0, c1 = infl.cooks_distance #TODO: what's c1
assert_almost_equal(c0, lsdiag['cooks'], decimal=14)
assert_almost_equal(infl.hat_matrix_diag, lsdiag['hat'], decimal=14)
assert_almost_equal(infl.resid_studentized_internal,
lsdiag['std.res'], decimal=14)
#slow:
#infl._get_all_obs() #slow, nobs estimation loop, called implicitly
dffits, dffth = infl.dffits
assert_almost_equal(dffits, lsdiag['dfits'], decimal=14)
assert_almost_equal(infl.resid_studentized_external,
lsdiag['stud.res'], decimal=14)
import pandas
fn = os.path.join(cur_dir,"results/influence_measures_R.csv")
infl_r = pandas.read_csv(fn, index_col=0)
conv = lambda s: 1 if s=='TRUE' else 0
fn = os.path.join(cur_dir,"results/influence_measures_bool_R.csv")
#not used yet:
#infl_bool_r = pandas.read_csv(fn, index_col=0,
# converters=dict(zip(lrange(7),[conv]*7)))
infl_r2 = np.asarray(infl_r)
assert_almost_equal(infl.dfbetas, infl_r2[:,:3], decimal=13)
assert_almost_equal(infl.cov_ratio, infl_r2[:,4], decimal=14)
#duplicates
assert_almost_equal(dffits, infl_r2[:,3], decimal=14)
assert_almost_equal(c0, infl_r2[:,5], decimal=14)
assert_almost_equal(infl.hat_matrix_diag, infl_r2[:,6], decimal=14)
#Note: for dffits, R uses a threshold around 0.36, mine: dffits[1]=0.24373
#TODO: finish and check thresholds and pvalues
'''
R has
>>> np.nonzero(np.asarray(infl_bool_r["dffit"]))[0]
array([ 6, 26, 63, 76, 90, 199])
>>> np.nonzero(np.asarray(infl_bool_r["cov.r"]))[0]
array([ 4, 26, 59, 61, 63, 72, 76, 84, 91, 92, 94, 95, 108,
197, 198])
>>> np.nonzero(np.asarray(infl_bool_r["hat"]))[0]
array([ 62, 76, 84, 90, 91, 92, 95, 108, 197, 199])
'''
class TestDiagnosticGPandas(TestDiagnosticG):
@classmethod
def setup_class(cls):
d = macrodata.load_pandas().data
#growth rates
d['gs_l_realinv'] = 400 * np.log(d['realinv']).diff()
d['gs_l_realgdp'] = 400 * np.log(d['realgdp']).diff()
d['lint'] = d['realint'].shift(1)
d['tbilrate'] = d['tbilrate'].shift(1)
d = d.dropna()
cls.d = d
endogg = d['gs_l_realinv']
exogg = add_constant(d[['gs_l_realgdp', 'lint']])
exogg2 = add_constant(d[['gs_l_realgdp', 'tbilrate']])
exogg3 = add_constant(d[['gs_l_realgdp']])
res_ols = OLS(endogg, exogg).fit()
res_ols2 = OLS(endogg, exogg2).fit()
res_ols3 = OLS(endogg, exogg3).fit()
cls.res = res_ols
cls.res2 = res_ols2
cls.res3 = res_ols3
cls.endog = cls.res.model.endog
cls.exog = cls.res.model.exog
def grangertest():
#> gt = grangertest(ginv, ggdp, order=4)
#> gt
#Granger causality test
#
#Model 1: ggdp ~ Lags(ggdp, 1:4) + Lags(ginv, 1:4)
#Model 2: ggdp ~ Lags(ggdp, 1:4)
grangertest = dict(fvalue=1.589672703015157, pvalue=0.178717196987075,
df=(198,193))
def test_outlier_influence_funcs(reset_randomstate):
#smoke test
x = add_constant(np.random.randn(10, 2))
y = x.sum(1) + np.random.randn(10)
res = OLS(y, x).fit()
out_05 = oi.summary_table(res)
# GH3344 : Check alpha has an effect
out_01 = oi.summary_table(res, alpha=0.01)
assert_(np.all(out_01[1][:, 6] <= out_05[1][:, 6]))
assert_(np.all(out_01[1][:, 7] >= out_05[1][:, 7]))
res2 = OLS(y, x[:,0]).fit()
oi.summary_table(res2, alpha=0.05)
infl = res2.get_influence()
infl.summary_table()
def test_influence_wrapped():
from pandas import DataFrame
from pandas.util.testing import assert_series_equal
d = macrodata.load_pandas().data
#growth rates
gs_l_realinv = 400 * np.log(d['realinv']).diff().dropna()
gs_l_realgdp = 400 * np.log(d['realgdp']).diff().dropna()
lint = d['realint'][:-1]
# re-index these because they won't conform to lint
gs_l_realgdp.index = lint.index
gs_l_realinv.index = lint.index
data = dict(const=np.ones_like(lint), lint=lint, lrealgdp=gs_l_realgdp)
#order is important
exog = DataFrame(data, columns=['const','lrealgdp','lint'])
res = OLS(gs_l_realinv, exog).fit()
#basic
# already tested
#assert_almost_equal(lsdiag['cov.scaled'],
# res.cov_params().values.ravel(), decimal=14)
#assert_almost_equal(lsdiag['cov.unscaled'],
# res.normalized_cov_params.values.ravel(), decimal=14)
infl = oi.OLSInfluence(res)
# smoke test just to make sure it works, results separately tested
df = infl.summary_frame()
assert_(isinstance(df, DataFrame))
#this test is slow
path = os.path.join(cur_dir, "results", "influence_lsdiag_R.json")
with open(path, "r") as fp:
lsdiag = json.load(fp)
c0, c1 = infl.cooks_distance #TODO: what's c1, it's pvalues? -ss
#NOTE: we get a hard-cored 5 decimals with pandas testing
assert_almost_equal(c0, lsdiag['cooks'], 14)
assert_almost_equal(infl.hat_matrix_diag, (lsdiag['hat']), 14)
assert_almost_equal(infl.resid_studentized_internal,
lsdiag['std.res'], 14)
#slow:
dffits, dffth = infl.dffits
assert_almost_equal(dffits, lsdiag['dfits'], 14)
assert_almost_equal(infl.resid_studentized_external,
lsdiag['stud.res'], 14)
import pandas
fn = os.path.join(cur_dir,"results/influence_measures_R.csv")
infl_r = pandas.read_csv(fn, index_col=0)
conv = lambda s: 1 if s=='TRUE' else 0
fn = os.path.join(cur_dir,"results/influence_measures_bool_R.csv")
#not used yet:
#infl_bool_r = pandas.read_csv(fn, index_col=0,
# converters=dict(zip(lrange(7),[conv]*7)))
infl_r2 = np.asarray(infl_r)
#TODO: finish wrapping this stuff
assert_almost_equal(infl.dfbetas, infl_r2[:,:3], decimal=13)
assert_almost_equal(infl.cov_ratio, infl_r2[:,4], decimal=14)
def test_influence_dtype():
# see #2148 bug when endog is integer
y = np.ones(20)
np.random.seed(123)
x = np.random.randn(20, 3)
res1 = OLS(y, x).fit()
res2 = OLS(y*1., x).fit()
cr1 = res1.get_influence().cov_ratio
cr2 = res2.get_influence().cov_ratio
assert_allclose(cr1, cr2, rtol=1e-14)
# regression test for values
cr3 = np.array(
[ 1.22239215, 1.31551021, 1.52671069, 1.05003921, 0.89099323,
1.57405066, 1.03230092, 0.95844196, 1.15531836, 1.21963623,
0.87699564, 1.16707748, 1.10481391, 0.98839447, 1.08999334,
1.35680102, 1.46227715, 1.45966708, 1.13659521, 1.22799038])
assert_almost_equal(cr1, cr3, decimal=8)
def get_duncan_data():
# results from R with NA -> 1. Just testing interface here because
# outlier_test is just a wrapper
labels = ['accountant', 'pilot', 'architect', 'author', 'chemist',
'minister', 'professor', 'dentist', 'reporter', 'engineer',
'undertaker', 'lawyer', 'physician', 'welfare.worker', 'teacher',
'conductor', 'contractor', 'factory.owner', 'store.manager',
'banker', 'bookkeeper', 'mail.carrier', 'insurance.agent',
'store.clerk', 'carpenter', 'electrician', 'RR.engineer',
'machinist', 'auto.repairman', 'plumber', 'gas.stn.attendant',
'coal.miner', 'streetcar.motorman', 'taxi.driver',
'truck.driver', 'machine.operator', 'barber', 'bartender',
'shoe.shiner', 'cook', 'soda.clerk', 'watchman', 'janitor',
'policeman', 'waiter']
#Duncan's prestige data from car
exog = [[1.0, 62.0, 86.0], [1.0, 72.0, 76.0], [1.0, 75.0, 92.0],
[1.0, 55.0, 90.0], [1.0, 64.0, 86.0], [1.0, 21.0, 84.0],
[1.0, 64.0, 93.0], [1.0, 80.0, 100.0], [1.0, 67.0, 87.0],
[1.0, 72.0, 86.0], [1.0, 42.0, 74.0], [1.0, 76.0, 98.0],
[1.0, 76.0, 97.0], [1.0, 41.0, 84.0], [1.0, 48.0, 91.0],
[1.0, 76.0, 34.0], [1.0, 53.0, 45.0], [1.0, 60.0, 56.0],
[1.0, 42.0, 44.0], [1.0, 78.0, 82.0], [1.0, 29.0, 72.0],
[1.0, 48.0, 55.0], [1.0, 55.0, 71.0], [1.0, 29.0, 50.0],
[1.0, 21.0, 23.0], [1.0, 47.0, 39.0], [1.0, 81.0, 28.0],
[1.0, 36.0, 32.0], [1.0, 22.0, 22.0], [1.0, 44.0, 25.0],
[1.0, 15.0, 29.0], [1.0, 7.0, 7.0], [1.0, 42.0, 26.0],
[1.0, 9.0, 19.0], [1.0, 21.0, 15.0], [1.0, 21.0, 20.0],
[1.0, 16.0, 26.0], [1.0, 16.0, 28.0], [1.0, 9.0, 17.0],
[1.0, 14.0, 22.0], [1.0, 12.0, 30.0], [1.0, 17.0, 25.0],
[1.0, 7.0, 20.0], [1.0, 34.0, 47.0], [1.0, 8.0, 32.0]]
endog = [ 82., 83., 90., 76., 90., 87., 93., 90., 52., 88., 57.,
89., 97., 59., 73., 38., 76., 81., 45., 92., 39., 34.,
41., 16., 33., 53., 67., 57., 26., 29., 10., 15., 19.,
10., 13., 24., 20., 7., 3., 16., 6., 11., 8., 41.,
10.]
return endog, exog, labels
def test_outlier_test():
endog, exog, labels = get_duncan_data()
ndarray_mod = OLS(endog, exog).fit()
rstudent = [3.1345185839, -2.3970223990, 2.0438046359, -1.9309187757,
1.8870465798, -1.7604905300, -1.7040324156, 1.6024285876,
-1.4332485037, -1.1044851583, 1.0688582315, 1.0185271840,
-0.9024219332, -0.9023876471, -0.8830953936, 0.8265782334,
0.8089220547, 0.7682770197, 0.7319491074, -0.6665962829,
0.5227352794, -0.5135016547, 0.5083881518, 0.4999224372,
-0.4980818221, -0.4759717075, -0.4293565820, -0.4114056499,
-0.3779540862, 0.3556874030, 0.3409200462, 0.3062248646,
0.3038999429, -0.3030815773, -0.1873387893, 0.1738050251,
0.1424246593, -0.1292266025, 0.1272066463, -0.0798902878,
0.0788467222, 0.0722556991, 0.0505098280, 0.0233215136,
0.0007112055]
unadj_p = [0.003177202, 0.021170298, 0.047432955, 0.060427645, 0.066248120,
0.085783008, 0.095943909, 0.116738318, 0.159368890, 0.275822623,
0.291386358, 0.314400295, 0.372104049, 0.372122040, 0.382333561,
0.413260793, 0.423229432, 0.446725370, 0.468363101, 0.508764039,
0.603971990, 0.610356737, 0.613905871, 0.619802317, 0.621087703,
0.636621083, 0.669911674, 0.682917818, 0.707414459, 0.723898263,
0.734904667, 0.760983108, 0.762741124, 0.763360242, 0.852319039,
0.862874018, 0.887442197, 0.897810225, 0.899398691, 0.936713197,
0.937538115, 0.942749758, 0.959961394, 0.981506948, 0.999435989]
bonf_p = [0.1429741, 0.9526634, 2.1344830, 2.7192440, 2.9811654, 3.8602354,
4.3174759, 5.2532243, 7.1716001, 12.4120180, 13.1123861, 14.1480133,
16.7446822, 16.7454918, 17.2050103, 18.5967357, 19.0453245,
20.1026416, 21.0763395, 22.8943818, 27.1787396, 27.4660532,
27.6257642, 27.8911043, 27.9489466, 28.6479487, 30.1460253,
30.7313018, 31.8336506, 32.5754218, 33.0707100, 34.2442399,
34.3233506, 34.3512109, 38.3543568, 38.8293308, 39.9348989,
40.4014601, 40.4729411, 42.1520939, 42.1892152, 42.4237391,
43.1982627, 44.1678127, 44.9746195]
bonf_p = np.array(bonf_p)
bonf_p[bonf_p > 1] = 1
sorted_labels = ["minister", "reporter", "contractor", "insurance.agent",
"machinist", "store.clerk", "conductor", "factory.owner",
"mail.carrier", "streetcar.motorman", "carpenter", "coal.miner",
"bartender", "bookkeeper", "soda.clerk", "chemist", "RR.engineer",
"professor", "electrician", "gas.stn.attendant", "auto.repairman",
"watchman", "banker", "machine.operator", "dentist", "waiter",
"shoe.shiner", "welfare.worker", "plumber", "physician", "pilot",
"engineer", "accountant", "lawyer", "undertaker", "barber",
"store.manager", "truck.driver", "cook", "janitor", "policeman",
"architect", "teacher", "taxi.driver", "author"]
res2 = np.c_[rstudent, unadj_p, bonf_p]
res = oi.outlier_test(ndarray_mod, method='b', labels=labels, order=True)
np.testing.assert_almost_equal(res.values, res2, 7)
np.testing.assert_equal(res.index.tolist(), sorted_labels) # pylint: disable-msg=E1103
data = pd.DataFrame(np.column_stack((endog, exog)),
columns='y const var1 var2'.split(),
index=labels)
# check `order` with pandas bug in #3971
res_pd = OLS.from_formula('y ~ const + var1 + var2 - 0', data).fit()
res_outl2 = oi.outlier_test(res_pd, method='b', order=True)
assert_almost_equal(res_outl2.values, res2, 7)
assert_equal(res_outl2.index.tolist(), sorted_labels)
res_outl1 = res_pd.outlier_test(method='b')
res_outl1 = res_outl1.sort_values(['unadj_p'], ascending=True)
assert_almost_equal(res_outl1.values, res2, 7)
assert_equal(res_outl1.index.tolist(), sorted_labels)
assert_array_equal(res_outl2.index, res_outl1.index)
# additional keywords in method
res_outl3 = res_pd.outlier_test(method='b', order=True)
assert_equal(res_outl3.index.tolist(), sorted_labels)
res_outl4 = res_pd.outlier_test(method='b', order=True, cutoff=0.15)
assert_equal(res_outl4.index.tolist(), sorted_labels[:1])
if __name__ == '__main__':
import pytest
pytest.main([__file__, '-vvs', '-x', '--pdb'])
#t = TestDiagnosticG()
#t.test_basic()
#t.test_hac()
#t.test_acorr_breusch_godfrey()
#t.test_acorr_ljung_box()
#t.test_het_goldfeldquandt()
#t.test_het_breusch_pagan()
#t.test_het_white()
#t.test_compare_lr()
#t.test_compare_nonnested()
#t.test_influence()
##################################################
'''
J test
Model 1: ginv ~ ggdp + lint
Model 2: ginv ~ ggdp + tbilrate
Estimate Std. Error t value Pr(>|t|)
M1 + fitted(M2) 1.591505670785873 0.7384552861695823 2.15518 0.0323546 *
M2 + fitted(M1) 1.305687653016899 0.4808385176653064 2.71544 0.0072039 **
---
Signif. codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1
= lm(ginv ~ ggdp + tbilrate)
> ct = coxtest(fm, fm3)
> ct
Cox test
Model 1: ginv ~ ggdp + lint
Model 2: ginv ~ ggdp + tbilrate
Estimate Std. Error z value Pr(>|z|)
fitted(M1) ~ M2 -0.782030488930356 0.599696502782265 -1.30404 0.19222
fitted(M2) ~ M1 -2.248817107408537 0.392656854330139 -5.72718 1.0211e-08 ***
---
Signif. codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1
> et = encomptest(fm, fm3)
> et
Encompassing test
Model 1: ginv ~ ggdp + lint
Model 2: ginv ~ ggdp + tbilrate
Model E: ginv ~ ggdp + lint + tbilrate
Res.Df Df F Pr(>F)
M1 vs. ME 198 -1 4.64481 0.0323546 *
M2 vs. ME 198 -1 7.37361 0.0072039 **
---
Signif. codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1
> fm4 = lm(realinv ~ realgdp + realint, data=d)
> fm5 = lm(log(realinv) ~ realgdp + realint, data=d)
> pet = petest(fm4, fm5)
> pet
PE test
Model 1: realinv ~ realgdp + realint
Model 2: log(realinv) ~ realgdp + realint
Estimate Std. Error t value
M1 + log(fit(M1))-fit(M2) -229.281878354594596 44.5087822087058598 -5.15139
M2 + fit(M1)-exp(fit(M2)) 0.000634664704814 0.0000462387010349 13.72583
Pr(>|t|)
M1 + log(fit(M1))-fit(M2) 6.2013e-07 ***
M2 + fit(M1)-exp(fit(M2)) < 2.22e-16 ***
---
Signif. codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1
'''
| bsd-3-clause |
MD2Korg/CerebralCortex | jupyter_demo/demo_algorithm/gps_clustering.py | 1 | 4106 | # Copyright (c) 2019, MD2K Center of Excellence
# - Nasir Ali <nasir.ali08@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy as np
import pandas as pd
from geopy.distance import great_circle
from pyspark.sql.functions import pandas_udf, PandasUDFType
from shapely.geometry.multipoint import MultiPoint
from sklearn.cluster import DBSCAN
from pyspark.sql.types import StructField, StructType, StringType, FloatType
EPSILON_CONSTANT = 1000
LATITUDE = 0
LONGITUDE = 1
ACCURACY = -1
GPS_ACCURACY_THRESHOLD = 41.0
KM_PER_RADIAN = 6371.0088
GEO_FENCE_DISTANCE = 2
MINIMUM_POINTS_IN_CLUSTER = 500
def get_centermost_point(cluster: object) -> object:
"""
:param cluster:
:return:
:rtype: object
"""
centroid = (
MultiPoint(cluster).centroid.x, MultiPoint(cluster).centroid.y)
centermost_point = min(cluster, key=lambda point: great_circle(point,
centroid).m)
return tuple(centermost_point)
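# Example (hypothetical coordinates): for cluster = [(35.1, -89.9),
# (35.2, -90.0), (35.3, -90.1)] the centroid is (35.2, -90.0), so the
# centermost member returned is (35.2, -90.0).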
schema = StructType([
StructField("user", StringType()),
StructField("latitude", FloatType()),
StructField("longitude", FloatType())
])
@pandas_udf(schema, PandasUDFType.GROUPED_MAP)
def gps_clusters(data: object) -> object:
"""
Computes the clusters
:rtype: object
:param list data: list of interpolated gps data
:param float geo_fence_distance: Maximum distance between points in a
cluster
:param int min_points_in_cluster: Minimum number of points in a cluster
:return: list of cluster-centroids coordinates
"""
geo_fence_distance = GEO_FENCE_DISTANCE
min_points_in_cluster = MINIMUM_POINTS_IN_CLUSTER
data = data[data.accuracy < GPS_ACCURACY_THRESHOLD]
id = data.user.iloc[0]
dataframe = pd.DataFrame(
{'latitude': data.latitude, 'longitude': data.longitude})
    # DataFrame.as_matrix was removed in newer versions of pandas; selecting
    # the columns and using .values is the equivalent, version-safe form.
    coords = dataframe[['latitude', 'longitude']].values
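    # geo_fence_distance appears to be in meters here: dividing by
    # EPSILON_CONSTANT (1000) converts to km, and dividing by KM_PER_RADIAN
    # converts to the radian units expected by the haversine metric below.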
epsilon = geo_fence_distance / (
EPSILON_CONSTANT * KM_PER_RADIAN)
db = DBSCAN(eps=epsilon, min_samples=min_points_in_cluster,
algorithm='ball_tree', metric='haversine').fit(
np.radians(coords))
cluster_labels = db.labels_
num_clusters = len(set(cluster_labels))
clusters = pd.Series(
[coords[cluster_labels == n] for n in range(-1, num_clusters)])
clusters = clusters.apply(lambda y: np.nan if len(y) == 0 else y)
clusters.dropna(how='any', inplace=True)
centermost_points = clusters.map(get_centermost_point)
centermost_points = np.array(centermost_points)
all_centroid = []
for cols in centermost_points:
cols = np.array(cols)
cols.flatten()
cs = ([id, cols[LATITUDE], cols[LONGITUDE]])
all_centroid.append(cs)
df = pd.DataFrame(all_centroid, columns=['user', 'latitude', 'longitude'])
return df
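# Typical usage (a sketch, assuming a Spark DataFrame named gps_df with user,
# latitude, longitude and accuracy columns):
# centroids = gps_df.groupBy('user').apply(gps_clusters)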
| bsd-2-clause |
tensorflow/tensorflow-experimental_link_static_libraries_once | tensorflow/python/data/experimental/kernel_tests/service/dynamic_sharding_test.py | 4 | 17068 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for dynamic sharding."""
import collections
from absl.testing import parameterized
import numpy as np
from tensorflow.python.data.experimental.kernel_tests.service import test_base as data_service_test_base
from tensorflow.python.data.experimental.ops import data_service_ops
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.kernel_tests import tf_record_test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import readers
from tensorflow.python.framework import combinations
from tensorflow.python.framework import errors
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test
class DynamicShardingTest(data_service_test_base.TestBase,
parameterized.TestCase):
def _make_dynamic_sharding_dataset(self, dataset, cluster):
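    # With ShardingPolicy.DYNAMIC the tf.data service dispatcher hands out
    # input splits to workers on request, so each input element is visited
    # once per repetition across all workers instead of being statically
    # sharded per worker.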
return self.make_distributed_dataset(
dataset,
cluster,
processing_mode=data_service_ops.ShardingPolicy.DYNAMIC,
job_name="job_name")
@combinations.generate(test_base.default_test_combinations())
def testBasic(self):
cluster = data_service_test_base.TestCluster(num_workers=2)
num_elements = 100
ds = dataset_ops.Dataset.range(num_elements)
ds = self._make_dynamic_sharding_dataset(ds, cluster)
self.assertDatasetProduces(
ds, list(range(num_elements)), assert_items_equal=True)
@combinations.generate(test_base.default_test_combinations())
def testTensorSlices(self):
cluster = data_service_test_base.TestCluster(num_workers=2)
vals = [5, 1, 2, 4]
ds = dataset_ops.Dataset.from_tensor_slices(vals)
ds = self._make_dynamic_sharding_dataset(ds, cluster)
self.assertDatasetProduces(ds, vals, assert_items_equal=True)
@combinations.generate(test_base.default_test_combinations())
def testInterleave(self):
cluster = data_service_test_base.TestCluster(num_workers=2)
elements = [1, 5, 0]
ds = dataset_ops.Dataset.from_tensor_slices(elements)
ds = ds.interleave(lambda x: dataset_ops.Dataset.from_tensor_slices([x]))
ds = self._make_dynamic_sharding_dataset(ds, cluster)
self.assertDatasetProduces(ds, elements, assert_items_equal=True)
@combinations.generate(test_base.default_test_combinations())
def testParallelInterleave(self):
cluster = data_service_test_base.TestCluster(num_workers=2)
elements = [1, 5, 0]
ds = dataset_ops.Dataset.from_tensor_slices(elements)
ds = ds.interleave(
lambda x: dataset_ops.Dataset.from_tensor_slices([x]),
num_parallel_calls=dataset_ops.AUTOTUNE)
ds = self._make_dynamic_sharding_dataset(ds, cluster)
self.assertDatasetProduces(ds, elements, assert_items_equal=True)
@combinations.generate(test_base.default_test_combinations())
def testFlatMap(self):
cluster = data_service_test_base.TestCluster(num_workers=2)
elements = [1, 5, 0]
ds = dataset_ops.Dataset.from_tensor_slices(elements)
ds = ds.flat_map(lambda x: dataset_ops.Dataset.from_tensor_slices([x]))
ds = self._make_dynamic_sharding_dataset(ds, cluster)
self.assertDatasetProduces(ds, elements, assert_items_equal=True)
@combinations.generate(test_base.default_test_combinations())
def testGroupByWindow(self):
# Verify that split providers are not propagated into iterators created for
# the reduce datasets created by the reduce_fn in group_by_window.
cluster = data_service_test_base.TestCluster(num_workers=2)
elements = [1, 5, 0]
ds = dataset_ops.Dataset.from_tensor_slices(elements)
def reduce_fn(_, window):
return dataset_ops.Dataset.zip((window, dataset_ops.Dataset.range(100)))
ds = ds.group_by_window(lambda x: 0, reduce_fn, window_size=3)
ds = self._make_dynamic_sharding_dataset(ds, cluster)
# This will fail if the tensor_slices split provider is propagated into the
# `reduce_fn`, since the `zip` requires either 0 or 2 split providers.
self.getDatasetOutput(ds)
@combinations.generate(test_base.default_test_combinations())
def testRepeatBeforeDistribution(self):
cluster = data_service_test_base.TestCluster(num_workers=2)
num_repeats = 5
num_elements = 20
ds = dataset_ops.Dataset.range(num_elements).repeat(num_repeats)
ds = self._make_dynamic_sharding_dataset(ds, cluster)
self.assertDatasetProduces(
ds, num_repeats * list(range(num_elements)), assert_items_equal=True)
@combinations.generate(test_base.default_test_combinations())
def testRepeatAfterDistribution(self):
cluster = data_service_test_base.TestCluster(num_workers=2)
num_repeats = 5
num_elements = 20
ds = dataset_ops.Dataset.range(num_elements)
ds = self._make_dynamic_sharding_dataset(ds, cluster)
ds = ds.repeat(num_repeats)
self.assertDatasetProduces(
ds, num_repeats * list(range(num_elements)), assert_items_equal=True)
@combinations.generate(test_base.default_test_combinations())
def testForeverRepeat(self):
cluster = data_service_test_base.TestCluster(num_workers=2)
num_elements = 20
elements_to_read = 1000
ds = dataset_ops.Dataset.range(num_elements).repeat()
ds = self._make_dynamic_sharding_dataset(ds, cluster)
get_next = self.getNext(ds)
results = {}
for _ in range(elements_to_read):
val = self.evaluate(get_next())
if val not in results:
results[val] = 0
results[val] += 1
for i in range(num_elements):
self.assertGreater(results[i], elements_to_read / num_elements / 2)
@combinations.generate(test_base.default_test_combinations())
def testForeverRepeatFewElements(self):
num_workers = 5
cluster = data_service_test_base.TestCluster(num_workers=num_workers)
# Less than the number of workers, so that some workers get zero elements on
# the first repetition.
num_elements = 1
ds = dataset_ops.Dataset.range(num_elements).repeat()
ds = self._make_dynamic_sharding_dataset(ds, cluster)
get_next = self.getNext(ds)
for _ in range(20):
self.assertEqual(self.evaluate(get_next()), 0)
# Stop all but one worker and check that we can still read.
for i in range(num_workers - 1):
cluster.workers[i].stop()
for _ in range(20):
self.assertEqual(self.evaluate(get_next()), 0)
@combinations.generate(test_base.default_test_combinations())
def testShuffleAndRepeat(self):
cluster = data_service_test_base.TestCluster(num_workers=2)
num_repeats = 5
num_elements = 20
ds = dataset_ops.Dataset.range(num_elements).shuffle(num_elements).repeat(
num_repeats)
ds = self._make_dynamic_sharding_dataset(ds, cluster)
self.assertDatasetProduces(
ds, num_repeats * list(range(num_elements)), assert_items_equal=True)
@combinations.generate(test_base.default_test_combinations())
def testZip(self):
num_elements = 10
cluster = data_service_test_base.TestCluster(num_workers=1)
a = dataset_ops.Dataset.range(num_elements)
ds = dataset_ops.Dataset.zip((a, a))
ds = self._make_dynamic_sharding_dataset(ds, cluster)
self.assertDatasetProduces(
ds, list(zip(range(num_elements), range(num_elements))))
@combinations.generate(test_base.default_test_combinations())
def testNestedZip(self):
num_elements = 10
cluster = data_service_test_base.TestCluster(num_workers=1)
a = dataset_ops.Dataset.range(num_elements)
ds = dataset_ops.Dataset.zip((a, a))
ds = dataset_ops.Dataset.zip((a, a, ds, a))
ds = self._make_dynamic_sharding_dataset(ds, cluster)
b = list(range(10))
self.assertDatasetProduces(ds, list(zip(b, b, zip(b, b), b)))
@combinations.generate(test_base.default_test_combinations())
def testImbalancedZip(self):
smaller_num_elements = 200
larger_num_elements = 1000
cluster = data_service_test_base.TestCluster(num_workers=1)
a = dataset_ops.Dataset.range(smaller_num_elements)
b = dataset_ops.Dataset.range(larger_num_elements)
ds = dataset_ops.Dataset.zip((a, b))
ds = self._make_dynamic_sharding_dataset(ds, cluster)
self.assertDatasetProduces(
ds, list(zip(range(smaller_num_elements), range(smaller_num_elements))))
@combinations.generate(test_base.default_test_combinations())
def testImbalancedZipMultiWorker(self):
smaller_num_elements = 200
larger_num_elements = 1000
cluster = data_service_test_base.TestCluster(num_workers=3)
a = dataset_ops.Dataset.range(smaller_num_elements)
b = dataset_ops.Dataset.range(larger_num_elements)
ds = dataset_ops.Dataset.zip((a, b))
ds = self._make_dynamic_sharding_dataset(ds, cluster)
# Cannot assert specific elements because the range datasets are split
# nondeterministically and may not line up.
self.assertLen(self.getDatasetOutput(ds), smaller_num_elements)
@combinations.generate(test_base.default_test_combinations())
def testZipDifferentRates(self):
cluster = data_service_test_base.TestCluster(num_workers=3)
a = dataset_ops.Dataset.range(100)
b = dataset_ops.Dataset.range(100).filter(
lambda x: math_ops.equal(x % 10, 0))
ds = dataset_ops.Dataset.zip((a, b))
ds = self._make_dynamic_sharding_dataset(ds, cluster)
self.assertLen(self.getDatasetOutput(ds), 10)
@combinations.generate(test_base.default_test_combinations())
def testZipDifferentRepeats(self):
cluster = data_service_test_base.TestCluster(num_workers=3)
a = dataset_ops.Dataset.range(50)
b = dataset_ops.Dataset.range(10).repeat(10)
ds = dataset_ops.Dataset.zip((a, b))
ds = self._make_dynamic_sharding_dataset(ds, cluster)
self.assertLen(self.getDatasetOutput(ds), 50)
@combinations.generate(test_base.default_test_combinations())
def testSampleFromDatasets(self):
cluster = data_service_test_base.TestCluster(num_workers=3)
num_samples = 200
weights = [.6, .3, .1]
classes = len(weights)
# Create a dataset that samples each integer in `[0, num_datasets)`
# with probability given by `weights[i]`.
ds = dataset_ops.Dataset.sample_from_datasets(
[dataset_ops.Dataset.from_tensors(i).repeat() for i in range(classes)],
weights)
ds = self._make_dynamic_sharding_dataset(ds, cluster)
ds = ds.take(num_samples)
freqs = np.zeros([classes])
for v in self.getDatasetOutput(ds):
freqs[v] += 1
self.assertGreater(freqs[0], freqs[1])
self.assertGreater(freqs[1], freqs[2])
@combinations.generate(
combinations.times(test_base.default_test_combinations(),
combinations.combine(num_workers=[1, 3])))
def testChooseFromDatasets(self, num_workers):
cluster = data_service_test_base.TestCluster(num_workers=num_workers)
words = [b"foo", b"bar", b"baz"]
datasets = [dataset_ops.Dataset.from_tensors(w).repeat() for w in words]
choice_array = np.random.randint(3, size=(15,), dtype=np.int64)
choice_dataset = dataset_ops.Dataset.from_tensor_slices(choice_array)
ds = dataset_ops.Dataset.choose_from_datasets(datasets, choice_dataset)
ds = self._make_dynamic_sharding_dataset(ds, cluster)
expected = [words[i] for i in choice_array] * num_workers
assert_items_equal = (num_workers > 1)
self.assertDatasetProduces(
ds, expected, assert_items_equal=assert_items_equal)
@combinations.generate(
combinations.times(test_base.default_test_combinations()))
def testEnumerateReplicateOnSplit(self):
num_workers = 3
cluster = data_service_test_base.TestCluster(num_workers)
ds = dataset_ops.Dataset.from_tensor_slices(["a", "b", "c"]).repeat()
ds = ds.enumerate()
ds = self._make_dynamic_sharding_dataset(ds, cluster)
get_next = self.getNext(ds)
counts = collections.defaultdict(int)
while True:
i, _ = self.evaluate(get_next())
counts[i] += 1
# Read until all workers have reached enumeration index 10.
if counts[10] == num_workers:
break
for i in range(10):
self.assertEqual(counts[i], num_workers)
@combinations.generate(
combinations.times(test_base.default_test_combinations(),
combinations.combine(num_workers=[1, 3])))
def testConcatenate(self, num_workers):
cluster = data_service_test_base.TestCluster(num_workers=num_workers)
a = dataset_ops.Dataset.range(100)
b = dataset_ops.Dataset.range(100, 200)
ds = a.concatenate(b)
ds = self._make_dynamic_sharding_dataset(ds, cluster)
assert_items_equal = (num_workers > 1)
self.assertDatasetProduces(
ds, list(range(200)), assert_items_equal=assert_items_equal)
@combinations.generate(
combinations.times(test_base.default_test_combinations(),
combinations.combine(already_written=[True, False])))
def testSnapshot(self, already_written):
num_workers = 3
cluster = data_service_test_base.TestCluster(num_workers=num_workers)
ds = dataset_ops.Dataset.range(100)
ds = ds.snapshot(self.get_temp_dir())
if already_written:
# Materialize the snapshot.
self.getDatasetOutput(ds)
ds = self._make_dynamic_sharding_dataset(ds, cluster)
error_regex = "Splitting is not implemented for snapshot datasets"
with self.assertRaisesRegex(errors.UnimplementedError, error_regex):
self.getDatasetOutput(ds)
@combinations.generate(test_base.default_test_combinations())
def testDistributedDataset(self):
cluster_1 = data_service_test_base.TestCluster(num_workers=1)
cluster_2 = data_service_test_base.TestCluster(num_workers=1)
num_sizes = 10
size_repeats = 5
numbers = [1 * i for i in range(num_sizes)] * size_repeats
ds = dataset_ops.Dataset.from_tensor_slices(numbers)
ds = self.make_distributed_dataset(
ds, cluster_1, processing_mode=data_service_ops.ShardingPolicy.OFF)
ds = ds.map(lambda x: x + 1)
ds = self._make_dynamic_sharding_dataset(ds, cluster_2)
error_regex = ("Cannot create split providers for dataset " +
"of type DataServiceDataset")
with self.assertRaisesRegex(errors.UnimplementedError, error_regex):
self.getDatasetOutput(ds)
@combinations.generate(test_base.default_test_combinations())
def testDistributedEpoch(self):
cluster = data_service_test_base.TestCluster(num_workers=2)
num_elements = 100
ds = dataset_ops.Dataset.range(num_elements)
ds = self.make_distributed_dataset(
ds, cluster, processing_mode="distributed_epoch")
self.assertDatasetProduces(
ds, list(range(num_elements)), assert_items_equal=True)
@combinations.generate(test_base.default_test_combinations())
def testFlatMapWithRepeat(self):
cluster = data_service_test_base.TestCluster(num_workers=3)
ds = dataset_ops.Dataset.range(5)
def flat_map_fn(_):
return dataset_ops.Dataset.from_tensor_slices(["a", "b", "c"]).repeat(10)
ds = ds.flat_map(flat_map_fn)
ds = self._make_dynamic_sharding_dataset(ds, cluster)
self.assertDatasetProduces(
ds, [b"a", b"b", b"c"] * 50, assert_items_equal=True)
class DynamicShardingFilesTest(data_service_test_base.TestBase,
tf_record_test_base.TFRecordTestBase,
parameterized.TestCase):
def setUp(self):
super(DynamicShardingFilesTest, self).setUp()
self._num_files = 5
self._num_records = 5
self._filenames = self._createFiles()
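  # With dynamic sharding over a file-based dataset, the dispatcher hands out
  # source splits (here, individual files) to workers on request, so which
  # worker reads which file is nondeterministic; the test below therefore
  # compares outputs with assert_items_equal rather than by element order.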
@combinations.generate(test_base.default_test_combinations())
def testShuffleFiles(self):
cluster = data_service_test_base.TestCluster(num_workers=3)
shuffled_filenames = random_ops.random_shuffle(self._filenames)
dataset = dataset_ops.Dataset.from_tensor_slices(shuffled_filenames)
dataset = dataset.interleave(readers.TFRecordDataset)
dataset = self.make_distributed_dataset(
dataset,
cluster=cluster,
processing_mode=data_service_ops.ShardingPolicy.DYNAMIC)
# pylint:disable=g-complex-comprehension
expected = [
b"Record %d of file %d" % (record, file)
for file in range(0, 5)
for record in range(0, 5)
]
self.assertDatasetProduces(
dataset,
expected,
requires_initialization=True,
assert_items_equal=True)
if __name__ == "__main__":
test.main()
| apache-2.0 |
heli522/scikit-learn | examples/gaussian_process/plot_gp_regression.py | 252 | 4054 | #!/usr/bin/python
# -*- coding: utf-8 -*-
r"""
=========================================================
Gaussian Processes regression: basic introductory example
=========================================================
A simple one-dimensional regression exercise computed in two different ways:
1. A noise-free case with a cubic correlation model
2. A noisy case with a squared exponential correlation model
In both cases, the model parameters are estimated using the maximum
likelihood principle.
The figures illustrate the interpolating property of the Gaussian Process
model as well as its probabilistic nature in the form of a pointwise 95%
confidence interval.
Note that the parameter ``nugget`` is applied as a Tikhonov regularization
of the assumed covariance between the training points. In the special case
of the squared exponential correlation model, the nugget is mathematically
equivalent to a normalized variance; that is,
.. math::
\mathrm{nugget}_i = \left[\frac{\sigma_i}{y_i}\right]^2
"""
print(__doc__)
# Author: Vincent Dubourg <vincent.dubourg@gmail.com>
# Jake Vanderplas <vanderplas@astro.washington.edu>
# Licence: BSD 3 clause
import numpy as np
from sklearn.gaussian_process import GaussianProcess
from matplotlib import pyplot as pl
np.random.seed(1)
def f(x):
"""The function to predict."""
return x * np.sin(x)
#----------------------------------------------------------------------
# First the noiseless case
X = np.atleast_2d([1., 3., 5., 6., 7., 8.]).T
# Observations
y = f(X).ravel()
# Mesh the input space for evaluations of the real function, the prediction and
# its MSE
x = np.atleast_2d(np.linspace(0, 10, 1000)).T
# Instantiate a Gaussian Process model
gp = GaussianProcess(corr='cubic', theta0=1e-2, thetaL=1e-4, thetaU=1e-1,
random_start=100)
# Fit to data using Maximum Likelihood Estimation of the parameters
gp.fit(X, y)
# Make the prediction on the meshed x-axis (ask for MSE as well)
y_pred, MSE = gp.predict(x, eval_MSE=True)
sigma = np.sqrt(MSE)
# Plot the function, the prediction and the 95% confidence interval based on
# the MSE
fig = pl.figure()
pl.plot(x, f(x), 'r:', label=r'$f(x) = x\,\sin(x)$')
pl.plot(X, y, 'r.', markersize=10, label=u'Observations')
pl.plot(x, y_pred, 'b-', label=u'Prediction')
pl.fill(np.concatenate([x, x[::-1]]),
np.concatenate([y_pred - 1.9600 * sigma,
(y_pred + 1.9600 * sigma)[::-1]]),
alpha=.5, fc='b', ec='None', label='95% confidence interval')
pl.xlabel('$x$')
pl.ylabel('$f(x)$')
pl.ylim(-10, 20)
pl.legend(loc='upper left')
#----------------------------------------------------------------------
# Now the noisy case
X = np.linspace(0.1, 9.9, 20)
X = np.atleast_2d(X).T
# Observations and noise
y = f(X).ravel()
dy = 0.5 + 1.0 * np.random.random(y.shape)
noise = np.random.normal(0, dy)
y += noise
# Mesh the input space for evaluations of the real function, the prediction and
# its MSE
x = np.atleast_2d(np.linspace(0, 10, 1000)).T
# Instantiate a Gaussian Process model
gp = GaussianProcess(corr='squared_exponential', theta0=1e-1,
thetaL=1e-3, thetaU=1,
nugget=(dy / y) ** 2,
random_start=100)
# Fit to data using Maximum Likelihood Estimation of the parameters
gp.fit(X, y)
# Make the prediction on the meshed x-axis (ask for MSE as well)
y_pred, MSE = gp.predict(x, eval_MSE=True)
sigma = np.sqrt(MSE)
# Plot the function, the prediction and the 95% confidence interval based on
# the MSE
fig = pl.figure()
pl.plot(x, f(x), 'r:', label=r'$f(x) = x\,\sin(x)$')
pl.errorbar(X.ravel(), y, dy, fmt='r.', markersize=10, label=u'Observations')
pl.plot(x, y_pred, 'b-', label=u'Prediction')
pl.fill(np.concatenate([x, x[::-1]]),
np.concatenate([y_pred - 1.9600 * sigma,
(y_pred + 1.9600 * sigma)[::-1]]),
alpha=.5, fc='b', ec='None', label='95% confidence interval')
pl.xlabel('$x$')
pl.ylabel('$f(x)$')
pl.ylim(-10, 20)
pl.legend(loc='upper left')
pl.show()
| bsd-3-clause |
DonBeo/scikit-learn | benchmarks/bench_mnist.py | 153 | 6006 | """
=======================
MNIST dataset benchmark
=======================
Benchmark on the MNIST dataset. The dataset comprises 70,000 samples
and 784 features. Here, we consider the task of predicting
10 classes - digits from 0 to 9 - from their raw images. By contrast to the
covertype dataset, the feature space is homogeneous.
Example of output:
[..]
Classification performance:
===========================
Classifier               train-time   test-time  error-rate
------------------------------------------------------------
Nystroem-SVM 105.07s 0.91s 0.0227
ExtraTrees 48.20s 1.22s 0.0288
RandomForest 47.17s 1.21s 0.0304
SampledRBF-SVM 140.45s 0.84s 0.0486
CART 22.84s 0.16s 0.1214
dummy 0.01s 0.02s 0.8973
"""
from __future__ import division, print_function
# Author: Issam H. Laradji
# Arnaud Joly <arnaud.v.joly@gmail.com>
# License: BSD 3 clause
import os
from time import time
import argparse
import numpy as np
from sklearn.datasets import fetch_mldata
from sklearn.datasets import get_data_home
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.dummy import DummyClassifier
from sklearn.externals.joblib import Memory
from sklearn.kernel_approximation import Nystroem
from sklearn.kernel_approximation import RBFSampler
from sklearn.metrics import zero_one_loss
from sklearn.pipeline import make_pipeline
from sklearn.svm import LinearSVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.utils import check_array
# Memoize the data extraction and memory map the resulting
# train / test splits in readonly mode
memory = Memory(os.path.join(get_data_home(), 'mnist_benchmark_data'),
mmap_mode='r')
@memory.cache
def load_data(dtype=np.float32, order='F'):
"""Load the data, then cache and memmap the train/test split"""
######################################################################
## Load dataset
print("Loading dataset...")
data = fetch_mldata('MNIST original')
X = check_array(data['data'], dtype=dtype, order=order)
y = data["target"]
# Normalize features
X = X / 255
## Create train-test split (as [Joachims, 2006])
print("Creating train-test split...")
n_train = 60000
X_train = X[:n_train]
y_train = y[:n_train]
X_test = X[n_train:]
y_test = y[n_train:]
return X_train, X_test, y_train, y_test
ESTIMATORS = {
"dummy": DummyClassifier(),
'CART': DecisionTreeClassifier(),
'ExtraTrees': ExtraTreesClassifier(n_estimators=100),
'RandomForest': RandomForestClassifier(n_estimators=100),
'Nystroem-SVM':
make_pipeline(Nystroem(gamma=0.015, n_components=1000), LinearSVC(C=100)),
'SampledRBF-SVM':
make_pipeline(RBFSampler(gamma=0.015, n_components=1000), LinearSVC(C=100))
}
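# Note on the two kernel approximations above: both emulate an RBF-kernel SVM
# with a linear solver. Nystroem builds a data-dependent low-rank feature map
# from sampled training points, while RBFSampler uses data-independent random
# Fourier features; at equal n_components, Nystroem tends to be the more
# accurate of the two, consistent with the sample output in the docstring.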
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--classifiers', nargs="+",
choices=ESTIMATORS, type=str,
default=['ExtraTrees', 'Nystroem-SVM'],
help="list of classifiers to benchmark.")
parser.add_argument('--n-jobs', nargs="?", default=1, type=int,
help="Number of concurrently running workers for "
"models that support parallelism.")
parser.add_argument('--order', nargs="?", default="C", type=str,
choices=["F", "C"],
help="Allow to choose between fortran and C ordered "
"data")
parser.add_argument('--random-seed', nargs="?", default=0, type=int,
help="Common seed used by random number generator.")
args = vars(parser.parse_args())
print(__doc__)
X_train, X_test, y_train, y_test = load_data(order=args["order"])
print("")
print("Dataset statistics:")
print("===================")
print("%s %d" % ("number of features:".ljust(25), X_train.shape[1]))
print("%s %d" % ("number of classes:".ljust(25), np.unique(y_train).size))
print("%s %s" % ("data type:".ljust(25), X_train.dtype))
print("%s %d (size=%dMB)" % ("number of train samples:".ljust(25),
X_train.shape[0], int(X_train.nbytes / 1e6)))
print("%s %d (size=%dMB)" % ("number of test samples:".ljust(25),
X_test.shape[0], int(X_test.nbytes / 1e6)))
print()
print("Training Classifiers")
print("====================")
error, train_time, test_time = {}, {}, {}
for name in sorted(args["classifiers"]):
print("Training %s ... " % name, end="")
estimator = ESTIMATORS[name]
estimator_params = estimator.get_params()
estimator.set_params(**{p: args["random_seed"]
for p in estimator_params
if p.endswith("random_state")})
if "n_jobs" in estimator_params:
estimator.set_params(n_jobs=args["n_jobs"])
time_start = time()
estimator.fit(X_train, y_train)
train_time[name] = time() - time_start
time_start = time()
y_pred = estimator.predict(X_test)
test_time[name] = time() - time_start
error[name] = zero_one_loss(y_test, y_pred)
print("done")
print()
print("Classification performance:")
print("===========================")
print("{0: <24} {1: >10} {2: >11} {3: >12}"
"".format("Classifier ", "train-time", "test-time", "error-rate"))
print("-" * 60)
for name in sorted(args["classifiers"], key=error.get):
print("{0: <23} {1: >10.2f}s {2: >10.2f}s {3: >12.4f}"
"".format(name, train_time[name], test_time[name], error[name]))
print()
| bsd-3-clause |
tapomayukh/projects_in_python | classification/Classification_with_kNN/Single_Contact_Classification/Time_Window/test10_cross_validate_objects_800ms.py | 1 | 4257 |
# Principal Component Analysis code:
from numpy import mean,cov,double,cumsum,dot,linalg,array,rank,size,flipud
from pylab import *
import numpy as np
import matplotlib.pyplot as pp
#from enthought.mayavi import mlab
import scipy.ndimage as ni
import roslib; roslib.load_manifest('sandbox_tapo_darpa_m3')
import rospy
#import hrl_lib.mayavi2_util as mu
import hrl_lib.viz as hv
import hrl_lib.util as ut
import hrl_lib.matplotlib_util as mpu
import pickle
from mvpa.clfs.knn import kNN
from mvpa.datasets import Dataset
from mvpa.clfs.transerror import TransferError
from mvpa.misc.data_generators import normalFeatureDataset
from mvpa.algorithms.cvtranserror import CrossValidatedTransferError
from mvpa.datasets.splitters import NFoldSplitter
import sys
sys.path.insert(0, '/home/tapo/svn/robot1_data/usr/tapo/data_code/Classification/Data/Single_Contact_kNN/Window')
from data_800ms import Fmat_original
def pca(X):
#get dimensions
num_data,dim = X.shape
#center data
mean_X = X.mean(axis=1)
M = (X-mean_X) # subtract the mean (along columns)
Mcov = cov(M)
print 'PCA - COV-Method used'
val,vec = linalg.eig(Mcov)
#return the projection matrix, the variance and the mean
return vec,val,mean_X, M, Mcov
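# Minimal usage sketch for pca() (assuming a data matrix whose per-row means
# broadcast against its columns, as the centering step above requires):
#   vec, val, mu, B, C = pca(Fmat)
#   # 'vec' holds the eigenvectors of the covariance matrix 'C', 'val' the
#   # corresponding eigenvalues, and 'B' the mean-centered data.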
if __name__ == '__main__':
Fmat = Fmat_original
# Checking the Data-Matrix
m_tot, n_tot = np.shape(Fmat)
print 'Total_Matrix_Shape:',m_tot,n_tot
eigvec_total, eigval_total, mean_data_total, B, C = pca(Fmat)
#print eigvec_total
#print eigval_total
#print mean_data_total
m_eigval_total, n_eigval_total = np.shape(np.matrix(eigval_total))
m_eigvec_total, n_eigvec_total = np.shape(eigvec_total)
m_mean_data_total, n_mean_data_total = np.shape(np.matrix(mean_data_total))
print 'Eigenvalue Shape:',m_eigval_total, n_eigval_total
print 'Eigenvector Shape:',m_eigvec_total, n_eigvec_total
print 'Mean-Data Shape:',m_mean_data_total, n_mean_data_total
#Recall that the cumulative sum of the eigenvalues shows the level of variance accounted by each of the corresponding eigenvectors. On the x axis there is the number of eigenvalues used.
perc_total = cumsum(eigval_total)/sum(eigval_total)
    # Reduced eigenvector matrix according to the highest eigenvalues
    # (keeping the first 9 principal components)
    W = eigvec_total[:,0:9]
m_W, n_W = np.shape(W)
print 'Reduced Dimension Eigenvector Shape:',m_W, n_W
# Normalizes the data set with respect to its variance (Not an Integral part of PCA, but useful)
length = len(eigval_total)
s = np.matrix(np.zeros(length)).T
i = 0
while i < length:
s[i] = sqrt(C[i,i])
i = i+1
Z = np.divide(B,s)
m_Z, n_Z = np.shape(Z)
print 'Z-Score Shape:', m_Z, n_Z
#Projected Data:
Y = (W.T)*B # 'B' for my Laptop: otherwise 'Z' instead of 'B'
m_Y, n_Y = np.shape(Y.T)
print 'Transposed Projected Data Shape:', m_Y, n_Y
#Using PYMVPA
PCA_data = np.array(Y.T)
PCA_label_2 = ['Styrofoam-Fixed']*5 + ['Books-Fixed']*5 + ['Bucket-Fixed']*5 + ['Bowl-Fixed']*5 + ['Can-Fixed']*5 + ['Box-Fixed']*5 + ['Pipe-Fixed']*5 + ['Styrofoam-Movable']*5 + ['Container-Movable']*5 + ['Books-Movable']*5 + ['Cloth-Roll-Movable']*5 + ['Black-Rubber-Movable']*5 + ['Can-Movable']*5 + ['Box-Movable']*5 + ['Rug-Fixed']*5 + ['Bubble-Wrap-1-Fixed']*5 + ['Pillow-1-Fixed']*5 + ['Bubble-Wrap-2-Fixed']*5 + ['Sponge-Fixed']*5 + ['Foliage-Fixed']*5 + ['Pillow-2-Fixed']*5 + ['Rug-Movable']*5 + ['Bubble-Wrap-1-Movable']*5 + ['Pillow-1-Movable']*5 + ['Bubble-Wrap-2-Movable']*5 + ['Pillow-2-Movable']*5 + ['Plush-Toy-Movable']*5 + ['Sponge-Movable']*5
clf = kNN(k=1)
terr = TransferError(clf)
ds1 = Dataset(samples=PCA_data,labels=PCA_label_2)
print ds1.samples.shape
cvterr = CrossValidatedTransferError(terr,NFoldSplitter(cvtype=1),enable_states=['confusion'])
error = cvterr(ds1)
print error
print cvterr.confusion.asstring(description=False)
figure(1)
cvterr.confusion.plot(numbers='True',numbers_alpha=2)
#show()
# Variances
figure(2)
title('Variances of PCs')
stem(range(len(perc_total)),perc_total,'--b')
axis([-0.3,130.3,0,1.2])
grid('True')
show()
| mit |
DonBeo/scikit-learn | sklearn/decomposition/tests/test_fastica.py | 14 | 7877 | """
Test the fastica algorithm.
"""
import itertools
import warnings
import numpy as np
from scipy import stats
from nose.tools import assert_raises
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_warns
from sklearn.decomposition import FastICA, fastica, PCA
from sklearn.decomposition.fastica_ import _gs_decorrelation
from sklearn.externals.six import moves
def center_and_norm(x, axis=-1):
""" Centers and norms x **in place**
Parameters
-----------
x: ndarray
Array with an axis of observations (statistical units) measured on
random variables.
axis: int, optional
Axis along which the mean and variance are calculated.
"""
x = np.rollaxis(x, axis)
x -= x.mean(axis=0)
x /= x.std(axis=0)
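# Small in-place illustration (hypothetical array): after
#   a = np.random.randn(3, 5); center_and_norm(a)
# each row of 'a' has been rescaled in place to zero mean and unit standard
# deviation, since the default axis=-1 normalizes along the last axis.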
def test_gs():
# Test gram schmidt orthonormalization
# generate a random orthogonal matrix
rng = np.random.RandomState(0)
W, _, _ = np.linalg.svd(rng.randn(10, 10))
w = rng.randn(10)
_gs_decorrelation(w, W, 10)
assert_less((w ** 2).sum(), 1.e-10)
w = rng.randn(10)
u = _gs_decorrelation(w, W, 5)
tmp = np.dot(u, W.T)
assert_less((tmp[:5] ** 2).sum(), 1.e-10)
def test_fastica_simple(add_noise=False):
# Test the FastICA algorithm on very simple data.
rng = np.random.RandomState(0)
# scipy.stats uses the global RNG:
np.random.seed(0)
n_samples = 1000
# Generate two sources:
s1 = (2 * np.sin(np.linspace(0, 100, n_samples)) > 0) - 1
s2 = stats.t.rvs(1, size=n_samples)
s = np.c_[s1, s2].T
center_and_norm(s)
s1, s2 = s
# Mixing angle
phi = 0.6
mixing = np.array([[np.cos(phi), np.sin(phi)],
[np.sin(phi), -np.cos(phi)]])
m = np.dot(mixing, s)
if add_noise:
m += 0.1 * rng.randn(2, 1000)
center_and_norm(m)
# function as fun arg
def g_test(x):
return x ** 3, (3 * x ** 2).mean(axis=-1)
algos = ['parallel', 'deflation']
nls = ['logcosh', 'exp', 'cube', g_test]
whitening = [True, False]
for algo, nl, whiten in itertools.product(algos, nls, whitening):
if whiten:
k_, mixing_, s_ = fastica(m.T, fun=nl, algorithm=algo)
assert_raises(ValueError, fastica, m.T, fun=np.tanh,
algorithm=algo)
else:
X = PCA(n_components=2, whiten=True).fit_transform(m.T)
k_, mixing_, s_ = fastica(X, fun=nl, algorithm=algo, whiten=False)
assert_raises(ValueError, fastica, X, fun=np.tanh,
algorithm=algo)
s_ = s_.T
# Check that the mixing model described in the docstring holds:
if whiten:
assert_almost_equal(s_, np.dot(np.dot(mixing_, k_), m))
center_and_norm(s_)
s1_, s2_ = s_
# Check to see if the sources have been estimated
# in the wrong order
if abs(np.dot(s1_, s2)) > abs(np.dot(s1_, s1)):
s2_, s1_ = s_
s1_ *= np.sign(np.dot(s1_, s1))
s2_ *= np.sign(np.dot(s2_, s2))
# Check that we have estimated the original sources
if not add_noise:
assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=2)
assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=2)
else:
assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=1)
assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=1)
# Test FastICA class
_, _, sources_fun = fastica(m.T, fun=nl, algorithm=algo, random_state=0)
ica = FastICA(fun=nl, algorithm=algo, random_state=0)
sources = ica.fit_transform(m.T)
assert_equal(ica.components_.shape, (2, 2))
assert_equal(sources.shape, (1000, 2))
assert_array_almost_equal(sources_fun, sources)
assert_array_almost_equal(sources, ica.transform(m.T))
assert_equal(ica.mixing_.shape, (2, 2))
for fn in [np.tanh, "exp(-.5(x^2))"]:
ica = FastICA(fun=fn, algorithm=algo, random_state=0)
assert_raises(ValueError, ica.fit, m.T)
assert_raises(TypeError, FastICA(fun=moves.xrange(10)).fit, m.T)
def test_fastica_nowhiten():
m = [[0, 1], [1, 0]]
# test for issue #697
ica = FastICA(n_components=1, whiten=False, random_state=0)
assert_warns(UserWarning, ica.fit, m)
assert_true(hasattr(ica, 'mixing_'))
def test_non_square_fastica(add_noise=False):
# Test the FastICA algorithm on very simple data.
rng = np.random.RandomState(0)
n_samples = 1000
# Generate two sources:
t = np.linspace(0, 100, n_samples)
s1 = np.sin(t)
s2 = np.ceil(np.sin(np.pi * t))
s = np.c_[s1, s2].T
center_and_norm(s)
s1, s2 = s
# Mixing matrix
mixing = rng.randn(6, 2)
m = np.dot(mixing, s)
if add_noise:
m += 0.1 * rng.randn(6, n_samples)
center_and_norm(m)
k_, mixing_, s_ = fastica(m.T, n_components=2, random_state=rng)
s_ = s_.T
# Check that the mixing model described in the docstring holds:
assert_almost_equal(s_, np.dot(np.dot(mixing_, k_), m))
center_and_norm(s_)
s1_, s2_ = s_
# Check to see if the sources have been estimated
# in the wrong order
if abs(np.dot(s1_, s2)) > abs(np.dot(s1_, s1)):
s2_, s1_ = s_
s1_ *= np.sign(np.dot(s1_, s1))
s2_ *= np.sign(np.dot(s2_, s2))
# Check that we have estimated the original sources
if not add_noise:
assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=3)
assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=3)
def test_fit_transform():
# Test FastICA.fit_transform
rng = np.random.RandomState(0)
X = rng.random_sample((100, 10))
for whiten, n_components in [[True, 5], [False, None]]:
n_components_ = (n_components if n_components is not None else
X.shape[1])
ica = FastICA(n_components=n_components, whiten=whiten, random_state=0)
Xt = ica.fit_transform(X)
assert_equal(ica.components_.shape, (n_components_, 10))
assert_equal(Xt.shape, (100, n_components_))
ica = FastICA(n_components=n_components, whiten=whiten, random_state=0)
ica.fit(X)
assert_equal(ica.components_.shape, (n_components_, 10))
Xt2 = ica.transform(X)
assert_array_almost_equal(Xt, Xt2)
def test_inverse_transform():
# Test FastICA.inverse_transform
n_features = 10
n_samples = 100
n1, n2 = 5, 10
rng = np.random.RandomState(0)
X = rng.random_sample((n_samples, n_features))
expected = {(True, n1): (n_features, n1),
(True, n2): (n_features, n2),
(False, n1): (n_features, n2),
(False, n2): (n_features, n2)}
for whiten in [True, False]:
for n_components in [n1, n2]:
n_components_ = (n_components if n_components is not None else
X.shape[1])
ica = FastICA(n_components=n_components, random_state=rng,
whiten=whiten)
with warnings.catch_warnings(record=True):
# catch "n_components ignored" warning
Xt = ica.fit_transform(X)
expected_shape = expected[(whiten, n_components_)]
assert_equal(ica.mixing_.shape, expected_shape)
X2 = ica.inverse_transform(Xt)
assert_equal(X.shape, X2.shape)
# reversibility test in non-reduction case
if n_components == X.shape[1]:
assert_array_almost_equal(X, X2)
if __name__ == '__main__':
import nose
nose.run(argv=['', __file__])
| bsd-3-clause |
DonBeo/scikit-learn | examples/cluster/plot_cluster_iris.py | 347 | 2593 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
=========================================================
K-means Clustering
=========================================================
The plots display, first, what a K-means algorithm would yield
using three clusters. They then show the effect of a bad
initialization on the classification process:
by setting n_init to only 1 (the default is 10), the number of
times the algorithm is run with different centroid
seeds is reduced.
The next plot displays what using eight clusters would deliver,
and finally the ground truth.
"""
print(__doc__)
# Code source: Gaël Varoquaux
# Modified for documentation by Jaques Grobler
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from sklearn.cluster import KMeans
from sklearn import datasets
np.random.seed(5)
centers = [[1, 1], [-1, -1], [1, -1]]
iris = datasets.load_iris()
X = iris.data
y = iris.target
estimators = {'k_means_iris_3': KMeans(n_clusters=3),
'k_means_iris_8': KMeans(n_clusters=8),
'k_means_iris_bad_init': KMeans(n_clusters=3, n_init=1,
init='random')}
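# The 'bad init' configuration is deliberately fragile: with n_init=1 and
# plain random seeding instead of k-means++, a single unlucky draw of initial
# centroids cannot be outvoted by restarts, so its partition often differs
# visibly from the three-cluster run above.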
fignum = 1
for name, est in estimators.items():
fig = plt.figure(fignum, figsize=(4, 3))
plt.clf()
ax = Axes3D(fig, rect=[0, 0, .95, 1], elev=48, azim=134)
plt.cla()
est.fit(X)
labels = est.labels_
ax.scatter(X[:, 3], X[:, 0], X[:, 2], c=labels.astype(np.float))
ax.w_xaxis.set_ticklabels([])
ax.w_yaxis.set_ticklabels([])
ax.w_zaxis.set_ticklabels([])
ax.set_xlabel('Petal width')
ax.set_ylabel('Sepal length')
ax.set_zlabel('Petal length')
fignum = fignum + 1
# Plot the ground truth
fig = plt.figure(fignum, figsize=(4, 3))
plt.clf()
ax = Axes3D(fig, rect=[0, 0, .95, 1], elev=48, azim=134)
plt.cla()
for name, label in [('Setosa', 0),
('Versicolour', 1),
('Virginica', 2)]:
ax.text3D(X[y == label, 3].mean(),
X[y == label, 0].mean() + 1.5,
X[y == label, 2].mean(), name,
horizontalalignment='center',
bbox=dict(alpha=.5, edgecolor='w', facecolor='w'))
# Reorder the labels to have colors matching the cluster results
y = np.choose(y, [1, 2, 0]).astype(np.float)
ax.scatter(X[:, 3], X[:, 0], X[:, 2], c=y)
ax.w_xaxis.set_ticklabels([])
ax.w_yaxis.set_ticklabels([])
ax.w_zaxis.set_ticklabels([])
ax.set_xlabel('Petal width')
ax.set_ylabel('Sepal length')
ax.set_zlabel('Petal length')
plt.show()
| bsd-3-clause |
raphaelrubino/nid | nn/bi/binid.py | 1 | 6515 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import numpy as np
np.random.seed(1337)
import theano
from keras.models import Model
from keras.layers import Input, Embedding, Dropout, Dense, GlobalAveragePooling1D, concatenate, Flatten
from keras.preprocessing import sequence
from keras.utils import np_utils
from sklearn.utils import shuffle
import time
import sys
class Bilingual_neural_information_density():
def __init__( self, src_corpus, src_max_features, src_max_length, trg_context, trg_target, trg_max_features, trg_max_length, batch_size, valid_size ):
self.src_corpus = src_corpus
self.src_max_features = src_max_features
self.src_max_length = src_max_length
self.trg_context = trg_context
self.trg_target = trg_target
self.batch = batch_size
self.valid_size = valid_size
trg_nb_instances = len( self.trg_context )
print( "Instances: {0}".format( trg_nb_instances ), flush = True )
self.valid_size = np.int( valid_size * trg_nb_instances )
self.train_size = np.int( trg_nb_instances - self.valid_size )
self.trg_max_features = trg_max_features
self.trg_max_length = trg_max_length
self.model = -1
self.build_train_valid()
self.build_batched_data()
def build_train_valid( self ):
self.src_corpus, self.trg_context, self.trg_target = shuffle( self.src_corpus, self.trg_context, self.trg_target )
self.src_corpus_train = self.src_corpus[ :self.train_size ]
self.src_corpus_valid = self.src_corpus[ self.train_size: ]
self.trg_context_train = self.trg_context[ :self.train_size ]
self.trg_target_train = self.trg_target[ :self.train_size ]
self.trg_context_valid = self.trg_context[ self.train_size: ]
self.trg_target_valid = self.trg_target[ self.train_size: ]
def build_batched_data( self ):
self.batch_src_corpus_train = np.asarray( [ np.asarray( self.src_corpus_train[ x : x + self.batch ] ) for x in range( 0, len( self.src_corpus_train ), self.batch ) ] )
self.batch_src_corpus_valid = np.asarray( [ np.asarray( self.src_corpus_valid[ x : x + self.batch ] ) for x in range( 0, len( self.src_corpus_valid ), self.batch ) ] )
self.batch_trg_context_train = np.asarray( [ np.asarray( self.trg_context_train[ x : x + self.batch ] ) for x in range( 0, len( self.trg_context_train ), self.batch ) ] )
self.batch_trg_target_train = np.asarray( [ np.asarray( self.trg_target_train[ x : x + self.batch ] ) for x in range( 0, len( self.trg_target_train ), self.batch ) ] )
self.batch_trg_context_valid = np.asarray( [ np.asarray( self.trg_context_valid[ x : x + self.batch ] ) for x in range( 0, len( self.trg_context_valid ), self.batch ) ] )
self.batch_trg_target_valid = np.asarray( [ np.asarray( self.trg_target_valid[ x : x + self.batch ] ) for x in range( 0, len( self.trg_target_valid ), self.batch ) ] )
def get_model( self ):
return self.model
def save_architecture( self, filename ):
with open( filename + '.architecture.json', "w" ) as f:
f.write( self.model.to_json() )
def save_weights( self, filename ):
self.model.save_weights( filename + '.weights.h5', overwrite = True )
def get_default_model( self, src_embedding, trg_embedding, dropout ):
input_src_sentence = Input( shape = ( self.src_max_length, ), dtype = 'int32', name = 'input_src_sentence' )
emb_src_sentence = Embedding( input_dim = self.src_max_features, output_dim = src_embedding, input_length = self.src_max_length )( input_src_sentence )
pool_src_sentence = GlobalAveragePooling1D()( emb_src_sentence )
drop_src_sentence = Dropout( dropout )( pool_src_sentence )
input_trg_context = Input( shape = ( self.trg_max_length, ), dtype = 'int32', name = 'input_trg_context' )
emb_trg_context = Embedding( input_dim = self.trg_max_features, output_dim = trg_embedding, input_length = self.trg_max_length )( input_trg_context )
flat_trg_context = Flatten()( emb_trg_context )
drop_trg_context = Dropout( dropout )( flat_trg_context )
concat = concatenate( [ drop_src_sentence, drop_trg_context ] )
output = Dense( self.trg_max_features, activation = 'softmax', name = 'output' )( concat )
model = Model( inputs = [ input_src_sentence, input_trg_context ], outputs = output )
return model
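	# Shape sketch for the model above (batch dimension omitted): the source
	# sentence enters as src_max_length token ids, is embedded and average-
	# pooled into a src_embedding vector; the target context enters as
	# trg_max_length ids, is embedded and flattened into
	# trg_max_length * trg_embedding values; the two are concatenated and fed
	# to a softmax over the trg_max_features target vocabulary.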
def train_model( self ):
train_loss = 0.0
train_acc = 0.0
for j in range( self.batch_trg_target_train.shape[ 0 ] ):
loss, metrics = self.model.train_on_batch( \
{ 'input_src_sentence': sequence.pad_sequences( self.batch_src_corpus_train[ j ], \
maxlen = self.src_max_length, dtype = 'int32', padding = 'post', value = 2 ) , \
'input_trg_context': self.batch_trg_context_train[ j ] }, \
{ 'output': np_utils.to_categorical( self.batch_trg_target_train[ j ], \
num_classes = self.trg_max_features ) } )
train_loss += loss
train_acc += metrics
		train_loss /= ( j + 1 )
		train_acc /= ( j + 1 )
return train_loss, train_acc
def valid_model( self ):
valid_loss = 0.0
valid_acc = 0.0
for k in range( self.batch_trg_target_valid.shape[ 0 ] ):
loss, metrics = self.model.test_on_batch( \
{ 'input_src_sentence': sequence.pad_sequences( self.batch_src_corpus_valid[ k ], \
maxlen = self.src_max_length, dtype = 'int32', padding = 'post', value = 2 ), \
'input_trg_context': self.batch_trg_context_valid[ k ] }, \
{ 'output': np_utils.to_categorical( self.batch_trg_target_valid[ k ], \
num_classes = self.trg_max_features ) } )
valid_loss += loss
valid_acc += metrics
		valid_loss /= ( k + 1 )
		valid_acc /= ( k + 1 )
return valid_loss, valid_acc
def train( self, src_embedding_size, trg_embedding_size, dropout, nb_epochs, out_model ):
self.model = self.get_default_model( src_embedding_size, trg_embedding_size, dropout )
self.model.compile( optimizer = 'RMSprop', loss = 'categorical_crossentropy', metrics = [ 'accuracy' ] )
best_acc = np.float( 0.0 )
best_loss = np.float( 999.9 )
for i in range( nb_epochs ):
time_start = time.time()
print( "Epoch {0}".format( i + 1 ), flush = True )
			train_loss, train_acc = self.train_model()
			valid_loss, valid_acc = self.valid_model()
if best_acc < valid_acc:
best_acc = valid_acc
self.save_weights( "{0}.acc_{1}".format( out_model, np.round( best_acc, 3 ) ) )
self.save_architecture( "{0}.acc_{1}".format( out_model, np.round( best_acc, 3 ) ) )
print( "train loss {0} -- acc: {1} ---- valid loss: {2} -- acc: {3}".format( train_loss, train_acc, valid_loss, valid_acc ), flush = True )
time_elapsed = time.time() - time_start
print( "{0} seconds".format( time_elapsed ), flush = True )
| mit |
heli522/scikit-learn | examples/svm/plot_separating_hyperplane.py | 291 | 1273 | """
=========================================
SVM: Maximum margin separating hyperplane
=========================================
Plot the maximum margin separating hyperplane within a two-class
separable dataset using a Support Vector Machine classifier with
linear kernel.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm
# we create 40 separable points
np.random.seed(0)
X = np.r_[np.random.randn(20, 2) - [2, 2], np.random.randn(20, 2) + [2, 2]]
Y = [0] * 20 + [1] * 20
# fit the model
clf = svm.SVC(kernel='linear')
clf.fit(X, Y)
# get the separating hyperplane
w = clf.coef_[0]
a = -w[0] / w[1]
xx = np.linspace(-5, 5)
yy = a * xx - (clf.intercept_[0]) / w[1]
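# The two lines above rewrite the hyperplane w[0]*x + w[1]*y + b = 0 in
# slope-intercept form, y = -(w[0] / w[1]) * x - b / w[1], with the bias b
# taken from clf.intercept_[0].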
# plot the parallels to the separating hyperplane that pass through the
# support vectors
b = clf.support_vectors_[0]
yy_down = a * xx + (b[1] - a * b[0])
b = clf.support_vectors_[-1]
yy_up = a * xx + (b[1] - a * b[0])
# plot the line, the points, and the nearest vectors to the plane
plt.plot(xx, yy, 'k-')
plt.plot(xx, yy_down, 'k--')
plt.plot(xx, yy_up, 'k--')
plt.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1],
s=80, facecolors='none')
plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired)
plt.axis('tight')
plt.show()
| bsd-3-clause |
anntzer/scikit-learn | examples/svm/plot_weighted_samples.py | 12 | 2047 | """
=====================
SVM: Weighted samples
=====================
Plot the decision function of a weighted dataset, where the size of each
point is proportional to its weight.
The sample weighting rescales the C parameter, which means that the classifier
puts more emphasis on getting these points right. The effect might often be
subtle.
To emphasize the effect here, we particularly weight outliers, making the
deformation of the decision boundary very visible.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm
def plot_decision_function(classifier, sample_weight, axis, title):
# plot the decision function
xx, yy = np.meshgrid(np.linspace(-4, 5, 500), np.linspace(-4, 5, 500))
Z = classifier.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# plot the line, the points, and the nearest vectors to the plane
axis.contourf(xx, yy, Z, alpha=0.75, cmap=plt.cm.bone)
axis.scatter(
X[:, 0],
X[:, 1],
c=y,
s=100 * sample_weight,
alpha=0.9,
cmap=plt.cm.bone,
edgecolors="black",
)
axis.axis("off")
axis.set_title(title)
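# In scikit-learn SVMs, sample_weight rescales the penalty parameter per
# sample (effectively C_i = C * w_i), so heavily weighted points pull the
# decision boundary toward classifying them correctly; that is what the
# right-hand panel below makes visible.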
# we create 20 points
np.random.seed(0)
X = np.r_[np.random.randn(10, 2) + [1, 1], np.random.randn(10, 2)]
y = [1] * 10 + [-1] * 10
sample_weight_last_ten = abs(np.random.randn(len(X)))
sample_weight_constant = np.ones(len(X))
# and bigger weights to some outliers
sample_weight_last_ten[15:] *= 5
sample_weight_last_ten[9] *= 15
# Fit the models.
# This model does not take into account sample weights.
clf_no_weights = svm.SVC(gamma=1)
clf_no_weights.fit(X, y)
# This other model takes into account some dedicated sample weights.
clf_weights = svm.SVC(gamma=1)
clf_weights.fit(X, y, sample_weight=sample_weight_last_ten)
fig, axes = plt.subplots(1, 2, figsize=(14, 6))
plot_decision_function(
clf_no_weights, sample_weight_constant, axes[0], "Constant weights"
)
plot_decision_function(clf_weights, sample_weight_last_ten, axes[1], "Modified weights")
plt.show()
| bsd-3-clause |
coderbone/SickRage | lib/guessit/plugins/transformers.py | 33 | 9580 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2013 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function, unicode_literals
from logging import getLogger
from pkg_resources import EntryPoint
from guessit.options import reload as reload_options
from stevedore import ExtensionManager
from stevedore.extension import Extension
log = getLogger(__name__)
class Transformer(object): # pragma: no cover
def __init__(self, priority=0):
self.priority = priority
self.log = getLogger(self.name)
@property
def name(self):
return self.__class__.__name__
def supported_properties(self):
return {}
def second_pass_options(self, mtree, options=None):
return None
def should_process(self, mtree, options=None):
return True
def process(self, mtree, options=None):
pass
def post_process(self, mtree, options=None):
pass
def register_arguments(self, opts, naming_opts, output_opts, information_opts, webservice_opts, other_options):
pass
def rate_quality(self, guess, *props):
return 0
class CustomTransformerExtensionManager(ExtensionManager):
    def __init__(self, namespace='guessit.transformer', invoke_on_load=True,
                 invoke_args=(), invoke_kwds=None, propagate_map_exceptions=True,
                 on_load_failure_callback=None, verify_requirements=False):
        # Avoid the mutable-default pitfall for the keyword-argument dict.
        if invoke_kwds is None:
            invoke_kwds = {}
        super(CustomTransformerExtensionManager, self).__init__(namespace=namespace,
                                                                invoke_on_load=invoke_on_load,
                                                                invoke_args=invoke_args,
                                                                invoke_kwds=invoke_kwds,
                                                                propagate_map_exceptions=propagate_map_exceptions,
                                                                on_load_failure_callback=on_load_failure_callback,
                                                                verify_requirements=verify_requirements)
@staticmethod
def order_extensions(extensions):
"""Order the loaded transformers
It should follow those rules
- website before language (eg: tvu.org.ru vs russian)
- language before episodes_rexps
- properties before language (eg: he-aac vs hebrew)
- release_group before properties (eg: XviD-?? vs xvid)
"""
extensions.sort(key=lambda ext: -ext.obj.priority)
return extensions
@staticmethod
def _load_one_plugin(ep, invoke_on_load, invoke_args, invoke_kwds, verify_requirements=True):
if not ep.dist:
# `require` argument of ep.load() is deprecated in newer versions of setuptools
if hasattr(ep, 'resolve'):
plugin = ep.resolve()
elif hasattr(ep, '_load'):
plugin = ep._load()
else:
plugin = ep.load(require=False)
else:
plugin = ep.load()
if invoke_on_load:
obj = plugin(*invoke_args, **invoke_kwds)
else:
obj = None
return Extension(ep.name, ep, plugin, obj)
def _load_plugins(self, invoke_on_load, invoke_args, invoke_kwds, verify_requirements):
return self.order_extensions(super(CustomTransformerExtensionManager, self)._load_plugins(invoke_on_load, invoke_args, invoke_kwds, verify_requirements))
def objects(self):
return self.map(self._get_obj)
@staticmethod
def _get_obj(ext):
return ext.obj
def object(self, name):
try:
return self[name].obj
except KeyError:
return None
def register_module(self, name=None, module_name=None, attrs=(), entry_point=None):
if entry_point:
ep = EntryPoint.parse(entry_point)
else:
ep = EntryPoint(name, module_name, attrs)
loaded = self._load_one_plugin(ep, invoke_on_load=True, invoke_args=(), invoke_kwds={})
if loaded:
self.extensions.append(loaded)
self.extensions = self.order_extensions(self.extensions)
self._extensions_by_name = None
class DefaultTransformerExtensionManager(CustomTransformerExtensionManager):
@property
def _internal_entry_points(self):
return ['split_path_components = guessit.transfo.split_path_components:SplitPathComponents',
'guess_filetype = guessit.transfo.guess_filetype:GuessFiletype',
'split_explicit_groups = guessit.transfo.split_explicit_groups:SplitExplicitGroups',
'guess_date = guessit.transfo.guess_date:GuessDate',
'guess_website = guessit.transfo.guess_website:GuessWebsite',
'guess_release_group = guessit.transfo.guess_release_group:GuessReleaseGroup',
'guess_properties = guessit.transfo.guess_properties:GuessProperties',
'guess_language = guessit.transfo.guess_language:GuessLanguage',
'guess_video_rexps = guessit.transfo.guess_video_rexps:GuessVideoRexps',
'guess_episodes_rexps = guessit.transfo.guess_episodes_rexps:GuessEpisodesRexps',
'guess_weak_episodes_rexps = guessit.transfo.guess_weak_episodes_rexps:GuessWeakEpisodesRexps',
'guess_bonus_features = guessit.transfo.guess_bonus_features:GuessBonusFeatures',
'guess_year = guessit.transfo.guess_year:GuessYear',
'guess_country = guessit.transfo.guess_country:GuessCountry',
'guess_idnumber = guessit.transfo.guess_idnumber:GuessIdnumber',
'split_on_dash = guessit.transfo.split_on_dash:SplitOnDash',
'guess_episode_info_from_position = guessit.transfo.guess_episode_info_from_position:GuessEpisodeInfoFromPosition',
'guess_movie_title_from_position = guessit.transfo.guess_movie_title_from_position:GuessMovieTitleFromPosition',
'guess_episode_details = guessit.transfo.guess_episode_details:GuessEpisodeDetails',
'expected_series = guessit.transfo.expected_series:ExpectedSeries',
'expected_title = guessit.transfo.expected_title:ExpectedTitle',]
def _find_entry_points(self, namespace):
entry_points = {}
# Internal entry points
if namespace == self.namespace:
for internal_entry_point_str in self._internal_entry_points:
internal_entry_point = EntryPoint.parse(internal_entry_point_str)
entry_points[internal_entry_point.name] = internal_entry_point
# Package entry points
setuptools_entrypoints = super(DefaultTransformerExtensionManager, self)._find_entry_points(namespace)
for setuptools_entrypoint in setuptools_entrypoints:
entry_points[setuptools_entrypoint.name] = setuptools_entrypoint
return list(entry_points.values())
_extensions = None
def all_transformers():
return _extensions.objects()
def get_transformer(name):
return _extensions.object(name)
def add_transformer(name=None, module_name=None, class_name=None, entry_point=None):
    """
    Add a transformer, either from its name, module and class or from an
    entry point spec.
    :param name: the name of the transformer. ie: 'guess_regexp_id'
    :param module_name: the module name. ie: 'flexget.utils.parsers.transformers.guess_regexp_id'
    :param class_name: the class name. ie: 'GuessRegexpId'
    :param entry_point: entry point spec format. ie: 'guess_regexp_id = flexget.utils.parsers.transformers.guess_regexp_id:GuessRegexpId'
    """
    if entry_point:
        _extensions.register_module(entry_point=entry_point)
    else:
        _extensions.register_module(name, module_name, (class_name,))
def reload(custom=False):
"""
Reload extension manager with default or custom one.
:param custom: if True, custom manager will be used, else default one.
Default manager will load default extensions from guessit and setuptools packaging extensions
Custom manager will not load default extensions from guessit, using only setuptools packaging extensions.
:type custom: boolean
"""
global _extensions
if custom:
_extensions = CustomTransformerExtensionManager()
else:
_extensions = DefaultTransformerExtensionManager()
reload_options(all_transformers())
reload()
| gpl-3.0 |
abartlet/samba-old | third_party/dnspython/dns/node.py | 56 | 5869 | # Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS nodes. A node is a set of rdatasets."""
import StringIO
import dns.rdataset
import dns.rdatatype
import dns.renderer
class Node(object):
"""A DNS node.
A node is a set of rdatasets
@ivar rdatasets: the node's rdatasets
@type rdatasets: list of dns.rdataset.Rdataset objects"""
__slots__ = ['rdatasets']
def __init__(self):
"""Initialize a DNS node.
"""
        self.rdatasets = []
def to_text(self, name, **kw):
"""Convert a node to text format.
Each rdataset at the node is printed. Any keyword arguments
to this method are passed on to the rdataset's to_text() method.
@param name: the owner name of the rdatasets
@type name: dns.name.Name object
@rtype: string
"""
s = StringIO.StringIO()
for rds in self.rdatasets:
print >> s, rds.to_text(name, **kw)
return s.getvalue()[:-1]
def __repr__(self):
return '<DNS node ' + str(id(self)) + '>'
def __eq__(self, other):
"""Two nodes are equal if they have the same rdatasets.
@rtype: bool
"""
#
# This is inefficient. Good thing we don't need to do it much.
#
for rd in self.rdatasets:
if rd not in other.rdatasets:
return False
for rd in other.rdatasets:
if rd not in self.rdatasets:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def __len__(self):
return len(self.rdatasets)
def __iter__(self):
return iter(self.rdatasets)
def find_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE,
create=False):
"""Find an rdataset matching the specified properties in the
current node.
@param rdclass: The class of the rdataset
@type rdclass: int
@param rdtype: The type of the rdataset
@type rdtype: int
@param covers: The covered type. Usually this value is
dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or
dns.rdatatype.RRSIG, then the covers value will be the rdata
type the SIG/RRSIG covers. The library treats the SIG and RRSIG
types as if they were a family of
types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much
easier to work with than if RRSIGs covering different rdata
types were aggregated into a single RRSIG rdataset.
@type covers: int
@param create: If True, create the rdataset if it is not found.
@type create: bool
@raises KeyError: An rdataset of the desired type and class does
not exist and I{create} is not True.
@rtype: dns.rdataset.Rdataset object
"""
for rds in self.rdatasets:
if rds.match(rdclass, rdtype, covers):
return rds
if not create:
raise KeyError
rds = dns.rdataset.Rdataset(rdclass, rdtype)
self.rdatasets.append(rds)
return rds
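    # Illustrative call (constants taken from dnspython's rdataclass and
    # rdatatype modules): find_rdataset(dns.rdataclass.IN, dns.rdatatype.A,
    # create=True) returns this node's IN/A rdataset, creating and attaching
    # it first if the node does not yet have one.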
def get_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE,
create=False):
"""Get an rdataset matching the specified properties in the
current node.
None is returned if an rdataset of the specified type and
class does not exist and I{create} is not True.
@param rdclass: The class of the rdataset
@type rdclass: int
@param rdtype: The type of the rdataset
@type rdtype: int
@param covers: The covered type.
@type covers: int
@param create: If True, create the rdataset if it is not found.
@type create: bool
@rtype: dns.rdataset.Rdataset object or None
"""
try:
rds = self.find_rdataset(rdclass, rdtype, covers, create)
except KeyError:
rds = None
return rds
def delete_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE):
"""Delete the rdataset matching the specified properties in the
current node.
If a matching rdataset does not exist, it is not an error.
@param rdclass: The class of the rdataset
@type rdclass: int
@param rdtype: The type of the rdataset
@type rdtype: int
@param covers: The covered type.
@type covers: int
"""
rds = self.get_rdataset(rdclass, rdtype, covers)
        if rds is not None:
self.rdatasets.remove(rds)
def replace_rdataset(self, replacement):
"""Replace an rdataset.
It is not an error if there is no rdataset matching I{replacement}.
Ownership of the I{replacement} object is transferred to the node;
in other words, this method does not store a copy of I{replacement}
at the node, it stores I{replacement} itself.
"""
self.delete_rdataset(replacement.rdclass, replacement.rdtype,
replacement.covers)
self.rdatasets.append(replacement)
| gpl-3.0 |
florian-f/sklearn | sklearn/tests/test_base.py | 7 | 4987 |
# Author: Gael Varoquaux
# License: BSD
import numpy as np
import scipy.sparse as sp
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_raises
from sklearn.base import BaseEstimator, clone, is_classifier
from sklearn.svm import SVC
from sklearn.pipeline import Pipeline
from sklearn.grid_search import GridSearchCV
#############################################################################
# A few test classes
class MyEstimator(BaseEstimator):
def __init__(self, l1=0, empty=None):
self.l1 = l1
self.empty = empty
class K(BaseEstimator):
def __init__(self, c=None, d=None):
self.c = c
self.d = d
class T(BaseEstimator):
def __init__(self, a=None, b=None):
self.a = a
self.b = b
class Buggy(BaseEstimator):
" A buggy estimator that does not set its parameters right. "
def __init__(self, a=None):
self.a = 1
class NoEstimator(object):
def __init__(self):
pass
def fit(self, X=None, y=None):
return self
def predict(self, X=None):
return None
class VargEstimator(BaseEstimator):
"""Sklearn estimators shouldn't have vargs."""
def __init__(self, *vargs):
pass
#############################################################################
# The tests
def test_clone():
"""Tests that clone creates a correct deep copy.
We create an estimator, make a copy of its original state
(which, in this case, is the current state of the estimator),
and check that the obtained copy is a correct deep copy.
"""
from sklearn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
new_selector = clone(selector)
assert_true(selector is not new_selector)
assert_equal(selector.get_params(), new_selector.get_params())
selector = SelectFpr(f_classif, alpha=np.zeros((10, 2)))
new_selector = clone(selector)
assert_true(selector is not new_selector)
def test_clone_2():
"""Tests that clone doesn't copy everything.
We first create an estimator, give it an own attribute, and
make a copy of its original state. Then we check that the copy doesn't
have the specific attribute we manually added to the initial estimator.
"""
from sklearn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
selector.own_attribute = "test"
new_selector = clone(selector)
assert_false(hasattr(new_selector, "own_attribute"))
def test_clone_buggy():
"""Check that clone raises an error on buggy estimators."""
buggy = Buggy()
buggy.a = 2
assert_raises(RuntimeError, clone, buggy)
no_estimator = NoEstimator()
assert_raises(TypeError, clone, no_estimator)
varg_est = VargEstimator()
assert_raises(RuntimeError, clone, varg_est)
def test_clone_empty_array():
"""Regression test for cloning estimators with empty arrays"""
clf = MyEstimator(empty=np.array([]))
clf2 = clone(clf)
assert_array_equal(clf.empty, clf2.empty)
clf = MyEstimator(empty=sp.csr_matrix(np.array([[0]])))
clf2 = clone(clf)
assert_array_equal(clf.empty.data, clf2.empty.data)
def test_repr():
"""Smoke test the repr of the base estimator."""
my_estimator = MyEstimator()
repr(my_estimator)
test = T(K(), K())
assert_equal(
repr(test),
"T(a=K(c=None, d=None), b=K(c=None, d=None))"
)
some_est = T(a=["long_params"] * 1000)
assert_equal(len(repr(some_est)), 415)
def test_str():
"""Smoke test the str of the base estimator"""
my_estimator = MyEstimator()
str(my_estimator)
def test_get_params():
test = T(K(), K())
assert_true('a__d' in test.get_params(deep=True))
assert_true('a__d' not in test.get_params(deep=False))
test.set_params(a__d=2)
assert_true(test.a.d == 2)
assert_raises(ValueError, test.set_params, a__a=2)
def test_is_classifier():
svc = SVC()
assert_true(is_classifier(svc))
assert_true(is_classifier(GridSearchCV(svc, {'C': [0.1, 1]})))
assert_true(is_classifier(Pipeline([('svc', svc)])))
assert_true(is_classifier(Pipeline([('svc_cv',
GridSearchCV(svc, {'C': [0.1, 1]}))])))
def test_set_params():
# test nested estimator parameter setting
clf = Pipeline([("svc", SVC())])
# non-existing parameter in svc
assert_raises(ValueError, clf.set_params, svc__stupid_param=True)
# non-existing parameter of pipeline
assert_raises(ValueError, clf.set_params, svm__stupid_param=True)
# we don't currently catch if the things in pipeline are estimators
#bad_pipeline = Pipeline([("bad", NoEstimator())])
#assert_raises(AttributeError, bad_pipeline.set_params,
#bad__stupid_param=True)
| bsd-3-clause |
florian-f/sklearn | sklearn/feature_extraction/dict_vectorizer.py | 1 | 9362 | # Author: Lars Buitinck <L.J.Buitinck@uva.nl>
# License: BSD-style.
from array import array
from collections import Mapping, Sequence
from operator import itemgetter
import numpy as np
import scipy.sparse as sp
from ..base import BaseEstimator, TransformerMixin
from ..externals import six
from ..externals.six.moves import xrange
from ..utils import atleast2d_or_csr, tosequence
def _tosequence(X):
"""Turn X into a sequence or ndarray, avoiding a copy if possible."""
if isinstance(X, Mapping): # single sample
return [X]
else:
return tosequence(X)
class DictVectorizer(BaseEstimator, TransformerMixin):
"""Transforms lists of feature-value mappings to vectors.
This transformer turns lists of mappings (dict-like objects) of feature
names to feature values into Numpy arrays or scipy.sparse matrices for use
with scikit-learn estimators.
When feature values are strings, this transformer will do a binary one-hot
(aka one-of-K) coding: one boolean-valued feature is constructed for each
of the possible string values that the feature can take on. For instance,
a feature "f" that can take on the values "ham" and "spam" will become two
features in the output, one signifying "f=ham", the other "f=spam".
Features that do not occur in a sample (mapping) will have a zero value
in the resulting array/matrix.
Parameters
----------
dtype : callable, optional
The type of feature values. Passed to Numpy array/scipy.sparse matrix
constructors as the dtype argument.
separator : string, optional
Separator string used when constructing new features for one-hot
coding.
sparse : boolean, optional
Whether transform should produce scipy.sparse matrices.
True by default.
Examples
--------
>>> from sklearn.feature_extraction import DictVectorizer
>>> v = DictVectorizer(sparse=False)
>>> D = [{'foo': 1, 'bar': 2}, {'foo': 3, 'baz': 1}]
>>> X = v.fit_transform(D)
>>> X
array([[ 2., 0., 1.],
[ 0., 1., 3.]])
>>> v.inverse_transform(X) == \
[{'bar': 2.0, 'foo': 1.0}, {'baz': 1.0, 'foo': 3.0}]
True
>>> v.transform({'foo': 4, 'unseen_feature': 3})
array([[ 0., 0., 4.]])
"""
def __init__(self, dtype=np.float64, separator="=", sparse=True):
self.dtype = dtype
self.separator = separator
self.sparse = sparse
def fit(self, X, y=None):
"""Learn a list of feature name -> indices mappings.
Parameters
----------
X : Mapping or iterable over Mappings
Dict(s) or Mapping(s) from feature names (arbitrary Python
objects) to feature values (strings or convertible to dtype).
y : (ignored)
Returns
-------
self
"""
X = _tosequence(X)
# collect all the possible feature names
feature_names = set()
for x in X:
for f, v in six.iteritems(x):
if isinstance(v, six.string_types):
f = "%s%s%s" % (f, self.separator, v)
feature_names.add(f)
# sort the feature names to define the mapping
feature_names = sorted(feature_names)
self.vocabulary_ = dict((f, i) for i, f in enumerate(feature_names))
self.feature_names_ = feature_names
return self
def fit_transform(self, X, y=None):
"""Learn a list of feature name -> indices mappings and transform X.
Like fit(X) followed by transform(X).
Parameters
----------
X : Mapping or iterable over Mappings
Dict(s) or Mapping(s) from feature names (arbitrary Python
objects) to feature values (strings or convertible to dtype).
y : (ignored)
Returns
-------
Xa : {array, sparse matrix}
Feature vectors; always 2-d.
"""
X = _tosequence(X)
self.fit(X)
return self.transform(X)
def inverse_transform(self, X, dict_type=dict):
"""Transform array or sparse matrix X back to feature mappings.
X must have been produced by this DictVectorizer's transform or
fit_transform method; it may only have passed through transformers
that preserve the number of features and their order.
In the case of one-hot/one-of-K coding, the constructed feature
names and values are returned rather than the original ones.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Sample matrix.
dict_type : callable, optional
Constructor for feature mappings. Must conform to the
collections.Mapping API.
Returns
-------
D : list of dict_type objects, length = n_samples
Feature mappings for the samples in X.
"""
X = atleast2d_or_csr(X) # COO matrix is not subscriptable
n_samples = X.shape[0]
names = self.feature_names_
dicts = [dict_type() for _ in xrange(n_samples)]
if sp.issparse(X):
for i, j in zip(*X.nonzero()):
dicts[i][names[j]] = X[i, j]
else:
for i, d in enumerate(dicts):
for j, v in enumerate(X[i, :]):
if v != 0:
d[names[j]] = X[i, j]
return dicts
def transform(self, X, y=None):
"""Transform feature->value dicts to array or sparse matrix.
Named features not encountered during fit or fit_transform will be
silently ignored.
Parameters
----------
X : Mapping or iterable over Mappings, length = n_samples
Dict(s) or Mapping(s) from feature names (arbitrary Python
objects) to feature values (strings or convertible to dtype).
y : (ignored)
Returns
-------
Xa : {array, sparse matrix}
Feature vectors; always 2-d.
"""
# Sanity check: Python's array has no way of explicitly requesting the
# signed 32-bit integers that scipy.sparse needs, so we use the next
# best thing: typecode "i" (int). However, if that gives larger or
# smaller integers than 32-bit ones, np.frombuffer screws up.
assert array("i").itemsize == 4, (
"sizeof(int) != 4 on your platform; please report this at"
" https://github.com/scikit-learn/scikit-learn/issues and"
" include the output from platform.platform() in your bug report")
dtype = self.dtype
vocab = self.vocabulary_
if self.sparse:
X = [X] if isinstance(X, Mapping) else X
indices = array("i")
indptr = array("i", [0])
# XXX we could change values to an array.array as well, but it
# would require (heuristic) conversion of dtype to typecode...
values = []
for x in X:
for f, v in six.iteritems(x):
if isinstance(v, six.string_types):
f = "%s%s%s" % (f, self.separator, v)
v = 1
try:
indices.append(vocab[f])
values.append(dtype(v))
except KeyError:
pass
indptr.append(len(indices))
indices = np.frombuffer(indices, dtype=np.int32)
indptr = np.frombuffer(indptr, dtype=np.int32)
shape = (len(indptr) - 1, len(vocab))
return sp.csr_matrix((values, indices, indptr),
shape=shape, dtype=dtype)
else:
X = _tosequence(X)
Xa = np.zeros((len(X), len(vocab)), dtype=dtype)
for i, x in enumerate(X):
for f, v in six.iteritems(x):
if isinstance(v, six.string_types):
f = "%s%s%s" % (f, self.separator, v)
v = 1
try:
Xa[i, vocab[f]] = dtype(v)
except KeyError:
pass
return Xa
def get_feature_names(self):
"""Returns a list of feature names, ordered by their indices.
If one-of-K coding is applied to categorical features, this will
include the constructed feature names but not the original ones.
"""
return self.feature_names_
def restrict(self, support, indices=False):
"""Restrict the features to those in support.
Parameters
----------
support : array-like
Boolean mask or list of indices (as returned by the get_support
member of feature selectors).
indices : boolean, optional
Whether support is a list of indices.
"""
if not indices:
support = np.where(support)[0]
names = self.feature_names_
new_vocab = {}
for i in support:
new_vocab[names[i]] = len(new_vocab)
self.vocabulary_ = new_vocab
self.feature_names_ = [f for f, i in sorted(six.iteritems(new_vocab),
key=itemgetter(1))]
return self
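# --- Hedged usage sketch (not part of the original module) ---
# A minimal illustration of one-of-K coding followed by restrict(); the
# feature names and data below are invented for demonstration only.
def _demo_restrict():
    v = DictVectorizer(sparse=False)
    D = [{"color": "red", "size": 1.0}, {"color": "blue", "size": 2.0}]
    v.fit_transform(D)
    # "color" is string-valued, so it is one-hot coded into "color=blue"
    # and "color=red"; the numeric "size" stays a single column.
    assert v.get_feature_names() == ["color=blue", "color=red", "size"]
    # Keep the last two features via a boolean support mask, as returned
    # by a feature selector's get_support().
    v.restrict([False, True, True])
    assert v.get_feature_names() == ["color=red", "size"]
    return v.transform(D)  # now a (2, 2) array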
| bsd-3-clause |
pytorch/fairseq | fairseq/data/audio/audio_utils.py | 1 | 10246 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import mmap
from pathlib import Path
from typing import BinaryIO, List, Optional, Tuple, Union
import numpy as np
import torch
import torch.nn.functional as F
SF_AUDIO_FILE_EXTENSIONS = {".wav", ".flac", ".ogg"}
FEATURE_OR_SF_AUDIO_FILE_EXTENSIONS = {".npy", ".wav", ".flac", ".ogg"}
def convert_waveform(
waveform: Union[np.ndarray, torch.Tensor],
sample_rate: int,
normalize_volume: bool = False,
to_mono: bool = False,
to_sample_rate: Optional[int] = None,
) -> Tuple[Union[np.ndarray, torch.Tensor], int]:
"""convert a waveform:
- to a target sample rate
- from multi-channel to mono channel
- volume normalization
Args:
waveform (numpy.ndarray or torch.Tensor): 2D original waveform
(channels x length)
sample_rate (int): original sample rate
normalize_volume (bool): perform volume normalization
to_mono (bool): convert to mono channel if having multiple channels
to_sample_rate (Optional[int]): target sample rate
Returns:
waveform (numpy.ndarray): converted 2D waveform (channels x length)
sample_rate (float): target sample rate
"""
try:
import torchaudio.sox_effects as ta_sox
except ImportError:
raise ImportError("Please install torchaudio: pip install torchaudio")
effects = []
if normalize_volume:
effects.append(["gain", "-n"])
if to_sample_rate is not None and to_sample_rate != sample_rate:
effects.append(["rate", f"{to_sample_rate}"])
if to_mono and waveform.shape[0] > 1:
effects.append(["channels", "1"])
if len(effects) > 0:
is_np_input = isinstance(waveform, np.ndarray)
_waveform = torch.from_numpy(waveform) if is_np_input else waveform
converted, converted_sample_rate = ta_sox.apply_effects_tensor(
_waveform, sample_rate, effects
)
if is_np_input:
converted = converted.numpy()
return converted, converted_sample_rate
return waveform, sample_rate
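# --- Hedged usage sketch (not part of the original module) ---
# Downmixing and resampling a fake stereo clip; assumes torchaudio (with
# its sox backend) is installed. The random signal stands in for audio.
def _demo_convert_waveform():
    waveform = np.random.randn(2, 16000).astype(np.float32)  # 1s stereo @ 16kHz
    mono, rate = convert_waveform(
        waveform, 16000, to_mono=True, to_sample_rate=8000
    )
    assert mono.shape[0] == 1 and rate == 8000  # one channel, resampled
    return mono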
def get_waveform(
path_or_fp: Union[str, BinaryIO],
normalization: bool = True,
mono: bool = True,
frames: int = -1,
start: int = 0,
always_2d: bool = True,
output_sample_rate: Optional[int] = None,
normalize_volume: bool = False,
) -> Tuple[np.ndarray, int]:
"""Get the waveform and sample rate of a 16-bit WAV/FLAC/OGG Vorbis audio.
Args:
path_or_fp (str or BinaryIO): the path or file-like object
normalization (bool): normalize values to [-1, 1] (Default: True)
mono (bool): convert multi-channel audio to mono-channel one
frames (int): the number of frames to read. (-1 for reading all)
start (int): Where to start reading. A negative value counts from the end.
always_2d (bool): always return 2D array even for mono-channel audios
output_sample_rate (Optional[int]): output sample rate
normalize_volume (bool): normalize volume
Returns:
waveform (numpy.ndarray): 1D or 2D waveform (channels x length)
sample_rate (float): sample rate
"""
if isinstance(path_or_fp, str):
ext = Path(path_or_fp).suffix
if ext not in SF_AUDIO_FILE_EXTENSIONS:
raise ValueError(f"Unsupported audio format: {ext}")
try:
import soundfile as sf
except ImportError:
raise ImportError("Please install soundfile: pip install soundfile")
waveform, sample_rate = sf.read(
path_or_fp, dtype="float32", always_2d=True, frames=frames, start=start
)
waveform = waveform.T # T x C -> C x T
waveform, sample_rate = convert_waveform(
waveform,
sample_rate,
normalize_volume=normalize_volume,
to_mono=mono,
to_sample_rate=output_sample_rate,
)
if not normalization:
waveform *= 2**15 # de-normalize to the 16-bit signed integer range
if not always_2d:
waveform = waveform.squeeze(axis=0)
return waveform, sample_rate
def _get_kaldi_fbank(
waveform: np.ndarray, sample_rate: int, n_bins=80
) -> Optional[np.ndarray]:
"""Get mel-filter bank features via PyKaldi."""
try:
from kaldi.feat.fbank import Fbank, FbankOptions
from kaldi.feat.mel import MelBanksOptions
from kaldi.feat.window import FrameExtractionOptions
from kaldi.matrix import Vector
mel_opts = MelBanksOptions()
mel_opts.num_bins = n_bins
frame_opts = FrameExtractionOptions()
frame_opts.samp_freq = sample_rate
opts = FbankOptions()
opts.mel_opts = mel_opts
opts.frame_opts = frame_opts
fbank = Fbank(opts=opts)
features = fbank.compute(Vector(waveform.squeeze()), 1.0).numpy()
return features
except ImportError:
return None
def _get_torchaudio_fbank(
waveform: np.ndarray, sample_rate, n_bins=80
) -> Optional[np.ndarray]:
"""Get mel-filter bank features via TorchAudio."""
try:
import torchaudio.compliance.kaldi as ta_kaldi
waveform = torch.from_numpy(waveform)
features = ta_kaldi.fbank(
waveform, num_mel_bins=n_bins, sample_frequency=sample_rate
)
return features.numpy()
except ImportError:
return None
def get_fbank(path_or_fp: Union[str, BinaryIO], n_bins=80) -> np.ndarray:
"""Get mel-filter bank features via PyKaldi or TorchAudio. Prefer PyKaldi
(faster CPP implementation) to TorchAudio (Python implementation). Note that
Kaldi/TorchAudio requires 16-bit signed integers as inputs and hence the
waveform should not be normalized."""
waveform, sample_rate = get_waveform(path_or_fp, normalization=False)
features = _get_kaldi_fbank(waveform, sample_rate, n_bins)
if features is None:
features = _get_torchaudio_fbank(waveform, sample_rate, n_bins)
if features is None:
raise ImportError(
"Please install pyKaldi or torchaudio to enable "
"online filterbank feature extraction"
)
return features
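# --- Hedged usage sketch (not part of the original module) ---
# Extracting 80-dim filterbank features; the path below is hypothetical
# and either pyKaldi or torchaudio must be installed for this to run.
def _demo_get_fbank(path="/path/to/utterance.wav"):
    features = get_fbank(path, n_bins=80)
    return features.shape  # (num_frames, 80)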
def is_npy_data(data: bytes) -> bool:
return data[0] == 147 and data[1] == 78  # "\x93N": start of the .npy magic "\x93NUMPY"
def is_sf_audio_data(data: bytes) -> bool:
is_wav = data[0] == 82 and data[1] == 73 and data[2] == 70  # "RIF" of "RIFF"
is_flac = data[0] == 102 and data[1] == 76 and data[2] == 97  # "fLa" of "fLaC"
is_ogg = data[0] == 79 and data[1] == 103 and data[2] == 103  # "Ogg" of "OggS"
return is_wav or is_flac or is_ogg
def mmap_read(path: str, offset: int, length: int) -> bytes:
with open(path, "rb") as f:
with mmap.mmap(f.fileno(), length=0, access=mmap.ACCESS_READ) as mmap_o:
data = mmap_o[offset : offset + length]
return data
def read_from_stored_zip(zip_path: str, offset: int, length: int) -> bytes:
return mmap_read(zip_path, offset, length)
def parse_path(path: str) -> Tuple[str, List[int]]:
"""Parse data path which is either a path to
1. a .npy/.wav/.flac/.ogg file
2. a stored ZIP file with slicing info: "[zip_path]:[offset]:[length]"
Args:
path (str): the data path to parse
Returns:
file_path (str): the file path
slice_ptr (list of int): empty in case 1;
byte offset and length for the slice in case 2
"""
if Path(path).suffix in FEATURE_OR_SF_AUDIO_FILE_EXTENSIONS:
_path, slice_ptr = path, []
else:
_path, *slice_ptr = path.split(":")
if not Path(_path).is_file():
raise FileNotFoundError(f"File not found: {_path}")
assert len(slice_ptr) in {0, 2}, f"Invalid path: {path}"
slice_ptr = [int(i) for i in slice_ptr]
return _path, slice_ptr
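# --- Hedged usage sketch (not part of the original module) ---
# parse_path() on both supported forms; empty placeholder files are
# created because parse_path() checks that the file exists. The /tmp
# paths are an assumption of this sketch.
def _demo_parse_path(tmp_dir="/tmp"):
    import os
    wav = os.path.join(tmp_dir, "demo_utt.wav")
    open(wav, "wb").close()
    assert parse_path(wav) == (wav, [])  # case 1: plain audio path
    zip_path = os.path.join(tmp_dir, "demo_feats.zip")
    open(zip_path, "wb").close()
    # case 2: stored-ZIP slice "path:offset:length"
    assert parse_path(zip_path + ":1024:512") == (zip_path, [1024, 512])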
def get_window(window_fn: callable, n_fft: int, win_length: int) -> torch.Tensor:
padding = n_fft - win_length
assert padding >= 0
return F.pad(window_fn(win_length), (padding // 2, padding - padding // 2))
def get_fourier_basis(n_fft: int) -> torch.Tensor:
basis = np.fft.fft(np.eye(n_fft))
basis = np.vstack(
[np.real(basis[: n_fft // 2 + 1, :]), np.imag(basis[: n_fft // 2 + 1, :])]
)
return torch.from_numpy(basis).float()
def get_mel_filters(
sample_rate: int, n_fft: int, n_mels: int, f_min: float, f_max: float
) -> torch.Tensor:
try:
import librosa
except ImportError:
raise ImportError("Please install librosa: pip install librosa")
basis = librosa.filters.mel(sample_rate, n_fft, n_mels, f_min, f_max)
return torch.from_numpy(basis).float()
class TTSSpectrogram(torch.nn.Module):
def __init__(
self,
n_fft: int,
win_length: int,
hop_length: int,
window_fn: callable = torch.hann_window,
return_phase: bool = False,
) -> None:
super(TTSSpectrogram, self).__init__()
self.n_fft = n_fft
self.hop_length = hop_length
self.return_phase = return_phase
basis = get_fourier_basis(n_fft).unsqueeze(1)
basis *= get_window(window_fn, n_fft, win_length)
self.register_buffer("basis", basis)
def forward(
self, waveform: torch.Tensor
) -> Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]:
padding = (self.n_fft // 2, self.n_fft // 2)
x = F.pad(waveform.unsqueeze(1), padding, mode="reflect")
x = F.conv1d(x, self.basis, stride=self.hop_length)
real_part = x[:, : self.n_fft // 2 + 1, :]
imag_part = x[:, self.n_fft // 2 + 1 :, :]
magnitude = torch.sqrt(real_part**2 + imag_part**2)
if self.return_phase:
phase = torch.atan2(imag_part, real_part)
return magnitude, phase
return magnitude
class TTSMelScale(torch.nn.Module):
def __init__(
self, n_mels: int, sample_rate: int, f_min: float, f_max: float, n_stft: int
) -> None:
super(TTSMelScale, self).__init__()
basis = get_mel_filters(sample_rate, (n_stft - 1) * 2, n_mels, f_min, f_max)
self.register_buffer("basis", basis)
def forward(self, specgram: torch.Tensor) -> torch.Tensor:
return torch.matmul(self.basis, specgram)
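# --- Hedged usage sketch (not part of the original module) ---
# Chaining TTSSpectrogram and TTSMelScale into a log-mel pipeline;
# librosa must be installed for the mel basis, and the hyperparameters
# below are illustrative, not prescribed by this module.
def _demo_log_mel():
    n_fft = 1024
    spec = TTSSpectrogram(n_fft=n_fft, win_length=n_fft, hop_length=256)
    mel = TTSMelScale(n_mels=80, sample_rate=22050, f_min=0.0,
                      f_max=8000.0, n_stft=n_fft // 2 + 1)
    waveform = torch.randn(1, 22050)  # one second of fake audio
    magnitude = spec(waveform)        # (1, n_fft // 2 + 1, frames)
    log_mel = torch.log(mel(magnitude).clamp(min=1e-5))
    return log_mel.shape              # (1, 80, frames)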
| mit |
pytorch/fairseq | examples/speech_recognition/data/data_utils.py | 1 | 3429 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
def calc_mean_invstddev(feature):
if len(feature.size()) != 2:
raise ValueError("We expect the input feature to be 2-D tensor")
mean = feature.mean(0)
var = feature.var(0)
# avoid division by ~zero
eps = 1e-8
if (var < eps).any():
return mean, 1.0 / (torch.sqrt(var) + eps)
return mean, 1.0 / torch.sqrt(var)
def apply_mv_norm(features):
# If there are fewer than 2 frames, the variance cannot be computed (it is NaN)
# and normalization is not possible, so return the item as it is
if features.size(0) < 2:
return features
mean, invstddev = calc_mean_invstddev(features)
res = (features - mean) * invstddev
return res
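# --- Hedged usage sketch (not part of the original module) ---
# After apply_mv_norm every feature dimension of a (frames x dims)
# tensor has ~zero mean; the random input is for demonstration only.
def _demo_apply_mv_norm():
    features = torch.randn(100, 40) * 3.0 + 5.0
    normed = apply_mv_norm(features)
    assert torch.allclose(normed.mean(0), torch.zeros(40), atol=1e-4)
    return normed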
def lengths_to_encoder_padding_mask(lengths, batch_first=False):
"""
convert lengths (a 1-D Long/Int tensor) to 2-D binary tensor
Args:
lengths: a (B, )-shaped tensor
Return:
max_length: maximum length of B sequences
encoder_padding_mask: a (max_length, B) binary mask, where
[t, b] = 0 for t < lengths[b] and 1 otherwise
TODO:
kernelize this function if benchmarking shows this function is slow
"""
max_lengths = torch.max(lengths).item()
bsz = lengths.size(0)
# [0, ..., T-1] on the right device, reshaped to (1, T) and expanded to
# (B, T); position t of sequence b is padding (1) when t >= lengths[b]
positions = torch.arange(max_lengths).to(lengths.device)
encoder_padding_mask = positions.view(1, max_lengths).expand(
bsz, -1
) >= lengths.view(bsz, 1).expand(-1, max_lengths)
if not batch_first:
return encoder_padding_mask.t(), max_lengths
else:
return encoder_padding_mask, max_lengths
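# --- Hedged worked example (not part of the original module) ---
# For lengths [2, 3, 1] and batch_first=True the (B, T) mask below marks
# padded positions with 1 (True):
#   [[0, 0, 1],   # length 2: last position is padding
#    [0, 0, 0],   # length 3: nothing is padding
#    [0, 1, 1]]   # length 1: two positions are padding
def _demo_lengths_to_mask():
    lengths = torch.tensor([2, 3, 1])
    mask, max_len = lengths_to_encoder_padding_mask(lengths, batch_first=True)
    assert max_len == 3 and mask.shape == (3, 3)
    return mask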
def encoder_padding_mask_to_lengths(
encoder_padding_mask, max_lengths, batch_size, device
):
"""
convert encoder_padding_mask (2-D binary tensor) to a 1-D tensor
Conventionally, encoder output contains a encoder_padding_mask, which is
a 2-D mask in a shape (T, B), whose (t, b) element indicate whether
encoder_out[t, b] is a valid output (=0) or not (=1). Occasionally, we
need to convert this mask tensor to a 1-D tensor in shape (B, ), where
[b] denotes the valid length of b-th sequence
Args:
encoder_padding_mask: a (T, B)-shaped binary tensor or None; if None,
indicating all are valid
Return:
seq_lengths: a (B,)-shaped tensor, where its (b, )-th element is the
number of valid elements of b-th sequence
max_lengths: maximum length of all sequence, if encoder_padding_mask is
not None, max_lengths must equal to encoder_padding_mask.size(0)
batch_size: batch size; if encoder_padding_mask is
not None, max_lengths must equal to encoder_padding_mask.size(1)
device: which device to put the result on
"""
if encoder_padding_mask is None:
return torch.Tensor([max_lengths] * batch_size).to(torch.int32).to(device)
assert encoder_padding_mask.size(0) == max_lengths, "max_lengths does not match"
assert encoder_padding_mask.size(1) == batch_size, "batch_size does not match"
return max_lengths - torch.sum(encoder_padding_mask, dim=0)
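# --- Hedged roundtrip sketch (not part of the original module) ---
# Converting a (T, B) padding mask back to lengths recovers the lengths
# that produced it.
def _demo_mask_roundtrip():
    lengths = torch.tensor([2, 3, 1])
    mask, max_len = lengths_to_encoder_padding_mask(lengths)  # (T, B)
    recovered = encoder_padding_mask_to_lengths(mask, max_len, 3, lengths.device)
    assert recovered.tolist() == [2, 3, 1]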
| mit |
lbybee/redacted | redactedOld.py | 1 | 14430 | from pylab import *
from time import strftime
import smtplib
import wxPython as wx
from wxPython.lib import wordwrap
import os
import Image
import numpy
from math import log
from xml.dom import minidom
from random import choice, randrange
from datetime import datetime
from wxPython.lib.pubsub import Publisher as pub
def genData():
"""
Generates a random dataset, ordered by the country index of the shp file
used. Returns a tuple including the year of the data, the data itself and
a norm value used to map the data set to the map.
"""
"""
#1.1 Take in an index of available datasets, in this case the file
overview.xml. indicator_list is the list of possible datasets
taken from overview_doc. In overview_doc the 'i' element represents
the individual datasets or indicators. xml.dom is used to parse the
information from overview.xml.
#1.2 Then choose a random dataset from the index, the result is
indicator. indicator_doc is the parsed xml for the randomly chosen
dataset. The .xml files are stored in the /xml/ subdirectory. The
file names are of the format id.xml, where id is the unique identifier
element for each dataset taken from overview_doc.
#1.3 Then generate id_list. id_list is a list of all the ids that each
dataset can contain. It is worth noting here that the current datasets
are taken from the Gapminder World desktop application taken from Gapminder
(http://www.gapminder.org/world). I am unsure about the licencing for
this data and Gapminder World. They seem like good people so probably
not that problematic but definitely something I should look into. This
data structure is only temporary anyways. It is also worth noting that
there are ids for more than just countries (for instance there are ids
for continents and regions).
#1.4 Then generate country_id_dict. The country_id_dict maps an id to a
country name. The xml element <a...> contains a mapping of ids to names
which I use here. 256 is the max id for countries. All greater ids are
for regions or continents or what have you so I don't include them. This
information is still taken from overview_doc.
#1.5 Then generate the year variable. The year variable is a string of the
header information from the indicator_doc. It is used to extract the max
and min years for the given dataset.
#1.6 Then generate the start_year and end_year. start_year is the first year
that data is available for; it is parsed from the string based on
'firstTime'. end_year is obtained the same way, except it is parsed
using 'lastTime'.
#1.7 Then generate country_data_list. country_data_list is a list of tuples.
The first element of the tuple is the id for the given row of data. The
second element of the tuple is the final year available for the given row
of data. The third element of the tuple is a list of all the data points
for the given row of data. It should be noted here that an 'n' in the list
of data points for the given row of data is used to represent a jump in the
index of the data points. This would be used for instance if you want to
skip years. For example '3n' means skip 3 years into the future for the
next data point.
#1.8 Then generate country_data_dict. country_data_dict is a dictionary
mapping ids to a list of data point years. It should be noted that this
differs from the data points in country_data_list because empty years
are elements in the new list of data point years. First, I filter out any
data rows for which the id does not represent a country (they'll be useless
with the map, maybe for a later iteration). If a data row represents a
country then I generate an empty list of the same length as the total number
of possible years. Then I iterate through the elements in the data points
from country_data_list. If there is an element of the '_n' format then I
move the index up by '_' this leaves empty elements in the data point year
list. Since the data points are still strings I convert them to floats.
To account for the possibility of non numerical data points I include an
exception where you just try again with a new random data set.
#1.9 Then generate the constant, rand_year. rand_year is a year randomly
taken from the set of possible years for a given data set. The data used
will be from this random year.
#1.10 Then generate country_name_dict. country_name_dict maps a country name
to a data point for the chosen random year. I take the country from the
country_data_dict, use the id and map it to the name in country_id_dict, I
use that and map it to country_data_dict[country][rand_year]. country in
this case is just the id.
#1.11 Then generate order_list. order_list is just a list pulled from a
text file of country names. The important part of order_list is that the
order of the names in order_list is in the same order as the countries in
the shp file.
#1.12 Then generate data_base_list. data_base_list is a list of the data
points from country_name_dict, the key part is that they are in the order
of the countries from order_list. Additionally, the data points are no
longer strings and are converted to floats. In the case where there
was no data, whether by not having a country or because there is no data
point, -9999 is used as a placeholder.
#1.13 Then check to see if there is any data in data_base_list. If there is
no data in data_base_list (all the values are -9999, which the set()
check detects) then try again with a new data set.
#1.14 Then generate output_data_list. output_data_list is different from
data_base_list in that logs are used when the maximum value is an order
of magnitude greater than the mean. The way this works: generate
the maximum and minimum value from data_base_list, ignoring missing values.
If the maximum is an order of magnitude greater than the mean use the
log_10() transformation. For negative values, take the negative log_10
of the absolute value. If the values are between -1 and 1 just divide by
10. May change this later. If the values are 0 just use 0. If the
max is not more than an order of magnitude greater than the mean just use
data_base_list.
#1.15 Then generate the variable norm. norm is the scaling factor I use
to normalize all the data points to a range from 0.0 to 1.0 and -1.0 to
0.0. The maximum absolute value is set as the norm.
#1.16 Then return a tuple that contains, the year being used, the
output_data_list, and the norm.
"""
overview_doc = minidom.parse('./xml/overview.xml') #1.1
indicator_list = overview_doc.getElementsByTagName('i')
indicator = choice(indicator_list) #1.2
indicator_doc = minidom.parse('./xml/' + str(indicator.attributes['id']
.value) + '.xml')
id_list = overview_doc.getElementsByTagName('a') #1.3
country_id_dict = {} #1.4
for i in id_list:
if int(i.attributes['id'].value.replace('i','')) <= 256:
country_id_dict[i.attributes['id'].value] = i.attributes['n'].value
##This method for getting year is at best a temporary solution.
##Originally, I wanted to pull the year out of the previously parsed xml
##overview_doc, however when I tried to call the correct element I received
##an error so this is my temporary solution, should still be looked into.
##3/29/2013 LB
year = (open('./xml/' + str(indicator.attributes['id'].value) + '.xml')
.read().split('\n')[0].split(' ')) #1.5
start_year = int(year[6].replace('firstTime="','').replace('"','')) #1.6
end_year = int(year[7].replace('lastTime="','').replace('"',''))
country_data_list = [(d.attributes['m'].value.split(',')[0],
d.attributes['m'].value.split(',')[3],
[v for v in d.attributes['d'].value
.split(',')]) for d in
indicator_doc.getElementsByTagName('t1')] #1.7
try: #1.8
country_data_dict = {}
for country in country_data_list:
if int(country[0].replace('i','')) <= 256:
country_data_dict[country[0]] = ['.']*((end_year-start_year)+1)
index = int(country[1])-start_year
for entry in country[2]:
if 'n' in entry:
index = index + int(entry.replace('n',''))
else:
country_data_dict[country[0]][index] = float(entry)
index = index + 1
##There may be a better solution to possible non numeric values than to just
##try again, this will be fixed when I do a custom data format though so
##it isn't a high priority.
##3/29/2013 LB
except:
return genData()
##It would be more efficient to simply take the random year before
##generating the whole country_data_dict[2] but this is going to change
##when I addd the new menu options to allow the user to change the selected
##year.
##3/29/2013 LB
if end_year > start_year: #1.9
rand_year = randrange(end_year-start_year)
else:
rand_year = 0
country_name_dict = {} #1.10
for country in country_data_dict:
country_name_dict[country_id_dict[country].encode("ascii")] = country_data_dict[country][rand_year]
order_list = open('Export_Output.txt','r').read().split('\n')[1:-1] #1.11
data_base_list = [country_name_dict[i.split(',')[2].replace('"','')]
if i.split(',')[2].replace('"','') in country_name_dict
else -9999 for i in order_list] #1.12
data_base_list = [i if i != '.' else -9999 for i in data_base_list]
empty_check = False #1.13
if len(set(data_base_list)) == 1:
empty_check = True
if empty_check:
return genData()
maximum = max([abs(i) for i in data_base_list if i != -9999]) #1.14
mean = sum([i for i in data_base_list if i != -9999])/len(data_base_list)
output_data_list = []
if maximum > mean*10:
for element in data_base_list:
if element == -9999:
output_data_list.append(-9999)
if element == 0.0:
output_data_list.append(0.0)
elif element != -9999 and element > 1.0:
output_data_list.append(log(element,10))
elif element != -9999 and element > 0.0 and element < 1.0:
output_data_list.append(element/10)
elif element != -9999 and element < -1.0:
output_data_list.append(-log(-element,10))
elif element != -9999 and element > -1.0 and element < 0.0:
output_data_list.append(element/10)
else:
output_data_list = data_base_list
norm = max([abs(i) for i in output_data_list if i != -9999])/256 #1.15
return (str(indicator.attributes['originalName'].value)
+ str(rand_year+start_year),output_data_list,norm) #1.16
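# --- Hedged illustration (not part of the original script) ---
# A simplified version of the scaling in steps 1.14-1.15 on toy data:
# because the maximum (10000) is an order of magnitude above the mean,
# values are mapped through log10 with their sign preserved, and the
# norm is derived from the scaled maximum. -9999 marks missing data.
def _demo_scaling():
    data = [10000.0, 10.0, -100.0, 0.5, -9999]
    scaled = []
    for v in data:
        if v == -9999:
            scaled.append(-9999)        # missing values pass through
        elif v > 1.0:
            scaled.append(log(v, 10))   # 10000.0 -> 4.0
        elif v < -1.0:
            scaled.append(-log(-v, 10)) # -100.0 -> -2.0
        else:
            scaled.append(v / 10)       # 0.5 -> 0.05
    norm = max([abs(i) for i in scaled if i != -9999]) / 256
    return scaled, norm                 # norm == 4.0 / 256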
def mapOut():
"""
Produces a map image with the correct coloring. Also return a string
for the correct answer.
"""
"""
#2.1 Take in the base_img. The base_img is an rgba image where the alpha
value holds an id for the particular country that the pixel is in. Convert
the image into an array. Get the dimensions of this array (sy,sx), then
generate out_data which is an empty matrix where the output pixels will be
stored.
#2.2 Generate the data tuple and the dta dictionary, dta contains the
actual data.
#2.3 Then iterate through each pixel of img_data. For each pixel, take
the country index and check it against the the dta dictionary. If the
data point is -9999 treat it as missing and color the pixel grey. If the
data point is negative color it red. If the data point is positive color it
green.
#2.4 Output the new image.
"""
img = Image.open('base_img.png') #2.1
img_data = numpy.asarray(img)
(sy, sx) = (img_data.shape[0], img_data.shape[1])
out_data = numpy.zeros((sy, sx, 3),dtype=numpy.uint8)
data = genData() #2.2
dta = data[1]
for j in range(sy): #2.3
for i in range(sx):
if img_data[j,i,0] != 0:
if img_data[j,i,3] != 255:
if dta[img_data[j,i,3]-1] != -9999:
if dta[img_data[j,i,3]-1] < 0.0:
out_data[j,i,0] = 255
out_data[j,i,1] = 256-int(abs(dta[img_data[j,i,3]-1])
/data[2])
out_data[j,i,2] = 256-int(abs(dta[img_data[j,i,3]-1])
/data[2])
else:
out_data[j,i,0] = 256-int(abs(dta[img_data[j,i,3]-1])
/data[2])
out_data[j,i,1] = 255
out_data[j,i,2] = 256-int(abs(dta[img_data[j,i,3]-1])
/data[2])
else:
out_data[j,i,0] = 136
out_data[j,i,1] = 136
out_data[j,i,2] = 136
else:
out_data[j,i,0] = 255
out_data[j,i,1] = 255
out_data[j,i,2] = 255
else:
out_data[j,i,0] = 0
out_data[j,i,1] = 0
out_data[j,i,2] = 0
out_image = Image.fromarray(out_data, 'RGB') #2.4
out_image.save('guess.png')
answer = open('answer.txt','w')
answer.write(data[0] + '\n')
answer.close()
return data[0]
| gpl-2.0 |
tom8941/MISP-IOC-Validator | checkioc.py | 1 | 25865 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# MISP-IOC-Validator - Validate IOC from MISP ; Export results and iocs to SIEM and sensors using syslog and CEF format
#
# Copyright (C) 2016 Thomas Hilt
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
from netaddr import IPNetwork, IPAddress
import argparse
import os
import os.path
import json
import time
import socket
from datetime import timedelta, date, datetime
from shutil import copyfile
from cybox.objects.file_object import File
import stix.utils as utils
from stix.core import STIXPackage, STIXHeader
from stix.indicator import Indicator
import dumbpig
from cef import *
from ioctest import *
from dataload import *
import time
import smtplib
import sys
import csv
import re
from email.mime.text import MIMEText
#import requests
#from requests.packages.urllib3.exceptions import InsecureRequestWarning
#requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
MAIL_FROM = 'localhost@localhost.local'
MAIL_SERVER = 'smtp.localhost.local'
yara_processed = set()
snort_processed = set()
mailed_attributes = set()
stix_supported = set(['filename|sha1','sha1','filename|md5','md5','filename|sha256','sha256'])
stix_indicators = set()
def _get_misp_version(misp):
'''Return the version of misp from the misp instance given.
misp -- misp instance connected.
'''
misp_version = json.dumps(misp.get_version()).encode('utf8').decode('string_escape')
misp_version = misp_version[1::] # remove first "
misp_version = misp_version[:-1] # remove last "
misp_version = misp_version.split(':')[1]
misp_version = misp_version.strip(' ')
misp_version = misp_version.strip('"')
return misp_version
def _perdelta(start, end, delta):
'''Generates and yields dates between start and end with a gap of days between dates defined by delta.
start -- start date of the range.
end -- end date of the range.
delta -- day gap number between dates to yield.
'''
curr = start
while curr < end:
yield curr
curr += delta
def _create_date_list(start, end, delta):
'''Returns a list of the dates between start and end with a gap of days between dates defined by delta.
start -- start date of the range.
end -- end date of the range.
delta -- day gap number between dates to return.
'''
dates=start.split('-')
start_date = date(int(dates[0]), int(dates[1]), int(dates[2]))
datee=end.split('-')
end_date = date(int(datee[0]), int(datee[1]), int(datee[2]))
rangesize = int(delta)
datelist = [str(result) for result in _perdelta(start_date,end_date,timedelta(days=rangesize))]
datelist.append(str(end_date)) # add border date
return datelist
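# --- Hedged worked example (not part of the original script) ---
# Slicing 2016-01-01 .. 2016-01-10 into 3-day windows; the end date is
# always appended, so the final window may be shorter than delta.
def _demo_date_list():
    dates = _create_date_list('2016-01-01', '2016-01-10', 3)
    assert dates == ['2016-01-01', '2016-01-04', '2016-01-07', '2016-01-10']
    return dates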
def _get_stix_indicator(ioc, uuid, stix_file):
'''Add one ioc to a stix indicator and return the indicator object
ioc -- contains the ioc value
uuid -- uuid of the ioc (attribute uuid)
stix_file -- stix file to write (currently unused in this helper)
'''
if '|' in ioc: # like in filename|md5
ioc = ioc.split('|')[1]
f = File()
indicator = Indicator()
indicator.title = uuid
indicator.description = ("ioc with MISP attribute id : " + uuid)
indicator.set_producer_identity("checkioc of tom8941")
indicator.set_produced_time(utils.dates.now())
f.add_hash(ioc)
indicator.add_object(f)
return indicator
def _export_yara(yara_rule,yara_file,yara_except_set):
'''Write yara_rule in yara_file
yara_rule -- Yara rule to write.
yara_file -- File to write.
yara_except_set -- Set of yara rules to discard from the export.
'''
yara_name_match = re.search('^(private|global| )*rule\s*\w*',yara_rule,re.MULTILINE)
if yara_name_match:
yara_name = yara_name_match.group().replace('rule','').strip(' \t\n\r')
yara_name_match_import = re.search('^import',yara_rule,re.MULTILINE)
if not yara_name_match_import:
if yara_name not in yara_processed and yara_name not in yara_except_set: #avoid duplicates and unwanted rules
yara_processed.add(yara_name)
yara_export_file.write(yara_rule)
yara_export_file.write('\n')
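# --- Hedged illustration (not part of the original script) ---
# How the rule-name regex used above extracts the name that drives
# deduplication; the rule text is a made-up minimal example.
def _demo_yara_name():
    rule = 'rule demo_apt\n{\n    condition:\n        true\n}\n'
    match = re.search('^(private|global| )*rule\s*\w*', rule, re.MULTILINE)
    name = match.group().replace('rule', '').strip(' \t\n\r')
    assert name == 'demo_apt'
    return name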
def _export_snort(snort_rule,snort_file):
'''Write snort_rule in snort_file
snort_rule -- Yara rule to write.
snort_file -- File to write.
'''
snort_name_match = re.search('msg:\"[^"]*";',snort_rule,re.MULTILINE)
if snort_name_match:
snort_name = snort_name_match.group()
snort_name = snort_name[5:-2]
if snort_rule not in snort_processed: #avoid duplicates
snort_processed.add(snort_rule)
snort_rule = snort_rule.replace('msg:"','msg:"[MISP] ')
snort_export_file.write(snort_rule)
snort_export_file.write('\n')
def _read_attribute_tracking_file(tracking_filepath):
'''Read a csv formatted file that should contain a list of uuid,date of attributes and return a dictionary using uuid as key and date as value.
tracking_filepath -- Path of the csv formatted file ("," as separator) that contains a list of uuid,date of attributes processed.
'''
dic = {}
if os.path.exists(tracking_filepath):
if os.path.isfile(tracking_filepath):
with open(tracking_filepath, 'r') as tracking_file:
csv_reader = csv.reader(tracking_file, delimiter=',')
for row in csv_reader:
dic[row[0]] = row[1]
tracking_file.close()
return dic
def _update_attribute_tracking_file(tracking_filepath, tracking_dict):
'''Convert a dictionary using attribute uuid as key and attribute date as value into a csv formatted file that should contain a list of uuid,date of attributes.
tracking_filepath -- Path of the csv formatted file ("," as separator) that contains a list of uuid,date of attributes processed.
tracking_dict -- Dictionary using attribute uuid as key and attribute date as value.
'''
with open(tracking_filepath, 'w') as tracking_file:
for key in tracking_dict:
tracking_file.write(key + ',' + tracking_dict[key] + '\n')
tracking_file.close()
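# --- Hedged illustration (not part of the original script) ---
# The tracking file holds one "uuid,date" pair per line; writing a dict
# and reading it back round-trips. The uuid and path are invented.
def _demo_tracking(path='/tmp/tracking_demo.csv'):
    tracked = {'566e3e66-1c34-4fbb-a513-1823950d210f': '2016-01-02'}
    _update_attribute_tracking_file(path, tracked)
    assert _read_attribute_tracking_file(path) == tracked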
def _add_to_mailed_attributes(event, attribute, reason):
'''Add the attribute and reason of failure to the set of attribute that will be sent by mail.
event -- Event related to the attribute.
attribute -- Attribute to add to the set of mailed attributes.
reason -- contains the reason of the failure.
'''
mailed_attributes.add((event['Orgname'], event['Orgcname'], event['uuid'], attribute['uuid'], event['info'], reason))
def _send_attributes_mail(mail_address, attribute_set):
'''Send the content of attribute_set by mail to mail_address
attribute_set -- contains the attribute contents.
mail_address -- contains the mail address that will receive the results.
'''
msg = 'List of problems with IOCs : \n\n'
msg += 'Org / OrgC / Event UUID / Attribute UUID / Description / Error message \n\n'
for result in attribute_set:
msg += str(result[0]) + ' / ' + str(result[1]) + ' / ' + str(result[2]) + ' / ' + str(result[3]) + ' / ' + str(result[4]) + ' / ' + str(result[5]) + '\n'
mime_msg = MIMEText(msg)
mime_msg['Subject'] = '[MISP-EU] MISP Quality check'
s = smtplib.SMTP(MAIL_SERVER)
s.sendmail(MAIL_FROM, mail_address, mime_msg.as_string())
s.quit()
def check_last(misp, last="1d", datasrc_dict=None, allowed_attribute_set=None, quiet=False, attribute_status_dict={}, stix_export_file=None, yara_export_file=None, snort_export_file=None, to_mail=False):
'''Check attributes from events published during the last period defined.
misp -- misp instance connected.
last -- last period used to catch events. (default 1d)
datasrc_dict -- dict that contains data source sets used for checks. (default None)
allowed_attribute_set -- set that contains the misp attribute types that would be checked. (default None)
quiet -- define if processing output should be displayed. (default False)
attribute_status_dict -- dictionary used to track already processed attributes. (default {})
yara_export_file -- define the file used to export valid yara rules. (default None)
snort_export_file -- define the file used to export valid snort rules. (default None)
to_mail -- define if the set of attributes that should be mailed have to be filled. (default False)
'''
res = misp.download_last(last)
if 'response' in res.keys():
json_events = res['response']
else:
if not quiet:
print 'No attributes in the specified period'
return
j=0
for result in check_attributes(json_events,datasrc_dict,allowed_attribute_set, attribute_status_dict):
if result:
j+=1
if stix_export_file is not None and result['result'] == 'OK':
if result['attribute_dict']['type'] in stix_supported:
stix_indicators.add(_get_stix_indicator(result['attribute_dict']['value'],result['attribute_dict']['uuid'], stix_export_file))
if yara_export_file is not None and result['result'] == 'OK':
if result['attribute_dict']['type'] == 'yara':
_export_yara(result['attribute_dict']['value'], yara_export_file,datasrc_dict['yara_export_except'])
if snort_export_file is not None and result['result'] == 'OK':
if result['attribute_dict']['type'] == 'snort':
_export_snort(result['attribute_dict']['value'], snort_export_file)
if to_mail and result['result'] == 'NOK':
_add_to_mailed_attributes(result['event_dict'], result['attribute_dict'], result['reason'])
yield get_CEF_syslog(_get_misp_version(misp), result['event_dict'], result['attribute_dict'], result['result'], result['reason'])
if not quiet:
print 'Processing of last ' + last + ' : ' + str(j) + ' attributes processed'
def sliced_search(misp, date_from=None, date_to=None, day_slice=1, time_wait=0, datasrc_dict=None, allowed_attribute_set=None, quiet=False, attribute_status_dict={}, stix_export_file=None, yara_export_file=None, snort_export_file=None, to_mail=False):
'''Check attributes from events created during the given time range.
misp -- misp instance connected.
date_from -- start date of the range. (default None)
date_to -- end date of the range. (default None)
day_slice -- define the size in days of the subranges generated to check events in smaller batches. (default 1)
time_wait -- define the time to wait between checks of two subranges generated by the day_slice parameter in order to reduce misp server request load. (default 0)
datasrc_dict -- dict that contains data source sets used for checks. (default None)
allowed_attribute_set -- set that contains the misp attribute types that would be checked. (default None)
quiet -- define if processing output should be displayed. (default False)
attribute_status_dict -- dictionary used to track already processed attributes. (default {})
yara_export_file -- define the file used to export valid yara rules. (default None)
snort_export_file -- define the file used to export valid snort rules. (default None)
to_mail -- define if the set of attributes that should be mailed have to be filled. (default False)
'''
datelist = _create_date_list(date_from, date_to, day_slice)
for i in range(0,len(datelist) - 1):
res = misp.search(date_from=datelist[i],date_to=datelist[i+1])
if 'response' in res.keys():
json_events = res['response']
else:
if not quiet:
print 'Processing from ' + datelist[i] + ' to ' + datelist[i+1] + ': No attributes'
yield None
continue
j=0
for result in check_attributes(json_events,datasrc_dict,allowed_attribute_set, attribute_status_dict):
if result:
j+=1
if stix_export_file is not None and result['result'] == 'OK':
if result['attribute_dict']['type'] in stix_supported:
stix_indicators.add(_get_stix_indicator(result['attribute_dict']['value'],result['attribute_dict']['uuid'], stix_export_file))
if yara_export_file is not None and result['result'] == 'OK':
if result['attribute_dict']['type'] == 'yara':
_export_yara(result['attribute_dict']['value'], yara_export_file, datasrc_dict['yara_export_except'])
if snort_export_file is not None and result['result'] == 'OK':
if result['attribute_dict']['type'] == 'snort':
_export_snort(result['attribute_dict']['value'], snort_export_file)
if to_mail and result['result'] == 'NOK':
_add_to_mailed_attributes(result['event_dict'], result['attribute_dict'], result['reason'])
yield get_CEF_syslog(_get_misp_version(misp), result['event_dict'], result['attribute_dict'], result['result'], result['reason'])
if not quiet:
print 'Processing from ' + datelist[i] + ' to ' + datelist[i+1] + ': ' + str(j) + ' attributes processed'
time.sleep(int(time_wait))
def update_tracking_last(misp, last="1d", allowed_attribute_set=None, quiet=False, attribute_status_dict={}):
'''Update the attribute tracking file using the last function to fetch events.
misp -- misp instance connected.
last -- last period used to catch events. (default 1d)
allowed_attribute_set -- set that contains the misp attribute types that would be checked. (default None)
quiet -- define if processing output should be displayed. (default False)
attribute_status_dict -- dictionary used to track already processed attributes. (default {})
'''
res = misp.download_last(last)
if 'response' in res.keys():
json_events = res['response']
else:
if not quiet:
print 'No attributes in the specified period'
return
j=0
for result in track_attributes(json_events,allowed_attribute_set, attribute_status_dict):
if result:
j+=1
if not quiet:
print 'Processing of last ' + last + ' : ' + str(j) + ' attributes processed'
def update_tracking(misp, date_from=None, date_to=None, day_slice=1, time_wait=0, allowed_attribute_set=None, quiet=False, attribute_status_dict={}):
'''Update the attribute tracking file using the range search function to fetch events.
misp -- misp instance connected.
date_from -- start date of the range. (default None)
date_to -- end date of the range. (default None)
day_slice -- define the size in days of the subranges generated to check events in smaller batches. (default 1)
time_wait -- define the time to wait between checks of two subranges generated by the day_slice parameter in order to reduce misp server request load. (default 0)
allowed_attribute_set -- set that contains the misp attribute types that would be checked. (default None)
quiet -- define if processing output should be displayed. (default False)
attribute_status_dict -- dictionary used to track already processed attributes. (default {})
'''
datelist = _create_date_list(date_from, date_to, day_slice)
for i in range(0,len(datelist) - 1):
res = misp.search(date_from=datelist[i],date_to=datelist[i+1])
if 'response' in res.keys():
json_events = res['response']
else:
if not quiet:
print 'Processing from ' + datelist[i] + ' to ' + datelist[i+1] + ': No attributes'
continue
j=0
for result in track_attributes(json_events,allowed_attribute_set, attribute_status_dict):
if result:
j+=1
if not quiet:
print 'Processing from ' + datelist[i] + ' to ' + datelist[i+1] + ': ' + str(j) + ' attributes processed'
time.sleep(int(time_wait))
############################################
################# Main ####################
############################################
if __name__ == '__main__':
'''
'''
parser = argparse.ArgumentParser(description='Download events from a MISP instance and verify their validity.')
parser.add_argument("--print_types",help="Print valid MISP attribute types", action="store_true")
parser.add_argument("--update_tracking_only", help="update the file used to track already processed attributes. Should be used with -s and -e.", action="store_true")
parser.add_argument("--lock", help="Specify a lock file to prevent multiple execution.")
parser.add_argument("-l", "--last", help="can be defined in days, hours, minutes (for example 5d or 12h or 30m)")
parser.add_argument("-s", "--start", help="start date of time range YYYY-MM-DD format")
parser.add_argument("-e", "--end", help="end date of time range YYYY-MM-DD format")
parser.add_argument("-d", "--day_slice",help="size of dayrange in days")
parser.add_argument("-t", "--time_wait",default=0,help="time to wait between processing of 2 range of days in seconds")
parser.add_argument("-i", "--ip", help="Syslog server ip")
parser.add_argument("-p", "--port", help="Syslog server port")
parser.add_argument("-x", "--stix_export_path", help="Valid ioc STIX format file path (only for hashes)")
parser.add_argument("-y", "--yara_export_path", help="Valid yara rules export file path")
parser.add_argument("-z", "--snort_export_path", help="Valid snort rules export file path")
parser.add_argument("-a", "--attribute_tracking", help="file used to track already processed attributes based on its uuid and modification date")
parser.add_argument("-m", "--mail", help="Email that will receive results of wrong IOCs.")
argtypegroup = parser.add_mutually_exclusive_group()
argtypegroup.add_argument("-o", "--only", nargs="+",help="Only attribute type given")
argtypegroup.add_argument("-w", "--without", nargs="+",help="Without attribute type given")
argverb_group = parser.add_mutually_exclusive_group()
argverb_group.add_argument("-v", "--verbose", help="Print the result of each test of attributes.", action="store_true")
argverb_group.add_argument("-q", "--quiet", help="Suppress all outputs", action="store_true")
args = parser.parse_args()
if args.print_types:
print 'List of valid attributes : '
for e in allowed_attribute_set:
print e
exit(0)
if not args.quiet:
print time.strftime("%c")
if args.lock:
if os.path.exists(args.lock):
if os.path.isfile(args.lock):
print "Lock file already exists. Please wait until the other process has finished or delete this file."
exit(0)
else:
print "Lock file path already exists but it is not a file. Please suppress it."
exit(0)
else:
with open(args.lock, 'w') as lock_file:
lock_file.write('1\n')
lock_file.close()
if args.stix_export_path:
stix_export_file = open(args.stix_export_path, 'w')
else:
stix_export_file = None
if args.yara_export_path:
yara_export_file = open(args.yara_export_path, 'w')
else:
yara_export_file = None
if args.snort_export_path:
snort_export_file = open(args.snort_export_path, 'w')
else:
snort_export_file = None
if args.only:
if any(e not in allowed_attribute_set for e in args.only):
print 'Some elements of the attribute list are not valid. Use --print_types to show the valid ones'
exit(0)
else:
allowed_attribute_set.clear()
for e in args.only:
allowed_attribute_set.add(e)
if args.without:
if any(e not in allowed_attribute_set for e in args.without):
print 'Some elements of the attribute list are not valid. Use --print_types to show the valid ones'
exit(0)
else:
for e in args.without:
allowed_attribute_set.remove(e)
if not args.update_tracking_only:
datasrc_dict = import_external_sources(allowed_attribute_set) # Load datasets
if args.attribute_tracking:
attribute_status_dict = _read_attribute_tracking_file(args.attribute_tracking)
if os.path.exists(args.attribute_tracking):
if os.path.isfile(args.attribute_tracking):
copyfile(args.attribute_tracking, args.attribute_tracking + '.bak')
misp = PyMISP(misp_url, misp_key, misp_verifycert, 'json')
sock = None
if args.ip is not None and args.port is not None:
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.connect((args.ip,int(args.port)))
if args.update_tracking_only:
if not args.attribute_tracking:
print "-a or --attribute_tracking parameter missing."
exit(0)
if args.last:
update_tracking_last(misp,args.last,allowed_attribute_set, args.quiet, attribute_status_dict)
else:
print "-l/--last or, -s/--start and -e/--end parameters missing."
exit(0)
if args.day_slice is None:
date_format = "%Y-%m-%d"
delta = datetime.strptime(args.end, date_format) - datetime.strptime(args.start, date_format)
update_tracking(misp,args.start,args.end,str(int(delta.days)),0,allowed_attribute_set, args.quiet, attribute_status_dict)
else:
update_tracking(misp,args.start,args.end,args.day_slice,args.time_wait,allowed_attribute_set,args.quiet,attribute_status_dict)
elif args.last is not None:
for message in check_last(misp,args.last,datasrc_dict, allowed_attribute_set, args.quiet, attribute_status_dict, stix_export_file, yara_export_file, snort_export_file, bool(args.mail)):
if args.verbose and message is not None:
print message
if isinstance(sock,socket.socket) and message is not None:
sock.send(message)
elif args.day_slice is None:
date_format = "%Y-%m-%d"
delta = datetime.strptime(args.end, date_format) - datetime.strptime(args.start, date_format)
for message in sliced_search(misp,args.start,args.end,str(int(delta.days)),0,datasrc_dict, allowed_attribute_set, args.quiet, attribute_status_dict, stix_export_file, yara_export_file, snort_export_file, bool(args.mail)):
if args.verbose and message is not None:
print message
if isinstance(sock,socket.socket) and message is not None:
sock.send(message)
else:
for message in sliced_search(misp,args.start,args.end,args.day_slice,args.time_wait,datasrc_dict, allowed_attribute_set, args.quiet, attribute_status_dict, stix_export_file, yara_export_file, snort_export_file, bool(args.mail)):
if args.verbose and message is not None:
print message
if isinstance(sock,socket.socket) and message is not None:
sock.send(message)
if isinstance(sock,socket.socket):
sock.close()
if args.stix_export_path:
stix_package = STIXPackage()
stix_header = STIXHeader()
stix_header.description = "MISP checkioc STIX export"
stix_package.stix_header = stix_header
for indicator in stix_indicators:
stix_package.add(indicator)
stix_export_file.write(stix_package.to_xml())
stix_export_file.close()
if args.yara_export_path:
yara_export_file.close()
if args.snort_export_path:
snort_export_file.close()
if args.attribute_tracking:
_update_attribute_tracking_file(args.attribute_tracking, attribute_status_dict)
if os.path.exists(args.attribute_tracking + '.bak'):
if os.path.isfile(args.attribute_tracking + '.bak'):
os.remove(args.attribute_tracking + '.bak')
if args.mail:
_send_attributes_mail(args.mail, mailed_attributes)
if args.lock:
if os.path.exists(args.lock):
if os.path.isfile(args.lock):
os.remove(args.lock)
if not args.quiet:
print time.strftime("%c")
exit(0)
| gpl-3.0 |
eric-haibin-lin/mxnet | example/ssd/dataset/yolo_format.py | 54 | 5795 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import numpy as np
from imdb import Imdb
class YoloFormat(Imdb):
"""
Base class for loading datasets as used in YOLO
Parameters:
----------
name : str
name for this dataset
classes : list or tuple of str
class names in this dataset
list_file : str
filename of the image list file
image_dir : str
image directory
label_dir : str
label directory
extension : str
by default .jpg
label_extension : str
by default .txt
shuffle : bool
whether to shuffle the initial order when loading this dataset,
default is True
"""
def __init__(self, name, classes, list_file, image_dir, label_dir, \
extension='.jpg', label_extension='.txt', shuffle=True):
if isinstance(classes, list) or isinstance(classes, tuple):
num_classes = len(classes)
elif isinstance(classes, str):
with open(classes, 'r') as f:
classes = [l.strip() for l in f.readlines()]
num_classes = len(classes)
else:
raise ValueError("classes should be list/tuple or text file")
assert num_classes > 0, "number of classes must > 0"
super(YoloFormat, self).__init__(name + '_' + str(num_classes))
self.classes = classes
self.num_classes = num_classes
self.list_file = list_file
self.image_dir = image_dir
self.label_dir = label_dir
self.extension = extension
self.label_extension = label_extension
self.image_set_index = self._load_image_set_index(shuffle)
self.num_images = len(self.image_set_index)
self.labels = self._load_image_labels()
def _load_image_set_index(self, shuffle):
"""
find out which indexes correspond to given image set (train or val)
Parameters:
----------
shuffle : boolean
whether to shuffle the image list
Returns:
----------
entire list of images specified in the setting
"""
assert os.path.exists(self.list_file), 'Path does not exist: {}'.format(self.list_file)
with open(self.list_file, 'r') as f:
image_set_index = [x.strip() for x in f.readlines()]
if shuffle:
np.random.shuffle(image_set_index)
return image_set_index
def image_path_from_index(self, index):
"""
given image index, find out full path
Parameters:
----------
index: int
index of a specific image
Returns:
----------
full path of this image
"""
assert self.image_set_index is not None, "Dataset not initialized"
name = self.image_set_index[index]
image_file = os.path.join(self.image_dir, name) + self.extension
assert os.path.exists(image_file), 'Path does not exist: {}'.format(image_file)
return image_file
def label_from_index(self, index):
"""
given image index, return preprocessed ground-truth
Parameters:
----------
index: int
index of a specific image
Returns:
----------
ground-truths of this image
"""
assert self.labels is not None, "Labels not processed"
return self.labels[index]
def _label_path_from_index(self, index):
"""
given image index, find out annotation path
Parameters:
----------
index: int
index of a specific image
Returns:
----------
full path of annotation file
"""
label_file = os.path.join(self.label_dir, index + self.label_extension)
assert os.path.exists(label_file), 'Path does not exist: {}'.format(label_file)
return label_file
def _load_image_labels(self):
"""
preprocess all ground-truths
Returns:
----------
labels packed in [num_images x max_num_objects x 5] tensor
"""
temp = []
# load ground-truths
for idx in self.image_set_index:
label_file = self._label_path_from_index(idx)
with open(label_file, 'r') as f:
label = []
for line in f.readlines():
temp_label = line.strip().split()
                    assert len(temp_label) == 5, "Invalid label file: " + label_file
cls_id = int(temp_label[0])
x = float(temp_label[1])
y = float(temp_label[2])
half_width = float(temp_label[3]) / 2
half_height = float(temp_label[4]) / 2
xmin = x - half_width
ymin = y - half_height
xmax = x + half_width
ymax = y + half_height
label.append([cls_id, xmin, ymin, xmax, ymax])
temp.append(np.array(label))
return temp
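# Illustrative sketch (hypothetical names and values, not part of the original
# file): a YOLO label line stores "cls_id x_center y_center width height", e.g.
#     0 0.50 0.50 0.20 0.40
# which _load_image_labels converts to corner form:
#     [cls_id=0, xmin=0.40, ymin=0.30, xmax=0.60, ymax=0.70]
#
# dataset = YoloFormat('voc', ['person', 'car'], 'train.lst',
#                      'images/', 'labels/', shuffle=False)
# print(dataset.label_from_index(0))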
| apache-2.0 |
tjsavage/full_nonrel_starter | django/contrib/gis/utils/geoip.py | 316 | 14811 | """
This module houses the GeoIP object, a ctypes wrapper for the MaxMind GeoIP(R)
C API (http://www.maxmind.com/app/c). This is an alternative to the GPL
licensed Python GeoIP interface provided by MaxMind.
GeoIP(R) is a registered trademark of MaxMind, LLC of Boston, Massachusetts.
For IP-based geolocation, this module requires the GeoLite Country and City
datasets, in binary format (CSV will not work!). The datasets may be
downloaded from MaxMind at http://www.maxmind.com/download/geoip/database/.
Grab GeoIP.dat.gz and GeoLiteCity.dat.gz, and unzip them in the directory
corresponding to settings.GEOIP_PATH. See the GeoIP docstring and examples
below for more details.
TODO: Verify compatibility with Windows.
Example:
>>> from django.contrib.gis.utils import GeoIP
>>> g = GeoIP()
>>> g.country('google.com')
{'country_code': 'US', 'country_name': 'United States'}
>>> g.city('72.14.207.99')
{'area_code': 650,
'city': 'Mountain View',
'country_code': 'US',
'country_code3': 'USA',
'country_name': 'United States',
'dma_code': 807,
'latitude': 37.419200897216797,
'longitude': -122.05740356445312,
'postal_code': '94043',
'region': 'CA'}
>>> g.lat_lon('salon.com')
(37.789798736572266, -122.39420318603516)
>>> g.lon_lat('uh.edu')
(-95.415199279785156, 29.77549934387207)
>>> g.geos('24.124.1.80').wkt
'POINT (-95.2087020874023438 39.0392990112304688)'
"""
import os, re
from ctypes import c_char_p, c_float, c_int, Structure, CDLL, POINTER
from ctypes.util import find_library
from django.conf import settings
if not settings.configured: settings.configure()
# Creating the settings dictionary with any settings, if needed.
GEOIP_SETTINGS = dict((key, getattr(settings, key))
for key in ('GEOIP_PATH', 'GEOIP_LIBRARY_PATH', 'GEOIP_COUNTRY', 'GEOIP_CITY')
if hasattr(settings, key))
lib_path = GEOIP_SETTINGS.get('GEOIP_LIBRARY_PATH', None)
# GeoIP Exception class.
class GeoIPException(Exception): pass
# The shared library for the GeoIP C API. May be downloaded
# from http://www.maxmind.com/download/geoip/api/c/
if lib_path:
lib_name = None
else:
# TODO: Is this really the library name for Windows?
lib_name = 'GeoIP'
# Getting the path to the GeoIP library.
if lib_name: lib_path = find_library(lib_name)
if lib_path is None: raise GeoIPException('Could not find the GeoIP library (tried "%s"). '
'Try setting GEOIP_LIBRARY_PATH in your settings.' % lib_name)
lgeoip = CDLL(lib_path)
# Regular expressions for recognizing IP addresses and the GeoIP
# free database editions.
ipregex = re.compile(r'^(?P<w>\d\d?\d?)\.(?P<x>\d\d?\d?)\.(?P<y>\d\d?\d?)\.(?P<z>\d\d?\d?)$')
free_regex = re.compile(r'^GEO-\d{3}FREE')
lite_regex = re.compile(r'^GEO-\d{3}LITE')
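# Illustrative (the digits below are hypothetical): GeoIP_database_info()
# returns strings such as "GEO-106FREE 20110101 ..." for the free country
# edition and "GEO-533LITE 20110101 ..." for GeoLite City; the two regexes
# above distinguish them when auto-detecting the edition in GeoIP.__init__.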
#### GeoIP C Structure definitions ####
class GeoIPRecord(Structure):
_fields_ = [('country_code', c_char_p),
('country_code3', c_char_p),
('country_name', c_char_p),
('region', c_char_p),
('city', c_char_p),
('postal_code', c_char_p),
('latitude', c_float),
('longitude', c_float),
# TODO: In 1.4.6 this changed from `int dma_code;` to
# `union {int metro_code; int dma_code;};`. Change
                # to a `ctypes.Union` to accommodate this in the future, once
                # pre-1.4.6 versions are no longer distributed.
('dma_code', c_int),
('area_code', c_int),
# TODO: The following structure fields were added in 1.4.3 --
# uncomment these fields when sure previous versions are no
# longer distributed by package maintainers.
#('charset', c_int),
#('continent_code', c_char_p),
]
class GeoIPTag(Structure): pass
#### ctypes function prototypes ####
RECTYPE = POINTER(GeoIPRecord)
DBTYPE = POINTER(GeoIPTag)
# For retrieving records by name or address.
def record_output(func):
func.restype = RECTYPE
return func
rec_by_addr = record_output(lgeoip.GeoIP_record_by_addr)
rec_by_name = record_output(lgeoip.GeoIP_record_by_name)
# For opening & closing GeoIP database files.
geoip_open = lgeoip.GeoIP_open
geoip_open.restype = DBTYPE
geoip_close = lgeoip.GeoIP_delete
geoip_close.argtypes = [DBTYPE]
geoip_close.restype = None
# String output routines.
def string_output(func):
func.restype = c_char_p
return func
geoip_dbinfo = string_output(lgeoip.GeoIP_database_info)
cntry_code_by_addr = string_output(lgeoip.GeoIP_country_code_by_addr)
cntry_code_by_name = string_output(lgeoip.GeoIP_country_code_by_name)
cntry_name_by_addr = string_output(lgeoip.GeoIP_country_name_by_addr)
cntry_name_by_name = string_output(lgeoip.GeoIP_country_name_by_name)
#### GeoIP class ####
class GeoIP(object):
# The flags for GeoIP memory caching.
# GEOIP_STANDARD - read database from filesystem, uses least memory.
#
# GEOIP_MEMORY_CACHE - load database into memory, faster performance
# but uses more memory
#
# GEOIP_CHECK_CACHE - check for updated database. If database has been updated,
# reload filehandle and/or memory cache.
#
# GEOIP_INDEX_CACHE - just cache
# the most frequently accessed index portion of the database, resulting
# in faster lookups than GEOIP_STANDARD, but less memory usage than
# GEOIP_MEMORY_CACHE - useful for larger databases such as
# GeoIP Organization and GeoIP City. Note, for GeoIP Country, Region
# and Netspeed databases, GEOIP_INDEX_CACHE is equivalent to GEOIP_MEMORY_CACHE
#
GEOIP_STANDARD = 0
GEOIP_MEMORY_CACHE = 1
GEOIP_CHECK_CACHE = 2
GEOIP_INDEX_CACHE = 4
cache_options = dict((opt, None) for opt in (0, 1, 2, 4))
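    # Illustrative usage sketch (the path is hypothetical): open the datasets
    # with in-memory caching as described by the flags above --
    #     g = GeoIP(path='/data/geoip', cache=GeoIP.GEOIP_MEMORY_CACHE)
    #     g.country_code('72.14.207.99')   # -> 'US'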
_city_file = ''
_country_file = ''
# Initially, pointers to GeoIP file references are NULL.
_city = None
_country = None
def __init__(self, path=None, cache=0, country=None, city=None):
"""
        Initializes the GeoIP object. No parameters are required to use the default
settings. Keyword arguments may be passed in to customize the locations
of the GeoIP data sets.
* path: Base directory to where GeoIP data is located or the full path
to where the city or country data files (*.dat) are located.
Assumes that both the city and country data sets are located in
this directory; overrides the GEOIP_PATH settings attribute.
* cache: The cache settings when opening up the GeoIP datasets,
and may be an integer in (0, 1, 2, 4) corresponding to
the GEOIP_STANDARD, GEOIP_MEMORY_CACHE, GEOIP_CHECK_CACHE,
and GEOIP_INDEX_CACHE `GeoIPOptions` C API settings,
respectively. Defaults to 0, meaning that the data is read
from the disk.
* country: The name of the GeoIP country data file. Defaults to
'GeoIP.dat'; overrides the GEOIP_COUNTRY settings attribute.
* city: The name of the GeoIP city data file. Defaults to
'GeoLiteCity.dat'; overrides the GEOIP_CITY settings attribute.
"""
# Checking the given cache option.
if cache in self.cache_options:
self._cache = self.cache_options[cache]
else:
raise GeoIPException('Invalid caching option: %s' % cache)
# Getting the GeoIP data path.
if not path:
path = GEOIP_SETTINGS.get('GEOIP_PATH', None)
if not path: raise GeoIPException('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
if not isinstance(path, basestring):
raise TypeError('Invalid path type: %s' % type(path).__name__)
if os.path.isdir(path):
# Constructing the GeoIP database filenames using the settings
# dictionary. If the database files for the GeoLite country
# and/or city datasets exist, then try and open them.
country_db = os.path.join(path, country or GEOIP_SETTINGS.get('GEOIP_COUNTRY', 'GeoIP.dat'))
if os.path.isfile(country_db):
self._country = geoip_open(country_db, cache)
self._country_file = country_db
city_db = os.path.join(path, city or GEOIP_SETTINGS.get('GEOIP_CITY', 'GeoLiteCity.dat'))
if os.path.isfile(city_db):
self._city = geoip_open(city_db, cache)
self._city_file = city_db
elif os.path.isfile(path):
# Otherwise, some detective work will be needed to figure
# out whether the given database path is for the GeoIP country
# or city databases.
ptr = geoip_open(path, cache)
info = geoip_dbinfo(ptr)
if lite_regex.match(info):
# GeoLite City database detected.
self._city = ptr
self._city_file = path
elif free_regex.match(info):
# GeoIP Country database detected.
self._country = ptr
self._country_file = path
else:
raise GeoIPException('Unable to recognize database edition: %s' % info)
else:
raise GeoIPException('GeoIP path must be a valid file or directory.')
def __del__(self):
# Cleaning any GeoIP file handles lying around.
if self._country: geoip_close(self._country)
if self._city: geoip_close(self._city)
def _check_query(self, query, country=False, city=False, city_or_country=False):
"Helper routine for checking the query and database availability."
# Making sure a string was passed in for the query.
if not isinstance(query, basestring):
raise TypeError('GeoIP query must be a string, not type %s' % type(query).__name__)
# Extra checks for the existence of country and city databases.
if city_or_country and not (self._country or self._city):
raise GeoIPException('Invalid GeoIP country and city data files.')
elif country and not self._country:
raise GeoIPException('Invalid GeoIP country data file: %s' % self._country_file)
elif city and not self._city:
raise GeoIPException('Invalid GeoIP city data file: %s' % self._city_file)
def city(self, query):
"""
Returns a dictionary of city information for the given IP address or
Fully Qualified Domain Name (FQDN). Some information in the dictionary
may be undefined (None).
"""
self._check_query(query, city=True)
if ipregex.match(query):
# If an IP address was passed in
ptr = rec_by_addr(self._city, c_char_p(query))
else:
# If a FQDN was passed in.
ptr = rec_by_name(self._city, c_char_p(query))
# Checking the pointer to the C structure, if valid pull out elements
        # into a dictionary and return.
if bool(ptr):
record = ptr.contents
return dict((tup[0], getattr(record, tup[0])) for tup in record._fields_)
else:
return None
def country_code(self, query):
"Returns the country code for the given IP Address or FQDN."
self._check_query(query, city_or_country=True)
if self._country:
if ipregex.match(query): return cntry_code_by_addr(self._country, query)
else: return cntry_code_by_name(self._country, query)
else:
return self.city(query)['country_code']
def country_name(self, query):
"Returns the country name for the given IP Address or FQDN."
self._check_query(query, city_or_country=True)
if self._country:
if ipregex.match(query): return cntry_name_by_addr(self._country, query)
else: return cntry_name_by_name(self._country, query)
else:
return self.city(query)['country_name']
def country(self, query):
"""
        Returns a dictionary with the country code and name when given an
IP address or a Fully Qualified Domain Name (FQDN). For example, both
'24.124.1.80' and 'djangoproject.com' are valid parameters.
"""
# Returning the country code and name
return {'country_code' : self.country_code(query),
'country_name' : self.country_name(query),
}
#### Coordinate retrieval routines ####
def coords(self, query, ordering=('longitude', 'latitude')):
cdict = self.city(query)
if cdict is None: return None
else: return tuple(cdict[o] for o in ordering)
def lon_lat(self, query):
"Returns a tuple of the (longitude, latitude) for the given query."
return self.coords(query)
def lat_lon(self, query):
"Returns a tuple of the (latitude, longitude) for the given query."
return self.coords(query, ('latitude', 'longitude'))
def geos(self, query):
"Returns a GEOS Point object for the given query."
ll = self.lon_lat(query)
if ll:
from django.contrib.gis.geos import Point
return Point(ll, srid=4326)
else:
return None
#### GeoIP Database Information Routines ####
def country_info(self):
"Returns information about the GeoIP country database."
if self._country is None:
ci = 'No GeoIP Country data in "%s"' % self._country_file
else:
ci = geoip_dbinfo(self._country)
return ci
country_info = property(country_info)
def city_info(self):
"Retuns information about the GeoIP city database."
if self._city is None:
ci = 'No GeoIP City data in "%s"' % self._city_file
else:
ci = geoip_dbinfo(self._city)
return ci
city_info = property(city_info)
def info(self):
"Returns information about all GeoIP databases in use."
return 'Country:\n\t%s\nCity:\n\t%s' % (self.country_info, self.city_info)
info = property(info)
#### Methods for compatibility w/the GeoIP-Python API. ####
@classmethod
def open(cls, full_path, cache):
return GeoIP(full_path, cache)
def _rec_by_arg(self, arg):
if self._city:
return self.city(arg)
else:
return self.country(arg)
region_by_addr = city
region_by_name = city
record_by_addr = _rec_by_arg
record_by_name = _rec_by_arg
country_code_by_addr = country_code
country_code_by_name = country_code
country_name_by_addr = country_name
country_name_by_name = country_name
| bsd-3-clause |
coderbone/SickRage | lib/sqlalchemy/ext/hybrid.py | 79 | 27994 | # ext/hybrid.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Define attributes on ORM-mapped classes that have "hybrid" behavior.
"hybrid" means the attribute has distinct behaviors defined at the
class level and at the instance level.
The :mod:`~sqlalchemy.ext.hybrid` extension provides a special form of
method decorator, is around 50 lines of code and has almost no
dependencies on the rest of SQLAlchemy. It can, in theory, work with
any descriptor-based expression system.
Consider a mapping ``Interval``, representing integer ``start`` and ``end``
values. We can define higher level functions on mapped classes that produce
SQL expressions at the class level, and Python expression evaluation at the
instance level. Below, each function decorated with :class:`.hybrid_method` or
:class:`.hybrid_property` may receive ``self`` as an instance of the class, or
as the class itself::
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, aliased
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
Base = declarative_base()
class Interval(Base):
__tablename__ = 'interval'
id = Column(Integer, primary_key=True)
start = Column(Integer, nullable=False)
end = Column(Integer, nullable=False)
def __init__(self, start, end):
self.start = start
self.end = end
@hybrid_property
def length(self):
return self.end - self.start
@hybrid_method
        def contains(self, point):
return (self.start <= point) & (point < self.end)
@hybrid_method
def intersects(self, other):
return self.contains(other.start) | self.contains(other.end)
Above, the ``length`` property returns the difference between the
``end`` and ``start`` attributes. With an instance of ``Interval``,
this subtraction occurs in Python, using normal Python descriptor
mechanics::
>>> i1 = Interval(5, 10)
>>> i1.length
5
When dealing with the ``Interval`` class itself, the :class:`.hybrid_property`
descriptor evaluates the function body given the ``Interval`` class as
the argument, which when evaluated with SQLAlchemy expression mechanics
returns a new SQL expression::
>>> print Interval.length
interval."end" - interval.start
>>> print Session().query(Interval).filter(Interval.length > 10)
SELECT interval.id AS interval_id, interval.start AS interval_start,
interval."end" AS interval_end
FROM interval
WHERE interval."end" - interval.start > :param_1
ORM methods such as :meth:`~.Query.filter_by` generally use ``getattr()`` to
locate attributes, so can also be used with hybrid attributes::
>>> print Session().query(Interval).filter_by(length=5)
SELECT interval.id AS interval_id, interval.start AS interval_start,
interval."end" AS interval_end
FROM interval
WHERE interval."end" - interval.start = :param_1
The ``Interval`` class example also illustrates two methods,
``contains()`` and ``intersects()``, decorated with
:class:`.hybrid_method`. This decorator applies the same idea to
methods that :class:`.hybrid_property` applies to attributes. The
methods return boolean values, and take advantage of the Python ``|``
and ``&`` bitwise operators to produce equivalent instance-level and
SQL expression-level boolean behavior::
>>> i1.contains(6)
True
>>> i1.contains(15)
False
>>> i1.intersects(Interval(7, 18))
True
>>> i1.intersects(Interval(25, 29))
False
>>> print Session().query(Interval).filter(Interval.contains(15))
SELECT interval.id AS interval_id, interval.start AS interval_start,
interval."end" AS interval_end
FROM interval
WHERE interval.start <= :start_1 AND interval."end" > :end_1
>>> ia = aliased(Interval)
>>> print Session().query(Interval, ia).filter(Interval.intersects(ia))
SELECT interval.id AS interval_id, interval.start AS interval_start,
interval."end" AS interval_end, interval_1.id AS interval_1_id,
interval_1.start AS interval_1_start, interval_1."end" AS interval_1_end
FROM interval, interval AS interval_1
WHERE interval.start <= interval_1.start
AND interval."end" > interval_1.start
OR interval.start <= interval_1."end"
AND interval."end" > interval_1."end"
Defining Expression Behavior Distinct from Attribute Behavior
--------------------------------------------------------------
Our usage of the ``&`` and ``|`` bitwise operators above was
fortunate, considering our functions operated on two boolean values to
return a new one. In many cases, the construction of an in-Python
function and a SQLAlchemy SQL expression have enough differences that
two separate Python expressions should be defined. The
:mod:`~sqlalchemy.ext.hybrid` decorators define the
:meth:`.hybrid_property.expression` modifier for this purpose. As an
example we'll define the radius of the interval, which requires the
usage of the absolute value function::
from sqlalchemy import func
class Interval(object):
# ...
@hybrid_property
def radius(self):
return abs(self.length) / 2
@radius.expression
def radius(cls):
return func.abs(cls.length) / 2
Above the Python function ``abs()`` is used for instance-level
operations, the SQL function ``ABS()`` is used via the :attr:`.func`
object for class-level expressions::
>>> i1.radius
2
>>> print Session().query(Interval).filter(Interval.radius > 5)
SELECT interval.id AS interval_id, interval.start AS interval_start,
interval."end" AS interval_end
FROM interval
WHERE abs(interval."end" - interval.start) / :abs_1 > :param_1
Defining Setters
----------------
Hybrid properties can also define setter methods. If we wanted
``length`` above, when set, to modify the endpoint value::
class Interval(object):
# ...
@hybrid_property
def length(self):
return self.end - self.start
@length.setter
def length(self, value):
self.end = self.start + value
The ``length(self, value)`` method is now called upon set::
>>> i1 = Interval(5, 10)
>>> i1.length
5
>>> i1.length = 12
>>> i1.end
17
Working with Relationships
--------------------------
There's no essential difference when creating hybrids that work with
related objects as opposed to column-based data. The need for distinct
expressions tends to be greater. The two variants we'll illustrate
are the "join-dependent" hybrid, and the "correlated subquery" hybrid.
Join-Dependent Relationship Hybrid
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Consider the following declarative
mapping which relates a ``User`` to a ``SavingsAccount``::
from sqlalchemy import Column, Integer, ForeignKey, Numeric, String
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
Base = declarative_base()
class SavingsAccount(Base):
__tablename__ = 'account'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('user.id'), nullable=False)
balance = Column(Numeric(15, 5))
class User(Base):
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
name = Column(String(100), nullable=False)
accounts = relationship("SavingsAccount", backref="owner")
@hybrid_property
def balance(self):
if self.accounts:
return self.accounts[0].balance
else:
return None
@balance.setter
def balance(self, value):
if not self.accounts:
                account = SavingsAccount(owner=self)
else:
account = self.accounts[0]
account.balance = value
@balance.expression
def balance(cls):
return SavingsAccount.balance
The above hybrid property ``balance`` works with the first
``SavingsAccount`` entry in the list of accounts for this user. The
in-Python getter/setter methods can treat ``accounts`` as a Python
list available on ``self``.
However, at the expression level, it's expected that the ``User`` class will
be used in an appropriate context such that an appropriate join to
``SavingsAccount`` will be present::
>>> print Session().query(User, User.balance).\\
... join(User.accounts).filter(User.balance > 5000)
SELECT "user".id AS user_id, "user".name AS user_name,
account.balance AS account_balance
FROM "user" JOIN account ON "user".id = account.user_id
WHERE account.balance > :balance_1
Note however, that while the instance level accessors need to worry
about whether ``self.accounts`` is even present, this issue expresses
itself differently at the SQL expression level, where we basically
would use an outer join::
>>> from sqlalchemy import or_
>>> print (Session().query(User, User.balance).outerjoin(User.accounts).
... filter(or_(User.balance < 5000, User.balance == None)))
SELECT "user".id AS user_id, "user".name AS user_name,
account.balance AS account_balance
FROM "user" LEFT OUTER JOIN account ON "user".id = account.user_id
WHERE account.balance < :balance_1 OR account.balance IS NULL
Correlated Subquery Relationship Hybrid
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
We can, of course, forego being dependent on the enclosing query's usage
of joins in favor of the correlated subquery, which can portably be packed
into a single column expression. A correlated subquery is more portable, but
often performs more poorly at the SQL level. Using the same technique
illustrated at :ref:`mapper_column_property_sql_expressions`,
we can adjust our ``SavingsAccount`` example to aggregate the balances for
*all* accounts, and use a correlated subquery for the column expression::
from sqlalchemy import Column, Integer, ForeignKey, Numeric, String
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy import select, func
Base = declarative_base()
class SavingsAccount(Base):
__tablename__ = 'account'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('user.id'), nullable=False)
balance = Column(Numeric(15, 5))
class User(Base):
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
name = Column(String(100), nullable=False)
accounts = relationship("SavingsAccount", backref="owner")
@hybrid_property
def balance(self):
return sum(acc.balance for acc in self.accounts)
@balance.expression
def balance(cls):
return select([func.sum(SavingsAccount.balance)]).\\
where(SavingsAccount.user_id==cls.id).\\
label('total_balance')
The above recipe will give us the ``balance`` column which renders
a correlated SELECT::
>>> print s.query(User).filter(User.balance > 400)
SELECT "user".id AS user_id, "user".name AS user_name
FROM "user"
WHERE (SELECT sum(account.balance) AS sum_1
FROM account
WHERE account.user_id = "user".id) > :param_1
.. _hybrid_custom_comparators:
Building Custom Comparators
---------------------------
The hybrid property also includes a helper that allows construction of
custom comparators. A comparator object allows one to customize the
behavior of each SQLAlchemy expression operator individually. They
are useful when creating custom types that have some highly
idiosyncratic behavior on the SQL side.
The example class below allows case-insensitive comparisons on the attribute
named ``word_insensitive``::
from sqlalchemy.ext.hybrid import Comparator, hybrid_property
from sqlalchemy import func, Column, Integer, String
from sqlalchemy.orm import Session
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class CaseInsensitiveComparator(Comparator):
def __eq__(self, other):
return func.lower(self.__clause_element__()) == func.lower(other)
class SearchWord(Base):
__tablename__ = 'searchword'
id = Column(Integer, primary_key=True)
word = Column(String(255), nullable=False)
@hybrid_property
def word_insensitive(self):
return self.word.lower()
@word_insensitive.comparator
def word_insensitive(cls):
return CaseInsensitiveComparator(cls.word)
Above, SQL expressions against ``word_insensitive`` will apply the ``LOWER()``
SQL function to both sides::
>>> print Session().query(SearchWord).filter_by(word_insensitive="Trucks")
SELECT searchword.id AS searchword_id, searchword.word AS searchword_word
FROM searchword
WHERE lower(searchword.word) = lower(:lower_1)
The ``CaseInsensitiveComparator`` above implements part of the
:class:`.ColumnOperators` interface. A "coercion" operation like
lowercasing can be applied to all comparison operations (i.e. ``eq``,
``lt``, ``gt``, etc.) using :meth:`.Operators.operate`::
class CaseInsensitiveComparator(Comparator):
def operate(self, op, other):
return op(func.lower(self.__clause_element__()), func.lower(other))
Hybrid Value Objects
--------------------
Note in our previous example, if we were to compare the
``word_insensitive`` attribute of a ``SearchWord`` instance to a plain
Python string, the plain Python string would not be coerced to lower
case - the ``CaseInsensitiveComparator`` we built, being returned by
``@word_insensitive.comparator``, only applies to the SQL side.
A more comprehensive form of the custom comparator is to construct a
*Hybrid Value Object*. This technique applies the target value or
expression to a value object which is then returned by the accessor in
all cases. The value object allows control of all operations upon
the value as well as how compared values are treated, both on the SQL
expression side as well as the Python value side. Replacing the
previous ``CaseInsensitiveComparator`` class with a new
``CaseInsensitiveWord`` class::
class CaseInsensitiveWord(Comparator):
"Hybrid value representing a lower case representation of a word."
def __init__(self, word):
if isinstance(word, basestring):
self.word = word.lower()
elif isinstance(word, CaseInsensitiveWord):
self.word = word.word
else:
self.word = func.lower(word)
def operate(self, op, other):
if not isinstance(other, CaseInsensitiveWord):
other = CaseInsensitiveWord(other)
return op(self.word, other.word)
def __clause_element__(self):
return self.word
def __str__(self):
return self.word
key = 'word'
"Label to apply to Query tuple results"
Above, the ``CaseInsensitiveWord`` object represents ``self.word``,
which may be a SQL function, or may be a Python native. By
overriding ``operate()`` and ``__clause_element__()`` to work in terms
of ``self.word``, all comparison operations will work against the
"converted" form of ``word``, whether it be SQL side or Python side.
Our ``SearchWord`` class can now deliver the ``CaseInsensitiveWord``
object unconditionally from a single hybrid call::
class SearchWord(Base):
__tablename__ = 'searchword'
id = Column(Integer, primary_key=True)
word = Column(String(255), nullable=False)
@hybrid_property
def word_insensitive(self):
return CaseInsensitiveWord(self.word)
The ``word_insensitive`` attribute now has case-insensitive comparison
behavior universally, including SQL expression vs. Python expression
(note the Python value is converted to lower case on the Python side
here)::
>>> print Session().query(SearchWord).filter_by(word_insensitive="Trucks")
SELECT searchword.id AS searchword_id, searchword.word AS searchword_word
FROM searchword
WHERE lower(searchword.word) = :lower_1
SQL expression versus SQL expression::
>>> sw1 = aliased(SearchWord)
>>> sw2 = aliased(SearchWord)
>>> print Session().query(
... sw1.word_insensitive,
... sw2.word_insensitive).\\
... filter(
... sw1.word_insensitive > sw2.word_insensitive
... )
SELECT lower(searchword_1.word) AS lower_1,
lower(searchword_2.word) AS lower_2
FROM searchword AS searchword_1, searchword AS searchword_2
WHERE lower(searchword_1.word) > lower(searchword_2.word)
Python only expression::
>>> ws1 = SearchWord(word="SomeWord")
>>> ws1.word_insensitive == "sOmEwOrD"
True
>>> ws1.word_insensitive == "XOmEwOrX"
False
>>> print ws1.word_insensitive
someword
The Hybrid Value pattern is very useful for any kind of value that may
have multiple representations, such as timestamps, time deltas, units
of measurement, currencies and encrypted passwords.
.. seealso::
`Hybrids and Value Agnostic Types
<http://techspot.zzzeek.org/2011/10/21/hybrids-and-value-agnostic-types/>`_ -
on the techspot.zzzeek.org blog
`Value Agnostic Types, Part II
<http://techspot.zzzeek.org/2011/10/29/value-agnostic-types-part-ii/>`_ -
on the techspot.zzzeek.org blog
.. _hybrid_transformers:
Building Transformers
----------------------
A *transformer* is an object which can receive a :class:`.Query`
object and return a new one. The :class:`.Query` object includes a
method :meth:`.with_transformation` that returns a new :class:`.Query`
transformed by the given function.
We can combine this with the :class:`.Comparator` class to produce one type
of recipe which can both set up the FROM clause of a query as well as assign
filtering criterion.
Consider a mapped class ``Node``, which assembles using adjacency list
into a hierarchical tree pattern::
from sqlalchemy import Column, Integer, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Node(Base):
__tablename__ = 'node'
        id = Column(Integer, primary_key=True)
parent_id = Column(Integer, ForeignKey('node.id'))
parent = relationship("Node", remote_side=id)
Suppose we wanted to add an accessor ``grandparent``. This would
return the ``parent`` of ``Node.parent``. When we have an instance of
``Node``, this is simple::
from sqlalchemy.ext.hybrid import hybrid_property
class Node(Base):
# ...
@hybrid_property
def grandparent(self):
return self.parent.parent
For the expression, things are not so clear. We'd need to construct
a :class:`.Query` where we :meth:`~.Query.join` twice along
``Node.parent`` to get to the ``grandparent``. We can instead return
a transforming callable that we'll combine with the
:class:`.Comparator` class to receive any :class:`.Query` object, and
return a new one that's joined to the ``Node.parent`` attribute and
filtered based on the given criterion::
from sqlalchemy.ext.hybrid import Comparator
class GrandparentTransformer(Comparator):
def operate(self, op, other):
def transform(q):
cls = self.__clause_element__()
parent_alias = aliased(cls)
return q.join(parent_alias, cls.parent).\\
filter(op(parent_alias.parent, other))
return transform
Base = declarative_base()
class Node(Base):
__tablename__ = 'node'
        id = Column(Integer, primary_key=True)
parent_id = Column(Integer, ForeignKey('node.id'))
parent = relationship("Node", remote_side=id)
@hybrid_property
def grandparent(self):
return self.parent.parent
@grandparent.comparator
def grandparent(cls):
return GrandparentTransformer(cls)
The ``GrandparentTransformer`` overrides the core
:meth:`.Operators.operate` method at the base of the
:class:`.Comparator` hierarchy to return a query-transforming
callable, which then runs the given comparison operation in a
particular context. In the example above, the ``operate``
method is called with the :attr:`.Operators.eq` callable as well as
the right side of the comparison ``Node(id=5)``. A function
``transform`` is then returned which will transform a :class:`.Query`
first to join to ``Node.parent``, then to compare ``parent_alias``
using :attr:`.Operators.eq` against the left and right sides, passing
into :class:`.Query.filter`:
.. sourcecode:: pycon+sql
>>> from sqlalchemy.orm import Session
>>> session = Session()
{sql}>>> session.query(Node).\\
... with_transformation(Node.grandparent==Node(id=5)).\\
... all()
SELECT node.id AS node_id, node.parent_id AS node_parent_id
FROM node JOIN node AS node_1 ON node_1.id = node.parent_id
WHERE :param_1 = node_1.parent_id
{stop}
We can modify the pattern to be more verbose but flexible by separating
the "join" step from the "filter" step. The tricky part here is ensuring
that successive instances of ``GrandparentTransformer`` use the same
:class:`.AliasedClass` object against ``Node``. Below we use a simple
memoizing approach that associates a ``GrandparentTransformer``
with each class::
class Node(Base):
# ...
@grandparent.comparator
def grandparent(cls):
# memoize a GrandparentTransformer
# per class
if '_gp' not in cls.__dict__:
cls._gp = GrandparentTransformer(cls)
return cls._gp
class GrandparentTransformer(Comparator):
def __init__(self, cls):
self.parent_alias = aliased(cls)
@property
def join(self):
def go(q):
return q.join(self.parent_alias, Node.parent)
return go
def operate(self, op, other):
return op(self.parent_alias.parent, other)
.. sourcecode:: pycon+sql
{sql}>>> session.query(Node).\\
... with_transformation(Node.grandparent.join).\\
... filter(Node.grandparent==Node(id=5))
SELECT node.id AS node_id, node.parent_id AS node_parent_id
FROM node JOIN node AS node_1 ON node_1.id = node.parent_id
WHERE :param_1 = node_1.parent_id
{stop}
The "transformer" pattern is an experimental pattern that starts
to make usage of some functional programming paradigms.
While it's only recommended for advanced and/or patient developers,
there's probably a whole lot of amazing things it can be used for.
"""
from .. import util
from ..orm import attributes, interfaces
HYBRID_METHOD = util.symbol('HYBRID_METHOD')
"""Symbol indicating an :class:`_InspectionAttr` that's
of type :class:`.hybrid_method`.
Is assigned to the :attr:`._InspectionAttr.extension_type`
attribute.
.. seealso::
:attr:`.Mapper.all_orm_attributes`
"""
HYBRID_PROPERTY = util.symbol('HYBRID_PROPERTY')
"""Symbol indicating an :class:`_InspectionAttr` that's
of type :class:`.hybrid_property`.
Is assigned to the :attr:`._InspectionAttr.extension_type`
attribute.
.. seealso::
:attr:`.Mapper.all_orm_attributes`
"""
class hybrid_method(interfaces._InspectionAttr):
"""A decorator which allows definition of a Python object method with both
instance-level and class-level behavior.
"""
is_attribute = True
extension_type = HYBRID_METHOD
def __init__(self, func, expr=None):
"""Create a new :class:`.hybrid_method`.
Usage is typically via decorator::
from sqlalchemy.ext.hybrid import hybrid_method
class SomeClass(object):
@hybrid_method
def value(self, x, y):
return self._value + x + y
@value.expression
def value(self, x, y):
return func.some_function(self._value, x, y)
"""
self.func = func
self.expr = expr or func
def __get__(self, instance, owner):
if instance is None:
return self.expr.__get__(owner, owner.__class__)
else:
return self.func.__get__(instance, owner)
def expression(self, expr):
"""Provide a modifying decorator that defines a
SQL-expression producing method."""
self.expr = expr
return self
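# Illustrative (using the hypothetical SomeClass from the docstring above):
# an instance call ``SomeClass(...).value(1, 2)`` runs the plain Python
# method, while the class-level ``SomeClass.value(1, 2)`` dispatches to the
# ``@value.expression`` variant and returns a SQL expression construct.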
class hybrid_property(interfaces._InspectionAttr):
"""A decorator which allows definition of a Python descriptor with both
instance-level and class-level behavior.
"""
is_attribute = True
extension_type = HYBRID_PROPERTY
def __init__(self, fget, fset=None, fdel=None, expr=None):
"""Create a new :class:`.hybrid_property`.
Usage is typically via decorator::
from sqlalchemy.ext.hybrid import hybrid_property
class SomeClass(object):
@hybrid_property
def value(self):
return self._value
@value.setter
def value(self, value):
self._value = value
"""
self.fget = fget
self.fset = fset
self.fdel = fdel
self.expr = expr or fget
util.update_wrapper(self, fget)
def __get__(self, instance, owner):
if instance is None:
return self.expr(owner)
else:
return self.fget(instance)
def __set__(self, instance, value):
if self.fset is None:
raise AttributeError("can't set attribute")
self.fset(instance, value)
def __delete__(self, instance):
if self.fdel is None:
raise AttributeError("can't delete attribute")
self.fdel(instance)
def setter(self, fset):
"""Provide a modifying decorator that defines a value-setter method."""
self.fset = fset
return self
def deleter(self, fdel):
"""Provide a modifying decorator that defines a
value-deletion method."""
self.fdel = fdel
return self
def expression(self, expr):
"""Provide a modifying decorator that defines a SQL-expression
producing method."""
self.expr = expr
return self
def comparator(self, comparator):
"""Provide a modifying decorator that defines a custom
comparator producing method.
The return value of the decorated method should be an instance of
:class:`~.hybrid.Comparator`.
"""
        proxy_attr = attributes.create_proxied_attribute(self)
def expr(owner):
return proxy_attr(owner, self.__name__, self, comparator(owner))
self.expr = expr
return self
class Comparator(interfaces.PropComparator):
"""A helper class that allows easy construction of custom
:class:`~.orm.interfaces.PropComparator`
classes for usage with hybrids."""
property = None
def __init__(self, expression):
self.expression = expression
def __clause_element__(self):
expr = self.expression
while hasattr(expr, '__clause_element__'):
expr = expr.__clause_element__()
return expr
def adapt_to_entity(self, adapt_to_entity):
# interesting....
return self
| gpl-3.0 |
BhavyaLight/information-retrival-search-engine | informationRetrival/frontend/views.py | 1 | 6155 | from django.shortcuts import render
from .forms import SearchForm, ClassifyForm
from whoosh.qparser import MultifieldParser, QueryParser
from whoosh import index as i
from whoosh import scoring
import whoosh.query as QRY
import time
import pandas as pd
from datetime import datetime
from indexing.crawl import crawl_and_update
from classification.classify import Classification
from sklearn.externals import joblib
from django.contrib.staticfiles.templatetags.staticfiles import static
INDEX_FILE = '/Users/noopurjain/Desktop/Index'
WRITE_FILE = '/Users/noopurjain/Desktop/Trial_2'
CLASSIFICATION_PATH = '/mnt/d/model_files_new_with_voting_with_weights/'
def show(request):
if request.method == 'POST':
overview = request.POST.get('overview')
title = request.POST.get('title')
poster_path = request.POST.get('poster_path')
id = request.POST.get('imdb_id')
print (id)
ix = i.open_dir(INDEX_FILE)
searcher = ix.searcher()
docnum = searcher.document_number(imdb_id=id)
recoms = searcher.more_like(docnum,'overview')
return render(request, 'frontend/show.html', {'overview': overview, 'title': title, 'poster_path': poster_path, 'recommendations': recoms})
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
search_field = form.cleaned_data['search_field']
query = form.cleaned_data['search_text']
rating = request.GET.get("rating")
year = request.GET.get("year")
query = query.replace('+', ' AND ').replace('-', ' NOT ')
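            # Illustrative: "batman+robin-horror" becomes "batman AND robin NOT horror"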
filter_q = None
# TODO: Change Directory here
ix = i.open_dir(INDEX_FILE)
start_time = time.time()
if query is not None and query != u"":
parser = MultifieldParser(search_field, schema=ix.schema)
                if year is not None and rating is not None:
date_q = QRY.DateRange("release_date", datetime.strptime(year.split(",")[0], "%Y"),\
datetime.strptime(year.split(",")[1], "%Y"))
rating_q = QRY.NumericRange("vote_average",int(rating.split(",")[0]), int(rating.split(",")[1]))
filter_q = QRY.Require(date_q, rating_q)
else:
year = "1970,2017"
rating = "2,8"
                try:
                    qry = parser.parse(query)
                except Exception:
                    return render(request, 'frontend/index.html', {'error': True, 'message': "Could not parse query!", 'form': form})
if qry is not None:
searcher = ix.searcher(weighting=scoring.TF_IDF())
corrected = searcher.correct_query(qry, query)
if corrected.query != qry:
return render(request, 'frontend/index.html', {'search_field': search_field, 'correction': True, 'suggested': corrected.string, 'form': form})
hits = searcher.search(qry,filter=filter_q,limit=None)
elapsed_time = time.time() - start_time
elapsed_time = "{0:.3f}".format(elapsed_time)
return render(request, 'frontend/index.html', {'search_field': search_field, 'search_text': form.cleaned_data['search_text'], \
'error': False, 'hits': hits, 'form':form, 'elapsed': elapsed_time,\
'number': len(hits), 'year': year, 'rating': rating})
else:
return render(request, 'frontend/index.html', {'error': True, 'message':"Sorry couldn't parse", 'form':form})
else:
return render(request, 'frontend/index.html', {'error': True, 'message':'oops', 'form':form})
else:
form = SearchForm()
return render(request, 'frontend/index.html', {'form': form})
def classification(request):
results_dict = Classification(CLASSIFICATION_PATH).get_classification_results()
results = pd.DataFrame(results_dict)
for column in ['romance','crime','horror']:
results[column] = results[column].apply(lambda x: str((int(x.split('/')[0]) * 100)/int(x.split('/')[1]))+" %")
results.columns = ['F(1) Score', 'F(W) Score', 'Recall', 'Accuracy', 'Crime', 'Horror', 'Model', 'Precision', 'Romance','Vectorizer']
results = results[['Model','Vectorizer', 'Crime', 'Horror', 'Romance', 'F(1) Score', 'F(W) Score', 'Recall', 'Accuracy', 'Precision']]
    results = results.to_html()
if request.method == "POST":
form = ClassifyForm(request.POST)
if form.is_valid():
plot = form.cleaned_data['classify_plot']
genre, time = Classification(CLASSIFICATION_PATH).Classify_Text(plot)
return render(request, 'frontend/classify.html', {'results': results, 'form': form, 'genre': genre[0], 'time': time})
else:
return render(request, 'frontend/classify.html', {'results': results, 'form': form})
else:
form = ClassifyForm()
return render(request, 'frontend/classify.html', {'results': results, 'form': form})
def crawl(request):
if request.method == "GET":
form = SearchForm(request.GET)
date_now = datetime.now()
search_field = request.GET.get('search_field')
query = request.GET.get('search_text')
ix = i.open_dir(INDEX_FILE)
parser = QueryParser("release_date", schema=ix.schema)
qry = parser.parse(date_now.strftime("%Y-%m-%d"))
searcher = ix.searcher()
hits = searcher.search(qry, limit=1)
print (len(hits))
if (len(hits)==0):
# send new records directory to the indexing function to add them to the index
total_records = crawl_and_update(date_now, WRITE_FILE, INDEX_FILE)
else:
total_records = "Already up-to-date"
return render(request, 'frontend/crawl.html', {'total_records': total_records, 'form': form})
| mit |
anntzer/scikit-learn | sklearn/manifold/_t_sne.py | 8 | 43233 | # Author: Alexander Fabisch -- <afabisch@informatik.uni-bremen.de>
# Author: Christopher Moody <chrisemoody@gmail.com>
# Author: Nick Travers <nickt@squareup.com>
# License: BSD 3 clause (C) 2014
# This is the exact and Barnes-Hut t-SNE implementation. There are other
# modifications of the algorithm:
# * Fast Optimization for t-SNE:
# https://cseweb.ucsd.edu/~lvdmaaten/workshops/nips2010/papers/vandermaaten.pdf
import warnings
from time import time
import numpy as np
from scipy import linalg
from scipy.spatial.distance import pdist
from scipy.spatial.distance import squareform
from scipy.sparse import csr_matrix, issparse
from numbers import Integral, Real
from ..neighbors import NearestNeighbors
from ..base import BaseEstimator
from ..utils import check_random_state
from ..utils._openmp_helpers import _openmp_effective_n_threads
from ..utils.validation import check_non_negative
from ..utils._param_validation import Interval, StrOptions, Hidden
from ..decomposition import PCA
from ..metrics.pairwise import pairwise_distances, _VALID_METRICS
# mypy error: Module 'sklearn.manifold' has no attribute '_utils'
from . import _utils # type: ignore
# mypy error: Module 'sklearn.manifold' has no attribute '_barnes_hut_tsne'
from . import _barnes_hut_tsne # type: ignore
MACHINE_EPSILON = np.finfo(np.double).eps
def _joint_probabilities(distances, desired_perplexity, verbose):
"""Compute joint probabilities p_ij from distances.
Parameters
----------
distances : ndarray of shape (n_samples * (n_samples-1) / 2,)
Distances of samples are stored as condensed matrices, i.e.
we omit the diagonal and duplicate entries and store everything
in a one-dimensional array.
desired_perplexity : float
Desired perplexity of the joint probability distributions.
verbose : int
Verbosity level.
Returns
-------
P : ndarray of shape (n_samples * (n_samples-1) / 2,)
Condensed joint probability matrix.
"""
# Compute conditional probabilities such that they approximately match
# the desired perplexity
distances = distances.astype(np.float32, copy=False)
conditional_P = _utils._binary_search_perplexity(
distances, desired_perplexity, verbose
)
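    # Symmetrize the conditional probabilities and renormalize so that the
    # full joint distribution sums to one; the result is returned in
    # condensed (strictly upper-triangular) form.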
P = conditional_P + conditional_P.T
sum_P = np.maximum(np.sum(P), MACHINE_EPSILON)
P = np.maximum(squareform(P) / sum_P, MACHINE_EPSILON)
return P
def _joint_probabilities_nn(distances, desired_perplexity, verbose):
"""Compute joint probabilities p_ij from distances using just nearest
neighbors.
    This method is approximately equal to _joint_probabilities. The latter
    is O(N^2), but limiting the joint probability to nearest neighbors
    improves this substantially to O(uN).
Parameters
----------
distances : sparse matrix of shape (n_samples, n_samples)
        Distances of samples to their n_neighbors nearest neighbors. All other
        distances are left at zero (and are not materialized in memory).
Matrix should be of CSR format.
desired_perplexity : float
Desired perplexity of the joint probability distributions.
verbose : int
Verbosity level.
Returns
-------
P : sparse matrix of shape (n_samples, n_samples)
        Sparse joint probability matrix with only nearest neighbors. Matrix
will be of CSR format.
"""
t0 = time()
# Compute conditional probabilities such that they approximately match
# the desired perplexity
distances.sort_indices()
n_samples = distances.shape[0]
distances_data = distances.data.reshape(n_samples, -1)
distances_data = distances_data.astype(np.float32, copy=False)
conditional_P = _utils._binary_search_perplexity(
distances_data, desired_perplexity, verbose
)
assert np.all(np.isfinite(conditional_P)), "All probabilities should be finite"
# Symmetrize the joint probability distribution using sparse operations
P = csr_matrix(
(conditional_P.ravel(), distances.indices, distances.indptr),
shape=(n_samples, n_samples),
)
P = P + P.T
# Normalize the joint probability distribution
sum_P = np.maximum(P.sum(), MACHINE_EPSILON)
P /= sum_P
assert np.all(np.abs(P.data) <= 1.0)
if verbose >= 2:
duration = time() - t0
print("[t-SNE] Computed conditional probabilities in {:.3f}s".format(duration))
return P
def _kl_divergence(
params,
P,
degrees_of_freedom,
n_samples,
n_components,
skip_num_points=0,
compute_error=True,
):
"""t-SNE objective function: gradient of the KL divergence
of p_ijs and q_ijs and the absolute error.
Parameters
----------
params : ndarray of shape (n_params,)
Unraveled embedding.
P : ndarray of shape (n_samples * (n_samples-1) / 2,)
Condensed joint probability matrix.
degrees_of_freedom : int
Degrees of freedom of the Student's-t distribution.
n_samples : int
Number of samples.
n_components : int
Dimension of the embedded space.
skip_num_points : int, default=0
This does not compute the gradient for points with indices below
`skip_num_points`. This is useful when computing transforms of new
data where you'd like to keep the old data fixed.
    compute_error : bool, default=True
If False, the kl_divergence is not computed and returns NaN.
Returns
-------
kl_divergence : float
Kullback-Leibler divergence of p_ij and q_ij.
grad : ndarray of shape (n_params,)
Unraveled gradient of the Kullback-Leibler divergence with respect to
the embedding.
"""
X_embedded = params.reshape(n_samples, n_components)
# Q is a heavy-tailed distribution: Student's t-distribution
dist = pdist(X_embedded, "sqeuclidean")
dist /= degrees_of_freedom
dist += 1.0
dist **= (degrees_of_freedom + 1.0) / -2.0
Q = np.maximum(dist / (2.0 * np.sum(dist)), MACHINE_EPSILON)
# Optimization trick below: np.dot(x, y) is faster than
# np.sum(x * y) because it calls BLAS
# Objective: C (Kullback-Leibler divergence of P and Q)
if compute_error:
kl_divergence = 2.0 * np.dot(P, np.log(np.maximum(P, MACHINE_EPSILON) / Q))
else:
kl_divergence = np.nan
# Gradient: dC/dY
    # pdist always returns double precision distances. Thus we need to
    # allocate the gradient explicitly with the dtype of the parameters.
grad = np.ndarray((n_samples, n_components), dtype=params.dtype)
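    # PQd[i, j] holds (p_ij - q_ij) weighted by the unnormalized Student-t
    # kernel; each grad[i] accumulates the resulting attractive (P) and
    # repulsive (Q) forces between point i and all other points.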
PQd = squareform((P - Q) * dist)
for i in range(skip_num_points, n_samples):
grad[i] = np.dot(np.ravel(PQd[i], order="K"), X_embedded[i] - X_embedded)
grad = grad.ravel()
c = 2.0 * (degrees_of_freedom + 1.0) / degrees_of_freedom
grad *= c
return kl_divergence, grad
def _kl_divergence_bh(
params,
P,
degrees_of_freedom,
n_samples,
n_components,
angle=0.5,
skip_num_points=0,
verbose=False,
compute_error=True,
num_threads=1,
):
"""t-SNE objective function: KL divergence of p_ijs and q_ijs.
Uses Barnes-Hut tree methods to calculate the gradient that
runs in O(NlogN) instead of O(N^2).
Parameters
----------
params : ndarray of shape (n_params,)
Unraveled embedding.
    P : sparse matrix of shape (n_samples, n_samples)
Sparse approximate joint probability matrix, computed only for the
k nearest-neighbors and symmetrized. Matrix should be of CSR format.
degrees_of_freedom : int
Degrees of freedom of the Student's-t distribution.
n_samples : int
Number of samples.
n_components : int
Dimension of the embedded space.
angle : float, default=0.5
This is the trade-off between speed and accuracy for Barnes-Hut T-SNE.
'angle' is the angular size (referred to as theta in [3]) of a distant
node as measured from a point. If this size is below 'angle' then it is
used as a summary node of all points contained within it.
This method is not very sensitive to changes in this parameter
in the range of 0.2 - 0.8. Angle less than 0.2 has quickly increasing
        computation time and angle greater than 0.8 has quickly increasing error.
skip_num_points : int, default=0
This does not compute the gradient for points with indices below
`skip_num_points`. This is useful when computing transforms of new
data where you'd like to keep the old data fixed.
verbose : int, default=False
Verbosity level.
    compute_error : bool, default=True
If False, the kl_divergence is not computed and returns NaN.
num_threads : int, default=1
Number of threads used to compute the gradient. This is set here to
avoid calling _openmp_effective_n_threads for each gradient step.
Returns
-------
kl_divergence : float
Kullback-Leibler divergence of p_ij and q_ij.
grad : ndarray of shape (n_params,)
Unraveled gradient of the Kullback-Leibler divergence with respect to
the embedding.
"""
params = params.astype(np.float32, copy=False)
X_embedded = params.reshape(n_samples, n_components)
val_P = P.data.astype(np.float32, copy=False)
neighbors = P.indices.astype(np.int64, copy=False)
indptr = P.indptr.astype(np.int64, copy=False)
grad = np.zeros(X_embedded.shape, dtype=np.float32)
error = _barnes_hut_tsne.gradient(
val_P,
X_embedded,
neighbors,
indptr,
grad,
angle,
n_components,
verbose,
dof=degrees_of_freedom,
compute_error=compute_error,
num_threads=num_threads,
)
c = 2.0 * (degrees_of_freedom + 1.0) / degrees_of_freedom
grad = grad.ravel()
grad *= c
return error, grad
def _gradient_descent(
objective,
p0,
it,
n_iter,
n_iter_check=1,
n_iter_without_progress=300,
momentum=0.8,
learning_rate=200.0,
min_gain=0.01,
min_grad_norm=1e-7,
verbose=0,
args=None,
kwargs=None,
):
"""Batch gradient descent with momentum and individual gains.
Parameters
----------
objective : callable
Should return a tuple of cost and gradient for a given parameter
        vector. When expensive to compute, the cost is only evaluated every
        n_iter_check steps: in between, the objective is called with
        compute_error=False and may return NaN for the cost.
p0 : array-like of shape (n_params,)
Initial parameter vector.
it : int
Current number of iterations (this function will be called more than
once during the optimization).
n_iter : int
Maximum number of gradient descent iterations.
n_iter_check : int, default=1
Number of iterations before evaluating the global error. If the error
is sufficiently low, we abort the optimization.
n_iter_without_progress : int, default=300
Maximum number of iterations without progress before we abort the
optimization.
momentum : float within (0.0, 1.0), default=0.8
The momentum generates a weight for previous gradients that decays
exponentially.
learning_rate : float, default=200.0
The learning rate for t-SNE is usually in the range [10.0, 1000.0]. If
the learning rate is too high, the data may look like a 'ball' with any
point approximately equidistant from its nearest neighbours. If the
learning rate is too low, most points may look compressed in a dense
cloud with few outliers.
min_gain : float, default=0.01
Minimum individual gain for each parameter.
min_grad_norm : float, default=1e-7
If the gradient norm is below this threshold, the optimization will
be aborted.
verbose : int, default=0
Verbosity level.
args : sequence, default=None
Arguments to pass to objective function.
kwargs : dict, default=None
Keyword arguments to pass to objective function.
Returns
-------
p : ndarray of shape (n_params,)
Optimum parameters.
error : float
        Final value of the objective function (the optimum reached).
i : int
Last iteration.
"""
if args is None:
args = []
if kwargs is None:
kwargs = {}
p = p0.copy().ravel()
update = np.zeros_like(p)
gains = np.ones_like(p)
error = np.finfo(float).max
best_error = np.finfo(float).max
best_iter = i = it
tic = time()
for i in range(it, n_iter):
check_convergence = (i + 1) % n_iter_check == 0
# only compute the error when needed
kwargs["compute_error"] = check_convergence or i == n_iter - 1
error, grad = objective(p, *args, **kwargs)
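        # Adaptive per-parameter gains: when the previous update and the
        # current gradient point in opposite directions, the descent
        # direction has been consistent (updates move against the gradient),
        # so the gain grows additively; otherwise it shrinks multiplicatively.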
inc = update * grad < 0.0
dec = np.invert(inc)
gains[inc] += 0.2
gains[dec] *= 0.8
np.clip(gains, min_gain, np.inf, out=gains)
grad *= gains
update = momentum * update - learning_rate * grad
p += update
if check_convergence:
toc = time()
duration = toc - tic
tic = toc
grad_norm = linalg.norm(grad)
if verbose >= 2:
print(
"[t-SNE] Iteration %d: error = %.7f,"
" gradient norm = %.7f"
" (%s iterations in %0.3fs)"
% (i + 1, error, grad_norm, n_iter_check, duration)
)
if error < best_error:
best_error = error
best_iter = i
elif i - best_iter > n_iter_without_progress:
if verbose >= 2:
print(
"[t-SNE] Iteration %d: did not make any progress "
"during the last %d episodes. Finished."
% (i + 1, n_iter_without_progress)
)
break
if grad_norm <= min_grad_norm:
if verbose >= 2:
print(
"[t-SNE] Iteration %d: gradient norm %f. Finished."
% (i + 1, grad_norm)
)
break
return p, error, i
def trustworthiness(X, X_embedded, *, n_neighbors=5, metric="euclidean"):
r"""Indicate to what extent the local structure is retained.
The trustworthiness is within [0, 1]. It is defined as
.. math::
T(k) = 1 - \frac{2}{nk (2n - 3k - 1)} \sum^n_{i=1}
\sum_{j \in \mathcal{N}_{i}^{k}} \max(0, (r(i, j) - k))
where for each sample i, :math:`\mathcal{N}_{i}^{k}` are its k nearest
neighbors in the output space, and every sample j is its :math:`r(i, j)`-th
nearest neighbor in the input space. In other words, any unexpected nearest
neighbors in the output space are penalised in proportion to their rank in
the input space.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features) or \
(n_samples, n_samples)
If the metric is 'precomputed' X must be a square distance
matrix. Otherwise it contains a sample per row.
X_embedded : {array-like, sparse matrix} of shape (n_samples, n_components)
Embedding of the training data in low-dimensional space.
n_neighbors : int, default=5
The number of neighbors that will be considered. Should be fewer than
        `n_samples / 2` to ensure that the trustworthiness lies within [0, 1], as
mentioned in [1]_. An error will be raised otherwise.
metric : str or callable, default='euclidean'
Which metric to use for computing pairwise distances between samples
from the original input space. If metric is 'precomputed', X must be a
matrix of pairwise distances or squared distances. Otherwise, for a list
of available metrics, see the documentation of argument metric in
        `sklearn.metrics.pairwise.pairwise_distances` and metrics listed in
`sklearn.metrics.pairwise.PAIRWISE_DISTANCE_FUNCTIONS`. Note that the
"cosine" metric uses :func:`~sklearn.metrics.pairwise.cosine_distances`.
.. versionadded:: 0.20
Returns
-------
trustworthiness : float
Trustworthiness of the low-dimensional embedding.
References
----------
.. [1] Jarkko Venna and Samuel Kaski. 2001. Neighborhood
Preservation in Nonlinear Projection Methods: An Experimental Study.
In Proceedings of the International Conference on Artificial Neural Networks
(ICANN '01). Springer-Verlag, Berlin, Heidelberg, 485-491.
.. [2] Laurens van der Maaten. Learning a Parametric Embedding by Preserving
       Local Structure. Proceedings of the Twelfth International Conference on
Artificial Intelligence and Statistics, PMLR 5:384-391, 2009.
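    Examples
    --------
    A minimal sketch (illustrative only; the exact score depends on the data
    and the embedding, hence the skipped doctest output):
    >>> from sklearn.datasets import make_blobs
    >>> from sklearn.decomposition import PCA
    >>> from sklearn.manifold import trustworthiness
    >>> X, _ = make_blobs(n_samples=100, n_features=10, random_state=0)
    >>> X_embedded = PCA(n_components=2).fit_transform(X)
    >>> trustworthiness(X, X_embedded, n_neighbors=5)  # doctest: +SKIP
    0.95...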
"""
n_samples = X.shape[0]
if n_neighbors >= n_samples / 2:
raise ValueError(
f"n_neighbors ({n_neighbors}) should be less than n_samples / 2"
f" ({n_samples / 2})"
)
dist_X = pairwise_distances(X, metric=metric)
if metric == "precomputed":
dist_X = dist_X.copy()
# we set the diagonal to np.inf to exclude the points themselves from
# their own neighborhood
np.fill_diagonal(dist_X, np.inf)
ind_X = np.argsort(dist_X, axis=1)
# `ind_X[i]` is the index of sorted distances between i and other samples
ind_X_embedded = (
NearestNeighbors(n_neighbors=n_neighbors)
.fit(X_embedded)
.kneighbors(return_distance=False)
)
# We build an inverted index of neighbors in the input space: For sample i,
# we define `inverted_index[i]` as the inverted index of sorted distances:
# inverted_index[i][ind_X[i]] = np.arange(1, n_sample + 1)
inverted_index = np.zeros((n_samples, n_samples), dtype=int)
ordered_indices = np.arange(n_samples + 1)
inverted_index[ordered_indices[:-1, np.newaxis], ind_X] = ordered_indices[1:]
ranks = (
inverted_index[ordered_indices[:-1, np.newaxis], ind_X_embedded] - n_neighbors
)
t = np.sum(ranks[ranks > 0])
t = 1.0 - t * (
2.0 / (n_samples * n_neighbors * (2.0 * n_samples - 3.0 * n_neighbors - 1.0))
)
return t
class TSNE(BaseEstimator):
"""T-distributed Stochastic Neighbor Embedding.
t-SNE [1] is a tool to visualize high-dimensional data. It converts
similarities between data points to joint probabilities and tries
to minimize the Kullback-Leibler divergence between the joint
probabilities of the low-dimensional embedding and the
high-dimensional data. t-SNE has a cost function that is not convex,
i.e. with different initializations we can get different results.
It is highly recommended to use another dimensionality reduction
method (e.g. PCA for dense data or TruncatedSVD for sparse data)
to reduce the number of dimensions to a reasonable amount (e.g. 50)
if the number of features is very high. This will suppress some
noise and speed up the computation of pairwise distances between
samples. For more tips see Laurens van der Maaten's FAQ [2].
Read more in the :ref:`User Guide <t_sne>`.
Parameters
----------
n_components : int, default=2
Dimension of the embedded space.
perplexity : float, default=30.0
The perplexity is related to the number of nearest neighbors that
is used in other manifold learning algorithms. Larger datasets
usually require a larger perplexity. Consider selecting a value
between 5 and 50. Different values can result in significantly
        different results. The perplexity must be less than the number
of samples.
early_exaggeration : float, default=12.0
Controls how tight natural clusters in the original space are in
the embedded space and how much space will be between them. For
larger values, the space between natural clusters will be larger
in the embedded space. Again, the choice of this parameter is not
very critical. If the cost function increases during initial
optimization, the early exaggeration factor or the learning rate
might be too high.
learning_rate : float or "auto", default="auto"
The learning rate for t-SNE is usually in the range [10.0, 1000.0]. If
the learning rate is too high, the data may look like a 'ball' with any
point approximately equidistant from its nearest neighbours. If the
learning rate is too low, most points may look compressed in a dense
cloud with few outliers. If the cost function gets stuck in a bad local
minimum increasing the learning rate may help.
Note that many other t-SNE implementations (bhtsne, FIt-SNE, openTSNE,
etc.) use a definition of learning_rate that is 4 times smaller than
ours. So our learning_rate=200 corresponds to learning_rate=800 in
those other implementations. The 'auto' option sets the learning_rate
to `max(N / early_exaggeration / 4, 50)` where N is the sample size,
following [4] and [5].
.. versionchanged:: 1.2
The default value changed to `"auto"`.
n_iter : int, default=1000
Maximum number of iterations for the optimization. Should be at
least 250.
n_iter_without_progress : int, default=300
Maximum number of iterations without progress before we abort the
optimization, used after 250 initial iterations with early
exaggeration. Note that progress is only checked every 50 iterations so
this value is rounded to the next multiple of 50.
.. versionadded:: 0.17
parameter *n_iter_without_progress* to control stopping criteria.
min_grad_norm : float, default=1e-7
If the gradient norm is below this threshold, the optimization will
be stopped.
metric : str or callable, default='euclidean'
The metric to use when calculating distance between instances in a
feature array. If metric is a string, it must be one of the options
allowed by scipy.spatial.distance.pdist for its metric parameter, or
a metric listed in pairwise.PAIRWISE_DISTANCE_FUNCTIONS.
If metric is "precomputed", X is assumed to be a distance matrix.
Alternatively, if metric is a callable function, it is called on each
pair of instances (rows) and the resulting value recorded. The callable
should take two arrays from X as input and return a value indicating
the distance between them. The default is "euclidean" which is
interpreted as squared euclidean distance.
metric_params : dict, default=None
Additional keyword arguments for the metric function.
.. versionadded:: 1.1
init : {"random", "pca"} or ndarray of shape (n_samples, n_components), \
default="pca"
Initialization of embedding.
PCA initialization cannot be used with precomputed distances and is
usually more globally stable than random initialization.
.. versionchanged:: 1.2
The default value changed to `"pca"`.
verbose : int, default=0
Verbosity level.
random_state : int, RandomState instance or None, default=None
Determines the random number generator. Pass an int for reproducible
results across multiple function calls. Note that different
initializations might result in different local minima of the cost
function. See :term:`Glossary <random_state>`.
method : {'barnes_hut', 'exact'}, default='barnes_hut'
By default the gradient calculation algorithm uses Barnes-Hut
approximation running in O(NlogN) time. method='exact'
will run on the slower, but exact, algorithm in O(N^2) time. The
exact algorithm should be used when nearest-neighbor errors need
to be better than 3%. However, the exact method cannot scale to
millions of examples.
.. versionadded:: 0.17
Approximate optimization *method* via the Barnes-Hut.
angle : float, default=0.5
Only used if method='barnes_hut'
This is the trade-off between speed and accuracy for Barnes-Hut T-SNE.
'angle' is the angular size (referred to as theta in [3]) of a distant
node as measured from a point. If this size is below 'angle' then it is
used as a summary node of all points contained within it.
This method is not very sensitive to changes in this parameter
in the range of 0.2 - 0.8. Angle less than 0.2 has quickly increasing
        computation time and angle greater than 0.8 has quickly increasing error.
n_jobs : int, default=None
The number of parallel jobs to run for neighbors search. This parameter
has no impact when ``metric="precomputed"`` or
(``metric="euclidean"`` and ``method="exact"``).
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
.. versionadded:: 0.22
square_distances : True, default='deprecated'
This parameter has no effect since distance values are always squared
since 1.1.
.. deprecated:: 1.1
`square_distances` has no effect from 1.1 and will be removed in
1.3.
Attributes
----------
embedding_ : array-like of shape (n_samples, n_components)
Stores the embedding vectors.
kl_divergence_ : float
Kullback-Leibler divergence after optimization.
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
feature_names_in_ : ndarray of shape (`n_features_in_`,)
Names of features seen during :term:`fit`. Defined only when `X`
has feature names that are all strings.
.. versionadded:: 1.0
learning_rate_ : float
Effective learning rate.
.. versionadded:: 1.2
n_iter_ : int
Number of iterations run.
See Also
--------
sklearn.decomposition.PCA : Principal component analysis that is a linear
dimensionality reduction method.
sklearn.decomposition.KernelPCA : Non-linear dimensionality reduction using
kernels and PCA.
MDS : Manifold learning using multidimensional scaling.
Isomap : Manifold learning based on Isometric Mapping.
LocallyLinearEmbedding : Manifold learning using Locally Linear Embedding.
SpectralEmbedding : Spectral embedding for non-linear dimensionality.
References
----------
[1] van der Maaten, L.J.P.; Hinton, G.E. Visualizing High-Dimensional Data
Using t-SNE. Journal of Machine Learning Research 9:2579-2605, 2008.
[2] van der Maaten, L.J.P. t-Distributed Stochastic Neighbor Embedding
https://lvdmaaten.github.io/tsne/
[3] L.J.P. van der Maaten. Accelerating t-SNE using Tree-Based Algorithms.
Journal of Machine Learning Research 15(Oct):3221-3245, 2014.
https://lvdmaaten.github.io/publications/papers/JMLR_2014.pdf
[4] Belkina, A. C., Ciccolella, C. O., Anno, R., Halpert, R., Spidlen, J.,
& Snyder-Cappione, J. E. (2019). Automated optimized parameters for
T-distributed stochastic neighbor embedding improve visualization
and analysis of large datasets. Nature Communications, 10(1), 1-12.
[5] Kobak, D., & Berens, P. (2019). The art of using t-SNE for single-cell
transcriptomics. Nature Communications, 10(1), 1-14.
Examples
--------
>>> import numpy as np
>>> from sklearn.manifold import TSNE
>>> X = np.array([[0, 0, 0], [0, 1, 1], [1, 0, 1], [1, 1, 1]])
>>> X_embedded = TSNE(n_components=2, learning_rate='auto',
... init='random', perplexity=3).fit_transform(X)
>>> X_embedded.shape
(4, 2)
"""
_parameter_constraints: dict = {
"n_components": [Interval(Integral, 1, None, closed="left")],
"perplexity": [Interval(Real, 0, None, closed="neither")],
"early_exaggeration": [Interval(Real, 1, None, closed="left")],
"learning_rate": [
StrOptions({"auto"}),
Interval(Real, 0, None, closed="neither"),
],
"n_iter": [Interval(Integral, 250, None, closed="left")],
"n_iter_without_progress": [Interval(Integral, -1, None, closed="left")],
"min_grad_norm": [Interval(Real, 0, None, closed="left")],
"metric": [StrOptions(set(_VALID_METRICS) | {"precomputed"}), callable],
"metric_params": [dict, None],
"init": [
StrOptions({"pca", "random"}),
np.ndarray,
],
"verbose": ["verbose"],
"random_state": ["random_state"],
"method": [StrOptions({"barnes_hut", "exact"})],
"angle": [Interval(Real, 0, 1, closed="both")],
"n_jobs": [None, Integral],
"square_distances": ["boolean", Hidden(StrOptions({"deprecated"}))],
}
# Control the number of exploration iterations with early_exaggeration on
_EXPLORATION_N_ITER = 250
# Control the number of iterations between progress checks
_N_ITER_CHECK = 50
def __init__(
self,
n_components=2,
*,
perplexity=30.0,
early_exaggeration=12.0,
learning_rate="auto",
n_iter=1000,
n_iter_without_progress=300,
min_grad_norm=1e-7,
metric="euclidean",
metric_params=None,
init="pca",
verbose=0,
random_state=None,
method="barnes_hut",
angle=0.5,
n_jobs=None,
square_distances="deprecated",
):
self.n_components = n_components
self.perplexity = perplexity
self.early_exaggeration = early_exaggeration
self.learning_rate = learning_rate
self.n_iter = n_iter
self.n_iter_without_progress = n_iter_without_progress
self.min_grad_norm = min_grad_norm
self.metric = metric
self.metric_params = metric_params
self.init = init
self.verbose = verbose
self.random_state = random_state
self.method = method
self.angle = angle
self.n_jobs = n_jobs
self.square_distances = square_distances
def _check_params_vs_input(self, X):
if self.perplexity >= X.shape[0]:
raise ValueError("perplexity must be less than n_samples")
def _fit(self, X, skip_num_points=0):
"""Private function to fit the model using X as training data."""
if isinstance(self.init, str) and self.init == "pca" and issparse(X):
raise TypeError(
"PCA initialization is currently not supported "
"with the sparse input matrix. Use "
'init="random" instead.'
)
if self.square_distances != "deprecated":
warnings.warn(
"The parameter `square_distances` has not effect and will be "
"removed in version 1.3.",
FutureWarning,
)
if self.learning_rate == "auto":
# See issue #18018
self.learning_rate_ = X.shape[0] / self.early_exaggeration / 4
self.learning_rate_ = np.maximum(self.learning_rate_, 50)
else:
self.learning_rate_ = self.learning_rate
if self.method == "barnes_hut":
X = self._validate_data(
X,
accept_sparse=["csr"],
ensure_min_samples=2,
dtype=[np.float32, np.float64],
)
else:
X = self._validate_data(
X, accept_sparse=["csr", "csc", "coo"], dtype=[np.float32, np.float64]
)
if self.metric == "precomputed":
if isinstance(self.init, str) and self.init == "pca":
raise ValueError(
'The parameter init="pca" cannot be used with metric="precomputed".'
)
if X.shape[0] != X.shape[1]:
raise ValueError("X should be a square distance matrix")
check_non_negative(
X,
"TSNE.fit(). With metric='precomputed', X "
"should contain positive distances.",
)
if self.method == "exact" and issparse(X):
raise TypeError(
'TSNE with method="exact" does not accept sparse '
'precomputed distance matrix. Use method="barnes_hut" '
"or provide the dense distance matrix."
)
if self.method == "barnes_hut" and self.n_components > 3:
raise ValueError(
"'n_components' should be inferior to 4 for the "
"barnes_hut algorithm as it relies on "
"quad-tree or oct-tree."
)
random_state = check_random_state(self.random_state)
n_samples = X.shape[0]
neighbors_nn = None
if self.method == "exact":
# Retrieve the distance matrix, either using the precomputed one or
# computing it.
if self.metric == "precomputed":
distances = X
else:
if self.verbose:
print("[t-SNE] Computing pairwise distances...")
if self.metric == "euclidean":
# Euclidean is squared here, rather than using **= 2,
# because euclidean_distances already calculates
# squared distances, and returns np.sqrt(dist) for
# squared=False.
# Also, Euclidean is slower for n_jobs>1, so don't set here
distances = pairwise_distances(X, metric=self.metric, squared=True)
else:
metric_params_ = self.metric_params or {}
distances = pairwise_distances(
X, metric=self.metric, n_jobs=self.n_jobs, **metric_params_
)
if np.any(distances < 0):
raise ValueError(
"All distances should be positive, the metric given is not correct"
)
if self.metric != "euclidean":
distances **= 2
# compute the joint probability distribution for the input space
P = _joint_probabilities(distances, self.perplexity, self.verbose)
assert np.all(np.isfinite(P)), "All probabilities should be finite"
assert np.all(P >= 0), "All probabilities should be non-negative"
assert np.all(
P <= 1
), "All probabilities should be less or then equal to one"
else:
# Compute the number of nearest neighbors to find.
# LvdM uses 3 * perplexity as the number of neighbors.
            # In the event that we have a very small number of points
# set the neighbors to n - 1.
n_neighbors = min(n_samples - 1, int(3.0 * self.perplexity + 1))
if self.verbose:
print("[t-SNE] Computing {} nearest neighbors...".format(n_neighbors))
# Find the nearest neighbors for every point
knn = NearestNeighbors(
algorithm="auto",
n_jobs=self.n_jobs,
n_neighbors=n_neighbors,
metric=self.metric,
metric_params=self.metric_params,
)
t0 = time()
knn.fit(X)
duration = time() - t0
if self.verbose:
print(
"[t-SNE] Indexed {} samples in {:.3f}s...".format(
n_samples, duration
)
)
t0 = time()
distances_nn = knn.kneighbors_graph(mode="distance")
duration = time() - t0
if self.verbose:
print(
"[t-SNE] Computed neighbors for {} samples in {:.3f}s...".format(
n_samples, duration
)
)
# Free the memory used by the ball_tree
del knn
            # knn returns euclidean distances but we need them squared to be
            # consistent with the 'exact' method. Note that the method was
            # derived using the euclidean metric in the input space. The
            # implications of using a different metric are unclear.
distances_nn.data **= 2
# compute the joint probability distribution for the input space
P = _joint_probabilities_nn(distances_nn, self.perplexity, self.verbose)
if isinstance(self.init, np.ndarray):
X_embedded = self.init
elif self.init == "pca":
pca = PCA(
n_components=self.n_components,
svd_solver="randomized",
random_state=random_state,
)
X_embedded = pca.fit_transform(X).astype(np.float32, copy=False)
# PCA is rescaled so that PC1 has standard deviation 1e-4 which is
# the default value for random initialization. See issue #18018.
X_embedded = X_embedded / np.std(X_embedded[:, 0]) * 1e-4
elif self.init == "random":
# The embedding is initialized with iid samples from Gaussians with
# standard deviation 1e-4.
X_embedded = 1e-4 * random_state.standard_normal(
size=(n_samples, self.n_components)
).astype(np.float32)
# Degrees of freedom of the Student's t-distribution. The suggestion
# degrees_of_freedom = n_components - 1 comes from
# "Learning a Parametric Embedding by Preserving Local Structure"
# Laurens van der Maaten, 2009.
degrees_of_freedom = max(self.n_components - 1, 1)
return self._tsne(
P,
degrees_of_freedom,
n_samples,
X_embedded=X_embedded,
neighbors=neighbors_nn,
skip_num_points=skip_num_points,
)
def _tsne(
self,
P,
degrees_of_freedom,
n_samples,
X_embedded,
neighbors=None,
skip_num_points=0,
):
"""Runs t-SNE."""
        # t-SNE minimizes the Kullback-Leibler divergence of the Gaussians P
# and the Student's t-distributions Q. The optimization algorithm that
# we use is batch gradient descent with two stages:
# * initial optimization with early exaggeration and momentum at 0.5
# * final optimization with momentum at 0.8
params = X_embedded.ravel()
opt_args = {
"it": 0,
"n_iter_check": self._N_ITER_CHECK,
"min_grad_norm": self.min_grad_norm,
"learning_rate": self.learning_rate_,
"verbose": self.verbose,
"kwargs": dict(skip_num_points=skip_num_points),
"args": [P, degrees_of_freedom, n_samples, self.n_components],
"n_iter_without_progress": self._EXPLORATION_N_ITER,
"n_iter": self._EXPLORATION_N_ITER,
"momentum": 0.5,
}
if self.method == "barnes_hut":
obj_func = _kl_divergence_bh
opt_args["kwargs"]["angle"] = self.angle
# Repeat verbose argument for _kl_divergence_bh
opt_args["kwargs"]["verbose"] = self.verbose
# Get the number of threads for gradient computation here to
# avoid recomputing it at each iteration.
opt_args["kwargs"]["num_threads"] = _openmp_effective_n_threads()
else:
obj_func = _kl_divergence
        # Learning schedule (part 1): do 250 iterations with lower momentum but
# higher learning rate controlled via the early exaggeration parameter
P *= self.early_exaggeration
params, kl_divergence, it = _gradient_descent(obj_func, params, **opt_args)
if self.verbose:
print(
"[t-SNE] KL divergence after %d iterations with early exaggeration: %f"
% (it + 1, kl_divergence)
)
# Learning schedule (part 2): disable early exaggeration and finish
# optimization with a higher momentum at 0.8
P /= self.early_exaggeration
remaining = self.n_iter - self._EXPLORATION_N_ITER
if it < self._EXPLORATION_N_ITER or remaining > 0:
opt_args["n_iter"] = self.n_iter
opt_args["it"] = it + 1
opt_args["momentum"] = 0.8
opt_args["n_iter_without_progress"] = self.n_iter_without_progress
params, kl_divergence, it = _gradient_descent(obj_func, params, **opt_args)
# Save the final number of iterations
self.n_iter_ = it
if self.verbose:
print(
"[t-SNE] KL divergence after %d iterations: %f"
% (it + 1, kl_divergence)
)
X_embedded = params.reshape(n_samples, self.n_components)
self.kl_divergence_ = kl_divergence
return X_embedded
def fit_transform(self, X, y=None):
"""Fit X into an embedded space and return that transformed output.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features) or \
(n_samples, n_samples)
If the metric is 'precomputed' X must be a square distance
matrix. Otherwise it contains a sample per row. If the method
is 'exact', X may be a sparse matrix of type 'csr', 'csc'
or 'coo'. If the method is 'barnes_hut' and the metric is
'precomputed', X may be a precomputed sparse graph.
y : None
Ignored.
Returns
-------
X_new : ndarray of shape (n_samples, n_components)
Embedding of the training data in low-dimensional space.
"""
self._validate_params()
self._check_params_vs_input(X)
embedding = self._fit(X)
self.embedding_ = embedding
return self.embedding_
def fit(self, X, y=None):
"""Fit X into an embedded space.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features) or \
(n_samples, n_samples)
If the metric is 'precomputed' X must be a square distance
matrix. Otherwise it contains a sample per row. If the method
is 'exact', X may be a sparse matrix of type 'csr', 'csc'
or 'coo'. If the method is 'barnes_hut' and the metric is
'precomputed', X may be a precomputed sparse graph.
y : None
Ignored.
Returns
-------
X_new : array of shape (n_samples, n_components)
Embedding of the training data in low-dimensional space.
"""
self._validate_params()
self.fit_transform(X)
return self
def _more_tags(self):
return {"pairwise": self.metric == "precomputed"}
| bsd-3-clause |
anntzer/scikit-learn | examples/linear_model/plot_ols.py | 13 | 2048 | # -*- coding: utf-8 -*-
"""
=========================================================
Linear Regression Example
=========================================================
The example below uses only the first feature of the `diabetes` dataset,
in order to illustrate the data points within the two-dimensional plot.
The straight line can be seen in the plot, showing how linear regression
attempts to draw a straight line that will best minimize the
residual sum of squares between the observed responses in the dataset,
and the responses predicted by the linear approximation.
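Mathematically, ordinary least squares fits coefficients w by solving
min_w ||X w - y||^2_2 over the training samples.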
The coefficients, residual sum of squares and the coefficient of
determination are also calculated.
"""
# Code source: Jaques Grobler
# License: BSD 3 clause
import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets, linear_model
from sklearn.metrics import mean_squared_error, r2_score
# Load the diabetes dataset
diabetes_X, diabetes_y = datasets.load_diabetes(return_X_y=True)
# Use only one feature
diabetes_X = diabetes_X[:, np.newaxis, 2]
# Split the data into training/testing sets
diabetes_X_train = diabetes_X[:-20]
diabetes_X_test = diabetes_X[-20:]
# Split the targets into training/testing sets
diabetes_y_train = diabetes_y[:-20]
diabetes_y_test = diabetes_y[-20:]
# Create linear regression object
regr = linear_model.LinearRegression()
# Train the model using the training sets
regr.fit(diabetes_X_train, diabetes_y_train)
# Make predictions using the testing set
diabetes_y_pred = regr.predict(diabetes_X_test)
# The coefficients
print("Coefficients: \n", regr.coef_)
# The mean squared error
print("Mean squared error: %.2f" % mean_squared_error(diabetes_y_test, diabetes_y_pred))
# The coefficient of determination: 1 is perfect prediction
print("Coefficient of determination: %.2f" % r2_score(diabetes_y_test, diabetes_y_pred))
# Plot outputs
plt.scatter(diabetes_X_test, diabetes_y_test, color="black")
plt.plot(diabetes_X_test, diabetes_y_pred, color="blue", linewidth=3)
plt.xticks(())
plt.yticks(())
plt.show()
| bsd-3-clause |
anntzer/scikit-learn | examples/linear_model/plot_sgdocsvm_vs_ocsvm.py | 13 | 5255 | """
====================================================================
One-Class SVM versus One-Class SVM using Stochastic Gradient Descent
====================================================================
This example shows how to approximate the solution of
:class:`sklearn.svm.OneClassSVM` in the case of an RBF kernel with
:class:`sklearn.linear_model.SGDOneClassSVM`, a Stochastic Gradient Descent
(SGD) version of the One-Class SVM. A kernel approximation is first used in
order to apply :class:`sklearn.linear_model.SGDOneClassSVM` which implements a
linear One-Class SVM using SGD.
Note that :class:`sklearn.linear_model.SGDOneClassSVM` scales linearly with
the number of samples whereas the complexity of a kernelized
:class:`sklearn.svm.OneClassSVM` is at best quadratic with respect to the
number of samples. It is not the purpose of this example to illustrate the
benefits of such an approximation in terms of computation time but rather to
show that we obtain similar results on a toy dataset.
""" # noqa: E501
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from sklearn.svm import OneClassSVM
from sklearn.linear_model import SGDOneClassSVM
from sklearn.kernel_approximation import Nystroem
from sklearn.pipeline import make_pipeline
font = {"weight": "normal", "size": 15}
matplotlib.rc("font", **font)
random_state = 42
rng = np.random.RandomState(random_state)
# Generate train data
X = 0.3 * rng.randn(500, 2)
X_train = np.r_[X + 2, X - 2]
# Generate some regular novel observations
X = 0.3 * rng.randn(20, 2)
X_test = np.r_[X + 2, X - 2]
# Generate some abnormal novel observations
X_outliers = rng.uniform(low=-4, high=4, size=(20, 2))
xx, yy = np.meshgrid(np.linspace(-4.5, 4.5, 50), np.linspace(-4.5, 4.5, 50))
# OCSVM hyperparameters
nu = 0.05
gamma = 2.0
# Fit the One-Class SVM
clf = OneClassSVM(gamma=gamma, kernel="rbf", nu=nu)
clf.fit(X_train)
y_pred_train = clf.predict(X_train)
y_pred_test = clf.predict(X_test)
y_pred_outliers = clf.predict(X_outliers)
n_error_train = y_pred_train[y_pred_train == -1].size
n_error_test = y_pred_test[y_pred_test == -1].size
n_error_outliers = y_pred_outliers[y_pred_outliers == 1].size
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# Fit the One-Class SVM using a kernel approximation and SGD
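# Nystroem approximates the RBF kernel feature map by an explicit low-rank
# expansion (100 landmark components by default), so the linear
# SGDOneClassSVM below effectively operates in the RBF feature space.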
transform = Nystroem(gamma=gamma, random_state=random_state)
clf_sgd = SGDOneClassSVM(
nu=nu, shuffle=True, fit_intercept=True, random_state=random_state, tol=1e-4
)
pipe_sgd = make_pipeline(transform, clf_sgd)
pipe_sgd.fit(X_train)
y_pred_train_sgd = pipe_sgd.predict(X_train)
y_pred_test_sgd = pipe_sgd.predict(X_test)
y_pred_outliers_sgd = pipe_sgd.predict(X_outliers)
n_error_train_sgd = y_pred_train_sgd[y_pred_train_sgd == -1].size
n_error_test_sgd = y_pred_test_sgd[y_pred_test_sgd == -1].size
n_error_outliers_sgd = y_pred_outliers_sgd[y_pred_outliers_sgd == 1].size
Z_sgd = pipe_sgd.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z_sgd = Z_sgd.reshape(xx.shape)
# plot the level sets of the decision function
plt.figure(figsize=(9, 6))
plt.title("One Class SVM")
plt.contourf(xx, yy, Z, levels=np.linspace(Z.min(), 0, 7), cmap=plt.cm.PuBu)
a = plt.contour(xx, yy, Z, levels=[0], linewidths=2, colors="darkred")
plt.contourf(xx, yy, Z, levels=[0, Z.max()], colors="palevioletred")
s = 20
b1 = plt.scatter(X_train[:, 0], X_train[:, 1], c="white", s=s, edgecolors="k")
b2 = plt.scatter(X_test[:, 0], X_test[:, 1], c="blueviolet", s=s, edgecolors="k")
c = plt.scatter(X_outliers[:, 0], X_outliers[:, 1], c="gold", s=s, edgecolors="k")
plt.axis("tight")
plt.xlim((-4.5, 4.5))
plt.ylim((-4.5, 4.5))
plt.legend(
[a.collections[0], b1, b2, c],
[
"learned frontier",
"training observations",
"new regular observations",
"new abnormal observations",
],
loc="upper left",
)
plt.xlabel(
"error train: %d/%d; errors novel regular: %d/%d; errors novel abnormal: %d/%d"
% (
n_error_train,
X_train.shape[0],
n_error_test,
X_test.shape[0],
n_error_outliers,
X_outliers.shape[0],
)
)
plt.show()
plt.figure(figsize=(9, 6))
plt.title("Online One-Class SVM")
plt.contourf(xx, yy, Z_sgd, levels=np.linspace(Z_sgd.min(), 0, 7), cmap=plt.cm.PuBu)
a = plt.contour(xx, yy, Z_sgd, levels=[0], linewidths=2, colors="darkred")
plt.contourf(xx, yy, Z_sgd, levels=[0, Z_sgd.max()], colors="palevioletred")
s = 20
b1 = plt.scatter(X_train[:, 0], X_train[:, 1], c="white", s=s, edgecolors="k")
b2 = plt.scatter(X_test[:, 0], X_test[:, 1], c="blueviolet", s=s, edgecolors="k")
c = plt.scatter(X_outliers[:, 0], X_outliers[:, 1], c="gold", s=s, edgecolors="k")
plt.axis("tight")
plt.xlim((-4.5, 4.5))
plt.ylim((-4.5, 4.5))
plt.legend(
[a.collections[0], b1, b2, c],
[
"learned frontier",
"training observations",
"new regular observations",
"new abnormal observations",
],
loc="upper left",
)
plt.xlabel(
"error train: %d/%d; errors novel regular: %d/%d; errors novel abnormal: %d/%d"
% (
n_error_train_sgd,
X_train.shape[0],
n_error_test_sgd,
X_test.shape[0],
n_error_outliers_sgd,
X_outliers.shape[0],
)
)
plt.show()
| bsd-3-clause |
anntzer/scikit-learn | examples/linear_model/plot_logistic_path.py | 12 | 2159 | """
==============================================
Regularization path of L1- Logistic Regression
==============================================
Train l1-penalized logistic regression models on a binary classification
problem derived from the Iris dataset.
The models are ordered from strongest regularized to least regularized. The 4
coefficients of the models are collected and plotted as a "regularization
path": on the left-hand side of the figure (strong regularizers), all the
coefficients are exactly 0. When regularization gets progressively looser,
coefficients can get non-zero values one after the other.
Here we choose the liblinear solver because it can efficiently optimize for the
Logistic Regression loss with a non-smooth, sparsity inducing l1 penalty.
Also note that we set a low value for the tolerance to make sure that the model
has converged before collecting the coefficients.
We also use warm_start=True which means that the coefficients of the models are
reused to initialize the next model fit to speed-up the computation of the
full-path.
"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# License: BSD 3 clause
# %%
# Load data
# ---------
from sklearn import datasets
iris = datasets.load_iris()
X = iris.data
y = iris.target
X = X[y != 2]
y = y[y != 2]
X /= X.max() # Normalize X to speed-up convergence
# %%
# Compute regularization path
# ---------------------------
import numpy as np
from sklearn import linear_model
from sklearn.svm import l1_min_c
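# l1_min_c returns the smallest C for which the l1-penalized model is
# guaranteed to have at least one non-zero coefficient; we sweep 16 values
# spanning seven decades above that bound.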
cs = l1_min_c(X, y, loss="log") * np.logspace(0, 7, 16)
clf = linear_model.LogisticRegression(
penalty="l1",
solver="liblinear",
tol=1e-6,
max_iter=int(1e6),
warm_start=True,
intercept_scaling=10000.0,
)
coefs_ = []
for c in cs:
clf.set_params(C=c)
clf.fit(X, y)
coefs_.append(clf.coef_.ravel().copy())
coefs_ = np.array(coefs_)
# %%
# Plot regularization path
# ------------------------
import matplotlib.pyplot as plt
plt.plot(np.log10(cs), coefs_, marker="o")
ymin, ymax = plt.ylim()
plt.xlabel("log(C)")
plt.ylabel("Coefficients")
plt.title("Logistic Regression Path")
plt.axis("tight")
plt.show()
| bsd-3-clause |
anntzer/scikit-learn | sklearn/svm/tests/test_svm.py | 8 | 47550 | """
Testing for Support Vector Machine module (sklearn.svm)
TODO: remove hard coded numerical results when possible
"""
import warnings
import re
import numpy as np
import pytest
from numpy.testing import assert_array_equal, assert_array_almost_equal
from numpy.testing import assert_almost_equal
from numpy.testing import assert_allclose
from scipy import sparse
from sklearn import svm, linear_model, datasets, metrics, base
from sklearn.svm import LinearSVC, OneClassSVM, SVR, NuSVR, LinearSVR
from sklearn.model_selection import train_test_split
from sklearn.datasets import make_classification, make_blobs
from sklearn.metrics import f1_score
from sklearn.metrics.pairwise import rbf_kernel
from sklearn.utils import check_random_state
from sklearn.utils._testing import ignore_warnings
from sklearn.utils.validation import _num_samples
from sklearn.utils import shuffle
from sklearn.exceptions import ConvergenceWarning
from sklearn.exceptions import NotFittedError, UndefinedMetricWarning
from sklearn.multiclass import OneVsRestClassifier
# mypy error: Module 'sklearn.svm' has no attribute '_libsvm'
from sklearn.svm import _libsvm # type: ignore
# toy sample
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]
Y = [1, 1, 1, 2, 2, 2]
T = [[-1, -1], [2, 2], [3, 2]]
true_result = [1, 2, 2]
# also load the iris dataset
iris = datasets.load_iris()
rng = check_random_state(42)
perm = rng.permutation(iris.target.size)
iris.data = iris.data[perm]
iris.target = iris.target[perm]
def test_libsvm_parameters():
# Test parameters on classes that make use of libsvm.
clf = svm.SVC(kernel="linear").fit(X, Y)
assert_array_equal(clf.dual_coef_, [[-0.25, 0.25]])
assert_array_equal(clf.support_, [1, 3])
assert_array_equal(clf.support_vectors_, (X[1], X[3]))
assert_array_equal(clf.intercept_, [0.0])
assert_array_equal(clf.predict(X), Y)
def test_libsvm_iris():
# Check consistency on dataset iris.
# shuffle the dataset so that labels are not ordered
for k in ("linear", "rbf"):
clf = svm.SVC(kernel=k).fit(iris.data, iris.target)
assert np.mean(clf.predict(iris.data) == iris.target) > 0.9
assert hasattr(clf, "coef_") == (k == "linear")
assert_array_equal(clf.classes_, np.sort(clf.classes_))
# check also the low-level API
# We unpack the values to create a dictionary with some of the return values
# from Libsvm's fit.
(
libsvm_support,
libsvm_support_vectors,
libsvm_n_class_SV,
libsvm_sv_coef,
libsvm_intercept,
libsvm_probA,
libsvm_probB,
# libsvm_fit_status and libsvm_n_iter won't be used below.
libsvm_fit_status,
libsvm_n_iter,
) = _libsvm.fit(iris.data, iris.target.astype(np.float64))
model_params = {
"support": libsvm_support,
"SV": libsvm_support_vectors,
"nSV": libsvm_n_class_SV,
"sv_coef": libsvm_sv_coef,
"intercept": libsvm_intercept,
"probA": libsvm_probA,
"probB": libsvm_probB,
}
pred = _libsvm.predict(iris.data, **model_params)
assert np.mean(pred == iris.target) > 0.95
# We unpack the values to create a dictionary with some of the return values
# from Libsvm's fit.
(
libsvm_support,
libsvm_support_vectors,
libsvm_n_class_SV,
libsvm_sv_coef,
libsvm_intercept,
libsvm_probA,
libsvm_probB,
# libsvm_fit_status and libsvm_n_iter won't be used below.
libsvm_fit_status,
libsvm_n_iter,
) = _libsvm.fit(iris.data, iris.target.astype(np.float64), kernel="linear")
model_params = {
"support": libsvm_support,
"SV": libsvm_support_vectors,
"nSV": libsvm_n_class_SV,
"sv_coef": libsvm_sv_coef,
"intercept": libsvm_intercept,
"probA": libsvm_probA,
"probB": libsvm_probB,
}
pred = _libsvm.predict(iris.data, **model_params, kernel="linear")
assert np.mean(pred == iris.target) > 0.95
pred = _libsvm.cross_validation(
iris.data, iris.target.astype(np.float64), 5, kernel="linear", random_seed=0
)
assert np.mean(pred == iris.target) > 0.95
# If random_seed >= 0, the libsvm rng is seeded (by calling `srand`), hence
# we should get deterministic results (assuming that there is no other
# thread calling this wrapper calling `srand` concurrently).
pred2 = _libsvm.cross_validation(
iris.data, iris.target.astype(np.float64), 5, kernel="linear", random_seed=0
)
assert_array_equal(pred, pred2)
def test_precomputed():
# SVC with a precomputed kernel.
# We test it with a toy dataset and with iris.
clf = svm.SVC(kernel="precomputed")
# Gram matrix for train data (square matrix)
# (we use just a linear kernel)
K = np.dot(X, np.array(X).T)
clf.fit(K, Y)
# Gram matrix for test data (rectangular matrix)
KT = np.dot(T, np.array(X).T)
pred = clf.predict(KT)
with pytest.raises(ValueError):
clf.predict(KT.T)
assert_array_equal(clf.dual_coef_, [[-0.25, 0.25]])
assert_array_equal(clf.support_, [1, 3])
assert_array_equal(clf.intercept_, [0])
assert_array_almost_equal(clf.support_, [1, 3])
assert_array_equal(pred, true_result)
# Gram matrix for test data but compute KT[i,j]
# for support vectors j only.
KT = np.zeros_like(KT)
for i in range(len(T)):
for j in clf.support_:
KT[i, j] = np.dot(T[i], X[j])
pred = clf.predict(KT)
assert_array_equal(pred, true_result)
# same as before, but using a callable function instead of the kernel
# matrix. kernel is just a linear kernel
def kfunc(x, y):
return np.dot(x, y.T)
clf = svm.SVC(kernel=kfunc)
clf.fit(np.array(X), Y)
pred = clf.predict(T)
assert_array_equal(clf.dual_coef_, [[-0.25, 0.25]])
assert_array_equal(clf.intercept_, [0])
assert_array_almost_equal(clf.support_, [1, 3])
assert_array_equal(pred, true_result)
# test a precomputed kernel with the iris dataset
# and check parameters against a linear SVC
clf = svm.SVC(kernel="precomputed")
clf2 = svm.SVC(kernel="linear")
K = np.dot(iris.data, iris.data.T)
clf.fit(K, iris.target)
clf2.fit(iris.data, iris.target)
pred = clf.predict(K)
assert_array_almost_equal(clf.support_, clf2.support_)
assert_array_almost_equal(clf.dual_coef_, clf2.dual_coef_)
assert_array_almost_equal(clf.intercept_, clf2.intercept_)
assert_almost_equal(np.mean(pred == iris.target), 0.99, decimal=2)
# Gram matrix for test data but compute KT[i,j]
# for support vectors j only.
K = np.zeros_like(K)
for i in range(len(iris.data)):
for j in clf.support_:
K[i, j] = np.dot(iris.data[i], iris.data[j])
pred = clf.predict(K)
assert_almost_equal(np.mean(pred == iris.target), 0.99, decimal=2)
clf = svm.SVC(kernel=kfunc)
clf.fit(iris.data, iris.target)
assert_almost_equal(np.mean(pred == iris.target), 0.99, decimal=2)
def test_svr():
# Test Support Vector Regression
diabetes = datasets.load_diabetes()
for clf in (
svm.NuSVR(kernel="linear", nu=0.4, C=1.0),
svm.NuSVR(kernel="linear", nu=0.4, C=10.0),
svm.SVR(kernel="linear", C=10.0),
svm.LinearSVR(C=10.0),
svm.LinearSVR(C=10.0),
):
clf.fit(diabetes.data, diabetes.target)
assert clf.score(diabetes.data, diabetes.target) > 0.02
# non-regression test; previously, BaseLibSVM would check that
# len(np.unique(y)) < 2, which must only be done for SVC
svm.SVR().fit(diabetes.data, np.ones(len(diabetes.data)))
svm.LinearSVR().fit(diabetes.data, np.ones(len(diabetes.data)))
def test_linearsvr():
    # check that SVR(kernel='linear') and LinearSVR() give
    # comparable results
diabetes = datasets.load_diabetes()
lsvr = svm.LinearSVR(C=1e3).fit(diabetes.data, diabetes.target)
score1 = lsvr.score(diabetes.data, diabetes.target)
svr = svm.SVR(kernel="linear", C=1e3).fit(diabetes.data, diabetes.target)
score2 = svr.score(diabetes.data, diabetes.target)
assert_allclose(np.linalg.norm(lsvr.coef_), np.linalg.norm(svr.coef_), 1, 0.0001)
assert_almost_equal(score1, score2, 2)
def test_linearsvr_fit_sampleweight():
# check correct result when sample_weight is 1
    # check that SVR(kernel='linear') and LinearSVR() give
    # comparable results
diabetes = datasets.load_diabetes()
n_samples = len(diabetes.target)
unit_weight = np.ones(n_samples)
lsvr = svm.LinearSVR(C=1e3, tol=1e-12, max_iter=10000).fit(
diabetes.data, diabetes.target, sample_weight=unit_weight
)
score1 = lsvr.score(diabetes.data, diabetes.target)
lsvr_no_weight = svm.LinearSVR(C=1e3, tol=1e-12, max_iter=10000).fit(
diabetes.data, diabetes.target
)
score2 = lsvr_no_weight.score(diabetes.data, diabetes.target)
assert_allclose(
np.linalg.norm(lsvr.coef_), np.linalg.norm(lsvr_no_weight.coef_), 1, 0.0001
)
assert_almost_equal(score1, score2, 2)
# check that fit(X) = fit([X1, X2, X3],sample_weight = [n1, n2, n3]) where
# X = X1 repeated n1 times, X2 repeated n2 times and so forth
random_state = check_random_state(0)
random_weight = random_state.randint(0, 10, n_samples)
lsvr_unflat = svm.LinearSVR(C=1e3, tol=1e-12, max_iter=10000).fit(
diabetes.data, diabetes.target, sample_weight=random_weight
)
score3 = lsvr_unflat.score(
diabetes.data, diabetes.target, sample_weight=random_weight
)
X_flat = np.repeat(diabetes.data, random_weight, axis=0)
y_flat = np.repeat(diabetes.target, random_weight, axis=0)
lsvr_flat = svm.LinearSVR(C=1e3, tol=1e-12, max_iter=10000).fit(X_flat, y_flat)
score4 = lsvr_flat.score(X_flat, y_flat)
assert_almost_equal(score3, score4, 2)
def test_svr_errors():
X = [[0.0], [1.0]]
y = [0.0, 0.5]
# Bad kernel
clf = svm.SVR(kernel=lambda x, y: np.array([[1.0]]))
clf.fit(X, y)
with pytest.raises(ValueError):
clf.predict(X)
def test_oneclass():
# Test OneClassSVM
clf = svm.OneClassSVM()
clf.fit(X)
pred = clf.predict(T)
assert_array_equal(pred, [1, -1, -1])
assert pred.dtype == np.dtype("intp")
assert_array_almost_equal(clf.intercept_, [-1.218], decimal=3)
assert_array_almost_equal(clf.dual_coef_, [[0.750, 0.750, 0.750, 0.750]], decimal=3)
with pytest.raises(AttributeError):
(lambda: clf.coef_)()
def test_oneclass_decision_function():
# Test OneClassSVM decision function
clf = svm.OneClassSVM()
rnd = check_random_state(2)
# Generate train data
X = 0.3 * rnd.randn(100, 2)
X_train = np.r_[X + 2, X - 2]
# Generate some regular novel observations
X = 0.3 * rnd.randn(20, 2)
X_test = np.r_[X + 2, X - 2]
# Generate some abnormal novel observations
X_outliers = rnd.uniform(low=-4, high=4, size=(20, 2))
# fit the model
clf = svm.OneClassSVM(nu=0.1, kernel="rbf", gamma=0.1)
clf.fit(X_train)
# predict things
y_pred_test = clf.predict(X_test)
assert np.mean(y_pred_test == 1) > 0.9
y_pred_outliers = clf.predict(X_outliers)
assert np.mean(y_pred_outliers == -1) > 0.9
dec_func_test = clf.decision_function(X_test)
assert_array_equal((dec_func_test > 0).ravel(), y_pred_test == 1)
dec_func_outliers = clf.decision_function(X_outliers)
assert_array_equal((dec_func_outliers > 0).ravel(), y_pred_outliers == 1)
def test_oneclass_score_samples():
X_train = [[1, 1], [1, 2], [2, 1]]
clf = svm.OneClassSVM(gamma=1).fit(X_train)
assert_array_equal(
clf.score_samples([[2.0, 2.0]]),
clf.decision_function([[2.0, 2.0]]) + clf.offset_,
)
def test_tweak_params():
# Make sure some tweaking of parameters works.
# We change clf.dual_coef_ at run time and expect .predict() to change
# accordingly. Notice that this is not trivial since it involves a lot
# of C/Python copying in the libsvm bindings.
# The success of this test ensures that the mapping between libsvm and
# the python classifier is complete.
clf = svm.SVC(kernel="linear", C=1.0)
clf.fit(X, Y)
assert_array_equal(clf.dual_coef_, [[-0.25, 0.25]])
assert_array_equal(clf.predict([[-0.1, -0.1]]), [1])
clf._dual_coef_ = np.array([[0.0, 1.0]])
assert_array_equal(clf.predict([[-0.1, -0.1]]), [2])
def test_probability():
# Predict probabilities using SVC
# This uses cross validation, so we use a slightly bigger testing set.
for clf in (
svm.SVC(probability=True, random_state=0, C=1.0),
svm.NuSVC(probability=True, random_state=0),
):
clf.fit(iris.data, iris.target)
prob_predict = clf.predict_proba(iris.data)
assert_array_almost_equal(np.sum(prob_predict, 1), np.ones(iris.data.shape[0]))
assert np.mean(np.argmax(prob_predict, 1) == clf.predict(iris.data)) > 0.9
assert_almost_equal(
clf.predict_proba(iris.data), np.exp(clf.predict_log_proba(iris.data)), 8
)
def test_decision_function():
# Test decision_function
# Sanity check, test that decision_function implemented in python
# returns the same as the one in libsvm
# multi class:
clf = svm.SVC(kernel="linear", C=0.1, decision_function_shape="ovo").fit(
iris.data, iris.target
)
dec = np.dot(iris.data, clf.coef_.T) + clf.intercept_
assert_array_almost_equal(dec, clf.decision_function(iris.data))
# binary:
clf.fit(X, Y)
dec = np.dot(X, clf.coef_.T) + clf.intercept_
prediction = clf.predict(X)
assert_array_almost_equal(dec.ravel(), clf.decision_function(X))
assert_array_almost_equal(
prediction, clf.classes_[(clf.decision_function(X) > 0).astype(int)]
)
expected = np.array([-1.0, -0.66, -1.0, 0.66, 1.0, 1.0])
assert_array_almost_equal(clf.decision_function(X), expected, 2)
# kernel binary:
clf = svm.SVC(kernel="rbf", gamma=1, decision_function_shape="ovo")
clf.fit(X, Y)
rbfs = rbf_kernel(X, clf.support_vectors_, gamma=clf.gamma)
dec = np.dot(rbfs, clf.dual_coef_.T) + clf.intercept_
assert_array_almost_equal(dec.ravel(), clf.decision_function(X))
@pytest.mark.parametrize("SVM", (svm.SVC, svm.NuSVC))
def test_decision_function_shape(SVM):
# check that decision_function_shape='ovr' or 'ovo' gives
# correct shape and is consistent with predict
clf = SVM(kernel="linear", decision_function_shape="ovr").fit(
iris.data, iris.target
)
dec = clf.decision_function(iris.data)
assert dec.shape == (len(iris.data), 3)
assert_array_equal(clf.predict(iris.data), np.argmax(dec, axis=1))
# with five classes:
X, y = make_blobs(n_samples=80, centers=5, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
clf = SVM(kernel="linear", decision_function_shape="ovr").fit(X_train, y_train)
dec = clf.decision_function(X_test)
assert dec.shape == (len(X_test), 5)
assert_array_equal(clf.predict(X_test), np.argmax(dec, axis=1))
    # check shape of decision_function with decision_function_shape="ovo"
clf = SVM(kernel="linear", decision_function_shape="ovo").fit(X_train, y_train)
dec = clf.decision_function(X_train)
assert dec.shape == (len(X_train), 10)
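# For reference (a hedged note, not in the original file): with
# decision_function_shape="ovo" there is one column per unordered pair of
# classes, so the expected width is n_classes * (n_classes - 1) / 2, i.e. 10
# columns for the five-class problem asserted above.
def _n_ovo_columns(n_classes):
    return n_classes * (n_classes - 1) // 2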
def test_svr_predict():
# Test SVR's decision_function
# Sanity check, test that predict implemented in python
# returns the same as the one in libsvm
X = iris.data
y = iris.target
# linear kernel
reg = svm.SVR(kernel="linear", C=0.1).fit(X, y)
dec = np.dot(X, reg.coef_.T) + reg.intercept_
assert_array_almost_equal(dec.ravel(), reg.predict(X).ravel())
# rbf kernel
reg = svm.SVR(kernel="rbf", gamma=1).fit(X, y)
rbfs = rbf_kernel(X, reg.support_vectors_, gamma=reg.gamma)
dec = np.dot(rbfs, reg.dual_coef_.T) + reg.intercept_
assert_array_almost_equal(dec.ravel(), reg.predict(X).ravel())
def test_weight():
# Test class weights
clf = svm.SVC(class_weight={1: 0.1})
    # we give a small weight to class 1
clf.fit(X, Y)
# so all predicted values belong to class 2
assert_array_almost_equal(clf.predict(X), [2] * 6)
X_, y_ = make_classification(
n_samples=200, n_features=10, weights=[0.833, 0.167], random_state=2
)
for clf in (
linear_model.LogisticRegression(),
svm.LinearSVC(random_state=0),
svm.SVC(),
):
clf.set_params(class_weight={0: 0.1, 1: 10})
clf.fit(X_[:100], y_[:100])
y_pred = clf.predict(X_[100:])
assert f1_score(y_[100:], y_pred) > 0.3
@pytest.mark.parametrize("estimator", [svm.SVC(C=1e-2), svm.NuSVC()])
def test_svm_classifier_sided_sample_weight(estimator):
    # fit a linear SVM and check that giving more weight to the samples on one
    # side of the boundary flips the decision toward those samples.
X = [[-2, 0], [-1, -1], [0, -2], [0, 2], [1, 1], [2, 0]]
estimator.set_params(kernel="linear")
# check that with unit weights, a sample is supposed to be predicted on
# the boundary
sample_weight = [1] * 6
estimator.fit(X, Y, sample_weight=sample_weight)
y_pred = estimator.decision_function([[-1.0, 1.0]])
assert y_pred == pytest.approx(0)
# give more weights to opposed samples
sample_weight = [10.0, 0.1, 0.1, 0.1, 0.1, 10]
estimator.fit(X, Y, sample_weight=sample_weight)
y_pred = estimator.decision_function([[-1.0, 1.0]])
assert y_pred < 0
sample_weight = [1.0, 0.1, 10.0, 10.0, 0.1, 0.1]
estimator.fit(X, Y, sample_weight=sample_weight)
y_pred = estimator.decision_function([[-1.0, 1.0]])
assert y_pred > 0
@pytest.mark.parametrize("estimator", [svm.SVR(C=1e-2), svm.NuSVR(C=1e-2)])
def test_svm_regressor_sided_sample_weight(estimator):
# similar test to test_svm_classifier_sided_sample_weight but for
# SVM regressors
X = [[-2, 0], [-1, -1], [0, -2], [0, 2], [1, 1], [2, 0]]
estimator.set_params(kernel="linear")
# check that with unit weights, a sample is supposed to be predicted on
# the boundary
sample_weight = [1] * 6
estimator.fit(X, Y, sample_weight=sample_weight)
y_pred = estimator.predict([[-1.0, 1.0]])
assert y_pred == pytest.approx(1.5)
# give more weights to opposed samples
sample_weight = [10.0, 0.1, 0.1, 0.1, 0.1, 10]
estimator.fit(X, Y, sample_weight=sample_weight)
y_pred = estimator.predict([[-1.0, 1.0]])
assert y_pred < 1.5
sample_weight = [1.0, 0.1, 10.0, 10.0, 0.1, 0.1]
estimator.fit(X, Y, sample_weight=sample_weight)
y_pred = estimator.predict([[-1.0, 1.0]])
assert y_pred > 1.5
def test_svm_equivalence_sample_weight_C():
# test that rescaling all samples is the same as changing C
clf = svm.SVC()
clf.fit(X, Y)
dual_coef_no_weight = clf.dual_coef_
clf.set_params(C=100)
clf.fit(X, Y, sample_weight=np.repeat(0.01, len(X)))
assert_allclose(dual_coef_no_weight, clf.dual_coef_)
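# Why the equivalence above holds (informal note): libsvm applies a per-sample
# penalty C_i = C * sample_weight_i, so multiplying C by 100 while scaling
# every sample weight by 1 / 100 leaves each C_i, and therefore the fitted
# dual coefficients, unchanged.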
@pytest.mark.parametrize(
"Estimator, err_msg",
[
(svm.SVC, "Invalid input - all samples have zero or negative weights."),
(svm.NuSVC, "(negative dimensions are not allowed|nu is infeasible)"),
(svm.SVR, "Invalid input - all samples have zero or negative weights."),
(svm.NuSVR, "Invalid input - all samples have zero or negative weights."),
(svm.OneClassSVM, "Invalid input - all samples have zero or negative weights."),
],
ids=["SVC", "NuSVC", "SVR", "NuSVR", "OneClassSVM"],
)
@pytest.mark.parametrize(
"sample_weight",
[[0] * len(Y), [-0.3] * len(Y)],
ids=["weights-are-zero", "weights-are-negative"],
)
def test_negative_sample_weights_mask_all_samples(Estimator, err_msg, sample_weight):
est = Estimator(kernel="linear")
with pytest.raises(ValueError, match=err_msg):
est.fit(X, Y, sample_weight=sample_weight)
@pytest.mark.parametrize(
"Classifier, err_msg",
[
(
svm.SVC,
"Invalid input - all samples with positive weights have the same label",
),
(svm.NuSVC, "specified nu is infeasible"),
],
ids=["SVC", "NuSVC"],
)
@pytest.mark.parametrize(
"sample_weight",
[[0, -0.5, 0, 1, 1, 1], [1, 1, 1, 0, -0.1, -0.3]],
ids=["mask-label-1", "mask-label-2"],
)
def test_negative_weights_svc_leave_just_one_label(Classifier, err_msg, sample_weight):
clf = Classifier(kernel="linear")
with pytest.raises(ValueError, match=err_msg):
clf.fit(X, Y, sample_weight=sample_weight)
@pytest.mark.parametrize(
"Classifier, model",
[
(svm.SVC, {"when-left": [0.3998, 0.4], "when-right": [0.4, 0.3999]}),
(svm.NuSVC, {"when-left": [0.3333, 0.3333], "when-right": [0.3333, 0.3333]}),
],
ids=["SVC", "NuSVC"],
)
@pytest.mark.parametrize(
"sample_weight, mask_side",
[([1, -0.5, 1, 1, 1, 1], "when-left"), ([1, 1, 1, 0, 1, 1], "when-right")],
ids=["partial-mask-label-1", "partial-mask-label-2"],
)
def test_negative_weights_svc_leave_two_labels(
Classifier, model, sample_weight, mask_side
):
clf = Classifier(kernel="linear")
clf.fit(X, Y, sample_weight=sample_weight)
assert_allclose(clf.coef_, [model[mask_side]], rtol=1e-3)
@pytest.mark.parametrize(
"Estimator", [svm.SVC, svm.NuSVC, svm.NuSVR], ids=["SVC", "NuSVC", "NuSVR"]
)
@pytest.mark.parametrize(
"sample_weight",
[[1, -0.5, 1, 1, 1, 1], [1, 1, 1, 0, 1, 1]],
ids=["partial-mask-label-1", "partial-mask-label-2"],
)
def test_negative_weight_equal_coeffs(Estimator, sample_weight):
    # check that the fitted model has equal coefficients (in absolute value)
est = Estimator(kernel="linear")
est.fit(X, Y, sample_weight=sample_weight)
coef = np.abs(est.coef_).ravel()
assert coef[0] == pytest.approx(coef[1], rel=1e-3)
@ignore_warnings(category=UndefinedMetricWarning)
def test_auto_weight():
# Test class weights for imbalanced data
from sklearn.linear_model import LogisticRegression
    # We take as dataset the two-dimensional projection of iris so
    # that it is not separable and remove half of the samples from
    # one class to make the problem imbalanced.
# We add one to the targets as a non-regression test:
# class_weight="balanced"
    # used to work only when the labels were a range [0..K).
from sklearn.utils import compute_class_weight
X, y = iris.data[:, :2], iris.target + 1
unbalanced = np.delete(np.arange(y.size), np.where(y > 2)[0][::2])
classes = np.unique(y[unbalanced])
class_weights = compute_class_weight("balanced", classes=classes, y=y[unbalanced])
assert np.argmax(class_weights) == 2
for clf in (
svm.SVC(kernel="linear"),
svm.LinearSVC(random_state=0),
LogisticRegression(),
):
        # check that score is better when class_weight="balanced" is set.
y_pred = clf.fit(X[unbalanced], y[unbalanced]).predict(X)
clf.set_params(class_weight="balanced")
y_pred_balanced = clf.fit(
X[unbalanced],
y[unbalanced],
).predict(X)
assert metrics.f1_score(y, y_pred, average="macro") <= metrics.f1_score(
y, y_pred_balanced, average="macro"
)
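# Sketch of the "balanced" heuristic exercised above (assumption: it mirrors
# sklearn.utils.class_weight.compute_class_weight): each class is weighted by
# n_samples / (n_classes * class_count), so under-represented classes receive
# proportionally larger weights.
def _balanced_class_weights(y):
    classes, counts = np.unique(y, return_counts=True)
    return classes, len(y) / (len(classes) * counts)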
def test_bad_input():
# Test dimensions for labels
Y2 = Y[:-1] # wrong dimensions for labels
with pytest.raises(ValueError):
svm.SVC().fit(X, Y2)
# Test with arrays that are non-contiguous.
for clf in (svm.SVC(), svm.LinearSVC(random_state=0)):
Xf = np.asfortranarray(X)
assert not Xf.flags["C_CONTIGUOUS"]
yf = np.ascontiguousarray(np.tile(Y, (2, 1)).T)
yf = yf[:, -1]
assert not yf.flags["F_CONTIGUOUS"]
assert not yf.flags["C_CONTIGUOUS"]
clf.fit(Xf, yf)
assert_array_equal(clf.predict(T), true_result)
    # error for precomputed kernels: fit rejects X that is not a square kernel matrix
clf = svm.SVC(kernel="precomputed")
with pytest.raises(ValueError):
clf.fit(X, Y)
# predict with sparse input when trained with dense
clf = svm.SVC().fit(X, Y)
with pytest.raises(ValueError):
clf.predict(sparse.lil_matrix(X))
Xt = np.array(X).T
clf.fit(np.dot(X, Xt), Y)
with pytest.raises(ValueError):
clf.predict(X)
clf = svm.SVC()
clf.fit(X, Y)
with pytest.raises(ValueError):
clf.predict(Xt)
def test_svc_nonfinite_params():
# Check SVC throws ValueError when dealing with non-finite parameter values
rng = np.random.RandomState(0)
n_samples = 10
fmax = np.finfo(np.float64).max
X = fmax * rng.uniform(size=(n_samples, 2))
y = rng.randint(0, 2, size=n_samples)
clf = svm.SVC()
msg = "The dual coefficients or intercepts are not finite"
with pytest.raises(ValueError, match=msg):
clf.fit(X, y)
def test_unicode_kernel():
# Test that a unicode kernel name does not cause a TypeError
clf = svm.SVC(kernel="linear", probability=True)
clf.fit(X, Y)
clf.predict_proba(T)
_libsvm.cross_validation(
iris.data, iris.target.astype(np.float64), 5, kernel="linear", random_seed=0
)
def test_sparse_precomputed():
clf = svm.SVC(kernel="precomputed")
sparse_gram = sparse.csr_matrix([[1, 0], [0, 1]])
with pytest.raises(TypeError, match="Sparse precomputed"):
clf.fit(sparse_gram, [0, 1])
def test_sparse_fit_support_vectors_empty():
# Regression test for #14893
X_train = sparse.csr_matrix(
[[0, 1, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0], [0, 0, 0, 1]]
)
y_train = np.array([0.04, 0.04, 0.10, 0.16])
model = svm.SVR(kernel="linear")
model.fit(X_train, y_train)
assert not model.support_vectors_.data.size
assert not model.dual_coef_.data.size
@pytest.mark.parametrize("loss", ["hinge", "squared_hinge"])
@pytest.mark.parametrize("penalty", ["l1", "l2"])
@pytest.mark.parametrize("dual", [True, False])
def test_linearsvc_parameters(loss, penalty, dual):
# Test possible parameter combinations in LinearSVC
# Generate list of possible parameter combinations
X, y = make_classification(n_samples=5, n_features=5, random_state=0)
clf = svm.LinearSVC(penalty=penalty, loss=loss, dual=dual, random_state=0)
if (
(loss, penalty) == ("hinge", "l1")
or (loss, penalty, dual) == ("hinge", "l2", False)
or (penalty, dual) == ("l1", True)
):
with pytest.raises(
ValueError,
match="Unsupported set of arguments.*penalty='%s.*loss='%s.*dual=%s"
% (penalty, loss, dual),
):
clf.fit(X, y)
else:
clf.fit(X, y)
def test_linearsvc():
# Test basic routines using LinearSVC
clf = svm.LinearSVC(random_state=0).fit(X, Y)
# by default should have intercept
assert clf.fit_intercept
assert_array_equal(clf.predict(T), true_result)
assert_array_almost_equal(clf.intercept_, [0], decimal=3)
# the same with l1 penalty
clf = svm.LinearSVC(
penalty="l1", loss="squared_hinge", dual=False, random_state=0
).fit(X, Y)
assert_array_equal(clf.predict(T), true_result)
# l2 penalty with dual formulation
clf = svm.LinearSVC(penalty="l2", dual=True, random_state=0).fit(X, Y)
assert_array_equal(clf.predict(T), true_result)
# l2 penalty, l1 loss
clf = svm.LinearSVC(penalty="l2", loss="hinge", dual=True, random_state=0)
clf.fit(X, Y)
assert_array_equal(clf.predict(T), true_result)
# test also decision function
dec = clf.decision_function(T)
res = (dec > 0).astype(int) + 1
assert_array_equal(res, true_result)
def test_linearsvc_crammer_singer():
# Test LinearSVC with crammer_singer multi-class svm
ovr_clf = svm.LinearSVC(random_state=0).fit(iris.data, iris.target)
cs_clf = svm.LinearSVC(multi_class="crammer_singer", random_state=0)
cs_clf.fit(iris.data, iris.target)
# similar prediction for ovr and crammer-singer:
assert (ovr_clf.predict(iris.data) == cs_clf.predict(iris.data)).mean() > 0.9
# classifiers shouldn't be the same
assert (ovr_clf.coef_ != cs_clf.coef_).all()
# test decision function
assert_array_equal(
cs_clf.predict(iris.data),
np.argmax(cs_clf.decision_function(iris.data), axis=1),
)
dec_func = np.dot(iris.data, cs_clf.coef_.T) + cs_clf.intercept_
assert_array_almost_equal(dec_func, cs_clf.decision_function(iris.data))
def test_linearsvc_fit_sampleweight():
# check correct result when sample_weight is 1
n_samples = len(X)
unit_weight = np.ones(n_samples)
clf = svm.LinearSVC(random_state=0).fit(X, Y)
clf_unitweight = svm.LinearSVC(random_state=0, tol=1e-12, max_iter=1000).fit(
X, Y, sample_weight=unit_weight
)
# check if same as sample_weight=None
assert_array_equal(clf_unitweight.predict(T), clf.predict(T))
assert_allclose(clf.coef_, clf_unitweight.coef_, 1, 0.0001)
# check that fit(X) = fit([X1, X2, X3],sample_weight = [n1, n2, n3]) where
# X = X1 repeated n1 times, X2 repeated n2 times and so forth
random_state = check_random_state(0)
random_weight = random_state.randint(0, 10, n_samples)
lsvc_unflat = svm.LinearSVC(random_state=0, tol=1e-12, max_iter=1000).fit(
X, Y, sample_weight=random_weight
)
pred1 = lsvc_unflat.predict(T)
X_flat = np.repeat(X, random_weight, axis=0)
y_flat = np.repeat(Y, random_weight, axis=0)
lsvc_flat = svm.LinearSVC(random_state=0, tol=1e-12, max_iter=1000).fit(
X_flat, y_flat
)
pred2 = lsvc_flat.predict(T)
assert_array_equal(pred1, pred2)
assert_allclose(lsvc_unflat.coef_, lsvc_flat.coef_, 1, 0.0001)
def test_crammer_singer_binary():
# Test Crammer-Singer formulation in the binary case
X, y = make_classification(n_classes=2, random_state=0)
for fit_intercept in (True, False):
acc = (
svm.LinearSVC(
fit_intercept=fit_intercept,
multi_class="crammer_singer",
random_state=0,
)
.fit(X, y)
.score(X, y)
)
assert acc > 0.9
def test_linearsvc_iris():
# Test that LinearSVC gives plausible predictions on the iris dataset
# Also, test symbolic class names (classes_).
target = iris.target_names[iris.target]
clf = svm.LinearSVC(random_state=0).fit(iris.data, target)
assert set(clf.classes_) == set(iris.target_names)
assert np.mean(clf.predict(iris.data) == target) > 0.8
dec = clf.decision_function(iris.data)
pred = iris.target_names[np.argmax(dec, 1)]
assert_array_equal(pred, clf.predict(iris.data))
def test_dense_liblinear_intercept_handling(classifier=svm.LinearSVC):
# Test that dense liblinear honours intercept_scaling param
X = [[2, 1], [3, 1], [1, 3], [2, 3]]
y = [0, 0, 1, 1]
clf = classifier(
fit_intercept=True,
penalty="l1",
loss="squared_hinge",
dual=False,
C=4,
tol=1e-7,
random_state=0,
)
assert clf.intercept_scaling == 1, clf.intercept_scaling
assert clf.fit_intercept
# when intercept_scaling is low the intercept value is highly "penalized"
# by regularization
clf.intercept_scaling = 1
clf.fit(X, y)
assert_almost_equal(clf.intercept_, 0, decimal=5)
# when intercept_scaling is sufficiently high, the intercept value
# is not affected by regularization
clf.intercept_scaling = 100
clf.fit(X, y)
intercept1 = clf.intercept_
assert intercept1 < -1
# when intercept_scaling is sufficiently high, the intercept value
# doesn't depend on intercept_scaling value
clf.intercept_scaling = 1000
clf.fit(X, y)
intercept2 = clf.intercept_
assert_array_almost_equal(intercept1, intercept2, decimal=2)
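# Background for the assertions above (informal note): liblinear implements
# the intercept by appending a synthetic constant feature whose value is
# intercept_scaling. The weight learned for that feature is regularized like
# any other, so a small scaling keeps the intercept "penalized" toward zero
# while a large scaling lets it move freely.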
def test_liblinear_set_coef():
# multi-class case
clf = svm.LinearSVC().fit(iris.data, iris.target)
values = clf.decision_function(iris.data)
clf.coef_ = clf.coef_.copy()
clf.intercept_ = clf.intercept_.copy()
values2 = clf.decision_function(iris.data)
assert_array_almost_equal(values, values2)
# binary-class case
X = [[2, 1], [3, 1], [1, 3], [2, 3]]
y = [0, 0, 1, 1]
clf = svm.LinearSVC().fit(X, y)
values = clf.decision_function(X)
clf.coef_ = clf.coef_.copy()
clf.intercept_ = clf.intercept_.copy()
values2 = clf.decision_function(X)
assert_array_equal(values, values2)
def test_immutable_coef_property():
    # Check that primal coef modifications are not silently ignored
svms = [
svm.SVC(kernel="linear").fit(iris.data, iris.target),
svm.NuSVC(kernel="linear").fit(iris.data, iris.target),
svm.SVR(kernel="linear").fit(iris.data, iris.target),
svm.NuSVR(kernel="linear").fit(iris.data, iris.target),
svm.OneClassSVM(kernel="linear").fit(iris.data),
]
for clf in svms:
with pytest.raises(AttributeError):
clf.__setattr__("coef_", np.arange(3))
with pytest.raises((RuntimeError, ValueError)):
clf.coef_.__setitem__((0, 0), 0)
def test_linearsvc_verbose():
# stdout: redirect
import os
stdout = os.dup(1) # save original stdout
os.dup2(os.pipe()[1], 1) # replace it
# actual call
clf = svm.LinearSVC(verbose=1)
clf.fit(X, Y)
# stdout: restore
os.dup2(stdout, 1) # restore original stdout
def test_svc_clone_with_callable_kernel():
# create SVM with callable linear kernel, check that results are the same
# as with built-in linear kernel
svm_callable = svm.SVC(
kernel=lambda x, y: np.dot(x, y.T),
probability=True,
random_state=0,
decision_function_shape="ovr",
)
    # clone to check clonability with lambda functions
svm_cloned = base.clone(svm_callable)
svm_cloned.fit(iris.data, iris.target)
svm_builtin = svm.SVC(
kernel="linear", probability=True, random_state=0, decision_function_shape="ovr"
)
svm_builtin.fit(iris.data, iris.target)
assert_array_almost_equal(svm_cloned.dual_coef_, svm_builtin.dual_coef_)
assert_array_almost_equal(svm_cloned.intercept_, svm_builtin.intercept_)
assert_array_equal(svm_cloned.predict(iris.data), svm_builtin.predict(iris.data))
assert_array_almost_equal(
svm_cloned.predict_proba(iris.data),
svm_builtin.predict_proba(iris.data),
decimal=4,
)
assert_array_almost_equal(
svm_cloned.decision_function(iris.data),
svm_builtin.decision_function(iris.data),
)
def test_svc_bad_kernel():
svc = svm.SVC(kernel=lambda x, y: x)
with pytest.raises(ValueError):
svc.fit(X, Y)
def test_libsvm_convergence_warnings():
a = svm.SVC(
kernel=lambda x, y: np.dot(x, y.T), probability=True, random_state=0, max_iter=2
)
warning_msg = (
r"Solver terminated early \(max_iter=2\). Consider pre-processing "
r"your data with StandardScaler or MinMaxScaler."
)
with pytest.warns(ConvergenceWarning, match=warning_msg):
a.fit(np.array(X), Y)
assert np.all(a.n_iter_ == 2)
def test_unfitted():
X = "foo!" # input validation not required when SVM not fitted
clf = svm.SVC()
with pytest.raises(Exception, match=r".*\bSVC\b.*\bnot\b.*\bfitted\b"):
clf.predict(X)
clf = svm.NuSVR()
with pytest.raises(Exception, match=r".*\bNuSVR\b.*\bnot\b.*\bfitted\b"):
clf.predict(X)
# ignore convergence warnings from max_iter=1
@ignore_warnings
def test_consistent_proba():
a = svm.SVC(probability=True, max_iter=1, random_state=0)
proba_1 = a.fit(X, Y).predict_proba(X)
a = svm.SVC(probability=True, max_iter=1, random_state=0)
proba_2 = a.fit(X, Y).predict_proba(X)
assert_array_almost_equal(proba_1, proba_2)
def test_linear_svm_convergence_warnings():
# Test that warnings are raised if model does not converge
lsvc = svm.LinearSVC(random_state=0, max_iter=2)
warning_msg = "Liblinear failed to converge, increase the number of iterations."
with pytest.warns(ConvergenceWarning, match=warning_msg):
lsvc.fit(X, Y)
# Check that we have an n_iter_ attribute with int type as opposed to a
# numpy array or an np.int32 so as to match the docstring.
assert isinstance(lsvc.n_iter_, int)
assert lsvc.n_iter_ == 2
lsvr = svm.LinearSVR(random_state=0, max_iter=2)
with pytest.warns(ConvergenceWarning, match=warning_msg):
lsvr.fit(iris.data, iris.target)
assert isinstance(lsvr.n_iter_, int)
assert lsvr.n_iter_ == 2
def test_svr_coef_sign():
# Test that SVR(kernel="linear") has coef_ with the right sign.
# Non-regression test for #2933.
X = np.random.RandomState(21).randn(10, 3)
y = np.random.RandomState(12).randn(10)
for svr in [svm.SVR(kernel="linear"), svm.NuSVR(kernel="linear"), svm.LinearSVR()]:
svr.fit(X, y)
assert_array_almost_equal(
svr.predict(X), np.dot(X, svr.coef_.ravel()) + svr.intercept_
)
def test_lsvc_intercept_scaling_zero():
# Test that intercept_scaling is ignored when fit_intercept is False
lsvc = svm.LinearSVC(fit_intercept=False)
lsvc.fit(X, Y)
assert lsvc.intercept_ == 0.0
def test_hasattr_predict_proba():
# Method must be (un)available before or after fit, switched by
# `probability` param
G = svm.SVC(probability=True)
assert hasattr(G, "predict_proba")
G.fit(iris.data, iris.target)
assert hasattr(G, "predict_proba")
G = svm.SVC(probability=False)
assert not hasattr(G, "predict_proba")
G.fit(iris.data, iris.target)
assert not hasattr(G, "predict_proba")
# Switching to `probability=True` after fitting should make
# predict_proba available, but calling it must not work:
G.probability = True
assert hasattr(G, "predict_proba")
msg = "predict_proba is not available when fitted with probability=False"
with pytest.raises(NotFittedError, match=msg):
G.predict_proba(iris.data)
def test_decision_function_shape_two_class():
for n_classes in [2, 3]:
X, y = make_blobs(centers=n_classes, random_state=0)
for estimator in [svm.SVC, svm.NuSVC]:
clf = OneVsRestClassifier(estimator(decision_function_shape="ovr")).fit(
X, y
)
assert len(clf.predict(X)) == len(y)
def test_ovr_decision_function():
# One point from each quadrant represents one class
X_train = np.array([[1, 1], [-1, 1], [-1, -1], [1, -1]])
y_train = [0, 1, 2, 3]
# First point is closer to the decision boundaries than the second point
base_points = np.array([[5, 5], [10, 10]])
# For all the quadrants (classes)
X_test = np.vstack(
(
base_points * [1, 1], # Q1
base_points * [-1, 1], # Q2
base_points * [-1, -1], # Q3
base_points * [1, -1], # Q4
)
)
y_test = [0] * 2 + [1] * 2 + [2] * 2 + [3] * 2
clf = svm.SVC(kernel="linear", decision_function_shape="ovr")
clf.fit(X_train, y_train)
y_pred = clf.predict(X_test)
# Test if the prediction is the same as y
assert_array_equal(y_pred, y_test)
deci_val = clf.decision_function(X_test)
# Assert that the predicted class has the maximum value
assert_array_equal(np.argmax(deci_val, axis=1), y_pred)
# Get decision value at test points for the predicted class
pred_class_deci_val = deci_val[range(8), y_pred].reshape((4, 2))
# Assert pred_class_deci_val > 0 here
assert np.min(pred_class_deci_val) > 0.0
# Test if the first point has lower decision value on every quadrant
# compared to the second point
assert np.all(pred_class_deci_val[:, 0] < pred_class_deci_val[:, 1])
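# Note (an informal pointer, not from the original file): the "ovr" values
# checked above are built from the pairwise "ovo" votes plus normalized
# pairwise confidences (see sklearn.utils.multiclass._ovr_decision_function),
# which is why points farther from the decision boundaries receive strictly
# larger values for their predicted class.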
@pytest.mark.parametrize("SVCClass", [svm.SVC, svm.NuSVC])
def test_svc_invalid_break_ties_param(SVCClass):
X, y = make_blobs(random_state=42)
svm = SVCClass(
kernel="linear", decision_function_shape="ovo", break_ties=True, random_state=42
).fit(X, y)
with pytest.raises(ValueError, match="break_ties must be False"):
svm.predict(y)
@pytest.mark.parametrize("SVCClass", [svm.SVC, svm.NuSVC])
def test_svc_ovr_tie_breaking(SVCClass):
"""Test if predict breaks ties in OVR mode.
Related issue: https://github.com/scikit-learn/scikit-learn/issues/8277
"""
X, y = make_blobs(random_state=0, n_samples=20, n_features=2)
xs = np.linspace(X[:, 0].min(), X[:, 0].max(), 100)
ys = np.linspace(X[:, 1].min(), X[:, 1].max(), 100)
xx, yy = np.meshgrid(xs, ys)
common_params = dict(
kernel="rbf", gamma=1e6, random_state=42, decision_function_shape="ovr"
)
svm = SVCClass(
break_ties=False,
**common_params,
).fit(X, y)
pred = svm.predict(np.c_[xx.ravel(), yy.ravel()])
dv = svm.decision_function(np.c_[xx.ravel(), yy.ravel()])
assert not np.all(pred == np.argmax(dv, axis=1))
svm = SVCClass(
break_ties=True,
**common_params,
).fit(X, y)
pred = svm.predict(np.c_[xx.ravel(), yy.ravel()])
dv = svm.decision_function(np.c_[xx.ravel(), yy.ravel()])
assert np.all(pred == np.argmax(dv, axis=1))
def test_gamma_scale():
X, y = [[0.0], [1.0]], [0, 1]
clf = svm.SVC()
clf.fit(X, y)
assert_almost_equal(clf._gamma, 4)
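# The expected value follows from the documented gamma="scale" formula:
# gamma = 1 / (n_features * X.var()). Here X = [[0.], [1.]] has one feature
# with variance 0.25, so gamma = 1 / (1 * 0.25) = 4.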
@pytest.mark.parametrize(
"SVM, params",
[
(LinearSVC, {"penalty": "l1", "loss": "squared_hinge", "dual": False}),
(LinearSVC, {"penalty": "l2", "loss": "squared_hinge", "dual": True}),
(LinearSVC, {"penalty": "l2", "loss": "squared_hinge", "dual": False}),
(LinearSVC, {"penalty": "l2", "loss": "hinge", "dual": True}),
(LinearSVR, {"loss": "epsilon_insensitive", "dual": True}),
(LinearSVR, {"loss": "squared_epsilon_insensitive", "dual": True}),
(LinearSVR, {"loss": "squared_epsilon_insensitive", "dual": True}),
],
)
def test_linearsvm_liblinear_sample_weight(SVM, params):
X = np.array(
[
[1, 3],
[1, 3],
[1, 3],
[1, 3],
[2, 1],
[2, 1],
[2, 1],
[2, 1],
[3, 3],
[3, 3],
[3, 3],
[3, 3],
[4, 1],
[4, 1],
[4, 1],
[4, 1],
],
dtype=np.dtype("float"),
)
y = np.array(
[1, 1, 1, 1, 2, 2, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2], dtype=np.dtype("int")
)
X2 = np.vstack([X, X])
y2 = np.hstack([y, 3 - y])
sample_weight = np.ones(shape=len(y) * 2)
sample_weight[len(y) :] = 0
X2, y2, sample_weight = shuffle(X2, y2, sample_weight, random_state=0)
base_estimator = SVM(random_state=42)
base_estimator.set_params(**params)
base_estimator.set_params(tol=1e-12, max_iter=1000)
est_no_weight = base.clone(base_estimator).fit(X, y)
est_with_weight = base.clone(base_estimator).fit(
X2, y2, sample_weight=sample_weight
)
for method in ("predict", "decision_function"):
if hasattr(base_estimator, method):
X_est_no_weight = getattr(est_no_weight, method)(X)
X_est_with_weight = getattr(est_with_weight, method)(X)
assert_allclose(X_est_no_weight, X_est_with_weight)
@pytest.mark.parametrize("Klass", (OneClassSVM, SVR, NuSVR))
def test_n_support(Klass):
    # Make sure n_support_ is correct for OneClassSVM and SVR (it used to be
    # left uninitialized).
    # Non-regression test for issue #14774.
X = np.array([[0], [0.44], [0.45], [0.46], [1]])
y = np.arange(X.shape[0])
est = Klass()
assert not hasattr(est, "n_support_")
est.fit(X, y)
assert est.n_support_[0] == est.support_vectors_.shape[0]
assert est.n_support_.size == 1
@pytest.mark.parametrize("Estimator", [svm.SVC, svm.SVR])
def test_custom_kernel_not_array_input(Estimator):
"""Test using a custom kernel that is not fed with array-like for floats"""
data = ["A A", "A", "B", "B B", "A B"]
X = np.array([[2, 0], [1, 0], [0, 1], [0, 2], [1, 1]]) # count encoding
y = np.array([1, 1, 2, 2, 1])
def string_kernel(X1, X2):
assert isinstance(X1[0], str)
n_samples1 = _num_samples(X1)
n_samples2 = _num_samples(X2)
K = np.zeros((n_samples1, n_samples2))
for ii in range(n_samples1):
for jj in range(ii, n_samples2):
K[ii, jj] = X1[ii].count("A") * X2[jj].count("A")
K[ii, jj] += X1[ii].count("B") * X2[jj].count("B")
K[jj, ii] = K[ii, jj]
return K
K = string_kernel(data, data)
assert_array_equal(np.dot(X, X.T), K)
svc1 = Estimator(kernel=string_kernel).fit(data, y)
svc2 = Estimator(kernel="linear").fit(X, y)
svc3 = Estimator(kernel="precomputed").fit(K, y)
assert svc1.score(data, y) == svc3.score(K, y)
assert svc1.score(data, y) == svc2.score(X, y)
if hasattr(svc1, "decision_function"): # classifier
assert_allclose(svc1.decision_function(data), svc2.decision_function(X))
assert_allclose(svc1.decision_function(data), svc3.decision_function(K))
assert_array_equal(svc1.predict(data), svc2.predict(X))
assert_array_equal(svc1.predict(data), svc3.predict(K))
else: # regressor
assert_allclose(svc1.predict(data), svc2.predict(X))
assert_allclose(svc1.predict(data), svc3.predict(K))
def test_svc_raises_error_internal_representation():
"""Check that SVC raises error when internal representation is altered.
Non-regression test for #18891 and https://nvd.nist.gov/vuln/detail/CVE-2020-28975
"""
clf = svm.SVC(kernel="linear").fit(X, Y)
clf._n_support[0] = 1000000
msg = "The internal representation of SVC was altered"
with pytest.raises(ValueError, match=msg):
clf.predict(X)
@pytest.mark.parametrize(
"estimator, expected_n_iter_type",
[
(svm.SVC, np.ndarray),
(svm.NuSVC, np.ndarray),
(svm.SVR, int),
(svm.NuSVR, int),
(svm.OneClassSVM, int),
],
)
@pytest.mark.parametrize(
"dataset",
[
make_classification(n_classes=2, n_informative=2, random_state=0),
make_classification(n_classes=3, n_informative=3, random_state=0),
make_classification(n_classes=4, n_informative=4, random_state=0),
],
)
def test_n_iter_libsvm(estimator, expected_n_iter_type, dataset):
# Check that the type of n_iter_ is correct for the classes that inherit
# from BaseSVC.
# Note that for SVC, and NuSVC this is an ndarray; while for SVR, NuSVR, and
# OneClassSVM, it is an int.
# For SVC and NuSVC also check the shape of n_iter_.
X, y = dataset
n_iter = estimator(kernel="linear").fit(X, y).n_iter_
assert type(n_iter) == expected_n_iter_type
if estimator in [svm.SVC, svm.NuSVC]:
n_classes = len(np.unique(y))
assert n_iter.shape == (n_classes * (n_classes - 1) // 2,)
# TODO(1.4): Remove
@pytest.mark.parametrize("Klass", [SVR, NuSVR, OneClassSVM])
def test_svm_class_weights_deprecation(Klass):
clf = Klass()
with warnings.catch_warnings():
warnings.simplefilter("error", FutureWarning)
clf.fit(X, Y)
msg = (
"Attribute `class_weight_` was deprecated in version 1.2 and will be removed"
" in 1.4"
)
with pytest.warns(FutureWarning, match=re.escape(msg)):
getattr(clf, "class_weight_")
| bsd-3-clause |
pytorch/fairseq | fairseq/tasks/cross_lingual_lm.py | 1 | 6454 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import itertools
import logging
import os
from collections import OrderedDict
import numpy as np
from fairseq import tokenizer, utils
from fairseq.data import ConcatDataset, Dictionary, TokenBlockDataset, data_utils
from fairseq.data.legacy.masked_lm_dataset import MaskedLMDataset
from fairseq.data.legacy.masked_lm_dictionary import MaskedLMDictionary
from fairseq.data.multi_corpus_sampled_dataset import MultiCorpusSampledDataset
from fairseq.tasks import LegacyFairseqTask, register_task
logger = logging.getLogger(__name__)
@register_task("cross_lingual_lm")
class CrossLingualLMTask(LegacyFairseqTask):
"""
Task for training cross-lingual language models.
For more details look at: https://arxiv.org/pdf/1901.07291.pdf
Args:
dictionary (Dictionary): the dictionary for the input of the task
"""
@staticmethod
def add_args(parser):
"""Add task-specific arguments to the parser."""
parser.add_argument(
"data",
help="colon separated path to data directories list, \
will be iterated upon during epochs in round-robin manner",
)
parser.add_argument(
"--tokens-per-sample",
default=512,
type=int,
help="max number of total tokens over all segments" " per sample",
)
parser.add_argument(
"--monolingual-langs",
default="en",
type=str,
help="comma separated list of languages for which we"
" want to train XLM on",
)
parser.add_argument(
"--shuffle",
action="store_true",
help="shuffle each monolingual dataset while" " training",
)
def __init__(self, args, dictionary):
super().__init__(args)
self.dictionary = dictionary
self.seed = args.seed
self.distributed_world_size = args.distributed_world_size
self.langs2id = self._lang_to_id(args.monolingual_langs)
def _lang_to_id(self, languages: str):
"""
Build a map from languages to ids. These ids are used as segment labels
for cross-lingual LM training.
"""
lang2id = {}
        langs = [lang.strip() for lang in languages.split(",")]
        for lang_id, lang in enumerate(langs):
            lang2id[lang] = lang_id
return lang2id
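    # Illustrative example (hypothetical input, not from the original file):
    # _lang_to_id("en,fr,de") returns {"en": 0, "fr": 1, "de": 2}; these ids
    # are later passed to MaskedLMDataset as segment labels in load_dataset.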
@classmethod
def load_dictionary(cls, filename):
return MaskedLMDictionary.load(filename)
@classmethod
def build_dictionary(
cls, filenames, workers=1, threshold=-1, nwords=-1, padding_factor=8
):
d = MaskedLMDictionary()
for filename in filenames:
Dictionary.add_file_to_dictionary(
filename, d, tokenizer.tokenize_line, workers
)
d.finalize(threshold=threshold, nwords=nwords, padding_factor=padding_factor)
return d
@property
def target_dictionary(self):
return self.dictionary
@classmethod
def setup_task(cls, args, **kwargs):
"""Setup the task."""
dictionary = MaskedLMDictionary.load(os.path.join(args.data, "dict.txt"))
logger.info("dictionary: {} types".format(len(dictionary)))
return cls(args, dictionary)
def _load_single_lang_dataset(self, split, epoch):
loaded_datasets = []
paths = utils.split_paths(self.args.data)
assert len(paths) > 0
data_path = paths[(epoch - 1) % len(paths)]
for k in itertools.count():
split_k = split + (str(k) if k > 0 else "")
path = os.path.join(data_path, split_k)
ds = data_utils.load_indexed_dataset(
path, self.dictionary, self.args.dataset_impl
)
if ds is None:
if k > 0:
break
else:
raise FileNotFoundError(
"Dataset not found: {} ({})".format(split, data_path)
)
# Since we append each block with the classification_token,
# we need to effectively create blocks of length
# tokens_per_sample-1
loaded_datasets.append(
TokenBlockDataset(
ds,
ds.sizes,
self.args.tokens_per_sample - 1,
pad=self.dictionary.pad(),
eos=self.dictionary.eos(),
)
)
logger.info(
"{} {} {} examples".format(data_path, split_k, len(loaded_datasets[-1]))
)
if len(loaded_datasets) == 1:
dataset = loaded_datasets[0]
sizes = dataset.sizes
else:
dataset = ConcatDataset(loaded_datasets)
sizes = np.concatenate([ds.sizes for ds in loaded_datasets])
return dataset, sizes
def load_dataset(self, split, epoch=1, combine=False, **kwargs):
"""Load a given dataset split.
Args:
split (str): name of the split (e.g., train, valid, test)
"""
dataset_map = OrderedDict()
for lang in self.langs2id.keys():
            # Datasets are expected to be in "split.lang" format (e.g. train.en)
language_split = "{}.{}".format(split, lang)
block_dataset, sizes = self._load_single_lang_dataset(
split=language_split, epoch=epoch
)
dataset_map[lang] = MaskedLMDataset(
dataset=block_dataset,
sizes=sizes,
vocab=self.dictionary,
pad_idx=self.dictionary.pad(),
mask_idx=self.dictionary.mask(),
classif_token_idx=self.dictionary.eos(),
sep_token_idx=self.dictionary.eos(),
shuffle=getattr(self.args, "shuffle", False),
has_pairs=False,
segment_id=self.langs2id[lang],
seed=self.seed,
)
self.datasets[split] = MultiCorpusSampledDataset(dataset_map)
logger.info(
"{} {} {} examples".format(
utils.split_paths(self.args.data)[epoch - 1],
split,
len(self.datasets[split]),
)
)
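# Hypothetical usage sketch (the data path is an assumption; the flags come
# from add_args above): the task would typically be selected via fairseq-train,
# e.g.
#   fairseq-train /data/xlm --task cross_lingual_lm \
#       --monolingual-langs en,fr --tokens-per-sample 256 --shuffle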
| mit |