Columns:
  rem      string (lengths 1 to 322k)
  add      string (lengths 0 to 2.05M)
  context  string (lengths 4 to 228k)
  meta     string (lengths 156 to 215)
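Each record below appears to follow the column order above: the removed code (rem), the added code (add), the enclosing function (context), and a meta line giving the originating commit hash and source file path.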
from scipy_test.testing import ScipyTest
from scipy.test.testing import ScipyTest
def _ppimport_importer(self):
    name = self.__name__
cf7437641c3970f9712152ab4ef39c45532e5297 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/cf7437641c3970f9712152ab4ef39c45532e5297/ppimport.py
import gui_thread_guts
import gui_thread_guts
def exit_gui_thread(last_exit = oldexitfunc):
    # don't wait on MS platforms -- it hangs.
    # On X11, we have to shut down the secondary thread.
    if running_in_second_thread and os.name != 'nt':
        import gui_thread_guts
        event_poster = gui_thread_guts.proxy_base()
        event_catcher = event_poster.catcher
        finished = threading.Event()
        evt = gui_thread_guts.proxy_event(event_catcher.Close, (),{},finished)
        event_poster.post(evt)
        # wait for event to get handled
        finished.wait()
        # wait for the gui_thread to die.
        gui_thread_finished.wait()
    if last_exit:
        last_exit()
34865d4577575a214163bb59a5d287b7df2430a9 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/34865d4577575a214163bb59a5d287b7df2430a9/main.py
finished = threading.Event()
evt = gui_thread_guts.proxy_event(event_catcher.Close, (),{},finished)
event_poster.post(evt)
finished.wait()
if in_proxy_call:
    event_catcher.Close()
else:
    finished = threading.Event()
    evt = gui_thread_guts.proxy_event(event_catcher.Close, (),{},finished)
    event_poster.post(evt)
    finished.wait()
(context: verbatim duplicate of the exit_gui_thread listing above)
34865d4577575a214163bb59a5d287b7df2430a9 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/34865d4577575a214163bb59a5d287b7df2430a9/main.py
if not hasattr(wx_class, 'init2'):
    wx_class.init2 = wx_class.__init__
if not hasattr(wx_class, '_iNiT2'):
    if hasattr(wx_class, '__init__'):
        wx_class._iNiT2 = wx_class.__init__
    else:
        wx_class._iNiT2 = None
def register(wx_class):
    """ Create a gui_thread compatible version of wx_class

        Test whether a proxy is necessary.  If so, generate and return
        the proxy class.  if not, just return the wx_class unaltered.
    """
    if running_in_second_thread:
        #print 'proxy generated'
        return proxify(wx_class)
    else:
        if not hasattr(wx_class, 'init2'):
            wx_class.init2 = wx_class.__init__
            wx_class.__init__ = plain_class__init__
        return wx_class
34865d4577575a214163bb59a5d287b7df2430a9 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/34865d4577575a214163bb59a5d287b7df2430a9/main.py
self.init2(*args,**kw)
"""This is apparently useful for apps like PyCrust.""" if self._iNiT2: self._iNiT2(*args,**kw)
def plain_class__init__(self,*args,**kw):
    self.init2(*args,**kw)
    add_close_event_handler(self)
    self.proxy_object_alive = 1
34865d4577575a214163bb59a5d287b7df2430a9 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/34865d4577575a214163bb59a5d287b7df2430a9/main.py
results = 'self.wx_obj = finished._result;' \
results = 'self.wx_obj = ret_val;' \
def generate_method(method,wx_class):
    """ Create a proxy method.

        This first creates a text version of the method, accounting for
        slight differences between __init__ methods and all other
        methods.  It converts the text to a code object (using exec) and
        returns the code object.  The code is never actually written to
        a file.  It takes about .4 seconds on a wxFrame object with 150
        methods.  This is a one time cost at start up.  It might be
        beneficial if we use the same code over and over to actually
        write the proxy class to a module. (.pyc file?)
    """
    module_name = wx_class.__module__
    class_name = wx_class.__name__
    import_statement = 'from %s import %s' % (module_name,class_name)
    documentation = ""
    try:
        documentation = getattr(getattr(wx_class, method), '__doc__')
    except AttributeError:
        pass
    if method == '__init__':
        call_method = class_name
        pre_test = ''
        #pre_test = 'from gui_thread_guts import proxy_base;'\
        #           'proxy_base.__init__(self)'
        arguments = 'arg_list = args'
        results = 'self.wx_obj = finished._result;' \
                  'add_close_event_handler(self);' \
                  'self.proxy_object_alive = 1;'
    elif (method == '__getattr__') or (method == '__del__'):
        return None
    else:
        pre_test = "if not self.proxy_object_alive: proxy_error()"
        call_method = '%s.%s' % (class_name,method)
        arguments = 'arg_list = tuple([self.wx_obj] + list(args))'
        results = 'return smart_return(finished._result, self)'
    body = """def %(method)s(self,*args,**kw):
        \"\"\"%(documentation)s\"\"\"
        %(pre_test)s
        from gui_thread_guts import proxy_event, smart_return
        %(import_statement)s #import statement
        finished = threading.Event()
        # remove proxies if present
        args = dereference_arglist(args)
        %(arguments)s #arguments
        evt = proxy_event(%(call_method)s,arg_list,kw,finished)
        self.post(evt)
        finished.wait()
        if finished.exception_info:
            raise finished.exception_info[0],finished.exception_info[1]
        %(results)s #results\n""" %locals()
    #if method == '__init__':
    #    print body
    exec(body)
    return eval(method)
34865d4577575a214163bb59a5d287b7df2430a9 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/34865d4577575a214163bb59a5d287b7df2430a9/main.py
results = 'return smart_return(finished._result, self)'
results = 'return smart_return(ret_val)'
(context: verbatim duplicate of the generate_method listing above)
34865d4577575a214163bb59a5d287b7df2430a9 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/34865d4577575a214163bb59a5d287b7df2430a9/main.py
%(import_statement)s
finished = threading.Event()
%(import_statement)s
body = """def %(method)s(self,*args,**kw): \"\"\"%(documentation)s\"\"\" %(pre_test)s from gui_thread_guts import proxy_event, smart_return %(import_statement)s #import statement finished = threading.Event() # remove proxies if present args = dereference_arglist(args) %(arguments)s #arguments evt = proxy_event(%(call_method)s,arg_list,kw,finished) self.post(evt) finished.wait() if finished.exception_info: raise finished.exception_info[0],finished.exception_info[1] %(results)s #results\n""" %locals()
34865d4577575a214163bb59a5d287b7df2430a9 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/34865d4577575a214163bb59a5d287b7df2430a9/main.py
%(arguments)s
evt = proxy_event(%(call_method)s,arg_list,kw,finished)
self.post(evt)
finished.wait()
if finished.exception_info:
    raise finished.exception_info[0],finished.exception_info[1]
%(arguments)s
ret_val = None
if in_proxy_call:
    ret_val = apply(%(call_method)s, arg_list, kw)
else:
    finished = threading.Event()
    evt = proxy_event(%(call_method)s,arg_list,kw,finished)
    self.post(evt)
    finished.wait()
    if finished.exception_info:
        raise finished.exception_info[0], \
              finished.exception_info[1]
    ret_val = finished._result
body = """def %(method)s(self,*args,**kw): \"\"\"%(documentation)s\"\"\" %(pre_test)s from gui_thread_guts import proxy_event, smart_return %(import_statement)s #import statement finished = threading.Event() # remove proxies if present args = dereference_arglist(args) %(arguments)s #arguments evt = proxy_event(%(call_method)s,arg_list,kw,finished) self.post(evt) finished.wait() if finished.exception_info: raise finished.exception_info[0],finished.exception_info[1] %(results)s #results\n""" %locals()
34865d4577575a214163bb59a5d287b7df2430a9 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/34865d4577575a214163bb59a5d287b7df2430a9/main.py
return hasattr(x, '_proxy_attr__proxy')
hasattr(x, 'x._proxy_attr__dont_mess_with_me_unless_you_know_what_youre_doing')
def is_proxy_attr(x):
    return hasattr(x, '_proxy_attr__proxy')
34865d4577575a214163bb59a5d287b7df2430a9 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/34865d4577575a214163bb59a5d287b7df2430a9/main.py
return self._cdf(k,*args) - self._cdf(k-1,*args)
return self.cdf(k,*args) - self.cdf(k-1,*args)
def _pdf(self, k, *args):
    return self._cdf(k,*args) - self._cdf(k-1,*args)
7a60946bcd81aa5b89c0700f2a0d4143f8139c55 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/7a60946bcd81aa5b89c0700f2a0d4143f8139c55/distributions.py
def logL(self, b, Y, **extra):
def logL(self, b, Y):
def logL(self, b, Y, **extra):
    return -scipy.linalg.norm(self.whiten(Y) - N.dot(self.wdesign, b))**2 / 2.
9ddff40881d697c0b8f7657aabf6bd401fbfe489 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9ddff40881d697c0b8f7657aabf6bd401fbfe489/regression.py
def __init__(self, design, **keywords):
    LikelihoodModel.__init__(self, **keywords)
def __init__(self, design):
    LikelihoodModel.__init__(self)
def __init__(self, design, **keywords):
    LikelihoodModel.__init__(self, **keywords)
    self.initialize(design)
9ddff40881d697c0b8f7657aabf6bd401fbfe489 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9ddff40881d697c0b8f7657aabf6bd401fbfe489/regression.py
def initialize(self, design, **keywords):
def initialize(self, design):
def initialize(self, design, **keywords):
    self.design = design
    self.wdesign = self.whiten(design)
    self.calc_beta = L.pinv(self.wdesign)
    self.normalized_cov_beta = N.dot(self.calc_beta, N.transpose(self.calc_beta))
    self.df_resid = self.wdesign.shape[0] - utils.rank(self.design)
9ddff40881d697c0b8f7657aabf6bd401fbfe489 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9ddff40881d697c0b8f7657aabf6bd401fbfe489/regression.py
def fit(self, Y, **keywords):
def fit(self, Y):
def fit(self, Y, **keywords):
    """
    Full \'fit\' of the model including estimate of covariance matrix,
    (whitened) residuals and scale.
9ddff40881d697c0b8f7657aabf6bd401fbfe489 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9ddff40881d697c0b8f7657aabf6bd401fbfe489/regression.py
def __init__(self, design, rho=0, **keywords):
    LikelihoodModel.__init__(self, **keywords)
def __init__(self, design, rho=0):
def __init__(self, design, rho=0, **keywords):
    LikelihoodModel.__init__(self, **keywords)
    self.rho = rho
    self.initialize(design)
9ddff40881d697c0b8f7657aabf6bd401fbfe489 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9ddff40881d697c0b8f7657aabf6bd401fbfe489/regression.py
self.initialize(design)
ols_model.__init__(self, design)
(context: verbatim duplicate of the __init__ listing above)
9ddff40881d697c0b8f7657aabf6bd401fbfe489 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9ddff40881d697c0b8f7657aabf6bd401fbfe489/regression.py
class wls_model(ar_model):
class wls_model(ols_model):
def whiten(self, X):
    factor = 1. / N.sqrt(1 - self.rho**2)
    return N.concatenate([[X[0]], (X[1:] - self.rho * X[0:-1]) * factor])
9ddff40881d697c0b8f7657aabf6bd401fbfe489 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9ddff40881d697c0b8f7657aabf6bd401fbfe489/regression.py
def __init__(self, design, weights=1, **keywords):
    LikelihoodModel.__init__(self, **keywords)
def __init__(self, design, weights=1):
def __init__(self, design, weights=1, **keywords):
    LikelihoodModel.__init__(self, **keywords)
    self.weights = weights
    self.initialize(design)
9ddff40881d697c0b8f7657aabf6bd401fbfe489 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9ddff40881d697c0b8f7657aabf6bd401fbfe489/regression.py
self.initialize(design)
ols_model.__init__(self, design)
(context: verbatim duplicate of the __init__ listing above)
9ddff40881d697c0b8f7657aabf6bd401fbfe489 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9ddff40881d697c0b8f7657aabf6bd401fbfe489/regression.py
norm_resid = self.resid * N.multiply.outer(N.ones(self.Y.shape[0]), sdd)
return norm_resid
return self.resid * N.multiply.outer(N.ones(self.Y.shape[0]), sdd)
def norm_resid(self):
    """
    Residuals, normalized to have unit length.
9ddff40881d697c0b8f7657aabf6bd401fbfe489 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9ddff40881d697c0b8f7657aabf6bd401fbfe489/regression.py
def predict(self, design):
    """
    Return fitted values from a design matrix.
    """
9ddff40881d697c0b8f7657aabf6bd401fbfe489 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9ddff40881d697c0b8f7657aabf6bd401fbfe489/regression.py
range = abs(upper - lower)
if range == 0.:
rng = abs(upper - lower)
if rng == 0.:
def auto_ticks(data_bounds, bounds_info = default_bounds):
    """ Find locations for axis tick marks.

        Calculate the location for tick marks on an axis. data_bounds
        is a sequence of 2 numbers specifying the maximum and minimum
        values of the data along this axis. bounds_info is a sequence
        of 3 values that specify how the axis end points and tick
        interval are calculated. An array of tick mark locations is
        returned from the function.  The first and last tick entries
        are the axis end points.

        data_bounds -- (lower,upper). The maximum and minimum values of
                       the data long this axis. If any of the settings
                       in bounds_info are 'auto' or 'fit', the axis
                       properties are calculated automatically from
                       these settings.
        bounds_info -- (lower,upper,interval). Each entry can either be
                       a numerical value or a string. If a number,the
                       axis property is set to that value. If the entry
                       is 'auto', the property is calculated
                       automatically. lower and upper can also be 'fit'
                       in which case the axis end points are set equal
                       to the values in data_bounds.
    """
    # pretty ugly code...
    # man, this needs some testing.
    if is_number(bounds_info[0]):
        lower = bounds_info[0]
    else:
        lower = data_bounds[0]
    if is_number(bounds_info[1]):
        upper = bounds_info[1]
    else:
        upper = data_bounds[1]
    interval = bounds_info[2]
    #print 'raw interval:', interval
    if interval in ['linear','auto']:
        range = abs(upper - lower)
        if range == 0.:
            # anything more intelligent to do here?
            interval = .5
            lower,upper = data_bounds + array((-.5,.5))
        if is_base2(range) and is_base2(upper) and range > 4:
            if range == 2:
                interval = 1
            elif range == 4:
                interval = 4
            else:
                interval = range / 4 # maybe we want it 8
        else:
            interval = auto_interval((lower,upper))
    elif type(interval) in [type(0.0),type(0)]:
        pass
    else:
        #print 'interval: ', interval
        raise ValueError, interval + " is an unknown value for interval: " \
                          " expects 'auto' or 'linear', or a number"
    # If the lower or upper bound are set to 'auto',
    # calculate them based on the newly chosen interval.
    #print 'interval:', interval
    auto_lower,auto_upper = auto_bounds(data_bounds,interval)
    # if the lower and upper bound span 0, make sure ticks
    # will hit exactly on zero.
    if lower < 0 and upper > 0:
        hi_ticks = arange(0,auto_upper+interval,interval)
        low_ticks = - arange(interval,-auto_lower+interval,interval)
        ticks = concatenate((low_ticks[::-1],hi_ticks))
    else:
        # othersize the ticks start and end on the lower and
        # upper values.
        ticks = arange(auto_lower,auto_upper+interval,interval)
    if bounds_info[0] == 'fit':
        ticks[0] = lower
    if bounds_info[1] == 'fit':
        ticks[-1] = upper
    return ticks
35ea2c2d87fbbfafca1db7236ceb79a8d525d834 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/35ea2c2d87fbbfafca1db7236ceb79a8d525d834/plot_utility.py
if is_base2(range) and is_base2(upper) and range > 4:
    if range == 2:
if is_base2(rng) and is_base2(upper) and rng > 4:
    if rng == 2:
(context: verbatim duplicate of the auto_ticks listing above)
35ea2c2d87fbbfafca1db7236ceb79a8d525d834 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/35ea2c2d87fbbfafca1db7236ceb79a8d525d834/plot_utility.py
elif range == 4:
elif rng == 4:
(context: verbatim duplicate of the auto_ticks listing above)
35ea2c2d87fbbfafca1db7236ceb79a8d525d834 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/35ea2c2d87fbbfafca1db7236ceb79a8d525d834/plot_utility.py
interval = range / 4
interval = rng / 4
(context: verbatim duplicate of the auto_ticks listing above)
35ea2c2d87fbbfafca1db7236ceb79a8d525d834 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/35ea2c2d87fbbfafca1db7236ceb79a8d525d834/plot_utility.py
hi_ticks = arange(0,auto_upper+interval,interval)
low_ticks = - arange(interval,-auto_lower+interval,interval)
hi_ticks = arange(0,upper+interval,interval)
low_ticks = - arange(interval,-lower+interval,interval)
(context: verbatim duplicate of the auto_ticks listing above)
35ea2c2d87fbbfafca1db7236ceb79a8d525d834 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/35ea2c2d87fbbfafca1db7236ceb79a8d525d834/plot_utility.py
ticks = arange(auto_lower,auto_upper+interval,interval)
ticks = arange(lower,upper+interval,interval)
(context: verbatim duplicate of the auto_ticks listing above)
35ea2c2d87fbbfafca1db7236ceb79a8d525d834 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/35ea2c2d87fbbfafca1db7236ceb79a8d525d834/plot_utility.py
if len(probs) == 1:
if not isscalar(probs) and len(probs) == 1:
def ttest_ind (a, b, axis=0, printit=False, name1='Samp1', name2='Samp2',writemode='a'):
    """
0c04c1e7f2fc7002edae986ffab22eeee61f5d26 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0c04c1e7f2fc7002edae986ffab22eeee61f5d26/stats.py
raise ValueError, 'Unequal length arrays.'
raise ValueError, 'unequal length arrays'
def ttest_rel (a,b,axis=None,printit=False,name1='Samp1',name2='Samp2',writemode='a'):
    """
0c04c1e7f2fc7002edae986ffab22eeee61f5d26 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0c04c1e7f2fc7002edae986ffab22eeee61f5d26/stats.py
t = add.reduce(d,axis) / denom
t = add.reduce(d, axis) / denom
(context: verbatim duplicate of the ttest_rel listing above)
0c04c1e7f2fc7002edae986ffab22eeee61f5d26 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0c04c1e7f2fc7002edae986ffab22eeee61f5d26/stats.py
if len(probs) == 1:
if not isscalar(probs) and len(probs) == 1:
(context: verbatim duplicate of the ttest_rel listing above)
0c04c1e7f2fc7002edae986ffab22eeee61f5d26 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0c04c1e7f2fc7002edae986ffab22eeee61f5d26/stats.py
def _drv_pdf(self, xk, *args):
def _drv_pmf(self, xk, *args):
def _drv_pdf(self, xk, *args):
    try:
        return self.P[xk]
    except KeyError:
        return 0.0
36cabfbe2951857d27dd7276332f9f36c7467b53 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/36cabfbe2951857d27dd7276332f9f36c7467b53/distributions.py
ext = Extension(parent_package+'integrate._quadpack',sources)
ext = Extension(parent_package+'integrate._quadpack',sources,
                library_dirs=atlas_library_dirs,
                libraries=['quadpack','linpack_lite'] + blas_libraries)
def configuration(parent_package=''):
    if parent_package:
        parent_package += '.'
    local_path = get_path(__name__)
    config = default_config_dict()
    if parent_package:
        config['packages'].append(parent_package+'integrate')
    #config['packages'].append(parent_package+'integrate.tests')

    quadpack = glob(os.path.join(local_path,'quadpack','*.f'))
    config['fortran_libraries'].append(('quadpack',{'sources':quadpack}))

    odepack = glob(os.path.join(local_path,'odepack','*.f'))
    config['fortran_libraries'].append(('odepack',{'sources':odepack}))

    # should we try to weed through files and replace with calls to
    # LAPACK routines?
    linpack_lite = glob(os.path.join(local_path,'linpack_lite','*.f'))
    config['fortran_libraries'].append(('linpack_lite',{'sources':linpack_lite}))

    mach = glob(os.path.join(local_path,'mach','*.f'))
    config['fortran_libraries'].append(('mach',{'sources':mach}))

    # Extension
    # flibraries.append(('blas',{'sources':blas}))
    # Note that all extension modules will be linked against all c and
    # fortran libraries.  But it is a good idea to at least comment
    # the dependencies in the section for each subpackage.

    sources = ['_quadpackmodule.c']
    sources = [os.path.join(local_path,x) for x in sources]
    ext = Extension(parent_package+'integrate._quadpack',sources)
    config['ext_modules'].append(ext)

    # need info about blas -- how to get this???
    blas_libraries, lapack_libraries, atlas_library_dirs = get_atlas_info()

    sources = ['_odepackmodule.c']
    sources = [os.path.join(local_path,x) for x in sources]
    ext = Extension(parent_package+'integrate._odepack',sources,
                    library_dirs=atlas_library_dirs,
                    libraries=['odepack','linpack_lite',] + blas_libraries)
    config['ext_modules'].append(ext)

    # vode
    sources = [os.path.join(local_path,'vode.pyf')]
    ext = Extension(parent_package+'integrate.vode', sources,
                    library_dirs=atlas_library_dirs,
                    libraries=['odepack','linpack_lite'] + blas_libraries,
                    )
    config['ext_modules'].append(ext)

    return config
1967d709e5c24c55d63bab3c4571e2599e5ca6db /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/1967d709e5c24c55d63bab3c4571e2599e5ca6db/setup_integrate.py
blas_libraries, lapack_libraries, atlas_library_dirs = get_atlas_info()
(context: verbatim duplicate of the configuration listing above)
1967d709e5c24c55d63bab3c4571e2599e5ca6db /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/1967d709e5c24c55d63bab3c4571e2599e5ca6db/setup_integrate.py
libraries=['odepack','linpack_lite',] + blas_libraries)
libraries=['odepack','linpack_lite'] + blas_libraries)
(context: verbatim duplicate of the configuration listing above)
1967d709e5c24c55d63bab3c4571e2599e5ca6db /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/1967d709e5c24c55d63bab3c4571e2599e5ca6db/setup_integrate.py
)
)
(context: verbatim duplicate of the configuration listing above)
1967d709e5c24c55d63bab3c4571e2599e5ca6db /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/1967d709e5c24c55d63bab3c4571e2599e5ca6db/setup_integrate.py
assert_equal(numstring,array([0.,0.1,0.2,0.3,
assert_almost_equal(numstring,array([0.,0.1,0.2,0.3,
def check_arange(self):
    numstring = arange(0,2.21,.1)
    assert_equal(numstring,array([0.,0.1,0.2,0.3,
                                  0.4,0.5,0.6,0.7,
                                  0.8,0.9,1.,1.1,
                                  1.2,1.3,1.4,1.5,
                                  1.6,1.7,1.8,1.9,
                                  2.,2.1,2.2]))
    numstringa = arange(3,4,.3)
    assert_array_equal(numstringa, array([3.,3.3,3.6,3.9]))
    numstringb = arange(3,27,3)
    assert_array_equal(numstringb,array([3,6,9,12,
                                         15,18,21,24]))
    numstringc = arange(3.3,27,4)
    assert_array_equal(numstringc,array([3.3,7.3,11.3,15.3,
                                         19.3,23.3]))
453fd3caa7bf2311fbdd6285044545bfa24e1a43 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/453fd3caa7bf2311fbdd6285044545bfa24e1a43/test_basic.py
assert_equal(lag2.c,array([1,-2*(k+2),(k+1.)*(k+2.)])/2.0)
assert_equal(lag3.c,array([-1,3*(k+3),-3*(k+2)*(k+3),(k+1)*(k+2)*(k+3)])/6.0)
assert_almost_equal(lag2.c,array([1,-2*(k+2),(k+1.)*(k+2.)])/2.0)
assert_almost_equal(lag3.c,array([-1,3*(k+3),-3*(k+2)*(k+3),(k+1)*(k+2)*(k+3)])/6.0)
def check_genlaguerre(self):
    k = 5*rand()-0.9
    lag0 = genlaguerre(0,k)
    lag1 = genlaguerre(1,k)
    lag2 = genlaguerre(2,k)
    lag3 = genlaguerre(3,k)
    assert_equal(lag0.c,[1])
    assert_equal(lag1.c,[-1,k+1])
    assert_equal(lag2.c,array([1,-2*(k+2),(k+1.)*(k+2.)])/2.0)
    assert_equal(lag3.c,array([-1,3*(k+3),-3*(k+2)*(k+3),(k+1)*(k+2)*(k+3)])/6.0)
453fd3caa7bf2311fbdd6285044545bfa24e1a43 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/453fd3caa7bf2311fbdd6285044545bfa24e1a43/test_basic.py
assert_equal(leg3.c,array([5,0,-3,0])/2.0)
assert_equal(leg4.c,array([35,0,-30,0,3])/8.0)
assert_equal(leg5.c,array([63,0,-70,0,15,0])/8.0)
assert_almost_equal(leg3.c,array([5,0,-3,0])/2.0)
assert_almost_equal(leg4.c,array([35,0,-30,0,3])/8.0)
assert_almost_equal(leg5.c,array([63,0,-70,0,15,0])/8.0)
def check_legendre(self):
    leg0 = legendre(0)
    leg1 = legendre(1)
    leg2 = legendre(2)
    leg3 = legendre(3)
    leg4 = legendre(4)
    leg5 = legendre(5)
    assert_equal(leg0.c,[1])
    assert_equal(leg1.c,[1,0])
    assert_equal(leg2.c,array([3,0,-1])/2.0)
    assert_equal(leg3.c,array([5,0,-3,0])/2.0)
    assert_equal(leg4.c,array([35,0,-30,0,3])/8.0)
    assert_equal(leg5.c,array([63,0,-70,0,15,0])/8.0)
453fd3caa7bf2311fbdd6285044545bfa24e1a43 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/453fd3caa7bf2311fbdd6285044545bfa24e1a43/test_basic.py
axis('equal')
try:
    axis('equal')
except AttributeError:
    _active.client.layout_all()
    axis('equal')
def image(img,x=None,y=None,colormap = 'grey',scale='no'):
    """Colormap should really default to the current colormap..."""
    # check for hold here
    validate_active()
    image = wxplt.image_object(img,x,y,colormap=colormap,scale=scale)
    if not _active.hold in ['on','yes']:
        _active.line_list.data = [] # clear it out
        _active.image_list.data = [] # clear it out
        _active.image_list.append(image)
        axis('equal')
    else:
        _active.image_list.append(image)
    _active.update()
    return _active
59c89b534da7c9908b2c641419b46c4a95d5a335 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/59c89b534da7c9908b2c641419b46c4a95d5a335/interface.py
assert_array_almost_equal(dot(a,v[:,i]),w[i]*v[:,i])
assert_array_almost_equal(dot(a,v[:,i]),w[i]*v[:,i],self.decimal)
def check_heev_complex(self,level=1,suffix=''):
    a= [[1,2-2j,3+7j],[2+2j,2,3],[3-7j,3,5]]
    exact_w=[-6.305141710654834,2.797880950890922,11.50726075976392]
    f = getattr(self.lapack,'heev'+suffix)
    w,v,info=f(a)
    assert not info,`info`
    assert_array_almost_equal(w,exact_w)
    for i in range(3):
        assert_array_almost_equal(dot(a,v[:,i]),w[i]*v[:,i])
fdfb9407f7e177aa1c1fc45311bee6a21c04c162 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/fdfb9407f7e177aa1c1fc45311bee6a21c04c162/esv_tests.py
except AttributeError:
except (AttributeError, TypeError):
def __call__(self,*args):
    for arg in args:
        try:
            n = len(arg)
            if (n==0):
                return self.zerocall(args)
        except AttributeError:
            pass
    return squeeze(arraymap(self.thefunc,args,self.otypes))
8c9572e413d199c0af05a7fcd5bc26460b78eedf /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/8c9572e413d199c0af05a7fcd5bc26460b78eedf/basic.py
sources=['sigtoolsmodule.c','firfilter.c','medianfilter.c'],
sources=['sigtoolsmodule.c', 'firfilter.c','medianfilter.c'],
depends = ['sigtools.h']
def configuration(parent_package='',top_path=None):
    from scipy.distutils.misc_util import Configuration
    config = Configuration('signal', parent_package, top_path)
    config.add_data_dir('tests')
    config.add_extension('sigtools',
                         sources=['sigtoolsmodule.c','firfilter.c','medianfilter.c'],
                         )
    config.add_extension('spline',
                         sources = ['splinemodule.c','S_bspline_util.c','D_bspline_util.c',
                                    'C_bspline_util.c','Z_bspline_util.c','bspline_util.c'],
                         )
    return config
75a5ea6b5949731a870f0154c6f4790e8dd5aa8b /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/75a5ea6b5949731a870f0154c6f4790e8dd5aa8b/setup.py
def inverse(self, z):
def inverse(self, x):
def inverse(self, z):
    return N.power(x, 1. / self.power)
647bb06795ae02adc259586799601702e407e398 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/647bb06795ae02adc259586799601702e407e398/links.py
"""Associated Legendre functions of the second kind, Pmn(z) and its
"""Associated Legendre functions of the first kind, Pmn(z) and its
def lpmn(m,n,z):
    """Associated Legendre functions of the second kind, Pmn(z) and its
    derivative, Pmn'(z) of order m and degree n.  Returns two
    arrays of size (m+1,n+1) containing Pmn(z) and Pmn'(z) for
    all orders from 0..m and degrees from 0..n.

    z can be complex.
    """
    if not isscalar(m) or (abs(m)>n):
        raise ValueError, "m must be <= n."
    if not isscalar(n) or (n<0):
        raise ValueError, "n must be a non-negative integer."
    if not isscalar(z):
        raise ValueError, "z must be scalar."
    if (m < 0):
        mp = -m
        mf,nf = mgrid[0:mp+1,0:n+1]
        sv = errprint(0)
        fixarr = where(mf>nf,0.0,(-1)**mf * gamma(nf-mf+1) / gamma(nf+mf+1))
        sv = errprint(sv)
    else:
        mp = m
    if any(iscomplex(z)):
        p,pd = specfun.clpmn(mp,n,real(z),imag(z))
    else:
        p,pd = specfun.lpmn(mp,n,z)
    if (m < 0):
        p = p * fixarr
        pd = pd * fixarr
    return p,pd
1dd759c870a3397fc61e94997defff945cbe808e /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/1dd759c870a3397fc61e94997defff945cbe808e/basic.py
temp = coo_matrix((s, ij), dims=dims, nzmax=nzmax, \
temp = coo_matrix( s, ij, dims=dims, nzmax=nzmax, \
def __init__(self, arg1, dims=(None,None), nzmax=100, dtype='d', copy=False):
    spmatrix.__init__(self)
    if isdense(arg1):
        # Convert the dense matrix arg1 to CSC format
        if rank(arg1) == 2:
            s = asarray(arg1)
            if s.dtypechar not in 'fdFD':
                # Use a double array as the source (but leave it alone)
                s = s*1.0
            if (rank(s) == 2):
                M, N = s.shape
                dtype = s.dtypechar
                func = getattr(sparsetools, _transtabl[dtype]+'fulltocsc')
                ierr = irow = jcol = 0
                nnz = sum(ravel(s != 0.0))
                a = zeros((nnz,), dtype)
                rowa = zeros((nnz,), 'i')
                ptra = zeros((N+1,), 'i')
                while 1:
                    a, rowa, ptra, irow, jcol, ierr = \
                       func(s, a, rowa, ptra, irow, jcol, ierr)
                    if (ierr == 0): break
                    nnz = nnz + ALLOCSIZE
                    a = resize1d(a, nnz)
                    rowa = resize1d(rowa, nnz)
                self.data = a
                self.rowind = rowa
                self.indptr = ptra
                self.shape = (M, N)
            # s = dok_matrix(arg1).tocsc(nzmax)
            # self.shape = s.shape
            # self.data = s.data
            # self.rowind = s.rowind
            # self.indptr = s.indptr
        else:
            raise ValueError, "dense array does not have rank 1 or 2"
    elif isspmatrix(arg1):
        s = arg1
        if isinstance(s, csc_matrix):
            # do nothing but copy information
            self.shape = s.shape
            if copy:
                self.data = s.data.copy()
                self.rowind = s.rowind.copy()
                self.indptr = s.indptr.copy()
            else:
                self.data = s.data
                self.rowind = s.rowind
                self.indptr = s.indptr
        elif isinstance(s, csr_matrix):
            self.shape = s.shape
            func = getattr(sparsetools, s.ftype+'transp')
            self.data, self.rowind, self.indptr = \
                func(s.shape[1], s.data, s.colind, s.indptr)
        else:
            temp = s.tocsc()
            self.data = temp.data
            self.rowind = temp.rowind
            self.indptr = temp.indptr
            self.shape = temp.shape
    elif type(arg1) == tuple:
        try:
            # Assume it's a tuple of matrix dimensions (M, N)
            (M, N) = arg1
            M = int(M)  # will raise TypeError if (data, ij)
            N = int(N)
            self.data = zeros((nzmax,), dtype)
            self.rowind = zeros((nzmax,), int)
            self.indptr = zeros((N+1,), int)
            self.shape = (M, N)
        except (ValueError, TypeError):
            try:
                # Try interpreting it as (data, ij)
                (s, ij) = arg1
                assert isinstance(ij, ArrayType) and (rank(ij) == 2) and (shape(ij) == (len(s), 2))
                temp = coo_matrix((s, ij), dims=dims, nzmax=nzmax, \
                                  dtype=dtype).tocsc()
                self.shape = temp.shape
                self.data = temp.data
                self.rowind = temp.rowind
                self.indptr = temp.indptr
            except:
                try:
                    # Try interpreting it as (data, rowind, indptr)
                    (s, rowind, indptr) = arg1
                    if copy:
                        self.data = array(s)
                        self.rowind = array(rowind)
                        self.indptr = array(indptr)
                    else:
                        self.data = asarray(s)
                        self.rowind = asarray(rowind)
                        self.indptr = asarray(indptr)
                except:
                    raise ValueError, "unrecognized form for csc_matrix constructor"
    else:
        raise ValueError, "unrecognized form for csc_matrix constructor"
0b2d25691f3b5b478761024814ba8dc28534e0fb /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0b2d25691f3b5b478761024814ba8dc28534e0fb/sparse.py
temp = coo_matrix(s, ijnew, dims=(M, N), nzmax=nzmax, dtype=dtype)
temp = temp.tocsr()
temp = coo_matrix(s, ijnew, dims=dims, nzmax=nzmax, dtype=dtype).tocsr()
def __init__(self, arg1, dims=(None,None), nzmax=100, dtype='d', copy=False):
    spmatrix.__init__(self)
    if isdense(arg1):
        # Convert the dense matrix arg1 to CSR format
        if rank(arg1) == 2:
            s = asarray(arg1)
            ocsc = csc_matrix(transpose(s))
            self.colind = ocsc.rowind
            self.indptr = ocsc.indptr
            self.data = ocsc.data
            self.shape = (ocsc.shape[1], ocsc.shape[0])
0b2d25691f3b5b478761024814ba8dc28534e0fb /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0b2d25691f3b5b478761024814ba8dc28534e0fb/sparse.py
self.colind = temp.rowind
self.colind = temp.colind
(context: verbatim duplicate of the CSR __init__ listing above)
0b2d25691f3b5b478761024814ba8dc28534e0fb /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0b2d25691f3b5b478761024814ba8dc28534e0fb/sparse.py
(context: verbatim duplicate of the CSR __init__ listing above)
0b2d25691f3b5b478761024814ba8dc28534e0fb /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0b2d25691f3b5b478761024814ba8dc28534e0fb/sparse.py
def __init__(self, obj, ij, dims=None, nzmax=None, dtype=None):
def __init__(self, obj, ij_in, dims=None, nzmax=None, dtype=None):
def __init__(self, obj, ij, dims=None, nzmax=None, dtype=None):
    spmatrix.__init__(self)
    try:
        # Assume the first calling convention
        assert len(ij) == 2
        if dims is None:
            M = int(amax(ij[0]))
            N = int(amax(ij[1]))
            self.shape = (M, N)
        else:
            # Use 2 steps to ensure dims has length 2.
            M, N = dims
            self.shape = (M, N)
        self.row = asarray(ij[0], 'i')
        self.col = asarray(ij[1], 'i')
        self.data = asarray(obj, dtype=dtype)
        self.dtypechar = self.data.dtypechar
        if nzmax is None:
            nzmax = len(self.data)
        self.nzmax = nzmax
        self._check()
    except Exception, e:
        raise e, "invalid input format"
0b2d25691f3b5b478761024814ba8dc28534e0fb /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0b2d25691f3b5b478761024814ba8dc28534e0fb/sparse.py
assert len(ij) == 2
if len(ij_in) != 2:
    if isdense( ij_in ) and (ij_in.shape[1] == 2):
        ij = (ij_in[:,0], ij_in[:,1])
    else:
        raise AssertionError
else:
    ij = ij_in
(context: verbatim duplicate of the coo_matrix __init__ listing above)
0b2d25691f3b5b478761024814ba8dc28534e0fb /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0b2d25691f3b5b478761024814ba8dc28534e0fb/sparse.py
self.row = asarray(ij[0], 'i')
self.col = asarray(ij[1], 'i')
self.row = asarray(ij[0])
self.col = asarray(ij[1])
(context: verbatim duplicate of the coo_matrix __init__ listing above)
0b2d25691f3b5b478761024814ba8dc28534e0fb /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0b2d25691f3b5b478761024814ba8dc28534e0fb/sparse.py
raise "could not import pylab"
raise ImportError, "could not import pylab"
def main():
    parser = OptionParser( usage = usage )
    parser.add_option( "-c", "--compare",
                       action = "store_true", dest = "compare", default = False,
                       help = "compare with default scipy.sparse solver [default: %default]" )
    parser.add_option( "-p", "--plot",
                       action = "store_true", dest = "plot", default = False,
                       help = "plot time statistics [default: %default]" )
    parser.add_option( "-d", "--default-url",
                       action = "store_true", dest = "default_url", default = False,
                       help = "use default url [default: %default]" )
    parser.add_option( "-f", "--format",
                       type = type( '' ), dest = "format", default = 'triplet',
                       help = "matrix format [default: %default]" )
    (options, args) = parser.parse_args()

    if (len( args ) >= 1):
        matrixNames = args;
    else:
        parser.print_help(),
        return

    sizes, nnzs, times, errors = [], [], [], []
    legends = ['umfpack', 'sparse.solve']
    for ii, matrixName in enumerate( matrixNames ):
        print '*' * 50
        mtx = readMatrix( matrixName, options )

        sizes.append( mtx.shape )
        nnzs.append( mtx.nnz )
        tts = nm.zeros( (2,), dtype = nm.double )
        times.append( tts )
        err = nm.zeros( (2,2), dtype = nm.double )
        errors.append( err )

        print 'size              : %s (%d nnz)' % (mtx.shape, mtx.nnz)

        sol0 = nm.ones( (mtx.shape[0],), dtype = nm.double )
        rhs = mtx * sol0

        umfpack = um.UmfpackContext()

        tt = time.clock()
        sol = umfpack( um.UMFPACK_A, mtx, rhs, autoTranspose = True )
        tts[0] = time.clock() - tt
        print "umfpack           : %.2f s" % tts[0]

        error = mtx * sol - rhs
        err[0,0] = nla.norm( error )
        print '||Ax-b||          :', err[0,0]

        error = sol0 - sol
        err[0,1] = nla.norm( error )
        print '||x - x_{exact}|| :', err[0,1]

        if options.compare:
            tt = time.clock()
            sol = sp.solve( mtx, rhs )
            tts[1] = time.clock() - tt
            print "sparse.solve      : %.2f s" % tts[1]

            error = mtx * sol - rhs
            err[1,0] = nla.norm( error )
            print '||Ax-b||          :', err[1,0]

            error = sol0 - sol
            err[1,1] = nla.norm( error )
            print '||x - x_{exact}|| :', err[1,1]

    if options.plot:
        try:
            import pylab
        except ImportError:
            raise "could not import pylab"
        times = nm.array( times )
        print times
        pylab.plot( times[:,0], 'b-o' )
        if options.compare:
            pylab.plot( times[:,1], 'r-s' )
        else:
            del legends[1]
        print legends
        ax = pylab.axis()
        y2 = 0.5 * (ax[3] - ax[2])
        xrng = range( len( nnzs ) )
        for ii in xrng:
            yy = y2 + 0.4 * (ax[3] - ax[2])\
                 * nm.sin( ii * 2 * nm.pi / (len( xrng ) - 1) )
            if options.compare:
                pylab.text( ii+0.02, yy,
                            '%s\n%.2e err_umf\n%.2e err_sp'
                            % (sizes[ii], nm.sum( errors[ii][0,:] ),
                               nm.sum( errors[ii][1,:] )) )
            else:
                pylab.text( ii+0.02, yy,
                            '%s\n%.2e err_umf'
                            % (sizes[ii], nm.sum( errors[ii][0,:] )) )
            pylab.plot( [ii, ii], [ax[2], ax[3]], 'k:' )
        pylab.xticks( xrng, ['%d' % (nnzs[ii] ) for ii in xrng] )
        pylab.xlabel( 'nnz' )
        pylab.ylabel( 'time [s]' )
        pylab.legend( legends )
        pylab.axis( [ax[0] - 0.05, ax[1] + 1, ax[2], ax[3]] )
        pylab.show()
cecc9fce65da8b1cada916c7bd787391a1165b6b /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/cecc9fce65da8b1cada916c7bd787391a1165b6b/test_umfpack.py
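The diff above replaces a bare string raise (a Python 2 "string exception", long since removed from the language) with a proper ImportError. A minimal sketch of the same guard in modern syntax; in current Python, raising a plain string is itself a TypeError, so this fix is mandatory rather than stylistic:

    try:
        import pylab  # assumed available when plotting is requested
    except ImportError:
        # re-raise with the message from the record above
        raise ImportError("could not import pylab")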
config = Configuration(None, parent_package, top_path, maintainer = "SciPy Developers", maintainer_email = "scipy-dev@scipy.org", description = "Scientific Algorithms Library for Python", url = "http://www.scipy.org", license = 'BSD', )
config = Configuration(None, parent_package, top_path)
def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration(None, parent_package, top_path, maintainer = "SciPy Developers", maintainer_email = "scipy-dev@scipy.org", description = "Scientific Algorithms Library for Python", url = "http://www.scipy.org", license = 'BSD', ) config.set_options(ignore_setup_xxx_py=True, assume_default_configuration=True, delegate_options_to_subpackages=True, quiet=True) config.add_subpackage('Lib') config.name = 'scipy' # used in generated file names config.add_data_files(('scipy','*.txt')) from version import version as version config.dict_append(version=version) return config
bc6100f003941bc7ae54f63adea8af817acfdf0f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/bc6100f003941bc7ae54f63adea8af817acfdf0f/setup.py
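The record above strips the package metadata (maintainer, url, license) out of the Configuration(...) call; together with the later setup() record, the scripts converge on a split where configuration() only describes the package tree and setup() carries the metadata. A condensed sketch of that split, mirroring the numpy.distutils calls shown in these records (treat the exact keyword set as an assumption of this example):

    def configuration(parent_package='', top_path=None):
        from numpy.distutils.misc_util import Configuration
        config = Configuration(None, parent_package, top_path)
        config.add_subpackage('Lib')
        return config

    if __name__ == '__main__':
        from numpy.distutils.core import setup
        setup(name='scipy',
              maintainer="SciPy Developers",
              license='BSD',
              configuration=configuration)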
config.name = 'scipy'
def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration(None, parent_package, top_path, maintainer = "SciPy Developers", maintainer_email = "scipy-dev@scipy.org", description = "Scientific Algorithms Library for Python", url = "http://www.scipy.org", license = 'BSD', ) config.set_options(ignore_setup_xxx_py=True, assume_default_configuration=True, delegate_options_to_subpackages=True, quiet=True) config.add_subpackage('Lib') config.name = 'scipy' # used in generated file names config.add_data_files(('scipy','*.txt')) from version import version as version config.dict_append(version=version) return config
bc6100f003941bc7ae54f63adea8af817acfdf0f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/bc6100f003941bc7ae54f63adea8af817acfdf0f/setup.py
from version import version as version config.dict_append(version=version)
config.get_version('Lib/version.py')
def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration(None, parent_package, top_path, maintainer = "SciPy Developers", maintainer_email = "scipy-dev@scipy.org", description = "Scientific Algorithms Library for Python", url = "http://www.scipy.org", license = 'BSD', ) config.set_options(ignore_setup_xxx_py=True, assume_default_configuration=True, delegate_options_to_subpackages=True, quiet=True) config.add_subpackage('Lib') config.name = 'scipy' # used in generated file names config.add_data_files(('scipy','*.txt')) from version import version as version config.dict_append(version=version) return config
bc6100f003941bc7ae54f63adea8af817acfdf0f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/bc6100f003941bc7ae54f63adea8af817acfdf0f/setup.py
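Here the hand-rolled `from version import version` is replaced by config.get_version('Lib/version.py'). The helper's internals are not shown in these records, but it roughly has to pull the version string out of that file without importing the installed package. A sketch of one way to do that (an assumption about the behavior, not numpy.distutils' actual code; the function is defined but not invoked):

    def read_version(path='Lib/version.py'):
        # Execute the file in a scratch namespace; it defines `version`.
        namespace = {}
        with open(path) as f:
            exec(f.read(), namespace)
        return namespace['version']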
setup( configuration=configuration )
from version import version as version setup( name = 'scipy', version = version, maintainer = "SciPy Developers", maintainer_email = "scipy-dev@scipy.org", description = "Scientific Algorithms Library for Python", url = "http://www.scipy.org", license = 'BSD', configuration=configuration )
def setup_package(): from numpy.distutils.core import setup from numpy.distutils.misc_util import Configuration old_path = os.getcwd() local_path = os.path.dirname(os.path.abspath(sys.argv[0])) os.chdir(local_path) sys.path.insert(0,local_path) sys.path.insert(0,os.path.join(local_path,'Lib')) # to retrive version try: setup( configuration=configuration ) finally: del sys.path[0] os.chdir(old_path) return
bc6100f003941bc7ae54f63adea8af817acfdf0f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/bc6100f003941bc7ae54f63adea8af817acfdf0f/setup.py
from scipy.distutils.core import setup setup(**configuration(top_path=''))
setup_package()
def configuration(parent_package='',top_path=None): from scipy.distutils.misc_util import Configuration config = Configuration() config.add_subpackage('Lib') return config.todict()
fd9c0fe68b5dc8e687422a1709d66cd508eba282 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/fd9c0fe68b5dc8e687422a1709d66cd508eba282/setup.py
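The entry point above is routed through setup_package(), whose value is the try/finally bookkeeping visible in the earlier context: pin the working directory and sys.path while setup() runs, and always restore them. A self-contained sketch of the pattern (the configuration body is a stand-in for the real tree description shown in these records):

    import os, sys

    def configuration(parent_package='', top_path=None):
        # stand-in; see the configuration() bodies in the records above
        from numpy.distutils.misc_util import Configuration
        return Configuration(None, parent_package, top_path)

    def setup_package():
        from numpy.distutils.core import setup
        old_path = os.getcwd()
        local_path = os.path.dirname(os.path.abspath(sys.argv[0]))
        os.chdir(local_path)
        sys.path.insert(0, local_path)
        try:
            setup(configuration=configuration)
        finally:
            del sys.path[0]
            os.chdir(old_path)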
winfun = blackmanharris
winfunc = blackmanharris
def get_window(window,Nx,fftbins=1): """Return a window of length Nx and type window. If fftbins is 1, create a "periodic" window ready to use with ifftshift and be multiplied by the result of an fft (SEE ALSO fftfreq). Window types: boxcar, triang, blackman, hamming, hanning, bartlett, parzen, bohman, blackmanharris, nuttall, barthann, kaiser (needs beta), gaussian (needs std), general_gaussian (needs power, width), slepian (needs width) If the window requires no parameters, then it can be a string. If the window requires parameters, the window argument should be a tuple with the first argument the string name of the window, and the next arguments the needed parameters. If window is a floating point number, it is interpreted as the beta parameter of the kaiser window. """ sym = not fftbins try: beta = float(window) except (TypeError, ValueError): args = () if isinstance(window, types.TupleType): winstr = window[0] if len(window) > 1: args = window[1:] elif isinstance(window, types.StringType): if window in ['kaiser', 'ksr', 'gaussian', 'gauss', 'gss', 'general gaussian', 'general_gaussian', 'general gauss', 'general_gauss', 'ggs']: raise ValueError, "That window needs a parameter -- pass a tuple" else: winstr = window if winstr in ['blackman', 'black', 'blk']: winfunc = blackman elif winstr in ['triangle', 'triang', 'tri']: winfunc = triang elif winstr in ['hamming', 'hamm', 'ham']: winfunc = hamming elif winstr in ['bartlett', 'bart', 'brt']: winfunc = bartlett elif winstr in ['hanning', 'hann', 'han']: winfunc = hanning elif winstr in ['blackmanharris', 'blackharr','bkh']: winfun = blackmanharris elif winstr in ['parzen', 'parz', 'par']: winfun = parzen elif winstr in ['bohman', 'bman', 'bmn']: winfunc = bohman elif winstr in ['nuttall', 'nutl', 'nut']: winfunc = nuttall elif winstr in ['barthann', 'brthan', 'bth']: winfunc = barthann elif winstr in ['kaiser', 'ksr']: winfunc = kaiser elif winstr in ['gaussian', 'gauss', 'gss']: winfunc = gaussian elif winstr in ['general gaussian', 'general_gaussian', 'general gauss', 'general_gauss', 'ggs']: winfunc = general_gaussian elif winstr in ['boxcar', 'box', 'ones']: winfunc = boxcar elif winstr in ['slepian', 'slep', 'optimal', 'dss']: winfunc = slepian else: raise ValueError, "Unknown window type." params = (Nx,)+args + (sym,) else: winfunc = kaiser params = (Nx,beta,sym) return winfunc(*params)
63d8e8d7616b114c10659baa34e30d182a840851 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/63d8e8d7616b114c10659baa34e30d182a840851/signaltools.py
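The bug fixed above is subtle: because `winfunc` is assigned in every other branch, the misspelled `winfun =` branch leaves `winfunc` unbound, so get_window('blackmanharris', N) dies with UnboundLocalError instead of merely picking the wrong window. A minimal reproduction with a hypothetical dispatcher:

    def pick(name):
        if name == 'a':
            func = len          # correct spelling
        elif name == 'b':
            fun = len           # typo: binds the wrong name
        return func(name)       # fails for 'b': func was never bound

    print(pick('a'))            # 1
    try:
        pick('b')
    except UnboundLocalError as e:
        print(e)                # local variable 'func' referenced before assignment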
winfun = parzen
winfunc = parzen
def get_window(window,Nx,fftbins=1): """Return a window of length Nx and type window. If fftbins is 1, create a "periodic" window ready to use with ifftshift and be multiplied by the result of an fft (SEE ALSO fftfreq). Window types: boxcar, triang, blackman, hamming, hanning, bartlett, parzen, bohman, blackmanharris, nuttall, barthann, kaiser (needs beta), gaussian (needs std), general_gaussian (needs power, width), slepian (needs width) If the window requires no parameters, then it can be a string. If the window requires parameters, the window argument should be a tuple with the first argument the string name of the window, and the next arguments the needed parameters. If window is a floating point number, it is interpreted as the beta parameter of the kaiser window. """ sym = not fftbins try: beta = float(window) except (TypeError, ValueError): args = () if isinstance(window, types.TupleType): winstr = window[0] if len(window) > 1: args = window[1:] elif isinstance(window, types.StringType): if window in ['kaiser', 'ksr', 'gaussian', 'gauss', 'gss', 'general gaussian', 'general_gaussian', 'general gauss', 'general_gauss', 'ggs']: raise ValueError, "That window needs a parameter -- pass a tuple" else: winstr = window if winstr in ['blackman', 'black', 'blk']: winfunc = blackman elif winstr in ['triangle', 'triang', 'tri']: winfunc = triang elif winstr in ['hamming', 'hamm', 'ham']: winfunc = hamming elif winstr in ['bartlett', 'bart', 'brt']: winfunc = bartlett elif winstr in ['hanning', 'hann', 'han']: winfunc = hanning elif winstr in ['blackmanharris', 'blackharr','bkh']: winfun = blackmanharris elif winstr in ['parzen', 'parz', 'par']: winfun = parzen elif winstr in ['bohman', 'bman', 'bmn']: winfunc = bohman elif winstr in ['nuttall', 'nutl', 'nut']: winfunc = nuttall elif winstr in ['barthann', 'brthan', 'bth']: winfunc = barthann elif winstr in ['kaiser', 'ksr']: winfunc = kaiser elif winstr in ['gaussian', 'gauss', 'gss']: winfunc = gaussian elif winstr in ['general gaussian', 'general_gaussian', 'general gauss', 'general_gauss', 'ggs']: winfunc = general_gaussian elif winstr in ['boxcar', 'box', 'ones']: winfunc = boxcar elif winstr in ['slepian', 'slep', 'optimal', 'dss']: winfunc = slepian else: raise ValueError, "Unknown window type." params = (Nx,)+args + (sym,) else: winfunc = kaiser params = (Nx,beta,sym) return winfunc(*params)
63d8e8d7616b114c10659baa34e30d182a840851 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/63d8e8d7616b114c10659baa34e30d182a840851/signaltools.py
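This record fixes the same typo in the parzen branch. A table-driven rewrite makes this whole class of typo impossible, since every alias maps to a single canonical entry and unknown names fail in exactly one place. A sketch (window names stand in for the real callables):

    _WINDOWS = {
        ('blackmanharris', 'blackharr', 'bkh'): 'blackmanharris',
        ('parzen', 'parz', 'par'):              'parzen',
        ('bohman', 'bman', 'bmn'):              'bohman',
    }
    ALIAS = {alias: fn for aliases, fn in _WINDOWS.items() for alias in aliases}

    def lookup(name):
        try:
            return ALIAS[name]
        except KeyError:
            raise ValueError("Unknown window type: %r" % (name,))

    print(lookup('parz'))   # parzen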
bytestr = str(var.itemsize()*Numeric.product(var.shape))
bytestr = str(var.itemsize*Numeric.product(var.shape))
def who(vardict=None): """Print the scipy arrays in the given dictionary (or globals() if None). """ if vardict is None: frame = sys._getframe().f_back vardict = frame.f_globals sta = [] cache = {} for name in vardict.keys(): if isinstance(vardict[name],Numeric.ArrayType): var = vardict[name] idv = id(var) if idv in cache.keys(): namestr = name + " (%s)" % cache[idv] original=0 else: cache[idv] = name namestr = name original=1 shapestr = " x ".join(map(str, var.shape)) bytestr = str(var.itemsize()*Numeric.product(var.shape)) sta.append([namestr, shapestr, bytestr, typename(var.dtypechar), original]) maxname = 0 maxshape = 0 maxbyte = 0 totalbytes = 0 for k in range(len(sta)): val = sta[k] if maxname < len(val[0]): maxname = len(val[0]) if maxshape < len(val[1]): maxshape = len(val[1]) if maxbyte < len(val[2]): maxbyte = len(val[2]) if val[4]: totalbytes += int(val[2]) max = Numeric.maximum if len(sta) > 0: sp1 = max(10,maxname) sp2 = max(10,maxshape) sp3 = max(10,maxbyte) prval = "Name %s Shape %s Bytes %s Type" % (sp1*' ', sp2*' ', sp3*' ') print prval + "\n" + "="*(len(prval)+5) + "\n" for k in range(len(sta)): val = sta[k] print "%s %s %s %s %s %s %s" % (val[0], ' '*(sp1-len(val[0])+4), val[1], ' '*(sp2-len(val[1])+5), val[2], ' '*(sp3-len(val[2])+5), val[3]) print "\nUpper bound on total bytes = %d" % totalbytes return
6abca17924a00fc2d4eaca2a48f22355760c96e3 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/6abca17924a00fc2d4eaca2a48f22355760c96e3/common.py
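The diff above tracks the Numeric-to-numpy transition: itemsize became a plain attribute rather than a method. A quick numpy check of the byte-count arithmetic who() performs:

    import numpy as np
    a = np.zeros((10, 3), dtype=np.float64)
    print(a.itemsize)              # 8
    print(a.itemsize * a.size)     # 240, same as a.nbytes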
def who(vardict=None): """Print the scipy arrays in the given dictionary (or globals() if None). """ if vardict is None: frame = sys._getframe().f_back vardict = frame.f_globals sta = [] cache = {} for name in vardict.keys(): if isinstance(vardict[name],Numeric.ArrayType): var = vardict[name] idv = id(var) if idv in cache.keys(): namestr = name + " (%s)" % cache[idv] original=0 else: cache[idv] = name namestr = name original=1 shapestr = " x ".join(map(str, var.shape)) bytestr = str(var.itemsize()*Numeric.product(var.shape)) sta.append([namestr, shapestr, bytestr, typename(var.dtypechar), original]) maxname = 0 maxshape = 0 maxbyte = 0 totalbytes = 0 for k in range(len(sta)): val = sta[k] if maxname < len(val[0]): maxname = len(val[0]) if maxshape < len(val[1]): maxshape = len(val[1]) if maxbyte < len(val[2]): maxbyte = len(val[2]) if val[4]: totalbytes += int(val[2]) max = Numeric.maximum if len(sta) > 0: sp1 = max(10,maxname) sp2 = max(10,maxshape) sp3 = max(10,maxbyte) prval = "Name %s Shape %s Bytes %s Type" % (sp1*' ', sp2*' ', sp3*' ') print prval + "\n" + "="*(len(prval)+5) + "\n" for k in range(len(sta)): val = sta[k] print "%s %s %s %s %s %s %s" % (val[0], ' '*(sp1-len(val[0])+4), val[1], ' '*(sp2-len(val[1])+5), val[2], ' '*(sp3-len(val[2])+5), val[3]) print "\nUpper bound on total bytes = %d" % totalbytes return
6abca17924a00fc2d4eaca2a48f22355760c96e3 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/6abca17924a00fc2d4eaca2a48f22355760c96e3/common.py
def test(level=10): from numpy.test.testing import module_test module_test(__name__,__file__,level=level) def test_suite(level=1): from numpy.test.testing import module_test_suite return module_test_suite(__name__,__file__,level=level)
def test(level=10): from numpy.test.testing import module_test module_test(__name__,__file__,level=level)
6abca17924a00fc2d4eaca2a48f22355760c96e3 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/6abca17924a00fc2d4eaca2a48f22355760c96e3/common.py
config.add_data_files(('gistdata',xplt_files))
config.add_data_dir('gistdata') config.add_data_dir((os.path.join(config.path_in_package,'gistdata'), os.path.abspath(config.paths('src/g')[0])))
def get_playsource(extension,build_dir): if windows: playsource = winsource + allsource elif cygwin: playsource = unixsource + winsource + allsource elif macosx: playsource = unixsource + macsource + allsource else: playsource = unixsource + x11source + allsource sources = [os.path.join(local_path,n) for n in playsource]
8788c1da4dd4951ebe926e36cf9e82a96503e43d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/8788c1da4dd4951ebe926e36cf9e82a96503e43d/setup.py
print bounds_info print data_bounds print ticks
def auto_ticks(data_bounds, bounds_info = default_bounds): """ Find locations for axis tick marks. Calculate the location for tick marks on an axis. data_bounds is a sequence of 2 numbers specifying the maximum and minimum values of the data along this axis. bounds_info is a sequence of 3 values that specify how the axis end points and tick interval are calculated. An array of tick mark locations is returned from the function. The first and last tick entries are the axis end points. data_bounds -- (lower,upper). The maximum and minimum values of the data long this axis. If any of the settings in bounds_info are 'auto' or 'fit', the axis properties are calculated automatically from these settings. bounds_info -- (lower,upper,interval). Each entry can either be a numerical value or a string. If a number,the axis property is set to that value. If the entry is 'auto', the property is calculated automatically. lower and upper can also be 'fit' in which case the axis end points are set equal to the values in data_bounds. """ # pretty ugly code... # man, this needs some testing. if is_number(bounds_info[0]): lower = bounds_info[0] else: lower = data_bounds[0] if is_number(bounds_info[1]): upper = bounds_info[1] else: upper = data_bounds[1] interval = bounds_info[2] #print 'raw interval:', interval if interval in ['linear','auto']: rng = abs(upper - lower) if rng == 0.: # anything more intelligent to do here? interval = .5 lower,upper = data_bounds + array((-.5,.5)) if is_base2(rng) and is_base2(upper) and rng > 4: if rng == 2: interval = 1 elif rng == 4: interval = 4 else: interval = rng / 4 # maybe we want it 8 else: interval = auto_interval((lower,upper)) elif type(interval) in [type(0.0),type(0)]: pass else: #print 'interval: ', interval raise ValueError, interval + " is an unknown value for interval: " \ " expects 'auto' or 'linear', or a number" # If the lower or upper bound are set to 'auto', # calculate them based on the newly chosen interval. #print 'interval:', interval auto_lower,auto_upper = auto_bounds(data_bounds,interval) if bounds_info[0] == 'auto': lower = auto_lower if bounds_info[1] == 'auto': upper = auto_upper # if the lower and upper bound span 0, make sure ticks # will hit exactly on zero. if lower < 0 and upper > 0: hi_ticks = arange(0,upper+interval,interval) low_ticks = - arange(interval,-lower+interval,interval) ticks = concatenate((low_ticks[::-1],hi_ticks)) else: # othersize the ticks start and end on the lower and # upper values. ticks = arange(lower,upper+interval,interval) if bounds_info[0] == 'fit': ticks[0] = lower if bounds_info[1] == 'fit': ticks[-1] = upper print bounds_info print data_bounds print ticks return ticks
e66ba78e3d68e7540df692b5e38d8b8334bab9b7 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/e66ba78e3d68e7540df692b5e38d8b8334bab9b7/plot_utility.py
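The removed lines above are leftover debug prints at the end of auto_ticks. The interesting part of that function is how it forces a tick exactly at zero when the bounds span it, by growing two arange() runs outward from zero and concatenating them. A minimal numpy sketch of that idea:

    import numpy as np

    def ticks_through_zero(lower, upper, interval):
        hi  = np.arange(0, upper + interval, interval)
        low = -np.arange(interval, -lower + interval, interval)
        return np.concatenate((low[::-1], hi))

    print(ticks_through_zero(-2.5, 3.0, 1.0))
    # [-3. -2. -1.  0.  1.  2.  3.]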
ext_args['define_macros'] = [('ATLAS_INFO','"%s"' % atlas_version)]
if sys.platform=='win32': ext_args['define_macros'] = [('ATLAS_INFO','"\\"%s\\""' % atlas_version)] else: ext_args['define_macros'] = [('ATLAS_INFO','"%s"' % atlas_version)]
def configuration(parent_package='',parent_path=None): from scipy_distutils.core import Extension from scipy_distutils.misc_util import fortran_library_item, dot_join,\ SourceGenerator, get_path, default_config_dict, get_build_temp from scipy_distutils.system_info import get_info,dict_append,\ AtlasNotFoundError,LapackNotFoundError,BlasNotFoundError,\ LapackSrcNotFoundError,BlasSrcNotFoundError package = 'linalg' from interface_gen import generate_interface config = default_config_dict(package,parent_package) local_path = get_path(__name__,parent_path) abs_local_path = os.path.abspath(local_path) no_atlas = 0 atlas_info = get_info('atlas_threads') if ('ATLAS_WITHOUT_LAPACK',None) in atlas_info.get('define_macros',[]): atlas_info = get_info('lapack_atlas_threads') or atlas_info if not atlas_info: atlas_info = get_info('atlas') if atlas_info: if ('ATLAS_WITHOUT_LAPACK',None) in atlas_info.get('define_macros',[]): atlas_info = get_info('lapack_atlas') or atlas_info #atlas_info = {} # uncomment if ATLAS is available but want to use # Fortran LAPACK/BLAS; useful for testing f_libs = [] atlas_version = None temp_path = os.path.join(get_build_temp(),'linalg','atlas_version') dir_util.mkpath(temp_path,verbose=1) atlas_version_file = os.path.join(temp_path,'atlas_version') if atlas_info: if os.path.isfile(atlas_version_file): atlas_version = open(atlas_version_file).read() print 'ATLAS version',atlas_version if atlas_info and atlas_version is None: # Try to determine ATLAS version shutil.copy(os.path.join(local_path,'atlas_version.c'),temp_path) cur_dir = os.getcwd() os.chdir(temp_path) cmd = '%s %s --verbose build_ext --inplace --force'%\ (sys.executable, os.path.join(abs_local_path,'setup_atlas_version.py')) print cmd s,o=run_command(cmd) if not s: cmd = sys.executable+' -c "import atlas_version"' print cmd s,o=run_command(cmd) if not s: m = re.match(r'ATLAS version (?P<version>\d+[.]\d+[.]\d+)',o) if m: atlas_version = m.group('version') print 'ATLAS version',atlas_version if atlas_version is None: if re.search(r'undefined symbol: ATL_buildinfo',o,re.M): atlas_version = '3.2.1_pre3.3.6' print 'ATLAS version',atlas_version else: print o else: print o os.chdir(cur_dir) if atlas_version is None: print 'Failed to determine ATLAS version' else: f = open(atlas_version_file,'w') f.write(atlas_version) f.close() if atlas_info: if ('ATLAS_WITH_LAPACK_ATLAS',None) in atlas_info.get('define_macros',[]): lapack_info = get_info('lapack') if not lapack_info: warnings.warn(LapackNotFoundError.__doc__) lapack_src_info = get_info('lapack_src') if not lapack_src_info: raise LapackSrcNotFoundError,LapackSrcNotFoundError.__doc__ dict_append(lapack_info,libraries=['lapack_src']) f_libs.append(fortran_library_item(\ 'lapack_src',lapack_src_info['sources'], )) dict_append(atlas_info,**lapack_info) elif ('ATLAS_WITHOUT_LAPACK',None) in atlas_info.get('define_macros',[]): lapack_info = get_info('lapack') if not lapack_info: warnings.warn(LapackNotFoundError.__doc__) lapack_src_info = get_info('lapack_src') if not lapack_src_info: raise LapackSrcNotFoundError,LapackSrcNotFoundError.__doc__ dict_append(lapack_info,libraries=['lapack_src']) f_libs.append(fortran_library_item(\ 'lapack_src',lapack_src_info['sources'], )) dict_append(lapack_info,**atlas_info) atlas_info = lapack_info blas_info,lapack_info = {},{} if not atlas_info: warnings.warn(AtlasNotFoundError.__doc__) no_atlas = 1 blas_info = get_info('blas') #blas_info = {} # test building BLAS from sources. 
if not blas_info: warnings.warn(BlasNotFoundError.__doc__) blas_src_info = get_info('blas_src') if not blas_src_info: raise BlasSrcNotFoundError,BlasSrcNotFoundError.__doc__ dict_append(blas_info,libraries=['blas_src']) f_libs.append(fortran_library_item(\ 'blas_src',blas_src_info['sources'] + \ [os.path.join(local_path,'src','fblaswrap.f')], )) lapack_info = get_info('lapack') #lapack_info = {} # test building LAPACK from sources. if not lapack_info: warnings.warn(LapackNotFoundError.__doc__) lapack_src_info = get_info('lapack_src') if not lapack_src_info: raise LapackSrcNotFoundError,LapackSrcNotFoundError.__doc__ dict_append(lapack_info,libraries=['lapack_src']) f_libs.append(fortran_library_item(\ 'lapack_src',lapack_src_info['sources'], )) dict_append(atlas_info,**lapack_info) dict_append(atlas_info,**blas_info) target_dir = '' skip_names = {'clapack':[],'flapack':[],'cblas':[],'fblas':[]} if skip_single_routines: target_dir = 'dbl' skip_names['clapack'].extend(\ 'sgesv cgesv sgetrf cgetrf sgetrs cgetrs sgetri cgetri'\ ' sposv cposv spotrf cpotrf spotrs cpotrs spotri cpotri'\ ' slauum clauum strtri ctrtri'.split()) skip_names['flapack'].extend(skip_names['clapack']) skip_names['flapack'].extend(\ 'sgesdd cgesdd sgelss cgelss sgeqrf cgeqrf sgeev cgeev'\ ' sgegv cgegv ssyev cheev slaswp claswp sgees cgees' ' sggev cggev'.split()) skip_names['cblas'].extend('saxpy caxpy'.split()) skip_names['fblas'].extend(skip_names['cblas']) skip_names['fblas'].extend(\ 'srotg crotg srotmg srot csrot srotm sswap cswap sscal cscal'\ ' csscal scopy ccopy sdot cdotu cdotc snrm2 scnrm2 sasum scasum'\ ' isamax icamax sgemv cgemv chemv ssymv strmv ctrmv'\ ' sgemm cgemm'.split()) if using_lapack_blas: target_dir = os.path.join(target_dir,'blas') skip_names['fblas'].extend(\ 'drotmg srotmg drotm srotm'.split()) if atlas_version=='3.2.1_pre3.3.6': target_dir = os.path.join(target_dir,'atlas321') skip_names['clapack'].extend(\ 'sgetri dgetri cgetri zgetri spotri dpotri cpotri zpotri'\ ' slauum dlauum clauum zlauum strtri dtrtri ctrtri ztrtri'.split()) elif atlas_version>'3.4.0' and atlas_version<='3.5.12': skip_names['clapack'].extend('cpotrf zpotrf'.split()) # atlas_version: ext_args = {'name':dot_join(parent_package,package,'atlas_version'), 'sources':[os.path.join(local_path,'atlas_version.c')]} if no_atlas: ext_args['define_macros'] = [('NO_ATLAS_INFO',1)] else: ext_args['libraries'] = [atlas_info['libraries'][-1]] ext_args['library_dirs'] = atlas_info['library_dirs'][:] if atlas_version is None: ext_args['define_macros'] = [('NO_ATLAS_INFO',2)] else: ext_args['define_macros'] = [('ATLAS_INFO','"%s"' % atlas_version)] ext = Extension(**ext_args) config['ext_modules'].append(ext) # In case any of atlas|lapack|blas libraries are not available def generate_empty_pyf(target,sources,generator,skips): name = os.path.basename(target)[:-4] f = open(target,'w') f.write('python module '+name+'\n') f.write('usercode void empty_module(void) {}\n') f.write('interface\n') f.write('subroutine empty_module()\n') f.write('intent(c) empty_module\n') f.write('end subroutine empty_module\n') f.write('end interface\nend python module'+name+'\n') f.close() # fblas: def generate_fblas_pyf(target,sources,generator,skips): generator('fblas',sources[0],target,skips) if not (blas_info or atlas_info): generate_fblas_pyf = generate_empty_pyf sources = ['generic_fblas.pyf', 'generic_fblas1.pyf', 'generic_fblas2.pyf', 'generic_fblas3.pyf', os.path.join('src','fblaswrap.f')] sources = [os.path.join(local_path,s) for s in sources] fblas_pyf 
= SourceGenerator(generate_fblas_pyf, os.path.join(target_dir,'fblas.pyf'), sources,generate_interface, skip_names['fblas']) ext_args = {'name':dot_join(parent_package,package,'fblas'), 'sources':[fblas_pyf,sources[-1]], 'depends': sources[:4]} dict_append(ext_args,**atlas_info) ext = Extension(**ext_args) ext.need_fcompiler_opts = 1 config['ext_modules'].append(ext) # cblas: def generate_cblas_pyf(target,sources,generator,skips): generator('cblas',sources[0],target,skips) if no_atlas: generate_cblas_pyf = generate_empty_pyf sources = ['generic_cblas.pyf', 'generic_cblas1.pyf'] sources = [os.path.join(local_path,s) for s in sources] cblas_pyf = SourceGenerator(generate_cblas_pyf, os.path.join(target_dir,'cblas.pyf'), sources,generate_interface, skip_names['cblas']) ext_args = {'name':dot_join(parent_package,package,'cblas'), 'sources':[cblas_pyf], 'depends':sources} dict_append(ext_args,**atlas_info) ext = Extension(**ext_args) ext.need_fcompiler_opts = 1 config['ext_modules'].append(ext) # flapack: def generate_flapack_pyf(target,sources,generator,skips): generator('flapack',sources[0],target,skips) if not (lapack_info or atlas_info): generate_flapack_pyf = generate_empty_pyf sources = ['generic_flapack.pyf','flapack_user_routines.pyf'] sources = [os.path.join(local_path,s) for s in sources] flapack_pyf = SourceGenerator(generate_flapack_pyf, os.path.join(target_dir,'flapack.pyf'), sources,generate_interface, skip_names['flapack']) ext_args = {'name':dot_join(parent_package,package,'flapack'), 'sources':[flapack_pyf], 'depends':sources} dict_append(ext_args,**atlas_info) ext = Extension(**ext_args) ext.need_fcompiler_opts = 1 config['ext_modules'].append(ext) # clapack: def generate_clapack_pyf(target,sources,generator,skips): generator('clapack',sources[0],target,skips) if no_atlas: generate_clapack_pyf = generate_empty_pyf sources = ['generic_clapack.pyf'] sources = [os.path.join(local_path,s) for s in sources] clapack_pyf = SourceGenerator(generate_clapack_pyf, os.path.join(target_dir,'clapack.pyf'), sources,generate_interface, skip_names['clapack']) ext_args = {'name':dot_join(parent_package,package,'clapack'), 'sources':[clapack_pyf], 'depends':sources} dict_append(ext_args,**atlas_info) ext = Extension(**ext_args) ext.need_fcompiler_opts = 1 config['ext_modules'].append(ext) # _flinalg: flinalg = [] for f in ['det.f','lu.f', #'wrappers.c','inv.f', ]: flinalg.append(os.path.join(local_path,'src',f)) ext_args = {'name':dot_join(parent_package,package,'_flinalg'), 'sources':flinalg} dict_append(ext_args,**atlas_info) config['ext_modules'].append(Extension(**ext_args)) # calc_lwork: ext_args = {'name':dot_join(parent_package,package,'calc_lwork'), 'sources':[os.path.join(local_path,'src','calc_lwork.f')], } dict_append(ext_args,**atlas_info) config['ext_modules'].append(Extension(**ext_args)) config['fortran_libraries'].extend(f_libs) return config
2b3614319420f19f7e0bbe3e4ab259e95f2e5474 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/2b3614319420f19f7e0bbe3e4ab259e95f2e5474/setup_linalg.py
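The fix above double-escapes the quotes around ATLAS_INFO on win32 so that, after the compiler driver strips one layer of quoting, the macro still expands to a C string literal. The two spellings from the record, side by side (the platform behavior is the record's claim, simply restated here):

    import sys
    atlas_version = '3.6.0'   # example value
    if sys.platform == 'win32':
        macros = [('ATLAS_INFO', '"\\"%s\\""' % atlas_version)]
    else:
        macros = [('ATLAS_INFO', '"%s"' % atlas_version)]
    print(macros)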
assert_array_equal(y.imag,real(y))
assert_array_equal(y.imag,imag(y))
def check_cmplx(self): y = rand(10,)+1j*rand(10,) assert_array_equal(y.imag,real(y))
59d6286a5b1d133debddbb784988df02f719fd5c /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/59d6286a5b1d133debddbb784988df02f719fd5c/test_misc.py
z = array([-1,0,1]))
z = array([-1,0,1])
def check_fail(self): z = array([-1,0,1])) res = iscomplex(z) assert(not sometrue(res))
59d6286a5b1d133debddbb784988df02f719fd5c /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/59d6286a5b1d133debddbb784988df02f719fd5c/test_misc.py
z = array([-1,0,1j]))
z = array([-1,0,1j])
def check_pass(self): z = array([-1,0,1j])) res = isreal(z) assert_array_equal(res,[1,1,0])
59d6286a5b1d133debddbb784988df02f719fd5c /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/59d6286a5b1d133debddbb784988df02f719fd5c/test_misc.py
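The last three test records are plain syntax repairs (a wrong function name and two doubled parentheses); the behavior under test is simple and still holds in current numpy:

    import numpy as np
    z = np.array([-1, 0, 1j])
    print(np.isreal(z))                               # [ True  True False]
    print(np.iscomplex(np.array([-1, 0, 1])).any())   # False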
class test_real_if_close(unittest.TestCase): def check_basic(self): a = randn(10) b = real_if_close(a+1e-15j) assert(array_is_real(b)) assert_array_equal(a,b)
def check_trailing_skip(self): a= array([0,0,1,0,2,3,0,4,0]) res = trim_zeros(a) assert_array_equal(res,array([1,0,2,3,0,4]))
59d6286a5b1d133debddbb784988df02f719fd5c /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/59d6286a5b1d133debddbb784988df02f719fd5c/test_misc.py
def bayes_mvs(data,alpha=0.90): """Return Bayesian confidence intervals for the mean, var, and std. Assumes 1-d data all has same mean and variance and uses Jeffrey's prior for variance and std. alpha gives the probability that the returned interval contains the true parameter. Uses peak of conditional pdf as starting center. Returns (peak, (a, b)) for each of mean, variance and standard deviation. """ x = ravel(data) n = len(x) assert(n > 1) n = float(n) xbar = sb.add.reduce(x)/n C = sb.add.reduce(x*x)/n - xbar*xbar # fac = sqrt(C/(n-1)) tval = distributions.t.ppf((1+alpha)/2.0,n-1) delta = fac*tval ma = xbar - delta mb = xbar + delta mp = xbar # fac = n*C/2.0 peak = 2/(n+1.) a = (n-1)/2.0 F_peak = distributions.invgamma.cdf(peak,a) q1 = F_peak - alpha/2.0 q2 = F_peak + alpha/2.0 if (q1 < 0): # non-symmetric area q2 = alpha va = 0.0 else: va = fac*distributions.invgamma.ppf(q1,a) vb = fac*distributions.invgamma.ppf(q2,a) vp = peak*fac # fac = sqrt(fac) peak = sqrt(2./n) F_peak = distributions.gengamma.cdf(peak,a,-2) q1 = F_peak - alpha/2.0 q2 = F_peak + alpha/2.0 if (q1 < 0): q2 = alpha sta = 0.0 else: sta = fac*distributions.gengamma.ppf(q1,a,-2) stb = fac*distributions.gengamma.ppf(q2,a,-2) stp = peak*fac return (mp,(ma,mb)),(vp,(va,vb)),(stp,(sta,stb))
c2b82a6c3287b6c99c72891a654ee06c261efaf6 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/c2b82a6c3287b6c99c72891a654ee06c261efaf6/morestats.py
peak = 2/(n+1.) a = (n-1)/2.0 F_peak = distributions.invgamma.cdf(peak,a)
a = (n-1)/2.0 if (n > 3): peak = 2/(n-3.0) F_peak = distributions.invgamma.cdf(peak,a) else: F_peak = -1.0 if (F_peak < alpha/2.0): peak = distributions.invgamma.ppf(0.5,a) F_peak = 0.5
def bayes_mvs(data,alpha=0.90): """Return Bayesian confidence intervals for the mean, var, and std. Assumes 1-d data all has same mean and variance and uses Jeffrey's prior for variance and std. alpha gives the probability that the returned interval contains the true parameter. Uses peak of conditional pdf as starting center. Returns (peak, (a, b)) for each of mean, variance and standard deviation. """ x = ravel(data) n = len(x) assert(n > 1) n = float(n) xbar = sb.add.reduce(x)/n C = sb.add.reduce(x*x)/n - xbar*xbar # fac = sqrt(C/(n-1)) tval = distributions.t.ppf((1+alpha)/2.0,n-1) delta = fac*tval ma = xbar - delta mb = xbar + delta mp = xbar # fac = n*C/2.0 peak = 2/(n+1.) a = (n-1)/2.0 F_peak = distributions.invgamma.cdf(peak,a) q1 = F_peak - alpha/2.0 q2 = F_peak + alpha/2.0 if (q1 < 0): # non-symmetric area q2 = alpha va = 0.0 else: va = fac*distributions.invgamma.ppf(q1,a) vb = fac*distributions.invgamma.ppf(q2,a) vp = peak*fac # fac = sqrt(fac) peak = sqrt(2./n) F_peak = distributions.gengamma.cdf(peak,a,-2) q1 = F_peak - alpha/2.0 q2 = F_peak + alpha/2.0 if (q1 < 0): q2 = alpha sta = 0.0 else: sta = fac*distributions.gengamma.ppf(q1,a,-2) stb = fac*distributions.gengamma.ppf(q2,a,-2) stp = peak*fac return (mp,(ma,mb)),(vp,(va,vb)),(stp,(sta,stb))
c2b82a6c3287b6c99c72891a654ee06c261efaf6 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/c2b82a6c3287b6c99c72891a654ee06c261efaf6/morestats.py
if (q1 < 0): q2 = alpha va = 0.0 else: va = fac*distributions.invgamma.ppf(q1,a)
if (q2 > 1): q2 = 1.0 va = fac*distributions.invgamma.ppf(q1,a)
def bayes_mvs(data,alpha=0.90): """Return Bayesian confidence intervals for the mean, var, and std. Assumes 1-d data all has same mean and variance and uses Jeffrey's prior for variance and std. alpha gives the probability that the returned interval contains the true parameter. Uses peak of conditional pdf as starting center. Returns (peak, (a, b)) for each of mean, variance and standard deviation. """ x = ravel(data) n = len(x) assert(n > 1) n = float(n) xbar = sb.add.reduce(x)/n C = sb.add.reduce(x*x)/n - xbar*xbar # fac = sqrt(C/(n-1)) tval = distributions.t.ppf((1+alpha)/2.0,n-1) delta = fac*tval ma = xbar - delta mb = xbar + delta mp = xbar # fac = n*C/2.0 peak = 2/(n+1.) a = (n-1)/2.0 F_peak = distributions.invgamma.cdf(peak,a) q1 = F_peak - alpha/2.0 q2 = F_peak + alpha/2.0 if (q1 < 0): # non-symmetric area q2 = alpha va = 0.0 else: va = fac*distributions.invgamma.ppf(q1,a) vb = fac*distributions.invgamma.ppf(q2,a) vp = peak*fac # fac = sqrt(fac) peak = sqrt(2./n) F_peak = distributions.gengamma.cdf(peak,a,-2) q1 = F_peak - alpha/2.0 q2 = F_peak + alpha/2.0 if (q1 < 0): q2 = alpha sta = 0.0 else: sta = fac*distributions.gengamma.ppf(q1,a,-2) stb = fac*distributions.gengamma.ppf(q2,a,-2) stp = peak*fac return (mp,(ma,mb)),(vp,(va,vb)),(stp,(sta,stb))
c2b82a6c3287b6c99c72891a654ee06c261efaf6 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/c2b82a6c3287b6c99c72891a654ee06c261efaf6/morestats.py
peak = sqrt(2./n) F_peak = distributions.gengamma.cdf(peak,a,-2)
if (n > 2): peak = special.gamma(a-0.5) / special.gamma(a) F_peak = distributions.gengamma.cdf(peak,a,-2) else: F_peak = -1.0 if (F_peak < alpha/2.0): peak = distributions.gengamma.ppf(0.5,a,-2) F_peak = 0.5
def bayes_mvs(data,alpha=0.90): """Return Bayesian confidence intervals for the mean, var, and std. Assumes 1-d data all has same mean and variance and uses Jeffrey's prior for variance and std. alpha gives the probability that the returned interval contains the true parameter. Uses peak of conditional pdf as starting center. Returns (peak, (a, b)) for each of mean, variance and standard deviation. """ x = ravel(data) n = len(x) assert(n > 1) n = float(n) xbar = sb.add.reduce(x)/n C = sb.add.reduce(x*x)/n - xbar*xbar # fac = sqrt(C/(n-1)) tval = distributions.t.ppf((1+alpha)/2.0,n-1) delta = fac*tval ma = xbar - delta mb = xbar + delta mp = xbar # fac = n*C/2.0 peak = 2/(n+1.) a = (n-1)/2.0 F_peak = distributions.invgamma.cdf(peak,a) q1 = F_peak - alpha/2.0 q2 = F_peak + alpha/2.0 if (q1 < 0): # non-symmetric area q2 = alpha va = 0.0 else: va = fac*distributions.invgamma.ppf(q1,a) vb = fac*distributions.invgamma.ppf(q2,a) vp = peak*fac # fac = sqrt(fac) peak = sqrt(2./n) F_peak = distributions.gengamma.cdf(peak,a,-2) q1 = F_peak - alpha/2.0 q2 = F_peak + alpha/2.0 if (q1 < 0): q2 = alpha sta = 0.0 else: sta = fac*distributions.gengamma.ppf(q1,a,-2) stb = fac*distributions.gengamma.ppf(q2,a,-2) stp = peak*fac return (mp,(ma,mb)),(vp,(va,vb)),(stp,(sta,stb))
c2b82a6c3287b6c99c72891a654ee06c261efaf6 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/c2b82a6c3287b6c99c72891a654ee06c261efaf6/morestats.py
if (q1 < 0): q2 = alpha sta = 0.0 else: sta = fac*distributions.gengamma.ppf(q1,a,-2)
if (q2 > 1): q2 = 1.0 sta = fac*distributions.gengamma.ppf(q1,a,-2)
def bayes_mvs(data,alpha=0.90): """Return Bayesian confidence intervals for the mean, var, and std. Assumes 1-d data all has same mean and variance and uses Jeffrey's prior for variance and std. alpha gives the probability that the returned interval contains the true parameter. Uses peak of conditional pdf as starting center. Returns (peak, (a, b)) for each of mean, variance and standard deviation. """ x = ravel(data) n = len(x) assert(n > 1) n = float(n) xbar = sb.add.reduce(x)/n C = sb.add.reduce(x*x)/n - xbar*xbar # fac = sqrt(C/(n-1)) tval = distributions.t.ppf((1+alpha)/2.0,n-1) delta = fac*tval ma = xbar - delta mb = xbar + delta mp = xbar # fac = n*C/2.0 peak = 2/(n+1.) a = (n-1)/2.0 F_peak = distributions.invgamma.cdf(peak,a) q1 = F_peak - alpha/2.0 q2 = F_peak + alpha/2.0 if (q1 < 0): # non-symmetric area q2 = alpha va = 0.0 else: va = fac*distributions.invgamma.ppf(q1,a) vb = fac*distributions.invgamma.ppf(q2,a) vp = peak*fac # fac = sqrt(fac) peak = sqrt(2./n) F_peak = distributions.gengamma.cdf(peak,a,-2) q1 = F_peak - alpha/2.0 q2 = F_peak + alpha/2.0 if (q1 < 0): q2 = alpha sta = 0.0 else: sta = fac*distributions.gengamma.ppf(q1,a,-2) stb = fac*distributions.gengamma.ppf(q2,a,-2) stp = peak*fac return (mp,(ma,mb)),(vp,(va,vb)),(stp,(sta,stb))
c2b82a6c3287b6c99c72891a654ee06c261efaf6 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/c2b82a6c3287b6c99c72891a654ee06c261efaf6/morestats.py
the_server=SocketServer.ThreadingTCPServer( (host, port), standard_sync_handler)
the_server=MyThreadingTCPServer( (host, port), standard_sync_handler)
def server(host=default_host,port=10000): import os global server_pid server_pid = os.getpid() sync_cluster.server_pid = server_pid print "starting server on %s:%s" % (host,port) print server_pid #the_server=SocketServer.TCPServer( (host, port), standard_sync_handler) #the_server=SocketServer.ForkingTCPServer( (host, port), standard_sync_handler) the_server=SocketServer.ThreadingTCPServer( (host, port), standard_sync_handler) __name__ = '__main__' the_server.serve_forever()
a87ededb4efa7cc9dae253b61b8b29f8bce29964 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/a87ededb4efa7cc9dae253b61b8b29f8bce29964/sync_cluster.py
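The diff above swaps SocketServer.ThreadingTCPServer for a custom MyThreadingTCPServer whose body is not shown in the record. The usual reason to subclass is to set allow_reuse_address so a restarted server can rebind its port immediately; a sketch under that assumption, using the modern socketserver module (the constructor lines are commented out so the snippet does not block):

    import socketserver

    class EchoHandler(socketserver.StreamRequestHandler):
        def handle(self):
            self.wfile.write(self.rfile.readline())

    class ReusableThreadingTCPServer(socketserver.ThreadingTCPServer):
        allow_reuse_address = True   # assumed motivation; not in the record

    # server = ReusableThreadingTCPServer(('localhost', 10000), EchoHandler)
    # server.serve_forever()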
c_decl = "fortranname(%s)" % new_name
c_decl = "fortranname %s" % new_name
def rename_functions(interface_in,prefix,suffix): sub_list = all_subroutines(interface_in) interface = '' for sub in sub_list: name = function_name(sub) new_name = prefix+name+suffix c_decl = "fortranname(%s)" % new_name #renamed_sub = string.replace(sub, name ,new_name ,1) renamed_sub = sub renamed_sub = string.replace(renamed_sub, '\n' , '\n ' + c_decl +'\n' ,1) interface = interface + renamed_sub + '\n\n' return interface
db393baf597b12d2422b24131319288ccf17770b /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/db393baf597b12d2422b24131319288ccf17770b/interface_gen.py
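The fix above changes the generated f2py directive from the call-like `fortranname(name)` to the statement form `fortranname name` that f2py actually parses. A tiny sketch of the corrected string generation, mirroring rename_functions:

    def fortranname_decl(prefix, name, suffix):
        return "fortranname %s" % (prefix + name + suffix)

    print(fortranname_decl('', 'gemm', '_'))   # fortranname gemm_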
"""
"""
def usage(): s = "usage: python interface_gen.py file_name module_name\n" \ "\n" \ " file_name -- file containing generic description of\n" \ " lapack interface\n" \ " module_name -- name of module to generate\n" print s
db393baf597b12d2422b24131319288ccf17770b /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/db393baf597b12d2422b24131319288ccf17770b/interface_gen.py
ord = 1 computes the largest row sum ord = -1 computes the smallest row sum ord = Inf computes the largest column sum ord = -Inf computes the smallest column sum
ord = 1 computes the largest column sum of absolute values ord = -1 computes the smallest column sum of absolute values ord = Inf computes the largest row sum of absolute values ord = -Inf computes the smallest row sum of absolute values
def norm(x, ord=2): """ norm(x, ord=2) -> n Matrix and vector norm. Inputs: x -- a rank-1 (vector) or rank-2 (matrix) array ord -- the order of norm. Comments: For vectors ord can be any real number including Inf or -Inf. ord = Inf, computes the maximum of the magnitudes ord = -Inf, computes minimum of the magnitudes ord is finite, computes sum(abs(x)**ord)**(1.0/ord) For matrices ord can only be + or - 1, 2, Inf. ord = 2 computes the largest singular value ord = -2 computes the smallest singular value ord = 1 computes the largest row sum ord = -1 computes the smallest row sum ord = Inf computes the largest column sum ord = -Inf computes the smallest column sum """ x = asarray(x) nd = len(x.shape) Inf = scipy_base.Inf if nd == 1: if ord == Inf: return scipy_base.amax(abs(x)) elif ord == -Inf: return scipy_base.amin(abs(x)) else: return scipy_base.sum(abs(x)**ord)**(1.0/ord) elif nd == 2: if ord == 2: return scipy_base.amax(decomp.svd(x)[1]) elif ord == -2: return scipy_base.amin(decomp.svd(x)[1]) elif ord == 1: return scipy_base.amax(scipy_base.sum(abs(x))) elif ord == Inf: return scipy_base.amax(scipy_base.sum(abs(x),axis=1)) elif ord == -1: return scipy_base.amin(scipy_base.sum(abs(x))) elif ord == -Inf: return scipy_base.amin(scipy_base.sum(abs(x),axis=1)) else: raise ValueError, "Invalid norm order for matrices." else: raise ValueError, "Improper number of dimensions to norm."
452748c71b17d9f7d20ea4ccaf500e7805c8bcf7 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/452748c71b17d9f7d20ea4ccaf500e7805c8bcf7/basic.py
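The docstring fix above swaps the row/column wording for the matrix 1- and inf-norms. The quickest way to see which is which is a numpy check, since np.linalg.norm implements the same conventions: ord=1 is the maximum absolute column sum, ord=inf the maximum absolute row sum.

    import numpy as np
    A = np.array([[1., -2.],
                  [3.,  4.]])
    print(np.linalg.norm(A, 1))        # 6.0 = max column sum (|-2| + |4|)
    print(np.linalg.norm(A, np.inf))   # 7.0 = max row sum   (|3| + |4|)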
'iwrk':array([],intc),'u': array([],float),
'iwrk':array([],int32),'u': array([],float),
def splprep(x,w=None,u=None,ub=None,ue=None,k=3,task=0,s=None,t=None, full_output=0,nest=None,per=0,quiet=1): """Find the B-spline representation of an N-dimensional curve. Description: Given a list of N rank-1 arrays, x, which represent a curve in N-dimensional space parametrized by u, find a smooth approximating spline curve g(u). Uses the FORTRAN routine parcur from FITPACK Inputs: x -- A list of sample vector arrays representing the curve. u -- An array of parameter values. If not given, these values are calculated automatically as (M = len(x[0])): v[0] = 0 v[i] = v[i-1] + distance(x[i],x[i-1]) u[i] = v[i] / v[M-1] ub, ue -- The end-points of the parameters interval. Defaults to u[0] and u[-1]. k -- Degree of the spline. Cubic splines are recommended. Even values of k should be avoided especially with a small s-value. 1 <= k <= 5. task -- If task==0 find t and c for a given smoothing factor, s. If task==1 find t and c for another value of the smoothing factor, s. There must have been a previous call with task=0 or task=1 for the same set of data. If task=-1 find the weighted least square spline for a given set of knots, t. s -- A smoothing condition. The amount of smoothness is determined by satisfying the conditions: sum((w * (y - g))**2,axis=0) <= s where g(x) is the smoothed interpolation of (x,y). The user can use s to control the tradeoff between closeness and smoothness of fit. Larger s means more smoothing while smaller values of s indicate less smoothing. Recommended values of s depend on the weights, w. If the weights represent the inverse of the standard-deviation of y, then a good s value should be found in the range (m-sqrt(2*m),m+sqrt(2*m)) where m is the number of datapoints in x, y, and w. t -- The knots needed for task=-1. full_output -- If non-zero, then return optional outputs. nest -- An over-estimate of the total number of knots of the spline to help in determining the storage space. By default nest=m/2. Always large enough is nest=m+k+1. per -- If non-zero, data points are considered periodic with period x[m-1] - x[0] and a smooth periodic spline approximation is returned. Values of y[m-1] and w[m-1] are not used. quiet -- Non-zero to suppress messages. Outputs: (tck, u, {fp, ier, msg}) tck -- (t,c,k) a tuple containing the vector of knots, the B-spline coefficients, and the degree of the spline. u -- An array of the values of the parameter. fp -- The weighted sum of squared residuals of the spline approximation. ier -- An integer flag about splrep success. Success is indicated if ier<=0. If ier in [1,2,3] an error occurred but was not raised. Otherwise an error is raised. msg -- A message corresponding to the integer flag, ier. Remarks: SEE splev for evaluation of the spline and its derivatives. 
""" if task<=0: _parcur_cache = {'t': array([],float), 'wrk': array([],float), 'iwrk':array([],intc),'u': array([],float), 'ub':0,'ue':1} x=myasarray(x) idim,m=x.shape if per: for i in range(idim): if x[i][0]!=x[i][-1]: if quiet<2:print 'Warning: Setting x[%d][%d]=x[%d][0]'%(i,m,i) x[i][-1]=x[i][0] if not 0<idim<11: raise TypeError,'0<idim<11 must hold' if w is None: w=ones(m,float) else: w=myasarray(w) ipar=(u is not None) if ipar: _parcur_cache['u']=u if ub is None: _parcur_cache['ub']=u[0] else: _parcur_cache['ub']=ub if ue is None: _parcur_cache['ue']=u[-1] else: _parcur_cache['ue']=ue else: _parcur_cache['u']=zeros(m,float) if not (1<=k<=5): raise TypeError, '1<=k=%d<=5 must hold'%(k) if not (-1<=task<=1): raise TypeError, 'task must be either -1,0, or 1' if (not len(w)==m) or (ipar==1 and (not len(u)==m)): raise TypeError,'Mismatch of input dimensions' if s is None: s=m-sqrt(2*m) if t is None and task==-1: raise TypeError, 'Knots must be given for task=-1' if t is not None: _parcur_cache['t']=myasarray(t) n=len(_parcur_cache['t']) if task==-1 and n<2*k+2: raise TypeError, 'There must be at least 2*k+2 knots for task=-1' if m<=k: raise TypeError, 'm>k must hold' if nest is None: nest=m+2*k if (task>=0 and s==0) or (nest<0): if per: nest=m+2*k else: nest=m+k+1 nest=max(nest,2*k+3) u=_parcur_cache['u'] ub=_parcur_cache['ub'] ue=_parcur_cache['ue'] t=_parcur_cache['t'] wrk=_parcur_cache['wrk'] iwrk=_parcur_cache['iwrk'] t,c,o=_fitpack._parcur(ravel(transpose(x)),w,u,ub,ue,k,task,ipar,s,t, nest,wrk,iwrk,per) _parcur_cache['u']=o['u'] _parcur_cache['ub']=o['ub'] _parcur_cache['ue']=o['ue'] _parcur_cache['t']=t _parcur_cache['wrk']=o['wrk'] _parcur_cache['iwrk']=o['iwrk'] ier,fp,n=o['ier'],o['fp'],len(t) u=o['u'] c.shape=idim,n-k-1 tcku = [t,list(c),k],u if ier<=0 and not quiet: print _iermess[ier][0] print "\tk=%d n=%d m=%d fp=%f s=%f"%(k,len(t),m,fp,s) if ier>0 and not full_output: if ier in [1,2,3]: print "Warning: "+_iermess[ier][0] else: try: raise _iermess[ier][1],_iermess[ier][0] except KeyError: raise _iermess['unknown'][1],_iermess['unknown'][0] if full_output: try: return tcku,fp,ier,_iermess[ier][0] except KeyError: return tcku,fp,ier,_iermess['unknown'][0] else: return tcku
9a0347e760bcb96ee1fc399da0a604243f75cec4 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9a0347e760bcb96ee1fc399da0a604243f75cec4/fitpack.py
'iwrk':array([],intc)}
'iwrk':array([],int32)}
def splprep(x,w=None,u=None,ub=None,ue=None,k=3,task=0,s=None,t=None, full_output=0,nest=None,per=0,quiet=1): """Find the B-spline representation of an N-dimensional curve. Description: Given a list of N rank-1 arrays, x, which represent a curve in N-dimensional space parametrized by u, find a smooth approximating spline curve g(u). Uses the FORTRAN routine parcur from FITPACK Inputs: x -- A list of sample vector arrays representing the curve. u -- An array of parameter values. If not given, these values are calculated automatically as (M = len(x[0])): v[0] = 0 v[i] = v[i-1] + distance(x[i],x[i-1]) u[i] = v[i] / v[M-1] ub, ue -- The end-points of the parameters interval. Defaults to u[0] and u[-1]. k -- Degree of the spline. Cubic splines are recommended. Even values of k should be avoided especially with a small s-value. 1 <= k <= 5. task -- If task==0 find t and c for a given smoothing factor, s. If task==1 find t and c for another value of the smoothing factor, s. There must have been a previous call with task=0 or task=1 for the same set of data. If task=-1 find the weighted least square spline for a given set of knots, t. s -- A smoothing condition. The amount of smoothness is determined by satisfying the conditions: sum((w * (y - g))**2,axis=0) <= s where g(x) is the smoothed interpolation of (x,y). The user can use s to control the tradeoff between closeness and smoothness of fit. Larger s means more smoothing while smaller values of s indicate less smoothing. Recommended values of s depend on the weights, w. If the weights represent the inverse of the standard-deviation of y, then a good s value should be found in the range (m-sqrt(2*m),m+sqrt(2*m)) where m is the number of datapoints in x, y, and w. t -- The knots needed for task=-1. full_output -- If non-zero, then return optional outputs. nest -- An over-estimate of the total number of knots of the spline to help in determining the storage space. By default nest=m/2. Always large enough is nest=m+k+1. per -- If non-zero, data points are considered periodic with period x[m-1] - x[0] and a smooth periodic spline approximation is returned. Values of y[m-1] and w[m-1] are not used. quiet -- Non-zero to suppress messages. Outputs: (tck, u, {fp, ier, msg}) tck -- (t,c,k) a tuple containing the vector of knots, the B-spline coefficients, and the degree of the spline. u -- An array of the values of the parameter. fp -- The weighted sum of squared residuals of the spline approximation. ier -- An integer flag about splrep success. Success is indicated if ier<=0. If ier in [1,2,3] an error occurred but was not raised. Otherwise an error is raised. msg -- A message corresponding to the integer flag, ier. Remarks: SEE splev for evaluation of the spline and its derivatives. 
""" if task<=0: _parcur_cache = {'t': array([],float), 'wrk': array([],float), 'iwrk':array([],intc),'u': array([],float), 'ub':0,'ue':1} x=myasarray(x) idim,m=x.shape if per: for i in range(idim): if x[i][0]!=x[i][-1]: if quiet<2:print 'Warning: Setting x[%d][%d]=x[%d][0]'%(i,m,i) x[i][-1]=x[i][0] if not 0<idim<11: raise TypeError,'0<idim<11 must hold' if w is None: w=ones(m,float) else: w=myasarray(w) ipar=(u is not None) if ipar: _parcur_cache['u']=u if ub is None: _parcur_cache['ub']=u[0] else: _parcur_cache['ub']=ub if ue is None: _parcur_cache['ue']=u[-1] else: _parcur_cache['ue']=ue else: _parcur_cache['u']=zeros(m,float) if not (1<=k<=5): raise TypeError, '1<=k=%d<=5 must hold'%(k) if not (-1<=task<=1): raise TypeError, 'task must be either -1,0, or 1' if (not len(w)==m) or (ipar==1 and (not len(u)==m)): raise TypeError,'Mismatch of input dimensions' if s is None: s=m-sqrt(2*m) if t is None and task==-1: raise TypeError, 'Knots must be given for task=-1' if t is not None: _parcur_cache['t']=myasarray(t) n=len(_parcur_cache['t']) if task==-1 and n<2*k+2: raise TypeError, 'There must be at least 2*k+2 knots for task=-1' if m<=k: raise TypeError, 'm>k must hold' if nest is None: nest=m+2*k if (task>=0 and s==0) or (nest<0): if per: nest=m+2*k else: nest=m+k+1 nest=max(nest,2*k+3) u=_parcur_cache['u'] ub=_parcur_cache['ub'] ue=_parcur_cache['ue'] t=_parcur_cache['t'] wrk=_parcur_cache['wrk'] iwrk=_parcur_cache['iwrk'] t,c,o=_fitpack._parcur(ravel(transpose(x)),w,u,ub,ue,k,task,ipar,s,t, nest,wrk,iwrk,per) _parcur_cache['u']=o['u'] _parcur_cache['ub']=o['ub'] _parcur_cache['ue']=o['ue'] _parcur_cache['t']=t _parcur_cache['wrk']=o['wrk'] _parcur_cache['iwrk']=o['iwrk'] ier,fp,n=o['ier'],o['fp'],len(t) u=o['u'] c.shape=idim,n-k-1 tcku = [t,list(c),k],u if ier<=0 and not quiet: print _iermess[ier][0] print "\tk=%d n=%d m=%d fp=%f s=%f"%(k,len(t),m,fp,s) if ier>0 and not full_output: if ier in [1,2,3]: print "Warning: "+_iermess[ier][0] else: try: raise _iermess[ier][1],_iermess[ier][0] except KeyError: raise _iermess['unknown'][1],_iermess['unknown'][0] if full_output: try: return tcku,fp,ier,_iermess[ier][0] except KeyError: return tcku,fp,ier,_iermess['unknown'][0] else: return tcku
9a0347e760bcb96ee1fc399da0a604243f75cec4 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9a0347e760bcb96ee1fc399da0a604243f75cec4/fitpack.py
_curfit_cache['iwrk'] = empty((nest,),intc)
_curfit_cache['iwrk'] = empty((nest,),int32)
def splrep(x,y,w=None,xb=None,xe=None,k=3,task=0,s=1e-3,t=None, full_output=0,per=0,quiet=1): """Find the B-spline representation of 1-D curve. Description: Given the set of data points (x[i], y[i]) determine a smooth spline approximation of degree k on the interval xb <= x <= xe. The coefficients, c, and the knot points, t, are returned. Uses the FORTRAN routine curfit from FITPACK. Inputs: x, y -- The data points defining a curve y = f(x). w -- Strictly positive rank-1 array of weights the same length as x and y. The weights are used in computing the weighted least-squares spline fit. If the errors in the y values have standard-deviation given by the vector d, then w should be 1/d. Default is ones(len(x)). xb, xe -- The interval to fit. If None, these default to x[0] and x[-1] respectively. k -- The order of the spline fit. It is recommended to use cubic splines. Even order splines should be avoided especially with small s values. 1 <= k <= 5 task -- If task==0 find t and c for a given smoothing factor, s. If task==1 find t and c for another value of the smoothing factor, s. There must have been a previous call with task=0 or task=1 for the same set of data (t will be stored an used internally) If task=-1 find the weighted least square spline for a given set of knots, t. These should be interior knots as knots on the ends will be added automatically. s -- A smoothing condition. The amount of smoothness is determined by satisfying the conditions: sum((w * (y - g))**2,axis=0) <= s where g(x) is the smoothed interpolation of (x,y). The user can use s to control the tradeoff between closeness and smoothness of fit. Larger s means more smoothing while smaller values of s indicate less smoothing. Recommended values of s depend on the weights, w. If the weights represent the inverse of the standard-deviation of y, then a good s value should be found in the range (m-sqrt(2*m),m+sqrt(2*m)) where m is the number of datapoints in x, y, and w. default : s=m-sqrt(2*m) t -- The knots needed for task=-1. If given then task is automatically set to -1. full_output -- If non-zero, then return optional outputs. per -- If non-zero, data points are considered periodic with period x[m-1] - x[0] and a smooth periodic spline approximation is returned. Values of y[m-1] and w[m-1] are not used. quiet -- Non-zero to suppress messages. Outputs: (tck, {fp, ier, msg}) tck -- (t,c,k) a tuple containing the vector of knots, the B-spline coefficients, and the degree of the spline. fp -- The weighted sum of squared residuals of the spline approximation. ier -- An integer flag about splrep success. Success is indicated if ier<=0. If ier in [1,2,3] an error occurred but was not raised. Otherwise an error is raised. msg -- A message corresponding to the integer flag, ier. Remarks: See splev for evaluation of the spline and its derivatives. Example: x = linspace(0, 10, 10) y = sin(x) tck = splrep(x, y) x2 = linspace(0, 10, 200) y2 = splev(x2, tck) plot(x, y, 'o', x2, y2) """ if task<=0: _curfit_cache = {} x,y=map(myasarray,[x,y]) m=len(x) if w is None: w=ones(m,float) else: w=myasarray(w) if not len(w) == m: raise TypeError,' len(w)=%d is not equal to m=%d'%(len(w),m) if (m != len(y)) or (m != len(w)): raise TypeError, 'Lengths of the first three arguments (x,y,w) must be equal' if not (1<=k<=5): raise TypeError, 'Given degree of the spline (k=%d) is not supported. 
(1<=k<=5)'%(k) if m<=k: raise TypeError, 'm>k must hold' if xb is None: xb=x[0] if xe is None: xe=x[-1] if not (-1<=task<=1): raise TypeError, 'task must be either -1,0, or 1' if s is None: s = m-sqrt(2*m) if t is not None: task = -1 if task == -1: if t is None: raise TypeError, 'Knots must be given for task=-1' numknots = len(t) _curfit_cache['t'] = empty((numknots + 2*k+2,),float) _curfit_cache['t'][k+1:-k-1] = t nest = len(_curfit_cache['t']) elif task == 0: if per: nest = max(m+2*k,2*k+3) else: nest = max(m+k+1,2*k+3) t = empty((nest,),float) _curfit_cache['t'] = t if task <= 0: _curfit_cache['wrk'] = empty((m*(k+1)+nest*(7+3*k),),float) _curfit_cache['iwrk'] = empty((nest,),intc) try: t=_curfit_cache['t'] wrk=_curfit_cache['wrk'] iwrk=_curfit_cache['iwrk'] except KeyError: raise TypeError, "must call with task=1 only after"\ " call with task=0,-1" if not per: n,c,fp,ier = dfitpack.curfit(task, x, y, w, t, wrk, iwrk, xb, xe, k, s) else: n,c,fp,ier = dfitpack.percur(task, x, y, w, t, wrk, iwrk, k, s) tck = [t[:n],c[:n-k-1],k] if ier<=0 and not quiet: print _iermess[ier][0] print "\tk=%d n=%d m=%d fp=%f s=%f"%(k,len(t),m,fp,s) if ier>0 and not full_output: if ier in [1,2,3]: print "Warning: "+_iermess[ier][0] else: try: raise _iermess[ier][1],_iermess[ier][0] except KeyError: raise _iermess['unknown'][1],_iermess['unknown'][0] if full_output: try: return tck,fp,ier,_iermess[ier][0] except KeyError: return tck,fp,ier,_iermess['unknown'][0] else: return tck
9a0347e760bcb96ee1fc399da0a604243f75cec4 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9a0347e760bcb96ee1fc399da0a604243f75cec4/fitpack.py
'wrk': array([],float), 'iwrk':array([],intc)}
'wrk': array([],float), 'iwrk':array([],int32)}
#def _curfit(x,y,w=None,xb=None,xe=None,k=3,task=0,s=None,t=None,
9a0347e760bcb96ee1fc399da0a604243f75cec4 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/9a0347e760bcb96ee1fc399da0a604243f75cec4/fitpack.py
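The three fitpack records above switch the cached work arrays from intc to int32. np.intc tracks whatever the platform's C int is, while np.int32 is fixed-width; they coincide on most platforms, which is why code like this tends to break only elsewhere (the records do not state the motivation, so treat that reading as a guess). A quick check:

    import numpy as np
    print(np.dtype(np.intc))    # int32 on typical platforms (C int)
    print(np.dtype(np.int32))   # always int32
    print(np.empty(0, np.intc).dtype == np.empty(0, np.int32).dtype)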
app.MainLoop() gui_thread_finished.set()
try: app.MainLoop() finally: gui_thread_finished.set()
def gui_thread(finished): """ Indirectly imports wxPython into the second thread """ import sys try: # If we can find a module named wxPython. Odds are (maybe 100%), # we don't want to start a new thread with a MainLoop() in it. if not sys.modules.has_key('wxPython'): #import must be done inside if statement!!! from gui_thread_guts import second_thread_app # Variable used to see if the wxApp is # running in the main or secondary thread # Used to determine if proxies should be generated. global running_in_second_thread,app,gui_thread_finished app = second_thread_app(0) running_in_second_thread = 1 app.MainLoop() #when the main loop exits, we need to single the # exit_gui_thread function that it is OK to shut down. gui_thread_finished.set() finally: finished.set()
f4c0aa6cdb582dd11b29d9f08256052fa4375ff0 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/f4c0aa6cdb582dd11b29d9f08256052fa4375ff0/main.py
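The MainLoop row above wraps the loop in try/finally so that gui_thread_finished is set even when the loop raises; otherwise exit_gui_thread's wait on that event could hang forever. A minimal sketch of the guarantee, with run_loop and broken_loop as hypothetical stand-ins for second_thread_app and MainLoop:

import threading

gui_thread_finished = threading.Event()

def run_loop(loop):
    try:
        loop()
    finally:
        # Runs whether loop() returns or raises, so a waiter on this
        # event can never block forever.
        gui_thread_finished.set()

def broken_loop():
    raise RuntimeError("event loop died")

try:
    run_loop(broken_loop)
except RuntimeError:
    pass
print(gui_thread_finished.is_set())   # True despite the exception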
hasattr(x,'is_proxy')
return hasattr(x,'is_proxy')
def is_proxy(x): hasattr(x,'is_proxy')
f4c0aa6cdb582dd11b29d9f08256052fa4375ff0 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/f4c0aa6cdb582dd11b29d9f08256052fa4375ff0/main.py
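The is_proxy row above is the classic missing-return bug: a Python function body that computes a value but never returns it yields None, so the predicate was falsy for every argument. A quick demonstration (Fake is a hypothetical stand-in for a proxied class):

def is_proxy_buggy(x):
    hasattr(x, 'is_proxy')          # value computed, then silently discarded

def is_proxy_fixed(x):
    return hasattr(x, 'is_proxy')

class Fake:
    is_proxy = 1

print(is_proxy_buggy(Fake()))       # None -> falsy: every proxy check failed
print(is_proxy_fixed(Fake()))       # True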
def configuration(parent_package=''): package = 'odr' config = Configuration(package,parent_package) local_path = get_path(__name__)
def configuration(parent_package='', top_path=None): config = Configuration('odr', parent_package, top_path)
def configuration(parent_package=''): package = 'odr' config = Configuration(package,parent_package) local_path = get_path(__name__) libodr_files = ['d_odr.f', 'd_mprec.f', 'dlunoc.f'] atlas_info = get_info('atlas') #atlas_info = {} # uncomment if ATLAS is available but want to use # Fortran LAPACK/BLAS; useful for testing blas_libs = [] if not atlas_info: warnings.warn(AtlasNotFoundError.__doc__) blas_info = get_info('blas') if blas_info: libodr_files.append('d_lpk.f') blas_libs.extend(blas_info['libraries']) else: warnings.warn(BlasNotFoundError.__doc__) libodr_files.append('d_lpkbls.f') else: libodr_files.append('d_lpk.f') blas_libs.extend(atlas_info['libraries']) libodr = [os.path.join(local_path, 'odrpack', x) for x in libodr_files] config.add_library('odrpack', sources=libodr) sources = ['__odrpack.c'] config.add_extension('__odrpack', sources=sources, libraries=['odrpack']+blas_libs, include_dirs=[local_path], library_dirs=atlas_info['library_dirs'], ) return config
aecb4ec4e3c2d9f447453b41152a4c3b45e46610 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/aecb4ec4e3c2d9f447453b41152a4c3b45e46610/setup_odr.py
libodr = [os.path.join(local_path, 'odrpack', x) for x in libodr_files]
libodr = [os.path.join('odrpack', x) for x in libodr_files]
def configuration(parent_package=''): package = 'odr' config = Configuration(package,parent_package) local_path = get_path(__name__) libodr_files = ['d_odr.f', 'd_mprec.f', 'dlunoc.f'] atlas_info = get_info('atlas') #atlas_info = {} # uncomment if ATLAS is available but want to use # Fortran LAPACK/BLAS; useful for testing blas_libs = [] if not atlas_info: warnings.warn(AtlasNotFoundError.__doc__) blas_info = get_info('blas') if blas_info: libodr_files.append('d_lpk.f') blas_libs.extend(blas_info['libraries']) else: warnings.warn(BlasNotFoundError.__doc__) libodr_files.append('d_lpkbls.f') else: libodr_files.append('d_lpk.f') blas_libs.extend(atlas_info['libraries']) libodr = [os.path.join(local_path, 'odrpack', x) for x in libodr_files] config.add_library('odrpack', sources=libodr) sources = ['__odrpack.c'] config.add_extension('__odrpack', sources=sources, libraries=['odrpack']+blas_libs, include_dirs=[local_path], library_dirs=atlas_info['library_dirs'], ) return config
aecb4ec4e3c2d9f447453b41152a4c3b45e46610 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/aecb4ec4e3c2d9f447453b41152a4c3b45e46610/setup_odr.py
include_dirs=[local_path],
include_dirs=['.'],
def configuration(parent_package=''): package = 'odr' config = Configuration(package,parent_package) local_path = get_path(__name__) libodr_files = ['d_odr.f', 'd_mprec.f', 'dlunoc.f'] atlas_info = get_info('atlas') #atlas_info = {} # uncomment if ATLAS is available but want to use # Fortran LAPACK/BLAS; useful for testing blas_libs = [] if not atlas_info: warnings.warn(AtlasNotFoundError.__doc__) blas_info = get_info('blas') if blas_info: libodr_files.append('d_lpk.f') blas_libs.extend(blas_info['libraries']) else: warnings.warn(BlasNotFoundError.__doc__) libodr_files.append('d_lpkbls.f') else: libodr_files.append('d_lpk.f') blas_libs.extend(atlas_info['libraries']) libodr = [os.path.join(local_path, 'odrpack', x) for x in libodr_files] config.add_library('odrpack', sources=libodr) sources = ['__odrpack.c'] config.add_extension('__odrpack', sources=sources, libraries=['odrpack']+blas_libs, include_dirs=[local_path], library_dirs=atlas_info['library_dirs'], ) return config
aecb4ec4e3c2d9f447453b41152a4c3b45e46610 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/aecb4ec4e3c2d9f447453b41152a4c3b45e46610/setup_odr.py
setup(**configuration())
setup(**configuration(top_path='').todict())
def configuration(parent_package=''): package = 'odr' config = Configuration(package,parent_package) local_path = get_path(__name__) libodr_files = ['d_odr.f', 'd_mprec.f', 'dlunoc.f'] atlas_info = get_info('atlas') #atlas_info = {} # uncomment if ATLAS is available but want to use # Fortran LAPACK/BLAS; useful for testing blas_libs = [] if not atlas_info: warnings.warn(AtlasNotFoundError.__doc__) blas_info = get_info('blas') if blas_info: libodr_files.append('d_lpk.f') blas_libs.extend(blas_info['libraries']) else: warnings.warn(BlasNotFoundError.__doc__) libodr_files.append('d_lpkbls.f') else: libodr_files.append('d_lpk.f') blas_libs.extend(atlas_info['libraries']) libodr = [os.path.join(local_path, 'odrpack', x) for x in libodr_files] config.add_library('odrpack', sources=libodr) sources = ['__odrpack.c'] config.add_extension('__odrpack', sources=sources, libraries=['odrpack']+blas_libs, include_dirs=[local_path], library_dirs=atlas_info['library_dirs'], ) return config
aecb4ec4e3c2d9f447453b41152a4c3b45e46610 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/aecb4ec4e3c2d9f447453b41152a4c3b45e46610/setup_odr.py
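The four setup_odr.py rows above are one migration to the newer numpy.distutils conventions: Configuration takes a top_path, source and include paths are given relative to the setup file instead of via get_path/local_path, and setup() consumes the configuration through todict(). A condensed sketch of the migrated skeleton (legacy numpy.distutils API; the ATLAS/BLAS branching is elided):

import os

def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('odr', parent_package, top_path)

    # Paths are now relative to this setup file.
    libodr_files = ['d_odr.f', 'd_mprec.f', 'dlunoc.f', 'd_lpk.f']
    libodr = [os.path.join('odrpack', f) for f in libodr_files]
    config.add_library('odrpack', sources=libodr)

    config.add_extension('__odrpack',
                         sources=['__odrpack.c'],
                         libraries=['odrpack'],
                         include_dirs=['.'])
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    # todict() turns the Configuration object into plain setup() keywords.
    setup(**configuration(top_path='').todict())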
typecode = _coerce_rules[(self._typecode,other._typecode)] data1, data2 = _convert_data(self.data, other.data, typecode)
typecode = _coerce_rules[(self._typecode,ocs._typecode)] data1, data2 = _convert_data(self.data, ocs.data, typecode)
def __sub__(self, other): ocs = csc_matrix(other) if (ocs.shape != self.shape): raise ValueError, "Inconsistent shapes." typecode = _coerce_rules[(self._typecode,other._typecode)] data1, data2 = _convert_data(self.data, other.data, typecode) func = getattr(sparsetools,_transtabl[typecode]+'cscadd') c,rowc,ptrc,ierr = func(data1,self.rowind,self.indptr,-data2,other.rowind,other.indptr) if ierr: raise ValueError, "Ran out of space (but shouldn't have happened)." M, N = self.shape return csc_matrix(c,(rowc,ptrc),M=M,N=N)
e99e5203975fe3a9d552183da484d14ad3139e3c /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/e99e5203975fe3a9d552183da484d14ad3139e3c/Sparse.py
c,rowc,ptrc,ierr = func(data1,self.rowind,self.indptr,-data2,other.rowind,other.indptr)
c,rowc,ptrc,ierr = func(data1,self.rowind,self.indptr,-data2,ocs.rowind,ocs.indptr)
def __sub__(self, other): ocs = csc_matrix(other) if (ocs.shape != self.shape): raise ValueError, "Inconsistent shapes." typecode = _coerce_rules[(self._typecode,other._typecode)] data1, data2 = _convert_data(self.data, other.data, typecode) func = getattr(sparsetools,_transtabl[typecode]+'cscadd') c,rowc,ptrc,ierr = func(data1,self.rowind,self.indptr,-data2,other.rowind,other.indptr) if ierr: raise ValueError, "Ran out of space (but shouldn't have happened)." M, N = self.shape return csc_matrix(c,(rowc,ptrc),M=M,N=N)
e99e5203975fe3a9d552183da484d14ad3139e3c /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/e99e5203975fe3a9d552183da484d14ad3139e3c/Sparse.py
typecode = _coerce_rules[(self._typecode,other._typecode)] data1, data2 = _convert_data(self.data, other.data, typecode)
typecode = _coerce_rules[(self._typecode,ocs._typecode)] data1, data2 = _convert_data(self.data, ocs.data, typecode)
def __rsub__(self, other): # implement other - self ocs = csc_matrix(other) if (ocs.shape != self.shape): raise ValueError, "Inconsistent shapes." typecode = _coerce_rules[(self._typecode,other._typecode)] data1, data2 = _convert_data(self.data, other.data, typecode) func = getattr(sparsetools,_transtabl[typecode]+'cscadd') c,rowc,ptrc,ierr = func(-data1,self.rowind,self.indptr,data2,other.rowind,other.indptr) if ierr: raise ValueError, "Ran out of space (but shouldn't have happened)." M, N = self.shape return csc_matrix(c,(rowc,ptrc),M=M,N=N)
e99e5203975fe3a9d552183da484d14ad3139e3c /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/e99e5203975fe3a9d552183da484d14ad3139e3c/Sparse.py
c,rowc,ptrc,ierr = func(-data1,self.rowind,self.indptr,data2,other.rowind,other.indptr)
c,rowc,ptrc,ierr = func(-data1,self.rowind,self.indptr,data2,ocs.rowind,ocs.indptr)
def __rsub__(self, other): # implement other - self ocs = csc_matrix(other) if (ocs.shape != self.shape): raise ValueError, "Inconsistent shapes." typecode = _coerce_rules[(self._typecode,other._typecode)] data1, data2 = _convert_data(self.data, other.data, typecode) func = getattr(sparsetools,_transtabl[typecode]+'cscadd') c,rowc,ptrc,ierr = func(-data1,self.rowind,self.indptr,data2,other.rowind,other.indptr) if ierr: raise ValueError, "Ran out of space (but shouldn't have happened)." M, N = self.shape return csc_matrix(c,(rowc,ptrc),M=M,N=N)
e99e5203975fe3a9d552183da484d14ad3139e3c /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/e99e5203975fe3a9d552183da484d14ad3139e3c/Sparse.py
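Both csc_matrix rows above fix the same coercion slip: __sub__ and __rsub__ convert other to a csc_matrix named ocs, but then keep reading _typecode, data, rowind and indptr off the raw other, which may be a dense array or list with none of those attributes. A toy illustration of why every access must go through the coerced object (ToyCSC is a hypothetical stand-in, not the real sparse class):

class ToyCSC:
    def __init__(self, data):
        self.data = list(data)

    def __sub__(self, other):
        ocs = other if isinstance(other, ToyCSC) else ToyCSC(other)
        # Correct: only the coerced `ocs` is dereferenced from here on.
        return ToyCSC(a - b for a, b in zip(self.data, ocs.data))

print((ToyCSC([3, 4]) - [1, 1]).data)   # [2, 3]; using `other.data` would
# raise AttributeError, since the plain list has no .data attribute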
target_dir = join(target_dir,'atlas321')
target_dir = os.path.join(target_dir,'atlas321')
def local_glob(path): return glob(os.path.join(local_path,path))
0a5f7f39ba004d3b195121a4cd75bba9ff069149 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0a5f7f39ba004d3b195121a4cd75bba9ff069149/setup_lapack.py
target = join(build_dir,target_dir,'clapack.pyf')
target = os.path.join(build_dir,target_dir,'clapack.pyf')
def get_clapack_source(ext, build_dir): name = ext.name.split('.')[-1] assert name=='clapack',`name` if atlas_version is None: target = join(build_dir,target_dir,'clapack.pyf') from distutils.dep_util import newer if newer(__file__,target): f = open(source,'w') f.write(tmpl_empty_clapack_pyf) f.close() else: target = ext.depends[0] assert os.path.basename(target)=='clapack.pyf.src' return target
0a5f7f39ba004d3b195121a4cd75bba9ff069149 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/0a5f7f39ba004d3b195121a4cd75bba9ff069149/setup_lapack.py
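The two setup_lapack.py rows above qualify bare join() calls as os.path.join(); an unqualified join in these scripts is either undefined or easily shadowed (Python 2's string.join, for instance), while the qualified form is unambiguous and portable:

import os

build_dir, target_dir = 'build', 'atlas321'
target = os.path.join(build_dir, target_dir, 'clapack.pyf')
print(target)    # build/atlas321/clapack.pyf, with the platform's separator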
namelength = self.read_element()
namelength = self.read_element()[0]
def get_raw_array(self): namelength = self.read_element() # get field names names = self.read_element() splitnames = [names[i:i+namelength] for i in \ xrange(0,len(names),namelength)] self.obj_template._fieldnames = [x.tostring().strip('\x00') for x in splitnames] return super(Mat5StructMatrixGetter, self).get_raw_array()
2036af607acb2df8f7607e2e9cb47872bc407312 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/2036af607acb2df8f7607e2e9cb47872bc407312/mio5.py
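The mio5.py row above indexes the result of read_element(): the reader apparently returns a length-1 array even for a single value, and the downstream slicing arithmetic wants a plain scalar. A sketch of the shape of this fix (read_element here is a hypothetical stand-in for the MATLAB-5 element reader):

import numpy as np

def read_element():
    return np.array([32], dtype=np.int32)   # rank-1, even for one value

namelength = read_element()[0]               # take the scalar, here 32
names = b'alpha' + b'\x00' * 27 + b'beta' + b'\x00' * 28
fields = [names[i:i + namelength] for i in range(0, len(names), namelength)]
print([f.rstrip(b'\x00').decode() for f in fields])    # ['alpha', 'beta']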
def bracket(func, xa=0.0, xb=1.0, args=(), grow_limit=110.0):
def bracket(func, xa=0.0, xb=1.0, args=(), grow_limit=110.0, maxiter=1000):
def bracket(func, xa=0.0, xb=1.0, args=(), grow_limit=110.0): """Given a function and distinct initial points, search in the downhill direction (as defined by the initial points) and return new points xa, xb, xc that bracket the minimum of the function: f(xa) > f(xb) < f(xc) """ _gold = 1.618034 _verysmall_num = 1e-21 fa = apply(func, (xa,)+args) fb = apply(func, (xb,)+args) if (fa < fb): # Switch so fa > fb dum = xa; xa = xb; xb = dum dum = fa; fa = fb; fb = dum xc = xb + _gold*(xb-xa) fc = apply(func, (xc,)+args) funcalls = 3 iter = 0 while (fc < fb): tmp1 = (xb - xa)*(fb-fc) tmp2 = (xb - xc)*(fb-fa) val = tmp2-tmp1 if abs(val) < _verysmall_num: denom = 2.0*_verysmall_num else: denom = 2.0*val w = xb - ((xb-xc)*tmp2-(xb-xa)*tmp1)/denom wlim = xb + grow_limit*(xc-xb) if iter > 1000: raise RuntimeError, "Too many iterations." if (w-xc)*(xb-w) > 0.0: fw = apply(func, (w,)+args) funcalls += 1 if (fw < fc): xa = xb; xb=w; fa=fb; fb=fw return xa, xb, xc, fa, fb, fc, funcalls elif (fw > fb): xc = w; fc=fw return xa, xb, xc, fa, fb, fc, funcalls w = xc + _gold*(xc-xb) fw = apply(func, (w,)+args) funcalls += 1 elif (w-wlim)*(wlim-xc) >= 0.0: w = wlim fw = apply(func, (w,)+args) funcalls += 1 elif (w-wlim)*(xc-w) > 0.0: fw = apply(func, (w,)+args) funcalls += 1 if (fw < fc): xb=xc; xc=w; w=xc+_gold*(xc-xb) fb=fc; fc=fw; fw=apply(func, (w,)+args) funcalls += 1 else: w = xc + _gold*(xc-xb) fw = apply(func, (w,)+args) funcalls += 1 xa=xb; xb=xc; xc=w fa=fb; fb=fc; fc=fw return xa, xb, xc, fa, fb, fc, funcalls
3b3f958ee4591a8bc4df4568d445bf78c10ea7fe /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/3b3f958ee4591a8bc4df4568d445bf78c10ea7fe/optimize.py
if iter > 1000:
if iter > maxiter:
def bracket(func, xa=0.0, xb=1.0, args=(), grow_limit=110.0): """Given a function and distinct initial points, search in the downhill direction (as defined by the initial points) and return new points xa, xb, xc that bracket the minimum of the function: f(xa) > f(xb) < f(xc) """ _gold = 1.618034 _verysmall_num = 1e-21 fa = apply(func, (xa,)+args) fb = apply(func, (xb,)+args) if (fa < fb): # Switch so fa > fb dum = xa; xa = xb; xb = dum dum = fa; fa = fb; fb = dum xc = xb + _gold*(xb-xa) fc = apply(func, (xc,)+args) funcalls = 3 iter = 0 while (fc < fb): tmp1 = (xb - xa)*(fb-fc) tmp2 = (xb - xc)*(fb-fa) val = tmp2-tmp1 if abs(val) < _verysmall_num: denom = 2.0*_verysmall_num else: denom = 2.0*val w = xb - ((xb-xc)*tmp2-(xb-xa)*tmp1)/denom wlim = xb + grow_limit*(xc-xb) if iter > 1000: raise RuntimeError, "Too many iterations." if (w-xc)*(xb-w) > 0.0: fw = apply(func, (w,)+args) funcalls += 1 if (fw < fc): xa = xb; xb=w; fa=fb; fb=fw return xa, xb, xc, fa, fb, fc, funcalls elif (fw > fb): xc = w; fc=fw return xa, xb, xc, fa, fb, fc, funcalls w = xc + _gold*(xc-xb) fw = apply(func, (w,)+args) funcalls += 1 elif (w-wlim)*(wlim-xc) >= 0.0: w = wlim fw = apply(func, (w,)+args) funcalls += 1 elif (w-wlim)*(xc-w) > 0.0: fw = apply(func, (w,)+args) funcalls += 1 if (fw < fc): xb=xc; xc=w; w=xc+_gold*(xc-xb) fb=fc; fc=fw; fw=apply(func, (w,)+args) funcalls += 1 else: w = xc + _gold*(xc-xb) fw = apply(func, (w,)+args) funcalls += 1 xa=xb; xb=xc; xc=w fa=fb; fb=fc; fc=fw return xa, xb, xc, fa, fb, fc, funcalls
3b3f958ee4591a8bc4df4568d445bf78c10ea7fe /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/3b3f958ee4591a8bc4df4568d445bf78c10ea7fe/optimize.py
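The two bracket() rows above promote the hard-wired 1000-iteration cap to a maxiter keyword with the same default, so callers can tune the limit. The pattern in miniature, using a simplified golden-ratio expansion (assumes f decreases from xa toward xb) rather than bracket()'s full parabolic-step logic:

def expand_until(f, xa, xb, grow=1.618034, maxiter=1000):
    """Step downhill from (xa, xb) until f turns back up, or give up."""
    fb = f(xb)
    for _ in range(maxiter):
        xc = xb + grow * (xb - xa)
        fc = f(xc)
        if fc >= fb:                # bracketed: f(xb) is a local dip
            return xa, xb, xc
        xa, xb, fb = xb, xc, fc     # slide the window downhill
    raise RuntimeError("Too many iterations.")

print(expand_until(lambda x: (x - 10.0) ** 2, 0.0, 1.0))   # brackets x=10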
if not _hold:
if not _hold and gist.plsys() < 2:
def matplot(x,y=None,axis=-1): if y is None: # no axis data y = x x = Numeric.arange(0,y.shape[axis]) x,y = Numeric.asarray(x), Numeric.asarray(y) assert(len(y.shape)==2) assert(len(x)==y.shape[axis]) otheraxis = (1+axis) % 2 sliceobj = [slice(None)]*2 if not _hold: gist.fma() clear_global_linetype() for k in range(y.shape[otheraxis]): thiscolor = _colors[_corder[k % len(_corder)]] sliceobj[otheraxis] = k gist.plg(y[sliceobj],x,type='solid',color=thiscolor,marks=0) append_global_linetype(_rcolors[thiscolor]+'-')
d31b1aed025e9765ed229fd77d6341f36f1473cc /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12971/d31b1aed025e9765ed229fd77d6341f36f1473cc/Mplot.py
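The Mplot row above stops matplot from auto-clearing the frame when gist reports an active coordinate system other than the default (plsys() >= 2, as in a multi-panel layout), where fma() would wipe the other panels. The guard pattern in the abstract, with hypothetical stand-ins for the gist calls:

_hold = False

def plsys():
    return 1                    # 1: the default single-plot system here

def fma():
    print("frame cleared")

def maybe_clear():
    # Clear only when the user is not holding the plot AND we are in the
    # default system; clearing inside a subpanel would be destructive.
    if not _hold and plsys() < 2:
        fma()

maybe_clear()                   # prints "frame cleared"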