code (stringlengths 22 to 1.05M) | apis (sequencelengths 1 to 3.31k) | extract_api (stringlengths 75 to 3.25M)
---|---|---|
# Code provided by:
# @misc{Subramanian2020,
# author = {<NAME>},
# title = {PyTorch-VAE},
# year = {2020},
# publisher = {GitHub},
# journal = {GitHub repository},
# howpublished = {\url{https://github.com/AntixK/PyTorch-VAE}}
# }
from torch import nn
from abc import abstractmethod
from typing import List, Any, TypeVar
Tensor = TypeVar("torch.tensor")
class BaseVAE(nn.Module):
def __init__(self) -> None:
super(BaseVAE, self).__init__()
def encode(self, input: Tensor) -> List[Tensor]:
raise NotImplementedError
def decode(self, input: Tensor) -> Any:
raise NotImplementedError
def sample(self, batch_size: int, current_device: int, **kwargs) -> Tensor:
raise RuntimeWarning()
def generate(self, x: Tensor, **kwargs) -> Tensor:
raise NotImplementedError
def embed(self, x: Tensor, **kwargs) -> Tensor:
raise NotImplementedError
@abstractmethod
def forward(self, *inputs: Tensor) -> Tensor:
pass
@abstractmethod
def loss_function(self, *inputs: Any, **kwargs) -> Tensor:
pass
| [
"typing.TypeVar"
] | [((347, 370), 'typing.TypeVar', 'TypeVar', (['"""torch.tensor"""'], {}), "('torch.tensor')\n", (354, 370), False, 'from typing import List, Any, TypeVar\n')] |
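For orientation, a minimal concrete subclass of the abstract BaseVAE interface above could look like the sketch below. It is illustrative only and not part of the PyTorch-VAE repository; the layer sizes and loss weighting are arbitrary assumptions, and it assumes BaseVAE, Tensor, List and Any from the snippet above are in scope.
import torch
from torch import nn
class TinyVAE(BaseVAE):
    def __init__(self, in_dim: int = 784, latent_dim: int = 16) -> None:
        super().__init__()
        self.encoder = nn.Linear(in_dim, latent_dim * 2)  # outputs [mu, log_var]
        self.decoder = nn.Linear(latent_dim, in_dim)
    def encode(self, input: Tensor) -> List[Tensor]:
        mu, log_var = self.encoder(input).chunk(2, dim=-1)
        return [mu, log_var]
    def decode(self, input: Tensor) -> Any:
        return self.decoder(input)
    def forward(self, *inputs: Tensor) -> Tensor:
        mu, log_var = self.encode(inputs[0])
        z = mu + torch.randn_like(mu) * torch.exp(0.5 * log_var)  # reparameterization trick
        return self.decode(z)
    def loss_function(self, *inputs: Any, **kwargs) -> Tensor:
        recons, target, mu, log_var = inputs
        recons_loss = nn.functional.mse_loss(recons, target)
        kld = -0.5 * torch.mean(1 + log_var - mu ** 2 - log_var.exp())
        return recons_loss + kld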
# -*- coding: utf-8 -*-
# @Author: wqshen
# @Email: <EMAIL>
# @Date: 2020/6/10 14:43
# @Last Modified by: wqshen
import numpy as np
from logzero import logger
from .point_stat_base import PointStatBase
class ContinuousVariableVerification(PointStatBase):
def __init__(self, forecast=None, obs=None, fcsterr=None, group=None):
if (forecast is None or obs is None) and fcsterr is None:
raise Exception("Initialize failed, check forecast and obs and fcsterr values.")
elif forecast is not None and obs is not None and fcsterr is not None:
logger.warning("You give forecast, obs and fcsterr, but the fcsterr will be ignored.")
fcsterr = None
self._available_score = ['N', 'ME', 'ME2', 'MSE', 'RMSE', 'ESTDEV', 'BCMSE', 'MAE', 'IQR', 'MAD', 'EPCT']
if fcsterr is not None:
self._error = fcsterr[~np.isnan(fcsterr)]
if forecast is None:
forecast = fcsterr + obs
if obs is None:
obs = forecast - fcsterr
# Not Available, 'BAGSS', 'ANOM_CORR'
self._available_score += ['FBAR', 'OBAR', 'FSTDEV', 'OSTDEV', 'PR_CORR', 'SP_CORR', 'KT_CORR', 'MBIAS', ]
super(ContinuousVariableVerification, self).__init__(forecast, obs, group)
@property
def FBAR(self):
"""**The sample mean forecast, FBAR**"""
return self.mean_forecast(self._f)
@staticmethod
def mean_forecast(forecast):
r"""**The sample mean forecast, FBAR**
the sample mean forecast (FBAR) is defined as,
.. math::
\bar{f} = \frac{1}{n}\sum_{i=1}^{n}f_i
Returns
        -------
numpy.ndarray, the sample mean forecast (FBAR)
"""
return np.average(forecast)
@property
def OBAR(self):
"""**The sample mean observation, OBAR**"""
return self.mean_observation(self._o)
@staticmethod
def mean_observation(obs):
r"""**The sample mean observation, OBAR**
the sample mean observation (OBAR) is defined as,
.. math::
\bar{o} = \frac{1}{n}\sum_{i=1}^{n}o_i
Returns
-------
numpy.ndarray, the sample mean observation (OBAR)
"""
return np.average(obs)
@property
def FSTDEV(self):
"""**The forecast standard deviation (FSTDEV)**"""
return self.forecast_standard_deviation(self._f)
@staticmethod
def forecast_standard_deviation(forecast):
r"""**The forecast standard deviation (FSTDEV)**
The sample variance of the forecasts is defined as
.. math::
s^{2}_{f} = \frac{1}{T-1}\sum_{i=1}^{T}(f_i - \bar{f})^2
The forecast standard deviation, FSTDEV, is defined as
.. math::
s_{f} = \sqrt{s^{2}_{f}}
Returns
-------
numpy.ndarray, the forecast standard deviation (FSTDEV)
"""
return np.std(forecast)
@property
def OSTDEV(self):
r"""**The observed standard deviation (OSTDEV)**"""
return self.observation_standard_deviation(self._o)
@staticmethod
def observation_standard_deviation(obs):
r"""**The observed standard deviation (OSTDEV)**
The sample variance of the observations is defined as
.. math::
s^{2}_{o} = \frac{1}{T-1}\sum_{i=1}^{T}(o_i - \bar{o})^2
The observed standard deviation, OSTDEV, is defined as
.. math::
s_{o} = \sqrt{s^{2}_{o}}
Returns
-------
numpy.ndarray, the observed standard deviation (OSTDEV)
"""
return np.std(obs)
@property
def PR_CORR(self):
r"""**The Pearson correlation coefficient ( :math:`r` , PR_CORR)**"""
return self.pearson_correlation_coefficient(self._f, self._o)
@staticmethod
def pearson_correlation_coefficient(forecast, obs):
r"""**The Pearson correlation coefficient ( :math:`r` , PR_CORR)**
The Pearson correlation coefficient, **r**,
measures the strength of linear association between the forecasts and observations.
The Pearson correlation coefficient is defined as:
.. math::
r = \frac{\sum^{T}_{i=1}(f_i - \bar{f})(o_i - \bar{o})}{\sqrt{\sum{(f_i - \bar{f})^2}}\sqrt{\sum{(o_i - \bar{o})^2}}}
r can range between -1 and 1;
a value of 1 indicates perfect correlation and
a value of -1 indicates perfect negative correlation.
A value of 0 indicates that the forecasts and observations are not correlated.
Returns
-------
numpy.ndarray, the Pearson correlation coefficient (PR_CORR)
"""
return np.corrcoef(forecast, obs)[1, 0]
@property
def SP_CORR(self):
r"""**The Spearman rank correlation coefficient ( :math:`\rho_s` , SP_CORR)**"""
return self.spearman_rank_correlation_cofficient(self._f, self._o)
@staticmethod
def spearman_rank_correlation_cofficient(forecast, obs):
r"""**The Spearman rank correlation coefficient ( :math:`\rho_s` , SP_CORR)**
        The Spearman rank correlation coefficient ( :math:`\rho_s` ) is a robust measure of association
        that is based on the ranks of the forecast and observed values rather than the actual values.
        That is, the forecast and observed samples are ordered from smallest to largest
        and rank values (from 1 to **n**, where **n** is the total number of pairs) are assigned.
        The pairs of forecast-observed ranks are then used to compute a correlation coefficient,
        analogous to the Pearson correlation coefficient, **r**.
        A simpler formulation of the Spearman rank correlation is based on the differences
        between each of the pairs of ranks (denoted as :math:`d_i` ):
        .. math::
            \rho_s = 1 - \frac{6\sum^{n}_{i=1}d^{2}_{i}}{n(n^2 - 1)}
        Like **r**, the Spearman rank correlation coefficient ranges between -1 and 1;
a value of 1 indicates perfect correlation and
a value of -1 indicates perfect negative correlation.
A value of 0 indicates that the forecasts and observations are not correlated.
Returns
-------
numpy.ndarray, the Spearman correlation coefficient (SP_CORR)
"""
from scipy.stats import spearmanr
return spearmanr(forecast, obs)
@property
def KT_CORR(self):
r"""**Kendall's Tau statistic ( :math:`\tau` , KT_CORR)**"""
return self.kendall_tau_statistic(self._f, self._o)
@staticmethod
def kendall_tau_statistic(forecast, obs):
r"""**Kendall's Tau statistic ( :math:`\tau` , KT_CORR)**
Kendall's Tau statistic ( :math:`\tau` ) is a robust measure of the level of association
between the forecast and observation pairs. It is defined as
.. math::
            \tau = \frac{N_c - N_d}{n(n-1)/2}
        where :math:`N_c` is the number of "concordant" pairs and :math:`N_d` is the number of "discordant" pairs.
        Concordant pairs are identified by comparing each pair with all other pairs in the sample;
        this can be done most easily by ordering all of the ( :math:`f_i, o_i` ) pairs
        according to :math:`f_i`, in which case the :math:`o_i` values won't necessarily be in order.
        The number of concordant matches of a particular pair with other pairs is computed by
        counting the number of pairs (with larger values)
        for which the value of :math:`o_i` for the current pair is exceeded (that is, pairs for which
        the values of **f** and **o** are both larger than the value for the current pair).
        Once this is done, :math:`N_c` is computed by summing the counts for all pairs.
        The total number of possible pairs is :math:`n(n-1)/2`; thus, the number of discordant pairs is
        :math:`N_d = n(n-1)/2 - N_c`.
        Like **r** and :math:`\rho_s` , Kendall's Tau ( :math:`\tau` ) ranges between -1 and 1;
        a value of 1 indicates perfect association (concordance) and
a value of -1 indicates perfect negative association.
A value of 0 indicates that the forecasts and observations are not associated.
Returns
-------
numpy.ndarray, Kendall's Tau statistic ( :math:`\tau` , KT_CORR)
"""
from scipy.stats import kendalltau
return kendalltau(forecast, obs)
@property
def ME(self):
"""**The Mean Error (ME)**"""
return self.mean_error(self.error)
@staticmethod
def mean_error(error):
r"""**The Mean Error (ME)**
The Mean Error, ME, is a measure of overall bias for continuous variables;
in particular ME = Bias. It is defined as
.. math::
ME = \frac{1}{n}\sum^{n}_{i=1}(f_i - o_i) = \bar{f} - \bar{o}
A perfect forecast has ME = 0.
Returns
-------
numpy.ndarray, The Mean Error (ME)
"""
return np.average(error)
@property
def ME2(self):
"""**The Mean Error Squared** (ME2)"""
return self.mean_error_squared(self.error)
@staticmethod
def mean_error_squared(error):
"""**The Mean Error Squared** (ME2)
The Mean Error Squared, ME2, is provided to give a complete breakdown of MSE
in terms of squared Bias plus estimated variance of the error,
        as detailed below in the section on BCMSE. It is defined as :math:`ME2 = ME^2`.
        A perfect forecast has ME2 = 0.
        Returns
        -------
        numpy.ndarray, The Mean Error Squared (ME2)
"""
return np.square(np.average(error))
@property
def MBIAS(self):
"""**Multiplicative bias (MBIAS)**"""
return self.multiplicative_bias(self._f, self._o)
@staticmethod
def multiplicative_bias(forecast, error):
r"""**Multiplicative bias (MBIAS)**
Multiplicative bias is simply the ratio of the means of the forecasts and the observations:
.. math::
MBIAS = \frac{\bar{f}}{\bar{o}}
Returns
-------
numpy.ndarray, Multiplicative bias (MBIAS)
"""
return np.average(forecast) / np.average(error)
@property
def MSE(self):
"""**Mean-squared error (MSE)**"""
return self.mean_squared_error(self.error)
@staticmethod
def mean_squared_error(error):
r"""**Mean-squared error (MSE)**
MSE measures the average squared error of the forecasts. Specifically,
.. math::
MSE = \frac{1}{n}\sum{(f_i - o_i)^2}
Returns
-------
numpy.ndarray, Mean-squared error (MSE)
"""
return np.average(error ** 2)
@property
def RMSE(self):
"""**Root-mean-squared error (RMSE)**"""
return self.root_mean_squared_error(self.error)
@staticmethod
    def root_mean_squared_error(error):
        r"""**Root-mean-squared error (RMSE)**
RMSE is simply the square root of the MSE, :math:`RMSE = \sqrt{MSE}`
Returns
-------
numpy.ndarray, Root-mean-squared error (RMSE)
"""
return np.sqrt(np.average(error ** 2))
@property
def ESTDEV(self):
"""**Standard deviation of the error** (ESTDEV)"""
return self.standard_deviation_of_error(self.error)
@staticmethod
def standard_deviation_of_error(error):
"""**Standard deviation of the error** (ESTDEV)
Returns
-------
        numpy.ndarray, Standard deviation of the error
"""
return np.std(error)
@property
def BCMSE(self):
"""**Bias-Corrected MSE (BCMSE)**"""
return self.bias_corrected_mse(self.error)
@staticmethod
def bias_corrected_mse(error):
r"""**Bias-Corrected MSE (BCMSE)**
MSE and RMSE are strongly impacted by large errors.
They also are strongly impacted by large bias (ME) values.
MSE and RMSE can range from 0 to infinity.
A perfect forecast would have MSE = RMSE = 0.
MSE can be re-written as,
.. math::
MSE = (\bar{f} - \bar{o})^2 + s^{2}_{f} + s^{2}_{o} -2 s_f s_o r_{fo}
where :math:`\bar{f} - \bar{o} = ME` and :math:`s^{2}_{f} + s^{2}_{o} -2 s_f s_o r_{fo}` is
        the estimated variance of the error, :math:`s^{2}_{f-o}` . Thus, :math:`MSE = ME^2 + s^{2}_{f-o}`
To understand the behavior of MSE, it is important to examine both of the terms of MSE,
rather than examining MSE alone. Moreover, MSE can be strongly influenced by ME,
as shown by this decomposition.
The standard deviation of the error, :math:`s_{f-o}` , is
.. math::
s_{f-o}=\sqrt{s^{2}_{f-o}}=\sqrt{s^{2}_{f} + s^{2}_{o} -2 s_f s_o r_{fo}}
Note that the square of the standard deviation of the error (ESTDEV2) is
sometimes called the "Bias-corrected MSE" (BCMSE)
because it removes the effect of overall bias from the forecast-observation squared differences.
Returns
-------
numpy.ndarray, Bias-Corrected MSE (BCMSE)
"""
return np.square(np.std(error))
@property
def MAE(self):
"""**Mean Absolute Error (MAE)**"""
return self.mean_absolute_error(self.error)
@staticmethod
def mean_absolute_error(error):
r"""**Mean Absolute Error (MAE)**
The Mean Absolute Error (MAE) is defined as :math:`MAE = \frac{1}{n}\sum{|f_i - o_i|}`
        MAE is less influenced by large errors and also does not depend on the mean error.
A perfect forecast would have MAE = 0.
Returns
-------
numpy.ndarray, Mean Absolute Error (MAE)
"""
return np.average(np.abs(error))
@property
    def IQR(self):
        """**Inter Quartile Range of the Errors (IQR)**"""
return self.inter_quartile_range_of_errors(self.error)
@staticmethod
def inter_quartile_range_of_errors(error):
r"""**Inter Quartile Range of the Errors (IQR)**
The Inter Quartile Range of the Errors (IQR) is the difference
        between the 75th and 25th percentiles of the errors. It is defined as
        .. math::
            IQR = p_{75} (f_i - o_i) - p_{25}(f_i - o_i)
        IQR is another estimate of spread, similar to the standard error,
        but is less influenced by large errors and also does not depend on the mean error.
        A perfect forecast would have IQR = 0.
        Returns
        -------
        numpy.ndarray, Inter Quartile Range of the Errors (IQR)
"""
return np.percentile(error, 75) - np.percentile(error, 25)
@property
def MAD(self):
"""Median Absolute Deviation (MAD)"""
return self.median_absolute_deviation(self.error)
@staticmethod
def median_absolute_deviation(error):
"""Median Absolute Deviation (MAD)
The Median Absolute Deviation (MAD) is defined as :math:`MAD=median|f_i - o_i|`
MAD is an estimate of spread, similar to standard error,
        but is less influenced by large errors and also does not depend on the mean error.
A perfect forecast would have MAD = 0.
Returns
-------
numpy.ndarray, Median Absolute Deviation (MAD)
"""
return np.median(np.abs(error))
@property
def BAGSS(self):
"""Bias Adjusted Gilbert Skill Score (BAGSS)"""
return self.bias_adjusted_gilbert_skill_score(self._f, self._o)
@staticmethod
def bias_adjusted_gilbert_skill_score(forecast, obs):
"""Bias Adjusted Gilbert Skill Score (BAGSS)
The Bias Adjusted Gilbert Skill Score (BAGSS) is the Gilbert Skill Score,
but with the contingency table counts adjusted to eliminate
as much bias in the forecast as possible.
For details, see `Brill and Messinger, 2009. <https://www.adv-geosci.net/16/137/2008/>`_
Returns
-------
Not implemented
numpy.ndarray, Bias Adjusted Gilbert Skill Score (BAGSS)
"""
return
@property
def EPCT(self):
"""Percentiles (0.1, 0.25, 0.5, 0.75, 0.9) of the errors"""
return self.percentile_errors(self.error)
@staticmethod
def percentile_errors(error):
"""Percentiles of the errors
Percentiles of the errors provide more information about the distribution of errors
than can be obtained from the mean and standard deviations of the errors.
Percentiles are computed by ordering the errors from smallest to largest
and computing the rank location of each percentile in the ordering,
and matching the rank to the actual value.
Percentiles can also be used to create box plots of the errors.
The 0.10th, 0.25th, 0.50th, 0.75th, and 0.90th quantile values of the errors are computed.
Returns
-------
numpy.ndarray, Percentiles of the errors
"""
quantiles = np.array([0.1, 0.25, 0.5, 0.75, 0.9])
return np.quantile(error, quantiles)
@property
def ANOM_CORR(self):
"""The Anomaly correlation coefficient (ANOM_CORR)"""
return self.anomaly_correlation_coefficient(self._f, self._o, None)
@staticmethod
def anomaly_correlation_coefficient(forecast, obs, climate):
r"""The Anomaly correlation coefficient (ANOM_CORR)
        The Anomaly correlation coefficient is equivalent to the Pearson correlation coefficient,
        except that both the forecasts and observations are first adjusted according to a climatology value.
        The anomaly is the difference between the individual forecast or observation and the typical situation,
        as measured by a climatology (**c**) of some variety.
        It measures the strength of linear association between the forecast anomalies and observed anomalies.
        The Anomaly correlation coefficient is defined as:
        .. math::
            Anomaly Correlation = \frac{\sum{(f_i - c)(o_i - c)}} {\sqrt{\sum{(f_i - c)^2}} \sqrt{\sum{(o_i - c)^2}}}
Anomaly correlation can range between -1 and 1;
- a value of 1 indicates perfect correlation and
- a value of -1 indicates perfect negative correlation.
- A value of 0 indicates that the forecast and observed anomalies are not correlated.
Returns
-------
Not implemented
"""
return
def list_score(self):
"""list all available score"""
return {k: np.round(getattr(self, k), self.round) for k in self._available_score}
| [
"numpy.quantile",
"numpy.average",
"numpy.abs",
"numpy.std",
"numpy.corrcoef",
"scipy.stats.spearmanr",
"numpy.isnan",
"numpy.percentile",
"numpy.array",
"logzero.logger.warning",
"scipy.stats.kendalltau"
] | [((1751, 1771), 'numpy.average', 'np.average', (['forecast'], {}), '(forecast)\n', (1761, 1771), True, 'import numpy as np\n'), ((2252, 2267), 'numpy.average', 'np.average', (['obs'], {}), '(obs)\n', (2262, 2267), True, 'import numpy as np\n'), ((2936, 2952), 'numpy.std', 'np.std', (['forecast'], {}), '(forecast)\n', (2942, 2952), True, 'import numpy as np\n'), ((3626, 3637), 'numpy.std', 'np.std', (['obs'], {}), '(obs)\n', (3632, 3637), True, 'import numpy as np\n'), ((6365, 6389), 'scipy.stats.spearmanr', 'spearmanr', (['forecast', 'obs'], {}), '(forecast, obs)\n', (6374, 6389), False, 'from scipy.stats import spearmanr\n'), ((8305, 8330), 'scipy.stats.kendalltau', 'kendalltau', (['forecast', 'obs'], {}), '(forecast, obs)\n', (8315, 8330), False, 'from scipy.stats import kendalltau\n'), ((8897, 8914), 'numpy.average', 'np.average', (['error'], {}), '(error)\n', (8907, 8914), True, 'import numpy as np\n'), ((10599, 10621), 'numpy.average', 'np.average', (['(error ** 2)'], {}), '(error ** 2)\n', (10609, 10621), True, 'import numpy as np\n'), ((11480, 11493), 'numpy.std', 'np.std', (['error'], {}), '(error)\n', (11486, 11493), True, 'import numpy as np\n'), ((16891, 16928), 'numpy.array', 'np.array', (['[0.1, 0.25, 0.5, 0.75, 0.9]'], {}), '([0.1, 0.25, 0.5, 0.75, 0.9])\n', (16899, 16928), True, 'import numpy as np\n'), ((16944, 16973), 'numpy.quantile', 'np.quantile', (['error', 'quantiles'], {}), '(error, quantiles)\n', (16955, 16973), True, 'import numpy as np\n'), ((4699, 4725), 'numpy.corrcoef', 'np.corrcoef', (['forecast', 'obs'], {}), '(forecast, obs)\n', (4710, 4725), True, 'import numpy as np\n'), ((9536, 9553), 'numpy.average', 'np.average', (['error'], {}), '(error)\n', (9546, 9553), True, 'import numpy as np\n'), ((10079, 10099), 'numpy.average', 'np.average', (['forecast'], {}), '(forecast)\n', (10089, 10099), True, 'import numpy as np\n'), ((10102, 10119), 'numpy.average', 'np.average', (['error'], {}), '(error)\n', (10112, 10119), True, 'import numpy as np\n'), ((11067, 11089), 'numpy.average', 'np.average', (['(error ** 2)'], {}), '(error ** 2)\n', (11077, 11089), True, 'import numpy as np\n'), ((13060, 13073), 'numpy.std', 'np.std', (['error'], {}), '(error)\n', (13066, 13073), True, 'import numpy as np\n'), ((13656, 13669), 'numpy.abs', 'np.abs', (['error'], {}), '(error)\n', (13662, 13669), True, 'import numpy as np\n'), ((14509, 14533), 'numpy.percentile', 'np.percentile', (['error', '(75)'], {}), '(error, 75)\n', (14522, 14533), True, 'import numpy as np\n'), ((14536, 14560), 'numpy.percentile', 'np.percentile', (['error', '(25)'], {}), '(error, 25)\n', (14549, 14560), True, 'import numpy as np\n'), ((15221, 15234), 'numpy.abs', 'np.abs', (['error'], {}), '(error)\n', (15227, 15234), True, 'import numpy as np\n'), ((584, 675), 'logzero.logger.warning', 'logger.warning', (['"""You give forecast, obs and fcsterr, but the fcsterr will be ignored."""'], {}), "(\n 'You give forecast, obs and fcsterr, but the fcsterr will be ignored.')\n", (598, 675), False, 'from logzero import logger\n'), ((879, 896), 'numpy.isnan', 'np.isnan', (['fcsterr'], {}), '(fcsterr)\n', (887, 896), True, 'import numpy as np\n')] |
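As a quick, self-contained check of the score formulas documented in the class above, the key statistics can be reproduced directly with plain numpy on hypothetical toy data (the class and its PointStatBase dependency are not needed for this):
import numpy as np
forecast = np.array([1.0, 2.0, 3.0, 4.0])
obs = np.array([1.5, 1.5, 2.5, 4.5])
error = forecast - obs
me = np.average(error)                                    # Mean Error (ME)
rmse = np.sqrt(np.average(error ** 2))                      # Root-mean-squared error (RMSE)
mbias = np.average(forecast) / np.average(obs)              # Multiplicative bias (MBIAS)
pr_corr = np.corrcoef(forecast, obs)[1, 0]                  # Pearson correlation (PR_CORR)
iqr = np.percentile(error, 75) - np.percentile(error, 25)  # Inter Quartile Range (IQR)
print(me, rmse, mbias, pr_corr, iqr)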
import numpy as np
from scipy.integrate import solve_ivp
import matplotlib.pyplot as plt
from numpy.random import randint, rand
from sir import *
def SIR_continuous_reinfected(b,k,time,ii,r):
"""
    Simulates the continuous SIR model with reinfection
    ii = initial fraction of infected
    time = days of simulation
    b = rate at which people become infectious
    k = rate at which people recover
    r = reinfection rate
    returns sol from solve_ivp
"""
def SIR(t, X):
#The main set of equations
Y = np.zeros((3))
Y[0] = -b * X[0] * X[2]
Y[1] = k * X[2] - r * X[1]
Y[2] = b * X[0] * X[2] - (k * X[2]) + r * X[1]
return Y
t_eval = np.linspace(0, time, time)
sol1 = solve_ivp(SIR, [0, time], [1-ii, 0, ii], method='RK45', t_eval=t_eval) # solve the equation
return sol1
## Discrete
class Person_reinfection(Person):
"""
An agent representing a person.
    By default, a person is susceptible but not infectious. They can become infectious through exposure, via the infection method.
Status: 0 = susceptible 1 = infected 2 = removed
"""
def __init__(self,startpos=None):
self.status = 0
if startpos==None:
self.pos = np.random.rand(2)
else:
self.pos = np.array(startpos)
self.reinfection=1
def reinfectionrate(self):
return self.reinfection
def immunization(self,p):
q=self.reinfection-p
if q<0:
q=0
self.reinfection=q
def count_susceptible(pop):
"""
counts number of susceptible
"""
return sum(p.is_susceptible() for p in pop)
def count_infected(pop):
"""
counts number of infected
"""
return sum(p.is_infected() for p in pop)
def count_removed(pop):
"""
counts number of removed
"""
return sum(p.is_removed() for p in pop)
def SIR_discrete_reinfection(N,ii,b,T,k):
"""
    Simulates the discrete agent-based SIR model with reinfection
    N = total number of people
    ii = initial fraction of infected
    b = number of contacts per day
    T = days of simulation
    k = probability of recovery per day
    returns array of [susceptible, infected, removed] counts
"""
pop = [Person_reinfection() for i in range(N)]
initial_infection = randint(N,size=np.int(N*ii))
for i in initial_infection:
pop[i].infection()
counts_susceptible = [count_susceptible(pop)]
counts_infected = [count_infected(pop)]
counts_removed = [count_removed(pop)]
for t in range(T):
# update the population
for i in range(N):
if pop[i].is_infected():
# person i infected all their contacts
contacts = randint(N, size=b)
for j in contacts:
if not pop[j].is_removed():
pop[j].infection()
#if rand() < p:
# pop[j].infection()
if pop[j].is_removed():
if rand()<pop[j].reinfectionrate():
pop[j].infection()
if rand()< k:
pop[i].remove()
pop[i].immunization(rand())
# add to our counts
counts_susceptible.append(count_susceptible(pop))
counts_infected.append(count_infected(pop))
counts_removed.append(count_removed(pop))
return np.array([counts_susceptible,counts_infected,counts_removed])
| [
"numpy.zeros",
"scipy.integrate.solve_ivp",
"numpy.random.randint",
"numpy.array",
"numpy.int",
"numpy.linspace",
"numpy.random.rand"
] | [((717, 743), 'numpy.linspace', 'np.linspace', (['(0)', 'time', 'time'], {}), '(0, time, time)\n', (728, 743), True, 'import numpy as np\n'), ((755, 827), 'scipy.integrate.solve_ivp', 'solve_ivp', (['SIR', '[0, time]', '[1 - ii, 0, ii]'], {'method': '"""RK45"""', 't_eval': 't_eval'}), "(SIR, [0, time], [1 - ii, 0, ii], method='RK45', t_eval=t_eval)\n", (764, 827), False, 'from scipy.integrate import solve_ivp\n'), ((3439, 3502), 'numpy.array', 'np.array', (['[counts_susceptible, counts_infected, counts_removed]'], {}), '([counts_susceptible, counts_infected, counts_removed])\n', (3447, 3502), True, 'import numpy as np\n'), ((551, 562), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (559, 562), True, 'import numpy as np\n'), ((1284, 1301), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (1298, 1301), True, 'import numpy as np\n'), ((1339, 1357), 'numpy.array', 'np.array', (['startpos'], {}), '(startpos)\n', (1347, 1357), True, 'import numpy as np\n'), ((2329, 2343), 'numpy.int', 'np.int', (['(N * ii)'], {}), '(N * ii)\n', (2335, 2343), True, 'import numpy as np\n'), ((2741, 2759), 'numpy.random.randint', 'randint', (['N'], {'size': 'b'}), '(N, size=b)\n', (2748, 2759), False, 'from numpy.random import randint, rand\n'), ((3144, 3150), 'numpy.random.rand', 'rand', ([], {}), '()\n', (3148, 3150), False, 'from numpy.random import randint, rand\n'), ((3231, 3237), 'numpy.random.rand', 'rand', ([], {}), '()\n', (3235, 3237), False, 'from numpy.random import randint, rand\n'), ((3045, 3051), 'numpy.random.rand', 'rand', ([], {}), '()\n', (3049, 3051), False, 'from numpy.random import randint, rand\n')] |
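A short usage sketch for the continuous model defined above; it assumes SIR_continuous_reinfected from that snippet is importable, and the parameter values are arbitrary assumptions:
import matplotlib.pyplot as plt
# b = infection rate, k = recovery rate, r = reinfection rate, 1% initially infected, 150 days
sol = SIR_continuous_reinfected(b=0.3, k=0.1, time=150, ii=0.01, r=0.02)
plt.plot(sol.t, sol.y[0], label='susceptible')  # state order in the solver is [S, R, I]
plt.plot(sol.t, sol.y[2], label='infected')
plt.plot(sol.t, sol.y[1], label='removed')
plt.legend()
plt.show()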
# Generated by Django 2.1.1 on 2018-09-13 18:15
from django.db import migrations, models
import django.utils.timezone
import showcase.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Portfolio',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='title')),
('slug', models.SlugField(help_text="Used to build the category's URL.", max_length=255, unique=True, verbose_name='slug')),
('content', models.TextField(blank=True, verbose_name='content')),
('image', models.ImageField(blank=True, help_text='Used for illustration.', upload_to=showcase.models.image_upload_to_dispatcher, verbose_name='image')),
('image_caption', models.TextField(blank=True, help_text="Image's caption.", verbose_name='caption')),
('creation_date', models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation date')),
],
),
]
| [
"django.db.models.TextField",
"django.db.models.CharField",
"django.db.models.SlugField",
"django.db.models.AutoField",
"django.db.models.ImageField",
"django.db.models.DateTimeField"
] | [((357, 450), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (373, 450), False, 'from django.db import migrations, models\n'), ((475, 529), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""title"""'}), "(max_length=255, verbose_name='title')\n", (491, 529), False, 'from django.db import migrations, models\n'), ((557, 675), 'django.db.models.SlugField', 'models.SlugField', ([], {'help_text': '"""Used to build the category\'s URL."""', 'max_length': '(255)', 'unique': '(True)', 'verbose_name': '"""slug"""'}), '(help_text="Used to build the category\'s URL.", max_length=\n 255, unique=True, verbose_name=\'slug\')\n', (573, 675), False, 'from django.db import migrations, models\n'), ((701, 753), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'verbose_name': '"""content"""'}), "(blank=True, verbose_name='content')\n", (717, 753), False, 'from django.db import migrations, models\n'), ((782, 928), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'help_text': '"""Used for illustration."""', 'upload_to': 'showcase.models.image_upload_to_dispatcher', 'verbose_name': '"""image"""'}), "(blank=True, help_text='Used for illustration.', upload_to\n =showcase.models.image_upload_to_dispatcher, verbose_name='image')\n", (799, 928), False, 'from django.db import migrations, models\n'), ((960, 1047), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'help_text': '"""Image\'s caption."""', 'verbose_name': '"""caption"""'}), '(blank=True, help_text="Image\'s caption.", verbose_name=\n \'caption\')\n', (976, 1047), False, 'from django.db import migrations, models\n'), ((1079, 1169), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation date"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation date')\n", (1099, 1169), False, 'from django.db import migrations, models\n')] |
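The model class that would generate a migration like the one above can be read off from its field list. The following is a hedged reconstruction, not the project's actual showcase/models.py; in particular, image_upload_to_dispatcher is a placeholder for the real upload-path callable:
from django.db import models
from django.utils import timezone
def image_upload_to_dispatcher(instance, filename):
    # Placeholder; the real project defines its own upload-path logic.
    return 'portfolio/{}'.format(filename)
class Portfolio(models.Model):
    title = models.CharField(max_length=255, verbose_name='title')
    slug = models.SlugField(max_length=255, unique=True, verbose_name='slug',
                            help_text="Used to build the category's URL.")
    content = models.TextField(blank=True, verbose_name='content')
    image = models.ImageField(blank=True, upload_to=image_upload_to_dispatcher,
                              help_text='Used for illustration.', verbose_name='image')
    image_caption = models.TextField(blank=True, help_text="Image's caption.", verbose_name='caption')
    creation_date = models.DateTimeField(default=timezone.now, verbose_name='creation date')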
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
import socketserver
except:
import SocketServer as socketserver
import signal
import socket
import serial
import os
import json
import sys
HOST, PORT = '0.0.0.0', 51234
serial = serial.Serial('/dev/ttyACM0', 57600)
class ScratchHandler(socketserver.BaseRequestHandler):
def setup(self):
os.system('/home/pi/mugbot-talk-1.1.sh ' + 'スクラッチとの接続を開始しました &')
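        # The Japanese message above means "Connection with Scratch has started."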
# for speak in English
# os.system('espeak -ven+f3 -k5 -s150 "Scratch connection established" &')
def handle(self):
while True:
self.data = self.request.recv(1024).strip()
if len(self.data) == 0:
break
json_obj = json.loads(self.data)
action = json_obj['action']
arg = json_obj['arg']
if action == 'face_y':
arg = min(max(int(arg) + 95, 80), 110)
serial.write((str(arg) + 'y').encode())
elif action == 'face_x':
arg = min(max(int(arg) + 90, 5), 175)
serial.write((str(arg) + 'x').encode())
elif action == 'eye':
arg = min(max(int(arg), 0), 255)
serial.write((str(arg) + 'z').encode())
elif action == 'speech':
serial.write('t'.encode())
if sys.version_info.major == 2:
arg = arg.encode('utf-8')
os.system('/home/pi/mugbot-talk-1.1.sh ' + arg + ' &')
# for speak in English
# os.system('espeak -ven+f3 -k5 -s150 ' + '"' + arg +'" &')
serial.write('n'.encode())
else:
print('Unknown Command')
class ScratchServer(socketserver.ThreadingTCPServer):
def server_bind(self):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
self.socket.bind(self.server_address)
if __name__ == '__main__':
signal.signal(signal.SIGINT, signal.SIG_DFL)
server = ScratchServer((HOST, PORT), ScratchHandler)
server.serve_forever()
| [
"serial.Serial",
"signal.signal",
"os.system",
"json.loads"
] | [((239, 275), 'serial.Serial', 'serial.Serial', (['"""/dev/ttyACM0"""', '(57600)'], {}), "('/dev/ttyACM0', 57600)\n", (252, 275), False, 'import serial\n'), ((1976, 2020), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal.SIG_DFL'], {}), '(signal.SIGINT, signal.SIG_DFL)\n', (1989, 2020), False, 'import signal\n'), ((362, 426), 'os.system', 'os.system', (["('/home/pi/mugbot-talk-1.1.sh ' + 'スクラッチとの接続を開始しました &')"], {}), "('/home/pi/mugbot-talk-1.1.sh ' + 'スクラッチとの接続を開始しました &')\n", (371, 426), False, 'import os\n'), ((729, 750), 'json.loads', 'json.loads', (['self.data'], {}), '(self.data)\n', (739, 750), False, 'import json\n'), ((1447, 1501), 'os.system', 'os.system', (["('/home/pi/mugbot-talk-1.1.sh ' + arg + ' &')"], {}), "('/home/pi/mugbot-talk-1.1.sh ' + arg + ' &')\n", (1456, 1501), False, 'import os\n')] |
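The handler above expects each message to be a single JSON object with 'action' and 'arg' keys sent over a plain TCP socket on port 51234. A hypothetical client (not part of the original script; the host address is a placeholder) could drive it like this:
import json
import socket
def send_command(action, arg, host='192.168.0.10', port=51234):
    # One message per connection keeps recv() from coalescing several JSON objects.
    with socket.create_connection((host, port)) as sock:
        sock.sendall(json.dumps({'action': action, 'arg': arg}).encode())
send_command('face_x', 30)       # turn the head
send_command('speech', 'hello')  # speak via the TTS script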
# -*- coding: utf-8 -*-
import pytest
from boto.dynamodb2.fields import HashKey
from boto.dynamodb2.fields import RangeKey
from boto.dynamodb2.fields import GlobalAllIndex
from boto.dynamodb2.table import Table
from mycroft.models.aws_connections import get_avro_schema
from mycroft.models.etl_records import ETLRecords
from mycroft.logic.run_actions import _parse_runs
from mycroft.logic.run_actions import list_runs_by_job_id
from tests.models.test_abstract_records import dynamodb_connection # noqa
from tests.models.test_abstract_records import NAME_TO_SCHEMA
from tests.models.test_etl_record import FakeETLRecord
from tests.data.etl_record import SAMPLE_JOB_ID
from tests.data.etl_record import SAMPLE_RECORD_JOBS
BASE_DICT = {
'hash_key': None,
'data_date': None,
'etl_status': None,
'et_runtime': None,
'et_starttime': None,
'load_runtime': None,
'load_starttime': None,
'redshift_id': None,
's3_path': None,
'updated_at': None,
'run_by': None,
'job_id': None,
'etl_error': None,
'additional_arguments': None,
}
class TestRunActions(object):
@pytest.yield_fixture(scope='module') # noqa
def etl_records(self, dynamodb_connection):
avro_schema = get_avro_schema('mycroft/avro/etl_record.json')
index_job_id = GlobalAllIndex(
ETLRecords.INDEX_JOB_ID_AND_DATA_DATE,
parts=[HashKey('job_id'), RangeKey('data_date')])
table = Table.create(
'ETLRecords',
schema=NAME_TO_SCHEMA['etl_records'],
connection=dynamodb_connection,
global_indexes=[index_job_id])
etl_records = ETLRecords(persistence_object=table, avro_schema_object=avro_schema)
for job in SAMPLE_RECORD_JOBS:
assert etl_records.put(**job)
yield etl_records
assert table.delete()
def test__parse_runs_empty_run(self):
empty_runs = [FakeETLRecord(BASE_DICT)]
result = _parse_runs(empty_runs)
assert result['runs'][0] == BASE_DICT
def test_list_runs_by_job_id(self, etl_records):
return_value = list_runs_by_job_id(SAMPLE_JOB_ID, etl_records)
expected_count = len([job for job in SAMPLE_RECORD_JOBS
if job['job_id'] == SAMPLE_JOB_ID])
assert len(return_value['runs']) == expected_count
@pytest.mark.parametrize("job_id", ['y', '..', '!', '', '_'])
def test_list_runs_by_job_id_bad_job_id(self, job_id):
with pytest.raises(ValueError) as e:
list_runs_by_job_id(job_id, None)
assert e.exconly().startswith("ValueError: invalid job_id")
| [
"pytest.yield_fixture",
"mycroft.logic.run_actions.list_runs_by_job_id",
"boto.dynamodb2.fields.HashKey",
"mycroft.models.etl_records.ETLRecords",
"boto.dynamodb2.table.Table.create",
"mycroft.logic.run_actions._parse_runs",
"pytest.raises",
"tests.models.test_etl_record.FakeETLRecord",
"pytest.mark.parametrize",
"boto.dynamodb2.fields.RangeKey",
"mycroft.models.aws_connections.get_avro_schema"
] | [((1121, 1157), 'pytest.yield_fixture', 'pytest.yield_fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1141, 1157), False, 'import pytest\n'), ((2355, 2415), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""job_id"""', "['y', '..', '!', '', '_']"], {}), "('job_id', ['y', '..', '!', '', '_'])\n", (2378, 2415), False, 'import pytest\n'), ((1236, 1283), 'mycroft.models.aws_connections.get_avro_schema', 'get_avro_schema', (['"""mycroft/avro/etl_record.json"""'], {}), "('mycroft/avro/etl_record.json')\n", (1251, 1283), False, 'from mycroft.models.aws_connections import get_avro_schema\n'), ((1452, 1584), 'boto.dynamodb2.table.Table.create', 'Table.create', (['"""ETLRecords"""'], {'schema': "NAME_TO_SCHEMA['etl_records']", 'connection': 'dynamodb_connection', 'global_indexes': '[index_job_id]'}), "('ETLRecords', schema=NAME_TO_SCHEMA['etl_records'], connection\n =dynamodb_connection, global_indexes=[index_job_id])\n", (1464, 1584), False, 'from boto.dynamodb2.table import Table\n'), ((1651, 1719), 'mycroft.models.etl_records.ETLRecords', 'ETLRecords', ([], {'persistence_object': 'table', 'avro_schema_object': 'avro_schema'}), '(persistence_object=table, avro_schema_object=avro_schema)\n', (1661, 1719), False, 'from mycroft.models.etl_records import ETLRecords\n'), ((1965, 1988), 'mycroft.logic.run_actions._parse_runs', '_parse_runs', (['empty_runs'], {}), '(empty_runs)\n', (1976, 1988), False, 'from mycroft.logic.run_actions import _parse_runs\n'), ((2112, 2159), 'mycroft.logic.run_actions.list_runs_by_job_id', 'list_runs_by_job_id', (['SAMPLE_JOB_ID', 'etl_records'], {}), '(SAMPLE_JOB_ID, etl_records)\n', (2131, 2159), False, 'from mycroft.logic.run_actions import list_runs_by_job_id\n'), ((1922, 1946), 'tests.models.test_etl_record.FakeETLRecord', 'FakeETLRecord', (['BASE_DICT'], {}), '(BASE_DICT)\n', (1935, 1946), False, 'from tests.models.test_etl_record import FakeETLRecord\n'), ((2488, 2513), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2501, 2513), False, 'import pytest\n'), ((2532, 2565), 'mycroft.logic.run_actions.list_runs_by_job_id', 'list_runs_by_job_id', (['job_id', 'None'], {}), '(job_id, None)\n', (2551, 2565), False, 'from mycroft.logic.run_actions import list_runs_by_job_id\n'), ((1393, 1410), 'boto.dynamodb2.fields.HashKey', 'HashKey', (['"""job_id"""'], {}), "('job_id')\n", (1400, 1410), False, 'from boto.dynamodb2.fields import HashKey\n'), ((1412, 1433), 'boto.dynamodb2.fields.RangeKey', 'RangeKey', (['"""data_date"""'], {}), "('data_date')\n", (1420, 1433), False, 'from boto.dynamodb2.fields import RangeKey\n')] |
from django import forms
from django.contrib.auth.models import Group
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from django.core.exceptions import ValidationError
from django.contrib.auth import authenticate
from .models import User, Profile
from django.contrib import messages
from phonenumber_field.formfields import PhoneNumberField
from phonenumber_field.widgets import PhoneNumberPrefixWidget
from django.core.validators import RegexValidator
class UserAdminCreationForm(forms.ModelForm):
"""A form for creating new users. Includes all the required
fields, plus a repeated password."""
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(
label='Password confirmation', widget=forms.PasswordInput)
class Meta:
model = User
fields = ('email', 'username')
def clean_username(self):
username = self.cleaned_data.get('username').lower()
try:
User.objects.get(username__exact=username)
except User.DoesNotExist:
return username
raise forms.ValidationError("This username is already taken.")
def clean_password2(self):
# Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise ValidationError("Passwords don't match")
return password2
def clean_email(self):
email = self.cleaned_data['email'].lower()
try:
account = User.objects.get(email=email)
except User.DoesNotExist:
return email
raise forms.ValidationError(f"Email {email} is already in use.")
def save(self, commit=True):
# Save the provided password in hashed format
user = super().save(commit=False)
user.set_password(self.cleaned_data["password2"])
if commit:
user.save()
return user
class UserAdminChangeForm(forms.ModelForm):
"""A form for updating users. Includes all the fields on
the user, but replaces the password field with admin's
password hash display field.
"""
password = ReadOnlyPasswordHashField()
class Meta:
model = User
fields = ('__all__')
def clean_password(self):
# Regardless of what the user provides, return the initial value.
# This is done here, rather than on the field, because the
# field does not have access to the initial value
return self.initial["password"]
class UserProfileForm(forms.ModelForm):
GENDER = (
('Male', 'Male'),
('Female', 'Female'),
('Other', 'Other'),
)
gender = forms.ChoiceField(
label='Gender', choices=GENDER, widget=forms.RadioSelect, required=False)
date_of_birth = forms.DateField(widget=forms.DateInput(
attrs={'type': 'date'}), required=False)
phonenumber = PhoneNumberField(
widget = PhoneNumberPrefixWidget(initial='IN')
)
class Meta:
model = Profile
fields = ['first_name', 'last_name', 'phonenumber', 'country', 'avatar', 'address', 'gender',
'date_of_birth', 'pincode', 'language', 'location', 'website', 'bio']
widgets = {
'first_name': forms.TextInput(attrs={'placeholder': 'your first name'}),
'last_name': forms.TextInput(attrs={'placeholder': 'your last name'}),
'email': forms.EmailInput(attrs={'placeholder': 'you <EMAIL>'}),
'country': forms.TextInput(attrs={'placeholder': 'country you where you live'}),
'address': forms.TextInput(attrs={'placeholder': 'your address where you live'}),
'pincode': forms.TextInput(attrs={'placeholder': 'pincode'}),
'language': forms.TextInput(attrs={'placeholder': 'language'}),
'location': forms.TextInput(attrs={'placeholder': 'location'}),
'bio': forms.TextInput(attrs={'placeholder': 'about you'}),
'website': forms.TextInput(attrs={'placeholder': 'your website url e.g. https://your_website.com'}),
}
| [
"django.core.exceptions.ValidationError",
"django.forms.ChoiceField",
"django.forms.TextInput",
"django.forms.DateInput",
"django.contrib.auth.forms.ReadOnlyPasswordHashField",
"django.forms.EmailInput",
"django.forms.ValidationError",
"phonenumber_field.widgets.PhoneNumberPrefixWidget",
"django.forms.CharField"
] | [((652, 713), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""Password"""', 'widget': 'forms.PasswordInput'}), "(label='Password', widget=forms.PasswordInput)\n", (667, 713), False, 'from django import forms\n'), ((731, 805), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""Password confirmation"""', 'widget': 'forms.PasswordInput'}), "(label='Password confirmation', widget=forms.PasswordInput)\n", (746, 805), False, 'from django import forms\n'), ((2320, 2347), 'django.contrib.auth.forms.ReadOnlyPasswordHashField', 'ReadOnlyPasswordHashField', ([], {}), '()\n', (2345, 2347), False, 'from django.contrib.auth.forms import ReadOnlyPasswordHashField\n'), ((2869, 2964), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Gender"""', 'choices': 'GENDER', 'widget': 'forms.RadioSelect', 'required': '(False)'}), "(label='Gender', choices=GENDER, widget=forms.RadioSelect,\n required=False)\n", (2886, 2964), False, 'from django import forms\n'), ((1141, 1197), 'django.forms.ValidationError', 'forms.ValidationError', (['"""This username is already taken."""'], {}), "('This username is already taken.')\n", (1162, 1197), False, 'from django import forms\n'), ((1772, 1830), 'django.forms.ValidationError', 'forms.ValidationError', (['f"""Email {email} is already in use."""'], {}), "(f'Email {email} is already in use.')\n", (1793, 1830), False, 'from django import forms\n'), ((1480, 1520), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Passwords don\'t match"""'], {}), '("Passwords don\'t match")\n', (1495, 1520), False, 'from django.core.exceptions import ValidationError\n'), ((3015, 3054), 'django.forms.DateInput', 'forms.DateInput', ([], {'attrs': "{'type': 'date'}"}), "(attrs={'type': 'date'})\n", (3030, 3054), False, 'from django import forms\n'), ((3137, 3174), 'phonenumber_field.widgets.PhoneNumberPrefixWidget', 'PhoneNumberPrefixWidget', ([], {'initial': '"""IN"""'}), "(initial='IN')\n", (3160, 3174), False, 'from phonenumber_field.widgets import PhoneNumberPrefixWidget\n'), ((3466, 3523), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'your first name'}"}), "(attrs={'placeholder': 'your first name'})\n", (3481, 3523), False, 'from django import forms\n'), ((3551, 3607), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'your last name'}"}), "(attrs={'placeholder': 'your last name'})\n", (3566, 3607), False, 'from django import forms\n'), ((3631, 3685), 'django.forms.EmailInput', 'forms.EmailInput', ([], {'attrs': "{'placeholder': 'you <EMAIL>'}"}), "(attrs={'placeholder': 'you <EMAIL>'})\n", (3647, 3685), False, 'from django import forms\n'), ((3711, 3779), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'country you where you live'}"}), "(attrs={'placeholder': 'country you where you live'})\n", (3726, 3779), False, 'from django import forms\n'), ((3805, 3874), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'your address where you live'}"}), "(attrs={'placeholder': 'your address where you live'})\n", (3820, 3874), False, 'from django import forms\n'), ((3900, 3949), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'pincode'}"}), "(attrs={'placeholder': 'pincode'})\n", (3915, 3949), False, 'from django import forms\n'), ((3976, 4026), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'language'}"}), "(attrs={'placeholder': 'language'})\n", (3991, 4026), False, 
'from django import forms\n'), ((4053, 4103), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'location'}"}), "(attrs={'placeholder': 'location'})\n", (4068, 4103), False, 'from django import forms\n'), ((4125, 4176), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'about you'}"}), "(attrs={'placeholder': 'about you'})\n", (4140, 4176), False, 'from django import forms\n'), ((4202, 4294), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'your website url e.g. https://your_website.com'}"}), "(attrs={'placeholder':\n 'your website url e.g. https://your_website.com'})\n", (4217, 4294), False, 'from django import forms\n')] |
"""
Copyright 2016 <NAME>, <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from abc import ABCMeta, abstractmethod
from django.core import validators
from django.db import models
alpha_numeric_validator = validators.RegexValidator(r'^[0-9a-zA-Z]+$', 'Only alphanumeric characters are allowed.')
rfid_validator = alpha_numeric_validator
class Model(models.Model):
"""
Base class for all models.
"""
__metaclass__ = ABCMeta
class Meta:
abstract = True
def __repr__(self):
fields_string = ', '.join(['%s:"%s"' % (field.name, getattr(self, field.name)) for field in self._meta.fields])
return '<%s(%s)>' % (self.__class__._meta.object_name, fields_string)
def __str__(self):
return str(self.pk)
@abstractmethod
def get_recursive_pks(self):
"""
Returns a list of primary keys of all recursive parents.
Used to determine the URL of an object.
"""
pass
class Residence(Model):
"""
Represents a residence.
"""
rfid = models.CharField(primary_key=True, max_length=100, validators=[rfid_validator])
class Meta:
ordering = ('rfid',)
def get_recursive_pks(self):
pks = [self.pk]
return pks
class User(Model):
"""
Represents a user.
"""
imei = models.CharField(primary_key=True, max_length=100, validators=[alpha_numeric_validator])
name = models.CharField(max_length=100)
residence = models.ForeignKey('Residence', related_name='users')
class Meta:
ordering = ('imei',)
def get_recursive_pks(self):
pks = self.residence.get_recursive_pks()
pks.append(self.pk)
return pks
class Room(Model):
"""
Represents a room.
"""
# id is automatically generated if no other primary_key is defined
name = models.CharField(max_length=100)
residence = models.ForeignKey('Residence', related_name='rooms')
class Meta:
ordering = ('name',)
def get_recursive_pks(self):
pks = self.residence.get_recursive_pks()
pks.append(self.pk)
return pks
class Thermostat(Model):
"""
Represents a thermostat.
"""
rfid = models.CharField(primary_key=True, max_length=100, validators=[rfid_validator])
room = models.ForeignKey('Room', related_name='thermostats')
name = models.CharField(max_length=100, blank=False)
class Meta:
ordering = ('rfid',)
def get_recursive_pks(self):
pks = self.room.get_recursive_pks()
pks.append(self.pk)
return pks
class Temperature(Model):
"""
Represents a temperature.
"""
datetime = models.DateTimeField(primary_key=True)
value = models.FloatField()
thermostat = models.ForeignKey('Thermostat', related_name='temperatures')
class Meta:
ordering = ('datetime',)
def get_recursive_pks(self):
pks = self.thermostat.get_recursive_pks()
assert (self.pk == self.datetime)
pks.append(self.datetime.isoformat())
return pks
class ThermostatMetaEntry(Model):
"""
    Represents a thermostat meta entry containing signal strength, uptime and battery level.
"""
id = models.AutoField(primary_key=True)
datetime = models.DateTimeField()
rssi = models.IntegerField(null=True)
uptime = models.IntegerField(null=True)
battery = models.IntegerField(null=True)
thermostat = models.ForeignKey('Thermostat', related_name='meta_entries')
class Meta:
unique_together = ('thermostat', 'datetime')
ordering = ('datetime',)
def get_recursive_pks(self):
pks = self.thermostat.get_recursive_pks()
pks.append(self.pk)
return pks
class Device(Model):
"""
Base class for a physical device with an RFID number and MAC address.
"""
__metaclass__ = ABCMeta
rfid = models.CharField(primary_key=True, max_length=100, validators=[rfid_validator])
mac = models.CharField(max_length=17, unique=True)
class Meta:
abstract = True
def get_recursive_pks(self):
return [self.pk]
class RaspberryDevice(Device):
"""
Represents a physical Raspberry Pi device.
"""
@property
def residence(self):
residences = Residence.objects.filter(rfid=self.rfid)
assert (0 <= len(residences) <= 1)
if len(residences) > 0:
return residences[0]
else:
return None
@property
def thermostat_devices(self):
"""
:return: Thermostat devices associated to the Raspberry Pi.
"""
residence = self.residence
if residence is None:
return None
rooms = Room.objects.filter(residence=residence)
room_pks = [room.pk for room in rooms]
thermostats = Thermostat.objects.filter(room__in=room_pks)
thermostat_rfids = [thermostat.rfid for thermostat in thermostats]
thermostat_devices = ThermostatDevice.objects.filter(rfid__in=thermostat_rfids)
return thermostat_devices
class ThermostatDevice(Device):
"""
Represents a physical thermostat device.
"""
@property
def thermostat(self):
thermostats = Thermostat.objects.filter(rfid=self.rfid)
assert (0 <= len(thermostats) <= 1)
if len(thermostats) > 0:
return thermostats[0]
else:
return None
class TimetableEntry(Model):
"""
Base class for a weekly timetable entry.
"""
__metaclass__ = ABCMeta
MONDAY = 0
TUESDAY = 1
WEDNESDAY = 2
THURSDAY = 3
FRIDAY = 4
SATURDAY = 5
SUNDAY = 6
DAY_IN_WEEK_CHOICES = [
(MONDAY, 'Monday'),
(TUESDAY, 'Tuesday'),
(WEDNESDAY, 'Wednesday'),
(THURSDAY, 'Thursday'),
(FRIDAY, 'Friday'),
(SATURDAY, 'Saturday'),
(SUNDAY, 'Sunday'),
]
day = models.CharField(max_length=3, choices=DAY_IN_WEEK_CHOICES)
time = models.TimeField()
class Meta:
abstract = True
class HeatingTableEntry(TimetableEntry):
"""
Represents an entry of a heating schedule.
"""
class Meta:
unique_together = ('day', 'time', 'thermostat')
ordering = ('day', 'time')
temperature = models.FloatField(validators=[validators.MinValueValidator(5), validators.MaxValueValidator(30)])
thermostat = models.ForeignKey(Thermostat, related_name='heating_table_entries')
def get_recursive_pks(self):
pks = self.thermostat.get_recursive_pks()
pks.append(self.pk)
return pks
class OccupancyPredictionEntry(TimetableEntry):
"""
Represents an user occupancy prediction entry.
This is a stub and is intended to be used in future work.
"""
class Meta:
unique_together = ('day', 'time', 'user')
ordering = ('day', 'time')
user = models.ForeignKey(User)
def get_recursive_pks(self):
pks = self.user.get_recursive_pks()
pks.append(self.pk)
return pks
| [
"django.db.models.TimeField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.core.validators.MinValueValidator",
"django.db.models.FloatField",
"django.db.models.AutoField",
"django.db.models.IntegerField",
"django.core.validators.RegexValidator",
"django.db.models.DateTimeField",
"django.core.validators.MaxValueValidator"
] | [((697, 789), 'django.core.validators.RegexValidator', 'validators.RegexValidator', (['"""^[0-9a-zA-Z]+$"""', '"""Only alphanumeric characters are allowed."""'], {}), "('^[0-9a-zA-Z]+$',\n 'Only alphanumeric characters are allowed.')\n", (722, 789), False, 'from django.core import validators\n'), ((1533, 1612), 'django.db.models.CharField', 'models.CharField', ([], {'primary_key': '(True)', 'max_length': '(100)', 'validators': '[rfid_validator]'}), '(primary_key=True, max_length=100, validators=[rfid_validator])\n', (1549, 1612), False, 'from django.db import models\n'), ((1807, 1900), 'django.db.models.CharField', 'models.CharField', ([], {'primary_key': '(True)', 'max_length': '(100)', 'validators': '[alpha_numeric_validator]'}), '(primary_key=True, max_length=100, validators=[\n alpha_numeric_validator])\n', (1823, 1900), False, 'from django.db import models\n'), ((1907, 1939), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1923, 1939), False, 'from django.db import models\n'), ((1956, 2008), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Residence"""'], {'related_name': '"""users"""'}), "('Residence', related_name='users')\n", (1973, 2008), False, 'from django.db import models\n'), ((2327, 2359), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (2343, 2359), False, 'from django.db import models\n'), ((2376, 2428), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Residence"""'], {'related_name': '"""rooms"""'}), "('Residence', related_name='rooms')\n", (2393, 2428), False, 'from django.db import models\n'), ((2688, 2767), 'django.db.models.CharField', 'models.CharField', ([], {'primary_key': '(True)', 'max_length': '(100)', 'validators': '[rfid_validator]'}), '(primary_key=True, max_length=100, validators=[rfid_validator])\n', (2704, 2767), False, 'from django.db import models\n'), ((2779, 2832), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Room"""'], {'related_name': '"""thermostats"""'}), "('Room', related_name='thermostats')\n", (2796, 2832), False, 'from django.db import models\n'), ((2844, 2889), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(False)'}), '(max_length=100, blank=False)\n', (2860, 2889), False, 'from django.db import models\n'), ((3150, 3188), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (3170, 3188), False, 'from django.db import models\n'), ((3201, 3220), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (3218, 3220), False, 'from django.db import models\n'), ((3238, 3298), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Thermostat"""'], {'related_name': '"""temperatures"""'}), "('Thermostat', related_name='temperatures')\n", (3255, 3298), False, 'from django.db import models\n'), ((3694, 3728), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (3710, 3728), False, 'from django.db import models\n'), ((3744, 3766), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (3764, 3766), False, 'from django.db import models\n'), ((3778, 3808), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (3797, 3808), False, 'from django.db import models\n'), ((3822, 3852), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', 
(3841, 3852), False, 'from django.db import models\n'), ((3867, 3897), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (3886, 3897), False, 'from django.db import models\n'), ((3915, 3975), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Thermostat"""'], {'related_name': '"""meta_entries"""'}), "('Thermostat', related_name='meta_entries')\n", (3932, 3975), False, 'from django.db import models\n'), ((4363, 4442), 'django.db.models.CharField', 'models.CharField', ([], {'primary_key': '(True)', 'max_length': '(100)', 'validators': '[rfid_validator]'}), '(primary_key=True, max_length=100, validators=[rfid_validator])\n', (4379, 4442), False, 'from django.db import models\n'), ((4453, 4497), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(17)', 'unique': '(True)'}), '(max_length=17, unique=True)\n', (4469, 4497), False, 'from django.db import models\n'), ((6379, 6438), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3)', 'choices': 'DAY_IN_WEEK_CHOICES'}), '(max_length=3, choices=DAY_IN_WEEK_CHOICES)\n', (6395, 6438), False, 'from django.db import models\n'), ((6450, 6468), 'django.db.models.TimeField', 'models.TimeField', ([], {}), '()\n', (6466, 6468), False, 'from django.db import models\n'), ((6857, 6924), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Thermostat'], {'related_name': '"""heating_table_entries"""'}), "(Thermostat, related_name='heating_table_entries')\n", (6874, 6924), False, 'from django.db import models\n'), ((7348, 7371), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {}), '(User)\n', (7365, 7371), False, 'from django.db import models\n'), ((6772, 6803), 'django.core.validators.MinValueValidator', 'validators.MinValueValidator', (['(5)'], {}), '(5)\n', (6800, 6803), False, 'from django.core import validators\n'), ((6805, 6837), 'django.core.validators.MaxValueValidator', 'validators.MaxValueValidator', (['(30)'], {}), '(30)\n', (6833, 6837), False, 'from django.core import validators\n')] |
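The get_recursive_pks chain above is described as the basis for building an object's URL. A hypothetical helper (not part of the models module) shows how the primary-key chain could be turned into a path:
def object_url(obj):
    # e.g. a Temperature yields /<residence_rfid>/<room_pk>/<thermostat_rfid>/<datetime_iso>/
    return '/' + '/'.join(str(pk) for pk in obj.get_recursive_pks()) + '/'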
"""
Adapted Code from https://github.com/AtsushiSakai/PythonRobotics
"""
from functools import partial
from estimation import ExtendedKalmanFilter, KalmanFilter
import jax.numpy as jnp
import numpy as np
from jax import jacfwd, jit
import matplotlib.pyplot as plt
from src.environments import DiffDriveRobot
from util import plot, History
def _motion_model(x, u, w, dt=0.01):
return jnp.array([
x[0] + x[3] * jnp.cos(x[2]) * dt,
x[1] + x[3] * jnp.sin(x[2]) * dt,
x[2] + u[1] * dt,
u[0]
]) + w
def _observation_model(x, v):
H = jnp.array([[1, 0, 0, 0],
[0, 1, 0, 0]])
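    # H selects the observed (x, y) position from the state.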
return H @ x + v
def controller(x):
v = 1.0 # [m/s]
yawrate = 0.1 # [rad/s]
u = jnp.array([v, yawrate])
return u
def main():
env = DiffDriveRobot()
z = env.reset()
x_hat = jnp.zeros(4) # [x, y, yaw, velocity]
x_cov = jnp.eye(4)
Q = jnp.diag(jnp.array([
0.1, # variance of location on x-axis
0.1, # variance of location on y-axis
jnp.deg2rad(0.5), # variance of yaw angle
0.1 # variance of velocity
])) ** 2 # predict state covariance
R = jnp.diag(jnp.array([2, 2])) ** 2 # Observation x,y position covariance
filter = ExtendedKalmanFilter(
process_model=_motion_model,
observation_model=_observation_model,
process_noise_covariance=Q,
observation_noise_covariance=R
)
filter = jit(filter)
history = History()
history.update(x=x_hat, z=z, x_hat=x_hat, covariance=x_cov)
for t in range(5000):
print(t)
u = controller(x_hat) # [velocity, yaw_rate]
obs, _, _, info = env.step(u)
x_hat, x_cov = filter(x=x_hat, P=x_cov, u=u, z=obs)
history.update(x=info['x'], z=obs, x_hat=x_hat, covariance=x_cov)
if t % 100 == 0:
plot(data=history)
if __name__ == '__main__':
main() | [
"estimation.ExtendedKalmanFilter",
"jax.numpy.array",
"jax.numpy.deg2rad",
"src.environments.DiffDriveRobot",
"jax.jit",
"jax.numpy.eye",
"util.History",
"util.plot",
"jax.numpy.cos",
"jax.numpy.zeros",
"jax.numpy.sin"
] | [((577, 616), 'jax.numpy.array', 'jnp.array', (['[[1, 0, 0, 0], [0, 1, 0, 0]]'], {}), '([[1, 0, 0, 0], [0, 1, 0, 0]])\n', (586, 616), True, 'import jax.numpy as jnp\n'), ((735, 758), 'jax.numpy.array', 'jnp.array', (['[v, yawrate]'], {}), '([v, yawrate])\n', (744, 758), True, 'import jax.numpy as jnp\n'), ((795, 811), 'src.environments.DiffDriveRobot', 'DiffDriveRobot', ([], {}), '()\n', (809, 811), False, 'from src.environments import DiffDriveRobot\n'), ((844, 856), 'jax.numpy.zeros', 'jnp.zeros', (['(4)'], {}), '(4)\n', (853, 856), True, 'import jax.numpy as jnp\n'), ((893, 903), 'jax.numpy.eye', 'jnp.eye', (['(4)'], {}), '(4)\n', (900, 903), True, 'import jax.numpy as jnp\n'), ((1250, 1406), 'estimation.ExtendedKalmanFilter', 'ExtendedKalmanFilter', ([], {'process_model': '_motion_model', 'observation_model': '_observation_model', 'process_noise_covariance': 'Q', 'observation_noise_covariance': 'R'}), '(process_model=_motion_model, observation_model=\n _observation_model, process_noise_covariance=Q,\n observation_noise_covariance=R)\n', (1270, 1406), False, 'from estimation import ExtendedKalmanFilter, KalmanFilter\n'), ((1449, 1460), 'jax.jit', 'jit', (['filter'], {}), '(filter)\n', (1452, 1460), False, 'from jax import jacfwd, jit\n'), ((1476, 1485), 'util.History', 'History', ([], {}), '()\n', (1483, 1485), False, 'from util import plot, History\n'), ((1173, 1190), 'jax.numpy.array', 'jnp.array', (['[2, 2]'], {}), '([2, 2])\n', (1182, 1190), True, 'import jax.numpy as jnp\n'), ((1857, 1875), 'util.plot', 'plot', ([], {'data': 'history'}), '(data=history)\n', (1861, 1875), False, 'from util import plot, History\n'), ((1036, 1052), 'jax.numpy.deg2rad', 'jnp.deg2rad', (['(0.5)'], {}), '(0.5)\n', (1047, 1052), True, 'import jax.numpy as jnp\n'), ((426, 439), 'jax.numpy.cos', 'jnp.cos', (['x[2]'], {}), '(x[2])\n', (433, 439), True, 'import jax.numpy as jnp\n'), ((468, 481), 'jax.numpy.sin', 'jnp.sin', (['x[2]'], {}), '(x[2])\n', (475, 481), True, 'import jax.numpy as jnp\n')] |
"""
==================================================================
Compare LogisticRegression solver with sklearn's liblinear backend
==================================================================
"""
import time
import warnings
import numpy as np
from numpy.linalg import norm
import matplotlib.pyplot as plt
from sklearn import linear_model
from libsvmdata import fetch_libsvm
from celer import LogisticRegression
warnings.filterwarnings("ignore", message="Objective did not converge")
warnings.filterwarnings("ignore", message="Liblinear failed to converge")
X, y = fetch_libsvm("news20.binary")
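# Smallest C for which the l1-penalised logistic regression solution is non-zero.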
C_min = 2 / norm(X.T @ y, ord=np.inf)
C = 20 * C_min
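# Primal objective: logistic loss plus (1/C) * l1 penalty, evaluated for both solvers' iterates.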
def pobj_logreg(w):
return np.sum(np.log(1 + np.exp(-y * (X @ w)))) + 1. / C * norm(w, ord=1)
pobj_celer = []
t_celer = []
for n_iter in range(10):
t0 = time.time()
clf = LogisticRegression(
C=C, solver="celer-pn", max_iter=n_iter, tol=0).fit(X, y)
t_celer.append(time.time() - t0)
w_celer = clf.coef_.ravel()
pobj_celer.append(pobj_logreg(w_celer))
pobj_celer = np.array(pobj_celer)
pobj_libl = []
t_libl = []
for n_iter in np.arange(0, 50, 10):
t0 = time.time()
clf = linear_model.LogisticRegression(
C=C, solver="liblinear", penalty='l1', fit_intercept=False,
max_iter=n_iter, random_state=0, tol=1e-10).fit(X, y)
t_libl.append(time.time() - t0)
w_libl = clf.coef_.ravel()
pobj_libl.append(pobj_logreg(w_libl))
pobj_libl = np.array(pobj_libl)
p_star = min(pobj_celer.min(), pobj_libl.min())
plt.close("all")
fig = plt.figure(figsize=(4, 2), constrained_layout=True)
plt.semilogy(t_celer, pobj_celer - p_star, label="Celer-PN")
plt.semilogy(t_libl, pobj_libl - p_star, label="liblinear")
plt.legend()
plt.xlabel("Time (s)")
plt.ylabel("objective suboptimality")
plt.show(block=False)
| [
"celer.LogisticRegression",
"matplotlib.pyplot.show",
"libsvmdata.fetch_libsvm",
"warnings.filterwarnings",
"matplotlib.pyplot.close",
"matplotlib.pyplot.legend",
"time.time",
"matplotlib.pyplot.figure",
"sklearn.linear_model.LogisticRegression",
"numpy.arange",
"numpy.array",
"numpy.linalg.norm",
"numpy.exp",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.semilogy",
"matplotlib.pyplot.xlabel"
] | [((427, 498), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'message': '"""Objective did not converge"""'}), "('ignore', message='Objective did not converge')\n", (450, 498), False, 'import warnings\n'), ((499, 572), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'message': '"""Liblinear failed to converge"""'}), "('ignore', message='Liblinear failed to converge')\n", (522, 572), False, 'import warnings\n'), ((581, 610), 'libsvmdata.fetch_libsvm', 'fetch_libsvm', (['"""news20.binary"""'], {}), "('news20.binary')\n", (593, 610), False, 'from libsvmdata import fetch_libsvm\n'), ((1066, 1086), 'numpy.array', 'np.array', (['pobj_celer'], {}), '(pobj_celer)\n', (1074, 1086), True, 'import numpy as np\n'), ((1131, 1151), 'numpy.arange', 'np.arange', (['(0)', '(50)', '(10)'], {}), '(0, 50, 10)\n', (1140, 1151), True, 'import numpy as np\n'), ((1469, 1488), 'numpy.array', 'np.array', (['pobj_libl'], {}), '(pobj_libl)\n', (1477, 1488), True, 'import numpy as np\n'), ((1539, 1555), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (1548, 1555), True, 'import matplotlib.pyplot as plt\n'), ((1562, 1613), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(4, 2)', 'constrained_layout': '(True)'}), '(figsize=(4, 2), constrained_layout=True)\n', (1572, 1613), True, 'import matplotlib.pyplot as plt\n'), ((1614, 1674), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (['t_celer', '(pobj_celer - p_star)'], {'label': '"""Celer-PN"""'}), "(t_celer, pobj_celer - p_star, label='Celer-PN')\n", (1626, 1674), True, 'import matplotlib.pyplot as plt\n'), ((1675, 1734), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (['t_libl', '(pobj_libl - p_star)'], {'label': '"""liblinear"""'}), "(t_libl, pobj_libl - p_star, label='liblinear')\n", (1687, 1734), True, 'import matplotlib.pyplot as plt\n'), ((1735, 1747), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1745, 1747), True, 'import matplotlib.pyplot as plt\n'), ((1748, 1770), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (1758, 1770), True, 'import matplotlib.pyplot as plt\n'), ((1771, 1808), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""objective suboptimality"""'], {}), "('objective suboptimality')\n", (1781, 1808), True, 'import matplotlib.pyplot as plt\n'), ((1809, 1830), 'matplotlib.pyplot.show', 'plt.show', ([], {'block': '(False)'}), '(block=False)\n', (1817, 1830), True, 'import matplotlib.pyplot as plt\n'), ((624, 649), 'numpy.linalg.norm', 'norm', (['(X.T @ y)'], {'ord': 'np.inf'}), '(X.T @ y, ord=np.inf)\n', (628, 649), False, 'from numpy.linalg import norm\n'), ((831, 842), 'time.time', 'time.time', ([], {}), '()\n', (840, 842), False, 'import time\n'), ((1162, 1173), 'time.time', 'time.time', ([], {}), '()\n', (1171, 1173), False, 'import time\n'), ((750, 764), 'numpy.linalg.norm', 'norm', (['w'], {'ord': '(1)'}), '(w, ord=1)\n', (754, 764), False, 'from numpy.linalg import norm\n'), ((853, 919), 'celer.LogisticRegression', 'LogisticRegression', ([], {'C': 'C', 'solver': '"""celer-pn"""', 'max_iter': 'n_iter', 'tol': '(0)'}), "(C=C, solver='celer-pn', max_iter=n_iter, tol=0)\n", (871, 919), False, 'from celer import LogisticRegression\n'), ((958, 969), 'time.time', 'time.time', ([], {}), '()\n', (967, 969), False, 'import time\n'), ((1184, 1323), 'sklearn.linear_model.LogisticRegression', 'linear_model.LogisticRegression', ([], {'C': 'C', 'solver': '"""liblinear"""', 'penalty': '"""l1"""', 'fit_intercept': 
'(False)', 'max_iter': 'n_iter', 'random_state': '(0)', 'tol': '(1e-10)'}), "(C=C, solver='liblinear', penalty='l1',\n fit_intercept=False, max_iter=n_iter, random_state=0, tol=1e-10)\n", (1215, 1323), False, 'from sklearn import linear_model\n'), ((1365, 1376), 'time.time', 'time.time', ([], {}), '()\n', (1374, 1376), False, 'import time\n'), ((716, 736), 'numpy.exp', 'np.exp', (['(-y * (X @ w))'], {}), '(-y * (X @ w))\n', (722, 736), True, 'import numpy as np\n')] |
#! /usr/bin/env python
import testbase
import unittest
import samweb_client
import samweb_cli
import time,os
defname = 'test-project'
class TestDefinition(testbase.SamdevTest):
def test_descDefinition_DefNotFound(self):
fake_def_name = 'doesnotexist_%d' % time.time()
self.assertRaises(samweb_client.exceptions.DefinitionNotFound, self.samweb.descDefinition, fake_def_name)
self.assertRaises(samweb_client.exceptions.DefinitionNotFound, self.samweb.descDefinitionDict, fake_def_name)
def test_descDefinition(self):
output = self.samweb.descDefinition(defname)
assert defname in output
d = self.samweb.descDefinitionDict(defname)
assert d['defname'] == defname
def test_snapshot(self):
output = self.samweb.takeSnapshot(defname)
self.assertEquals(int(output),1)
def test_create_rename_delete_definition(self):
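        # Unique definition name from PID and timestamp so repeated test runs don't collide.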
defname = 'samweb_client_test_def_%s_%d' % (os.getpid(), int(time.time()))
self.samweb.createDefinition(defname, "file_name dummy", "illingwo", "samdev")
d = self.samweb.descDefinition(defname)
assert defname in d
d = self.samweb.descDefinitionDict(defname)
assert defname == d["defname"]
defname2 = defname + '_2'
self.samweb.modifyDefinition(defname,defname=defname2)
d = self.samweb.descDefinitionDict(defname2)
assert defname2 == d["defname"]
self.samweb.deleteDefinition(defname2)
class TestDefinitionCommands(testbase.SAMWebCmdTest):
def test_takeSnapshot(self):
cmdline = '-e samdev take-snapshot %s' % defname
self.check_cmd_return(cmdline.split())
assert "1\n" == self.stdout
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"os.getpid",
"time.time"
] | [((1746, 1761), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1759, 1761), False, 'import unittest\n'), ((271, 282), 'time.time', 'time.time', ([], {}), '()\n', (280, 282), False, 'import time, os\n'), ((956, 967), 'os.getpid', 'os.getpid', ([], {}), '()\n', (965, 967), False, 'import time, os\n'), ((973, 984), 'time.time', 'time.time', ([], {}), '()\n', (982, 984), False, 'import time, os\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""Minibatching utilities."""
import itertools
import operator
import os
import pickle
import numpy as np
import torch
from sklearn.utils import shuffle
from torch.autograd import Variable
# Change to python3+.
# from itertools import zip
class DataIterator(object):
"""Data Iterator."""
@staticmethod
def _trim_vocab(vocab, vocab_size):
"""Discard start, end, pad and unk tokens if already present.
Args:
vocab(list): Vocabulary.
vocab_size(int): The size of the vocabulary.
Returns:
word2id(list): Word to index list.
id2word(list): Index to word list.
"""
if "<s>" in vocab:
del vocab["<s>"]
if "<pad>" in vocab:
del vocab["<pad>"]
if "</s>" in vocab:
del vocab["</s>"]
if "<unk>" in vocab:
del vocab["<unk>"]
word2id = {"<s>": 0, "<pad>": 1, "</s>": 2, "<unk>": 3}
id2word = {0: "<s>", 1: "<pad>", 2: "</s>", 3: "<unk>"}
sorted_word2id = sorted(
vocab.items(), key=operator.itemgetter(1), reverse=True
)
if vocab_size != -1:
sorted_words = [x[0] for x in sorted_word2id[:vocab_size]]
else:
sorted_words = [x[0] for x in sorted_word2id]
for ind, word in enumerate(sorted_words):
word2id[word] = ind + 4
for ind, word in enumerate(sorted_words):
id2word[ind + 4] = word
return word2id, id2word
def construct_vocab(
self, sentences, vocab_size, lowercase=False, charlevel=False
):
"""Create vocabulary.
Args:
sentences(list): The list of sentences.
vocab_size(int): The size of vocabulary.
            lowercase(bool): Whether to lowercase the sentences.
            charlevel(bool): Whether to build a character-level vocabulary
                (skip whitespace tokenization).
Returns:
word2id(list): Word to index list.
id2word(list): Index to word list.
"""
vocab = {}
for sentence in sentences:
if isinstance(sentence, str):
if lowercase:
sentence = sentence.lower()
if not charlevel:
sentence = sentence.split()
for word in sentence:
if word not in vocab:
vocab[word] = 1
else:
vocab[word] += 1
word2id, id2word = self._trim_vocab(vocab, vocab_size)
return word2id, id2word
class BufferedDataIterator(DataIterator):
"""Multi Parallel corpus data iterator."""
def __init__(
self,
src,
trg,
src_vocab_size,
trg_vocab_size,
tasknames,
save_dir,
buffer_size=1e6,
lowercase=False,
seed=0,
):
"""Initialize params.
Args:
src(list): source dataset.
trg(list): target dataset.
src_vocab_size(int): The size of source vocab.
trg_vocab_size(int): The size of target vocab.
tasknames(list): The list of task names.
save_dir(str): The saving dir.
buffer_size(float): Buffer size.
            lowercase(bool): Whether to lowercase the data.
"""
self.seed = seed
self.fname_src = src
self.fname_trg = trg
self.src_vocab_size = src_vocab_size
self.trg_vocab_size = trg_vocab_size
self.tasknames = tasknames
self.save_dir = save_dir
self.buffer_size = buffer_size
self.lowercase = lowercase
# Open a list of file pointers to all the files.
self.f_src = [
open(fname, "r", encoding="utf-8") for fname in self.fname_src
]
self.f_trg = [
open(fname, "r", encoding="utf-8") for fname in self.fname_trg
]
# Initialize dictionaries that contain sentences & word mapping dicts
self.src = [
{"data": [], "word2id": None, "id2word": None}
for i in range(len(self.fname_src))
]
self.trg = [
{"data": [], "word2id": None, "id2word": None}
for i in range(len(self.fname_trg))
]
self.build_vocab()
"""Reset file pointers to the start after reading the file to
build vocabularies."""
for idx in range(len(self.src)):
self._reset_filepointer(idx)
for idx in range(len(self.src)):
self.fetch_buffer(idx)
def _reset_filepointer(self, idx):
"""Reset file pointer.
Args:
idx(int): Index used to reset file pointer.
"""
self.f_src[idx] = open(self.fname_src[idx], "r", encoding="utf-8")
self.f_trg[idx] = open(self.fname_trg[idx], "r", encoding="utf-8")
def fetch_buffer(self, idx, reset=True):
"""Fetch sentences from the file into the buffer.
Args:
idx(int): Index used to fetch the sentences.
            reset(bool): Whether to clear the current buffer before refilling it.
"""
# Reset the contents of the current buffer.
if reset:
self.src[idx]["data"] = []
self.trg[idx]["data"] = []
# Populate buffer
for src, trg in zip(self.f_src[idx], self.f_trg[idx]):
if len(self.src[idx]["data"]) == self.buffer_size:
break
if self.lowercase:
self.src[idx]["data"].append(src.lower().split())
self.trg[idx]["data"].append(trg.lower().split())
else:
self.src[idx]["data"].append(src.split())
self.trg[idx]["data"].append(trg.split())
# Sort sentences by decreasing length (hacky bucketing)
self.src[idx]["data"], self.trg[idx]["data"] = zip(
*sorted(
zip(self.src[idx]["data"], self.trg[idx]["data"]),
key=lambda x: len(x[0]),
reverse=True,
)
)
"""If buffer isn't full after reading the contents of the file,
cycle around. """
if len(self.src[idx]["data"]) < self.buffer_size:
assert len(self.src[idx]["data"]) == len(self.trg[idx]["data"])
# Cast things to list to avoid issue with calling .append above
self.src[idx]["data"] = list(self.src[idx]["data"])
self.trg[idx]["data"] = list(self.trg[idx]["data"])
self._reset_filepointer(idx)
self.fetch_buffer(idx, reset=False)
def build_vocab(self):
"""Build a memory efficient vocab."""
# Construct common source vocab.
# Check if save directory exists.
if not os.path.exists(self.save_dir):
raise ValueError("Could not find save dir : %s" % self.save_dir)
# Check if a cached vocab file exists.
if os.path.exists(os.path.join(self.save_dir, "src_vocab.pkl")):
vocab = pickle.load(
open(os.path.join(self.save_dir, "src_vocab.pkl"), "rb")
)
word2id, id2word = vocab["word2id"], vocab["id2word"]
# If not, compute the vocab from scratch and store a cache.
else:
word2id, id2word = self.construct_vocab(
itertools.chain.from_iterable(self.f_src),
self.src_vocab_size,
self.lowercase,
)
pickle.dump(
{"word2id": word2id, "id2word": id2word},
open(os.path.join(self.save_dir, "src_vocab.pkl"), "wb"),
)
for corpus in self.src:
corpus["word2id"], corpus["id2word"] = word2id, id2word
# Do the same for the target vocabulary.
if os.path.exists(os.path.join(self.save_dir, "trg_vocab.pkl")):
vocab = pickle.load(
open(os.path.join(self.save_dir, "trg_vocab.pkl"), "rb")
)
for idx, (corpus, fname) in enumerate(zip(self.trg, self.f_trg)):
word2id, id2word = (
vocab[self.tasknames[idx]]["word2id"],
vocab[self.tasknames[idx]]["id2word"],
)
corpus["word2id"], corpus["id2word"] = word2id, id2word
else:
trg_vocab_dump = {}
for idx, (corpus, fname) in enumerate(zip(self.trg, self.f_trg)):
word2id, id2word = self.construct_vocab(
fname, self.trg_vocab_size, self.lowercase
)
corpus["word2id"], corpus["id2word"] = word2id, id2word
trg_vocab_dump[self.tasknames[idx]] = {}
trg_vocab_dump[self.tasknames[idx]]["word2id"] = word2id
trg_vocab_dump[self.tasknames[idx]]["id2word"] = id2word
pickle.dump(
trg_vocab_dump,
open(os.path.join(self.save_dir, "trg_vocab.pkl"), "wb"),
)
def shuffle_dataset(self, idx):
"""Shuffle current buffer."""
self.src[idx]["data"], self.trg[idx]["data"] = shuffle(
self.src[idx]["data"],
self.trg[idx]["data"],
random_state=self.seed,
)
def get_parallel_minibatch(
self, corpus_idx, index, batch_size, max_len_src, max_len_trg
):
"""Prepare minibatch.
Args:
corpus_idx(int): Corpus Index.
            index(int): Start offset of the minibatch within the buffer.
batch_size(int): Batch Size.
            max_len_src(int): Max length for source.
            max_len_trg(int): Max length for target.
Returns: minibatch of src-trg pairs(dict).
"""
src_lines = [
["<s>"] + line[: max_len_src - 2] + ["</s>"]
for line in self.src[corpus_idx]["data"][
index : index + batch_size
]
]
trg_lines = [
["<s>"] + line[: max_len_trg - 2] + ["</s>"]
for line in self.trg[corpus_idx]["data"][
index : index + batch_size
]
]
"""Sort sentences by decreasing length within a minibatch for
`torch.nn.utils.packed_padded_sequence`"""
src_lens = [len(line) for line in src_lines]
sorted_indices = np.argsort(src_lens)[::-1]
sorted_src_lines = [src_lines[idx] for idx in sorted_indices]
sorted_trg_lines = [trg_lines[idx] for idx in sorted_indices]
sorted_src_lens = [len(line) for line in sorted_src_lines]
sorted_trg_lens = [len(line) for line in sorted_trg_lines]
max_src_len = max(sorted_src_lens)
max_trg_len = max(sorted_trg_lens)
# Map words to indices
input_lines_src = [
[
self.src[corpus_idx]["word2id"][w]
if w in self.src[corpus_idx]["word2id"]
else self.src[corpus_idx]["word2id"]["<unk>"]
for w in line
]
+ [self.src[corpus_idx]["word2id"]["<pad>"]]
* (max_src_len - len(line))
for line in sorted_src_lines
]
input_lines_trg = [
[
self.trg[corpus_idx]["word2id"][w]
if w in self.trg[corpus_idx]["word2id"]
else self.trg[corpus_idx]["word2id"]["<unk>"]
for w in line[:-1]
]
+ [self.trg[corpus_idx]["word2id"]["<pad>"]]
* (max_trg_len - len(line))
for line in sorted_trg_lines
]
output_lines_trg = [
[
self.trg[corpus_idx]["word2id"][w]
if w in self.trg[corpus_idx]["word2id"]
else self.trg[corpus_idx]["word2id"]["<unk>"]
for w in line[1:]
]
+ [self.trg[corpus_idx]["word2id"]["<pad>"]]
* (max_trg_len - len(line))
for line in sorted_trg_lines
]
# Cast lists to torch tensors
input_lines_src = Variable(torch.LongTensor(input_lines_src)).cuda()
input_lines_trg = Variable(torch.LongTensor(input_lines_trg)).cuda()
output_lines_trg = Variable(torch.LongTensor(output_lines_trg)).cuda()
sorted_src_lens = (
Variable(torch.LongTensor(sorted_src_lens), volatile=True)
.squeeze()
.cuda()
)
# Return minibatch of src-trg pairs
return {
"input_src": input_lines_src,
"input_trg": input_lines_trg,
"output_trg": output_lines_trg,
"src_lens": sorted_src_lens,
"type": "seq2seq",
}
class NLIIterator(DataIterator):
"""Data iterator for tokenized NLI datasets."""
def __init__(
self, train, dev, test, vocab_size, lowercase=True, vocab=None, seed=0
):
"""Initialize params.
Each of train/dev/test is a tab-separate file of the form
premise \t hypothesis \t label.
Args:
train(torch.Tensor): Training dataset.
dev(torch.Tensor): Validation dataset.
test(torch.Tensor): Testing dataset.
vocab_size(int): The size of the vocabulary.
            lowercase(bool): Whether to lowercase the dataset.
            vocab(Union[bytes, str]): Path to a pickled vocabulary file (optional).
"""
self.seed = seed
self.train = train
self.dev = dev
self.test = test
self.vocab_size = vocab_size
self.lowercase = lowercase
self.vocab = vocab
self.train_lines = [
line.strip().lower().split("\t")
for line in open(self.train, encoding="utf-8")
]
self.dev_lines = [
line.strip().lower().split("\t")
for line in open(self.dev, encoding="utf-8")
]
self.test_lines = [
line.strip().lower().split("\t")
for line in open(self.test, encoding="utf-8")
]
if self.vocab is not None:
# binary mode doesn't take an encoding argument
self.vocab = pickle.load(open(self.vocab, "rb"))
self.word2id = self.vocab["word2id"]
self.id2word = self.vocab["id2word"]
self.vocab_size = len(self.word2id)
else:
self.word2id, self.id2word = self.construct_vocab(
[x[0] for x in self.train_lines]
+ [x[1] for x in self.train_lines],
self.vocab_size,
lowercase=self.lowercase,
)
# Label text to class mapping.
self.text2label = {"entailment": 0, "neutral": 1, "contradiction": 2}
self.shuffle_dataset()
def shuffle_dataset(self):
"""Shuffle training data."""
self.train_lines = shuffle(self.train_lines, random_state=self.seed)
def get_parallel_minibatch(self, index, batch_size, sent_type="train"):
"""Prepare minibatch.
Args:
index(int): The index for line.
batch_size(int): Batch size.
sent_type(str): Type of dataset.
Returns:
dict for batch training.
"""
if sent_type == "train":
lines = self.train_lines
elif sent_type == "dev":
lines = self.dev_lines
else:
lines = self.test_lines
sent1 = [
["<s>"] + line[0].split() + ["</s>"]
for line in lines[index : index + batch_size]
]
sent2 = [
["<s>"] + line[1].split() + ["</s>"]
for line in lines[index : index + batch_size]
]
labels = [
self.text2label[line[2]]
for line in lines[index : index + batch_size]
]
sent1_lens = [len(line) for line in sent1]
sorted_sent1_indices = np.argsort(sent1_lens)[::-1]
sorted_sent1_lines = [sent1[idx] for idx in sorted_sent1_indices]
rev_sent1 = np.argsort(sorted_sent1_indices)
sent2_lens = [len(line) for line in sent2]
sorted_sent2_indices = np.argsort(sent2_lens)[::-1]
sorted_sent2_lines = [sent2[idx] for idx in sorted_sent2_indices]
rev_sent2 = np.argsort(sorted_sent2_indices)
sorted_sent1_lens = [len(line) for line in sorted_sent1_lines]
sorted_sent2_lens = [len(line) for line in sorted_sent2_lines]
max_sent1_len = max(sorted_sent1_lens)
max_sent2_len = max(sorted_sent2_lens)
sent1 = [
[
self.word2id[w] if w in self.word2id else self.word2id["<unk>"]
for w in line
]
+ [self.word2id["<pad>"]] * (max_sent1_len - len(line))
for line in sorted_sent1_lines
]
sent2 = [
[
self.word2id[w] if w in self.word2id else self.word2id["<unk>"]
for w in line
]
+ [self.word2id["<pad>"]] * (max_sent2_len - len(line))
for line in sorted_sent2_lines
]
sent1 = Variable(torch.LongTensor(sent1)).cuda()
sent2 = Variable(torch.LongTensor(sent2)).cuda()
labels = Variable(torch.LongTensor(labels)).cuda()
sent1_lens = (
Variable(torch.LongTensor(sorted_sent1_lens), requires_grad=False)
.squeeze()
.cuda()
)
sent2_lens = (
Variable(torch.LongTensor(sorted_sent2_lens), requires_grad=False)
.squeeze()
.cuda()
)
rev_sent1 = (
Variable(torch.LongTensor(rev_sent1), requires_grad=False)
.squeeze()
.cuda()
)
rev_sent2 = (
Variable(torch.LongTensor(rev_sent2), requires_grad=False)
.squeeze()
.cuda()
)
return {
"sent1": sent1,
"sent2": sent2,
"sent1_lens": sent1_lens,
"sent2_lens": sent2_lens,
"rev_sent1": rev_sent1,
"rev_sent2": rev_sent2,
"labels": labels,
"type": "nli",
}
def get_validation_minibatch(
src, trg, index, batch_size, src_word2id, trg_word2id
):
"""Prepare minibatch.
Args:
src(list): source data.
trg(list): target data.
index(int): index for the file.
batch_size(int): batch size.
src_word2id(list): Word to index for source.
trg_word2id(list): Word to index for target.
Returns:
Dict for seq2seq model.
"""
src_lines = [
["<s>"] + line + ["</s>"] for line in src[index : index + batch_size]
]
trg_lines = [
["<s>"] + line + ["</s>"] for line in trg[index : index + batch_size]
]
src_lens = [len(line) for line in src_lines]
sorted_indices = np.argsort(src_lens)[::-1]
sorted_src_lines = [src_lines[idx] for idx in sorted_indices]
sorted_trg_lines = [trg_lines[idx] for idx in sorted_indices]
sorted_src_lens = [len(line) for line in sorted_src_lines]
sorted_trg_lens = [len(line) for line in sorted_trg_lines]
max_src_len = max(sorted_src_lens)
max_trg_len = max(sorted_trg_lens)
input_lines_src = [
[src_word2id[w] if w in src else src_word2id["<unk>"] for w in line]
+ [src_word2id["<pad>"]] * (max_src_len - len(line))
for line in sorted_src_lines
]
input_lines_trg = [
[
trg_word2id[w] if w in trg_word2id else trg_word2id["<unk>"]
for w in line[:-1]
]
+ [trg_word2id["<pad>"]] * (max_trg_len - len(line))
for line in sorted_trg_lines
]
output_lines_trg = [
[
trg_word2id[w] if w in trg_word2id else trg_word2id["<unk>"]
for w in line[1:]
]
+ [trg_word2id["<pad>"]] * (max_trg_len - len(line))
for line in sorted_trg_lines
]
    # For pytorch 0.4
with torch.no_grad():
input_lines_src = Variable(torch.LongTensor(input_lines_src)).cuda()
input_lines_trg = Variable(torch.LongTensor(input_lines_trg)).cuda()
output_lines_trg = Variable(torch.LongTensor(output_lines_trg)).cuda()
# sorted_src_lens = Variable(
# torch.LongTensor(sorted_src_lens)
# ).squeeze().cuda()
sorted_src_lens = (
Variable(torch.LongTensor(sorted_src_lens))
.view(len(sorted_src_lens))
.cuda()
)
return {
"input_src": input_lines_src,
"input_trg": input_lines_trg,
"output_trg": output_lines_trg,
"src_lens": sorted_src_lens,
"type": "seq2seq",
}
def compute_validation_loss(
config, model, train_iterator, criterion, task_idx, lowercase=False
):
"""Compute validation loss for a task.
Args:
config(dict): configuration list.
model(MultitaskModel): model.
train_iterator(BufferedDataIterator): Multi Parallel corpus data iterator.
criterion(nn.CrossEntropyLoss): criterion function for loss.
task_idx(int): Task index.
        lowercase(bool): Whether to lowercase the data.
Returns: float as the mean of the loss.
"""
val_src = config["data"]["paths"][task_idx]["val_src"]
val_trg = config["data"]["paths"][task_idx]["val_trg"]
if lowercase:
val_src = [
line.strip().lower().split()
for line in open(val_src, "r", encoding="utf-8")
]
val_trg = [
line.strip().lower().split()
for line in open(val_trg, "r", encoding="utf-8")
]
else:
val_src = [
line.strip().split()
for line in open(val_src, "r", encoding="utf-8")
]
val_trg = [
line.strip().split()
for line in open(val_trg, "r", encoding="utf-8")
]
batch_size = config["training"]["batch_size"]
losses = []
for j in range(0, len(val_src), batch_size):
minibatch = get_validation_minibatch(
val_src,
val_trg,
j,
batch_size,
train_iterator.src[task_idx]["word2id"],
train_iterator.trg[task_idx]["word2id"],
)
decoder_logit = model(minibatch, task_idx)
loss = criterion(
decoder_logit.contiguous().view(-1, decoder_logit.size(2)),
minibatch["output_trg"].contiguous().view(-1),
)
# losses.append(loss.data[0])
losses.append(loss.item())
return np.mean(losses)
# Original source: https://github.com/Maluuba/gensen
| [
"itertools.chain.from_iterable",
"torch.LongTensor",
"os.path.exists",
"numpy.argsort",
"numpy.mean",
"sklearn.utils.shuffle",
"torch.no_grad",
"os.path.join",
"operator.itemgetter"
] | [((22510, 22525), 'numpy.mean', 'np.mean', (['losses'], {}), '(losses)\n', (22517, 22525), True, 'import numpy as np\n'), ((9204, 9281), 'sklearn.utils.shuffle', 'shuffle', (["self.src[idx]['data']", "self.trg[idx]['data']"], {'random_state': 'self.seed'}), "(self.src[idx]['data'], self.trg[idx]['data'], random_state=self.seed)\n", (9211, 9281), False, 'from sklearn.utils import shuffle\n'), ((14833, 14882), 'sklearn.utils.shuffle', 'shuffle', (['self.train_lines'], {'random_state': 'self.seed'}), '(self.train_lines, random_state=self.seed)\n', (14840, 14882), False, 'from sklearn.utils import shuffle\n'), ((15993, 16025), 'numpy.argsort', 'np.argsort', (['sorted_sent1_indices'], {}), '(sorted_sent1_indices)\n', (16003, 16025), True, 'import numpy as np\n'), ((16232, 16264), 'numpy.argsort', 'np.argsort', (['sorted_sent2_indices'], {}), '(sorted_sent2_indices)\n', (16242, 16264), True, 'import numpy as np\n'), ((18831, 18851), 'numpy.argsort', 'np.argsort', (['src_lens'], {}), '(src_lens)\n', (18841, 18851), True, 'import numpy as np\n'), ((19940, 19955), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (19953, 19955), False, 'import torch\n'), ((6862, 6891), 'os.path.exists', 'os.path.exists', (['self.save_dir'], {}), '(self.save_dir)\n', (6876, 6891), False, 'import os\n'), ((7044, 7088), 'os.path.join', 'os.path.join', (['self.save_dir', '"""src_vocab.pkl"""'], {}), "(self.save_dir, 'src_vocab.pkl')\n", (7056, 7088), False, 'import os\n'), ((7901, 7945), 'os.path.join', 'os.path.join', (['self.save_dir', '"""trg_vocab.pkl"""'], {}), "(self.save_dir, 'trg_vocab.pkl')\n", (7913, 7945), False, 'import os\n'), ((10373, 10393), 'numpy.argsort', 'np.argsort', (['src_lens'], {}), '(src_lens)\n', (10383, 10393), True, 'import numpy as np\n'), ((15870, 15892), 'numpy.argsort', 'np.argsort', (['sent1_lens'], {}), '(sent1_lens)\n', (15880, 15892), True, 'import numpy as np\n'), ((16109, 16131), 'numpy.argsort', 'np.argsort', (['sent2_lens'], {}), '(sent2_lens)\n', (16119, 16131), True, 'import numpy as np\n'), ((1228, 1250), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (1247, 1250), False, 'import operator\n'), ((7428, 7469), 'itertools.chain.from_iterable', 'itertools.chain.from_iterable', (['self.f_src'], {}), '(self.f_src)\n', (7457, 7469), False, 'import itertools\n'), ((7145, 7189), 'os.path.join', 'os.path.join', (['self.save_dir', '"""src_vocab.pkl"""'], {}), "(self.save_dir, 'src_vocab.pkl')\n", (7157, 7189), False, 'import os\n'), ((7658, 7702), 'os.path.join', 'os.path.join', (['self.save_dir', '"""src_vocab.pkl"""'], {}), "(self.save_dir, 'src_vocab.pkl')\n", (7670, 7702), False, 'import os\n'), ((8002, 8046), 'os.path.join', 'os.path.join', (['self.save_dir', '"""trg_vocab.pkl"""'], {}), "(self.save_dir, 'trg_vocab.pkl')\n", (8014, 8046), False, 'import os\n'), ((9007, 9051), 'os.path.join', 'os.path.join', (['self.save_dir', '"""trg_vocab.pkl"""'], {}), "(self.save_dir, 'trg_vocab.pkl')\n", (9019, 9051), False, 'import os\n'), ((12090, 12123), 'torch.LongTensor', 'torch.LongTensor', (['input_lines_src'], {}), '(input_lines_src)\n', (12106, 12123), False, 'import torch\n'), ((12167, 12200), 'torch.LongTensor', 'torch.LongTensor', (['input_lines_trg'], {}), '(input_lines_trg)\n', (12183, 12200), False, 'import torch\n'), ((12245, 12279), 'torch.LongTensor', 'torch.LongTensor', (['output_lines_trg'], {}), '(output_lines_trg)\n', (12261, 12279), False, 'import torch\n'), ((17085, 17108), 'torch.LongTensor', 'torch.LongTensor', (['sent1'], {}), '(sent1)\n', 
(17101, 17108), False, 'import torch\n'), ((17142, 17165), 'torch.LongTensor', 'torch.LongTensor', (['sent2'], {}), '(sent2)\n', (17158, 17165), False, 'import torch\n'), ((17200, 17224), 'torch.LongTensor', 'torch.LongTensor', (['labels'], {}), '(labels)\n', (17216, 17224), False, 'import torch\n'), ((19992, 20025), 'torch.LongTensor', 'torch.LongTensor', (['input_lines_src'], {}), '(input_lines_src)\n', (20008, 20025), False, 'import torch\n'), ((20069, 20102), 'torch.LongTensor', 'torch.LongTensor', (['input_lines_trg'], {}), '(input_lines_trg)\n', (20085, 20102), False, 'import torch\n'), ((20147, 20181), 'torch.LongTensor', 'torch.LongTensor', (['output_lines_trg'], {}), '(output_lines_trg)\n', (20163, 20181), False, 'import torch\n'), ((12337, 12370), 'torch.LongTensor', 'torch.LongTensor', (['sorted_src_lens'], {}), '(sorted_src_lens)\n', (12353, 12370), False, 'import torch\n'), ((17277, 17312), 'torch.LongTensor', 'torch.LongTensor', (['sorted_sent1_lens'], {}), '(sorted_sent1_lens)\n', (17293, 17312), False, 'import torch\n'), ((17432, 17467), 'torch.LongTensor', 'torch.LongTensor', (['sorted_sent2_lens'], {}), '(sorted_sent2_lens)\n', (17448, 17467), False, 'import torch\n'), ((17586, 17613), 'torch.LongTensor', 'torch.LongTensor', (['rev_sent1'], {}), '(rev_sent1)\n', (17602, 17613), False, 'import torch\n'), ((17732, 17759), 'torch.LongTensor', 'torch.LongTensor', (['rev_sent2'], {}), '(rev_sent2)\n', (17748, 17759), False, 'import torch\n'), ((20354, 20387), 'torch.LongTensor', 'torch.LongTensor', (['sorted_src_lens'], {}), '(sorted_src_lens)\n', (20370, 20387), False, 'import torch\n')] |
# Copyright 2018 AT&T Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.common import utils
from tempest.lib import decorators
from patrole_tempest_plugin import rbac_rule_validation
from patrole_tempest_plugin.tests.api.network import rbac_base as base
class AvailabilityZoneExtRbacTest(base.BaseNetworkExtRbacTest):
@classmethod
def skip_checks(cls):
super(AvailabilityZoneExtRbacTest, cls).skip_checks()
if not utils.is_extension_enabled('availability_zone',
'network'):
msg = "network_availability_zone extension not enabled."
raise cls.skipException(msg)
@rbac_rule_validation.action(service="neutron",
rules=["get_availability_zone"])
@decorators.idempotent_id('3c521be8-c32e-11e8-a611-080027758b73')
def test_list_availability_zone_rbac(self):
"""List all available zones.
RBAC test for the neutron ``list_availability_zones``
function and the ``get_availability_zone`` policy
"""
admin_resources = (self.ntp_client.list_availability_zones()
["availability_zones"])
with self.override_role_and_validate_list(
admin_resources=admin_resources) as ctx:
ctx.resources = (self.ntp_client.list_availability_zones()
['availability_zones'])
| [
"tempest.lib.decorators.idempotent_id",
"patrole_tempest_plugin.rbac_rule_validation.action",
"tempest.common.utils.is_extension_enabled"
] | [((1234, 1313), 'patrole_tempest_plugin.rbac_rule_validation.action', 'rbac_rule_validation.action', ([], {'service': '"""neutron"""', 'rules': "['get_availability_zone']"}), "(service='neutron', rules=['get_availability_zone'])\n", (1261, 1313), False, 'from patrole_tempest_plugin import rbac_rule_validation\n'), ((1352, 1416), 'tempest.lib.decorators.idempotent_id', 'decorators.idempotent_id', (['"""3c521be8-c32e-11e8-a611-080027758b73"""'], {}), "('3c521be8-c32e-11e8-a611-080027758b73')\n", (1376, 1416), False, 'from tempest.lib import decorators\n'), ((1016, 1074), 'tempest.common.utils.is_extension_enabled', 'utils.is_extension_enabled', (['"""availability_zone"""', '"""network"""'], {}), "('availability_zone', 'network')\n", (1042, 1074), False, 'from tempest.common import utils\n')] |
# Solution of;
# Project Euler Problem 113: Non-bouncy numbers
# https://projecteuler.net/problem=113
#
# Working from left-to-right if no digit is exceeded by the digit to its left
# it is called an increasing number; for example, 134468. Similarly if no
# digit is exceeded by the digit to its right it is called a decreasing
# number; for example, 66420. We shall call a positive integer that is neither
# increasing nor decreasing a "bouncy" number; for example, 155349. As n
# increases, the proportion of bouncy numbers below n increases such that
# there are only 12951 numbers below one-million that are not bouncy and only
# 277032 non-bouncy numbers below 1010. How many numbers below a googol
# (10100) are not bouncy?
#
# by lcsm29 http://github.com/lcsm29/project-euler
import timed
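# Placeholder solver: the non-bouncy counting logic is not implemented in this snippet.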
def dummy(n):
pass
if __name__ == '__main__':
n = 1000
i = 10000
prob_id = 113
timed.caller(dummy, n, i, prob_id)
| [
"timed.caller"
] | [((909, 943), 'timed.caller', 'timed.caller', (['dummy', 'n', 'i', 'prob_id'], {}), '(dummy, n, i, prob_id)\n', (921, 943), False, 'import timed\n')] |
import bot
if __name__ == '__main__':
zonbot = bot.Bot('!', pm_help = True)
zonbot.run(zonbot.token)
| [
"bot.Bot"
] | [((49, 75), 'bot.Bot', 'bot.Bot', (['"""!"""'], {'pm_help': '(True)'}), "('!', pm_help=True)\n", (56, 75), False, 'import bot\n')] |
import argparse
import os.path
import oyaml as yaml
def parse_constraint(con_str):
# sample: (0,1):(1,1,1)
agents_str, coefficients_str = con_str.split(':')
x, y = agents_str.replace('(', '').replace(')', '').split(',')
a, b, c = coefficients_str.replace('(', '').replace(')', '').split(',')
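    # Emit the same quadratic constraint twice, once written from each agent's perspective.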
c1 = f'{a} * var{x}^2 + {b} * var{x} * var{y} + {c} * var{y}^2'
c2 = f'{c} * var{y}^2 + {b} * var{y} * var{x} + {a} * var{x}^2'
return c1, c2
def main(args):
lines_4_config = {}
with open(args.file, 'r') as f:
line = f.readline()
while line:
kv = line.split('=')
lines_4_config[kv[0]] = kv[1].strip()
line = f.readline()
yaml_dict = {
'name': args.name,
'objective': 'min',
}
# domains
domains = {}
domain_info = lines_4_config['domains'].split(' ')
agent_ids = []
for domain_str in domain_info:
agent_id, dvals = domain_str.split(':')
domains[f'd{agent_id}'] = {
'values': [int(v) for v in dvals.split(',')],
}
agent_ids.append(agent_id)
yaml_dict['domains'] = domains
# variables
variables = {}
for agent in agent_ids:
variables[f'var{agent}'] = {
'domain': f'd{agent}',
}
yaml_dict['variables'] = variables
# constraints
constraints = {}
for con in lines_4_config['cons'].split('>'):
eq1, eq2 = parse_constraint(con)
constraints[f'c{len(constraints)}'] = {
'type': 'intention',
'function': eq1,
}
constraints[f'c{len(constraints)}'] = {
'type': 'intention',
'function': eq2,
}
yaml_dict['constraints'] = constraints
# agents
agents = [f'a{agent_id}' for agent_id in agent_ids]
yaml_dict['agents'] = agents
# export to yaml
exported_file = args.file.split('/')[-1] + '.yaml'
yaml_file = os.path.join('./yaml-files', exported_file)
with open(yaml_file, 'w') as f:
yaml.dump(yaml_dict, f)
print(f'Simulation config file saved: {yaml_file}')
# create scenario file
events = [{
'id': 'w',
'delay': 1,
}]
scenarios = {'events': events}
for i, cmd in enumerate(lines_4_config['commands'].split(' ')):
cmd, agent = cmd.split(':')
if cmd == 'remove_agent': # only agent removal is supported by pydcop
events.append({
'id': f'e{i}',
'actions': {
'type': cmd,
'agent': f'a{agent}'
}
})
events.append({
'id': 'w',
'delay': 1,
})
exported_file = args.file.split('/')[-1] + '-scenario.yaml'
yaml_file = os.path.join('./yaml-files', exported_file)
with open(yaml_file, 'w') as f:
yaml.dump(scenarios, f)
print(f'Simulation scenario file saved: {yaml_file}')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert DynaGraph sim file to pyDCOP compatible yaml config')
parser.add_argument('-f', '--file', type=str, required=True, help='sim file path')
parser.add_argument('-n', '--name', type=str, required=True, help='DCOP name')
args = parser.parse_args()
main(args)
| [
"oyaml.dump",
"argparse.ArgumentParser"
] | [((3013, 3116), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Convert DynaGraph sim file to pyDCOP compatible yaml config"""'}), "(description=\n 'Convert DynaGraph sim file to pyDCOP compatible yaml config')\n", (3036, 3116), False, 'import argparse\n'), ((2032, 2055), 'oyaml.dump', 'yaml.dump', (['yaml_dict', 'f'], {}), '(yaml_dict, f)\n', (2041, 2055), True, 'import oyaml as yaml\n'), ((2885, 2908), 'oyaml.dump', 'yaml.dump', (['scenarios', 'f'], {}), '(scenarios, f)\n', (2894, 2908), True, 'import oyaml as yaml\n')] |
# -*- coding: utf-8 -*-
'''
This code calculates changes in the ratio between different population-weighted GDP deciles and quintiles
by <NAME> (<EMAIL>)
'''
import pandas as pd
import numpy as np
from netCDF4 import Dataset
import _env
datasets = _env.datasets
scenarios = _env.scenarios
gdp_year = 2010
sgdp_year = str(gdp_year)
idir_temp = _env.odir_root + '/sim_temperature/'
####summarize global and regional GDP changes####
gdp_year = 2010
sgdp_year = str(gdp_year)
boot_methods = ['country-lag0','country-lag1','country-lag5','year','year-blocks']
itbl_gdp_baseline = pd.read_csv(_env.odir_root + 'basic_stats' + '/Country_Basic_Stats.csv')
itbl_gdp_baseline.sort_values([sgdp_year + '_gdpcap'],inplace=True)
tot_pop = itbl_gdp_baseline[sgdp_year + '_pop'].sum()
#itbl_gdp_baseline['2010_pop_ratio'] = itbl_gdp_baseline['2010_pop']/tot_pop
itbl_gdp_baseline[sgdp_year + '_gdpsum'] = 0
#itbl_gdp_baseline['2010_popw_gdp'] = 0
itbl_gdp_baseline[sgdp_year + '_popsum'] = 0
#itbl_gdp_baseline['2010_pop_ratio_sum'] = 0
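# Cumulative GDP and population sums over countries sorted (ascending) by GDP per capita.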
for irow, row in enumerate(itbl_gdp_baseline.index):
if irow == 0:
itbl_gdp_baseline.loc[row,sgdp_year + '_gdpsum'] = itbl_gdp_baseline.loc[row,sgdp_year + '_gdp']
itbl_gdp_baseline.loc[row, sgdp_year + '_popsum'] = itbl_gdp_baseline.loc[row,sgdp_year + '_pop']
else:
itbl_gdp_baseline.loc[row,sgdp_year + '_gdpsum'] = itbl_gdp_baseline[sgdp_year + '_gdpsum'].iloc[irow-1] + itbl_gdp_baseline.loc[row,sgdp_year + '_gdp']
itbl_gdp_baseline.loc[row, sgdp_year + '_popsum'] = itbl_gdp_baseline[sgdp_year + '_popsum'].iloc[irow-1] + itbl_gdp_baseline.loc[row,sgdp_year + '_pop']
itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum'] = itbl_gdp_baseline[sgdp_year + '_popsum']/tot_pop
#deciles (<=10% and >=90%)
deciles = {}
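# Bottom decile = countries holding the poorest 10% of world population; top decile = the richest 10%.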
ind10 = np.where(itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum']<=0.1)[0]
deciles[10] = itbl_gdp_baseline.iloc[ind10].copy()
ind90 = np.where(itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum']>=0.9)[0]
deciles[90] = itbl_gdp_baseline.iloc[ind90].copy()
#quintiles (<=20% and >=80%)
ind20 = np.where(itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum']<=0.2)[0]
deciles[20] = itbl_gdp_baseline.iloc[ind20].copy()
ind80 = np.where(itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum']>=0.8)[0]
deciles[80] = itbl_gdp_baseline.iloc[ind80].copy()
for ds in datasets:
scens = ['No-Aerosol']
if ds == 'ERA-Interim':
scens = ['No-Aerosol','No-Sulfate']
idir_gdp = _env.odir_root + '/gdp_' + ds + '/'
odir_summary = _env.odir_root + '/summary_' + ds + '/'
_env.mkdirs(odir_summary)
for scen in scens:
writer = pd.ExcelWriter(odir_summary + 'Deciles_and_Quintile_ratio_changes_'+ds+'_'+scen+'_Burke.xls')
otbls_ctry_GDP_stat = {}
otbls = {}
otbl_ineq = pd.DataFrame(index = boot_methods,columns = ['median_ratio','5_ratio','95_ratio','10_ratio','90_ratio','probability_reduced'])
otbls['deciles'] = otbl_ineq.copy()
otbls['quintiles'] = otbl_ineq.copy()
for b_m in boot_methods:
inc_gdp = Dataset(idir_gdp + 'GDP_Changes_Burke_' + b_m + '_' + str(gdp_year) + '_'+ds+'_'+scen+'.nc')
imtrx_gdp = inc_gdp['GDP'][:]
dec_var = {}
dec_base = {}
for perc in [10,20,80,90]:
dec = deciles[perc].copy()
dec_pop_tot = dec[sgdp_year + '_pop'].sum()
dec_gdp_tot = dec[sgdp_year + '_gdp'].sum()
dec_base[perc] = dec_gdp_tot/dec_pop_tot
ind_ctry = dec.index
imtrx_dec = imtrx_gdp[:,ind_ctry,:]
imtrx_dec_sum = dec_gdp_tot-(imtrx_dec.data).sum(axis=1)
# print(perc, np.median(imtrx_dec_sum),dec_gdp_tot,np.median(imtrx_dec_sum)/dec_gdp_tot)
dec_gdpcap = imtrx_dec_sum/dec_pop_tot
dec_var[perc] = dec_gdpcap.copy()
dec_diff = (dec_var[90]/dec_var[10]-dec_base[90]/dec_base[10])/(dec_base[90]/dec_base[10])*100
quin_diff = (dec_var[80]/dec_var[20] - dec_base[80]/dec_base[20])/(dec_base[80]/dec_base[20])*100
otbls['deciles'].loc[b_m,'median_ratio'] = np.median(dec_diff)
otbls['deciles'].loc[b_m,'5_ratio'] = np.percentile(dec_diff,5)
otbls['deciles'].loc[b_m,'95_ratio'] = np.percentile(dec_diff,95)
otbls['deciles'].loc[b_m,'10_ratio'] = np.percentile(dec_diff,10)
otbls['deciles'].loc[b_m,'90_ratio'] = np.percentile(dec_diff,90)
otbls['deciles'].loc[b_m,'probability_reduced'] = len(dec_diff[dec_diff<0])/np.size(dec_diff)
otbls['quintiles'].loc[b_m,'median_ratio'] = np.median(quin_diff)
otbls['quintiles'].loc[b_m,'5_ratio'] = np.percentile(quin_diff,5)
otbls['quintiles'].loc[b_m,'95_ratio'] = np.percentile(quin_diff,95)
otbls['quintiles'].loc[b_m,'10_ratio'] = np.percentile(quin_diff,10)
otbls['quintiles'].loc[b_m,'90_ratio'] = np.percentile(quin_diff,90)
otbls['quintiles'].loc[b_m,'probability_reduced'] = len(quin_diff[quin_diff<0])/np.size(quin_diff)
otbls['deciles'].to_excel(writer,'deciles')
otbls['quintiles'].to_excel(writer,'quintiles')
writer.save()
| [
"pandas.DataFrame",
"numpy.size",
"_env.mkdirs",
"pandas.read_csv",
"numpy.median",
"numpy.percentile",
"numpy.where",
"pandas.ExcelWriter"
] | [((599, 671), 'pandas.read_csv', 'pd.read_csv', (["(_env.odir_root + 'basic_stats' + '/Country_Basic_Stats.csv')"], {}), "(_env.odir_root + 'basic_stats' + '/Country_Basic_Stats.csv')\n", (610, 671), True, 'import pandas as pd\n'), ((1834, 1898), 'numpy.where', 'np.where', (["(itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum'] <= 0.1)"], {}), "(itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum'] <= 0.1)\n", (1842, 1898), True, 'import numpy as np\n'), ((1962, 2026), 'numpy.where', 'np.where', (["(itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum'] >= 0.9)"], {}), "(itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum'] >= 0.9)\n", (1970, 2026), True, 'import numpy as np\n'), ((2120, 2184), 'numpy.where', 'np.where', (["(itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum'] <= 0.2)"], {}), "(itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum'] <= 0.2)\n", (2128, 2184), True, 'import numpy as np\n'), ((2246, 2310), 'numpy.where', 'np.where', (["(itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum'] >= 0.8)"], {}), "(itbl_gdp_baseline[sgdp_year + '_pop_ratio_sum'] >= 0.8)\n", (2254, 2310), True, 'import numpy as np\n'), ((2618, 2643), '_env.mkdirs', '_env.mkdirs', (['odir_summary'], {}), '(odir_summary)\n', (2629, 2643), False, 'import _env\n'), ((2690, 2795), 'pandas.ExcelWriter', 'pd.ExcelWriter', (["(odir_summary + 'Deciles_and_Quintile_ratio_changes_' + ds + '_' + scen +\n '_Burke.xls')"], {}), "(odir_summary + 'Deciles_and_Quintile_ratio_changes_' + ds +\n '_' + scen + '_Burke.xls')\n", (2704, 2795), True, 'import pandas as pd\n'), ((2865, 2997), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'boot_methods', 'columns': "['median_ratio', '5_ratio', '95_ratio', '10_ratio', '90_ratio',\n 'probability_reduced']"}), "(index=boot_methods, columns=['median_ratio', '5_ratio',\n '95_ratio', '10_ratio', '90_ratio', 'probability_reduced'])\n", (2877, 2997), True, 'import pandas as pd\n'), ((4387, 4406), 'numpy.median', 'np.median', (['dec_diff'], {}), '(dec_diff)\n', (4396, 4406), True, 'import numpy as np\n'), ((4457, 4483), 'numpy.percentile', 'np.percentile', (['dec_diff', '(5)'], {}), '(dec_diff, 5)\n', (4470, 4483), True, 'import numpy as np\n'), ((4534, 4561), 'numpy.percentile', 'np.percentile', (['dec_diff', '(95)'], {}), '(dec_diff, 95)\n', (4547, 4561), True, 'import numpy as np\n'), ((4625, 4652), 'numpy.percentile', 'np.percentile', (['dec_diff', '(10)'], {}), '(dec_diff, 10)\n', (4638, 4652), True, 'import numpy as np\n'), ((4703, 4730), 'numpy.percentile', 'np.percentile', (['dec_diff', '(90)'], {}), '(dec_diff, 90)\n', (4716, 4730), True, 'import numpy as np\n'), ((4906, 4926), 'numpy.median', 'np.median', (['quin_diff'], {}), '(quin_diff)\n', (4915, 4926), True, 'import numpy as np\n'), ((4979, 5006), 'numpy.percentile', 'np.percentile', (['quin_diff', '(5)'], {}), '(quin_diff, 5)\n', (4992, 5006), True, 'import numpy as np\n'), ((5059, 5087), 'numpy.percentile', 'np.percentile', (['quin_diff', '(95)'], {}), '(quin_diff, 95)\n', (5072, 5087), True, 'import numpy as np\n'), ((5153, 5181), 'numpy.percentile', 'np.percentile', (['quin_diff', '(10)'], {}), '(quin_diff, 10)\n', (5166, 5181), True, 'import numpy as np\n'), ((5234, 5262), 'numpy.percentile', 'np.percentile', (['quin_diff', '(90)'], {}), '(quin_diff, 90)\n', (5247, 5262), True, 'import numpy as np\n'), ((4818, 4835), 'numpy.size', 'np.size', (['dec_diff'], {}), '(dec_diff)\n', (4825, 4835), True, 'import numpy as np\n'), ((5354, 5372), 'numpy.size', 'np.size', (['quin_diff'], {}), '(quin_diff)\n', (5361, 5372), True, 'import numpy as np\n')] |
__author__ = "<NAME>"
__copyright__ = "2021, Hamilton-Jacobi Analysis in Python"
__license__ = "Molux Licence"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Completed"
import argparse
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import os, sys
from os.path import abspath, dirname, exists, join
sys.path.append(dirname(dirname(abspath(__file__))))
from Grids import createGrid
from InitialConditions import *
from Visualization import *
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
"""
Test Implicit Functions
Lekan Molu, September 07, 2021
"""
parser = argparse.ArgumentParser(description='2D Plotter for Various Implicit Initial Conditions for the Value Function')
parser.add_argument('--delay', '-dl', type=float, default=3, help='pause time between successive updates of plots' )
args = parser.parse_args()
def levelset_viz(g, ax, fig, mesh, title='', savedict=None, fontdict=None, fc='c', ec='k'):
"""
Simultaneously visualize the level sets of a value function
on a 1X3 chart:
Chart 131: 2D Value function as a surface mesh
Chart 132: 2D Value function as colored contour levels
    Chart 133: 2D Value function zero level set as a cyan contour.
Author: <NAME>, October 29, 2021
"""
ax[0].plot_surface(g.xs[0], g.xs[1], mesh, rstride=1, cstride=1,
cmap='viridis', edgecolor=ec, facecolor=fc)
ax[0].set_xlabel('X', fontdict=fontdict)
ax[0].set_ylabel('Y', fontdict=fontdict)
ax[0].set_zlabel('Z', fontdict=fontdict)
ax[0].set_title(f'{title}', fontdict=fontdict)
ax[1].contourf(g.xs[0], g.xs[1], mesh, colors=fc)
ax[1].set_xlabel('X', fontdict=fontdict)
ax[1].set_title(f'Contours', fontdict=fontdict)
ax[2].contour(g.xs[0], g.xs[1], mesh, levels=0, colors=fc)
ax[2].set_xlabel('X', fontdict=fontdict)
ax[2].set_ylabel('Y', fontdict=fontdict)
ax[2].grid('on')
ax[2].set_title(f'2D Zero level set', fontdict=fontdict)
fig.tight_layout()
if savedict["save"]:
plt.savefig(join(savedict["savepath"],savedict["savename"]),
bbox_inches='tight',facecolor='None')
fig.canvas.draw()
fig.canvas.flush_events()
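# 51x51 grid on [-2, 2]^2 shared by all the implicit-surface examples below.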
def get_grid():
g2min = -2*np.ones((2, 1),dtype=np.float64)
g2max = +2*np.ones((2, 1),dtype=np.float64)
g2N = 51*np.ones((2, 1),dtype=np.int64)
g2 = createGrid(g2min, g2max, g2N, process=True)
return g2
def main(savedict):
# generate signed distance function for cylinder
center = np.array(([[-.5,.5]]), np.float64).T
g2 = get_grid()
# shapes generation
    axis_align, radius = 2, 1
    cylinder = shapeCylinder(g2, axis_align, center, radius)
sphere = shapeSphere(g2, center, radius=1)
sphere2 = shapeSphere(g2, center=np.array(([-0., 0.])).T, radius=1)
rect = shapeRectangleByCorners(g2)
rect2 = shapeRectangleByCorners(g2, np.array([[ -1.0, -np.inf, ]]).T, np.array([[ np.inf, -1.0 ]]).T, )
rect3 = shapeRectangleByCorners(g2, np.array([[ -1.0, -0.5, ]]).T, np.array([[ .5, 1.0 ]]).T)
rect4 = shapeRectangleByCenter(g2, np.array([[ -1.0, -0.5, ]]).T, np.array([[ .5, 1.0 ]]).T)
# Set Ops
sphere_union = shapeUnion(sphere, sphere2)
rect_union = shapeUnion(rect, rect3)
rect_comp = shapeComplement(rect2)
sph_rect_diff = shapeDifference(sphere, rect)
fig = plt.figure(figsize=(16, 9))
gs = gridspec.GridSpec(1, 3, fig)
ax = [plt.subplot(gs[i], projection='3d') for i in range(2)] + [plt.subplot(gs[2])]
savedict["savename"] = "cylinder_2d.jpg"
levelset_viz(g2, ax, fig, cylinder, title='Cylinder', savedict=savedict, fontdict={'fontsize':12, 'fontweight':'bold'})
plt.pause(args.delay)
savedict["savename"] = "sphere_2d.jpg"
plt.clf()
ax = [plt.subplot(gs[i], projection='3d') for i in range(2)] + [plt.subplot(gs[2])]
levelset_viz(g2, ax, fig, sphere, title='Sphere', savedict=savedict, fontdict={'fontsize':12, 'fontweight':'bold'})
plt.pause(args.delay)
savedict["savename"]="sphere2_2d.jpg"
plt.clf()
ax = [plt.subplot(gs[i], projection='3d') for i in range(2)] + [plt.subplot(gs[2])]
levelset_viz(g2, ax, fig, sphere2, title='Sphere, C=(-.5, .5)', savedict=savedict, fontdict={'fontsize':12, 'fontweight':'bold'})
plt.pause(args.delay)
savedict["savename"]="rect_2d.jpg"
plt.clf()
ax = [plt.subplot(gs[i], projection='3d') for i in range(2)] + [plt.subplot(gs[2])]
levelset_viz(g2, ax, fig, rect, title='Unit Square@Origin', savedict=savedict, fontdict={'fontsize':12, 'fontweight':'bold'})
plt.pause(args.delay)
savedict["savename"]="rect2_2d.jpg"
plt.clf()
ax = [plt.subplot(gs[i], projection='3d') for i in range(2)] + [plt.subplot(gs[2])]
levelset_viz(g2, ax, fig, rect2, title='Rect by Corners', savedict=savedict, fontdict={'fontsize':12, 'fontweight':'bold'})
plt.pause(args.delay)
savedict["savename"]="rect3_2d.jpg"
plt.clf()
ax = [plt.subplot(gs[i], projection='3d') for i in range(2)] + [plt.subplot(gs[2])]
levelset_viz(g2, ax, fig, rect3, title='RectCorner: [1,-0.5], W: [0.5,1.0]', savedict=savedict, fontdict={'fontsize':12, 'fontweight':'bold'})
plt.pause(args.delay)
savedict["savename"]="rect4_2d.jpg"
plt.clf()
ax = [plt.subplot(gs[i], projection='3d') for i in range(2)] + [plt.subplot(gs[2])]
levelset_viz(g2, ax, fig, rect4, title='RectCent: [1,-0.5], W: [0.5,1.0]', savedict=savedict, fontdict={'fontsize':12, 'fontweight':'bold'})
plt.pause(args.delay)
# Show Unions
savedict["savename"]="sphere_union_2d.jpg"
plt.clf()
ax = [plt.subplot(gs[i], projection='3d') for i in range(2)] + [plt.subplot(gs[2])]
levelset_viz(g2, ax, fig, sphere_union, title='Spheres+Sphere', savedict=savedict, fontdict={'fontsize':12, 'fontweight':'bold'})
plt.pause(args.delay)
savedict["savename"]="rect_union_2d.jpg"
plt.clf()
ax = [plt.subplot(gs[i], projection='3d') for i in range(2)] + [plt.subplot(gs[2])]
levelset_viz(g2, ax, fig, rect_union, title='Union of 2 Rects', savedict=savedict, fontdict={'fontsize':12, 'fontweight':'bold'})
plt.pause(args.delay)
savedict["savename"]="rect_comp_2d.jpg"
plt.clf()
ax = [plt.subplot(gs[i], projection='3d') for i in range(2)] + [plt.subplot(gs[2])]
levelset_viz(g2, ax, fig, rect_comp, title='Rect Complement', savedict=savedict, fontdict={'fontsize':12, 'fontweight':'bold'})
plt.pause(args.delay)
savedict["savename"]="sph_rect_diff_2d.jpg"
plt.clf()
ax = [plt.subplot(gs[i], projection='3d') for i in range(2)] + [plt.subplot(gs[2])]
levelset_viz(g2, ax, fig, sph_rect_diff, title='Sphere-Rect Diff', savedict=savedict, fontdict={'fontsize':12, 'fontweight':'bold'})
plt.pause(args.delay)
if __name__ == '__main__':
savedict = dict(save=True, savename='cyl_2d.jpg',\
savepath=join("..", "jpeg_dumps"))
plt.ion()
main(savedict)
| [
"matplotlib.pyplot.subplot",
"os.path.abspath",
"argparse.ArgumentParser",
"os.path.join",
"matplotlib.pyplot.clf",
"numpy.ones",
"Grids.createGrid",
"matplotlib.pyplot.ion",
"matplotlib.pyplot.figure",
"numpy.array",
"matplotlib.gridspec.GridSpec",
"matplotlib.pyplot.pause"
] | [((625, 747), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""2D Plotter for Various Implicit Initial Conditions for the Value Function"""'}), "(description=\n '2D Plotter for Various Implicit Initial Conditions for the Value Function'\n )\n", (648, 747), False, 'import argparse\n'), ((2266, 2309), 'Grids.createGrid', 'createGrid', (['g2min', 'g2max', 'g2N'], {'process': '(True)'}), '(g2min, g2max, g2N, process=True)\n', (2276, 2309), False, 'from Grids import createGrid\n'), ((3196, 3223), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 9)'}), '(figsize=(16, 9))\n', (3206, 3223), True, 'import matplotlib.pyplot as plt\n'), ((3230, 3258), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (['(1)', '(3)', 'fig'], {}), '(1, 3, fig)\n', (3247, 3258), True, 'import matplotlib.gridspec as gridspec\n'), ((3509, 3530), 'matplotlib.pyplot.pause', 'plt.pause', (['args.delay'], {}), '(args.delay)\n', (3518, 3530), True, 'import matplotlib.pyplot as plt\n'), ((3573, 3582), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3580, 3582), True, 'import matplotlib.pyplot as plt\n'), ((3787, 3808), 'matplotlib.pyplot.pause', 'plt.pause', (['args.delay'], {}), '(args.delay)\n', (3796, 3808), True, 'import matplotlib.pyplot as plt\n'), ((3850, 3859), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3857, 3859), True, 'import matplotlib.pyplot as plt\n'), ((4078, 4099), 'matplotlib.pyplot.pause', 'plt.pause', (['args.delay'], {}), '(args.delay)\n', (4087, 4099), True, 'import matplotlib.pyplot as plt\n'), ((4138, 4147), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (4145, 4147), True, 'import matplotlib.pyplot as plt\n'), ((4362, 4383), 'matplotlib.pyplot.pause', 'plt.pause', (['args.delay'], {}), '(args.delay)\n', (4371, 4383), True, 'import matplotlib.pyplot as plt\n'), ((4423, 4432), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (4430, 4432), True, 'import matplotlib.pyplot as plt\n'), ((4645, 4666), 'matplotlib.pyplot.pause', 'plt.pause', (['args.delay'], {}), '(args.delay)\n', (4654, 4666), True, 'import matplotlib.pyplot as plt\n'), ((4706, 4715), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (4713, 4715), True, 'import matplotlib.pyplot as plt\n'), ((4947, 4968), 'matplotlib.pyplot.pause', 'plt.pause', (['args.delay'], {}), '(args.delay)\n', (4956, 4968), True, 'import matplotlib.pyplot as plt\n'), ((5008, 5017), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (5015, 5017), True, 'import matplotlib.pyplot as plt\n'), ((5247, 5268), 'matplotlib.pyplot.pause', 'plt.pause', (['args.delay'], {}), '(args.delay)\n', (5256, 5268), True, 'import matplotlib.pyplot as plt\n'), ((5330, 5339), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (5337, 5339), True, 'import matplotlib.pyplot as plt\n'), ((5558, 5579), 'matplotlib.pyplot.pause', 'plt.pause', (['args.delay'], {}), '(args.delay)\n', (5567, 5579), True, 'import matplotlib.pyplot as plt\n'), ((5624, 5633), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (5631, 5633), True, 'import matplotlib.pyplot as plt\n'), ((5852, 5873), 'matplotlib.pyplot.pause', 'plt.pause', (['args.delay'], {}), '(args.delay)\n', (5861, 5873), True, 'import matplotlib.pyplot as plt\n'), ((5917, 5926), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (5924, 5926), True, 'import matplotlib.pyplot as plt\n'), ((6143, 6164), 'matplotlib.pyplot.pause', 'plt.pause', (['args.delay'], {}), '(args.delay)\n', (6152, 6164), True, 'import matplotlib.pyplot as plt\n'), ((6212, 6221), 
'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (6219, 6221), True, 'import matplotlib.pyplot as plt\n'), ((6443, 6464), 'matplotlib.pyplot.pause', 'plt.pause', (['args.delay'], {}), '(args.delay)\n', (6452, 6464), True, 'import matplotlib.pyplot as plt\n'), ((6589, 6598), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (6596, 6598), True, 'import matplotlib.pyplot as plt\n'), ((2141, 2174), 'numpy.ones', 'np.ones', (['(2, 1)'], {'dtype': 'np.float64'}), '((2, 1), dtype=np.float64)\n', (2148, 2174), True, 'import numpy as np\n'), ((2186, 2219), 'numpy.ones', 'np.ones', (['(2, 1)'], {'dtype': 'np.float64'}), '((2, 1), dtype=np.float64)\n', (2193, 2219), True, 'import numpy as np\n'), ((2229, 2260), 'numpy.ones', 'np.ones', (['(2, 1)'], {'dtype': 'np.int64'}), '((2, 1), dtype=np.int64)\n', (2236, 2260), True, 'import numpy as np\n'), ((2403, 2438), 'numpy.array', 'np.array', (['[[-0.5, 0.5]]', 'np.float64'], {}), '([[-0.5, 0.5]], np.float64)\n', (2411, 2438), True, 'import numpy as np\n'), ((367, 384), 'os.path.abspath', 'abspath', (['__file__'], {}), '(__file__)\n', (374, 384), False, 'from os.path import abspath, dirname, exists, join\n'), ((1972, 2020), 'os.path.join', 'join', (["savedict['savepath']", "savedict['savename']"], {}), "(savedict['savepath'], savedict['savename'])\n", (1976, 2020), False, 'from os.path import abspath, dirname, exists, join\n'), ((2749, 2776), 'numpy.array', 'np.array', (['[[-1.0, -np.inf]]'], {}), '([[-1.0, -np.inf]])\n', (2757, 2776), True, 'import numpy as np\n'), ((2785, 2811), 'numpy.array', 'np.array', (['[[np.inf, -1.0]]'], {}), '([[np.inf, -1.0]])\n', (2793, 2811), True, 'import numpy as np\n'), ((2856, 2880), 'numpy.array', 'np.array', (['[[-1.0, -0.5]]'], {}), '([[-1.0, -0.5]])\n', (2864, 2880), True, 'import numpy as np\n'), ((2889, 2911), 'numpy.array', 'np.array', (['[[0.5, 1.0]]'], {}), '([[0.5, 1.0]])\n', (2897, 2911), True, 'import numpy as np\n'), ((2952, 2976), 'numpy.array', 'np.array', (['[[-1.0, -0.5]]'], {}), '([[-1.0, -0.5]])\n', (2960, 2976), True, 'import numpy as np\n'), ((2985, 3007), 'numpy.array', 'np.array', (['[[0.5, 1.0]]'], {}), '([[0.5, 1.0]])\n', (2993, 3007), True, 'import numpy as np\n'), ((3266, 3301), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[i]'], {'projection': '"""3d"""'}), "(gs[i], projection='3d')\n", (3277, 3301), True, 'import matplotlib.pyplot as plt\n'), ((3324, 3342), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[2]'], {}), '(gs[2])\n', (3335, 3342), True, 'import matplotlib.pyplot as plt\n'), ((3590, 3625), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[i]'], {'projection': '"""3d"""'}), "(gs[i], projection='3d')\n", (3601, 3625), True, 'import matplotlib.pyplot as plt\n'), ((3648, 3666), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[2]'], {}), '(gs[2])\n', (3659, 3666), True, 'import matplotlib.pyplot as plt\n'), ((3867, 3902), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[i]'], {'projection': '"""3d"""'}), "(gs[i], projection='3d')\n", (3878, 3902), True, 'import matplotlib.pyplot as plt\n'), ((3925, 3943), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[2]'], {}), '(gs[2])\n', (3936, 3943), True, 'import matplotlib.pyplot as plt\n'), ((4155, 4190), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[i]'], {'projection': '"""3d"""'}), "(gs[i], projection='3d')\n", (4166, 4190), True, 'import matplotlib.pyplot as plt\n'), ((4213, 4231), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[2]'], {}), '(gs[2])\n', (4224, 4231), True, 'import matplotlib.pyplot as plt\n'), ((4440, 
4475), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[i]'], {'projection': '"""3d"""'}), "(gs[i], projection='3d')\n", (4451, 4475), True, 'import matplotlib.pyplot as plt\n'), ((4498, 4516), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[2]'], {}), '(gs[2])\n', (4509, 4516), True, 'import matplotlib.pyplot as plt\n'), ((4723, 4758), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[i]'], {'projection': '"""3d"""'}), "(gs[i], projection='3d')\n", (4734, 4758), True, 'import matplotlib.pyplot as plt\n'), ((4781, 4799), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[2]'], {}), '(gs[2])\n', (4792, 4799), True, 'import matplotlib.pyplot as plt\n'), ((5025, 5060), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[i]'], {'projection': '"""3d"""'}), "(gs[i], projection='3d')\n", (5036, 5060), True, 'import matplotlib.pyplot as plt\n'), ((5083, 5101), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[2]'], {}), '(gs[2])\n', (5094, 5101), True, 'import matplotlib.pyplot as plt\n'), ((5347, 5382), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[i]'], {'projection': '"""3d"""'}), "(gs[i], projection='3d')\n", (5358, 5382), True, 'import matplotlib.pyplot as plt\n'), ((5405, 5423), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[2]'], {}), '(gs[2])\n', (5416, 5423), True, 'import matplotlib.pyplot as plt\n'), ((5641, 5676), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[i]'], {'projection': '"""3d"""'}), "(gs[i], projection='3d')\n", (5652, 5676), True, 'import matplotlib.pyplot as plt\n'), ((5699, 5717), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[2]'], {}), '(gs[2])\n', (5710, 5717), True, 'import matplotlib.pyplot as plt\n'), ((5934, 5969), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[i]'], {'projection': '"""3d"""'}), "(gs[i], projection='3d')\n", (5945, 5969), True, 'import matplotlib.pyplot as plt\n'), ((5992, 6010), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[2]'], {}), '(gs[2])\n', (6003, 6010), True, 'import matplotlib.pyplot as plt\n'), ((6229, 6264), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[i]'], {'projection': '"""3d"""'}), "(gs[i], projection='3d')\n", (6240, 6264), True, 'import matplotlib.pyplot as plt\n'), ((6287, 6305), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[2]'], {}), '(gs[2])\n', (6298, 6305), True, 'import matplotlib.pyplot as plt\n'), ((6562, 6586), 'os.path.join', 'join', (['""".."""', '"""jpeg_dumps"""'], {}), "('..', 'jpeg_dumps')\n", (6566, 6586), False, 'from os.path import abspath, dirname, exists, join\n'), ((2641, 2662), 'numpy.array', 'np.array', (['[-0.0, 0.0]'], {}), '([-0.0, 0.0])\n', (2649, 2662), True, 'import numpy as np\n')] |
# Module for cleaning messages from all unwanted content
import re
def clean_all(msg):
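    """Apply every cleaning pass (invite embeds, backticks, mentions, emojis) to a message."""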
msg = clean_invite_embed(msg)
msg = clean_backticks(msg)
msg = clean_mentions(msg)
msg = clean_emojis(msg)
return msg
def clean_invite_embed(msg):
"""Prevents invites from embedding"""
return msg.replace("discord.gg/", "discord.gg/\u200b")
def clean_backticks(msg):
"""Prevents backticks from breaking code block formatting"""
return msg.replace("`", "\U0000ff40")
def clean_formatting(msg):
"""Escape formatting items in a string."""
return re.sub(r"([`*_])", r"\\\1", msg)
def clean_mentions(msg):
"""Prevent discord mentions"""
return msg.replace("@", "@\u200b")
def clean_emojis(msg):
"""Escape custom emojis."""
return re.sub(r"<(a)?:([a-zA-Z0-9_]+):([0-9]+)>", "<\u200b\\1:\\2:\\3>", msg)
| [
"re.sub"
] | [((582, 615), 're.sub', 're.sub', (['"""([`*_])"""', '"""\\\\\\\\\\\\1"""', 'msg'], {}), "('([`*_])', '\\\\\\\\\\\\1', msg)\n", (588, 615), False, 'import re\n'), ((784, 853), 're.sub', 're.sub', (['"""<(a)?:([a-zA-Z0-9_]+):([0-9]+)>"""', '"""<\u200b\\\\1:\\\\2:\\\\3>"""', 'msg'], {}), "('<(a)?:([a-zA-Z0-9_]+):([0-9]+)>', '<\\u200b\\\\1:\\\\2:\\\\3>', msg)\n", (790, 853), False, 'import re\n')] |
from ConfigParser import ConfigParser
import logging
import os
import sys
from wordsim.models import get_models
from wordsim.nn.utils import evaluate
from wordsim.nn.data import create_datasets
from wordsim.nn.model import KerasModel
def main():
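    # Set up logging, read the config file given on the command line, build vectorizers and
    # datasets, then train (if enabled) and evaluate on the dev set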
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s : " +
"%(module)s (%(lineno)s) - %(levelname)s - %(message)s")
conf = ConfigParser(os.environ)
conf.read(sys.argv[1])
vectorizers = get_models(conf)
training_data, dev_data, test_data = create_datasets(conf)
if conf.getboolean('main', 'train'):
epochs = conf.getint('training', 'epochs')
batch_size = conf.getint('training', 'batch_size')
training_data.vectorize(vectorizers)
input_size, input_dim = training_data.vectors.shape
model = KerasModel(conf, input_dim, 1) # output is a score
model.train(training_data, epochs, batch_size)
model.save()
test_data.vectorize(vectorizers)
evaluate(model, dev_data)
if __name__ == "__main__":
main()
| [
"logging.basicConfig",
"wordsim.nn.model.KerasModel",
"wordsim.nn.data.create_datasets",
"wordsim.nn.utils.evaluate",
"ConfigParser.ConfigParser",
"wordsim.models.get_models"
] | [((253, 379), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': "('%(asctime)s : ' + '%(module)s (%(lineno)s) - %(levelname)s - %(message)s')"}), "(level=logging.INFO, format='%(asctime)s : ' +\n '%(module)s (%(lineno)s) - %(levelname)s - %(message)s')\n", (272, 379), False, 'import logging\n'), ((412, 436), 'ConfigParser.ConfigParser', 'ConfigParser', (['os.environ'], {}), '(os.environ)\n', (424, 436), False, 'from ConfigParser import ConfigParser\n'), ((482, 498), 'wordsim.models.get_models', 'get_models', (['conf'], {}), '(conf)\n', (492, 498), False, 'from wordsim.models import get_models\n'), ((540, 561), 'wordsim.nn.data.create_datasets', 'create_datasets', (['conf'], {}), '(conf)\n', (555, 561), False, 'from wordsim.nn.data import create_datasets\n'), ((1003, 1028), 'wordsim.nn.utils.evaluate', 'evaluate', (['model', 'dev_data'], {}), '(model, dev_data)\n', (1011, 1028), False, 'from wordsim.nn.utils import evaluate\n'), ((834, 864), 'wordsim.nn.model.KerasModel', 'KerasModel', (['conf', 'input_dim', '(1)'], {}), '(conf, input_dim, 1)\n', (844, 864), False, 'from wordsim.nn.model import KerasModel\n')] |
import requests
import hashlib
import time
import json
from pymongo import MongoClient
headers = {
'content-type': 'application/x-www-form-urlencoded',
}
userInfo = {
'player1':{
        'uid': 'player 1 uid goes here',
        'token': 'player 1 token goes here'
},
'player2':{
        'uid': 'player 2 uid goes here',
        'token': 'player 2 token goes here'
}
}
session = requests.session()
roomID = -1
# Count of questions answered from the local question bank (cache hits)
successTime = 0
# Millisecond timestamp generator
nowTime = lambda:int(round(time.time() * 1000))
#mongodb
conn = MongoClient('localhost',27017)
quizSet = conn.zhdtw.quizs
intoRoomUrl = 'https://question-zh.hortor.net/question/bat/intoRoom'
leaveRoomUrl = 'https://question-zh.hortor.net/question/bat/leaveRoom'
beginFightUrl = 'https://question-zh.hortor.net/question/bat/beginFight'
findQuizUrl = 'https://question-zh.hortor.net/question/bat/findQuiz'
chooseUrl = 'https://question-zh.hortor.net/question/bat/choose'
fightResultUrl = 'https://question-zh.hortor.net/question/bat/fightResult'
# Generate the request signature: MD5 of the sorted key=value pairs, with token+uid supplied as the "token" field
def genSign(params,player):
tempParams = params.copy()
tempParams['token'] = userInfo[player]['token']+userInfo[player]['uid']
tempParams = sorted(tempParams.items(), key=lambda e:e[0])
originStr = ''
for key, value in tempParams:
originStr = originStr + key + '=' + str(value)
m = hashlib.md5()
m.update(originStr.encode(encoding='utf-8'))
return m.hexdigest()
def intoRoom(player):
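    # Ask the server to place this player in a room and store the returned roomId in the global roomID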
global roomID
params = {
'roomID' : roomID,
'uid' : userInfo[player]['uid'],
't' : nowTime()
}
params['sign'] = genSign(params,player)
resp = session.post(url=intoRoomUrl,data=params,headers=headers)
try:
jdata = json.loads(resp.text)
roomID = jdata.get('data')['roomId']
print(player + ' 进入房间成功...')
except:
print(resp.text)
print(player + ' 进入房间失败...')
leaveRoom(player)
def leaveRoom(player):
params = {
'roomID' : roomID,
'uid' : userInfo[player]['uid'],
't' : nowTime()
}
params['sign'] = genSign(params,player)
resp = session.post(url=leaveRoomUrl,data=params,headers=headers)
try:
jdata = json.loads(resp.text)
if jdata.get('errcode') == 0:
print(player + ' 退出房间成功...')
else:
print(jdata)
except:
print(resp.text)
print(player + ' 退出房间失败...')
def beginFight():
params = {
'roomID' : roomID,
'uid' : userInfo['player1']['uid'],
't' : nowTime()
}
params['sign'] = genSign(params,'player1')
resp = session.post(url=beginFightUrl,data=params,headers=headers)
try:
jdata = json.loads(resp.text)
if jdata.get('errcode') == 0:
print('开始好友对战...')
else:
print(jdata)
except:
print(resp.text)
def findQuiz(quizNum):
params = {
'roomID' : roomID,
'quizNum' : quizNum,
'uid' : userInfo['player1']['uid'],
't' : nowTime()
}
params['sign'] = genSign(params,'player1')
resp = session.post(url=findQuizUrl,data=params,headers=headers)
try:
jdata = json.loads(resp.text)
if jdata.get('errcode') == 0:
print('获取题目成功...')
return jdata.get('data')
else:
print(jdata)
except:
print(resp.text)
def choose(player,quizNum,option,cfTime,magic):
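    # Submit this player's answer for question quizNum; cfTime is the measured fetch latency and magic is the MD5 of the option texts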
params = {
'roomID' : roomID,
'uid' : userInfo[player]['uid'],
't' : nowTime(),
'option' : option,
'quizNum': quizNum,
'cfTime': cfTime,
'ccTime' : nowTime(),
'magic' : magic
}
params['sign'] = genSign(params,player)
resp = session.post(url=chooseUrl,data=params,headers=headers)
try :
jdata = json.loads(resp.text)
if jdata.get('errcode') == 0:
print(player + ' 选择成功...')
return jdata.get('data')
else:
print(jdata)
except:
print(player + ' 选择失败...')
print(resp.text)
def fightResult(player):
params = {
'roomID' : roomID,
'type' : 0,
'uid' : userInfo[player]['uid'],
't' : nowTime()
}
params['sign'] = genSign(params,player)
resp = session.post(url=fightResultUrl,data=params,headers=headers)
try:
jdata = json.loads(resp.text)
if jdata.get('errcode') == 0:
print(player + ' 获取结果成功...')
return jdata.get('data')
else:
print(jdata)
except:
print(player + ' 获取结果失败...')
print(resp.text)
def genMagic(optionList):
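    # The "magic" value is the MD5 hex digest of the four option strings concatenated after sorting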
optionList.sort()
originStr = optionList[0]+optionList[1]+optionList[2]+optionList[3]
m = hashlib.md5()
m.update(originStr.encode(encoding='utf-8'))
return m.hexdigest()
def startAnswer():
global successTime
for i in range(1,6):
        # Latency between requesting and receiving the quiz data
cfTime = nowTime()
quizInfo = findQuiz(i)
cfTime = nowTime() - cfTime
time.sleep(0.1)
optionList = quizInfo['options']
quiz = quizInfo['quiz']
option = 1
        # Look up the question in the local question bank
#print(quiz)
localQuiz = quizSet.find_one({'quiz':quiz})
if localQuiz:
successTime += 1
for j in range(0,4):
if(optionList[j] == localQuiz['answer']):
option = j+1
break
magic = genMagic(optionList.copy())
chooseResult = choose('player1',i,option,cfTime,magic)
choose('player2',i,2,cfTime+10,magic)
if not localQuiz:
quizModel = {}
quizModel['quiz'] = quiz
quizModel['options'] = optionList
quizModel['school'] = quizInfo['school']
quizModel['type'] = quizInfo['type']
quizModel['typeID'] = quizInfo['typeID']
quizModel['contributor'] = quizInfo['contributor']
quizModel['answer'] = optionList[chooseResult['answer']-1]
quizSet.insert_one(quizModel)
#print(optionList[chooseResult['answer']-1])
if __name__ == '__main__':
    # Change i to set how many rooms/battles to run
i = 5
gameTime = 0
while(i > 0):
roomID = -1
intoRoom('player1')
intoRoom('player2')
beginFight()
startAnswer()
fightResult('player1')
fightResult('player2')
leaveRoom('player1')
leaveRoom('player2')
gameTime += 1
print('答题数 %d /命中题库次数 %d ' % (gameTime*5,successTime))
time.sleep(1)
i = i - 1
conn.close() | [
"requests.session",
"pymongo.MongoClient",
"hashlib.md5",
"json.loads",
"time.sleep",
"time.time"
] | [((345, 363), 'requests.session', 'requests.session', ([], {}), '()\n', (361, 363), False, 'import requests\n'), ((472, 503), 'pymongo.MongoClient', 'MongoClient', (['"""localhost"""', '(27017)'], {}), "('localhost', 27017)\n", (483, 503), False, 'from pymongo import MongoClient\n'), ((1276, 1289), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (1287, 1289), False, 'import hashlib\n'), ((4683, 4696), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (4694, 4696), False, 'import hashlib\n'), ((1663, 1684), 'json.loads', 'json.loads', (['resp.text'], {}), '(resp.text)\n', (1673, 1684), False, 'import json\n'), ((2143, 2164), 'json.loads', 'json.loads', (['resp.text'], {}), '(resp.text)\n', (2153, 2164), False, 'import json\n'), ((2635, 2656), 'json.loads', 'json.loads', (['resp.text'], {}), '(resp.text)\n', (2645, 2656), False, 'import json\n'), ((3112, 3133), 'json.loads', 'json.loads', (['resp.text'], {}), '(resp.text)\n', (3122, 3133), False, 'import json\n'), ((3755, 3776), 'json.loads', 'json.loads', (['resp.text'], {}), '(resp.text)\n', (3765, 3776), False, 'import json\n'), ((4302, 4323), 'json.loads', 'json.loads', (['resp.text'], {}), '(resp.text)\n', (4312, 4323), False, 'import json\n'), ((4967, 4982), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (4977, 4982), False, 'import time\n'), ((6463, 6476), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (6473, 6476), False, 'import time\n'), ((434, 445), 'time.time', 'time.time', ([], {}), '()\n', (443, 445), False, 'import time\n')] |
import random
import plotly.express as px
import plotly.figure_factory as ff
import statistics
dice_result = []
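# Simulate 1000 rolls of two dice and record the sum of each roll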
for i in range(0,1000):
dice1 = random.randint(1,6)
dice2 = random.randint(1,6)
dice_result.append(dice1+dice2)
mean = sum(dice_result)/len(dice_result)
print("mean of this data is {} ".format(mean))
median = statistics.median(dice_result)
print("median of this data is {} ".format(median))
mode= statistics.mode(dice_result)
print("mode of this data is {} ".format(mode))
std_deviation = statistics.stdev(dice_result)
print("stdev : {}".format(std_deviation) )
fig = ff.create_distplot([dice_result],["Result"], show_hist= False)
fig.show() | [
"statistics.median",
"random.randint",
"statistics.stdev",
"plotly.figure_factory.create_distplot",
"statistics.mode"
] | [((357, 387), 'statistics.median', 'statistics.median', (['dice_result'], {}), '(dice_result)\n', (374, 387), False, 'import statistics\n'), ((449, 477), 'statistics.mode', 'statistics.mode', (['dice_result'], {}), '(dice_result)\n', (464, 477), False, 'import statistics\n'), ((545, 574), 'statistics.stdev', 'statistics.stdev', (['dice_result'], {}), '(dice_result)\n', (561, 574), False, 'import statistics\n'), ((630, 692), 'plotly.figure_factory.create_distplot', 'ff.create_distplot', (['[dice_result]', "['Result']"], {'show_hist': '(False)'}), "([dice_result], ['Result'], show_hist=False)\n", (648, 692), True, 'import plotly.figure_factory as ff\n'), ((160, 180), 'random.randint', 'random.randint', (['(1)', '(6)'], {}), '(1, 6)\n', (174, 180), False, 'import random\n'), ((193, 213), 'random.randint', 'random.randint', (['(1)', '(6)'], {}), '(1, 6)\n', (207, 213), False, 'import random\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2020, Intel Corporation. All rights reserved.
# SPDX-License-Identifier: BSD-2-Clause
#
"""A signing utility for creating and signing a BIOS sub-region for UEFI
"""
from __future__ import print_function
import os
import sys
import subprocess
import argparse
import uuid
import struct
import re
from pathlib import Path
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from common.siip_constants import VERSION as __version__
from common.banner import banner
import common.utilities as utils
import common.logger as logging
LOGGER = logging.getLogger("subregion_sign")
__prog__ = "subregion_sign"
TOOLNAME = "Sub-Region Signing Tool"
if sys.version_info < (3, 6):
raise Exception("Python 3.6 is the minimal version required")
class UefiSubregAuthenClass:
""" Class define EFI subreation Authentication class """
# typedef struct {
# char Name[16 bytes]; // Name of the sub-region
# EFI_GUID VendorGuid; // Vendor GUID
# SUB_REGION_VERIFICATION CertParam; // Sub-Region Certificate Parameters
# } EFI_SUB_REGION_AUTHENTICATION;
# typedef struct {
# SUB_REGION_HEADER Hdr; // Certificate Header
# UINT8 CertData[1]; // Calculated Signature
# } SUB_REGION_VERIFICATION;
# typedef struct {
# UINT32 Revision; // Revision of Signature Structure
# UINT32 Length; // Length of the Signature + Header
# EFI_GUID CertType; // Signature type
# } SUB_REGION_HEADER;
# typedef struct {
# UINT8 PublicKey[384]; // Public Key pair of the Signing Key
# UINT8 Signature[384]; // SHA384-RSA3K Signature
# } EFI_CERT_BLOCK_RSA3072_SHA384;
_StructAuthInfoFormat = "<16s16sLL16s"
_StructAuthInfoSize = struct.calcsize(_StructAuthInfoFormat)
_StructSubRegionHdrFormat = "<LL16s"
_StructSubRegionHdrSize = struct.calcsize(_StructSubRegionHdrFormat)
def __init__(self, cert_info):
""" initialization of the variables for structure """
self._valid = False
self.w_name = cert_info["name"]
self.vendor_guid = cert_info["vendor_guid"]
self.w_revision = cert_info["revision"]
self.dw_length = self._StructAuthInfoSize
self.cert_type = cert_info["cert_type"]
self.cert_data = bytes()
self.payload = bytes()
def encode(self):
""" builds structure for subregion authentication header """
self.dw_length = self._StructSubRegionHdrSize + len(self.cert_data)
uefi_subreg_authen_hdr = struct.pack(
self._StructAuthInfoFormat,
self.w_name,
self.vendor_guid.bytes_le,
self.w_revision,
self.dw_length,
self.cert_type.bytes_le,
)
self._valid = True
return uefi_subreg_authen_hdr + self.cert_data + self.payload
def dump_info(self):
""" dump the information of subregion authentication structure """
if not self._valid:
raise ValueError
print(
"EFI_SUBREGION_AUTHENTICATION.AuthInfo.Hdr.dw_length = {dw_length:08X}".format(
dw_length=self.dw_length
)
)
print(
"EFI_SUBREGION_AUTHENTICATION.AuthInfo.Hdr.w_revision = {w_revision:04X}".format(
w_revision=self.w_revision
)
)
print(
"EFI_SUBREGION_AUTHENTICATION.AuthInfo.Hdr.wCertificateType = {Vendor_guid}".format(
Vendor_guid=str(self.vendor_guid).upper()
)
)
print(
"EFI_SUBREGION_AUTHENTICATION.AuthInfo.cert_type = {cert_type}".format(
cert_type=str(self.cert_type).upper()
)
)
print(
"sizeof (EFI_SUBREGION_AUTHENTICATION.AuthInfo.cert_data) = {Size:08X}".format(
Size=len(self.cert_data)
)
)
print(
"sizeof (payload) = {Size:08X}".format(
Size=len(self.payload)
)
)
def get_certifcation_info(cl_inputs, signer):
""" returns the certifcate type passed on subregion """
# different signature type supported by tool
CERT_TYPE = {
"pkcs7": [
"4aafd29d-68df-49ee-8aa9-347d375665a7",
"smime -sign -binary -outform DER -md sha256 -nodetach -signer",
None,
],
"rsa": [
"2ee9976f-9d4c-4442-a997-8cad1c875fa1",
"dgst -binary -keyform PEM -sha384 -sign",
"rsa -pubout -modulus -noout",
],
}
# Check if openssl is installed
path = utils.check_for_tool("openssl", "version", cl_inputs.tool_path)
# Get signing type information
cert_info = CERT_TYPE.get(cl_inputs.signer_type)
# Create openSSL command 1
cmd = f'{path} {cert_info[1]} "{signer}"'
# Create openSSL command 2
if cert_info[2] is not None:
cmd2 = f"{path} {cert_info[2]}"
else:
cmd2 = cert_info[2]
certification_info = {
"revision": 0x01,
"name": cl_inputs.name.encode("utf-8"),
"vendor_guid": uuid.UUID(cl_inputs.vendor_guid),
"cert_type": uuid.UUID(cert_info[0]),
"openssl_cmd": cmd,
"openssl_cmd2": cmd2,
}
return certification_info
def build_subreg_signed_file(cert_struct, outfile):
""" build output file """
try:
with open(outfile, mode="wb") as signed_file:
signed_file.write(cert_struct)
except ValueError:
LOGGER.critical("\nCannot write payload file: %s", outfile)
sys.exit(2)
def read_file(inputfile):
""" read input file to bytes """
try:
with open(inputfile, mode="rb") as file:
sign_file = file.read()
except ValueError:
LOGGER.critical("\nCannot read payload file: %s", inputfile)
sys.exit(2)
return sign_file
def generate_signature(openssl_cmd, payload):
""" signed input file """
# Run OpenSSL command with the specified private key and capture signature from STDOUT
try:
ssl_process = subprocess.run(
openssl_cmd,
input=payload,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
check=True,
)
signature = ssl_process.stdout
except:
LOGGER.warning("\nCannot run openssl.")
sys.exit(1)
if ssl_process.returncode != 0:
LOGGER.critical("\nopenssl failed.")
sys.exit(1)
return signature
def create_arg_parser():
""" Parsing and validating input arguments."""
def convert_arg_line_to_args(arg_line):
for arg in arg_line.split():
if not arg.strip():
continue
yield arg
my_parser = argparse.ArgumentParser(
prog=__prog__,
description=__doc__,
conflict_handler="resolve",
fromfile_prefix_chars="@",
)
my_parser.convert_arg_line_to_args = convert_arg_line_to_args
my_parser.add_argument(
"subregion_file", help="sub region data that needs to be signed."
)
my_parser.add_argument(
"-o",
"--output",
dest="signed_file",
help="Output capsule filename.",
metavar="Filename",
default="SIGNED_OUT.bin",
)
my_parser.add_argument(
"-n",
"--name",
help="The name of the subregion being signed. Max size is 16 bytes The name is stored in signed file.",
type=chk_string_size,
metavar="subregion",
required=True,
)
my_parser.add_argument(
"-vg",
"--vendor-guid",
help="Vender GUID is one specific value given by the vendor for the sub-region being signed.\
This is required. The format is '00000000-0000-0000-0000-000000000000'",
type=chk_guid_format,
metavar="v_guid",
required=True,
)
my_parser.add_argument(
"-t",
"--signer_type",
metavar="sign_type",
required=True,
help="Type of Signing pkcs7 or rsa.",
choices=["pkcs7", "rsa"],
)
my_parser.add_argument(
"-s",
"--signer",
dest="signerfile",
required=True,
help="OpenSSL signer private certificate filename.",
)
my_parser.add_argument(
"--toolpath",
dest="tool_path",
help="Path to signtool or OpenSSL tool. "
" Optional if path to tools are already in PATH.",
default=None,
)
my_parser.add_argument(
"--show",
help="Shows information about the subregion_authentication structure "
" Optional but requires all information in order to process.",
action="store_true",
)
my_parser.add_argument(
"-v",
"--version",
help="Shows the current version of the BIOS Stitching Tool",
action="version",
version="%(prog)s {version}".format(version=__version__),
)
return my_parser
def chk_string_size(string):
""""Check the size of the string"""
max_size = 16
size = len(string.encode("utf-8"))
msg = "The size of {} is {}. The {} size must not be greter than {}".format(
string, size, string, max_size
)
if size > max_size:
raise argparse.ArgumentTypeError(str(msg))
return string
def chk_guid_format(guid):
""" check for correct formate of GUID """
# format for guid xxxxxxxx-xxxx-xxxx-xxx-xxxxxxxxxxxx where x can be A-F or 0-9
guidFormat = re.compile(
r"([a-f\d]{8}[-][a-f\d]{4}[-][a-f\d]{4}[-][a-f\d]{4}[-]{1}[a-f\d]{12}$)", re.I
)
if guidFormat.match(guid) is None:
raise argparse.ArgumentTypeError(
"File guild value is not in correct format \
(xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx where x can be A-F or 0-9)\
{}".format(guid)
)
return guid
def sign_subregion(subregion_file, signer_file, signed_file, signer_type, subregion_name, vendor_guid, show = False, tool_path = None):
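    """Sign a sub-region payload and write the EFI authentication header, signature and payload to signed_file."""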
# Use absolute path for openSSL
sbrgn_file = Path(subregion_file).resolve()
signer_file = Path(signer_file).resolve()
outfile = Path(signed_file).resolve()
filenames = [str(sbrgn_file), str(signer_file)]
# Verify file input file exist
status = utils.file_exist(filenames, LOGGER)
if status != 0:
sys.exit(status)
if os.path.getsize(sbrgn_file) == 0:
LOGGER.critical("size of {} subregion file must be greater than 0!".format(sbrgn_file))
sys.exit(status)
status = utils.check_key(signer_file, signer_type, LOGGER)
if status != 0:
sys.exit(status)
outfile = utils.file_not_exist(outfile, LOGGER)
parser = argparse.ArgumentParser()
parser.add_argument("name, vendor_guid, tool_path, signer_type")
cl_inputs = parser.parse_args(['name={}'.format(subregion_name)])
cl_inputs.name = subregion_name
cl_inputs.vendor_guid = vendor_guid
cl_inputs.tool_path = tool_path
cl_inputs.signer_type = signer_type
cert_info = get_certifcation_info(cl_inputs, signer_file)
uefi_subreg_authen = UefiSubregAuthenClass(cert_info)
# read input file to store into structure
payload = read_file(sbrgn_file)
uefi_subreg_authen.payload = payload
# add Vendor Guid to Payload
payload = uefi_subreg_authen.vendor_guid.bytes_le + payload
# calculate the signature store in structure
cert_data = generate_signature(cert_info["openssl_cmd"], payload)
if cert_info["openssl_cmd2"]:
# Read in the private key
payload = read_file(signer_file)
# Extract the public key modulus from private key
cert_pub = generate_signature(cert_info["openssl_cmd2"], payload)
# convert public key from bytes to string
cert_pub_string = cert_pub.decode("utf-8")
        # remove the word "Modulus=" from the output
cert_pubkey = cert_pub_string.replace("Modulus=", "")
# remove end of line from public key
cert_pubkey = cert_pubkey.rstrip()
        # Convert to hex bytes and add to signature
cert_pubkey = bytes.fromhex(cert_pubkey)
# public key and signature are packed back to back
cert_data = cert_pubkey + cert_data
uefi_subreg_authen.cert_data = cert_data
    # pack structure with signature and get updated size of header
uefi_signed_data = uefi_subreg_authen.encode()
if show:
uefi_subreg_authen.dump_info()
# Create output EFI subregion authentication header and signature and original file
build_subreg_signed_file(uefi_signed_data, str(outfile))
print(
"Signed {} sub-region({}) was successfully generated.".format(
subregion_name, outfile
)
)
def main():
"""Entry to script."""
parser = create_arg_parser()
args = parser.parse_args()
sign_subregion(args.subregion_file, args.signerfile, args.signed_file,
args.signer_type, args.name, args.vendor_guid, args.show, args.tool_path)
if __name__ == "__main__":
banner(TOOLNAME, __version__)
main()
| [
"common.utilities.check_key",
"subprocess.run",
"os.path.abspath",
"common.logger.getLogger",
"argparse.ArgumentParser",
"os.path.getsize",
"common.banner.banner",
"struct.calcsize",
"struct.pack",
"common.utilities.check_for_tool",
"common.utilities.file_exist",
"uuid.UUID",
"pathlib.Path",
"common.utilities.file_not_exist",
"sys.exit",
"re.compile"
] | [((633, 668), 'common.logger.getLogger', 'logging.getLogger', (['"""subregion_sign"""'], {}), "('subregion_sign')\n", (650, 668), True, 'import common.logger as logging\n'), ((1904, 1942), 'struct.calcsize', 'struct.calcsize', (['_StructAuthInfoFormat'], {}), '(_StructAuthInfoFormat)\n', (1919, 1942), False, 'import struct\n'), ((2014, 2056), 'struct.calcsize', 'struct.calcsize', (['_StructSubRegionHdrFormat'], {}), '(_StructSubRegionHdrFormat)\n', (2029, 2056), False, 'import struct\n'), ((4848, 4911), 'common.utilities.check_for_tool', 'utils.check_for_tool', (['"""openssl"""', '"""version"""', 'cl_inputs.tool_path'], {}), "('openssl', 'version', cl_inputs.tool_path)\n", (4868, 4911), True, 'import common.utilities as utils\n'), ((7021, 7139), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '__prog__', 'description': '__doc__', 'conflict_handler': '"""resolve"""', 'fromfile_prefix_chars': '"""@"""'}), "(prog=__prog__, description=__doc__,\n conflict_handler='resolve', fromfile_prefix_chars='@')\n", (7044, 7139), False, 'import argparse\n'), ((9749, 9853), 're.compile', 're.compile', (['"""([a-f\\\\d]{8}[-][a-f\\\\d]{4}[-][a-f\\\\d]{4}[-][a-f\\\\d]{4}[-]{1}[a-f\\\\d]{12}$)"""', 're.I'], {}), "(\n '([a-f\\\\d]{8}[-][a-f\\\\d]{4}[-][a-f\\\\d]{4}[-][a-f\\\\d]{4}[-]{1}[a-f\\\\d]{12}$)'\n , re.I)\n", (9759, 9853), False, 'import re\n'), ((10592, 10627), 'common.utilities.file_exist', 'utils.file_exist', (['filenames', 'LOGGER'], {}), '(filenames, LOGGER)\n', (10608, 10627), True, 'import common.utilities as utils\n'), ((10850, 10899), 'common.utilities.check_key', 'utils.check_key', (['signer_file', 'signer_type', 'LOGGER'], {}), '(signer_file, signer_type, LOGGER)\n', (10865, 10899), True, 'import common.utilities as utils\n'), ((10960, 10997), 'common.utilities.file_not_exist', 'utils.file_not_exist', (['outfile', 'LOGGER'], {}), '(outfile, LOGGER)\n', (10980, 10997), True, 'import common.utilities as utils\n'), ((11012, 11037), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (11035, 11037), False, 'import argparse\n'), ((13334, 13363), 'common.banner.banner', 'banner', (['TOOLNAME', '__version__'], {}), '(TOOLNAME, __version__)\n', (13340, 13363), False, 'from common.banner import banner\n'), ((2689, 2831), 'struct.pack', 'struct.pack', (['self._StructAuthInfoFormat', 'self.w_name', 'self.vendor_guid.bytes_le', 'self.w_revision', 'self.dw_length', 'self.cert_type.bytes_le'], {}), '(self._StructAuthInfoFormat, self.w_name, self.vendor_guid.\n bytes_le, self.w_revision, self.dw_length, self.cert_type.bytes_le)\n', (2700, 2831), False, 'import struct\n'), ((5348, 5380), 'uuid.UUID', 'uuid.UUID', (['cl_inputs.vendor_guid'], {}), '(cl_inputs.vendor_guid)\n', (5357, 5380), False, 'import uuid\n'), ((5403, 5426), 'uuid.UUID', 'uuid.UUID', (['cert_info[0]'], {}), '(cert_info[0])\n', (5412, 5426), False, 'import uuid\n'), ((6323, 6442), 'subprocess.run', 'subprocess.run', (['openssl_cmd'], {'input': 'payload', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'shell': '(True)', 'check': '(True)'}), '(openssl_cmd, input=payload, stdout=subprocess.PIPE, stderr=\n subprocess.PIPE, shell=True, check=True)\n', (6337, 6442), False, 'import subprocess\n'), ((6731, 6742), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6739, 6742), False, 'import sys\n'), ((10656, 10672), 'sys.exit', 'sys.exit', (['status'], {}), '(status)\n', (10664, 10672), False, 'import sys\n'), ((10681, 10708), 'os.path.getsize', 'os.path.getsize', (['sbrgn_file'], {}), 
'(sbrgn_file)\n', (10696, 10708), False, 'import os\n'), ((10819, 10835), 'sys.exit', 'sys.exit', (['status'], {}), '(status)\n', (10827, 10835), False, 'import sys\n'), ((10928, 10944), 'sys.exit', 'sys.exit', (['status'], {}), '(status)\n', (10936, 10944), False, 'import sys\n'), ((439, 464), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (454, 464), False, 'import os\n'), ((5814, 5825), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (5822, 5825), False, 'import sys\n'), ((6087, 6098), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (6095, 6098), False, 'import sys\n'), ((6629, 6640), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6637, 6640), False, 'import sys\n'), ((10371, 10391), 'pathlib.Path', 'Path', (['subregion_file'], {}), '(subregion_file)\n', (10375, 10391), False, 'from pathlib import Path\n'), ((10420, 10437), 'pathlib.Path', 'Path', (['signer_file'], {}), '(signer_file)\n', (10424, 10437), False, 'from pathlib import Path\n'), ((10462, 10479), 'pathlib.Path', 'Path', (['signed_file'], {}), '(signed_file)\n', (10466, 10479), False, 'from pathlib import Path\n')] |
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from swagger_server.models.base_model_ import Model
from swagger_server import util
class DataUtility(Model):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, url: str=None, accuracy: float=None, consistency: float=None, completeness: float=None, timeliness: float=None): # noqa: E501
"""DataUtility - a model defined in Swagger
:param url: The url of this DataUtility. # noqa: E501
:type url: str
:param accuracy: The accuracy of this DataUtility. # noqa: E501
:type accuracy: float
:param consistency: The consistency of this DataUtility. # noqa: E501
:type consistency: float
:param completeness: The completeness of this DataUtility. # noqa: E501
:type completeness: float
:param timeliness: The timeliness of this DataUtility. # noqa: E501
:type timeliness: float
"""
self.swagger_types = {
'url': str,
'accuracy': float,
'consistency': float,
'completeness': float,
'timeliness': float
}
self.attribute_map = {
'url': 'URL',
'accuracy': 'accuracy',
'consistency': 'consistency',
'completeness': 'completeness',
'timeliness': 'timeliness'
}
self._url = url
self._accuracy = accuracy
self._consistency = consistency
self._completeness = completeness
self._timeliness = timeliness
@classmethod
def from_dict(cls, dikt) -> 'DataUtility':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The DataUtility of this DataUtility. # noqa: E501
:rtype: DataUtility
"""
return util.deserialize_model(dikt, cls)
@property
def url(self) -> str:
"""Gets the url of this DataUtility.
:return: The url of this DataUtility.
:rtype: str
"""
return self._url
@url.setter
def url(self, url: str):
"""Sets the url of this DataUtility.
:param url: The url of this DataUtility.
:type url: str
"""
if url is None:
raise ValueError("Invalid value for `url`, must not be `None`") # noqa: E501
self._url = url
@property
def accuracy(self) -> float:
"""Gets the accuracy of this DataUtility.
:return: The accuracy of this DataUtility.
:rtype: float
"""
return self._accuracy
@accuracy.setter
def accuracy(self, accuracy: float):
"""Sets the accuracy of this DataUtility.
:param accuracy: The accuracy of this DataUtility.
:type accuracy: float
"""
if accuracy is None:
raise ValueError("Invalid value for `accuracy`, must not be `None`") # noqa: E501
self._accuracy = accuracy
@property
def consistency(self) -> float:
"""Gets the consistency of this DataUtility.
:return: The consistency of this DataUtility.
:rtype: float
"""
return self._consistency
@consistency.setter
def consistency(self, consistency: float):
"""Sets the consistency of this DataUtility.
:param consistency: The consistency of this DataUtility.
:type consistency: float
"""
if consistency is None:
raise ValueError("Invalid value for `consistency`, must not be `None`") # noqa: E501
self._consistency = consistency
@property
def completeness(self) -> float:
"""Gets the completeness of this DataUtility.
:return: The completeness of this DataUtility.
:rtype: float
"""
return self._completeness
@completeness.setter
def completeness(self, completeness: float):
"""Sets the completeness of this DataUtility.
:param completeness: The completeness of this DataUtility.
:type completeness: float
"""
if completeness is None:
raise ValueError("Invalid value for `completeness`, must not be `None`") # noqa: E501
self._completeness = completeness
@property
def timeliness(self) -> float:
"""Gets the timeliness of this DataUtility.
:return: The timeliness of this DataUtility.
:rtype: float
"""
return self._timeliness
@timeliness.setter
def timeliness(self, timeliness: float):
"""Sets the timeliness of this DataUtility.
:param timeliness: The timeliness of this DataUtility.
:type timeliness: float
"""
if timeliness is None:
raise ValueError("Invalid value for `timeliness`, must not be `None`") # noqa: E501
self._timeliness = timeliness
| [
"swagger_server.util.deserialize_model"
] | [((2013, 2046), 'swagger_server.util.deserialize_model', 'util.deserialize_model', (['dikt', 'cls'], {}), '(dikt, cls)\n', (2035, 2046), False, 'from swagger_server import util\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: <NAME>
@contact: <EMAIL>
@software: PyCharm
@file: random.py
@time: 2020/1/15 2:29
@desc:
"""
import numpy as np
import torch
import random
from typing import Union
__all__ = ['set_numpy_rand_seed', 'set_py_rand_seed', 'set_torch_rand_seed', 'set_rand_seed',
'set_rand_seed_according_torch']
def set_numpy_rand_seed(seed: Union[int, str]):
"""Set rand seed for numpy
Args:
seed (Union[int, str]): int seed or str, which will be hashed to get int seed
"""
if isinstance(seed, str):
seed = hash(seed)
elif not isinstance(int(seed), int):
raise ValueError(f"seed={seed} should be str or int")
seed = seed % (2**32)
np.random.seed(int(seed))
def set_torch_rand_seed(seed: Union[int, str]):
"""Set rand seed for torch on both cpu, cuda and cudnn
Args:
seed (Union[int, str]): int seed or str, which will be hashed to get int seed
"""
if isinstance(seed, str):
seed = hash(seed)
elif not isinstance(int(seed), int):
raise ValueError(f"seed={seed} should be str or int")
torch.manual_seed(int(seed))
if torch.cuda.is_available():
torch.cuda.manual_seed_all(int(seed))
torch.backends.cudnn.deterministic = True
def set_py_rand_seed(seed: Union[int, str]):
"""Set rand seed for python
Args:
seed (Union[int, str]): int seed or str, which will be hashed to get int seed
"""
if isinstance(seed, str):
seed = hash(seed)
elif not isinstance(int(seed), int):
raise ValueError(f"seed={seed} should be str or int")
random.seed(int(seed))
def set_rand_seed(seed: Union[int, str]):
"""Set rand seed for numpy, torch(cpu, cuda, cudnn), python
Args:
seed (Union[int, str]): int seed or str, which will be hashed to get int seed
"""
set_numpy_rand_seed(seed)
set_py_rand_seed(seed)
set_torch_rand_seed(seed)
def set_rand_seed_according_torch():
"""Set rand seed according to torch process's rand seed
The rand seed of non-torch libs may duplicate in several dataloader worker processes.
Use this function as dataloader's worker init function can solve this problem.
"""
seed = torch.initial_seed()
set_py_rand_seed(seed)
set_numpy_rand_seed(seed) | [
"torch.cuda.is_available",
"torch.initial_seed"
] | [((1179, 1204), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1202, 1204), False, 'import torch\n'), ((2259, 2279), 'torch.initial_seed', 'torch.initial_seed', ([], {}), '()\n', (2277, 2279), False, 'import torch\n')] |
"""
This module tests the creation of pipeline nodes from various different types
and combinations of types.
"""
import textwrap
import pytest
from find_kedro import find_kedro
contents = [
(
"single_nodes",
2,
"""\
from kedro.pipeline import node
node_a_b = node(lambda x: x, "a", "b", name="a_b")
node_b_c = node(lambda x: x, "b", "c", name="b_c")
""",
),
(
"list_nodes",
2,
"""\
from kedro.pipeline import node
nodes = [
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c")
]
""",
),
(
"set_nodes",
2,
"""\
from kedro.pipeline import node
nodes = {
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c")
}
""",
),
(
"tuple_nodes",
2,
"""\
from kedro.pipeline import node
nodes = (
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c")
)
""",
),
(
"pipeline_nodes",
2,
"""\
from kedro.pipeline import node, Pipeline
nodes = Pipeline([
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c")
])
""",
),
(
"pipeline_list_nodes",
4,
"""\
from kedro.pipeline import node, Pipeline
nodes_pipeline = Pipeline([
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
])
nodes_list = [
node(lambda x: x, "a2", "b2", name="a_b2"),
node(lambda x: x, "b2", "c2", name="b_c2"),
]
""",
),
(
"pipeline_nodes_nodes",
4,
"""\
from kedro.pipeline import node, Pipeline
nodes_pipeline = Pipeline([
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
])
node_a2 = node(lambda x: x, "a2", "b2", name="a_b2")
node_b2 = node(lambda x: x, "b2", "c2", name="b_c2")
""",
),
(
"list_nodes_nodes",
4,
"""\
from kedro.pipeline import node
nodes_pipeline = [
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
]
node_a2 = node(lambda x: x, "a2", "b2", name="a_b2")
node_b2 = node(lambda x: x, "b2", "c2", name="b_c2")
""",
),
(
"dynamic_list_nodes",
100,
"""\
from kedro.pipeline import node
nodes_pipeline = [ node(lambda x: x, f"a{n}", f"a{n+1}", name=f"a{n}_a{n+1}") for n in range(100)]
""",
),
(
"dynamic_pipeline_nodes",
100,
"""\
from kedro.pipeline import node, Pipeline
nodes_pipeline = Pipeline([ node(lambda x: x, f"a{n}", f"a{n+1}", name=f"a{n}_a{n+1}") for n in range(100)])
""",
),
(
"nested_list_nodes",
4,
"""\
from kedro.pipeline import node
nodes_pipeline = [
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
[
node(lambda x: x, "a2", "b2", name="a_b2"),
node(lambda x: x, "b2", "c2", name="b_c2"),
]
]
""",
),
(
"nested_tuple_nodes",
4,
"""\
from kedro.pipeline import node
nodes_pipeline = (
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
(
node(lambda x: x, "a2", "b2", name="a_b2"),
node(lambda x: x, "b2", "c2", name="b_c2"),
)
)
""",
),
(
"nested_set_nodes",
4,
"""\
from kedro.pipeline import node
nodes_pipeline = {
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
(
node(lambda x: x, "a2", "b2", name="a_b2"),
node(lambda x: x, "b2", "c2", name="b_c2"),
)
}
""",
),
(
"function_nodes",
2,
"""\
from kedro.pipeline import Pipeline, node
def create_pipeline():
return Pipeline([
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
]
)
""",
),
(
"function_single_nodes",
4,
"""\
from kedro.pipeline import Pipeline, node
node_a_b = node(lambda x: x, "a", "b", name="a_b")
node_b_c = node(lambda x: x, "b", "c", name="b_c")
def create_pipeline():
return Pipeline([
node(lambda x: x, "fa", "fb", name="fa_fb"),
node(lambda x: x, "fb", "fc", name="fb_fc"),
]
)
""",
),
(
"function_list_nodes",
4,
"""\
from kedro.pipeline import Pipeline, node
nodes = [
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c")
]
def create_pipeline():
return Pipeline([
node(lambda x: x, "fa", "fb", name="fa_fb"),
node(lambda x: x, "fb", "fc", name="fb_fc"),
]
)
""",
),
(
"list_create_pipeline",
2,
"""\
from kedro.pipeline import Pipeline, node
creaste_pipeline = [
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c")
]
""",
),
]
@pytest.mark.parametrize("name, num_nodes, content", contents)
def test_create_file(tmpdir, name, num_nodes, content):
p = tmpdir.mkdir("nodes").join(f"{ name }.py")
p.write(textwrap.dedent(content))
pipelines = find_kedro(directory=tmpdir, verbose=True)
assert list(pipelines.keys()) == [f"nodes.{ name }", "__default__"]
assert (
len(pipelines["__default__"].nodes) == num_nodes
), f"did not collect all nodes from { name }.py"
assert len(tmpdir.listdir()) == 1
| [
"find_kedro.find_kedro",
"pytest.mark.parametrize",
"textwrap.dedent"
] | [((5442, 5503), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""name, num_nodes, content"""', 'contents'], {}), "('name, num_nodes, content', contents)\n", (5465, 5503), False, 'import pytest\n'), ((5665, 5707), 'find_kedro.find_kedro', 'find_kedro', ([], {'directory': 'tmpdir', 'verbose': '(True)'}), '(directory=tmpdir, verbose=True)\n', (5675, 5707), False, 'from find_kedro import find_kedro\n'), ((5623, 5647), 'textwrap.dedent', 'textwrap.dedent', (['content'], {}), '(content)\n', (5638, 5647), False, 'import textwrap\n')] |
"""
Tests for the API /configfiles/ methods.
"""
import datetime
import mock
from oslo.config import cfg
from bricks.common import utils
from bricks.openstack.common import timeutils
from bricks.tests.api import base
from bricks.tests.api import utils as apiutils
from bricks.tests.db import utils as dbutils
class TestListConfigFiles(base.FunctionalTest):
def test_empty(self):
data = self.get_json('/configfiles?brickconfig_uuid=1be26c0b-03f2-4d2e-ae87-c02d7f33c123')
self.assertEqual([], data['configfiles'])
def test_one(self):
ndict = dbutils.get_test_configfile()
config = self.dbapi.create_configfile(ndict)
data = self.get_json('/configfiles?brickconfig_uuid=1be26c0b-03f2-4d2e-ae87-c02d7f33c123')
self.assertEqual(config['uuid'], data['configfiles'][0]["uuid"])
self.assertNotIn('environ', data['configfiles'][0])
def test_detail(self):
cdict = dbutils.get_test_configfile()
config = self.dbapi.create_configfile(cdict)
data = self.get_json('/configfiles/detail?brickconfig_uuid=1be26c0b-03f2-4d2e-ae87-c02d7f33c123')
self.assertEqual(config['uuid'], data['configfiles'][0]["uuid"])
self.assertIn('description', data['configfiles'][0])
def test_detail_against_single(self):
cdict = dbutils.get_test_configfile()
config = self.dbapi.create_configfile(cdict)
response = self.get_json('/configfiles/%s/detail' % config['uuid'],
expect_errors=True)
self.assertEqual(404, response.status_int)
def test_many(self):
cf_list = []
for id in range(5):
ndict = dbutils.get_test_configfile(
id=id, uuid=utils.generate_uuid())
cf = self.dbapi.create_configfile(ndict)
cf_list.append(cf['uuid'])
data = self.get_json('/configfiles?brickconfig_uuid=1be26c0b-03f2-4d2e-ae87-c02d7f33c123')
self.assertEqual(len(data['configfiles']), len(cf_list))
uuids = [n['uuid'] for n in data['configfiles']]
self.assertEqual(uuids.sort(), cf_list.sort())
def test_brickconfig_filter(self):
cf_list = []
brickconfig_uuid = utils.generate_uuid()
for id in range(5):
ndict = dbutils.get_test_configfile(
id=id, uuid=utils.generate_uuid(),
brickconfig_uuid=brickconfig_uuid)
cf = self.dbapi.create_configfile(ndict)
cf_list.append(cf['uuid'])
data = self.get_json(
'/configfiles?brickconfig_uuid=%s' % brickconfig_uuid)
self.assertEqual(len(data['configfiles']), len(cf_list))
uuids = [n['uuid'] for n in data['configfiles']]
self.assertEqual(uuids.sort(), cf_list.sort())
def test_links(self):
uuid = utils.generate_uuid()
cdict = dbutils.get_test_configfile(id=1, uuid=uuid)
self.dbapi.create_configfile(cdict)
data = self.get_json('/configfiles/%s' % uuid)
self.assertIn('links', data.keys())
self.assertEqual(2, len(data['links']))
self.assertIn(uuid, data['links'][0]['href'])
self.assertTrue(self.validate_link(data['links'][0]['href']))
self.assertTrue(self.validate_link(data['links'][1]['href']))
def test_collection_links(self):
configs = []
for id in range(5):
ndict = dbutils.get_test_configfile(
id=id, uuid=utils.generate_uuid())
cf = self.dbapi.create_configfile(ndict)
configs.append(cf['uuid'])
data = self.get_json('/configfiles?limit=3&brickconfig_uuid=1be26c0b-03f2-4d2e-ae87-c02d7f33c123')
self.assertEqual(3, len(data['configfiles']))
next_marker = data['configfiles'][-1]['uuid']
self.assertIn(next_marker, data['next'])
def test_collection_links_default_limit(self):
cfg.CONF.set_override('max_limit', 3, 'api')
configs = []
for id in range(5):
ndict = dbutils.get_test_configfile(
id=id, uuid=utils.generate_uuid())
cf = self.dbapi.create_configfile(ndict)
configs.append(cf['uuid'])
data = self.get_json('/configfiles?brickconfig_uuid=1be26c0b-03f2-4d2e-ae87-c02d7f33c123')
self.assertEqual(3, len(data['configfiles']))
next_marker = data['configfiles'][-1]['uuid']
self.assertIn(next_marker, data['next'])
class TestPatch(base.FunctionalTest):
def setUp(self):
super(TestPatch, self).setUp()
cdict = dbutils.get_test_configfile()
self.dbapi.create_configfile(cdict)
def test_update_not_found(self):
uuid = utils.generate_uuid()
response = self.patch_json('/configfiles/%s' % uuid,
[{'path': '/contents',
'value': 'RUN: ls -lash',
'op': 'replace'}],
expect_errors=True,
context=self.context)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
@mock.patch.object(timeutils, 'utcnow')
def test_replace_singular(self, mock_utcnow):
cdict = dbutils.get_test_configfile()
test_time = datetime.datetime(2000, 1, 1, 0, 0)
desc = 'foo'
mock_utcnow.return_value = test_time
response = self.patch_json('/configfiles/%s' % cdict['uuid'],
[{'path': '/description',
'value': desc, 'op': 'replace'}],
context=self.context)
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
result = self.get_json('/configfiles/%s' % cdict['uuid'])
self.assertEqual(desc, result['description'])
return_updated_at = timeutils.parse_isotime(
result['updated_at']).replace(tzinfo=None)
self.assertEqual(test_time, return_updated_at)
def test_remove_uuid(self):
cdict = dbutils.get_test_configfile()
response = self.patch_json('/configfiles/%s' % cdict['uuid'],
[{'path': '/uuid', 'op': 'remove'}],
expect_errors=True)
self.assertEqual(400, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
class TestPost(base.FunctionalTest):
@mock.patch.object(timeutils, 'utcnow')
def test_create_configfile(self, mock_utcnow):
cdict = dbutils.get_test_configfile()
test_time = datetime.datetime(2000, 1, 1, 0, 0)
mock_utcnow.return_value = test_time
response = self.post_json(
'/configfiles', cdict, context=self.context)
self.assertEqual(201, response.status_int)
result = self.get_json('/configfiles/%s' % cdict['uuid'])
self.assertEqual(cdict['uuid'], result['uuid'])
self.assertFalse(result['updated_at'])
return_created_at = timeutils.parse_isotime(result['created_at']).replace(tzinfo=None)
self.assertEqual(test_time, return_created_at)
def test_create_configfile_generate_uuid(self):
cdict = dbutils.get_test_configfile()
del cdict['uuid']
self.post_json('/configfiles', cdict, context=self.context)
result = self.get_json('/configfiles?brickconfig_uuid=1be26c0b-03f2-4d2e-ae87-c02d7f33c123')
self.assertEqual(cdict['name'],
result['configfiles'][0]['name'])
self.assertTrue(utils.is_uuid_like(result['configfiles'][0]['uuid']))
def test_create_configfile_invalid_name(self):
cdict = dbutils.get_test_configfile()
del cdict['name']
response = self.post_json('/configfiles', cdict,
expect_errors=True, context=self.context)
self.assertEqual(400, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
class TestDelete(base.FunctionalTest):
def test_delete_configfile(self):
cdict = dbutils.get_test_configfile()
self.dbapi.create_configfile(cdict)
self.delete('/configfiles/%s' % cdict['uuid'], context=self.context)
response = self.get_json('/configfiles/%s' % cdict['uuid'],
expect_errors=True, context=self.context)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
def test_delete_brick_not_found(self):
uuid = utils.generate_uuid()
response = self.delete('/configfiles/%s' % uuid,
expect_errors=True, context=self.context)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
| [
"mock.patch.object",
"bricks.common.utils.generate_uuid",
"oslo.config.cfg.CONF.set_override",
"bricks.common.utils.is_uuid_like",
"bricks.openstack.common.timeutils.parse_isotime",
"datetime.datetime",
"bricks.tests.db.utils.get_test_configfile"
] | [((5243, 5281), 'mock.patch.object', 'mock.patch.object', (['timeutils', '"""utcnow"""'], {}), "(timeutils, 'utcnow')\n", (5260, 5281), False, 'import mock\n'), ((6661, 6699), 'mock.patch.object', 'mock.patch.object', (['timeutils', '"""utcnow"""'], {}), "(timeutils, 'utcnow')\n", (6678, 6699), False, 'import mock\n'), ((579, 608), 'bricks.tests.db.utils.get_test_configfile', 'dbutils.get_test_configfile', ([], {}), '()\n', (606, 608), True, 'from bricks.tests.db import utils as dbutils\n'), ((938, 967), 'bricks.tests.db.utils.get_test_configfile', 'dbutils.get_test_configfile', ([], {}), '()\n', (965, 967), True, 'from bricks.tests.db import utils as dbutils\n'), ((1320, 1349), 'bricks.tests.db.utils.get_test_configfile', 'dbutils.get_test_configfile', ([], {}), '()\n', (1347, 1349), True, 'from bricks.tests.db import utils as dbutils\n'), ((2216, 2237), 'bricks.common.utils.generate_uuid', 'utils.generate_uuid', ([], {}), '()\n', (2235, 2237), False, 'from bricks.common import utils\n'), ((2828, 2849), 'bricks.common.utils.generate_uuid', 'utils.generate_uuid', ([], {}), '()\n', (2847, 2849), False, 'from bricks.common import utils\n'), ((2866, 2910), 'bricks.tests.db.utils.get_test_configfile', 'dbutils.get_test_configfile', ([], {'id': '(1)', 'uuid': 'uuid'}), '(id=1, uuid=uuid)\n', (2893, 2910), True, 'from bricks.tests.db import utils as dbutils\n'), ((3902, 3946), 'oslo.config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""max_limit"""', '(3)', '"""api"""'], {}), "('max_limit', 3, 'api')\n", (3923, 3946), False, 'from oslo.config import cfg\n'), ((4563, 4592), 'bricks.tests.db.utils.get_test_configfile', 'dbutils.get_test_configfile', ([], {}), '()\n', (4590, 4592), True, 'from bricks.tests.db import utils as dbutils\n'), ((4690, 4711), 'bricks.common.utils.generate_uuid', 'utils.generate_uuid', ([], {}), '()\n', (4709, 4711), False, 'from bricks.common import utils\n'), ((5348, 5377), 'bricks.tests.db.utils.get_test_configfile', 'dbutils.get_test_configfile', ([], {}), '()\n', (5375, 5377), True, 'from bricks.tests.db import utils as dbutils\n'), ((5398, 5433), 'datetime.datetime', 'datetime.datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0)\n', (5415, 5433), False, 'import datetime\n'), ((6214, 6243), 'bricks.tests.db.utils.get_test_configfile', 'dbutils.get_test_configfile', ([], {}), '()\n', (6241, 6243), True, 'from bricks.tests.db import utils as dbutils\n'), ((6767, 6796), 'bricks.tests.db.utils.get_test_configfile', 'dbutils.get_test_configfile', ([], {}), '()\n', (6794, 6796), True, 'from bricks.tests.db import utils as dbutils\n'), ((6817, 6852), 'datetime.datetime', 'datetime.datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0)\n', (6834, 6852), False, 'import datetime\n'), ((7430, 7459), 'bricks.tests.db.utils.get_test_configfile', 'dbutils.get_test_configfile', ([], {}), '()\n', (7457, 7459), True, 'from bricks.tests.db import utils as dbutils\n'), ((7900, 7929), 'bricks.tests.db.utils.get_test_configfile', 'dbutils.get_test_configfile', ([], {}), '()\n', (7927, 7929), True, 'from bricks.tests.db import utils as dbutils\n'), ((8360, 8389), 'bricks.tests.db.utils.get_test_configfile', 'dbutils.get_test_configfile', ([], {}), '()\n', (8387, 8389), True, 'from bricks.tests.db import utils as dbutils\n'), ((8888, 8909), 'bricks.common.utils.generate_uuid', 'utils.generate_uuid', ([], {}), '()\n', (8907, 8909), False, 'from bricks.common import utils\n'), ((7778, 7830), 'bricks.common.utils.is_uuid_like', 
'utils.is_uuid_like', (["result['configfiles'][0]['uuid']"], {}), "(result['configfiles'][0]['uuid'])\n", (7796, 7830), False, 'from bricks.common import utils\n'), ((6029, 6074), 'bricks.openstack.common.timeutils.parse_isotime', 'timeutils.parse_isotime', (["result['updated_at']"], {}), "(result['updated_at'])\n", (6052, 6074), False, 'from bricks.openstack.common import timeutils\n'), ((7239, 7284), 'bricks.openstack.common.timeutils.parse_isotime', 'timeutils.parse_isotime', (["result['created_at']"], {}), "(result['created_at'])\n", (7262, 7284), False, 'from bricks.openstack.common import timeutils\n'), ((1735, 1756), 'bricks.common.utils.generate_uuid', 'utils.generate_uuid', ([], {}), '()\n', (1754, 1756), False, 'from bricks.common import utils\n'), ((2343, 2364), 'bricks.common.utils.generate_uuid', 'utils.generate_uuid', ([], {}), '()\n', (2362, 2364), False, 'from bricks.common import utils\n'), ((3461, 3482), 'bricks.common.utils.generate_uuid', 'utils.generate_uuid', ([], {}), '()\n', (3480, 3482), False, 'from bricks.common import utils\n'), ((4073, 4094), 'bricks.common.utils.generate_uuid', 'utils.generate_uuid', ([], {}), '()\n', (4092, 4094), False, 'from bricks.common import utils\n')] |
# -*- coding: utf8 -*-
import itertools
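# Format a row of bytes as space-separated hex pairs plus an ASCII rendering ('.' for non-printable bytes).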
def formatLine(r):
r = list(r)
l1 = ' '.join('{:02x}'.format(c) for c in r)
l2 = ''.join(chr(c) if 32 <= c < 127 else '.' for c in r)
return l1, l2
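# Yield hex-dump lines for the data, 16 bytes per line: offset, hex column, ASCII column.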
def hexDump(data):
size, over = divmod(len(data), 16)
if over:
size += 1
offsets = range(0, size * 16, 16)
for o in offsets:
row = itertools.islice(data, o, o + 16)
yield '{:010X}: {:48} {:16}'.format(o, *formatLine(row))
| [
"itertools.islice"
] | [((372, 405), 'itertools.islice', 'itertools.islice', (['data', 'o', '(o + 16)'], {}), '(data, o, o + 16)\n', (388, 405), False, 'import itertools\n')] |
from caos._internal.constants import ValidDependencyVersionRegex
from caos._internal.exceptions import InvalidDependencyVersionFormat, UnexpectedError
from typing import NewType
PipReadyDependency = NewType(name="PipReadyDependency", tp=str)
def _is_dependency_name_in_wheel(dependency_name: str, wheel: str, version: str) -> bool:
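    # Strip the "-<version>" suffix from the wheel name, normalise separators and case, then check it ends with the dependency name.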
wheel = wheel[:-1*len("-{}".format(version))]\
.replace("_", "-")\
.lower()
return wheel.endswith(dependency_name.replace("_", "-").lower())
def _get_dependency_version_format(dependency_name: str, version: str) -> ValidDependencyVersionRegex:
"""
Raises:
InvalidDependencyVersionFormat
"""
if ValidDependencyVersionRegex.MAJOR_MINOR_PATCH.value.match(version):
return ValidDependencyVersionRegex.MAJOR_MINOR_PATCH
if ValidDependencyVersionRegex.MAJOR_MINOR.value.match(version):
return ValidDependencyVersionRegex.MAJOR_MINOR
if ValidDependencyVersionRegex.MAJOR.value.match(version):
return ValidDependencyVersionRegex.MAJOR
if ValidDependencyVersionRegex.LATEST.value.match(version):
return ValidDependencyVersionRegex.LATEST
wheel_info = ValidDependencyVersionRegex.WHEEL.value.match(version)
if wheel_info:
wheel = wheel_info.group("wheel")
wheel_version = wheel_info.group("version")
if not _is_dependency_name_in_wheel(dependency_name=dependency_name, wheel=wheel, version=wheel_version):
raise InvalidDependencyVersionFormat(
"The dependency '{dep}' is not present in the wheel filename '{wheel}'"
.format(dep=dependency_name, wheel=version)
)
if not ValidDependencyVersionRegex.MAJOR_MINOR_PATCH.value.match(wheel_version) and \
not ValidDependencyVersionRegex.MAJOR_MINOR.value.match(wheel_version) and \
not ValidDependencyVersionRegex.MAJOR.value.match(wheel_version):
raise InvalidDependencyVersionFormat(
"\nThe version format for the wheel dependency '{dep}' is invalid. Use a 'Final release' format "
"(see https://www.python.org/dev/peps/pep-0440/#final-releases)"
.format(dep=dependency_name)
)
return ValidDependencyVersionRegex.WHEEL
if ValidDependencyVersionRegex.TARGZ.value.match(version):
return ValidDependencyVersionRegex.TARGZ
raise InvalidDependencyVersionFormat(
"\nInvalid version format for the dependency '{dep}'. Only the following formats are allowed:"
"\n - 'latest' or 'LATEST'"
"\n - Final release format (see https://www.python.org/dev/peps/pep-0440/#final-releases)"
"\n - Wheel Binary Packages (see https://www.python.org/dev/peps/pep-0491/#file-format)"
"\n - .tar.gz Packages"
.format(dep=dependency_name)
)
def generate_pip_ready_dependency(dependency_name: str, version: str) -> PipReadyDependency:
"""
Raises:
InvalidDependencyVersionFormat
UnexpectedError
"""
dependency_regex: ValidDependencyVersionRegex = _get_dependency_version_format(
dependency_name=dependency_name,
version=version
)
if dependency_regex == ValidDependencyVersionRegex.MAJOR_MINOR_PATCH: # (^|~) X.X.X
if version.startswith("~"): # Allow patch updates
return version.replace("~", "~=") # ~=X.X.X
elif version.startswith("^"): # Allow minor updates
version = version.replace("^", "")
major, minor, patch = version.split(".")
return "~={}.{}".format(major, minor) # ~=X.X
else: # Allow exact version
return "=={}".format(version) # ==X.X.X
elif dependency_regex == ValidDependencyVersionRegex.MAJOR_MINOR:
if version.startswith("~"): # Allow patch updates
version = version.replace("~", "")
major, minor = version.split(".")
return "~={}.{}.0".format(major, minor) # ~=X.X.0
elif version.startswith("^"): # Allow minor updates
version = version.replace("^", "~=")
return version # ~=X.X
else: # Allow exact version
return "=={}".format(version) # ==X.X
elif dependency_regex == ValidDependencyVersionRegex.MAJOR:
if version.startswith("~"): # Allow patch updates
version = version.replace("~", "")
return "~={}.0.0".format(version) # ~=X.0.0
elif version.startswith("^"): # Allow minor updates
version = version.replace("^", "")
return "~={}.0".format(version) # ~=X.0
else: # Allow exact version
return "=={}".format(version) # ==X
elif dependency_regex == ValidDependencyVersionRegex.LATEST:
return dependency_name.lower()
elif dependency_regex == ValidDependencyVersionRegex.WHEEL:
return version
elif dependency_regex == ValidDependencyVersionRegex.TARGZ:
return version
raise UnexpectedError("The dependency given should have thrown 'InvalidDependencyVersionFormat' but it did not")
| [
"caos._internal.exceptions.UnexpectedError",
"caos._internal.constants.ValidDependencyVersionRegex.WHEEL.value.match",
"caos._internal.constants.ValidDependencyVersionRegex.MAJOR_MINOR.value.match",
"caos._internal.constants.ValidDependencyVersionRegex.TARGZ.value.match",
"caos._internal.constants.ValidDependencyVersionRegex.MAJOR_MINOR_PATCH.value.match",
"typing.NewType",
"caos._internal.constants.ValidDependencyVersionRegex.LATEST.value.match",
"caos._internal.constants.ValidDependencyVersionRegex.MAJOR.value.match"
] | [((200, 242), 'typing.NewType', 'NewType', ([], {'name': '"""PipReadyDependency"""', 'tp': 'str'}), "(name='PipReadyDependency', tp=str)\n", (207, 242), False, 'from typing import NewType\n'), ((687, 753), 'caos._internal.constants.ValidDependencyVersionRegex.MAJOR_MINOR_PATCH.value.match', 'ValidDependencyVersionRegex.MAJOR_MINOR_PATCH.value.match', (['version'], {}), '(version)\n', (744, 753), False, 'from caos._internal.constants import ValidDependencyVersionRegex\n'), ((824, 884), 'caos._internal.constants.ValidDependencyVersionRegex.MAJOR_MINOR.value.match', 'ValidDependencyVersionRegex.MAJOR_MINOR.value.match', (['version'], {}), '(version)\n', (875, 884), False, 'from caos._internal.constants import ValidDependencyVersionRegex\n'), ((949, 1003), 'caos._internal.constants.ValidDependencyVersionRegex.MAJOR.value.match', 'ValidDependencyVersionRegex.MAJOR.value.match', (['version'], {}), '(version)\n', (994, 1003), False, 'from caos._internal.constants import ValidDependencyVersionRegex\n'), ((1062, 1117), 'caos._internal.constants.ValidDependencyVersionRegex.LATEST.value.match', 'ValidDependencyVersionRegex.LATEST.value.match', (['version'], {}), '(version)\n', (1108, 1117), False, 'from caos._internal.constants import ValidDependencyVersionRegex\n'), ((1187, 1241), 'caos._internal.constants.ValidDependencyVersionRegex.WHEEL.value.match', 'ValidDependencyVersionRegex.WHEEL.value.match', (['version'], {}), '(version)\n', (1232, 1241), False, 'from caos._internal.constants import ValidDependencyVersionRegex\n'), ((2304, 2358), 'caos._internal.constants.ValidDependencyVersionRegex.TARGZ.value.match', 'ValidDependencyVersionRegex.TARGZ.value.match', (['version'], {}), '(version)\n', (2349, 2358), False, 'from caos._internal.constants import ValidDependencyVersionRegex\n'), ((5016, 5132), 'caos._internal.exceptions.UnexpectedError', 'UnexpectedError', (['"""The dependency given should have thrown \'InvalidDependencyVersionFormat\' but it did not"""'], {}), '(\n "The dependency given should have thrown \'InvalidDependencyVersionFormat\' but it did not"\n )\n', (5031, 5132), False, 'from caos._internal.exceptions import InvalidDependencyVersionFormat, UnexpectedError\n'), ((1697, 1769), 'caos._internal.constants.ValidDependencyVersionRegex.MAJOR_MINOR_PATCH.value.match', 'ValidDependencyVersionRegex.MAJOR_MINOR_PATCH.value.match', (['wheel_version'], {}), '(wheel_version)\n', (1754, 1769), False, 'from caos._internal.constants import ValidDependencyVersionRegex\n'), ((1791, 1857), 'caos._internal.constants.ValidDependencyVersionRegex.MAJOR_MINOR.value.match', 'ValidDependencyVersionRegex.MAJOR_MINOR.value.match', (['wheel_version'], {}), '(wheel_version)\n', (1842, 1857), False, 'from caos._internal.constants import ValidDependencyVersionRegex\n'), ((1879, 1939), 'caos._internal.constants.ValidDependencyVersionRegex.MAJOR.value.match', 'ValidDependencyVersionRegex.MAJOR.value.match', (['wheel_version'], {}), '(wheel_version)\n', (1924, 1939), False, 'from caos._internal.constants import ValidDependencyVersionRegex\n')] |
import os
from django.shortcuts import render,get_object_or_404, redirect
from django.http import FileResponse
from .models import GameCategory, Game
from comment.forms import GameCommentForm,SubGCommentForm
from comment.models import SubGComment
from .forms import UploadGameForm
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def portfllio(request):
categories = GameCategory.objects.all().order_by("name")
gameList = []
for cate in categories:
games = Game.objects.filter(category = cate.pk).order_by("-createTime")
temp = (cate,games)
gameList.append(temp)
return render(request, 'home/portfolio.html', context={'gameList': gameList})
def gameInfo(request,pk):
game = get_object_or_404(Game, pk=pk)
form = GameCommentForm()
subForm = SubGCommentForm()
c = game.gamecomment_set.all()
comments = []
for comment in c:
subComment = SubGComment.objects.filter(parentComment=comment.pk).order_by("createTime")
temp = (comment,subComment)
comments.append(temp)
context = {
'game': game,
'form': form,
'subForm': subForm,
'comments': comments,
}
return render(request, 'game/game.html', context=context)
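# Stream the stored game file as an attachment and record the download via increase_times().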
def downloadGame(request, pk):
gameObj = get_object_or_404(Game, pk=pk)
url = BASE_DIR+str(gameObj.game.url).replace('/', '\\')
name = str(gameObj.game)
file = open(url, 'rb')
response = FileResponse(file)
response['Content-Type'] = 'application/octet-stream'
response['Content-Disposition'] = 'attachment;filename="{0}"'.format(name)
gameObj.increase_times()
return response
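# Handle the upload form: validate the metadata, attach the uploaded game/icon/foreground image files, and save.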
def uploadGame(request):
categories = GameCategory.objects.all()
if request.method == 'POST':
form = UploadGameForm(request.POST)
gamelication = request.FILES['game']
if form.is_valid():
game = form.save(commit=False)
game.game = gamelication
if 'icon' not in request.POST:
game.icon = request.FILES['icon']
if 'foreImg' not in request.POST:
game.foreImg = request.FILES['foreImg']
game.save()
return redirect('/')
else:
form = UploadGameForm()
return render(request, 'game/upload.html', context={'form': form, 'categories': categories})
def deleteGame(request, pk):
Game.objects.filter(pk=pk).delete()
return redirect("/user/")
def editGame(request, pk):
categories = GameCategory.objects.all()
game = get_object_or_404(Game, pk=pk)
if request.method == 'POST':
content = request.POST
game.name = content['name']
game.version = content['version']
        game.category_id = content['category']
game.inTro = content['inTro']
if 'icon' not in request.POST:
game.icon = request.FILES['icon']
if 'foreImg' not in request.POST:
game.foreImg = request.FILES['foreImg']
if 'game' not in request.POST:
game.game = request.FILES['game']
game.save()
return redirect("/user/")
context = {'categories': categories,'game': game}
return render(request, 'game/edit.html',context=context) | [
"os.path.abspath",
"comment.models.SubGComment.objects.filter",
"django.shortcuts.redirect",
"django.http.FileResponse",
"comment.forms.GameCommentForm",
"django.shortcuts.get_object_or_404",
"comment.forms.SubGCommentForm",
"django.shortcuts.render"
] | [((637, 707), 'django.shortcuts.render', 'render', (['request', '"""home/portfolio.html"""'], {'context': "{'gameList': gameList}"}), "(request, 'home/portfolio.html', context={'gameList': gameList})\n", (643, 707), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((747, 777), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Game'], {'pk': 'pk'}), '(Game, pk=pk)\n', (764, 777), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((789, 806), 'comment.forms.GameCommentForm', 'GameCommentForm', ([], {}), '()\n', (804, 806), False, 'from comment.forms import GameCommentForm, SubGCommentForm\n'), ((821, 838), 'comment.forms.SubGCommentForm', 'SubGCommentForm', ([], {}), '()\n', (836, 838), False, 'from comment.forms import GameCommentForm, SubGCommentForm\n'), ((1213, 1263), 'django.shortcuts.render', 'render', (['request', '"""game/game.html"""'], {'context': 'context'}), "(request, 'game/game.html', context=context)\n", (1219, 1263), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((1310, 1340), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Game'], {'pk': 'pk'}), '(Game, pk=pk)\n', (1327, 1340), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((1472, 1490), 'django.http.FileResponse', 'FileResponse', (['file'], {}), '(file)\n', (1484, 1490), False, 'from django.http import FileResponse\n'), ((2285, 2374), 'django.shortcuts.render', 'render', (['request', '"""game/upload.html"""'], {'context': "{'form': form, 'categories': categories}"}), "(request, 'game/upload.html', context={'form': form, 'categories':\n categories})\n", (2291, 2374), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2452, 2470), 'django.shortcuts.redirect', 'redirect', (['"""/user/"""'], {}), "('/user/')\n", (2460, 2470), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2554, 2584), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Game'], {'pk': 'pk'}), '(Game, pk=pk)\n', (2571, 2584), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((3197, 3247), 'django.shortcuts.render', 'render', (['request', '"""game/edit.html"""'], {'context': 'context'}), "(request, 'game/edit.html', context=context)\n", (3203, 3247), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((327, 352), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (342, 352), False, 'import os\n'), ((3112, 3130), 'django.shortcuts.redirect', 'redirect', (['"""/user/"""'], {}), "('/user/')\n", (3120, 3130), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2218, 2231), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (2226, 2231), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((935, 987), 'comment.models.SubGComment.objects.filter', 'SubGComment.objects.filter', ([], {'parentComment': 'comment.pk'}), '(parentComment=comment.pk)\n', (961, 987), False, 'from comment.models import SubGComment\n')] |
#!/usr/bin/env python
#
# Dummy script to replace numactl in testing environment
#
import argparse
import subprocess
print("Using dummy numactl")
parser = argparse.ArgumentParser()
parser.add_argument("cmd", nargs="*")
args, unknown = parser.parse_known_args()
p = subprocess.Popen(args.cmd)
p.wait()
| [
"subprocess.Popen",
"argparse.ArgumentParser"
] | [((157, 182), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (180, 182), False, 'import argparse\n'), ((269, 295), 'subprocess.Popen', 'subprocess.Popen', (['args.cmd'], {}), '(args.cmd)\n', (285, 295), False, 'import subprocess\n')] |
import json
import os
from pathlib import Path
current_path = os.path.abspath(__file__)
default_raw_path = os.path.join(current_path, '../../data/datasets/twitter/raw/')
unlabeled_data_path = Path(os.path.join(os.path.abspath(__file__), '../../../data/datasets/twitter/raw/unlabeled'))
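# Split a raw tweet dump into two JSONL files: source tweets for claim annotation and replies (paired with their source) for stance annotation.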
def generate_label_data(file_name: str, stance_out_name: str, claim_out_name: str):
file_path = Path(unlabeled_data_path, file_name)
stance_out_path = Path(unlabeled_data_path, stance_out_name)
claim_out_path = Path(unlabeled_data_path, claim_out_name)
with file_path.open() as data, stance_out_path.open(mode='w') as stance_out, claim_out_path.open(mode='w') as claim_out:
for line in data:
tweet_dict = json.loads(line.split('\t')[1])
source_tweet = tweet_dict[line.split('\t')[0]]
source_tweet['text'] = source_tweet['full_text']
source_tweet['labels'] = []
json.dump(source_tweet, claim_out)
claim_out.write('\n')
for tweet_id, tweet in tweet_dict.items():
if source_tweet == tweet:
continue
tweet['text'] = 'Source: {}\n\nReply: {}'.format(source_tweet['full_text'], tweet['full_text'])
tweet['labels'] = []
json.dump(tweet, stance_out)
stance_out.write('\n')
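# Separate annotated examples into 'agree' and 'disagree' files depending on whether all annotators assigned the same label set.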
def anno_agreement_check(anno_data_file: str, agree_file: str, disagree_file: str):
anno_data_path = Path(os.path.join(default_raw_path, anno_data_file))
agree_path = Path(os.path.join(default_raw_path, agree_file))
disagree_path = Path(os.path.join(default_raw_path, disagree_file))
with anno_data_path.open(encoding='utf-8') as anno_data, agree_path.open(mode='w', encoding='utf-8') as agree_data, disagree_path.open(
mode='w', encoding='utf-8') as disagree_data:
for line in anno_data:
disagreement = False
annotations = json.loads(line)['annotations']
if len(annotations) == 1:
line = json.loads(line)
line['annotations'] = [annotations[0]['label']]
json.dump(line, agree_data)
agree_data.write('\n')
else:
user_labels = {}
for annotation in annotations:
user_labels.setdefault(annotation['user'], set()).add(annotation['label'])
for user_id_a, labels_a in user_labels.items():
for user_id_b, labels_b in user_labels.items():
if labels_a != labels_b:
disagree_data.write(line)
disagreement = True
break
if disagreement:
break
if not disagreement:
line = json.loads(line)
if user_labels:
line['annotations'] = list(user_labels[1])
if not disagreement:
print(line)
json.dump(line, agree_data)
agree_data.write('\n')
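# Map numeric claim annotation ids onto the tweet's Claim, Verifiability and TruthStatus fields.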
def integrate_claim_label(annotation, tweet):
veracity_map = {5: 'True', 6: 'Unverified', 7: 'False'}
    if 1 not in annotation['annotations'] and 2 not in annotation['annotations']:
err_msg = "Error in claim labels, must contain either '1' or '2', denominating 'claim'" \
" and 'non-claim' respectively. Given labels: {}"
raise RuntimeError(
err_msg.format(annotation['annotations']))
if 2 in annotation['annotations']:
tweet['Claim'] = False
else:
tweet['Claim'] = True
    if 3 not in annotation['annotations'] and 4 not in annotation['annotations']:
err_msg = "Error in claim labels, must contain either '3' or '4', denominating " \
"'verifiable' and 'subjective' respectively. Given labels: {}"
raise RuntimeError(
err_msg.format(annotation['annotations']))
if 4 in annotation['annotations']:
tweet['Verifiability'] = 'Subjective'
else:
tweet['Verifiability'] = 'Verifiable'
    if 5 not in annotation['annotations'] and 6 not in annotation['annotations'] and 7 not in annotation['annotations']:
err_msg = "Error in claim labels, must contain either '5', '6' or '7', " \
"denominating 'True', 'Unverified' and 'False' respectively. Given " \
"labels: {}"
raise RuntimeError(
err_msg.format(annotation['annotations']))
for x in [5, 6, 7]:
if x in annotation['annotations']:
tweet['TruthStatus'] = veracity_map[x]
def integrate_sdqc_label(annotation, tweet):
sdqc_map = {1: 'Supporting', 2: 'Denying', 3: 'Querying', 4: 'Commenting'}
if len(annotation['annotations']) > 1:
err_msg = "{} SDQC labels found, only one allowed"
raise RuntimeError(
err_msg.format(len(annotation['annotations'])))
tweet['SDQC_Submission'] = sdqc_map[annotation['annotations'][0]]
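# Write finished annotations back into the tab-separated tweet database using the chosen label scheme ('claim' or 'sdqc').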
def integrate_label_data(anno_data_path: Path, database_path: Path, label_scheme: str):
if label_scheme not in ['claim', 'sdqc']:
err_msg = "Unrecognized label scheme: {}, please use 'sdqc' or 'claim'"
raise RuntimeError(
err_msg.format(label_scheme))
with anno_data_path.open(encoding='utf-8') as labeled_data, database_path.open(encoding='utf-8') as database:
        data = []
        # Read all annotation lines up front so the inner loop can re-scan them for every database row.
        annotation_lines = labeled_data.readlines()
for line in database:
not_altered = True
tweet_dict = json.loads(line.split('\t')[1])
            for annotation in annotation_lines:
annotation = json.loads(annotation)
# Data-point not yet annotated
if not annotation['annotations']:
continue
for tweet_id, tweet in tweet_dict.items():
if tweet['full_text'] == annotation['text']:
if label_scheme == 'claim':
integrate_claim_label(annotation, tweet)
if label_scheme == 'sdqc':
integrate_sdqc_label(annotation, tweet)
not_altered = False
break
if not_altered:
data.append(line)
else:
data.append(line.split('\t')[0] + '\t' + json.dumps(tweet_dict))
with database_path.open(mode='w', encoding='utf-8') as database:
for line in data:
database.write(line)
#anno_agreement_check(Path('test.json'), Path('agree.json'), Path('disagree.json'))
#generate_label_data(test_data, 'stance.jsonl', 'claim.jsonl')
| [
"json.dump",
"os.path.abspath",
"json.loads",
"json.dumps",
"pathlib.Path",
"os.path.join"
] | [((63, 88), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (78, 88), False, 'import os\n'), ((108, 170), 'os.path.join', 'os.path.join', (['current_path', '"""../../data/datasets/twitter/raw/"""'], {}), "(current_path, '../../data/datasets/twitter/raw/')\n", (120, 170), False, 'import os\n'), ((389, 425), 'pathlib.Path', 'Path', (['unlabeled_data_path', 'file_name'], {}), '(unlabeled_data_path, file_name)\n', (393, 425), False, 'from pathlib import Path\n'), ((448, 490), 'pathlib.Path', 'Path', (['unlabeled_data_path', 'stance_out_name'], {}), '(unlabeled_data_path, stance_out_name)\n', (452, 490), False, 'from pathlib import Path\n'), ((512, 553), 'pathlib.Path', 'Path', (['unlabeled_data_path', 'claim_out_name'], {}), '(unlabeled_data_path, claim_out_name)\n', (516, 553), False, 'from pathlib import Path\n'), ((211, 236), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (226, 236), False, 'import os\n'), ((1474, 1520), 'os.path.join', 'os.path.join', (['default_raw_path', 'anno_data_file'], {}), '(default_raw_path, anno_data_file)\n', (1486, 1520), False, 'import os\n'), ((1544, 1586), 'os.path.join', 'os.path.join', (['default_raw_path', 'agree_file'], {}), '(default_raw_path, agree_file)\n', (1556, 1586), False, 'import os\n'), ((1613, 1658), 'os.path.join', 'os.path.join', (['default_raw_path', 'disagree_file'], {}), '(default_raw_path, disagree_file)\n', (1625, 1658), False, 'import os\n'), ((934, 968), 'json.dump', 'json.dump', (['source_tweet', 'claim_out'], {}), '(source_tweet, claim_out)\n', (943, 968), False, 'import json\n'), ((1294, 1322), 'json.dump', 'json.dump', (['tweet', 'stance_out'], {}), '(tweet, stance_out)\n', (1303, 1322), False, 'import json\n'), ((1949, 1965), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1959, 1965), False, 'import json\n'), ((2042, 2058), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (2052, 2058), False, 'import json\n'), ((2139, 2166), 'json.dump', 'json.dump', (['line', 'agree_data'], {}), '(line, agree_data)\n', (2148, 2166), False, 'import json\n'), ((3044, 3071), 'json.dump', 'json.dump', (['line', 'agree_data'], {}), '(line, agree_data)\n', (3053, 3071), False, 'import json\n'), ((5630, 5652), 'json.loads', 'json.loads', (['annotation'], {}), '(annotation)\n', (5640, 5652), False, 'import json\n'), ((2847, 2863), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (2857, 2863), False, 'import json\n'), ((6354, 6376), 'json.dumps', 'json.dumps', (['tweet_dict'], {}), '(tweet_dict)\n', (6364, 6376), False, 'import json\n')] |
import datetime
from django.contrib.syndication.views import Feed
from django.utils import timezone
from django.urls import reverse
from django.views import generic
from .models import Post
class PostDetailView(generic.DetailView):
model = Post
queryset = Post.objects.exclude(status='D')
template_name = 'blog/detail.html'
class PostView(generic.ListView):
template_name = 'blog/list.html'
context_object_name = 'posts'
def get_queryset(self):
""" Fetch only published posts, and order by descending date """
return Post.objects.filter(
published_at__lte=timezone.now(), status="P"
).order_by('-published_at')
class RSSFeed(Feed):
title = "utf9k"
link = "/blog/"
description = "Blog posts from utf9k"
def items(self):
return Post.objects.order_by('-published_at')
def item_title(self, item):
return item.title
def item_pubdate(self, item):
return datetime.datetime.combine(item.published_at, datetime.time())
def item_description(self, item):
return item.render()
def item_link(self, item):
return reverse('blog:detail', args=[item.slug]) | [
"django.urls.reverse",
"datetime.time",
"django.utils.timezone.now"
] | [((1141, 1181), 'django.urls.reverse', 'reverse', (['"""blog:detail"""'], {'args': '[item.slug]'}), "('blog:detail', args=[item.slug])\n", (1148, 1181), False, 'from django.urls import reverse\n'), ((1009, 1024), 'datetime.time', 'datetime.time', ([], {}), '()\n', (1022, 1024), False, 'import datetime\n'), ((612, 626), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (624, 626), False, 'from django.utils import timezone\n')] |
from fractions import Fraction
from django.db import models
from django.contrib.auth.models import User
from django.dispatch import receiver
from django.db.models.signals import post_save
@receiver(post_save, sender=User)
def create_blank_statistics(sender, instance=None, created=False, **kwargs):
if created:
Statistic.objects.create(user=instance)
class Language(models.Model):
name = models.CharField(max_length=32)
users = models.ManyToManyField(User, related_name='selected_languages', blank=True)
language_code = models.CharField(max_length=32, null=True, blank=True)
def __str__(self):
return self.name
def __eq__(self, other):
if isinstance(other, Language):
return self.name == other.name
else:
return False
class Achievement(models.Model):
LEVEL_CHOICES = (
("1", "Bronze"),
("2", "Silver"),
("3", "Gold"),
("4", "Diamond"),
)
condition = models.TextField(max_length=2048)
name = models.CharField(max_length=128)
font_awesome_icon = models.TextField(max_length=2048)
users = models.ManyToManyField(User, related_name="achievements", blank=True)
level = models.CharField(max_length=1, choices=LEVEL_CHOICES)
score = models.IntegerField()
def __str__(self):
return str(self.name)
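    # Award this achievement to the user when its stored condition evaluates truthy and it has not been granted yet.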
def try_award_to(self, user):
has_achievement = self in user.achievements.all()
if has_achievement:
return False
condition_result = eval(str(self.condition))
if condition_result:
user.achievements.add(self)
return True
else:
return False
class UserFollowing(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='following')
following = models.ForeignKey(User, on_delete=models.CASCADE, related_name='followed_by')
class Statistic(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='statistics')
correctly_swiped_taboo_cards = models.IntegerField(default=0)
swiped_taboo_cards = models.IntegerField(default=0)
correctly_ans_flashcards = models.IntegerField(default=0)
ans_flashcards = models.IntegerField(default=0)
translated_words = models.IntegerField(default=0)
@property
def taboo_efficiency(self):
        if self.swiped_taboo_cards != 0:
return round(Fraction(self.correctly_swiped_taboo_cards, self.swiped_taboo_cards), 2)
else:
return 0
class TabooCard(models.Model):
key_word = models.CharField(max_length=128)
black_list = models.CharField(max_length=2048)
owner = models.ForeignKey(User, on_delete=models.DO_NOTHING, related_name='cards')
language = models.ForeignKey(Language, on_delete=models.DO_NOTHING, related_name='cards')
times_shown = models.IntegerField(default=0)
answered_correctly = models.IntegerField(default=0)
@property
def difficulty(self):
        if self.times_shown == 0:
return "NOT ENOUGH STATS"
ratio = Fraction(self.answered_correctly, self.times_shown)
if 0 <= ratio < 0.25:
return "INSANE"
elif 0.25 <= ratio < 0.5:
return "HARD"
elif 0.5 <= ratio < 0.75:
return "MEDIUM"
elif 0.75 <= ratio:
return "EASY"
@property
def card_efficiency(self):
        if self.times_shown != 0:
return round(Fraction(self.answered_correctly, self.times_shown), 2)
else:
return 0
def __str__(self):
return str(self.pk) + ' | ' + str(self.key_word) + ' | ' + str(self.language.language_code)
@receiver(post_save, sender=Statistic)
@receiver(post_save, sender=UserFollowing)
@receiver(post_save, sender=User)
def trigger_achievements_after_statistics_save(sender, instance=None, created=False, **kwargs):
if isinstance(instance, User):
if not created:
grant_achievements(instance)
else:
grant_achievements(instance.user)
def grant_achievements(user):
for achievement in Achievement.objects.all():
achievement.try_award_to(user)
| [
"django.db.models.TextField",
"django.db.models.OneToOneField",
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.dispatch.receiver",
"django.db.models.IntegerField",
"fractions.Fraction"
] | [((192, 224), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'User'}), '(post_save, sender=User)\n', (200, 224), False, 'from django.dispatch import receiver\n'), ((3704, 3741), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'Statistic'}), '(post_save, sender=Statistic)\n', (3712, 3741), False, 'from django.dispatch import receiver\n'), ((3743, 3784), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'UserFollowing'}), '(post_save, sender=UserFollowing)\n', (3751, 3784), False, 'from django.dispatch import receiver\n'), ((3786, 3818), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'User'}), '(post_save, sender=User)\n', (3794, 3818), False, 'from django.dispatch import receiver\n'), ((409, 440), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(32)'}), '(max_length=32)\n', (425, 440), False, 'from django.db import models\n'), ((453, 528), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['User'], {'related_name': '"""selected_languages"""', 'blank': '(True)'}), "(User, related_name='selected_languages', blank=True)\n", (475, 528), False, 'from django.db import models\n'), ((549, 603), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(32)', 'null': '(True)', 'blank': '(True)'}), '(max_length=32, null=True, blank=True)\n', (565, 603), False, 'from django.db import models\n'), ((984, 1017), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(2048)'}), '(max_length=2048)\n', (1000, 1017), False, 'from django.db import models\n'), ((1029, 1061), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (1045, 1061), False, 'from django.db import models\n'), ((1086, 1119), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(2048)'}), '(max_length=2048)\n', (1102, 1119), False, 'from django.db import models\n'), ((1132, 1201), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['User'], {'related_name': '"""achievements"""', 'blank': '(True)'}), "(User, related_name='achievements', blank=True)\n", (1154, 1201), False, 'from django.db import models\n'), ((1214, 1267), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1)', 'choices': 'LEVEL_CHOICES'}), '(max_length=1, choices=LEVEL_CHOICES)\n', (1230, 1267), False, 'from django.db import models\n'), ((1280, 1301), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1299, 1301), False, 'from django.db import models\n'), ((1735, 1810), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""following"""'}), "(User, on_delete=models.CASCADE, related_name='following')\n", (1752, 1810), False, 'from django.db import models\n'), ((1827, 1904), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""followed_by"""'}), "(User, on_delete=models.CASCADE, related_name='followed_by')\n", (1844, 1904), False, 'from django.db import models\n'), ((1949, 2028), 'django.db.models.OneToOneField', 'models.OneToOneField', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""statistics"""'}), "(User, on_delete=models.CASCADE, related_name='statistics')\n", (1969, 2028), False, 'from django.db import models\n'), ((2064, 2094), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2083, 2094), False, 'from django.db import 
models\n'), ((2120, 2150), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2139, 2150), False, 'from django.db import models\n'), ((2182, 2212), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2201, 2212), False, 'from django.db import models\n'), ((2234, 2264), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2253, 2264), False, 'from django.db import models\n'), ((2288, 2318), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2307, 2318), False, 'from django.db import models\n'), ((2592, 2624), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (2608, 2624), False, 'from django.db import models\n'), ((2642, 2675), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2048)'}), '(max_length=2048)\n', (2658, 2675), False, 'from django.db import models\n'), ((2688, 2762), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.DO_NOTHING', 'related_name': '"""cards"""'}), "(User, on_delete=models.DO_NOTHING, related_name='cards')\n", (2705, 2762), False, 'from django.db import models\n'), ((2778, 2856), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Language'], {'on_delete': 'models.DO_NOTHING', 'related_name': '"""cards"""'}), "(Language, on_delete=models.DO_NOTHING, related_name='cards')\n", (2795, 2856), False, 'from django.db import models\n'), ((2875, 2905), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2894, 2905), False, 'from django.db import models\n'), ((2931, 2961), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2950, 2961), False, 'from django.db import models\n'), ((3091, 3142), 'fractions.Fraction', 'Fraction', (['self.answered_correctly', 'self.times_shown'], {}), '(self.answered_correctly, self.times_shown)\n', (3099, 3142), False, 'from fractions import Fraction\n'), ((2436, 2504), 'fractions.Fraction', 'Fraction', (['self.correctly_swiped_taboo_cards', 'self.swiped_taboo_cards'], {}), '(self.correctly_swiped_taboo_cards, self.swiped_taboo_cards)\n', (2444, 2504), False, 'from fractions import Fraction\n'), ((3486, 3537), 'fractions.Fraction', 'Fraction', (['self.answered_correctly', 'self.times_shown'], {}), '(self.answered_correctly, self.times_shown)\n', (3494, 3537), False, 'from fractions import Fraction\n')] |
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import scipy
from statsmodels import robust
class Singular_description(object):
'''
Display statistics from every numerical column in data set.
Base class for Mutual description instance.
Outcomes are represented from the beggining (after hoover),
in each histogram plot in the page.
Class covers the most general feature statistics used in data analysis.
'''
def __init__(self):
# Handled by cursor in common.py file in `Mutual_description`
self.column = ""
def histogram(self, plot_number):
# Generate histogram and save as a static file
        # size and ticks are adjusted in accordance with the display size
sns.set_style("whitegrid")
fig, ax = plt.subplots()
fig.set_size_inches(12, 12)
ax=sns.distplot(self.dataset.iloc[:, [plot_number]], rug=True, color='k')
fig.patch.set_alpha(0.0)
plt.xticks(fontsize=25)
plt.yticks(fontsize=25)
fig.savefig('static/plot{}.png'.format(plot_number + 1), dpi=fig.dpi)
# return fig
# plt.show()
def measurement(self):
# call for measurement category of the feature
# possible outcomes are:
# -- quantitive continous
# -- quantitive discrete categorical
# -- quantitive discrete numerical
if self.dataset[self.column].dtypes == 'float64':
for value in self.dataset[self.column].values:
if float(value) != int(value):
return 'quantitive continous'
if len(pd.unique(self.dataset[self.column])) == 2:
return 'quantitive discrete categorical'
else:
return 'quantitive discrete numerical'
def average(self):
# TODO: remove
return np.average(self.dataset[self.column])
def expected_value(self):
# call for expected value from feature distribution
return np.mean(self.dataset[self.column])
def median(self):
# call for median from feature distribution
return np.median(self.dataset[self.column])
def mode(self):
# call for mode from feature distribution
return scipy.stats.mode(self.dataset[self.column])
def standard_deviation(self):
# call for standard deviation from feature distribution
return np.std(self.dataset[self.column])
def absolute_deviation_from_mean(self):
# call for absolute deviation from mean from feature distribution
return np.mean(np.absolute(self.dataset[self.column] - np.mean(self.dataset[self.column])))
def absolute_deviation_from_median(self):
# call for mode from feature distribution
return scipy.stats.median_absolute_deviation(self.dataset[self.column])
def quarter_deviation(self):
# call for quarter devaition from feature distribution
q75, q25 = np.percentile(self.dataset[self.column], [75 ,25])
return (q75 - q25)
def coefficient_of_variation(self):
# call for coefficient of variation from feature distribution
return scipy.stats.variation(self.dataset[self.column])
def gini_coefficient(self):
# call for gini coefficient from feature distribution
# TODO: refactorize
mad = np.abs(np.subtract.outer(self.dataset[self.column], self.dataset[self.column])).mean()
rmad = mad/np.mean(self.dataset[self.column])
return 0.5 * rmad
def asymmetry_factor(self):
# call for asymmetry factor from feature distribution
return scipy.stats.skew(self.dataset[self.column])
def entropy(self):
# call for entropy from feature distribution
return scipy.stats.entropy(self.dataset[self.column])
| [
"seaborn.set_style",
"numpy.average",
"scipy.stats.mode",
"numpy.median",
"numpy.std",
"scipy.stats.entropy",
"matplotlib.pyplot.yticks",
"pandas.unique",
"numpy.percentile",
"scipy.stats.variation",
"scipy.stats.skew",
"numpy.mean",
"numpy.subtract.outer",
"seaborn.distplot",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.subplots",
"scipy.stats.median_absolute_deviation"
] | [((784, 810), 'seaborn.set_style', 'sns.set_style', (['"""whitegrid"""'], {}), "('whitegrid')\n", (797, 810), True, 'import seaborn as sns\n'), ((838, 852), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (850, 852), True, 'import matplotlib.pyplot as plt\n'), ((909, 979), 'seaborn.distplot', 'sns.distplot', (['self.dataset.iloc[:, [plot_number]]'], {'rug': '(True)', 'color': '"""k"""'}), "(self.dataset.iloc[:, [plot_number]], rug=True, color='k')\n", (921, 979), True, 'import seaborn as sns\n'), ((1030, 1053), 'matplotlib.pyplot.xticks', 'plt.xticks', ([], {'fontsize': '(25)'}), '(fontsize=25)\n', (1040, 1053), True, 'import matplotlib.pyplot as plt\n'), ((1062, 1085), 'matplotlib.pyplot.yticks', 'plt.yticks', ([], {'fontsize': '(25)'}), '(fontsize=25)\n', (1072, 1085), True, 'import matplotlib.pyplot as plt\n'), ((1958, 1995), 'numpy.average', 'np.average', (['self.dataset[self.column]'], {}), '(self.dataset[self.column])\n', (1968, 1995), True, 'import numpy as np\n'), ((2103, 2137), 'numpy.mean', 'np.mean', (['self.dataset[self.column]'], {}), '(self.dataset[self.column])\n', (2110, 2137), True, 'import numpy as np\n'), ((2228, 2264), 'numpy.median', 'np.median', (['self.dataset[self.column]'], {}), '(self.dataset[self.column])\n', (2237, 2264), True, 'import numpy as np\n'), ((2351, 2394), 'scipy.stats.mode', 'scipy.stats.mode', (['self.dataset[self.column]'], {}), '(self.dataset[self.column])\n', (2367, 2394), False, 'import scipy\n'), ((2509, 2542), 'numpy.std', 'np.std', (['self.dataset[self.column]'], {}), '(self.dataset[self.column])\n', (2515, 2542), True, 'import numpy as np\n'), ((2874, 2938), 'scipy.stats.median_absolute_deviation', 'scipy.stats.median_absolute_deviation', (['self.dataset[self.column]'], {}), '(self.dataset[self.column])\n', (2911, 2938), False, 'import scipy\n'), ((3055, 3105), 'numpy.percentile', 'np.percentile', (['self.dataset[self.column]', '[75, 25]'], {}), '(self.dataset[self.column], [75, 25])\n', (3068, 3105), True, 'import numpy as np\n'), ((3261, 3309), 'scipy.stats.variation', 'scipy.stats.variation', (['self.dataset[self.column]'], {}), '(self.dataset[self.column])\n', (3282, 3309), False, 'import scipy\n'), ((3725, 3768), 'scipy.stats.skew', 'scipy.stats.skew', (['self.dataset[self.column]'], {}), '(self.dataset[self.column])\n', (3741, 3768), False, 'import scipy\n'), ((3864, 3910), 'scipy.stats.entropy', 'scipy.stats.entropy', (['self.dataset[self.column]'], {}), '(self.dataset[self.column])\n', (3883, 3910), False, 'import scipy\n'), ((3554, 3588), 'numpy.mean', 'np.mean', (['self.dataset[self.column]'], {}), '(self.dataset[self.column])\n', (3561, 3588), True, 'import numpy as np\n'), ((1716, 1752), 'pandas.unique', 'pd.unique', (['self.dataset[self.column]'], {}), '(self.dataset[self.column])\n', (1725, 1752), True, 'import pandas as pd\n'), ((2725, 2759), 'numpy.mean', 'np.mean', (['self.dataset[self.column]'], {}), '(self.dataset[self.column])\n', (2732, 2759), True, 'import numpy as np\n'), ((3455, 3526), 'numpy.subtract.outer', 'np.subtract.outer', (['self.dataset[self.column]', 'self.dataset[self.column]'], {}), '(self.dataset[self.column], self.dataset[self.column])\n', (3472, 3526), True, 'import numpy as np\n')] |
import os
from sqlalchemy.dialects.postgresql.json import JSONB
from sqlalchemy.types import ARRAY, JSON, Boolean, Float, Integer, String
PKG_NAME = "pipestat"
LOCK_PREFIX = "lock."
REPORT_CMD = "report"
INSPECT_CMD = "inspect"
REMOVE_CMD = "remove"
RETRIEVE_CMD = "retrieve"
STATUS_CMD = "status"
SUBPARSER_MSGS = {
REPORT_CMD: "Report a result.",
INSPECT_CMD: "Inspect a database.",
REMOVE_CMD: "Remove a result.",
RETRIEVE_CMD: "Retrieve a result.",
STATUS_CMD: "Manage pipeline status.",
}
STATUS_GET_CMD = "get"
STATUS_SET_CMD = "set"
STATUS_SUBPARSER_MESSAGES = {
STATUS_SET_CMD: "Set status.",
STATUS_GET_CMD: "Get status.",
}
DOC_URL = "http://pipestat.databio.org/en/latest/db_config/"
# DB config keys
CFG_DATABASE_KEY = "database"
CFG_NAME_KEY = "name"
CFG_HOST_KEY = "host"
CFG_PORT_KEY = "port"
CFG_PASSWORD_KEY = "password"
CFG_USER_KEY = "user"
CFG_DIALECT_KEY = "dialect" # sqlite, mysql, postgresql, oracle, or mssql
CFG_DRIVER_KEY = "driver"
DB_CREDENTIALS = [
CFG_HOST_KEY,
CFG_PORT_KEY,
CFG_PASSWORD_KEY,
CFG_USER_KEY,
CFG_NAME_KEY,
CFG_DIALECT_KEY,
CFG_DRIVER_KEY,
]
# object attribute names
DB_ONLY_KEY = "_database_only"
CONFIG_KEY = "_config"
SCHEMA_KEY = "_schema"
STATUS_KEY = "_status"
STATUS_SCHEMA_KEY = "_status_schema"
STATUS_SCHEMA_SOURCE_KEY = "_status_schema_source"
STATUS_FILE_DIR = "_status_file_dir"
RES_SCHEMAS_KEY = "_result_schemas"
DB_BASE_KEY = "_declarative_base"
DB_ORMS_KEY = "_orms"
DATA_KEY = "_data"
NAME_KEY = "_name"
FILE_KEY = "_file"
RECORD_ID_KEY = "_record_id"
DB_SESSION_KEY = "_db_session"
DB_SCOPED_SESSION_KEY = "_db_scoped_session"
DB_ENGINE_KEY = "_db_engine"
HIGHLIGHTED_KEY = "_highlighted"
DB_COLUMN_KEY = "db_column"
DB_RELATIONSHIP_KEY = "relationship"
DB_RELATIONSHIP_NAME_KEY = "name"
DB_RELATIONSHIP_TABLE_KEY = "table"
DB_RELATIONSHIP_COL_KEY = "column"
DB_RELATIONSHIP_BACKREF_KEY = "backref"
DB_RELATIONSHIP_ELEMENTS = [
DB_RELATIONSHIP_BACKREF_KEY,
DB_RELATIONSHIP_COL_KEY,
DB_RELATIONSHIP_NAME_KEY,
DB_RELATIONSHIP_TABLE_KEY,
]
# schema keys
SCHEMA_PROP_KEY = "properties"
SCHEMA_TYPE_KEY = "type"
SCHEMA_DESC_KEY = "description"
# DB column names
ID = "id"
RECORD_ID = "record_identifier"
STATUS = "status"
RESERVED_COLNAMES = [ID, RECORD_ID]
CANONICAL_TYPES = {
"image": {
"type": "object",
"properties": {
"path": {"type": "string"},
"thumbnail_path": {"type": "string"},
"title": {"type": "string"},
},
"required": ["path", "thumbnail_path", "title"],
},
"file": {
"type": "object",
"properties": {
"path": {"type": "string"},
"title": {"type": "string"},
},
"required": ["path", "title"],
},
}
ENV_VARS = {
"namespace": "PIPESTAT_NAMESPACE",
"config": "PIPESTAT_CONFIG",
"results_file": "PIPESTAT_RESULTS_FILE",
"schema": "PIPESTAT_RESULTS_SCHEMA",
"status_schema": "PIPESTAT_SATUS_SCHEMA",
"record_identifier": "PIPESTAT_RECORD_ID",
}
CLASSES_BY_TYPE = {
"number": float,
"integer": int,
"object": dict,
"image": dict,
"file": dict,
"string": str,
"array": list,
"boolean": bool,
}
SQL_CLASSES_BY_TYPE = {
"number": Float,
"integer": Integer,
"object": JSONB,
"image": JSONB,
"file": JSONB,
"string": String(500),
"array": JSONB,
"boolean": Boolean,
}
CFG_SCHEMA = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "schemas", "pipestat_config_schema.yaml"
)
STATUS_SCHEMA = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "schemas", "status_schema.yaml"
)
STATUS_TABLE_SCHEMA = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "schemas", "status_table_schema.yaml"
)
| [
"sqlalchemy.types.String",
"os.path.abspath"
] | [((3389, 3400), 'sqlalchemy.types.String', 'String', (['(500)'], {}), '(500)\n', (3395, 3400), False, 'from sqlalchemy.types import ARRAY, JSON, Boolean, Float, Integer, String\n'), ((3496, 3521), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (3511, 3521), False, 'import os\n'), ((3617, 3642), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (3632, 3642), False, 'import os\n'), ((3736, 3761), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (3751, 3761), False, 'import os\n')] |
import logging
import random
import requests
import board
import neopixel
import smbus2
from apscheduler.schedulers.blocking import BlockingScheduler
class LedController:
def reset(self):
pass
def set(self, id):
pass
class LoggingLedController(LedController):
def reset(self):
logging.info('Reset')
def set(self, id):
logging.info('set {}'.format(id))
# Controller for I2C connected LEDs
class I2CLedController(LoggingLedController):
def __init__(self):
self.bus = smbus2.SMBus(1)
self.bus.write_byte_data(0x20, 0x00, 0x00)
self.bus.write_byte_data(0x20, 0x01, 0x00)
def reset(self):
super(I2CLedController, self).reset()
self.bus.write_byte_data(0x20, 0x14, 0x00)
self.bus.write_byte_data(0x20, 0x15, 0x00)
def set(self, id):
super(I2CLedController, self).set(id)
register = 0x14
        if id // 8 > 0:
register = 0x15
        bitmask = 1 << (id % 8)  # select the single output pin for this LED
self.bus.write_byte_data(0x20, register, bitmask)
# Controller for WS2812 connected LEDs
class WS2812LedController(LedController):
def __init__(self, color):
self._color = color
self._pixels = neopixel.NeoPixel(board.D18, 144, auto_write=False)
self._pixels.fill((0, 0, 0))
self._pixels.show()
def reset(self):
super(WS2812LedController, self).reset()
self._pixels.fill((0, 0, 0))
self._pixels.show()
def set(self, id):
super(WS2812LedController, self).set(id)
self._pixels.fill((0, 0, 0))
self._pixels[id] = self._color
self._pixels.show()
# BASIC OPTIONS
logging.basicConfig(level=logging.INFO)
TEST_ENV = 'http://192.168.0.199:8080/v1/display'
PROD_ENV = 'http://10.24.6.35/api/v1/display'
url = TEST_ENV
color = (0, 0, 255)
controller = WS2812LedController(color)
def job():
address = get_active_address()
if address < 0:
controller.reset()
else:
controller.set(address)
def get_mock_address():
return random.randint(-1, 100)
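# Query the display API and return the active folder address, or -1 when there is no running operation or the request fails.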
def get_active_address():
try:
r = requests.get(url, timeout=2)
data = r.json()
if (data['state'] != 'OPERATION'):
logging.debug('Not operation state.')
return -1
if 'operation' not in data:
logging.debug('No operation.')
return -1
operation = data['operation']
if 'realEstate' not in operation:
logging.debug('No realEstate.')
return -1
realEstate = operation['realEstate']
if 'folderAddress' not in realEstate:
logging.debug('No folderAddress.')
return -1
folderAddress = int(realEstate['folderAddress'])
return folderAddress
except Exception as e:
logging.warn('Exception when getting data.')
logging.warn(e)
return -1
def init():
logging.info('Starting process.')
scheduler = BlockingScheduler()
scheduler.add_job(job, 'interval', seconds=5)
try:
scheduler.start()
except (KeyboardInterrupt):
controller.reset()
logging.info('Stopping process.')
if __name__ == "__main__":
init()
| [
"logging.debug",
"random.randint",
"logging.basicConfig",
"logging.warn",
"smbus2.SMBus",
"logging.info",
"requests.get",
"neopixel.NeoPixel",
"apscheduler.schedulers.blocking.BlockingScheduler"
] | [((1672, 1711), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (1691, 1711), False, 'import logging\n'), ((2060, 2083), 'random.randint', 'random.randint', (['(-1)', '(100)'], {}), '(-1, 100)\n', (2074, 2083), False, 'import random\n'), ((2940, 2973), 'logging.info', 'logging.info', (['"""Starting process."""'], {}), "('Starting process.')\n", (2952, 2973), False, 'import logging\n'), ((2990, 3009), 'apscheduler.schedulers.blocking.BlockingScheduler', 'BlockingScheduler', ([], {}), '()\n', (3007, 3009), False, 'from apscheduler.schedulers.blocking import BlockingScheduler\n'), ((319, 340), 'logging.info', 'logging.info', (['"""Reset"""'], {}), "('Reset')\n", (331, 340), False, 'import logging\n'), ((536, 551), 'smbus2.SMBus', 'smbus2.SMBus', (['(1)'], {}), '(1)\n', (548, 551), False, 'import smbus2\n'), ((1222, 1273), 'neopixel.NeoPixel', 'neopixel.NeoPixel', (['board.D18', '(144)'], {'auto_write': '(False)'}), '(board.D18, 144, auto_write=False)\n', (1239, 1273), False, 'import neopixel\n'), ((2133, 2161), 'requests.get', 'requests.get', (['url'], {'timeout': '(2)'}), '(url, timeout=2)\n', (2145, 2161), False, 'import requests\n'), ((2241, 2278), 'logging.debug', 'logging.debug', (['"""Not operation state."""'], {}), "('Not operation state.')\n", (2254, 2278), False, 'import logging\n'), ((2350, 2380), 'logging.debug', 'logging.debug', (['"""No operation."""'], {}), "('No operation.')\n", (2363, 2380), False, 'import logging\n'), ((2497, 2528), 'logging.debug', 'logging.debug', (['"""No realEstate."""'], {}), "('No realEstate.')\n", (2510, 2528), False, 'import logging\n'), ((2656, 2690), 'logging.debug', 'logging.debug', (['"""No folderAddress."""'], {}), "('No folderAddress.')\n", (2669, 2690), False, 'import logging\n'), ((2835, 2879), 'logging.warn', 'logging.warn', (['"""Exception when getting data."""'], {}), "('Exception when getting data.')\n", (2847, 2879), False, 'import logging\n'), ((2888, 2903), 'logging.warn', 'logging.warn', (['e'], {}), '(e)\n', (2900, 2903), False, 'import logging\n'), ((3162, 3195), 'logging.info', 'logging.info', (['"""Stopping process."""'], {}), "('Stopping process.')\n", (3174, 3195), False, 'import logging\n')] |
from flask_restful import Resource, request, abort
from flask_restful_swagger import swagger
from hpcpm.api import log
from hpcpm.api.helpers.database import database
from hpcpm.api.helpers.utils import abort_when_not_int, abort_when_node_not_found
from hpcpm.api.helpers.constants import COMPUTATION_NODE_PARAM_NAME, COMPUTATION_NODE_NOT_FOUND_RESPONSE, \
COMPUTATION_NODE_FETCHED_RESPONSE, DEVICE_IDENTIFIER_PARAM, DEVICE_SOFT_LIMIT_PARAM, \
DEVICE_SOFT_LIMIT_SET_RESPONSE, DEVICE_NOT_FOUND_RESPONSE, \
NODE_AND_DEVICE_PARAMS, DEVICE_SOFT_LIMIT_SET_RESPONSE_FAILURE
class SoftLimit(Resource):
@swagger.operation(
notes='This endpoint is used for setting soft limit for given device.',
nickname='/nodes/computation_node/<string:name>/<string:device_id>/soft_limit',
parameters=[
COMPUTATION_NODE_PARAM_NAME,
DEVICE_IDENTIFIER_PARAM,
DEVICE_SOFT_LIMIT_PARAM
],
responseMessages=[
DEVICE_SOFT_LIMIT_SET_RESPONSE,
DEVICE_SOFT_LIMIT_SET_RESPONSE_FAILURE,
COMPUTATION_NODE_NOT_FOUND_RESPONSE
]
)
def put(self, name, device_id):
soft_limit = request.args.get('soft_limit')
abort_when_not_int(soft_limit)
computation_node = abort_when_node_not_found(name)
if int(soft_limit) < 0:
log.error(str.format('Number is not positive: {}', soft_limit))
abort(400)
if not any(d['id'] == device_id for d in computation_node['backend_info']['devices']):
log.error('There is no such device: %s', device_id)
abort(404)
limit_info = {
'name': name,
'device_id': device_id,
'soft_limit': soft_limit
}
upsert_result = database.replace_soft_limit_for_device(name, device_id, limit_info)
if upsert_result.modified_count:
log.info('Power limit for device %s:%s was already set in a database to %s', name, device_id, soft_limit)
log.info('Stored power limit info %s', limit_info)
else:
log.info('Stored power limit info %s on id %s', limit_info, upsert_result.upserted_id)
return 'Soft limit successfully set', 201
@swagger.operation(
notes='This endpoint is used for getting soft limit information from database',
nickname='/nodes/computation_node/<string:name>/<string:device_id>/soft_limit',
parameters=NODE_AND_DEVICE_PARAMS,
responseMessages=[
COMPUTATION_NODE_FETCHED_RESPONSE,
DEVICE_NOT_FOUND_RESPONSE
]
)
def get(self, name, device_id):
result = database.get_soft_limit_for_device(name, device_id)
if not result:
log.info('No such device %s:%s', name, device_id)
abort(404)
log.info('Successfully get device %s:%s soft limit info: %s', name, device_id, result)
return result, 200
@swagger.operation(
notes='This endpoint is used for removing soft limit information from database and device',
nickname='/nodes/computation_node/<string:name>/<string:device_id>/soft_limit',
parameters=NODE_AND_DEVICE_PARAMS,
responseMessages=[
COMPUTATION_NODE_FETCHED_RESPONSE,
DEVICE_NOT_FOUND_RESPONSE,
]
)
def delete(self, name, device_id):
result = database.delete_soft_limit_info(name, device_id)
if not result:
log.info('No such device %s:%s', name, device_id)
abort(404)
log.info('Successfully removed soft limit for device %s:%s soft limit info: %s', name, device_id,
result)
return result, 200
| [
"hpcpm.api.helpers.utils.abort_when_node_not_found",
"flask_restful.request.args.get",
"hpcpm.api.helpers.database.database.replace_soft_limit_for_device",
"hpcpm.api.helpers.database.database.delete_soft_limit_info",
"flask_restful.abort",
"hpcpm.api.log.info",
"flask_restful_swagger.swagger.operation",
"hpcpm.api.helpers.utils.abort_when_not_int",
"hpcpm.api.log.error",
"hpcpm.api.helpers.database.database.get_soft_limit_for_device"
] | [((615, 1035), 'flask_restful_swagger.swagger.operation', 'swagger.operation', ([], {'notes': '"""This endpoint is used for setting soft limit for given device."""', 'nickname': '"""/nodes/computation_node/<string:name>/<string:device_id>/soft_limit"""', 'parameters': '[COMPUTATION_NODE_PARAM_NAME, DEVICE_IDENTIFIER_PARAM, DEVICE_SOFT_LIMIT_PARAM]', 'responseMessages': '[DEVICE_SOFT_LIMIT_SET_RESPONSE, DEVICE_SOFT_LIMIT_SET_RESPONSE_FAILURE,\n COMPUTATION_NODE_NOT_FOUND_RESPONSE]'}), "(notes=\n 'This endpoint is used for setting soft limit for given device.',\n nickname=\n '/nodes/computation_node/<string:name>/<string:device_id>/soft_limit',\n parameters=[COMPUTATION_NODE_PARAM_NAME, DEVICE_IDENTIFIER_PARAM,\n DEVICE_SOFT_LIMIT_PARAM], responseMessages=[\n DEVICE_SOFT_LIMIT_SET_RESPONSE, DEVICE_SOFT_LIMIT_SET_RESPONSE_FAILURE,\n COMPUTATION_NODE_NOT_FOUND_RESPONSE])\n", (632, 1035), False, 'from flask_restful_swagger import swagger\n'), ((2254, 2570), 'flask_restful_swagger.swagger.operation', 'swagger.operation', ([], {'notes': '"""This endpoint is used for getting soft limit information from database"""', 'nickname': '"""/nodes/computation_node/<string:name>/<string:device_id>/soft_limit"""', 'parameters': 'NODE_AND_DEVICE_PARAMS', 'responseMessages': '[COMPUTATION_NODE_FETCHED_RESPONSE, DEVICE_NOT_FOUND_RESPONSE]'}), "(notes=\n 'This endpoint is used for getting soft limit information from database',\n nickname=\n '/nodes/computation_node/<string:name>/<string:device_id>/soft_limit',\n parameters=NODE_AND_DEVICE_PARAMS, responseMessages=[\n COMPUTATION_NODE_FETCHED_RESPONSE, DEVICE_NOT_FOUND_RESPONSE])\n", (2271, 2570), False, 'from flask_restful_swagger import swagger\n'), ((2961, 3290), 'flask_restful_swagger.swagger.operation', 'swagger.operation', ([], {'notes': '"""This endpoint is used for removing soft limit information from database and device"""', 'nickname': '"""/nodes/computation_node/<string:name>/<string:device_id>/soft_limit"""', 'parameters': 'NODE_AND_DEVICE_PARAMS', 'responseMessages': '[COMPUTATION_NODE_FETCHED_RESPONSE, DEVICE_NOT_FOUND_RESPONSE]'}), "(notes=\n 'This endpoint is used for removing soft limit information from database and device'\n , nickname=\n '/nodes/computation_node/<string:name>/<string:device_id>/soft_limit',\n parameters=NODE_AND_DEVICE_PARAMS, responseMessages=[\n COMPUTATION_NODE_FETCHED_RESPONSE, DEVICE_NOT_FOUND_RESPONSE])\n", (2978, 3290), False, 'from flask_restful_swagger import swagger\n'), ((1192, 1222), 'flask_restful.request.args.get', 'request.args.get', (['"""soft_limit"""'], {}), "('soft_limit')\n", (1208, 1222), False, 'from flask_restful import Resource, request, abort\n'), ((1231, 1261), 'hpcpm.api.helpers.utils.abort_when_not_int', 'abort_when_not_int', (['soft_limit'], {}), '(soft_limit)\n', (1249, 1261), False, 'from hpcpm.api.helpers.utils import abort_when_not_int, abort_when_node_not_found\n'), ((1289, 1320), 'hpcpm.api.helpers.utils.abort_when_node_not_found', 'abort_when_node_not_found', (['name'], {}), '(name)\n', (1314, 1320), False, 'from hpcpm.api.helpers.utils import abort_when_not_int, abort_when_node_not_found\n'), ((1793, 1860), 'hpcpm.api.helpers.database.database.replace_soft_limit_for_device', 'database.replace_soft_limit_for_device', (['name', 'device_id', 'limit_info'], {}), '(name, device_id, limit_info)\n', (1831, 1860), False, 'from hpcpm.api.helpers.database import database\n'), ((2673, 2724), 'hpcpm.api.helpers.database.database.get_soft_limit_for_device', 'database.get_soft_limit_for_device', 
(['name', 'device_id'], {}), '(name, device_id)\n', (2707, 2724), False, 'from hpcpm.api.helpers.database import database\n'), ((2841, 2931), 'hpcpm.api.log.info', 'log.info', (['"""Successfully get device %s:%s soft limit info: %s"""', 'name', 'device_id', 'result'], {}), "('Successfully get device %s:%s soft limit info: %s', name,\n device_id, result)\n", (2849, 2931), False, 'from hpcpm.api import log\n'), ((3396, 3444), 'hpcpm.api.helpers.database.database.delete_soft_limit_info', 'database.delete_soft_limit_info', (['name', 'device_id'], {}), '(name, device_id)\n', (3427, 3444), False, 'from hpcpm.api.helpers.database import database\n'), ((3562, 3672), 'hpcpm.api.log.info', 'log.info', (['"""Successfully removed soft limit for device %s:%s soft limit info: %s"""', 'name', 'device_id', 'result'], {}), "('Successfully removed soft limit for device %s:%s soft limit info: %s'\n , name, device_id, result)\n", (3570, 3672), False, 'from hpcpm.api import log\n'), ((1441, 1451), 'flask_restful.abort', 'abort', (['(400)'], {}), '(400)\n', (1446, 1451), False, 'from flask_restful import Resource, request, abort\n'), ((1560, 1611), 'hpcpm.api.log.error', 'log.error', (['"""There is no such device: %s"""', 'device_id'], {}), "('There is no such device: %s', device_id)\n", (1569, 1611), False, 'from hpcpm.api import log\n'), ((1624, 1634), 'flask_restful.abort', 'abort', (['(404)'], {}), '(404)\n', (1629, 1634), False, 'from flask_restful import Resource, request, abort\n'), ((1915, 2024), 'hpcpm.api.log.info', 'log.info', (['"""Power limit for device %s:%s was already set in a database to %s"""', 'name', 'device_id', 'soft_limit'], {}), "('Power limit for device %s:%s was already set in a database to %s',\n name, device_id, soft_limit)\n", (1923, 2024), False, 'from hpcpm.api import log\n'), ((2033, 2083), 'hpcpm.api.log.info', 'log.info', (['"""Stored power limit info %s"""', 'limit_info'], {}), "('Stored power limit info %s', limit_info)\n", (2041, 2083), False, 'from hpcpm.api import log\n'), ((2110, 2201), 'hpcpm.api.log.info', 'log.info', (['"""Stored power limit info %s on id %s"""', 'limit_info', 'upsert_result.upserted_id'], {}), "('Stored power limit info %s on id %s', limit_info, upsert_result.\n upserted_id)\n", (2118, 2201), False, 'from hpcpm.api import log\n'), ((2760, 2809), 'hpcpm.api.log.info', 'log.info', (['"""No such device %s:%s"""', 'name', 'device_id'], {}), "('No such device %s:%s', name, device_id)\n", (2768, 2809), False, 'from hpcpm.api import log\n'), ((2822, 2832), 'flask_restful.abort', 'abort', (['(404)'], {}), '(404)\n', (2827, 2832), False, 'from flask_restful import Resource, request, abort\n'), ((3480, 3529), 'hpcpm.api.log.info', 'log.info', (['"""No such device %s:%s"""', 'name', 'device_id'], {}), "('No such device %s:%s', name, device_id)\n", (3488, 3529), False, 'from hpcpm.api import log\n'), ((3542, 3552), 'flask_restful.abort', 'abort', (['(404)'], {}), '(404)\n', (3547, 3552), False, 'from flask_restful import Resource, request, abort\n')] |
import imageio
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from skimage.transform import resize
from IPython.display import HTML
import warnings
import sys
import os
from demo import load_checkpoints
from demo import make_animation
from skimage import img_as_ubyte
warnings.filterwarnings("ignore")
if len(sys.argv) < 6:
print("Usage: deepfake_multiple.py <source name> <template name> <final_vid_name> <rows> <columns> <no shuffle arg>")
sys.exit()
source_folder = os.path.join(os.curdir, "resources", "combos", sys.argv[1])
image_folder = os.path.join(os.curdir, "resources", "combos", sys.argv[1], "images")
template_video = os.path.join(os.curdir, "resources", "combos", sys.argv[1], sys.argv[2])
template_video_name = sys.argv[2]
gen_vid_folder = os.path.join(os.curdir, "resources", "combos", sys.argv[1], "gen")
final_vid = os.path.join(os.curdir, "resources", "combos", sys.argv[1], sys.argv[3])
final_vid_name = sys.argv[3]
x = int(sys.argv[4])
y = int(sys.argv[5])
shuffle = ""
if len(sys.argv) > 6:
print("SHOULD NOT CREATE SHUFFLE")
shuffle="noshuffle"
list_images = os.listdir(image_folder)
driving_video = imageio.mimread(template_video)
driving_video = [resize(frame, (256, 256))[..., :3] for frame in driving_video]
generator, kp_detector = load_checkpoints(config_path='config/vox-256.yaml',
checkpoint_path='vox-cpk.pth.tar')
for image in list_images:
image_path = os.path.join(image_folder, image)
source_image = imageio.imread(image_path)
source_image = resize(source_image, (256, 256))[..., :3]
gen_vid_name = image.split(".")[0]
gen_vid_name = f"{gen_vid_name}_gen.mp4"
gen_vid = os.path.join(gen_vid_folder, gen_vid_name)
if not os.path.exists(gen_vid):
predictions = make_animation(source_image, driving_video, generator, kp_detector, relative=True)
imageio.mimsave(gen_vid, [img_as_ubyte(frame) for frame in predictions])
combiner = os.path.join(os.curdir, "resources", "combos", "createcombo.py")
os.system(f"python3 {combiner} {source_folder} {template_video_name} {final_vid_name} {x} {y} {shuffle}")
sys.exit()
#Resize image and video to 256x256
#save resulting video
#predictions2 = make_animation(source_image, driving_video, generator, kp_detector, relative=False, adapt_movement_scale=True)
#imageio.mimsave("testing.mp4", [img_as_ubyte(frame) for frame in predictions2])
#os.system(f"python3 {createvid} {template_video} {gen_vid} {final_vid}")
#print(f"VIDEO GENERATED: {final_vid}") | [
"warnings.filterwarnings",
"demo.load_checkpoints",
"imageio.imread",
"os.path.exists",
"os.system",
"skimage.img_as_ubyte",
"imageio.mimread",
"skimage.transform.resize",
"demo.make_animation",
"os.path.join",
"os.listdir",
"sys.exit"
] | [((314, 347), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (337, 347), False, 'import warnings\n'), ((525, 584), 'os.path.join', 'os.path.join', (['os.curdir', '"""resources"""', '"""combos"""', 'sys.argv[1]'], {}), "(os.curdir, 'resources', 'combos', sys.argv[1])\n", (537, 584), False, 'import os\n'), ((600, 669), 'os.path.join', 'os.path.join', (['os.curdir', '"""resources"""', '"""combos"""', 'sys.argv[1]', '"""images"""'], {}), "(os.curdir, 'resources', 'combos', sys.argv[1], 'images')\n", (612, 669), False, 'import os\n'), ((687, 759), 'os.path.join', 'os.path.join', (['os.curdir', '"""resources"""', '"""combos"""', 'sys.argv[1]', 'sys.argv[2]'], {}), "(os.curdir, 'resources', 'combos', sys.argv[1], sys.argv[2])\n", (699, 759), False, 'import os\n'), ((811, 877), 'os.path.join', 'os.path.join', (['os.curdir', '"""resources"""', '"""combos"""', 'sys.argv[1]', '"""gen"""'], {}), "(os.curdir, 'resources', 'combos', sys.argv[1], 'gen')\n", (823, 877), False, 'import os\n'), ((890, 962), 'os.path.join', 'os.path.join', (['os.curdir', '"""resources"""', '"""combos"""', 'sys.argv[1]', 'sys.argv[3]'], {}), "(os.curdir, 'resources', 'combos', sys.argv[1], sys.argv[3])\n", (902, 962), False, 'import os\n'), ((1147, 1171), 'os.listdir', 'os.listdir', (['image_folder'], {}), '(image_folder)\n', (1157, 1171), False, 'import os\n'), ((1188, 1219), 'imageio.mimread', 'imageio.mimread', (['template_video'], {}), '(template_video)\n', (1203, 1219), False, 'import imageio\n'), ((1325, 1416), 'demo.load_checkpoints', 'load_checkpoints', ([], {'config_path': '"""config/vox-256.yaml"""', 'checkpoint_path': '"""vox-cpk.pth.tar"""'}), "(config_path='config/vox-256.yaml', checkpoint_path=\n 'vox-cpk.pth.tar')\n", (1341, 1416), False, 'from demo import load_checkpoints\n'), ((2000, 2064), 'os.path.join', 'os.path.join', (['os.curdir', '"""resources"""', '"""combos"""', '"""createcombo.py"""'], {}), "(os.curdir, 'resources', 'combos', 'createcombo.py')\n", (2012, 2064), False, 'import os\n'), ((2065, 2180), 'os.system', 'os.system', (['f"""python3 {combiner} {source_folder} {template_video_name} {final_vid_name} {x} {y} {shuffle}"""'], {}), "(\n f'python3 {combiner} {source_folder} {template_video_name} {final_vid_name} {x} {y} {shuffle}'\n )\n", (2074, 2180), False, 'import os\n'), ((2171, 2181), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2179, 2181), False, 'import sys\n'), ((497, 507), 'sys.exit', 'sys.exit', ([], {}), '()\n', (505, 507), False, 'import sys\n'), ((1484, 1517), 'os.path.join', 'os.path.join', (['image_folder', 'image'], {}), '(image_folder, image)\n', (1496, 1517), False, 'import os\n'), ((1537, 1563), 'imageio.imread', 'imageio.imread', (['image_path'], {}), '(image_path)\n', (1551, 1563), False, 'import imageio\n'), ((1723, 1765), 'os.path.join', 'os.path.join', (['gen_vid_folder', 'gen_vid_name'], {}), '(gen_vid_folder, gen_vid_name)\n', (1735, 1765), False, 'import os\n'), ((1237, 1262), 'skimage.transform.resize', 'resize', (['frame', '(256, 256)'], {}), '(frame, (256, 256))\n', (1243, 1262), False, 'from skimage.transform import resize\n'), ((1583, 1615), 'skimage.transform.resize', 'resize', (['source_image', '(256, 256)'], {}), '(source_image, (256, 256))\n', (1589, 1615), False, 'from skimage.transform import resize\n'), ((1777, 1800), 'os.path.exists', 'os.path.exists', (['gen_vid'], {}), '(gen_vid)\n', (1791, 1800), False, 'import os\n'), ((1824, 1910), 'demo.make_animation', 'make_animation', (['source_image', 'driving_video', 
'generator', 'kp_detector'], {'relative': '(True)'}), '(source_image, driving_video, generator, kp_detector,\n relative=True)\n', (1838, 1910), False, 'from demo import make_animation\n'), ((1941, 1960), 'skimage.img_as_ubyte', 'img_as_ubyte', (['frame'], {}), '(frame)\n', (1953, 1960), False, 'from skimage import img_as_ubyte\n')] |
'''
Script to convert a MAF to a vcf4.2 file using python >=3.6.
Created by <NAME>
8 March 2018
'''
import os
import sys
from optparse import OptionParser
import subprocess
from functools import wraps
import datetime
import time
import numpy as np
def OptionParsing():
usage = 'usage: %prog -i <*.maf> -o <directory> -r <ref.fa>'
parser = OptionParser(usage)
parser.add_option('-i', '--input_maf', dest="maf", default=None, help=".maf file to be converted.")
parser.add_option('-o', '--output_dir', dest="outDir", default=None, help="Output directory for .vcf file")
parser.add_option('-r', '--ref_genome', dest="refGenome", default="/Users/schencro/Desktop/Bioinformatics_Tools/Ref_Genomes/Ensembl/GRCh37.75/GRCh37.75.fa", help="Reference genome to be used for maf2vcf conversion.")
parser.add_option('-s', '--spotCheckMaf', dest='spotcheck', default=False, action='store_true', help="Use this flag to verify reference matching to maf file. Default=False")
parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true', help="Use this flag to turn on verbose mode. Default=False")
(options, args) = parser.parse_args()
if options.maf is None or options.outDir is None or options.refGenome is None:
print("ERROR: Please include arguments for maf file, output directory, and reference genome (single fasta file).")
sys.exit()
else:
pass
return (options, parser)
def fn_timer(function):
'''
Use this as a wrapper at the top of any function you want to get run time information about.
:param function: Function of interest.
:return: A function to wrap around a function.
'''
@wraps(function)
def function_timer(*args, **kwargs):
t0 = time.time()
result = function(*args, **kwargs)
t1 = time.time()
print ("INFO: Total time running %s: %s minutes" %
(function.__name__, str(round((t1-t0)/60.,2)))
)
return result
return function_timer
def UpdateProgressGetN(fileName):
if fileName[len(fileName)-1]=="z":
cmd = "gzip -cd %s | wc -l" % (fileName)
pipe = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout
else:
cmd = "wc -l %s" % (fileName)
pipe = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout
return(int(pipe.read().decode("utf-8").lstrip(" ").split(" ")[0]))
def UpdateProgress(i, n, DisplayText):
'''
Prints a progress bar where appropriate.
:param i: Current Step
:param n: Total number of steps.
:param DisplayText: A string that you want to print out that is informative.
:return: None
'''
sys.stdout.write('\r')
j = (i + 1) / n
sys.stdout.write("[%-20s] %d%%\t INFO: %s" % ('=' * int(20 * j), 100 * j, DisplayText))
sys.stdout.flush()
def SamtoolsFaidx(refGenome, genomicPos, ref='', check=True):
'''
Obtain reference sequence and perform check if needed.
    :param check: Whether or not to throw an error if the provided reference does not match the fetched sequence
:param refGenome: Reference Fasta file
:param genomicPos: Genomic Position of interest.
:param ref: Reference sequence to compare to fetched sequence.
:return: Fetched reference sequence.
'''
proc = subprocess.Popen(['samtools','faidx',refGenome, genomicPos], stdout=subprocess.PIPE)
proc.wait()
outInfo = proc.stdout.readlines()
refSeq = ''.join([line.decode('utf-8').rstrip('\n') for line in outInfo[1:]])
if check:
if refSeq == ref:
return(True)
else:
print('ERROR: May not be proper reference genome')
print('ERROR: Improper reference. Found %s at %s. Reference genome shows %s' % (ref, genomicPos, refSeq))
sys.exit()
return(None)
else:
return(refSeq)
def SpotCheckProperReference(mafFile, Options, fileLength):
'''
    Randomly samples the maf file to verify that the proper reference genome is being used. Spot checks 2% of the
    entries when the file has more than 200 variants, otherwise checks all of them.
:param mafFile: Input mafFile object (opened)
:param Options: Parser Options
:param fileLength: Length of the file being read
:return: None
'''
print("INFO: Verifying maf file.")
if fileLength > 200:
n=0.02
else:
n=1.
a = np.arange(fileLength)
np.random.shuffle(a)
a = list(a[:int(fileLength*n)])
i = 0
count = 0
for line in mafFile:
if i != 0 and line.startswith('Hugo_Symbol Chromosome Start_position') == False:
# checkIt = len([k for k in a if k==i])
# if checkIt==1:
UpdateProgress(count, len(a), "INFO: Verifying maf file")
count+=1
line = line.rstrip('\n').split('\t')
genomicPos = line[1] + ":" + line[2] + "-" + line[3]
ref = line[7]
mutType = line[5]
variantClass = line[6]
if variantClass != "INS" and variantClass != "TNP" and variantClass !="ONP":
toContinue = SamtoolsFaidx(Options.refGenome, genomicPos, ref)
if count == len(a):
print('')
return(toContinue)
# else:
# print(checkIt)
# print(line)
# print([k for k in a])
# sys.exit("Problem here")
elif i != 0 and line.startswith('Hugo_Symbol Chromosome Start_position') == False:
print("")
print("ERROR: No header found in maf file.")
elif line.startswith('Hugo_Symbol Chromosome Start_position') == True:
toContinue = True
else:
sys.exit("What the fuck")
i+=1
print('')
return(toContinue)
def processSNP(line, chrom, pos, rsid, mutType, variantType, strand, errorFile, Options):
ref = line[7]
tAllele1 = line[8] # Normal Allele
tAllele2 = line[9] # Alt Allele
QUAL = line[42]
if QUAL == 'None' or QUAL == 'NA' or QUAL == '':
QUAL = '.'
if ref == tAllele1:
altAllele = tAllele1
refAllele = tAllele2
else:
altAllele = tAllele2
refAllele = tAllele1
ref_reads = line[39]
alt_reads = line[38]
reportedVAF = line[28]
# Get phasing information and determine reads for vaf==1
if ref_reads == 'NA' or alt_reads == 'NA' and reportedVAF == '1':
GT = "1/1" # Appears to be homozygous for alternative allele (germline unlikely since it is called w.r.t normal?)
vaf = reportedVAF # Sets VAF equal to 1
if ref_reads == 'NA':
ref_reads = '.'
total_reads = alt_reads
else:
alt_reads = '.'
total_reads = ref_reads
sampleField = ':'.join([GT, ','.join([ref_reads, alt_reads]), total_reads, vaf])
# Tossing these very strange mutations within the MAF file.
elif ref_reads == 'NA' or alt_reads == 'NA' and reportedVAF == 'NA':
with open(errorFile, 'a') as errerOut:
errerOut.write('\t'.join(line)+'\n')
if Options.verbose:
print("WARNING: %s" % '\t'.join(line))
return(None)
# Simple SNV cases
else:
total_reads = str(int(ref_reads) + int(alt_reads))
vaf = repr(round(int(alt_reads) / float(total_reads), 4))
        if vaf != '1.' and (strand=="+" or strand=="-"):
GT="0|1"
else:
GT="0/1"
sampleField = ':'.join([GT, ','.join([ref_reads, alt_reads]), total_reads, vaf])
# Last check for interesting but unresolved MAF line
if (ref != tAllele1 and ref != tAllele2) or (strand != '+' and strand != '-'):
with open(errorFile, 'a') as errerOut:
errerOut.write('\t'.join(line)+'\n')
if Options.verbose:
print("WARNING: %s" % '\t'.join(line))
return(None)
# Create INFO field
INFO = "MAF_Hugo_Symbol=" + line[0] + ";MAF_ref_context=" + line[15].upper() + ";MAF_Genome_Change=" + line[14] + ";MAF_Variant_Type=" + variantType + ";MAF_Variant_Classification=" + mutType +";DCC_Project_Code=" + line[44]
# Normal variant field if anything
if line[41]=="NA":
normalGenotype = ".:.,.:.:."
else:
normalGenotype = ".:.,.:.:%s"%(line[41])
# Final vcf line out
lineOut = [chrom, pos, rsid, refAllele, altAllele, QUAL, '.', INFO, "GT:AD:DP:VF", normalGenotype, sampleField]
return(lineOut)
def processDEL(line, chrom, pos, rsid, mutType, variantType, strand, errorFile, Options):
ref = line[7]
tAllele1 = line[8] # Normal Allele Typically
tAllele2 = line[9] # Alt Allele Typically
QUAL = line[42]
if QUAL == 'None' or QUAL == 'NA' or QUAL == '':
QUAL = '.'
if ref == tAllele1:
altAllele = tAllele1
refAllele = tAllele2
else:
altAllele = tAllele2
refAllele = tAllele1
# Obtain the reference sequence + 1 preceding base for the DEL
refAnchorPos = str(int(pos)-1) # Fetch the base that precedes the deletion.
refSeq = SamtoolsFaidx(Options.refGenome, chrom + ":" + refAnchorPos + "-" + line[3], check=False)
if refSeq[1:] != altAllele:
print("ERROR: Deletion alternative allele does not match reference sequence. %s" % ('\t'.join(line)))
sys.exit()
# VCF reference is the preceding base plus the reported deletion in the MAF file.
vcfRef = refSeq
# VCF has base directly preceding the deletion as the alternative base and the variant pos
vcfAlt=refSeq[0]
vcfPos=refAnchorPos
# Get read information
iref_reads = line[37]
ialt_reads = line[36]
ref_reads = line[39]
alt_reads = line[38]
reportedVAF = line[28]
i_t_vaf = line[43]
# Get phasing information and determine reads for vaf==1
if (ref_reads != 'NA' or iref_reads!='NA') and (alt_reads != 'NA' or ialt_reads!='NA'):
GT="0/1"
ref_reads = [read for read in [ref_reads, iref_reads] if read != "NA"][0]
alt_reads = [read for read in [alt_reads, ialt_reads] if read != "NA"][0]
total_reads = str(int(ref_reads) + int(alt_reads))
vaf = str(int(alt_reads)/float(total_reads))
elif i_t_vaf!="" and i_t_vaf!="NA" and ref_reads == 'NA' and iref_reads=='NA' and alt_reads == 'NA' and ialt_reads=='NA':
vaf=i_t_vaf
GT="./."
ref_reads = '.'
alt_reads = '.'
total_reads = '.'
elif (i_t_vaf=="" or i_t_vaf=="NA") and ref_reads == 'NA' and iref_reads=='NA' and alt_reads == 'NA' and ialt_reads=='NA':
GT='./.'
ref_reads='.'
alt_reads='.'
total_reads='.'
vaf='.'
else:
sys.exit("ERROR: Problem processing DEL %s"%('\t'.join(line)))
sampleField = ':'.join([GT, ','.join([ref_reads, alt_reads]), total_reads, vaf])
# Create INFO field
INFO = "MAF_Hugo_Symbol=" + line[0] + ";MAF_ref_context=" + line[15].upper() + ";MAF_Genome_Change=" + line[
14] + ";MAF_Variant_Type=" + variantType + ";MAF_Variant_Classification=" + mutType + ";DCC_Project_Code=" + \
line[44]
# Normal variant field if anything
if line[41] == "NA":
normalGenotype = ".:.,.:.:."
else:
normalGenotype = ".:.,.:.:%s" % (line[41])
lineOut = [chrom, vcfPos, rsid, vcfRef, vcfAlt, QUAL, '.', INFO, "GT:AD:DP:VF", normalGenotype, sampleField]
return(lineOut)
def processINS(line, chrom, pos, rsid, mutType, variantType, strand, errorFile, Options):
ref = line[7]
tAllele1 = line[8] # Normal Allele Typically
tAllele2 = line[9] # Alt Allele Typically
QUAL = line[42]
if QUAL == 'None' or QUAL == 'NA' or QUAL == '':
QUAL = '.'
if tAllele1 == '-':
altAllele = tAllele2
else:
altAllele = tAllele1
# Obtain the reference sequence + 1 preceding base for the DEL
refAnchorPos = str(int(pos) - 1) # Fetch the base that precedes the deletion.
refSeq = SamtoolsFaidx(Options.refGenome, chrom + ":" + refAnchorPos + "-" + line[3], check=False)
# VCF reference is the preceding base in the insertion in MAF
vcfRef = refSeq[0]
# VCF has base directly preceding the deletion as the alternative base and the variant pos
vcfAlt = refSeq[0]+altAllele
vcfPos = refAnchorPos
# Get read information
iref_reads = line[37]
ialt_reads = line[36]
ref_reads = line[39]
alt_reads = line[38]
reportedVAF = line[28]
i_t_vaf = line[43]
# Get phasing information and determine reads for vaf==1
if (ref_reads != 'NA' or iref_reads != 'NA') and (alt_reads != 'NA' or ialt_reads != 'NA'):
GT = "0/1"
ref_reads = [read for read in [ref_reads, iref_reads] if read != "NA"][0]
alt_reads = [read for read in [alt_reads, ialt_reads] if read != "NA"][0]
total_reads = str(int(ref_reads) + int(alt_reads))
vaf = str(int(alt_reads) / float(total_reads))
elif i_t_vaf != "" and i_t_vaf != "NA" and ref_reads == 'NA' and iref_reads == 'NA' and alt_reads == 'NA' and ialt_reads == 'NA':
vaf = i_t_vaf
GT = "./."
ref_reads = '.'
alt_reads = '.'
total_reads = '.'
elif (
i_t_vaf == "" or i_t_vaf == "NA") and ref_reads == 'NA' and iref_reads == 'NA' and alt_reads == 'NA' and ialt_reads == 'NA':
GT = './.'
ref_reads = '.'
alt_reads = '.'
total_reads = '.'
vaf = '.'
else:
sys.exit("ERROR: Problem processing INS %s" % ('\t'.join(line)))
sampleField = ':'.join([GT, ','.join([ref_reads, alt_reads]), total_reads, vaf])
# Create INFO field
INFO = "MAF_Hugo_Symbol=" + line[0] + ";MAF_ref_context=" + line[15].upper() + ";MAF_Genome_Change=" + line[
14] + ";MAF_Variant_Type=" + variantType + ";MAF_Variant_Classification=" + mutType + ";DCC_Project_Code=" + \
line[44]
# Normal variant field if anything
if line[41] == "NA":
normalGenotype = ".:.,.:.:."
else:
normalGenotype = ".:.,.:.:%s" % (line[41])
lineOut = [chrom, vcfPos, rsid, vcfRef, vcfAlt, QUAL, '.', INFO, "GT:AD:DP:VF", normalGenotype, sampleField]
return (lineOut)
def CreateVCFLine(line, errorFile, Options):
line = line.rstrip('\n').split('\t')
# Genomic Position
chrom, pos, id = line[1], line[2], line[10]
# Get rs ID
rsid = line[10]
if rsid == '':
rsid = '.'
elif rsid.startswith("rs") == False:
if Options.verbose:
print("ERROR: %s"%(line))
sys.exit("ERROR: Problem in id column")
# Strand Information
strand = line[4]
# Variant Classification/Type (Type is SNP, INS, DEL, etc.)
mutType = line[5]
variantType = line[6]
# Create proper vcf formatted information
if mutType == '':
mutType = '.'
if variantType == '':
variantType = '.'
# Determine type of variant to continue processing.
linetowrite = None
if variantType=="SNP":
linetowrite = processSNP(line, chrom, pos, rsid, mutType, variantType, strand, errorFile, Options)
elif variantType=="DEL":
linetowrite = processDEL(line, chrom, pos, rsid, mutType, variantType, strand, errorFile, Options)
elif variantType=="INS":
linetowrite = processINS(line, chrom, pos, rsid, mutType, variantType, strand, errorFile, Options)
elif variantType=="TNP" or variantType=="ONP":
with open(errorFile, 'a') as errerOut:
errerOut.write('\t'.join(line)+'\n')
    else: # This may seem duplicitous, but I explicitly want to know as much of what I'm choosing to filter out as possible...
if Options.verbose:
print("WARNING: Malformed MAF entry. %s"%('\t'.join(line)))
print('')
with open(errorFile, 'a') as errerOut:
errerOut.write('\t'.join(line)+'\n')
# print(line)
# sys.exit("ERROR: Malformed MAF entry.")
return(linetowrite)
def CreateHeader(ioObject, Options, tumorID, normalID):
now = datetime.datetime.now()
ioObject.write("##fileformat=VCFv4.2\n")
ioObject.write("##fileDate=%s\n"%(now.date()))
ioObject.write("##source=maf2vcf.py\n")
ioObject.write("##reference=%s\n"%(Options.refGenome))
ioObject.write("##sampleColumns=Normal.Tumor\n")
ioObject.write("##INFO=<ID=MAF_Hugo_Symbol,Number=1,Type=String,Description=\"HUGO Symbol in original MAF file.\">\n")
ioObject.write("##INFO=<ID=MAF_ref_context,Number=1,Type=String,Description=\"Reference context in original MAF file.\">\n")
ioObject.write("##INFO=<ID=MAF_Genome_Change,Number=1,Type=String,Description=\"Genome change in original MAF file.\">\n")
ioObject.write("##INFO=<ID=MAF_Variant_Type,Number=1,Type=String,Description=\"Variant type (SNP,INS,DEL) in original MAF file.\">\n")
ioObject.write("##INFO=<ID=MAF_Variant_Classification,Number=1,Type=String,Description=\"Variant Classification (if SNP) in original MAF file.\">\n")
ioObject.write("##INFO=<ID=DCC_Project_Code,Number=1,Type=String,Description=\"DCC Project Code in original MAF file.\">\n")
ioObject.write("##FORMAT=<ID=GT,Number=1,Type=String,Description=\"Genotype\">\n")
ioObject.write("##FORMAT=<ID=AD,Number=2,Type=Integer,Description=\"Allelic depths of REF and ALT(s) in the order listed\">\n")
ioObject.write("##FORMAT=<ID=DP,Number=1,Type=Integer,Description=\"Total read depth across this site\">\n")
ioObject.write("##FORMAT=<ID=VF,Number=1,Type=Float,Description=\"Variant Allele Frequency.\">\n")
ioObject.write("#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\t%s\t%s\n"%(normalID,tumorID))
@fn_timer
def ProcessFile(Options):
n = UpdateProgressGetN(Options.maf)
if Options.spotcheck:
with open(Options.maf, 'r') as inFile:
SpotCheckProperReference(inFile, Options, n)
with open(Options.maf,'r') as inFile:
i = 0
for line in inFile:
if i == 1:
toPullIDs = line.rstrip('\n').split('\t')
break
else:
header = line
i+=1
tumorID = toPullIDs[12]
normalID = toPullIDs[13]
count = 0
i = 0
with open(Options.maf, 'r') as inFile:
with open(Options.outDir + Options.maf.split('/')[len(Options.maf.split('/'))-1].replace('.maf','.vcf'), 'w') as outVCF:
errorFile = Options.outDir + Options.maf.split('/')[len(Options.maf.split('/')) - 1].replace('.maf', '.ignoredSNVs.maf')
with open(errorFile, 'w') as errorOut:
errorOut.write(header)
CreateHeader(outVCF, Options, tumorID, normalID)
for line in inFile:
UpdateProgress(i, n, "Processing Maf File")
if line.startswith('Hugo_Symbol Chromosome Start_position'):
count+=1
i += 1
else:
i += 1
linetoWrite = CreateVCFLine(line, errorFile, Options)
if linetoWrite is not None:
outVCF.write('\t'.join(linetoWrite)+'\n')
print('')
print("INFO: Sorting vcf file.")
vcfFile = Options.outDir + Options.maf.split('/')[len(Options.maf.split('/'))-1].replace('.maf','.vcf')
vcfFileSorted = Options.outDir + Options.maf.split('/')[len(Options.maf.split('/'))-1].replace('.head.maf','.sorted.vcf.gz')
os.system("cat %s | awk '$1 ~ /^#/ {print $0;next} {print $0 | \"LC_ALL=C sort -k1,1 -k2,2n\"}' | gzip > %s"%(vcfFile, vcfFileSorted))
os.system("rm %s"%(vcfFile))
os.system("gzip %s"%(errorFile))
def main():
print("INFO: Processing MAF file.")
FilePath = os.path.dirname(os.path.abspath(__file__))
(Options, Parser) = OptionParsing()
ProcessFile(Options)
if __name__=="__main__":
main() | [
"sys.stdout.write",
"subprocess.Popen",
"os.path.abspath",
"optparse.OptionParser",
"os.system",
"time.time",
"sys.stdout.flush",
"numpy.arange",
"functools.wraps",
"sys.exit",
"datetime.datetime.now",
"numpy.random.shuffle"
] | [((349, 368), 'optparse.OptionParser', 'OptionParser', (['usage'], {}), '(usage)\n', (361, 368), False, 'from optparse import OptionParser\n'), ((1695, 1710), 'functools.wraps', 'wraps', (['function'], {}), '(function)\n', (1700, 1710), False, 'from functools import wraps\n'), ((2702, 2724), 'sys.stdout.write', 'sys.stdout.write', (["'\\r'"], {}), "('\\r')\n", (2718, 2724), False, 'import sys\n'), ((2841, 2859), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2857, 2859), False, 'import sys\n'), ((3296, 3387), 'subprocess.Popen', 'subprocess.Popen', (["['samtools', 'faidx', refGenome, genomicPos]"], {'stdout': 'subprocess.PIPE'}), "(['samtools', 'faidx', refGenome, genomicPos], stdout=\n subprocess.PIPE)\n", (3312, 3387), False, 'import subprocess\n'), ((4394, 4415), 'numpy.arange', 'np.arange', (['fileLength'], {}), '(fileLength)\n', (4403, 4415), True, 'import numpy as np\n'), ((4420, 4440), 'numpy.random.shuffle', 'np.random.shuffle', (['a'], {}), '(a)\n', (4437, 4440), True, 'import numpy as np\n'), ((16034, 16057), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (16055, 16057), False, 'import datetime\n'), ((19407, 19553), 'os.system', 'os.system', (['(\'cat %s | awk \\\'$1 ~ /^#/ {print $0;next} {print $0 | "LC_ALL=C sort -k1,1 -k2,2n"}\\\' | gzip > %s\'\n % (vcfFile, vcfFileSorted))'], {}), '(\n \'cat %s | awk \\\'$1 ~ /^#/ {print $0;next} {print $0 | "LC_ALL=C sort -k1,1 -k2,2n"}\\\' | gzip > %s\'\n % (vcfFile, vcfFileSorted))\n', (19416, 19553), False, 'import os\n'), ((19546, 19574), 'os.system', 'os.system', (["('rm %s' % vcfFile)"], {}), "('rm %s' % vcfFile)\n", (19555, 19574), False, 'import os\n'), ((19579, 19611), 'os.system', 'os.system', (["('gzip %s' % errorFile)"], {}), "('gzip %s' % errorFile)\n", (19588, 19611), False, 'import os\n'), ((1394, 1404), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1402, 1404), False, 'import sys\n'), ((1765, 1776), 'time.time', 'time.time', ([], {}), '()\n', (1774, 1776), False, 'import time\n'), ((1833, 1844), 'time.time', 'time.time', ([], {}), '()\n', (1842, 1844), False, 'import time\n'), ((9331, 9341), 'sys.exit', 'sys.exit', ([], {}), '()\n', (9339, 9341), False, 'import sys\n'), ((19696, 19721), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (19711, 19721), False, 'import os\n'), ((2169, 2226), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE)\n', (2185, 2226), False, 'import subprocess\n'), ((2297, 2354), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE)\n', (2313, 2354), False, 'import subprocess\n'), ((3790, 3800), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3798, 3800), False, 'import sys\n'), ((14550, 14589), 'sys.exit', 'sys.exit', (['"""ERROR: Problem in id column"""'], {}), "('ERROR: Problem in id column')\n", (14558, 14589), False, 'import sys\n'), ((5725, 5750), 'sys.exit', 'sys.exit', (['"""What the fuck"""'], {}), "('What the fuck')\n", (5733, 5750), False, 'import sys\n')] |
from tuneit.graph import visualize
from tuneit.tunable import *
from tuneit.variable import *
from tuneit.tunable import Tunable
from tuneit.finalize import finalize
from pytest import raises
def test_finalize():
with raises(TypeError):
finalize(1)
a = variable(range(10), default=2)
assert finalize(a)[finalize(a).value] == finalize(a).value
c = variable(range(10))
b = finalize(a * a + c)
assert set(b.variables) == set([finalize(a).key, finalize(c).key])
assert b.tunable_variables == b.variables
assert b.compute() == 4
assert b.fixed_variables == b.variables
assert not b.tunable_variables
assert len(b.functions) == 2
assert not b.depends_on(1)
assert b.depends_on(a)
assert b.depends_on(finalize(a).value)
b = b.copy(reset=True)
assert b.tunable_variables == b.variables
assert finalize(a).value.fixed
d = b.copy()
assert d.compute() == 4
assert b.tunable_variables == b.variables
assert d.fixed_variables == b.variables
b.fix("a")
b.fix(finalize(c).value, 1)
assert b.compute() == 5
assert b.fixed_variables == b.variables
with raises(KeyError):
b.fix("foo")
a = variable(range(10), uid=True)
with raises(KeyError):
finalize(a * b).fix("a")
| [
"pytest.raises",
"tuneit.finalize.finalize"
] | [((404, 423), 'tuneit.finalize.finalize', 'finalize', (['(a * a + c)'], {}), '(a * a + c)\n', (412, 423), False, 'from tuneit.finalize import finalize\n'), ((224, 241), 'pytest.raises', 'raises', (['TypeError'], {}), '(TypeError)\n', (230, 241), False, 'from pytest import raises\n'), ((251, 262), 'tuneit.finalize.finalize', 'finalize', (['(1)'], {}), '(1)\n', (259, 262), False, 'from tuneit.finalize import finalize\n'), ((1159, 1175), 'pytest.raises', 'raises', (['KeyError'], {}), '(KeyError)\n', (1165, 1175), False, 'from pytest import raises\n'), ((1246, 1262), 'pytest.raises', 'raises', (['KeyError'], {}), '(KeyError)\n', (1252, 1262), False, 'from pytest import raises\n'), ((315, 326), 'tuneit.finalize.finalize', 'finalize', (['a'], {}), '(a)\n', (323, 326), False, 'from tuneit.finalize import finalize\n'), ((349, 360), 'tuneit.finalize.finalize', 'finalize', (['a'], {}), '(a)\n', (357, 360), False, 'from tuneit.finalize import finalize\n'), ((765, 776), 'tuneit.finalize.finalize', 'finalize', (['a'], {}), '(a)\n', (773, 776), False, 'from tuneit.finalize import finalize\n'), ((869, 880), 'tuneit.finalize.finalize', 'finalize', (['a'], {}), '(a)\n', (877, 880), False, 'from tuneit.finalize import finalize\n'), ((1055, 1066), 'tuneit.finalize.finalize', 'finalize', (['c'], {}), '(c)\n', (1063, 1066), False, 'from tuneit.finalize import finalize\n'), ((327, 338), 'tuneit.finalize.finalize', 'finalize', (['a'], {}), '(a)\n', (335, 338), False, 'from tuneit.finalize import finalize\n'), ((1272, 1287), 'tuneit.finalize.finalize', 'finalize', (['(a * b)'], {}), '(a * b)\n', (1280, 1287), False, 'from tuneit.finalize import finalize\n'), ((461, 472), 'tuneit.finalize.finalize', 'finalize', (['a'], {}), '(a)\n', (469, 472), False, 'from tuneit.finalize import finalize\n'), ((478, 489), 'tuneit.finalize.finalize', 'finalize', (['c'], {}), '(c)\n', (486, 489), False, 'from tuneit.finalize import finalize\n')] |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# pylint: disable=import-error
# pylint: disable=no-member
# pylint: disable=no-name-in-module
import time
import requests
from opentelemetry import metrics
from opentelemetry.instrumentation.requests import RequestsInstrumentor
from opentelemetry.sdk.metrics import MeterProvider
from azure_monitor import AzureMonitorMetricsExporter
# Use the default sdk implementation
metrics.set_meter_provider(MeterProvider(stateful=False))
# Track telemetry from the requests library
RequestsInstrumentor().instrument()
meter = RequestsInstrumentor().meter
exporter = AzureMonitorMetricsExporter(
connection_string="InstrumentationKey=<INSTRUMENTATION KEY HERE>"
)
# Export standard metrics from requests library to Azure Monitor
metrics.get_meter_provider().start_pipeline(meter, exporter, 5)
for x in range(10):
for y in range(10):
requests.get("http://example.com")
time.sleep(2)
time.sleep(5)
input("Press any key to exit...")
| [
"opentelemetry.metrics.get_meter_provider",
"time.sleep",
"opentelemetry.instrumentation.requests.RequestsInstrumentor",
"requests.get",
"azure_monitor.AzureMonitorMetricsExporter",
"opentelemetry.sdk.metrics.MeterProvider"
] | [((656, 755), 'azure_monitor.AzureMonitorMetricsExporter', 'AzureMonitorMetricsExporter', ([], {'connection_string': '"""InstrumentationKey=<INSTRUMENTATION KEY HERE>"""'}), "(connection_string=\n 'InstrumentationKey=<INSTRUMENTATION KEY HERE>')\n", (683, 755), False, 'from azure_monitor import AzureMonitorMetricsExporter\n'), ((496, 525), 'opentelemetry.sdk.metrics.MeterProvider', 'MeterProvider', ([], {'stateful': '(False)'}), '(stateful=False)\n', (509, 525), False, 'from opentelemetry.sdk.metrics import MeterProvider\n'), ((616, 638), 'opentelemetry.instrumentation.requests.RequestsInstrumentor', 'RequestsInstrumentor', ([], {}), '()\n', (636, 638), False, 'from opentelemetry.instrumentation.requests import RequestsInstrumentor\n'), ((1000, 1013), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1010, 1013), False, 'import time\n'), ((572, 594), 'opentelemetry.instrumentation.requests.RequestsInstrumentor', 'RequestsInstrumentor', ([], {}), '()\n', (592, 594), False, 'from opentelemetry.instrumentation.requests import RequestsInstrumentor\n'), ((822, 850), 'opentelemetry.metrics.get_meter_provider', 'metrics.get_meter_provider', ([], {}), '()\n', (848, 850), False, 'from opentelemetry import metrics\n'), ((939, 973), 'requests.get', 'requests.get', (['"""http://example.com"""'], {}), "('http://example.com')\n", (951, 973), False, 'import requests\n'), ((982, 995), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (992, 995), False, 'import time\n')] |
"""
Fabfile template for python3
"""
# -*- coding: utf-8 -*-
from __future__ import print_function
from slackclient import SlackClient
from fabric.api import cd, env, task, run, settings, local
from fabfile_config import *
import traceback
from fabric.contrib.files import exists
LAST_CID_FILE = "last_commit_id.txt"
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
class FabSlack(metaclass=Singleton):
sc = SlackClient(SLACK_API_KEY)
def send(self, **kargs):
try:
self.sc.api_call(
"chat.postMessage",
channel="#log-info",
username='Deployment',
# as_user=True,
icon_emoji=":gear:",
**kargs
)
except Exception:
traceback.print_exc()
sc = FabSlack()
@task
def test(target_host):
pass
@task
def set_host(target_host='dev'):
"""Set host before deploy,
NOTE: plz configure ssh config file on your local machine first.
Eg use: `fab set_host:dev deploy`
:param: target_host string
"""
env.use_ssh_config = True
env.hosts = [target_host]
@task
def deploy():
try:
target_host = env.hosts[0]
except IndexError:
target_host = 'dev'
with cd(HOST_API[target_host]['dir']):
do_deploy()
def run_cmd(cmd, target_host=None, local_capture=True):
"""
    Run cmd on the local or remote host and either return the output or print it to the terminal screen
    :param string cmd: Command to run
    :param string target_host: local or remote host name
    :param bool local_capture: If True, return the output without printing anything to the terminal; if False, print the output to the terminal
    :return: Output string if capture=True, otherwise nothing
"""
result = ''
with settings(warn_only=True):
fn = "local" if target_host == 'local' else "run"
if fn == 'local':
result = local(cmd, local_capture) # Do not print to terminal and get the output
else:
result = run(cmd, warn_only=True, pty=False)
if result.failed:
print(result.stdout)
attachments = [{
"title": 'Command: {}'.format(result.command),
"color": "danger",
"pretext": 'Detail: {}'.format(result),
"mrkdwn_in": ["text", "pretext"]
}]
sc.send(attachments=attachments, text="Deploy to *{}* error".format(env.hosts[0]))
raise SystemExit()
else:
return result
def do_deploy():
if not exists("{}/{}".format(HOST_API[env.hosts[0]]['dir'], LAST_CID_FILE)):
save_last_commit()
run_cmd("git pull")
run_testing()
restart_api()
send_commit_applied()
save_last_commit()
def run_testing():
pass
def restart_api():
pass
def get_current_commit():
return run_cmd("git rev-parse HEAD")
def save_last_commit():
run_cmd("git rev-parse HEAD > {}".format(LAST_CID_FILE))
def get_last_commit():
return run_cmd("cat {}".format(LAST_CID_FILE))
def get_git_logs(last_commit_id, current_commit_id):
return run_cmd("git log {}...{} --oneline --pretty=format:'%s'".format(last_commit_id, current_commit_id))
def send_commit_applied():
last_commit_id = get_last_commit()
current_commit_id = get_current_commit()
commit_applied = get_git_logs(last_commit_id, current_commit_id)
if commit_applied:
commit_applied = "••• " + commit_applied
commit_applied = commit_applied.replace("\n", "\n••• ")
attachments = [
{
"color": "good",
"title": "Commit applied:",
"text": commit_applied,
},
]
sc.send(attachments=attachments, text="Deploy to *{}* success".format(env.hosts[0]))
| [
"traceback.print_exc",
"fabric.api.cd",
"fabric.api.settings",
"slackclient.SlackClient",
"fabric.api.local",
"fabric.api.run"
] | [((608, 634), 'slackclient.SlackClient', 'SlackClient', (['SLACK_API_KEY'], {}), '(SLACK_API_KEY)\n', (619, 634), False, 'from slackclient import SlackClient\n'), ((1451, 1483), 'fabric.api.cd', 'cd', (["HOST_API[target_host]['dir']"], {}), "(HOST_API[target_host]['dir'])\n", (1453, 1483), False, 'from fabric.api import cd, env, task, run, settings, local\n'), ((2007, 2031), 'fabric.api.settings', 'settings', ([], {'warn_only': '(True)'}), '(warn_only=True)\n', (2015, 2031), False, 'from fabric.api import cd, env, task, run, settings, local\n'), ((2138, 2163), 'fabric.api.local', 'local', (['cmd', 'local_capture'], {}), '(cmd, local_capture)\n', (2143, 2163), False, 'from fabric.api import cd, env, task, run, settings, local\n'), ((2246, 2281), 'fabric.api.run', 'run', (['cmd'], {'warn_only': '(True)', 'pty': '(False)'}), '(cmd, warn_only=True, pty=False)\n', (2249, 2281), False, 'from fabric.api import cd, env, task, run, settings, local\n'), ((965, 986), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (984, 986), False, 'import traceback\n')] |
import torch
import torch.nn as nn
import math
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class MonotonicGruCell(nn.Module):
def __init__(self, input_size, hidden_size, bias=True):
super().__init__()
"""
For each element in the input sequence, each layer computes the following
function:
MonotonicGru Math
\begin{array}{ll}
r_t = \sigma(W_{ir} x_t + b_{ir} + W_{hr} h_{(t-1)} + b_{hr}) \\
z_t = \sigma(W_{iz} x_t + b_{iz} + W_{hz} h_{(t-1)} + b_{hz}) \\
        n_t = \sigma(W_{in} x_t + b_{in} + (W_{hn}(r_t* h_{(t-1)})+ b_{hn})) \\
        h_t = h_{(t-1)} + (1 - h_{(t-1)}) * z_t * n_t
\end{array}
"""
self.input_size = input_size
self.hidden_size = hidden_size
self.i2h = nn.Linear(input_size, 3 * hidden_size, bias=bias)
self.h2h = nn.Linear(hidden_size, 3 * hidden_size, bias=bias)
self.reset_parameters()
def reset_parameters(self):
std = 1.0 / math.sqrt(self.hidden_size)
for w in self.parameters():
w.data.uniform_(-std, std)
def forward(self, x, hidden=None):
# x is B, input_size
if hidden is None:
hidden = torch.zeros(x.size(0), self.hidden_size).to(device)
gi = self.i2h(x) # B, 3H
gh = self.h2h(hidden) # B, 3H
i_r, i_i, i_n = gi.chunk(3, 1)
h_r, h_i, h_n = gh.chunk(3, 1)
resetgate_tmp = i_r + h_r
inputgate_tmp = i_i + h_i
sigmoid = nn.Sigmoid()
resetgate = sigmoid(resetgate_tmp)
inputgate = sigmoid(inputgate_tmp)
hr = self.h2h(hidden * resetgate)
_, _, h_n = hr.chunk(3, 1)
newgate = sigmoid(i_n + h_n)
hy = hidden + (1.-hidden) * inputgate * newgate
return hy
class MonotonicGru(nn.Module):
def __init__(self, input_size, hidden_size, bias=True, num_layers=1, batch_first=False, dropout=0.0):
super().__init__()
self.cell = MonotonicGruCell(
input_size=input_size, hidden_size=hidden_size, bias=True)
self.batch_first = batch_first
def forward(self, input_, lengths, hidden=None):
        # input_ is of dimensionality (T, B, input_size, ...)
        # lengths is B,
dim = 1 if self.batch_first else 0
outputs = []
for x in torch.unbind(input_, dim=dim): # x dim is B, I
hidden = self.cell(x, hidden)
outputs.append(hidden.clone())
hidden_states = torch.stack(outputs) # T, B, H
last_states = []
for idx, l in enumerate(lengths):
last_states.append(hidden_states[l-1, idx, :])
last_states = torch.stack(last_states)
return hidden_states, last_states
| [
"torch.stack",
"math.sqrt",
"torch.cuda.is_available",
"torch.nn.Linear",
"torch.unbind",
"torch.nn.Sigmoid"
] | [((79, 104), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (102, 104), False, 'import torch\n'), ((819, 868), 'torch.nn.Linear', 'nn.Linear', (['input_size', '(3 * hidden_size)'], {'bias': 'bias'}), '(input_size, 3 * hidden_size, bias=bias)\n', (828, 868), True, 'import torch.nn as nn\n'), ((888, 938), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', '(3 * hidden_size)'], {'bias': 'bias'}), '(hidden_size, 3 * hidden_size, bias=bias)\n', (897, 938), True, 'import torch.nn as nn\n'), ((1535, 1547), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (1545, 1547), True, 'import torch.nn as nn\n'), ((2358, 2387), 'torch.unbind', 'torch.unbind', (['input_'], {'dim': 'dim'}), '(input_, dim=dim)\n', (2370, 2387), False, 'import torch\n'), ((2516, 2536), 'torch.stack', 'torch.stack', (['outputs'], {}), '(outputs)\n', (2527, 2536), False, 'import torch\n'), ((2696, 2720), 'torch.stack', 'torch.stack', (['last_states'], {}), '(last_states)\n', (2707, 2720), False, 'import torch\n'), ((1024, 1051), 'math.sqrt', 'math.sqrt', (['self.hidden_size'], {}), '(self.hidden_size)\n', (1033, 1051), False, 'import math\n')] |
import numpy as np
from pomegranate import *
import json
################################################################################
# LOGGING
################################################################################
import logging
# Logging format
FORMAT = '%(asctime)s SigMa %(levelname)-10s: %(message)s'
logging.basicConfig(format=FORMAT)
def get_logger(verbosity=logging.INFO):
'''
Returns logger object
'''
logger = logging.getLogger(__name__)
logger.setLevel(verbosity)
return logger
################################################################################
# UTILS
################################################################################
def sample_and_noise(model, noise_dist, n_seqs, seqs_len):
noise_change_dist = DiscreteDistribution(dict(zip(range(96), [1.0 / 96] * 96)))
seqs = []
noised_seqs = []
for i in range(n_seqs):
seq = np.array(model.sample(seqs_len))
seqs.append(seq)
noised_seq = seq.copy()
hits = noise_dist.sample(seqs_len)
for j, hit in enumerate(hits):
if hit == 0:
noised_seq[j] = noise_change_dist.sample()
noised_seqs.append(noised_seq)
return seqs, noised_seqs
def get_emissions(file='data/emissions_for_breast_cancer'):
return np.load(file + '.npy')
def sample_uniform_between_a_b(n_states, a=0.0, b=1.0):
return (b - a) * np.random.sample(n_states) + a
def random_seqs_from_json(file_name, n_seqs=10):
seqs = []
seqs_names = []
json_file = json.load(open(file_name))
samples = json_file[u'samples']
samples_to_seq = json_file[u'sampleToSequence']
samples = np.random.permutation(samples)
for i in range(n_seqs):
seqs.append(samples_to_seq[samples[i]])
seqs_names.append(samples[i])
return seqs, seqs_names
def to_json(file_name, dict_to_save):
with open(file_name + '.json', 'w') as fp:
json.dump(dict_to_save, fp)
def full_sample_to_chromosomes_seqs(sample, dists_sample):
np_sample = np.array(sample)
starting_chromosome_idxs = np.where(np.array(dists_sample) >= 1e100)[0]
return np.split(np_sample, starting_chromosome_idxs)[1:]
def load_json(file_name):
return json.load(open(file_name))
def get_split_sequences(file_name, sample_numbers=None):
json_file = json.load(open(file_name))
samples = json_file[u'samples']
samples_to_seq = json_file[u'sampleToSequence']
samples_dists = json_file[u'sampleToPrevMutDists']
out_seqs = []
out_names = []
if sample_numbers is None:
sample_numbers = range(len(samples))
for i in sample_numbers:
n = samples[i]
out_names.append(n)
out_seqs.append(full_sample_to_chromosomes_seqs(samples_to_seq[n], samples_dists[n]))
return zip(out_names, out_seqs)
def get_full_sequences(file_name='data/nik-zainal2016-wgs-brca-mutations-for-hmm.json'):
json_file = json.load(open(file_name))
samples = json_file[u'samples']
samples_to_seq = json_file[u'sampleToSequence']
out_seqs = []
out_names = []
for n in samples:
out_names.append(n)
out_seqs.append(samples_to_seq[n])
return zip(out_names, out_seqs)
def get_count_sequences_as_mat(file_name='data/nik-zainal2016-wgs-brca-mutations-for-hmm.json'):
json_file = json.load(open(file_name))
samples = json_file[u'samples']
samples_to_seq = json_file[u'sampleToSequence']
# finding num_object + counting
num_objects = 0
samples_objects = []
samples_counts = []
for sample in samples:
objects, counts = np.unique(samples_to_seq[sample], return_counts=True)
samples_objects.append(objects)
samples_counts.append(counts)
num_objects = max(num_objects, np.max(objects))
num_objects += 1
count_mat = np.zeros((len(samples), num_objects))
for i in range(len(samples)):
count_mat[i, samples_objects[i]] = samples_counts[i]
return count_mat
def get_samples_names(file_name='data/nik-zainal2016-wgs-brca-mutations-for-hmm.json'):
json_file = json.load(open(file_name))
samples = json_file[u'samples']
return samples
def get_split_sequences_by_threshold(file_name, threshold, sample_numbers=None):
json_file = json.load(open(file_name))
samples = json_file[u'samples']
samples_to_seq = json_file[u'sampleToSequence']
samples_dists = json_file[u'sampleToPrevMutDists']
out_seqs = []
out_names = []
if sample_numbers is None:
sample_numbers = range(len(samples))
for i in sample_numbers:
n = samples[i]
out_names.append(n)
out_seqs.append(full_sample_to_chromosomes_seqs_by_threshold(samples_to_seq[n], samples_dists[n], threshold))
return zip(out_names, out_seqs)
def full_sample_to_chromosomes_seqs_by_threshold(sample, dists_sample, threshold):
np_sample = np.array(sample)
np_dists = np.array(dists_sample)
starting_chromosome_idxs = np.where(np_dists >= 1e100)[0]
chromosomes = np.split(np_sample, starting_chromosome_idxs)[1:]
chromosomes_dists = np.split(np_dists, starting_chromosome_idxs)[1:]
out = []
for i in range(len(chromosomes)):
chromosome = chromosomes[i]
chromosome_dists = chromosomes_dists[i]
starting_seqs_idxs = np.where(chromosome_dists >= threshold)[0]
seqs = np.split(chromosome, starting_seqs_idxs)[1:]
out.append(seqs)
return out
def seqs_to_seq(seqs):
out = []
for seq in seqs:
out.extend(seq)
return np.array(out)
def seqs_to_seq_of_prefix(seqs):
out = []
for seq in seqs:
out.append(seq[0])
return np.array(out)
def sample_indices_not_in_dir(dir_path):
import os
samples_in_dir = [f[:-5] for f in os.listdir(dir_path)]
samples = get_samples_names()
missing_indices = []
for i in range(len(samples)):
if samples[i] not in samples_in_dir:
missing_indices.append(i)
return missing_indices
| [
"json.dump",
"numpy.load",
"logging.basicConfig",
"numpy.unique",
"numpy.split",
"numpy.max",
"numpy.where",
"numpy.array",
"numpy.random.permutation",
"os.listdir",
"logging.getLogger",
"numpy.random.sample"
] | [((322, 356), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': 'FORMAT'}), '(format=FORMAT)\n', (341, 356), False, 'import logging\n'), ((453, 480), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (470, 480), False, 'import logging\n'), ((1318, 1340), 'numpy.load', 'np.load', (["(file + '.npy')"], {}), "(file + '.npy')\n", (1325, 1340), True, 'import numpy as np\n'), ((1681, 1711), 'numpy.random.permutation', 'np.random.permutation', (['samples'], {}), '(samples)\n', (1702, 1711), True, 'import numpy as np\n'), ((2054, 2070), 'numpy.array', 'np.array', (['sample'], {}), '(sample)\n', (2062, 2070), True, 'import numpy as np\n'), ((4908, 4924), 'numpy.array', 'np.array', (['sample'], {}), '(sample)\n', (4916, 4924), True, 'import numpy as np\n'), ((4940, 4962), 'numpy.array', 'np.array', (['dists_sample'], {}), '(dists_sample)\n', (4948, 4962), True, 'import numpy as np\n'), ((5571, 5584), 'numpy.array', 'np.array', (['out'], {}), '(out)\n', (5579, 5584), True, 'import numpy as np\n'), ((5692, 5705), 'numpy.array', 'np.array', (['out'], {}), '(out)\n', (5700, 5705), True, 'import numpy as np\n'), ((1949, 1976), 'json.dump', 'json.dump', (['dict_to_save', 'fp'], {}), '(dict_to_save, fp)\n', (1958, 1976), False, 'import json\n'), ((2158, 2203), 'numpy.split', 'np.split', (['np_sample', 'starting_chromosome_idxs'], {}), '(np_sample, starting_chromosome_idxs)\n', (2166, 2203), True, 'import numpy as np\n'), ((3621, 3674), 'numpy.unique', 'np.unique', (['samples_to_seq[sample]'], {'return_counts': '(True)'}), '(samples_to_seq[sample], return_counts=True)\n', (3630, 3674), True, 'import numpy as np\n'), ((4995, 5023), 'numpy.where', 'np.where', (['(np_dists >= 1e+100)'], {}), '(np_dists >= 1e+100)\n', (5003, 5023), True, 'import numpy as np\n'), ((5045, 5090), 'numpy.split', 'np.split', (['np_sample', 'starting_chromosome_idxs'], {}), '(np_sample, starting_chromosome_idxs)\n', (5053, 5090), True, 'import numpy as np\n'), ((5119, 5163), 'numpy.split', 'np.split', (['np_dists', 'starting_chromosome_idxs'], {}), '(np_dists, starting_chromosome_idxs)\n', (5127, 5163), True, 'import numpy as np\n'), ((1420, 1446), 'numpy.random.sample', 'np.random.sample', (['n_states'], {}), '(n_states)\n', (1436, 1446), True, 'import numpy as np\n'), ((3792, 3807), 'numpy.max', 'np.max', (['objects'], {}), '(objects)\n', (3798, 3807), True, 'import numpy as np\n'), ((5334, 5373), 'numpy.where', 'np.where', (['(chromosome_dists >= threshold)'], {}), '(chromosome_dists >= threshold)\n', (5342, 5373), True, 'import numpy as np\n'), ((5392, 5432), 'numpy.split', 'np.split', (['chromosome', 'starting_seqs_idxs'], {}), '(chromosome, starting_seqs_idxs)\n', (5400, 5432), True, 'import numpy as np\n'), ((5801, 5821), 'os.listdir', 'os.listdir', (['dir_path'], {}), '(dir_path)\n', (5811, 5821), False, 'import os\n'), ((2111, 2133), 'numpy.array', 'np.array', (['dists_sample'], {}), '(dists_sample)\n', (2119, 2133), True, 'import numpy as np\n')] |
import serial
import time
import sys,ast
message='';
c=' '.join(sys.argv[1:])
num=c.replace("[","").replace("]","").split(",")
message=num.pop()
class TextMessage:
# def __init__(self):
# self.recipient = recipient
# self.content = message
def connectPhone(self):
self.ser = serial.Serial('COM7', 9600, timeout=5, xonxoff = False, rtscts = False, bytesize = serial.EIGHTBITS, parity = serial.PARITY_NONE, stopbits = serial.STOPBITS_ONE)
time.sleep(1)
def sendMessage(self,recipient, message):
self.ser.write('ATZ\r'.encode())
time.sleep(0.5)
self.ser.write('AT+CMGF=1\r'.encode())
time.sleep(0.5)
self.ser.write(('''AT+CMGS="''' + recipient + '''"\r''').encode())
time.sleep(0.5)
self.ser.write((message + "\r").encode())
time.sleep(0.5)
self.ser.write(chr(26).encode())
time.sleep(0.5)
def disconnectPhone(self):
self.ser.close()
sms = TextMessage()
sms.connectPhone()
for numbers in num:
print(numbers)
sms.sendMessage(numbers,message)
#time.sleep(0.5)
sms.disconnectPhone()
print ("1")
| [
"serial.Serial",
"time.sleep"
] | [((351, 511), 'serial.Serial', 'serial.Serial', (['"""COM7"""', '(9600)'], {'timeout': '(5)', 'xonxoff': '(False)', 'rtscts': '(False)', 'bytesize': 'serial.EIGHTBITS', 'parity': 'serial.PARITY_NONE', 'stopbits': 'serial.STOPBITS_ONE'}), "('COM7', 9600, timeout=5, xonxoff=False, rtscts=False,\n bytesize=serial.EIGHTBITS, parity=serial.PARITY_NONE, stopbits=serial.\n STOPBITS_ONE)\n", (364, 511), False, 'import serial\n'), ((526, 539), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (536, 539), False, 'import time\n'), ((652, 667), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (662, 667), False, 'import time\n'), ((733, 748), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (743, 748), False, 'import time\n'), ((842, 857), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (852, 857), False, 'import time\n'), ((926, 941), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (936, 941), False, 'import time\n'), ((1001, 1016), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (1011, 1016), False, 'import time\n')] |
from flask import Flask
import baritone
import json
app = Flask(__name__)
@app.route('/')
def hello():
print("Hello from terminal")
return "Hello world"
@app.route('/youtube/<link>')
def youtube(link):
print("ENTERED")
url = 'https://www.youtube.com/watch?v='+link
print(url)
result,status = (baritone.pipeline(url,'youtube'))
convert = {
'url': url,
'text': result,
'converted':status
}
return json.dumps(convert)
if __name__ == '__main__':
print("Starting server")
app.run(host='0.0.0.0') | [
"flask.Flask",
"baritone.pipeline",
"json.dumps"
] | [((60, 75), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (65, 75), False, 'from flask import Flask\n'), ((306, 339), 'baritone.pipeline', 'baritone.pipeline', (['url', '"""youtube"""'], {}), "(url, 'youtube')\n", (323, 339), False, 'import baritone\n'), ((417, 436), 'json.dumps', 'json.dumps', (['convert'], {}), '(convert)\n', (427, 436), False, 'import json\n')] |
#!/usr/bin/env python3
# Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
from subprocess import call, STDOUT
from shutil import copyfile
import sys
import os
import fileinput
ORIGINAL_FIX_VERSION_HS = "gen-source/Version.hs.template"
GENERATED_VERSION_HS = "DA/Sdk/Cli/Version.hs"
ORIGINAL_MOCKSERVER_HS = "gen-source/Mockserver.hs.template"
GENERATED_MOCKSERVER_HS = "Mockserver.hs"
def main(version=None):
if version is None:
version = "HEAD"
print("Stack Builder started...")
try:
basedir = os.path.dirname(os.path.realpath(__file__))
gen_vsn_hs = "%s/%s" % (basedir, GENERATED_VERSION_HS)
print("Generating %s..." % GENERATED_VERSION_HS)
copyfile("%s/%s" % (basedir, ORIGINAL_FIX_VERSION_HS), gen_vsn_hs)
replace_template_var(gen_vsn_hs, "<VERSION-VAR>", version)
print("Generating %s..." % GENERATED_MOCKSERVER_HS)
copyfile("%s/%s" % (basedir, ORIGINAL_MOCKSERVER_HS), "%s/%s" % (basedir, GENERATED_MOCKSERVER_HS))
print("Running stack build...")
call(["stack", "build"], stderr=sys.stderr, stdout=sys.stdout)
finally:
try:
print("Removing generated files...")
os.remove(GENERATED_VERSION_HS)
os.remove(GENERATED_MOCKSERVER_HS)
except OSError:
pass
def replace_template_var(template_file, var, value):
with fileinput.FileInput(template_file, inplace=True, backup='.bak') as file:
for line in file:
print(line.replace(var, value), end='')
if __name__ == "__main__":
if len(sys.argv) > 1:
version = sys.argv[1]
else:
version = None
main(version) | [
"os.remove",
"os.path.realpath",
"fileinput.FileInput",
"subprocess.call",
"shutil.copyfile"
] | [((758, 824), 'shutil.copyfile', 'copyfile', (["('%s/%s' % (basedir, ORIGINAL_FIX_VERSION_HS))", 'gen_vsn_hs'], {}), "('%s/%s' % (basedir, ORIGINAL_FIX_VERSION_HS), gen_vsn_hs)\n", (766, 824), False, 'from shutil import copyfile\n'), ((948, 1051), 'shutil.copyfile', 'copyfile', (["('%s/%s' % (basedir, ORIGINAL_MOCKSERVER_HS))", "('%s/%s' % (basedir, GENERATED_MOCKSERVER_HS))"], {}), "('%s/%s' % (basedir, ORIGINAL_MOCKSERVER_HS), '%s/%s' % (basedir,\n GENERATED_MOCKSERVER_HS))\n", (956, 1051), False, 'from shutil import copyfile\n'), ((1089, 1151), 'subprocess.call', 'call', (["['stack', 'build']"], {'stderr': 'sys.stderr', 'stdout': 'sys.stdout'}), "(['stack', 'build'], stderr=sys.stderr, stdout=sys.stdout)\n", (1093, 1151), False, 'from subprocess import call, STDOUT\n'), ((1386, 1449), 'fileinput.FileInput', 'fileinput.FileInput', (['template_file'], {'inplace': '(True)', 'backup': '""".bak"""'}), "(template_file, inplace=True, backup='.bak')\n", (1405, 1449), False, 'import fileinput\n'), ((614, 640), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (630, 640), False, 'import os\n'), ((1221, 1252), 'os.remove', 'os.remove', (['GENERATED_VERSION_HS'], {}), '(GENERATED_VERSION_HS)\n', (1230, 1252), False, 'import os\n'), ((1259, 1293), 'os.remove', 'os.remove', (['GENERATED_MOCKSERVER_HS'], {}), '(GENERATED_MOCKSERVER_HS)\n', (1268, 1293), False, 'import os\n')] |
# vim: set fileencoding=utf-8 filetype=python :
import logging
log = logging.getLogger(__name__) | [
"logging.getLogger"
] | [((71, 98), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (88, 98), False, 'import logging\n')] |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from ondewo.survey import survey_pb2 as ondewo_dot_survey_dot_survey__pb2
class SurveysStub(object):
"""///// Services ///////
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.CreateSurvey = channel.unary_unary(
'/ondewo.survey.Surveys/CreateSurvey',
request_serializer=ondewo_dot_survey_dot_survey__pb2.CreateSurveyRequest.SerializeToString,
response_deserializer=ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
)
self.GetSurvey = channel.unary_unary(
'/ondewo.survey.Surveys/GetSurvey',
request_serializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyRequest.SerializeToString,
response_deserializer=ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
)
self.UpdateSurvey = channel.unary_unary(
'/ondewo.survey.Surveys/UpdateSurvey',
request_serializer=ondewo_dot_survey_dot_survey__pb2.UpdateSurveyRequest.SerializeToString,
response_deserializer=ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
)
self.DeleteSurvey = channel.unary_unary(
'/ondewo.survey.Surveys/DeleteSurvey',
request_serializer=ondewo_dot_survey_dot_survey__pb2.DeleteSurveyRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ListSurveys = channel.unary_unary(
'/ondewo.survey.Surveys/ListSurveys',
request_serializer=ondewo_dot_survey_dot_survey__pb2.ListSurveysRequest.SerializeToString,
response_deserializer=ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.FromString,
)
self.GetSurveyAnswers = channel.unary_unary(
'/ondewo.survey.Surveys/GetSurveyAnswers',
request_serializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyAnswersRequest.SerializeToString,
response_deserializer=ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString,
)
self.GetAllSurveyAnswers = channel.unary_unary(
'/ondewo.survey.Surveys/GetAllSurveyAnswers',
request_serializer=ondewo_dot_survey_dot_survey__pb2.GetAllSurveyAnswersRequest.SerializeToString,
response_deserializer=ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString,
)
self.CreateAgentSurvey = channel.unary_unary(
'/ondewo.survey.Surveys/CreateAgentSurvey',
request_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
response_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,
)
self.UpdateAgentSurvey = channel.unary_unary(
'/ondewo.survey.Surveys/UpdateAgentSurvey',
request_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
response_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,
)
self.DeleteAgentSurvey = channel.unary_unary(
'/ondewo.survey.Surveys/DeleteAgentSurvey',
request_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
class SurveysServicer(object):
"""///// Services ///////
"""
def CreateSurvey(self, request, context):
"""Create a Survey and an empty NLU Agent for it
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSurvey(self, request, context):
"""Retrieve a Survey message from the Database and return it
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateSurvey(self, request, context):
"""Update an existing Survey message from the Database and return it
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteSurvey(self, request, context):
"""Delete a survey and its associated agent (if existent)
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListSurveys(self, request, context):
"""Returns the list of all surveys in the server
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSurveyAnswers(self, request, context):
"""Retrieve answers to survey questions collected in interactions with a survey agent for a specific session
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetAllSurveyAnswers(self, request, context):
"""Retrieve all answers to survey questions collected in interactions with a survey agent in any session
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateAgentSurvey(self, request, context):
"""Populate and configures an NLU Agent from a Survey
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateAgentSurvey(self, request, context):
"""Update an NLU agent from a survey
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteAgentSurvey(self, request, context):
"""Deletes all data of an NLU agent associated to a survey
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_SurveysServicer_to_server(servicer, server):
rpc_method_handlers = {
'CreateSurvey': grpc.unary_unary_rpc_method_handler(
servicer.CreateSurvey,
request_deserializer=ondewo_dot_survey_dot_survey__pb2.CreateSurveyRequest.FromString,
response_serializer=ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString,
),
'GetSurvey': grpc.unary_unary_rpc_method_handler(
servicer.GetSurvey,
request_deserializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyRequest.FromString,
response_serializer=ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString,
),
'UpdateSurvey': grpc.unary_unary_rpc_method_handler(
servicer.UpdateSurvey,
request_deserializer=ondewo_dot_survey_dot_survey__pb2.UpdateSurveyRequest.FromString,
response_serializer=ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString,
),
'DeleteSurvey': grpc.unary_unary_rpc_method_handler(
servicer.DeleteSurvey,
request_deserializer=ondewo_dot_survey_dot_survey__pb2.DeleteSurveyRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'ListSurveys': grpc.unary_unary_rpc_method_handler(
servicer.ListSurveys,
request_deserializer=ondewo_dot_survey_dot_survey__pb2.ListSurveysRequest.FromString,
response_serializer=ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.SerializeToString,
),
'GetSurveyAnswers': grpc.unary_unary_rpc_method_handler(
servicer.GetSurveyAnswers,
request_deserializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyAnswersRequest.FromString,
response_serializer=ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.SerializeToString,
),
'GetAllSurveyAnswers': grpc.unary_unary_rpc_method_handler(
servicer.GetAllSurveyAnswers,
request_deserializer=ondewo_dot_survey_dot_survey__pb2.GetAllSurveyAnswersRequest.FromString,
response_serializer=ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.SerializeToString,
),
'CreateAgentSurvey': grpc.unary_unary_rpc_method_handler(
servicer.CreateAgentSurvey,
request_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.FromString,
response_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.SerializeToString,
),
'UpdateAgentSurvey': grpc.unary_unary_rpc_method_handler(
servicer.UpdateAgentSurvey,
request_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.FromString,
response_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.SerializeToString,
),
'DeleteAgentSurvey': grpc.unary_unary_rpc_method_handler(
servicer.DeleteAgentSurvey,
request_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'ondewo.survey.Surveys', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Surveys(object):
"""///// Services ///////
"""
@staticmethod
def CreateSurvey(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/CreateSurvey',
ondewo_dot_survey_dot_survey__pb2.CreateSurveyRequest.SerializeToString,
ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetSurvey(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/GetSurvey',
ondewo_dot_survey_dot_survey__pb2.GetSurveyRequest.SerializeToString,
ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateSurvey(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/UpdateSurvey',
ondewo_dot_survey_dot_survey__pb2.UpdateSurveyRequest.SerializeToString,
ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteSurvey(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/DeleteSurvey',
ondewo_dot_survey_dot_survey__pb2.DeleteSurveyRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListSurveys(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/ListSurveys',
ondewo_dot_survey_dot_survey__pb2.ListSurveysRequest.SerializeToString,
ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetSurveyAnswers(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/GetSurveyAnswers',
ondewo_dot_survey_dot_survey__pb2.GetSurveyAnswersRequest.SerializeToString,
ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetAllSurveyAnswers(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/GetAllSurveyAnswers',
ondewo_dot_survey_dot_survey__pb2.GetAllSurveyAnswersRequest.SerializeToString,
ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateAgentSurvey(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/CreateAgentSurvey',
ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateAgentSurvey(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/UpdateAgentSurvey',
ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteAgentSurvey(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/DeleteAgentSurvey',
ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| [
"grpc.method_handlers_generic_handler",
"grpc.unary_unary_rpc_method_handler",
"grpc.experimental.unary_unary"
] | [((10488, 10574), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""ondewo.survey.Surveys"""', 'rpc_method_handlers'], {}), "('ondewo.survey.Surveys',\n rpc_method_handlers)\n", (10524, 10574), False, 'import grpc\n'), ((7071, 7310), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.CreateSurvey'], {'request_deserializer': 'ondewo_dot_survey_dot_survey__pb2.CreateSurveyRequest.FromString', 'response_serializer': 'ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString'}), '(servicer.CreateSurvey,\n request_deserializer=ondewo_dot_survey_dot_survey__pb2.\n CreateSurveyRequest.FromString, response_serializer=\n ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString)\n', (7106, 7310), False, 'import grpc\n'), ((7398, 7631), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetSurvey'], {'request_deserializer': 'ondewo_dot_survey_dot_survey__pb2.GetSurveyRequest.FromString', 'response_serializer': 'ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString'}), '(servicer.GetSurvey,\n request_deserializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyRequest\n .FromString, response_serializer=ondewo_dot_survey_dot_survey__pb2.\n Survey.SerializeToString)\n', (7433, 7631), False, 'import grpc\n'), ((7722, 7961), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.UpdateSurvey'], {'request_deserializer': 'ondewo_dot_survey_dot_survey__pb2.UpdateSurveyRequest.FromString', 'response_serializer': 'ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString'}), '(servicer.UpdateSurvey,\n request_deserializer=ondewo_dot_survey_dot_survey__pb2.\n UpdateSurveyRequest.FromString, response_serializer=\n ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString)\n', (7757, 7961), False, 'import grpc\n'), ((8052, 8291), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DeleteSurvey'], {'request_deserializer': 'ondewo_dot_survey_dot_survey__pb2.DeleteSurveyRequest.FromString', 'response_serializer': 'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.DeleteSurvey,\n request_deserializer=ondewo_dot_survey_dot_survey__pb2.\n DeleteSurveyRequest.FromString, response_serializer=\n google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString)\n', (8087, 8291), False, 'import grpc\n'), ((8381, 8631), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.ListSurveys'], {'request_deserializer': 'ondewo_dot_survey_dot_survey__pb2.ListSurveysRequest.FromString', 'response_serializer': 'ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.SerializeToString'}), '(servicer.ListSurveys,\n request_deserializer=ondewo_dot_survey_dot_survey__pb2.\n ListSurveysRequest.FromString, response_serializer=\n ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.SerializeToString)\n', (8416, 8631), False, 'import grpc\n'), ((8726, 8988), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetSurveyAnswers'], {'request_deserializer': 'ondewo_dot_survey_dot_survey__pb2.GetSurveyAnswersRequest.FromString', 'response_serializer': 'ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.SerializeToString'}), '(servicer.GetSurveyAnswers,\n request_deserializer=ondewo_dot_survey_dot_survey__pb2.\n GetSurveyAnswersRequest.FromString, response_serializer=\n ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.SerializeToString)\n', (8761, 8988), 
False, 'import grpc\n'), ((9086, 9354), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetAllSurveyAnswers'], {'request_deserializer': 'ondewo_dot_survey_dot_survey__pb2.GetAllSurveyAnswersRequest.FromString', 'response_serializer': 'ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.SerializeToString'}), '(servicer.GetAllSurveyAnswers,\n request_deserializer=ondewo_dot_survey_dot_survey__pb2.\n GetAllSurveyAnswersRequest.FromString, response_serializer=\n ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.SerializeToString)\n', (9121, 9354), False, 'import grpc\n'), ((9450, 9706), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.CreateAgentSurvey'], {'request_deserializer': 'ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.FromString', 'response_serializer': 'ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.SerializeToString'}), '(servicer.CreateAgentSurvey,\n request_deserializer=ondewo_dot_survey_dot_survey__pb2.\n AgentSurveyRequest.FromString, response_serializer=\n ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.SerializeToString)\n', (9485, 9706), False, 'import grpc\n'), ((9802, 10058), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.UpdateAgentSurvey'], {'request_deserializer': 'ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.FromString', 'response_serializer': 'ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.SerializeToString'}), '(servicer.UpdateAgentSurvey,\n request_deserializer=ondewo_dot_survey_dot_survey__pb2.\n AgentSurveyRequest.FromString, response_serializer=\n ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.SerializeToString)\n', (9837, 10058), False, 'import grpc\n'), ((10154, 10397), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DeleteAgentSurvey'], {'request_deserializer': 'ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.FromString', 'response_serializer': 'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.DeleteAgentSurvey,\n request_deserializer=ondewo_dot_survey_dot_survey__pb2.\n AgentSurveyRequest.FromString, response_serializer=\n google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString)\n', (10189, 10397), False, 'import grpc\n'), ((11076, 11413), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/ondewo.survey.Surveys/CreateSurvey"""', 'ondewo_dot_survey_dot_survey__pb2.CreateSurveyRequest.SerializeToString', 'ondewo_dot_survey_dot_survey__pb2.Survey.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/ondewo.survey.Surveys/CreateSurvey',\n ondewo_dot_survey_dot_survey__pb2.CreateSurveyRequest.SerializeToString,\n ondewo_dot_survey_dot_survey__pb2.Survey.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (11105, 11413), False, 'import grpc\n'), ((11765, 12094), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/ondewo.survey.Surveys/GetSurvey"""', 'ondewo_dot_survey_dot_survey__pb2.GetSurveyRequest.SerializeToString', 'ondewo_dot_survey_dot_survey__pb2.Survey.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/ondewo.survey.Surveys/GetSurvey', 
ondewo_dot_survey_dot_survey__pb2.\n GetSurveyRequest.SerializeToString, ondewo_dot_survey_dot_survey__pb2.\n Survey.FromString, options, channel_credentials, insecure,\n call_credentials, compression, wait_for_ready, timeout, metadata)\n", (11794, 12094), False, 'import grpc\n'), ((12451, 12788), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/ondewo.survey.Surveys/UpdateSurvey"""', 'ondewo_dot_survey_dot_survey__pb2.UpdateSurveyRequest.SerializeToString', 'ondewo_dot_survey_dot_survey__pb2.Survey.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/ondewo.survey.Surveys/UpdateSurvey',\n ondewo_dot_survey_dot_survey__pb2.UpdateSurveyRequest.SerializeToString,\n ondewo_dot_survey_dot_survey__pb2.Survey.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (12480, 12788), False, 'import grpc\n'), ((13143, 13480), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/ondewo.survey.Surveys/DeleteSurvey"""', 'ondewo_dot_survey_dot_survey__pb2.DeleteSurveyRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/ondewo.survey.Surveys/DeleteSurvey',\n ondewo_dot_survey_dot_survey__pb2.DeleteSurveyRequest.SerializeToString,\n google_dot_protobuf_dot_empty__pb2.Empty.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (13172, 13480), False, 'import grpc\n'), ((13834, 14183), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/ondewo.survey.Surveys/ListSurveys"""', 'ondewo_dot_survey_dot_survey__pb2.ListSurveysRequest.SerializeToString', 'ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/ondewo.survey.Surveys/ListSurveys', ondewo_dot_survey_dot_survey__pb2\n .ListSurveysRequest.SerializeToString,\n ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.FromString,\n options, channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (13863, 14183), False, 'import grpc\n'), ((14541, 14903), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/ondewo.survey.Surveys/GetSurveyAnswers"""', 'ondewo_dot_survey_dot_survey__pb2.GetSurveyAnswersRequest.SerializeToString', 'ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/ondewo.survey.Surveys/GetSurveyAnswers',\n ondewo_dot_survey_dot_survey__pb2.GetSurveyAnswersRequest.\n SerializeToString, ondewo_dot_survey_dot_survey__pb2.\n SurveyAnswersResponse.FromString, options, channel_credentials,\n insecure, call_credentials, compression, wait_for_ready, timeout, metadata)\n", (14570, 14903), False, 'import grpc\n'), ((15263, 15631), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/ondewo.survey.Surveys/GetAllSurveyAnswers"""', 
'ondewo_dot_survey_dot_survey__pb2.GetAllSurveyAnswersRequest.SerializeToString', 'ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/ondewo.survey.Surveys/GetAllSurveyAnswers',\n ondewo_dot_survey_dot_survey__pb2.GetAllSurveyAnswersRequest.\n SerializeToString, ondewo_dot_survey_dot_survey__pb2.\n SurveyAnswersResponse.FromString, options, channel_credentials,\n insecure, call_credentials, compression, wait_for_ready, timeout, metadata)\n", (15292, 15631), False, 'import grpc\n'), ((15989, 16343), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/ondewo.survey.Surveys/CreateAgentSurvey"""', 'ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString', 'ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/ondewo.survey.Surveys/CreateAgentSurvey',\n ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,\n ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,\n options, channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (16018, 16343), False, 'import grpc\n'), ((16703, 17057), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/ondewo.survey.Surveys/UpdateAgentSurvey"""', 'ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString', 'ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/ondewo.survey.Surveys/UpdateAgentSurvey',\n ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,\n ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,\n options, channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (16732, 17057), False, 'import grpc\n'), ((17417, 17758), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/ondewo.survey.Surveys/DeleteAgentSurvey"""', 'ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/ondewo.survey.Surveys/DeleteAgentSurvey',\n ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,\n google_dot_protobuf_dot_empty__pb2.Empty.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (17446, 17758), False, 'import grpc\n')] |
from rpython.rlib.unroll import unrolling_iterable
dual_implementation_opcodes = [
'Add_Subtract_Multiply_Divide_Remainder',
'AggFinal',
'AggStep',
'Affinity',
'Cast',
'CollSeq',
'Compare',
'Copy',
'EndCoroutine',
'Function',
'Gosub',
'Goto',
'IdxLE_IdxGT_IdxLT_IdxGE',
'IdxRowid',
'IfPos',
'IfZero',
'If_IfNot',
'InitCoroutine',
'Integer',
'IsNull',
'Jump',
'MakeRecord',
'Move',
'MustBeInt',
'Ne_Eq_Gt_Le_Lt_Ge',
'Next',
'NextIfOpen',
'NotExists',
'NotNull',
'Null',
'Once',
'OpenRead_OpenWrite',
'Real',
'RealAffinity',
'ResultRow',
'Return',
'SCopy',
'Seek',
'SeekLT_SeekLE_SeekGE_SeekGT',
'Sequence',
'Variable',
'Yield',
]
unrolling_dual_implementation_opcodes = unrolling_iterable(dual_implementation_opcodes)
class OpcodeDefaults(object):
OpenRead_OpenWrite = False
Cast = False
OpcodeDefaults = OpcodeDefaults()
for op in dual_implementation_opcodes:
if not hasattr(OpcodeDefaults, op):
setattr(OpcodeDefaults, op, True)
class OpcodeStatus(object):
_immutable_fields_ = ["frozen", "use_flag_cache"] + dual_implementation_opcodes
def __init__(self, use_flag_cache):
self.use_flag_cache = use_flag_cache
self.frozen = False
for op in unrolling_dual_implementation_opcodes:
setattr(self, op, getattr(OpcodeDefaults, op))
def set_use_translated(self, op, value):
if self.frozen:
raise TypeError("too late to change")
if self.use_flag_cache:
raise TypeError("can't change if flag cache is used")
for whichop in unrolling_dual_implementation_opcodes:
if whichop == op:
setattr(self, whichop, value)
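                # Compare/Jump and AggStep/AggFinal are implemented as pairs,
                # so keep the flags of both opcodes of a pair in sync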
if whichop == "Compare":
self.Jump = value
elif whichop == "Jump":
self.Compare = value
elif whichop == "AggStep":
self.AggFinal = value
elif whichop == "AggFinal":
self.AggStep = value
def freeze(self):
if not self.frozen:
self.frozen = True
def disable_from_cmdline(self, s):
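        # s is a colon-separated list of opcode names to disable, or "all"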
if s == "all":
for op in unrolling_dual_implementation_opcodes:
setattr(self, op, False)
return
specs = s.split(":")
for spec in specs:
if spec:
self.set_use_translated(spec, False)
| [
"rpython.rlib.unroll.unrolling_iterable"
] | [((839, 886), 'rpython.rlib.unroll.unrolling_iterable', 'unrolling_iterable', (['dual_implementation_opcodes'], {}), '(dual_implementation_opcodes)\n', (857, 886), False, 'from rpython.rlib.unroll import unrolling_iterable\n')] |
import os
import datetime
import psycopg2
import numpy as np
import pandas as pd
#import statsmodels.api as sm
from statsmodels.genmod.generalized_linear_model import GLM
from statsmodels.genmod.families import Binomial
from statsmodels.genmod.families.links import probit
DATABASE_URL = os.environ['DATABASE_URL']
conn = psycopg2.connect(DATABASE_URL, sslmode='require')
cur = conn.cursor() # cursor needed to perform db ops
cur.execute("SELECT * FROM Iris;")
iris_df = pd.DataFrame(cur.fetchall())
X = np.array(iris_df.iloc[:, 0:4])
y = np.array(iris_df.iloc[:, 4] == 'virginica', dtype=int)
weight = np.ones(150)
probit_link = probit()
bin_family = Binomial(probit_link)
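# probit regression: P(virginica | X) is modeled as Phi(X @ theta) via a Binomial GLM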
my_glm = GLM(y, X, freq_weights = weight, family = bin_family)
my_glm_fit = my_glm.fit()
theta = my_glm_fit.params
current_dt = datetime.datetime.now()
cur.execute("INSERT INTO scores VALUES (%s, %s, %s, %s, %s)",
(str(current_dt), theta[0], theta[1], theta[2], theta[3]))
conn.commit()
cur.close()
conn.close()
| [
"statsmodels.genmod.generalized_linear_model.GLM",
"statsmodels.genmod.families.links.probit",
"numpy.ones",
"numpy.array",
"statsmodels.genmod.families.Binomial",
"datetime.datetime.now",
"psycopg2.connect"
] | [((327, 376), 'psycopg2.connect', 'psycopg2.connect', (['DATABASE_URL'], {'sslmode': '"""require"""'}), "(DATABASE_URL, sslmode='require')\n", (343, 376), False, 'import psycopg2\n'), ((511, 541), 'numpy.array', 'np.array', (['iris_df.iloc[:, 0:4]'], {}), '(iris_df.iloc[:, 0:4])\n', (519, 541), True, 'import numpy as np\n'), ((546, 600), 'numpy.array', 'np.array', (["(iris_df.iloc[:, 4] == 'virginica')"], {'dtype': 'int'}), "(iris_df.iloc[:, 4] == 'virginica', dtype=int)\n", (554, 600), True, 'import numpy as np\n'), ((611, 623), 'numpy.ones', 'np.ones', (['(150)'], {}), '(150)\n', (618, 623), True, 'import numpy as np\n'), ((640, 648), 'statsmodels.genmod.families.links.probit', 'probit', ([], {}), '()\n', (646, 648), False, 'from statsmodels.genmod.families.links import probit\n'), ((662, 683), 'statsmodels.genmod.families.Binomial', 'Binomial', (['probit_link'], {}), '(probit_link)\n', (670, 683), False, 'from statsmodels.genmod.families import Binomial\n'), ((693, 742), 'statsmodels.genmod.generalized_linear_model.GLM', 'GLM', (['y', 'X'], {'freq_weights': 'weight', 'family': 'bin_family'}), '(y, X, freq_weights=weight, family=bin_family)\n', (696, 742), False, 'from statsmodels.genmod.generalized_linear_model import GLM\n'), ((813, 836), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (834, 836), False, 'import datetime\n')] |
import numpy
from sympy import Rational as frac
from sympy import pi, sqrt
from ..helpers import article, fsd, pm, pm_roll, untangle
from ._helpers import E3rScheme
citation = article(
authors=["<NAME>", "<NAME>"],
title="Approximate integration formulas for certain spherically symmetric regions",
journal="Math. Comp.",
volume="17",
year="1963",
pages="105-135",
url="https://doi.org/10.1090/S0025-5718-1963-0161473-0",
)
def stroud_secrest_07():
nu, xi = [sqrt(15 - p_m * 3 * sqrt(5)) for p_m in [+1, -1]]
A = frac(3, 5)
B = frac(1, 30)
data = [(A, numpy.array([[0, 0, 0]])), (B, pm_roll(3, [xi, nu]))]
points, weights = untangle(data)
weights *= 8 * pi
return E3rScheme("Stroud-Secrest VII", weights, points, 5, citation)
def stroud_secrest_08():
nu = sqrt(30)
eta = sqrt(10)
A = frac(3, 5)
B = frac(2, 75)
C = frac(3, 100)
data = [(A, numpy.array([[0, 0, 0]])), (B, fsd(3, (nu, 1))), (C, pm(3, eta))]
points, weights = untangle(data)
weights *= 8 * pi
return E3rScheme("Stroud-Secrest VIII", weights, points, 5, citation)
def stroud_secrest_09():
eta = sqrt(10)
xi, nu = [sqrt(15 - p_m * 5 * sqrt(5)) for p_m in [+1, -1]]
A = frac(3, 5)
B = frac(1, 50)
data = [(A, numpy.array([[0, 0, 0]])), (B, pm(3, eta)), (B, pm_roll(3, [xi, nu]))]
points, weights = untangle(data)
weights *= 8 * pi
return E3rScheme("Stroud-Secrest IX", weights, points, 5, citation)
def stroud_secrest_10():
sqrt130 = sqrt(130)
nu = sqrt((720 - 24 * sqrt130) / 11)
xi = sqrt(288 + 24 * sqrt130)
eta = sqrt((-216 + 24 * sqrt130) / 7)
A = (5175 - 13 * sqrt130) / 8820
B = (3870 + 283 * sqrt130) / 493920
C = (3204 - 281 * sqrt130) / 197568
# ERR in Stroud's book: 917568 vs. 197568
D = (4239 + 373 * sqrt130) / 197568
data = [
(A, numpy.array([[0, 0, 0]])),
(B, fsd(3, (nu, 1))),
(C, fsd(3, (xi, 2))),
(D, pm(3, eta)),
]
points, weights = untangle(data)
weights *= 8 * pi
return E3rScheme("Stroud-Secrest X", weights, points, 7, citation)
def stroud_secrest_11():
sqrt5 = sqrt(5)
sqrt39 = sqrt(39)
sqrt195 = sqrt(195)
nu, xi = [
sqrt(-50 + p_m * 10 * sqrt5 + 10 * sqrt39 - p_m * 2 * sqrt195)
for p_m in [+1, -1]
]
eta = sqrt(36 + 4 * sqrt39)
mu, lmbda = [
sqrt(54 + p_m * 18 * sqrt5 + 6 * sqrt39 + p_m * 2 * sqrt195) for p_m in [+1, -1]
]
A = (1725 - 26 * sqrt39) / 2940
B = (1065 + 171 * sqrt39) / 54880
C = (297 - 47 * sqrt39) / 32928
data = [
(A, numpy.array([[0, 0, 0]])),
(B, pm_roll(3, [xi, nu])),
(C, pm(3, eta)),
(C, pm_roll(3, [lmbda, mu])),
]
points, weights = untangle(data)
weights *= 8 * pi
return E3rScheme("Stroud-Secrest XI", weights, points, 7, citation)
| [
"numpy.array",
"sympy.sqrt",
"sympy.Rational"
] | [((553, 563), 'sympy.Rational', 'frac', (['(3)', '(5)'], {}), '(3, 5)\n', (557, 563), True, 'from sympy import Rational as frac\n'), ((572, 583), 'sympy.Rational', 'frac', (['(1)', '(30)'], {}), '(1, 30)\n', (576, 583), True, 'from sympy import Rational as frac\n'), ((824, 832), 'sympy.sqrt', 'sqrt', (['(30)'], {}), '(30)\n', (828, 832), False, 'from sympy import pi, sqrt\n'), ((843, 851), 'sympy.sqrt', 'sqrt', (['(10)'], {}), '(10)\n', (847, 851), False, 'from sympy import pi, sqrt\n'), ((860, 870), 'sympy.Rational', 'frac', (['(3)', '(5)'], {}), '(3, 5)\n', (864, 870), True, 'from sympy import Rational as frac\n'), ((879, 890), 'sympy.Rational', 'frac', (['(2)', '(75)'], {}), '(2, 75)\n', (883, 890), True, 'from sympy import Rational as frac\n'), ((899, 911), 'sympy.Rational', 'frac', (['(3)', '(100)'], {}), '(3, 100)\n', (903, 911), True, 'from sympy import Rational as frac\n'), ((1165, 1173), 'sympy.sqrt', 'sqrt', (['(10)'], {}), '(10)\n', (1169, 1173), False, 'from sympy import pi, sqrt\n'), ((1246, 1256), 'sympy.Rational', 'frac', (['(3)', '(5)'], {}), '(3, 5)\n', (1250, 1256), True, 'from sympy import Rational as frac\n'), ((1265, 1276), 'sympy.Rational', 'frac', (['(1)', '(50)'], {}), '(1, 50)\n', (1269, 1276), True, 'from sympy import Rational as frac\n'), ((1537, 1546), 'sympy.sqrt', 'sqrt', (['(130)'], {}), '(130)\n', (1541, 1546), False, 'from sympy import pi, sqrt\n'), ((1557, 1588), 'sympy.sqrt', 'sqrt', (['((720 - 24 * sqrt130) / 11)'], {}), '((720 - 24 * sqrt130) / 11)\n', (1561, 1588), False, 'from sympy import pi, sqrt\n'), ((1598, 1622), 'sympy.sqrt', 'sqrt', (['(288 + 24 * sqrt130)'], {}), '(288 + 24 * sqrt130)\n', (1602, 1622), False, 'from sympy import pi, sqrt\n'), ((1633, 1664), 'sympy.sqrt', 'sqrt', (['((-216 + 24 * sqrt130) / 7)'], {}), '((-216 + 24 * sqrt130) / 7)\n', (1637, 1664), False, 'from sympy import pi, sqrt\n'), ((2183, 2190), 'sympy.sqrt', 'sqrt', (['(5)'], {}), '(5)\n', (2187, 2190), False, 'from sympy import pi, sqrt\n'), ((2204, 2212), 'sympy.sqrt', 'sqrt', (['(39)'], {}), '(39)\n', (2208, 2212), False, 'from sympy import pi, sqrt\n'), ((2227, 2236), 'sympy.sqrt', 'sqrt', (['(195)'], {}), '(195)\n', (2231, 2236), False, 'from sympy import pi, sqrt\n'), ((2368, 2389), 'sympy.sqrt', 'sqrt', (['(36 + 4 * sqrt39)'], {}), '(36 + 4 * sqrt39)\n', (2372, 2389), False, 'from sympy import pi, sqrt\n'), ((2261, 2323), 'sympy.sqrt', 'sqrt', (['(-50 + p_m * 10 * sqrt5 + 10 * sqrt39 - p_m * 2 * sqrt195)'], {}), '(-50 + p_m * 10 * sqrt5 + 10 * sqrt39 - p_m * 2 * sqrt195)\n', (2265, 2323), False, 'from sympy import pi, sqrt\n'), ((2416, 2476), 'sympy.sqrt', 'sqrt', (['(54 + p_m * 18 * sqrt5 + 6 * sqrt39 + p_m * 2 * sqrt195)'], {}), '(54 + p_m * 18 * sqrt5 + 6 * sqrt39 + p_m * 2 * sqrt195)\n', (2420, 2476), False, 'from sympy import pi, sqrt\n'), ((601, 625), 'numpy.array', 'numpy.array', (['[[0, 0, 0]]'], {}), '([[0, 0, 0]])\n', (612, 625), False, 'import numpy\n'), ((929, 953), 'numpy.array', 'numpy.array', (['[[0, 0, 0]]'], {}), '([[0, 0, 0]])\n', (940, 953), False, 'import numpy\n'), ((1294, 1318), 'numpy.array', 'numpy.array', (['[[0, 0, 0]]'], {}), '([[0, 0, 0]])\n', (1305, 1318), False, 'import numpy\n'), ((1894, 1918), 'numpy.array', 'numpy.array', (['[[0, 0, 0]]'], {}), '([[0, 0, 0]])\n', (1905, 1918), False, 'import numpy\n'), ((2639, 2663), 'numpy.array', 'numpy.array', (['[[0, 0, 0]]'], {}), '([[0, 0, 0]])\n', (2650, 2663), False, 'import numpy\n'), ((515, 522), 'sympy.sqrt', 'sqrt', (['(5)'], {}), '(5)\n', (519, 522), False, 'from sympy import pi, 
sqrt\n'), ((1208, 1215), 'sympy.sqrt', 'sqrt', (['(5)'], {}), '(5)\n', (1212, 1215), False, 'from sympy import pi, sqrt\n')] |
# V0
# V1
# https://blog.csdn.net/coder_orz/article/details/51317748
# IDEA : "unly number" : a number is an ugly number
# if all its prime factors are within [2, 3, 5].
# e.g. 6, 8 are ugly number ; while 14 is not
# please note that 1 is ugly number as well
# IDEA : ITERATION
class Solution(object):
def isUgly(self, num):
"""
:type num: int
:rtype: bool
"""
if num <= 0:
return False
for i in [2, 3, 5]:
while num%i == 0:
num = num / i
return True if num == 1 else False
# V1'
# https://blog.csdn.net/coder_orz/article/details/51317748
# IDEA : RECURSION
class Solution(object):
def isUgly(self, num):
"""
:type num: int
:rtype: bool
"""
if num <= 0:
return False
if num == 1:
return True
if num % 2 == 0:
return self.isUgly(num/2)
elif num % 3 == 0:
return self.isUgly(num/3)
elif num % 5 == 0:
return self.isUgly(num/5)
else:
return False
# V1''
# https://blog.csdn.net/coder_orz/article/details/51317748
class Solution(object):
def isUgly(self, num):
"""
:type num: int
:rtype: bool
"""
return num > 0 == 30**30 % num
# V2
# Time: O(n)
# Space: O(n)
import heapq
class Solution(object):
# @param {integer} n
# @return {integer}
def nthUglyNumber(self, n):
ugly_number = 0
heap = []
heapq.heappush(heap, 1)
for _ in range(n):
ugly_number = heapq.heappop(heap)
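            # de-duplication: push u*p only for primes p not exceeding u's smallest
            # factor among {2, 3, 5}, so each ugly number enters the heap exactly once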
if ugly_number % 2 == 0:
heapq.heappush(heap, ugly_number * 2)
elif ugly_number % 3 == 0:
heapq.heappush(heap, ugly_number * 2)
heapq.heappush(heap, ugly_number * 3)
else:
heapq.heappush(heap, ugly_number * 2)
heapq.heappush(heap, ugly_number * 3)
heapq.heappush(heap, ugly_number * 5)
return ugly_number
def nthUglyNumber2(self, n):
ugly = [1]
i2 = i3 = i5 = 0
while len(ugly) < n:
while ugly[i2] * 2 <= ugly[-1]: i2 += 1
while ugly[i3] * 3 <= ugly[-1]: i3 += 1
while ugly[i5] * 5 <= ugly[-1]: i5 += 1
ugly.append(min(ugly[i2] * 2, ugly[i3] * 3, ugly[i5] * 5))
return ugly[-1]
def nthUglyNumber3(self, n):
q2, q3, q5 = [2], [3], [5]
ugly = 1
for u in heapq.merge(q2, q3, q5):
if n == 1:
return ugly
if u > ugly:
ugly = u
n -= 1
q2 += 2 * u,
q3 += 3 * u,
q5 += 5 * u,
class Solution2(object):
ugly = sorted(2**a * 3**b * 5**c
for a in range(32) for b in range(20) for c in range(14))
def nthUglyNumber(self, n):
return self.ugly[n-1]
| [
"heapq.merge",
"heapq.heappush",
"heapq.heappop"
] | [((1545, 1568), 'heapq.heappush', 'heapq.heappush', (['heap', '(1)'], {}), '(heap, 1)\n', (1559, 1568), False, 'import heapq\n'), ((2549, 2572), 'heapq.merge', 'heapq.merge', (['q2', 'q3', 'q5'], {}), '(q2, q3, q5)\n', (2560, 2572), False, 'import heapq\n'), ((1622, 1641), 'heapq.heappop', 'heapq.heappop', (['heap'], {}), '(heap)\n', (1635, 1641), False, 'import heapq\n'), ((1695, 1732), 'heapq.heappush', 'heapq.heappush', (['heap', '(ugly_number * 2)'], {}), '(heap, ugly_number * 2)\n', (1709, 1732), False, 'import heapq\n'), ((1788, 1825), 'heapq.heappush', 'heapq.heappush', (['heap', '(ugly_number * 2)'], {}), '(heap, ugly_number * 2)\n', (1802, 1825), False, 'import heapq\n'), ((1842, 1879), 'heapq.heappush', 'heapq.heappush', (['heap', '(ugly_number * 3)'], {}), '(heap, ugly_number * 3)\n', (1856, 1879), False, 'import heapq\n'), ((1914, 1951), 'heapq.heappush', 'heapq.heappush', (['heap', '(ugly_number * 2)'], {}), '(heap, ugly_number * 2)\n', (1928, 1951), False, 'import heapq\n'), ((1968, 2005), 'heapq.heappush', 'heapq.heappush', (['heap', '(ugly_number * 3)'], {}), '(heap, ugly_number * 3)\n', (1982, 2005), False, 'import heapq\n'), ((2022, 2059), 'heapq.heappush', 'heapq.heappush', (['heap', '(ugly_number * 5)'], {}), '(heap, ugly_number * 5)\n', (2036, 2059), False, 'import heapq\n')] |
import boto3
from boto3.dynamodb.types import TypeDeserializer, TypeSerializer
import json
import logging
DYNAMODB_TABLE_NAME = "quizzes_questions"
def setup_logging():
""" Basic logging setup """
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO)
return logging
def load_questions():
with open("questions.json", "r") as f:
questions = json.loads(f.read())
return questions
def check_limitations(question):
if not "explanation" in question:
raise KeyError(f"explanation key not found in question id: {question['question_id']}")
elif not "question" in question:
raise KeyError(f"question key not found in question id: {question['question_id']}")
elif not "options" in question:
raise KeyError(f"options key not found in question id: {question['question_id']}")
elif not "correct_option" in question:
raise KeyError(f"correct_option key not found in question id: {question['question_id']}")
if len(question["explanation"]) > 200:
raise ValueError("explanation value is greater than 200 chars")
if len(question["question"]) > 255:
raise ValueError("question value is greater than 255 chars")
if len(question["options"]) > 10:
raise ValueError("options array is greater than 10")
for option in question["options"]:
if len(option) > 100:
raise ValueError(f"option: {option} is grater than 100 chars")
def serialize(question, type_serializer = TypeSerializer()):
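    # TypeSerializer converts plain Python values into DynamoDB's typed
    # attribute format (e.g. a string value becomes {'S': value})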
question = {k: type_serializer.serialize(v) for k,v in question.items()}
return question
def upload_to_dynamo(client, question):
raw_question = serialize(question)
client.put_item(
TableName=DYNAMODB_TABLE_NAME,
Item=raw_question
)
def main():
client_dynamo = boto3.client('dynamodb')
logger = setup_logging()
logger.info("loadding questions from questions.json")
questions = load_questions()
logger.info("start processing questions")
for question in questions:
logger.info(f"check limitations for question id: {question['question_id']} ")
check_limitations(question)
logger.info(f"Limitation check pass, start uploading to dynamodb")
upload_to_dynamo(client_dynamo, question)
if __name__ == "__main__":
main() | [
"boto3.dynamodb.types.TypeSerializer",
"logging.basicConfig",
"boto3.client"
] | [((208, 315), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""', 'level': 'logging.INFO'}), "(format=\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)\n", (227, 315), False, 'import logging\n'), ((1554, 1570), 'boto3.dynamodb.types.TypeSerializer', 'TypeSerializer', ([], {}), '()\n', (1568, 1570), False, 'from boto3.dynamodb.types import TypeDeserializer, TypeSerializer\n'), ((1875, 1899), 'boto3.client', 'boto3.client', (['"""dynamodb"""'], {}), "('dynamodb')\n", (1887, 1899), False, 'import boto3\n')] |
import pkgutil
import xml.etree.ElementTree
import docutils.core
def load_pairs():
# Load pairs of "example ID, rules code" for the test suite.
rst_code = _load_rst()
xml_code = docutils.core.publish_string(rst_code, writer_name='xml')
tree = xml.etree.ElementTree.fromstring(xml_code)
parsed = []
for section in tree.findall('./section'):
slug = section.get('ids').replace('-', '_')
for i, block in enumerate(section.findall('./literal_block'), start=1):
parsed.append(('%s_%d' % (slug, i), block.text))
return parsed
def load_html(initial_header_level):
# Render an HTML fragment ready for inclusion into a page.
rst_code = _load_rst()
parts = docutils.core.publish_parts(
rst_code, writer_name='html',
settings_overrides={'initial_header_level': initial_header_level})
return parts['fragment']
def _load_rst():
return pkgutil.get_data('turq', 'examples.rst')
| [
"pkgutil.get_data"
] | [((920, 960), 'pkgutil.get_data', 'pkgutil.get_data', (['"""turq"""', '"""examples.rst"""'], {}), "('turq', 'examples.rst')\n", (936, 960), False, 'import pkgutil\n')] |
# Generated by Django 3.2.7 on 2021-09-11 13:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Thread',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=128, verbose_name='标题')),
('pub_date', models.DateTimeField(auto_now_add=True, verbose_name='发布日期')),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='用户')),
],
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('pub_date', models.DateTimeField(auto_now_add=True, verbose_name='发布日期')),
('content', models.TextField(verbose_name='内容')),
('upvotes', models.IntegerField(default=0)),
('downvotes', models.IntegerField(default=0)),
('relpy_to', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='forum.post', verbose_name='回复')),
('thread', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='forum.thread')),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='用户')),
],
),
]
| [
"django.db.models.TextField",
"django.db.migrations.swappable_dependency",
"django.db.models.BigAutoField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] | [((247, 304), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (278, 304), False, 'from django.db import migrations, models\n'), ((435, 531), 'django.db.models.BigAutoField', 'models.BigAutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (454, 531), False, 'from django.db import migrations, models\n'), ((556, 607), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'verbose_name': '"""标题"""'}), "(max_length=128, verbose_name='标题')\n", (572, 607), False, 'from django.db import migrations, models\n'), ((639, 699), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""发布日期"""'}), "(auto_now_add=True, verbose_name='发布日期')\n", (659, 699), False, 'from django.db import migrations, models\n'), ((727, 853), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""用户"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to=settings.AUTH_USER_MODEL, verbose_name='用户')\n", (744, 853), False, 'from django.db import migrations, models\n'), ((979, 1075), 'django.db.models.BigAutoField', 'models.BigAutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (998, 1075), False, 'from django.db import migrations, models\n'), ((1103, 1163), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""发布日期"""'}), "(auto_now_add=True, verbose_name='发布日期')\n", (1123, 1163), False, 'from django.db import migrations, models\n'), ((1194, 1229), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""内容"""'}), "(verbose_name='内容')\n", (1210, 1229), False, 'from django.db import migrations, models\n'), ((1260, 1290), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1279, 1290), False, 'from django.db import migrations, models\n'), ((1323, 1353), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1342, 1353), False, 'from django.db import migrations, models\n'), ((1385, 1512), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""forum.post"""', 'verbose_name': '"""回复"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.SET_NULL, to='forum.post', verbose_name='回复')\n", (1402, 1512), False, 'from django.db import migrations, models\n'), ((1537, 1623), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""forum.thread"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'forum.thread')\n", (1554, 1623), False, 'from django.db import migrations, models\n'), ((1646, 1772), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""用户"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to=settings.AUTH_USER_MODEL, 
verbose_name='用户')\n", (1663, 1772), False, 'from django.db import migrations, models\n')] |
# First make sure "Terminal: Execute In File Dir" is checked in VSCode Settings
# Copy hello_world.txt to hello_world_bak.txt in the current folder
src = r'hello_world.txt'
dst = r'hello_world_bak.txt'
import shutil
shutil.copyfile(src, dst) | [
"shutil.copyfile"
] | [((175, 200), 'shutil.copyfile', 'shutil.copyfile', (['src', 'dst'], {}), '(src, dst)\n', (190, 200), False, 'import shutil\n')] |
class FeatureImportance(object):
def __init__(self, md, test_x, test_z):
self._skater_model, self._skater_interpreter = _create_skater_stuff(md, test_x, test_z)
def save_plot_feature_importance(self, file_path):
fig, ax = self._skater_interpreter.feature_importance.plot_feature_importance(
self._skater_model,
ascending=True,
ax=None,
progressbar=False,
# model-scoring: difference in log_loss or MAE of training_labels
# given perturbations. Note this vary rarely makes any significant
# differences
method='model-scoring')
# corss entropy or f1 ('f1', 'cross_entropy')
#scorer_type='cross_entropy') # type: Figure, axes
#scorer_type='f1') # type: Figure, axes
import matplotlib.pyplot as plt
plt.tight_layout()
fig.savefig(file_path, dpi=fig.dpi)
plt.close(fig)
def _create_skater_stuff(mdl, test_x, test_z):
from skater.model import InMemoryModel
from skater.core.explanations import Interpretation
from hassbrain_algorithm.benchmark.interpretation import ModelWrapper
from hassbrain_algorithm.benchmark.interpretation import _boolean2str
wrapped_model = ModelWrapper(mdl)
class_names = mdl.get_state_lbl_lst()
feature_names = mdl.get_obs_lbl_lst()
# this has to be done in order for skater to recognize the values as categorical and not numerical
test_x = _boolean2str(test_x)
# create interpretation
interpreter = Interpretation(test_x,
#class_names=class_names,
feature_names=feature_names)
# create model
# supports classifiers with or without probability scores
examples = test_x[:10]
skater_model = InMemoryModel(wrapped_model.predict,
#target_names=class_names,
feature_names=feature_names,
model_type='classifier',
unique_values=class_names,
probability=False,
examples=examples)
interpreter.load_data(test_x,
training_labels=test_z,
feature_names=feature_names)
    # todo: flag for deletion (3 lines below)
    # if this can safely be deleted
tmp = interpreter.data_set.feature_info
for key, val in tmp.items():
val['numeric'] = False
return skater_model, interpreter
| [
"skater.core.explanations.Interpretation",
"hassbrain_algorithm.benchmark.interpretation._boolean2str",
"matplotlib.pyplot.close",
"hassbrain_algorithm.benchmark.interpretation.ModelWrapper",
"matplotlib.pyplot.tight_layout",
"skater.model.InMemoryModel"
] | [((1279, 1296), 'hassbrain_algorithm.benchmark.interpretation.ModelWrapper', 'ModelWrapper', (['mdl'], {}), '(mdl)\n', (1291, 1296), False, 'from hassbrain_algorithm.benchmark.interpretation import ModelWrapper\n'), ((1514, 1534), 'hassbrain_algorithm.benchmark.interpretation._boolean2str', '_boolean2str', (['test_x'], {}), '(test_x)\n', (1526, 1534), False, 'from hassbrain_algorithm.benchmark.interpretation import _boolean2str\n'), ((1590, 1641), 'skater.core.explanations.Interpretation', 'Interpretation', (['test_x'], {'feature_names': 'feature_names'}), '(test_x, feature_names=feature_names)\n', (1604, 1641), False, 'from skater.core.explanations import Interpretation\n'), ((1886, 2049), 'skater.model.InMemoryModel', 'InMemoryModel', (['wrapped_model.predict'], {'feature_names': 'feature_names', 'model_type': '"""classifier"""', 'unique_values': 'class_names', 'probability': '(False)', 'examples': 'examples'}), "(wrapped_model.predict, feature_names=feature_names,\n model_type='classifier', unique_values=class_names, probability=False,\n examples=examples)\n", (1899, 2049), False, 'from skater.model import InMemoryModel\n'), ((856, 874), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (872, 874), True, 'import matplotlib.pyplot as plt\n'), ((927, 941), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (936, 941), True, 'import matplotlib.pyplot as plt\n')] |
import pygame
import random
from pygame.math import Vector2
#from .config import xSize, ySize, cell_size, cell_number
from .loc_conf import xSize, ySize, cell_number, cell_size
class NonEatable():
def __init__(self, screen, ip1,ip2,ip3,ip4):
        # Load texture
self._load_texture(ip1,ip2,ip3,ip4)
        # Random coordinates for the food/barrier cell
xPos1 = random.randint(0,cell_number - 2)
yPos1 = random.randint(0,cell_number - 2)
while(not self.is_start_pos_ok(xPos1,yPos1)):
xPos1 = random.randint(0,cell_number - 2)
yPos1 = random.randint(0,cell_number - 2)
xPos2 = xPos1
yPos2 = yPos1 + 1
xPos3 = xPos1 + 1
yPos3 = yPos1
xPos4 = xPos1 + 1
yPos4 = yPos1 + 1
self.lis = [Vector2(xPos1,yPos1),Vector2(xPos2,yPos2),Vector2(xPos3,yPos3),Vector2(xPos4,yPos4)]
self.pyScreen = screen
def is_start_pos_ok(self,xPos1,yPos1):
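        # Note: the three cells rejected below appear to be the snake's
        # hard-coded start position, so the barrier must not spawn on them.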
if(xPos1 == 6 and yPos1 == 10):
return False
if(xPos1 == 7 and yPos1 == 10):
return False
if(xPos1 == 8 and yPos1 == 10):
return False
return True
def _load_texture(self, ip1,ip2,ip3,ip4):
        ''' Load the textures '''
self.ft1 = pygame.image.load(ip1).convert_alpha()
self.ft2 = pygame.image.load(ip2).convert_alpha()
self.ft3 = pygame.image.load(ip3).convert_alpha()
self.ft4 = pygame.image.load(ip4).convert_alpha()
def draw_barrier(self):
food_obj1 = pygame.Rect(int(self.lis[0].x*cell_size),int(self.lis[0].y*cell_size),cell_size,cell_size)
food_obj2 = pygame.Rect(int(self.lis[1].x*cell_size),int(self.lis[1].y*cell_size),cell_size,cell_size)
food_obj3 = pygame.Rect(int(self.lis[2].x*cell_size),int(self.lis[2].y*cell_size),cell_size,cell_size)
food_obj4 = pygame.Rect(int(self.lis[3].x*cell_size),int(self.lis[3].y*cell_size),cell_size,cell_size)
self.pyScreen.blit(self.ft1, food_obj1)
self.pyScreen.blit(self.ft2, food_obj2)
self.pyScreen.blit(self.ft3, food_obj3)
self.pyScreen.blit(self.ft4, food_obj4)
def change_position(self):
xPos1 = random.randint(0,cell_number - 2)
yPos1 = random.randint(0,cell_number - 2)
self.lis = [Vector2(xPos1,yPos1),Vector2(xPos1,yPos1+1),Vector2(xPos1+1,yPos1),Vector2(xPos1+1,yPos1+1)]
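
# Minimal usage sketch (hypothetical texture paths and surrounding game loop),
# assuming a pygame display surface has already been created elsewhere:
#
#   barrier = NonEatable(screen, "b1.png", "b2.png", "b3.png", "b4.png")
#   barrier.draw_barrier()       # blit the 2x2 barrier onto the screen each frame
#   barrier.change_position()    # move it to a new random cell when needed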
| [
"pygame.image.load",
"pygame.math.Vector2",
"random.randint"
] | [((393, 427), 'random.randint', 'random.randint', (['(0)', '(cell_number - 2)'], {}), '(0, cell_number - 2)\n', (407, 427), False, 'import random\n'), ((443, 477), 'random.randint', 'random.randint', (['(0)', '(cell_number - 2)'], {}), '(0, cell_number - 2)\n', (457, 477), False, 'import random\n'), ((2239, 2273), 'random.randint', 'random.randint', (['(0)', '(cell_number - 2)'], {}), '(0, cell_number - 2)\n', (2253, 2273), False, 'import random\n'), ((2289, 2323), 'random.randint', 'random.randint', (['(0)', '(cell_number - 2)'], {}), '(0, cell_number - 2)\n', (2303, 2323), False, 'import random\n'), ((552, 586), 'random.randint', 'random.randint', (['(0)', '(cell_number - 2)'], {}), '(0, cell_number - 2)\n', (566, 586), False, 'import random\n'), ((606, 640), 'random.randint', 'random.randint', (['(0)', '(cell_number - 2)'], {}), '(0, cell_number - 2)\n', (620, 640), False, 'import random\n'), ((825, 846), 'pygame.math.Vector2', 'Vector2', (['xPos1', 'yPos1'], {}), '(xPos1, yPos1)\n', (832, 846), False, 'from pygame.math import Vector2\n'), ((846, 867), 'pygame.math.Vector2', 'Vector2', (['xPos2', 'yPos2'], {}), '(xPos2, yPos2)\n', (853, 867), False, 'from pygame.math import Vector2\n'), ((867, 888), 'pygame.math.Vector2', 'Vector2', (['xPos3', 'yPos3'], {}), '(xPos3, yPos3)\n', (874, 888), False, 'from pygame.math import Vector2\n'), ((888, 909), 'pygame.math.Vector2', 'Vector2', (['xPos4', 'yPos4'], {}), '(xPos4, yPos4)\n', (895, 909), False, 'from pygame.math import Vector2\n'), ((2343, 2364), 'pygame.math.Vector2', 'Vector2', (['xPos1', 'yPos1'], {}), '(xPos1, yPos1)\n', (2350, 2364), False, 'from pygame.math import Vector2\n'), ((2364, 2389), 'pygame.math.Vector2', 'Vector2', (['xPos1', '(yPos1 + 1)'], {}), '(xPos1, yPos1 + 1)\n', (2371, 2389), False, 'from pygame.math import Vector2\n'), ((2387, 2412), 'pygame.math.Vector2', 'Vector2', (['(xPos1 + 1)', 'yPos1'], {}), '(xPos1 + 1, yPos1)\n', (2394, 2412), False, 'from pygame.math import Vector2\n'), ((2410, 2439), 'pygame.math.Vector2', 'Vector2', (['(xPos1 + 1)', '(yPos1 + 1)'], {}), '(xPos1 + 1, yPos1 + 1)\n', (2417, 2439), False, 'from pygame.math import Vector2\n'), ((1310, 1332), 'pygame.image.load', 'pygame.image.load', (['ip1'], {}), '(ip1)\n', (1327, 1332), False, 'import pygame\n'), ((1368, 1390), 'pygame.image.load', 'pygame.image.load', (['ip2'], {}), '(ip2)\n', (1385, 1390), False, 'import pygame\n'), ((1426, 1448), 'pygame.image.load', 'pygame.image.load', (['ip3'], {}), '(ip3)\n', (1443, 1448), False, 'import pygame\n'), ((1484, 1506), 'pygame.image.load', 'pygame.image.load', (['ip4'], {}), '(ip4)\n', (1501, 1506), False, 'import pygame\n')] |
import pyautogui
from time import sleep
from random import choice
sleep(3)
names = open("names.txt","r").readlines()
names = [i[:-1] for i in names]
passwords = open("pass.txt",'r').readlines()
passwords = [i[:-1] for i in passwords if passwords.index(i) % 2 == 0]
for i in range(100):
print("hehe :) ")
pyautogui.click()
nametotake = names[i+100]
passtotake = choice(passwords)
if len(passtotake) < 8:
passtotake = nametotake[:nametotake.index('.')] +passtotake
pyautogui.write(nametotake)
pyautogui.press("TAB")
pyautogui.write(passtotake)
pyautogui.press("ENTER")
sleep(1)
with pyautogui.hold('alt'):
pyautogui.press('left')
print(nametotake)
print(passtotake)
print("Done\n\n")
sleep(6) | [
"pyautogui.write",
"pyautogui.press",
"random.choice",
"time.sleep",
"pyautogui.hold",
"pyautogui.click"
] | [((66, 74), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (71, 74), False, 'from time import sleep\n'), ((314, 331), 'pyautogui.click', 'pyautogui.click', ([], {}), '()\n', (329, 331), False, 'import pyautogui\n'), ((379, 396), 'random.choice', 'choice', (['passwords'], {}), '(passwords)\n', (385, 396), False, 'from random import choice\n'), ((497, 524), 'pyautogui.write', 'pyautogui.write', (['nametotake'], {}), '(nametotake)\n', (512, 524), False, 'import pyautogui\n'), ((529, 551), 'pyautogui.press', 'pyautogui.press', (['"""TAB"""'], {}), "('TAB')\n", (544, 551), False, 'import pyautogui\n'), ((556, 583), 'pyautogui.write', 'pyautogui.write', (['passtotake'], {}), '(passtotake)\n', (571, 583), False, 'import pyautogui\n'), ((588, 612), 'pyautogui.press', 'pyautogui.press', (['"""ENTER"""'], {}), "('ENTER')\n", (603, 612), False, 'import pyautogui\n'), ((617, 625), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (622, 625), False, 'from time import sleep\n'), ((761, 769), 'time.sleep', 'sleep', (['(6)'], {}), '(6)\n', (766, 769), False, 'from time import sleep\n'), ((635, 656), 'pyautogui.hold', 'pyautogui.hold', (['"""alt"""'], {}), "('alt')\n", (649, 656), False, 'import pyautogui\n'), ((666, 689), 'pyautogui.press', 'pyautogui.press', (['"""left"""'], {}), "('left')\n", (681, 689), False, 'import pyautogui\n')] |
# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is the FLAdmin Client to send the request message to the admin server."""
import threading
from multiprocessing.dummy import Pool as ThreadPool
import grpc
import nvflare.private.fed.protos.admin_pb2 as admin_msg
import nvflare.private.fed.protos.admin_pb2_grpc as admin_service
from nvflare.private.admin_defs import Message
from nvflare.private.fed.utils.messageproto import message_to_proto, proto_to_message
from .admin import Sender
lock = threading.Lock()
class AdminMessageSender(Sender):
"""AdminMessageSender to send the request message to the admin server."""
def __init__(
self,
client_name,
root_cert=None,
ssl_cert=None,
private_key=None,
server_args=None,
secure=False,
is_multi_gpu=False,
rank=0,
):
"""To init the AdminMessageSender.
Args:
client_name: client name
root_cert: root certificate
ssl_cert: SSL certificate
private_key: private key
server_args: server args
secure: True/False
is_multi_gpu: True/False
rank: local process rank
"""
self.client_name = client_name
self.root_cert = root_cert
self.ssl_cert = ssl_cert
self.private_key = private_key
self.secure = secure
self.servers = server_args
self.multi_gpu = is_multi_gpu
self.rank = rank
self.pool = ThreadPool(len(self.servers))
def send_reply(self, message: Message):
"""Call to send the request message.
Args:
message: request message
"""
if self.rank == 0:
# self.send_client_reply(message)
for taskname in tuple(self.servers):
self._send_client_reply(message, taskname)
def _send_client_reply(self, message, taskname):
try:
with self._set_up_channel(self.servers[taskname]) as channel:
stub = admin_service.AdminCommunicatingStub(channel)
reply = admin_msg.Reply()
reply.client_name = self.client_name
reply.message.CopyFrom(message_to_proto(message))
# reply.message = message_to_proto(message)
stub.SendReply(reply)
except BaseException:
pass
def retrieve_requests(self) -> [Message]:
"""Send the message to retrieve pending requests from the Server.
Returns: list of messages.
"""
messages = []
if self.rank == 0:
items = self.pool.map(self._retrieve_client_requests, tuple(self.servers))
for item in items:
messages.extend(item)
return messages
def _retrieve_client_requests(self, taskname):
try:
message_list = []
with self._set_up_channel(self.servers[taskname]) as channel:
stub = admin_service.AdminCommunicatingStub(channel)
client = admin_msg.Client()
client.client_name = self.client_name
messages = stub.Retrieve(client)
for i in messages.message:
message_list.append(proto_to_message(i))
except Exception as e:
messages = None
return message_list
def send_result(self, message: Message):
"""Send the processor results to server.
Args:
message: message
"""
if self.rank == 0:
for taskname in tuple(self.servers):
try:
with self._set_up_channel(self.servers[taskname]) as channel:
stub = admin_service.AdminCommunicatingStub(channel)
reply = admin_msg.Reply()
reply.client_name = self.client_name
reply.message.CopyFrom(message_to_proto(message))
stub.SendResult(reply)
except BaseException:
pass
def _set_up_channel(self, channel_dict):
"""Connect client to the server.
Args:
channel_dict: grpc channel parameters
Returns: an initialised grpc channel
"""
if self.secure:
with open(self.root_cert, "rb") as f:
trusted_certs = f.read()
with open(self.private_key, "rb") as f:
private_key = f.read()
with open(self.ssl_cert, "rb") as f:
certificate_chain = f.read()
call_credentials = grpc.metadata_call_credentials(
lambda context, callback: callback((("x-custom-token", self.client_name),), None)
)
credentials = grpc.ssl_channel_credentials(
certificate_chain=certificate_chain, private_key=private_key, root_certificates=trusted_certs
)
composite_credentials = grpc.composite_channel_credentials(credentials, call_credentials)
channel = grpc.secure_channel(**channel_dict, credentials=composite_credentials)
else:
channel = grpc.insecure_channel(**channel_dict)
return channel
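
# Minimal usage sketch (hypothetical endpoint values; real configs come from the
# FL client startup kit): rank 0 polls each configured server for admin requests.
#
#   sender = AdminMessageSender(
#       client_name="site-1",
#       server_args={"app": {"target": "localhost:8003"}},
#       secure=False,
#   )
#   pending_requests = sender.retrieve_requests()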
| [
"nvflare.private.fed.utils.messageproto.proto_to_message",
"grpc.secure_channel",
"nvflare.private.fed.protos.admin_pb2.Reply",
"nvflare.private.fed.utils.messageproto.message_to_proto",
"nvflare.private.fed.protos.admin_pb2.Client",
"grpc.insecure_channel",
"threading.Lock",
"nvflare.private.fed.protos.admin_pb2_grpc.AdminCommunicatingStub",
"grpc.ssl_channel_credentials",
"grpc.composite_channel_credentials"
] | [((1074, 1090), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (1088, 1090), False, 'import threading\n'), ((5366, 5493), 'grpc.ssl_channel_credentials', 'grpc.ssl_channel_credentials', ([], {'certificate_chain': 'certificate_chain', 'private_key': 'private_key', 'root_certificates': 'trusted_certs'}), '(certificate_chain=certificate_chain,\n private_key=private_key, root_certificates=trusted_certs)\n', (5394, 5493), False, 'import grpc\n'), ((5557, 5622), 'grpc.composite_channel_credentials', 'grpc.composite_channel_credentials', (['credentials', 'call_credentials'], {}), '(credentials, call_credentials)\n', (5591, 5622), False, 'import grpc\n'), ((5645, 5715), 'grpc.secure_channel', 'grpc.secure_channel', ([], {'credentials': 'composite_credentials'}), '(**channel_dict, credentials=composite_credentials)\n', (5664, 5715), False, 'import grpc\n'), ((5752, 5789), 'grpc.insecure_channel', 'grpc.insecure_channel', ([], {}), '(**channel_dict)\n', (5773, 5789), False, 'import grpc\n'), ((2619, 2664), 'nvflare.private.fed.protos.admin_pb2_grpc.AdminCommunicatingStub', 'admin_service.AdminCommunicatingStub', (['channel'], {}), '(channel)\n', (2655, 2664), True, 'import nvflare.private.fed.protos.admin_pb2_grpc as admin_service\n'), ((2690, 2707), 'nvflare.private.fed.protos.admin_pb2.Reply', 'admin_msg.Reply', ([], {}), '()\n', (2705, 2707), True, 'import nvflare.private.fed.protos.admin_pb2 as admin_msg\n'), ((3564, 3609), 'nvflare.private.fed.protos.admin_pb2_grpc.AdminCommunicatingStub', 'admin_service.AdminCommunicatingStub', (['channel'], {}), '(channel)\n', (3600, 3609), True, 'import nvflare.private.fed.protos.admin_pb2_grpc as admin_service\n'), ((3636, 3654), 'nvflare.private.fed.protos.admin_pb2.Client', 'admin_msg.Client', ([], {}), '()\n', (3652, 3654), True, 'import nvflare.private.fed.protos.admin_pb2 as admin_msg\n'), ((2800, 2825), 'nvflare.private.fed.utils.messageproto.message_to_proto', 'message_to_proto', (['message'], {}), '(message)\n', (2816, 2825), False, 'from nvflare.private.fed.utils.messageproto import message_to_proto, proto_to_message\n'), ((3841, 3860), 'nvflare.private.fed.utils.messageproto.proto_to_message', 'proto_to_message', (['i'], {}), '(i)\n', (3857, 3860), False, 'from nvflare.private.fed.utils.messageproto import message_to_proto, proto_to_message\n'), ((4311, 4356), 'nvflare.private.fed.protos.admin_pb2_grpc.AdminCommunicatingStub', 'admin_service.AdminCommunicatingStub', (['channel'], {}), '(channel)\n', (4347, 4356), True, 'import nvflare.private.fed.protos.admin_pb2_grpc as admin_service\n'), ((4390, 4407), 'nvflare.private.fed.protos.admin_pb2.Reply', 'admin_msg.Reply', ([], {}), '()\n', (4405, 4407), True, 'import nvflare.private.fed.protos.admin_pb2 as admin_msg\n'), ((4516, 4541), 'nvflare.private.fed.utils.messageproto.message_to_proto', 'message_to_proto', (['message'], {}), '(message)\n', (4532, 4541), False, 'from nvflare.private.fed.utils.messageproto import message_to_proto, proto_to_message\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: EPL-2.0
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import os
import signal
import subprocess
import sys
from argparse import ArgumentParser
from common import base_benchmark_util
class LaunchBenchmark(base_benchmark_util.BaseBenchmarkUtil):
"""Launches benchmarking job based on the specified args """
def main(self):
args, unknown = self.parse_args(sys.argv[1:])
try:
self.validate_args(args)
except (IOError, ValueError) as e:
print("\nError: {}".format(e))
sys.exit(1)
self.run_docker_container(args)
def parse_args(self, args):
super(LaunchBenchmark, self).define_args()
# Additional args that are only used with the launch script
arg_parser = ArgumentParser(
parents=[self._common_arg_parser],
description="Parse args for benchmark interface")
arg_parser.add_argument(
"--docker-image", help="Specify the docker image/tag to use",
dest="docker_image", default=None, required=True)
arg_parser.add_argument(
"--debug", help="Launches debug mode which doesn't execute "
"start.sh", action="store_true")
return arg_parser.parse_known_args(args)
def validate_args(self, args):
"""validate the args"""
# validate the shared args first
super(LaunchBenchmark, self).validate_args(args)
# Check for spaces in docker image
if ' ' in args.docker_image:
raise ValueError("docker image string "
"should not have whitespace(s)")
# validate that we support this framework by checking folder names
benchmark_dir = os.path.dirname(os.path.realpath(__file__))
if glob.glob("{}/*/{}".format(benchmark_dir, args.framework)) == []:
raise ValueError("The specified framework is not supported: {}".
format(args.framework))
# if neither benchmark_only or accuracy_only are specified, then enable
# benchmark_only as the default
if not args.benchmark_only and not args.accuracy_only:
args.benchmark_only = True
def run_docker_container(self, args):
"""
Runs a docker container with the specified image and environment
variables to start running the benchmarking job.
"""
benchmark_scripts = os.path.dirname(os.path.realpath(__file__))
intelai_models = os.path.join(benchmark_scripts, os.pardir, "models")
if args.model_name:
# find the path to the model's benchmarks folder
search_path = os.path.join(
benchmark_scripts, "*", args.framework, args.model_name,
args.mode, args.precision)
matches = glob.glob(search_path)
if len(matches) > 1:
# we should never get more than one match
raise ValueError("Found multiple model locations for {} {} {}"
.format(args.framework,
args.model_name,
args.precision))
elif len(matches) == 0:
raise ValueError("No model was found for {} {} {}"
.format(args.framework,
args.model_name,
args.precision))
# use the benchmarks directory path to find the use case
dir_list = matches[0].split("/")
# find the last occurrence of framework in the list
framework_index = len(dir_list) - 1 - dir_list[::-1].index(
args.framework)
# grab the use case name from the path
use_case = str(dir_list[framework_index - 1])
# find the intelai_optimized model directory
optimized_model_dir = os.path.join(
benchmark_scripts, os.pardir, "models", use_case,
args.framework, args.model_name)
# if we find an optimized model, then we will use that path
if os.path.isdir(intelai_models):
intelai_models = optimized_model_dir
mount_benchmark = "/workspace/benchmarks"
mount_external_models_source = "/workspace/models"
mount_intelai_models = "/workspace/intelai_models"
workspace = os.path.join(mount_benchmark, "common", args.framework)
mount_output_dir = False
output_dir = os.path.join(workspace, 'logs')
if args.output_dir != "/models/benchmarks/common/tensorflow/logs":
# we don't need to mount log dir otherwise since default is workspace folder
mount_output_dir = True
output_dir = args.output_dir
in_graph_dir = os.path.dirname(args.input_graph) if args.input_graph \
else ""
in_graph_filename = os.path.basename(args.input_graph) if \
args.input_graph else ""
env_vars = ["--env", "DATASET_LOCATION_VOL={}".format(args.data_location),
"--env", "CHECKPOINT_DIRECTORY_VOL={}".format(args.checkpoint),
"--env", "EXTERNAL_MODELS_SOURCE_DIRECTORY={}".format(args.model_source_dir),
"--env", "INTELAI_MODELS={}".format(intelai_models),
"--env", "BENCHMARK_SCRIPTS={}".format(benchmark_scripts),
"--env", "SOCKET_ID={}".format(args.socket_id),
"--env", "MODEL_NAME={}".format(args.model_name),
"--env", "MODE={}".format(args.mode),
"--env", "PRECISION={}".format(args.precision),
"--env", "VERBOSE={}".format(args.verbose),
"--env", "BATCH_SIZE={}".format(args.batch_size),
"--env", "WORKSPACE={}".format(workspace),
"--env", "IN_GRAPH=/in_graph/{}".format(in_graph_filename),
"--env", "MOUNT_BENCHMARK={}".format(mount_benchmark),
"--env", "MOUNT_EXTERNAL_MODELS_SOURCE={}".format(mount_external_models_source),
"--env", "MOUNT_INTELAI_MODELS_SOURCE={}".format(mount_intelai_models),
"--env", "USE_CASE={}".format(use_case),
"--env", "FRAMEWORK={}".format(args.framework),
"--env", "NUM_CORES={}".format(args.num_cores),
"--env", "NUM_INTER_THREADS={}".format(args.num_inter_threads),
"--env", "NUM_INTRA_THREADS={}".format(args.num_intra_threads),
"--env", "DATASET_LOCATION=/dataset",
"--env", "CHECKPOINT_DIRECTORY=/checkpoints",
"--env", "BENCHMARK_ONLY={}".format(args.benchmark_only),
"--env", "ACCURACY_ONLY={}".format(args.accuracy_only),
"--env", "OUTPUT_RESULTS={}".format(args.output_results),
"--env", "NOINSTALL=False",
"--env", "OUTPUT_DIR={}".format(output_dir)]
        # By default we will install; the user needs to set NOINSTALL=True
        # manually after they get into `--debug` mode, since one run without
        # this flag is needed first to get everything installed.
# Add custom model args as env vars
for custom_arg in args.model_args:
if "=" not in custom_arg:
raise ValueError("Expected model args in the format "
"`name=value` but received: {}".
format(custom_arg))
env_vars.append("--env")
env_vars.append("{}".format(custom_arg))
# Add proxy to env variables if any set on host
for environment_proxy_setting in [
"http_proxy",
"ftp_proxy",
"https_proxy",
"no_proxy",
]:
if not os.environ.get(environment_proxy_setting):
continue
env_vars.append("--env")
env_vars.append("{}={}".format(
environment_proxy_setting,
os.environ.get(environment_proxy_setting)
))
volume_mounts = ["--volume", "{}:{}".format(benchmark_scripts, mount_benchmark),
"--volume", "{}:{}".format(args.model_source_dir, mount_external_models_source),
"--volume", "{}:{}".format(intelai_models, mount_intelai_models),
"--volume", "{}:/dataset".format(args.data_location),
"--volume", "{}:/checkpoints".format(args.checkpoint),
"--volume", "{}:/in_graph".format(in_graph_dir)]
if mount_output_dir:
volume_mounts.extend([
"--volume", "{}:{}".format(output_dir, output_dir)])
docker_run_cmd = ["docker", "run"]
# only use -it when debugging, otherwise we might get TTY error
if args.debug:
docker_run_cmd.append("-it")
docker_run_cmd = docker_run_cmd + env_vars + volume_mounts + [
"--privileged", "-u", "root:root", "-w",
workspace, args.docker_image, "/bin/bash"]
if not args.debug:
docker_run_cmd.append("start.sh")
if args.verbose:
print("Docker run command:\n{}".format(docker_run_cmd))
self._run_docker_cmd(docker_run_cmd)
def _run_docker_cmd(self, docker_run_cmd):
"""runs docker proc and exits on ctrl c"""
p = subprocess.Popen(docker_run_cmd, preexec_fn=os.setsid)
try:
p.communicate()
except KeyboardInterrupt:
os.killpg(os.getpgid(p.pid), signal.SIGKILL)
if __name__ == "__main__":
util = LaunchBenchmark()
util.main()
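
# Hypothetical invocation sketch (flag names inferred from the args used above;
# the shared flags are defined in base_benchmark_util):
#
#   python launch_benchmark.py \
#       --framework tensorflow --model-name resnet50 \
#       --mode inference --precision fp32 \
#       --docker-image <docker-image:tag>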
| [
"os.getpgid",
"subprocess.Popen",
"argparse.ArgumentParser",
"os.path.basename",
"os.path.isdir",
"os.path.realpath",
"os.path.dirname",
"os.environ.get",
"glob.glob",
"os.path.join",
"sys.exit"
] | [((1500, 1604), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'parents': '[self._common_arg_parser]', 'description': '"""Parse args for benchmark interface"""'}), "(parents=[self._common_arg_parser], description=\n 'Parse args for benchmark interface')\n", (1514, 1604), False, 'from argparse import ArgumentParser\n'), ((3227, 3279), 'os.path.join', 'os.path.join', (['benchmark_scripts', 'os.pardir', '"""models"""'], {}), "(benchmark_scripts, os.pardir, 'models')\n", (3239, 3279), False, 'import os\n'), ((5166, 5221), 'os.path.join', 'os.path.join', (['mount_benchmark', '"""common"""', 'args.framework'], {}), "(mount_benchmark, 'common', args.framework)\n", (5178, 5221), False, 'import os\n'), ((5277, 5308), 'os.path.join', 'os.path.join', (['workspace', '"""logs"""'], {}), "(workspace, 'logs')\n", (5289, 5308), False, 'import os\n'), ((10277, 10331), 'subprocess.Popen', 'subprocess.Popen', (['docker_run_cmd'], {'preexec_fn': 'os.setsid'}), '(docker_run_cmd, preexec_fn=os.setsid)\n', (10293, 10331), False, 'import subprocess\n'), ((2475, 2501), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (2491, 2501), False, 'import os\n'), ((3174, 3200), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (3190, 3200), False, 'import os\n'), ((3396, 3497), 'os.path.join', 'os.path.join', (['benchmark_scripts', '"""*"""', 'args.framework', 'args.model_name', 'args.mode', 'args.precision'], {}), "(benchmark_scripts, '*', args.framework, args.model_name, args.\n mode, args.precision)\n", (3408, 3497), False, 'import os\n'), ((3548, 3570), 'glob.glob', 'glob.glob', (['search_path'], {}), '(search_path)\n', (3557, 3570), False, 'import glob\n'), ((4676, 4776), 'os.path.join', 'os.path.join', (['benchmark_scripts', 'os.pardir', '"""models"""', 'use_case', 'args.framework', 'args.model_name'], {}), "(benchmark_scripts, os.pardir, 'models', use_case, args.\n framework, args.model_name)\n", (4688, 4776), False, 'import os\n'), ((4893, 4922), 'os.path.isdir', 'os.path.isdir', (['intelai_models'], {}), '(intelai_models)\n', (4906, 4922), False, 'import os\n'), ((5574, 5607), 'os.path.dirname', 'os.path.dirname', (['args.input_graph'], {}), '(args.input_graph)\n', (5589, 5607), False, 'import os\n'), ((5678, 5712), 'os.path.basename', 'os.path.basename', (['args.input_graph'], {}), '(args.input_graph)\n', (5694, 5712), False, 'import os\n'), ((1274, 1285), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1282, 1285), False, 'import sys\n'), ((8672, 8713), 'os.environ.get', 'os.environ.get', (['environment_proxy_setting'], {}), '(environment_proxy_setting)\n', (8686, 8713), False, 'import os\n'), ((8880, 8921), 'os.environ.get', 'os.environ.get', (['environment_proxy_setting'], {}), '(environment_proxy_setting)\n', (8894, 8921), False, 'import os\n'), ((10429, 10446), 'os.getpgid', 'os.getpgid', (['p.pid'], {}), '(p.pid)\n', (10439, 10446), False, 'import os\n')] |
import unittest
from apps.api.segmenter.road_segmenter import geometry_to_list
from apps.data.road_segmenting.road_fetcher import vegnet_to_geojson
from apps.data.road_segmenting.road_filter import filter_road
from vapi.constants import MAX_SEGMENT_LENGTH, MIN_COORDINATES_LENGTH
from api.segmenter.calculate_distance import calculate_road_length_simple
from api.segmenter.road_segmenter import segment_network, split_segment
def convert(road):
road = filter_road(road)
road["the_geom"] = geometry_to_list(road["the_geom"])
return road
class TestSegmenting(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.kommune = 5001
cls.vegref = "kg"
cls.max_segment_distance = MAX_SEGMENT_LENGTH
cls.min_coordinates_length = MIN_COORDINATES_LENGTH
network = vegnet_to_geojson(cls.kommune, cls.vegref)
cls.count, cls.road_net = network[0], network[1]["features"]
        # Apparently setUpClass is a bit funky and road_net does not stay filtered after setUpClass has run,
        # so the filtering is instead done in each test function that needs it, rather than here.
road_net_list = []
for road in cls.road_net:
road_net_list.append(filter_road(road))
cls.road_net_segmented = segment_network(road_net_list, cls.max_segment_distance, cls.min_coordinates_length)
def setUp(self):
"""
Needs to be here for the tests to run
"""
pass
def test_road_segmenter_list(self):
"""
The road_segmenter function should return a list
:return: Nothing
"""
self.assertIsInstance(self.road_net_segmented, list, "The road segmenter did not return a list")
def test_road_segmenter_list_elements(self):
"""
Every element in the split segments should be a dict
:return: Nothing
"""
error_message = "Not all elements in the split list are of type dict \n"
for segment in self.road_net_segmented:
self.assertIsInstance(segment, dict, error_message)
def test_split_segment_geometry_len(self):
"""
Given a list of roads segments, the split segments should always have a length
of 2 or more
:return: Nothing
"""
error_message = "Segment has less than " + str(self.min_coordinates_length) + " GPS coordinates"
for segment in self.road_net_segmented:
# coordinates are split by commas, so the count of commas+1 should be the same as the count of coordinates
coordinates_amount = segment["the_geom"].count(",")
self.assertGreaterEqual(coordinates_amount+1, self.min_coordinates_length, error_message)
def test_road_filter(self):
"""
The road_filter function should return a string, otherwise segmentation will crash in later stages
:return: Nothing
"""
for road in self.road_net:
road = filter_road(road)
self.assertIsInstance(road["the_geom"], str, "road_filter should turn geometry into a string")
def test_geometry_conversion(self):
"""
The geometry_to_list function should return a dictionary containing coordinates as a list,
otherwise the segmenter can't split segments
:return: Nothing
"""
for road in self.road_net:
road = convert(road)
self.assertIsInstance(road["the_geom"], dict, "geometry_to_list should return a "
"dictionary")
self.assertIsInstance(road["the_geom"]["coordinates"], list, "geometry_to_list should return a turn the "
"coordinates into a list")
def test_calculate_road_length(self):
"""
The total distance of the segmented road should be similar to the length before segmentation, within
a margin given by the variable "margin"
:return: Nothing
"""
margin = 3
for road in self.road_net:
road = convert(road)
length_before = calculate_road_length_simple(road["the_geom"]["coordinates"])
road_segmented = split_segment(road, self.max_segment_distance, [], self.min_coordinates_length)
length_after = 0
for segment in road_segmented:
length_after += calculate_road_length_simple(segment["the_geom"]["coordinates"])
self.assertLess(abs(length_after - length_before), margin, "The difference between the original "
"length and the segmented length is "
"too large")
def test_split_segment_chaining(self):
"""
Every connected segment should start with the end gps point of the previous segment
:return: Nothing
"""
for road in self.road_net:
road = convert(road)
road_segmented = split_segment(road, self.max_segment_distance, [], self.min_coordinates_length)
for i in range(1, len(road_segmented)):
curr_segment = road_segmented[i]
prev_segment = road_segmented[i-1]
end_coordinate = len(prev_segment["the_geom"]["coordinates"])-1
self.assertEqual(curr_segment["the_geom"]["coordinates"][0],
prev_segment["the_geom"]["coordinates"][end_coordinate],
"Segments are not correctly chained")
def test_split_segment_negative_length(self):
"""
No road segments should have a negative road length
:return: Nothing
"""
for segment in self.road_net_segmented:
self.assertGreater(segment["stretchdistance"], 0, "Stretchdistance must be of at least 1 meter")
def test_duplicate_segments(self):
"""
Test if there are multiple segments with the same coordinates
"""
length = len(self.road_net_segmented)-1
for i in range(length):
road = self.road_net_segmented[i]["the_geom"]
for x in range(i+1, length):
other_road = self.road_net_segmented[x]["the_geom"]
self.assertNotEqual(road, other_road, "Duplicate segment geometry coordinates")
def test_missing_coordinates(self):
"""
All original coordinates should still be present after segmenting road network
:return: Nothing
"""
for road in self.road_net:
road = convert(road)
coordinates_original = road["the_geom"]["coordinates"]
road_segmented = split_segment(road, self.max_segment_distance, [], self.min_coordinates_length)
coordinates_segmented = []
for segment in road_segmented:
coordinates_segmented.extend(segment["the_geom"]["coordinates"])
for coordinate in coordinates_original:
self.assertTrue(coordinate in coordinates_segmented, "Missing coordinate after segmenting")
def test_over_and_undersegmenting(self):
"""
The segmenter should only run on segments that are over the limit in length, it should never segment something
shorter than that. In other words the segmented road should still be only one segment
:return: Nothing
"""
i = 0
for road in self.road_net:
i += 1
converted_road = convert(road)
road_coords_length = len(converted_road["the_geom"]["coordinates"])
road_distance = calculate_road_length_simple(converted_road["the_geom"]["coordinates"])
road_segmented = segment_network([filter_road(road)], self.max_segment_distance,
self.min_coordinates_length)
road_segmented_length = len(road_segmented)
if road_distance < self.max_segment_distance:
self.assertTrue(road_segmented_length == 1, "This road was segmented, but should not have been.")
elif road_coords_length >= 2*self.min_coordinates_length and road_distance > self.max_segment_distance:
self.assertTrue(road_segmented_length > 1, ("This road should have been segmented, but was not. "
"Stretchdistance:", road_distance, "Coordinates:",
converted_road["the_geom"]["coordinates"], i))
if __name__ == "__main__":
unittest.main()
| [
"unittest.main",
"api.segmenter.calculate_distance.calculate_road_length_simple",
"apps.api.segmenter.road_segmenter.geometry_to_list",
"apps.data.road_segmenting.road_fetcher.vegnet_to_geojson",
"apps.data.road_segmenting.road_filter.filter_road",
"api.segmenter.road_segmenter.split_segment",
"api.segmenter.road_segmenter.segment_network"
] | [((460, 477), 'apps.data.road_segmenting.road_filter.filter_road', 'filter_road', (['road'], {}), '(road)\n', (471, 477), False, 'from apps.data.road_segmenting.road_filter import filter_road\n'), ((501, 535), 'apps.api.segmenter.road_segmenter.geometry_to_list', 'geometry_to_list', (["road['the_geom']"], {}), "(road['the_geom'])\n", (517, 535), False, 'from apps.api.segmenter.road_segmenter import geometry_to_list\n'), ((8614, 8629), 'unittest.main', 'unittest.main', ([], {}), '()\n', (8627, 8629), False, 'import unittest\n'), ((823, 865), 'apps.data.road_segmenting.road_fetcher.vegnet_to_geojson', 'vegnet_to_geojson', (['cls.kommune', 'cls.vegref'], {}), '(cls.kommune, cls.vegref)\n', (840, 865), False, 'from apps.data.road_segmenting.road_fetcher import vegnet_to_geojson\n'), ((1282, 1371), 'api.segmenter.road_segmenter.segment_network', 'segment_network', (['road_net_list', 'cls.max_segment_distance', 'cls.min_coordinates_length'], {}), '(road_net_list, cls.max_segment_distance, cls.\n min_coordinates_length)\n', (1297, 1371), False, 'from api.segmenter.road_segmenter import segment_network, split_segment\n'), ((2963, 2980), 'apps.data.road_segmenting.road_filter.filter_road', 'filter_road', (['road'], {}), '(road)\n', (2974, 2980), False, 'from apps.data.road_segmenting.road_filter import filter_road\n'), ((4147, 4208), 'api.segmenter.calculate_distance.calculate_road_length_simple', 'calculate_road_length_simple', (["road['the_geom']['coordinates']"], {}), "(road['the_geom']['coordinates'])\n", (4175, 4208), False, 'from api.segmenter.calculate_distance import calculate_road_length_simple\n'), ((4239, 4318), 'api.segmenter.road_segmenter.split_segment', 'split_segment', (['road', 'self.max_segment_distance', '[]', 'self.min_coordinates_length'], {}), '(road, self.max_segment_distance, [], self.min_coordinates_length)\n', (4252, 4318), False, 'from api.segmenter.road_segmenter import segment_network, split_segment\n'), ((5075, 5154), 'api.segmenter.road_segmenter.split_segment', 'split_segment', (['road', 'self.max_segment_distance', '[]', 'self.min_coordinates_length'], {}), '(road, self.max_segment_distance, [], self.min_coordinates_length)\n', (5088, 5154), False, 'from api.segmenter.road_segmenter import segment_network, split_segment\n'), ((6763, 6842), 'api.segmenter.road_segmenter.split_segment', 'split_segment', (['road', 'self.max_segment_distance', '[]', 'self.min_coordinates_length'], {}), '(road, self.max_segment_distance, [], self.min_coordinates_length)\n', (6776, 6842), False, 'from api.segmenter.road_segmenter import segment_network, split_segment\n'), ((7694, 7765), 'api.segmenter.calculate_distance.calculate_road_length_simple', 'calculate_road_length_simple', (["converted_road['the_geom']['coordinates']"], {}), "(converted_road['the_geom']['coordinates'])\n", (7722, 7765), False, 'from api.segmenter.calculate_distance import calculate_road_length_simple\n'), ((1230, 1247), 'apps.data.road_segmenting.road_filter.filter_road', 'filter_road', (['road'], {}), '(road)\n', (1241, 1247), False, 'from apps.data.road_segmenting.road_filter import filter_road\n'), ((4424, 4488), 'api.segmenter.calculate_distance.calculate_road_length_simple', 'calculate_road_length_simple', (["segment['the_geom']['coordinates']"], {}), "(segment['the_geom']['coordinates'])\n", (4452, 4488), False, 'from api.segmenter.calculate_distance import calculate_road_length_simple\n'), ((7812, 7829), 'apps.data.road_segmenting.road_filter.filter_road', 'filter_road', (['road'], {}), '(road)\n', 
(7823, 7829), False, 'from apps.data.road_segmenting.road_filter import filter_road\n')] |
import numpy as np
import pickle
import os
from pathlib import Path
from metrics.class_imbalance import get_classes, class_proportion
from metrics.phi_div import average_dkl
from metrics.wasserstein import wasserstein_2
def compute_metrics(ds,
split,
inv_temp,
num_parties,
num_classes,
alpha,
lengthscale,
party_datasets,
party_labels,
reference_dataset,
candidate_datasets,
candidate_labels,
rewards,
deltas,
mus):
print("Computing metrics")
party_datasets_with_rewards = []
for i in range(num_parties):
party_datasets_with_rewards.append(np.concatenate([party_datasets[i], rewards[i]], axis=0))
print("Length of rewards: {}".format([len(r) for r in rewards]))
print("alpha:\n{}".format(alpha))
print("Calculating average DKLs before")
dkls_before = average_dkl(party_datasets, reference_dataset)
print(dkls_before)
print("Calculating average DKLs after")
dkls_after = average_dkl(party_datasets_with_rewards, reference_dataset)
print(dkls_after)
print("Correlation coefficient with alpha: \n{}".format(np.corrcoef(alpha, dkls_after)[0, 1]))
class_props = []
for result in rewards:
class_props.append(
class_proportion(get_classes(np.array(result), candidate_datasets[0], candidate_labels), num_classes))
print("Class proportions and class imbalance of rewards: {}".format(class_props))
print("Calculating Wasserstein-2 before")
wass_before = [wasserstein_2(party_datasets[i], reference_dataset) for i in range(num_parties)]
wass_after = [wasserstein_2(np.concatenate([party_datasets[i], np.array(rewards[i])], axis=0), reference_dataset)
for i in range(num_parties)]
print("Wasserstein-2 before: \n{}".format(wass_before))
print("Wasserstein-2 after: \n{}".format(wass_after))
print("Correlation coefficient with alpha: \n{}".format(np.corrcoef(alpha, wass_after)[0, 1]))
#Save metrics
Path(os.getcwd() + '/data/metrics').mkdir(parents=True, exist_ok=True)
pickle.dump((party_datasets, party_labels, reference_dataset, candidate_datasets, candidate_labels,
rewards, deltas, mus, alpha, lengthscale, class_props, wass_before, wass_after, dkls_before, dkls_after),
open("data/metrics/metrics-{}-{}-{}.p".format(ds, split, inv_temp), 'wb'))
| [
"os.getcwd",
"numpy.corrcoef",
"metrics.wasserstein.wasserstein_2",
"numpy.array",
"metrics.phi_div.average_dkl",
"numpy.concatenate"
] | [((1107, 1153), 'metrics.phi_div.average_dkl', 'average_dkl', (['party_datasets', 'reference_dataset'], {}), '(party_datasets, reference_dataset)\n', (1118, 1153), False, 'from metrics.phi_div import average_dkl\n'), ((1243, 1302), 'metrics.phi_div.average_dkl', 'average_dkl', (['party_datasets_with_rewards', 'reference_dataset'], {}), '(party_datasets_with_rewards, reference_dataset)\n', (1254, 1302), False, 'from metrics.phi_div import average_dkl\n'), ((1779, 1830), 'metrics.wasserstein.wasserstein_2', 'wasserstein_2', (['party_datasets[i]', 'reference_dataset'], {}), '(party_datasets[i], reference_dataset)\n', (1792, 1830), False, 'from metrics.wasserstein import wasserstein_2\n'), ((870, 925), 'numpy.concatenate', 'np.concatenate', (['[party_datasets[i], rewards[i]]'], {'axis': '(0)'}), '([party_datasets[i], rewards[i]], axis=0)\n', (884, 925), True, 'import numpy as np\n'), ((1387, 1417), 'numpy.corrcoef', 'np.corrcoef', (['alpha', 'dkls_after'], {}), '(alpha, dkls_after)\n', (1398, 1417), True, 'import numpy as np\n'), ((2208, 2238), 'numpy.corrcoef', 'np.corrcoef', (['alpha', 'wass_after'], {}), '(alpha, wass_after)\n', (2219, 2238), True, 'import numpy as np\n'), ((1549, 1565), 'numpy.array', 'np.array', (['result'], {}), '(result)\n', (1557, 1565), True, 'import numpy as np\n'), ((1928, 1948), 'numpy.array', 'np.array', (['rewards[i]'], {}), '(rewards[i])\n', (1936, 1948), True, 'import numpy as np\n'), ((2278, 2289), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2287, 2289), False, 'import os\n')] |
# Generated by Django 2.0 on 2020-02-07 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('addresses', '0001_initial'),
('orders', '0002_auto_20200204_1253'),
]
operations = [
migrations.AddField(
model_name='order',
name='billing_address',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='addresses.Address'),
),
migrations.AddField(
model_name='order',
name='shipping_address',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='shipping_address', to='addresses.Address'),
),
]
| [
"django.db.models.ForeignKey"
] | [((411, 525), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""addresses.Address"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='addresses.Address')\n", (428, 525), False, 'from django.db import migrations, models\n'), ((649, 796), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""shipping_address"""', 'to': '"""addresses.Address"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name='shipping_address', to='addresses.Address')\n", (666, 796), False, 'from django.db import migrations, models\n')] |
from helper import unittest, PillowTestCase, hopper, py3
import os
import io
from PIL import Image, TiffImagePlugin
class LibTiffTestCase(PillowTestCase):
def setUp(self):
codecs = dir(Image.core)
if "libtiff_encoder" not in codecs or "libtiff_decoder" not in codecs:
self.skipTest("tiff support not available")
def _assert_noerr(self, im):
"""Helper tests that assert basic sanity about the g4 tiff reading"""
# 1 bit
self.assertEqual(im.mode, "1")
# Does the data actually load
im.load()
im.getdata()
try:
self.assertEqual(im._compression, 'group4')
except:
print("No _compression")
print (dir(im))
# can we write it back out, in a different form.
out = self.tempfile("temp.png")
im.save(out)
class TestFileLibTiff(LibTiffTestCase):
def test_g4_tiff(self):
"""Test the ordinary file path load path"""
file = "Tests/images/hopper_g4_500.tif"
im = Image.open(file)
self.assertEqual(im.size, (500, 500))
self._assert_noerr(im)
def test_g4_large(self):
file = "Tests/images/pport_g4.tif"
im = Image.open(file)
self._assert_noerr(im)
def test_g4_tiff_file(self):
"""Testing the string load path"""
file = "Tests/images/hopper_g4_500.tif"
with open(file, 'rb') as f:
im = Image.open(f)
self.assertEqual(im.size, (500, 500))
self._assert_noerr(im)
def test_g4_tiff_bytesio(self):
"""Testing the stringio loading code path"""
file = "Tests/images/hopper_g4_500.tif"
s = io.BytesIO()
with open(file, 'rb') as f:
s.write(f.read())
s.seek(0)
im = Image.open(s)
self.assertEqual(im.size, (500, 500))
self._assert_noerr(im)
def test_g4_eq_png(self):
""" Checking that we're actually getting the data that we expect"""
png = Image.open('Tests/images/hopper_bw_500.png')
g4 = Image.open('Tests/images/hopper_g4_500.tif')
self.assert_image_equal(g4, png)
# see https://github.com/python-pillow/Pillow/issues/279
def test_g4_fillorder_eq_png(self):
""" Checking that we're actually getting the data that we expect"""
png = Image.open('Tests/images/g4-fillorder-test.png')
g4 = Image.open('Tests/images/g4-fillorder-test.tif')
self.assert_image_equal(g4, png)
def test_g4_write(self):
"""Checking to see that the saved image is the same as what we wrote"""
file = "Tests/images/hopper_g4_500.tif"
orig = Image.open(file)
out = self.tempfile("temp.tif")
rot = orig.transpose(Image.ROTATE_90)
self.assertEqual(rot.size, (500, 500))
rot.save(out)
reread = Image.open(out)
self.assertEqual(reread.size, (500, 500))
self._assert_noerr(reread)
self.assert_image_equal(reread, rot)
self.assertEqual(reread.info['compression'], 'group4')
self.assertEqual(reread.info['compression'], orig.info['compression'])
self.assertNotEqual(orig.tobytes(), reread.tobytes())
def test_adobe_deflate_tiff(self):
file = "Tests/images/tiff_adobe_deflate.tif"
im = Image.open(file)
self.assertEqual(im.mode, "RGB")
self.assertEqual(im.size, (278, 374))
self.assertEqual(
im.tile[0][:3], ('tiff_adobe_deflate', (0, 0, 278, 374), 0))
im.load()
def test_write_metadata(self):
""" Test metadata writing through libtiff """
img = Image.open('Tests/images/hopper_g4.tif')
f = self.tempfile('temp.tiff')
img.save(f, tiffinfo=img.tag)
loaded = Image.open(f)
original = img.tag.named()
reloaded = loaded.tag.named()
# PhotometricInterpretation is set from SAVE_INFO,
# not the original image.
ignored = [
'StripByteCounts', 'RowsPerStrip',
'PageNumber', 'PhotometricInterpretation']
for tag, value in reloaded.items():
if tag not in ignored:
if tag.endswith('Resolution'):
val = original[tag]
self.assert_almost_equal(
val[0][0]/val[0][1], value[0][0]/value[0][1],
msg="%s didn't roundtrip" % tag)
else:
self.assertEqual(
original[tag], value, "%s didn't roundtrip" % tag)
for tag, value in original.items():
if tag not in ignored:
if tag.endswith('Resolution'):
val = reloaded[tag]
self.assert_almost_equal(
val[0][0]/val[0][1], value[0][0]/value[0][1],
msg="%s didn't roundtrip" % tag)
else:
self.assertEqual(
value, reloaded[tag], "%s didn't roundtrip" % tag)
def test_g3_compression(self):
i = Image.open('Tests/images/hopper_g4_500.tif')
out = self.tempfile("temp.tif")
i.save(out, compression='group3')
reread = Image.open(out)
self.assertEqual(reread.info['compression'], 'group3')
self.assert_image_equal(reread, i)
def test_little_endian(self):
im = Image.open('Tests/images/16bit.deflate.tif')
self.assertEqual(im.getpixel((0, 0)), 480)
self.assertEqual(im.mode, 'I;16')
b = im.tobytes()
# Bytes are in image native order (little endian)
if py3:
self.assertEqual(b[0], ord(b'\xe0'))
self.assertEqual(b[1], ord(b'\x01'))
else:
self.assertEqual(b[0], b'\xe0')
self.assertEqual(b[1], b'\x01')
out = self.tempfile("temp.tif")
# out = "temp.le.tif"
im.save(out)
reread = Image.open(out)
self.assertEqual(reread.info['compression'], im.info['compression'])
self.assertEqual(reread.getpixel((0, 0)), 480)
# UNDONE - libtiff defaults to writing in native endian, so
# on big endian, we'll get back mode = 'I;16B' here.
def test_big_endian(self):
im = Image.open('Tests/images/16bit.MM.deflate.tif')
self.assertEqual(im.getpixel((0, 0)), 480)
self.assertEqual(im.mode, 'I;16B')
b = im.tobytes()
# Bytes are in image native order (big endian)
if py3:
self.assertEqual(b[0], ord(b'\x01'))
self.assertEqual(b[1], ord(b'\xe0'))
else:
self.assertEqual(b[0], b'\x01')
self.assertEqual(b[1], b'\xe0')
out = self.tempfile("temp.tif")
im.save(out)
reread = Image.open(out)
self.assertEqual(reread.info['compression'], im.info['compression'])
self.assertEqual(reread.getpixel((0, 0)), 480)
def test_g4_string_info(self):
"""Tests String data in info directory"""
file = "Tests/images/hopper_g4_500.tif"
orig = Image.open(file)
out = self.tempfile("temp.tif")
orig.tag[269] = 'temp.tif'
orig.save(out)
reread = Image.open(out)
self.assertEqual('temp.tif', reread.tag[269])
def test_12bit_rawmode(self):
""" Are we generating the same interpretation
of the image as Imagemagick is? """
TiffImagePlugin.READ_LIBTIFF = True
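        # Force the libtiff-based reader for this image (switched back off below).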
# Image.DEBUG = True
im = Image.open('Tests/images/12bit.cropped.tif')
im.load()
TiffImagePlugin.READ_LIBTIFF = False
# to make the target --
# convert 12bit.cropped.tif -depth 16 tmp.tif
# convert tmp.tif -evaluate RightShift 4 12in16bit2.tif
# imagemagick will auto scale so that a 12bit FFF is 16bit FFF0,
# so we need to unshift so that the integer values are the same.
im2 = Image.open('Tests/images/12in16bit.tif')
if Image.DEBUG:
print (im.getpixel((0, 0)))
print (im.getpixel((0, 1)))
print (im.getpixel((0, 2)))
print (im2.getpixel((0, 0)))
print (im2.getpixel((0, 1)))
print (im2.getpixel((0, 2)))
self.assert_image_equal(im, im2)
def test_blur(self):
# test case from irc, how to do blur on b/w image
# and save to compressed tif.
from PIL import ImageFilter
out = self.tempfile('temp.tif')
im = Image.open('Tests/images/pport_g4.tif')
im = im.convert('L')
im = im.filter(ImageFilter.GaussianBlur(4))
im.save(out, compression='tiff_adobe_deflate')
im2 = Image.open(out)
im2.load()
self.assert_image_equal(im, im2)
def test_compressions(self):
im = hopper('RGB')
out = self.tempfile('temp.tif')
for compression in ('packbits', 'tiff_lzw'):
im.save(out, compression=compression)
im2 = Image.open(out)
self.assert_image_equal(im, im2)
im.save(out, compression='jpeg')
im2 = Image.open(out)
self.assert_image_similar(im, im2, 30)
def test_cmyk_save(self):
im = hopper('CMYK')
out = self.tempfile('temp.tif')
im.save(out, compression='tiff_adobe_deflate')
im2 = Image.open(out)
self.assert_image_equal(im, im2)
def xtest_bw_compression_w_rgb(self):
""" This test passes, but when running all tests causes a failure due
to output on stderr from the error thrown by libtiff. We need to
capture that but not now"""
im = hopper('RGB')
out = self.tempfile('temp.tif')
self.assertRaises(
IOError, lambda: im.save(out, compression='tiff_ccitt'))
self.assertRaises(IOError, lambda: im.save(out, compression='group3'))
self.assertRaises(IOError, lambda: im.save(out, compression='group4'))
def test_fp_leak(self):
im = Image.open("Tests/images/hopper_g4_500.tif")
fn = im.fp.fileno()
os.fstat(fn)
im.load() # this should close it.
self.assertRaises(OSError, lambda: os.fstat(fn))
im = None # this should force even more closed.
self.assertRaises(OSError, lambda: os.fstat(fn))
self.assertRaises(OSError, lambda: os.close(fn))
def test_multipage(self):
# issue #862
TiffImagePlugin.READ_LIBTIFF = True
im = Image.open('Tests/images/multipage.tiff')
# file is a multipage tiff, 10x10 green, 10x10 red, 20x20 blue
im.seek(0)
self.assertEqual(im.size, (10, 10))
self.assertEqual(im.convert('RGB').getpixel((0, 0)), (0, 128, 0))
self.assertTrue(im.tag.next)
im.seek(1)
self.assertEqual(im.size, (10, 10))
self.assertEqual(im.convert('RGB').getpixel((0, 0)), (255, 0, 0))
self.assertTrue(im.tag.next)
im.seek(2)
self.assertFalse(im.tag.next)
self.assertEqual(im.size, (20, 20))
self.assertEqual(im.convert('RGB').getpixel((0, 0)), (0, 0, 255))
TiffImagePlugin.READ_LIBTIFF = False
def test__next(self):
TiffImagePlugin.READ_LIBTIFF = True
im = Image.open('Tests/images/hopper.tif')
self.assertFalse(im.tag.next)
im.load()
self.assertFalse(im.tag.next)
def test_4bit(self):
# Arrange
test_file = "Tests/images/hopper_gray_4bpp.tif"
original = hopper("L")
# Act
TiffImagePlugin.READ_LIBTIFF = True
im = Image.open(test_file)
TiffImagePlugin.READ_LIBTIFF = False
# Assert
self.assertEqual(im.size, (128, 128))
self.assertEqual(im.mode, "L")
self.assert_image_similar(im, original, 7.3)
def test_save_bytesio(self):
# PR 1011
# Test TIFF saving to io.BytesIO() object.
TiffImagePlugin.WRITE_LIBTIFF = True
TiffImagePlugin.READ_LIBTIFF = True
# Generate test image
pilim = hopper()
def save_bytesio(compression=None):
buffer_io = io.BytesIO()
pilim.save(buffer_io, format="tiff", compression=compression)
buffer_io.seek(0)
pilim_load = Image.open(buffer_io)
self.assert_image_similar(pilim, pilim_load, 0)
# save_bytesio()
save_bytesio('raw')
save_bytesio("packbits")
save_bytesio("tiff_lzw")
TiffImagePlugin.WRITE_LIBTIFF = False
TiffImagePlugin.READ_LIBTIFF = False
if __name__ == '__main__':
unittest.main()
# End of file
| [
"PIL.ImageFilter.GaussianBlur",
"io.BytesIO",
"helper.unittest.main",
"PIL.Image.open",
"os.close",
"os.fstat",
"helper.hopper"
] | [((12614, 12629), 'helper.unittest.main', 'unittest.main', ([], {}), '()\n', (12627, 12629), False, 'from helper import unittest, PillowTestCase, hopper, py3\n'), ((1050, 1066), 'PIL.Image.open', 'Image.open', (['file'], {}), '(file)\n', (1060, 1066), False, 'from PIL import Image, TiffImagePlugin\n'), ((1231, 1247), 'PIL.Image.open', 'Image.open', (['file'], {}), '(file)\n', (1241, 1247), False, 'from PIL import Image, TiffImagePlugin\n'), ((1708, 1720), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (1718, 1720), False, 'import io\n'), ((1822, 1835), 'PIL.Image.open', 'Image.open', (['s'], {}), '(s)\n', (1832, 1835), False, 'from PIL import Image, TiffImagePlugin\n'), ((2035, 2079), 'PIL.Image.open', 'Image.open', (['"""Tests/images/hopper_bw_500.png"""'], {}), "('Tests/images/hopper_bw_500.png')\n", (2045, 2079), False, 'from PIL import Image, TiffImagePlugin\n'), ((2093, 2137), 'PIL.Image.open', 'Image.open', (['"""Tests/images/hopper_g4_500.tif"""'], {}), "('Tests/images/hopper_g4_500.tif')\n", (2103, 2137), False, 'from PIL import Image, TiffImagePlugin\n'), ((2372, 2420), 'PIL.Image.open', 'Image.open', (['"""Tests/images/g4-fillorder-test.png"""'], {}), "('Tests/images/g4-fillorder-test.png')\n", (2382, 2420), False, 'from PIL import Image, TiffImagePlugin\n'), ((2434, 2482), 'PIL.Image.open', 'Image.open', (['"""Tests/images/g4-fillorder-test.tif"""'], {}), "('Tests/images/g4-fillorder-test.tif')\n", (2444, 2482), False, 'from PIL import Image, TiffImagePlugin\n'), ((2698, 2714), 'PIL.Image.open', 'Image.open', (['file'], {}), '(file)\n', (2708, 2714), False, 'from PIL import Image, TiffImagePlugin\n'), ((2889, 2904), 'PIL.Image.open', 'Image.open', (['out'], {}), '(out)\n', (2899, 2904), False, 'from PIL import Image, TiffImagePlugin\n'), ((3347, 3363), 'PIL.Image.open', 'Image.open', (['file'], {}), '(file)\n', (3357, 3363), False, 'from PIL import Image, TiffImagePlugin\n'), ((3673, 3713), 'PIL.Image.open', 'Image.open', (['"""Tests/images/hopper_g4.tif"""'], {}), "('Tests/images/hopper_g4.tif')\n", (3683, 3713), False, 'from PIL import Image, TiffImagePlugin\n'), ((3810, 3823), 'PIL.Image.open', 'Image.open', (['f'], {}), '(f)\n', (3820, 3823), False, 'from PIL import Image, TiffImagePlugin\n'), ((5112, 5156), 'PIL.Image.open', 'Image.open', (['"""Tests/images/hopper_g4_500.tif"""'], {}), "('Tests/images/hopper_g4_500.tif')\n", (5122, 5156), False, 'from PIL import Image, TiffImagePlugin\n'), ((5257, 5272), 'PIL.Image.open', 'Image.open', (['out'], {}), '(out)\n', (5267, 5272), False, 'from PIL import Image, TiffImagePlugin\n'), ((5427, 5471), 'PIL.Image.open', 'Image.open', (['"""Tests/images/16bit.deflate.tif"""'], {}), "('Tests/images/16bit.deflate.tif')\n", (5437, 5471), False, 'from PIL import Image, TiffImagePlugin\n'), ((5974, 5989), 'PIL.Image.open', 'Image.open', (['out'], {}), '(out)\n', (5984, 5989), False, 'from PIL import Image, TiffImagePlugin\n'), ((6297, 6344), 'PIL.Image.open', 'Image.open', (['"""Tests/images/16bit.MM.deflate.tif"""'], {}), "('Tests/images/16bit.MM.deflate.tif')\n", (6307, 6344), False, 'from PIL import Image, TiffImagePlugin\n'), ((6817, 6832), 'PIL.Image.open', 'Image.open', (['out'], {}), '(out)\n', (6827, 6832), False, 'from PIL import Image, TiffImagePlugin\n'), ((7115, 7131), 'PIL.Image.open', 'Image.open', (['file'], {}), '(file)\n', (7125, 7131), False, 'from PIL import Image, TiffImagePlugin\n'), ((7250, 7265), 'PIL.Image.open', 'Image.open', (['out'], {}), '(out)\n', (7260, 7265), False, 'from PIL import Image, 
TiffImagePlugin\n'), ((7539, 7583), 'PIL.Image.open', 'Image.open', (['"""Tests/images/12bit.cropped.tif"""'], {}), "('Tests/images/12bit.cropped.tif')\n", (7549, 7583), False, 'from PIL import Image, TiffImagePlugin\n'), ((7958, 7998), 'PIL.Image.open', 'Image.open', (['"""Tests/images/12in16bit.tif"""'], {}), "('Tests/images/12in16bit.tif')\n", (7968, 7998), False, 'from PIL import Image, TiffImagePlugin\n'), ((8521, 8560), 'PIL.Image.open', 'Image.open', (['"""Tests/images/pport_g4.tif"""'], {}), "('Tests/images/pport_g4.tif')\n", (8531, 8560), False, 'from PIL import Image, TiffImagePlugin\n'), ((8713, 8728), 'PIL.Image.open', 'Image.open', (['out'], {}), '(out)\n', (8723, 8728), False, 'from PIL import Image, TiffImagePlugin\n'), ((8837, 8850), 'helper.hopper', 'hopper', (['"""RGB"""'], {}), "('RGB')\n", (8843, 8850), False, 'from helper import unittest, PillowTestCase, hopper, py3\n'), ((9130, 9145), 'PIL.Image.open', 'Image.open', (['out'], {}), '(out)\n', (9140, 9145), False, 'from PIL import Image, TiffImagePlugin\n'), ((9237, 9251), 'helper.hopper', 'hopper', (['"""CMYK"""'], {}), "('CMYK')\n", (9243, 9251), False, 'from helper import unittest, PillowTestCase, hopper, py3\n'), ((9362, 9377), 'PIL.Image.open', 'Image.open', (['out'], {}), '(out)\n', (9372, 9377), False, 'from PIL import Image, TiffImagePlugin\n'), ((9671, 9684), 'helper.hopper', 'hopper', (['"""RGB"""'], {}), "('RGB')\n", (9677, 9684), False, 'from helper import unittest, PillowTestCase, hopper, py3\n'), ((10022, 10066), 'PIL.Image.open', 'Image.open', (['"""Tests/images/hopper_g4_500.tif"""'], {}), "('Tests/images/hopper_g4_500.tif')\n", (10032, 10066), False, 'from PIL import Image, TiffImagePlugin\n'), ((10104, 10116), 'os.fstat', 'os.fstat', (['fn'], {}), '(fn)\n', (10112, 10116), False, 'import os\n'), ((10497, 10538), 'PIL.Image.open', 'Image.open', (['"""Tests/images/multipage.tiff"""'], {}), "('Tests/images/multipage.tiff')\n", (10507, 10538), False, 'from PIL import Image, TiffImagePlugin\n'), ((11267, 11304), 'PIL.Image.open', 'Image.open', (['"""Tests/images/hopper.tif"""'], {}), "('Tests/images/hopper.tif')\n", (11277, 11304), False, 'from PIL import Image, TiffImagePlugin\n'), ((11518, 11529), 'helper.hopper', 'hopper', (['"""L"""'], {}), "('L')\n", (11524, 11529), False, 'from helper import unittest, PillowTestCase, hopper, py3\n'), ((11602, 11623), 'PIL.Image.open', 'Image.open', (['test_file'], {}), '(test_file)\n', (11612, 11623), False, 'from PIL import Image, TiffImagePlugin\n'), ((12065, 12073), 'helper.hopper', 'hopper', ([], {}), '()\n', (12071, 12073), False, 'from helper import unittest, PillowTestCase, hopper, py3\n'), ((1458, 1471), 'PIL.Image.open', 'Image.open', (['f'], {}), '(f)\n', (1468, 1471), False, 'from PIL import Image, TiffImagePlugin\n'), ((8614, 8641), 'PIL.ImageFilter.GaussianBlur', 'ImageFilter.GaussianBlur', (['(4)'], {}), '(4)\n', (8638, 8641), False, 'from PIL import ImageFilter\n'), ((9013, 9028), 'PIL.Image.open', 'Image.open', (['out'], {}), '(out)\n', (9023, 9028), False, 'from PIL import Image, TiffImagePlugin\n'), ((12144, 12156), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (12154, 12156), False, 'import io\n'), ((12287, 12308), 'PIL.Image.open', 'Image.open', (['buffer_io'], {}), '(buffer_io)\n', (12297, 12308), False, 'from PIL import Image, TiffImagePlugin\n'), ((10203, 10215), 'os.fstat', 'os.fstat', (['fn'], {}), '(fn)\n', (10211, 10215), False, 'import os\n'), ((10317, 10329), 'os.fstat', 'os.fstat', (['fn'], {}), '(fn)\n', (10325, 10329), False, 'import 
os\n'), ((10374, 10386), 'os.close', 'os.close', (['fn'], {}), '(fn)\n', (10382, 10386), False, 'import os\n')] |
from stdiomask import getpass
from cowsay import daemon, ghostbusters, kitty
from check_validation_ID_Post import check_validation
from driver_chrome import *
from DataScrapting import *
tracking_ID = getpass("Enter Your Post Id(24 digit): ")
check_validation(tracking_ID)
URL = f"https://tracking.post.ir/?id={tracking_ID}&client=app"
driver = driverChomre(URL)
page_source = PageSource(driver)
soup = mining(page_source)
warning = soup.warning()
security = soup.security()
if warning is None and security is None:
dst_lst = soup.FindAll()
    # Pair consecutive entries: each even-indexed item with the odd-indexed item that follows it.
    new_lst = [(i.text, j.text) for i, j in zip(dst_lst[::2], dst_lst[1::2])]
new_lst.reverse()
print("\n*******************************************************************")
for i,dst in enumerate(new_lst):
print(f"\t\t\t{i+1}\n")
print(f"{dst[0]}\n")
print(f"{dst[1]}")
print("========================================================================")
elif warning is not None:
ghostbusters(f"\n {warning.text}")
else:
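    # Persian message below: "A high volume of traffic is being sent from your side to our
    # services! Please try again in a few minutes."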
daemon("از سمت شما ترافیک بالایی سمت سرویس های ما ارسال می شود! لطفا چند دقیقه دیگر امتحان کنید.") | [
"cowsay.ghostbusters",
"stdiomask.getpass",
"check_validation_ID_Post.check_validation",
"cowsay.daemon"
] | [((202, 243), 'stdiomask.getpass', 'getpass', (['"""Enter Your Post Id(24 digit): """'], {}), "('Enter Your Post Id(24 digit): ')\n", (209, 243), False, 'from stdiomask import getpass\n'), ((244, 273), 'check_validation_ID_Post.check_validation', 'check_validation', (['tracking_ID'], {}), '(tracking_ID)\n', (260, 273), False, 'from check_validation_ID_Post import check_validation\n'), ((1156, 1190), 'cowsay.ghostbusters', 'ghostbusters', (['f"""\n {warning.text}"""'], {}), "(f'\\n {warning.text}')\n", (1168, 1190), False, 'from cowsay import daemon, ghostbusters, kitty\n'), ((1202, 1310), 'cowsay.daemon', 'daemon', (['"""از سمت شما ترافیک بالایی سمت سرویس های ما ارسال می شود! لطفا چند دقیقه دیگر امتحان کنید."""'], {}), "(\n 'از سمت شما ترافیک بالایی سمت سرویس های ما ارسال می شود! لطفا چند دقیقه دیگر امتحان کنید.'\n )\n", (1208, 1310), False, 'from cowsay import daemon, ghostbusters, kitty\n')] |
from time import sleep
from pitop import TiltRollHeadController
# Create a head controller object
head = TiltRollHeadController()
# Initialize the servo angles
head.roll.target_angle = 0
head.tilt.target_angle = 50
sleep(1)
# Nod 6 times at max speed 5 degrees either side of current angle. Blocks program execution until finished.
head.nod(times=6, angle=5, speed=100, block=True)
# Shake 4 times at half speed 10 degrees either side of current angle. Blocks program execution until finished.
head.shake(times=4, angle=10, speed=50, block=True)
# Shake and nod at the same time with default speed and angle
# Setting nod with block=False ensures the program continues to the next command
head.nod(times=6, block=False)
head.shake(times=6, block=True)
| [
"pitop.TiltRollHeadController",
"time.sleep"
] | [((107, 131), 'pitop.TiltRollHeadController', 'TiltRollHeadController', ([], {}), '()\n', (129, 131), False, 'from pitop import TiltRollHeadController\n'), ((218, 226), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (223, 226), False, 'from time import sleep\n')] |
import sys
sys.path.insert(0, '../models')
from get_data import GetData
# from python.ultilities.get_data import GetData
import unittest
import csv
class TestGetData(unittest.TestCase):
def test_getAllFeatures1(self):
getData = GetData()
features = getData.getAllFeatures()
self.assertIsNotNone(features)
def test_getAllFeatures2(self):
getData = GetData(101)
features = getData.getAllFeatures()
self.assertIsNotNone(features)
self.assertEqual(len(features), 100)
def test_getAllFeatures3(self):
getData = GetData(5)
features = getData.getAllFeatures('open', 'close')
self.assertIsNotNone(features)
self.assertEqual(len(features[0][0]), 2)
if __name__ == '__main__':
unittest.main() | [
"unittest.main",
"sys.path.insert",
"get_data.GetData"
] | [((11, 42), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""../models"""'], {}), "(0, '../models')\n", (26, 42), False, 'import sys\n'), ((779, 794), 'unittest.main', 'unittest.main', ([], {}), '()\n', (792, 794), False, 'import unittest\n'), ((241, 250), 'get_data.GetData', 'GetData', ([], {}), '()\n', (248, 250), False, 'from get_data import GetData\n'), ((389, 401), 'get_data.GetData', 'GetData', (['(101)'], {}), '(101)\n', (396, 401), False, 'from get_data import GetData\n'), ((589, 599), 'get_data.GetData', 'GetData', (['(5)'], {}), '(5)\n', (596, 599), False, 'from get_data import GetData\n')] |
# pylint: disable=missing-docstring
from __future__ import print_function
from mock import patch
from gitflow_easyrelease import cli
@patch('gitflow_easyrelease.cli_file.ColorOutput')
@patch('gitflow_easyrelease.cli_file.Subcommand')
@patch('gitflow_easyrelease.cli_file.Application')
def test_execution(mock_app, mock_sub, mock_color):
mock_color.assert_not_called()
mock_sub.assert_not_called()
mock_app.assert_not_called()
cli()
mock_color.assert_called_once()
assert 1 <= mock_sub.call_count
mock_app.assert_called_once()
| [
"mock.patch",
"gitflow_easyrelease.cli"
] | [((140, 189), 'mock.patch', 'patch', (['"""gitflow_easyrelease.cli_file.ColorOutput"""'], {}), "('gitflow_easyrelease.cli_file.ColorOutput')\n", (145, 189), False, 'from mock import patch\n'), ((191, 239), 'mock.patch', 'patch', (['"""gitflow_easyrelease.cli_file.Subcommand"""'], {}), "('gitflow_easyrelease.cli_file.Subcommand')\n", (196, 239), False, 'from mock import patch\n'), ((241, 290), 'mock.patch', 'patch', (['"""gitflow_easyrelease.cli_file.Application"""'], {}), "('gitflow_easyrelease.cli_file.Application')\n", (246, 290), False, 'from mock import patch\n'), ((448, 453), 'gitflow_easyrelease.cli', 'cli', ([], {}), '()\n', (451, 453), False, 'from gitflow_easyrelease import cli\n')] |
import os
from flask import Flask, render_template, request, json
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/save', methods=['POST'])
def save():
with open('data.json', 'w+') as f:
f.write(json.dumps(request.get_json()))
return ''
@app.route('/load')
def load():
    # Fall back to an empty marker list if nothing has been saved yet.
    result = {"markers": []}
    if os.path.isfile('data.json'):
        with open('data.json', 'r') as f:
            result = json.loads(f.read())
    # jsonify the parsed object (jsonify on the raw JSON string would double-encode it).
    return json.jsonify(result)
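# Usage sketch (illustrative payload; assumes the app runs locally on Flask's default port 5000):
#   curl -X POST -H "Content-Type: application/json" \
#        -d '{"markers": [{"lat": 51.5, "lng": -0.1}]}' http://127.0.0.1:5000/save
#   curl http://127.0.0.1:5000/load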
if __name__ == '__main__':
app.run()
| [
"flask.Flask",
"flask.json.jsonify",
"os.path.isfile",
"flask.render_template",
"flask.request.get_json"
] | [((74, 89), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (79, 89), False, 'from flask import Flask, render_template, request, json\n'), ((128, 157), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (143, 157), False, 'from flask import Flask, render_template, request, json\n'), ((367, 394), 'os.path.isfile', 'os.path.isfile', (['"""data.json"""'], {}), "('data.json')\n", (381, 394), False, 'import os\n'), ((462, 482), 'flask.json.jsonify', 'json.jsonify', (['result'], {}), '(result)\n', (474, 482), False, 'from flask import Flask, render_template, request, json\n'), ((266, 284), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (282, 284), False, 'from flask import Flask, render_template, request, json\n')] |
import pytest
from align.schema.types import BaseModel, Optional, List, Dict
from align.schema.visitor import Visitor, Transformer, cache
@pytest.fixture
def dummy():
class DummyModel(BaseModel):
arg1: str
arg2: Optional[str]
arg3: List[str]
arg4: List[Optional[str]]
arg5: Dict[str, str]
arg6: Dict[str, Optional[str]]
arg7: "Optional[DummyModel]"
arg8: "Optional[List[DummyModel]]"
DummyModel.update_forward_refs()
base = DummyModel(
arg1 = 'arg1',
arg3 = ['arg3_1', 'arg3_2'],
arg4 = [],
arg5 = {'arg5_k': 'arg5_v'},
arg6 = {'arg6_k': None}
)
dummy = DummyModel(
arg1 = 'arg1',
arg3 = ['arg3_1', 'arg3_2'],
arg4 = [],
arg5 = {'arg5_k': 'arg5_v'},
arg6 = {'arg6_k': None},
arg7 = base,
arg8 = [base, base]
)
return dummy
def test_visitor_no_output(dummy):
assert Visitor().visit(dummy) == []
def test_visitor_raw_output(dummy):
class StrValVisitor(Visitor):
def visit_str(self, node):
return node
assert StrValVisitor().visit(dummy) == [
'arg1',
'arg3_1',
'arg3_2',
'arg5_v',
'arg1',
'arg3_1',
'arg3_2',
'arg5_v',
'arg1',
'arg3_1',
'arg3_2',
'arg5_v',
'arg1',
'arg3_1',
'arg3_2',
'arg5_v',
]
def test_visitor_processed_output(dummy):
class DummyCounter(Visitor):
'''Simply counts the number of times the dummy class is encountered'''
def visit_DummyModel(self, node):
return sum(self.generic_visit(node)) + 1
assert DummyCounter().visit(dummy) == 4
def test_transformer_no_visitor(dummy):
assert Transformer().visit(dummy.arg1) is dummy.arg1
assert Transformer().visit(dummy.arg2) is dummy.arg2
assert Transformer().visit(dummy.arg3) is dummy.arg3
assert Transformer().visit(dummy.arg4) is dummy.arg4
assert Transformer().visit(dummy.arg5) is dummy.arg5
assert Transformer().visit(dummy.arg6) is dummy.arg6
assert Transformer().visit(dummy.arg7) is dummy.arg7
assert Transformer().visit(dummy.arg8) is dummy.arg8
assert Transformer().visit(dummy) is dummy
def test_transformer_string_visitor(dummy):
class AddStringPrefix(Transformer):
def visit_str(self, node):
return 'prefix_' + node
transformed = AddStringPrefix().visit(dummy)
assert isinstance(transformed, dummy.__class__)
# String in subtree
assert transformed.arg1 == 'prefix_arg1'
assert transformed.arg1 is not dummy.arg1
# No string in subtree
assert transformed.arg2 == None
assert transformed.arg2 is dummy.arg2
# String in subtree
assert transformed.arg3 == ['prefix_arg3_1', 'prefix_arg3_2']
assert transformed.arg3 is not dummy.arg3
# No string in subtree
assert transformed.arg4 == []
assert transformed.arg4 is dummy.arg4, f'old:({id(dummy.arg4)}, {dummy.arg4}), new:({id(transformed.arg4)}, {transformed.arg4})'
# String in subtree
assert transformed.arg5 == {'arg5_k': 'prefix_arg5_v'}
assert transformed.arg5 is not dummy.arg5
# No string in subtree
assert transformed.arg6 == {'arg6_k': None}
assert transformed.arg6 is dummy.arg6
# Expected result for arg7 and arg8
basedict = {'arg1': 'prefix_arg1',
'arg2': None,
'arg3': ['prefix_arg3_1',
'prefix_arg3_2'],
'arg4': [],
'arg5': {'arg5_k': 'prefix_arg5_v'},
'arg6': {'arg6_k': None},
'arg7': None,
'arg8': None}
# String in subtree
assert transformed.arg7 == basedict
assert transformed.arg7 is not dummy.arg7
# String in subtree
assert transformed.arg8 == [basedict, basedict]
assert transformed.arg8 is not dummy.arg8
# Ensure cache is working for generic_visitor
assert transformed.arg7 is transformed.arg8[0]
assert transformed.arg8[0] is transformed.arg8[1]
def test_cache(dummy):
class UncachedTransformer(Transformer):
def visit_DummyModel(self, node):
if not hasattr(self, 'top'):
self.top = node
return self.generic_visit(node)
else:
return node.copy()
control = UncachedTransformer().visit(dummy)
assert control.arg7 is not control.arg8[0]
assert control.arg8[0] is not control.arg8[1]
class CachedTransformer(Transformer):
@cache # DO THIS FOR MOST VISITORS
def visit_DummyModel(self, node):
if not hasattr(self, 'top'):
self.top = node
return self.generic_visit(node)
else:
return node.copy()
transformed = CachedTransformer().visit(dummy)
assert transformed.arg7 is transformed.arg8[0]
assert transformed.arg8[0] is transformed.arg8[1]
| [
"align.schema.visitor.Visitor",
"align.schema.visitor.Transformer"
] | [((961, 970), 'align.schema.visitor.Visitor', 'Visitor', ([], {}), '()\n', (968, 970), False, 'from align.schema.visitor import Visitor, Transformer, cache\n'), ((1801, 1814), 'align.schema.visitor.Transformer', 'Transformer', ([], {}), '()\n', (1812, 1814), False, 'from align.schema.visitor import Visitor, Transformer, cache\n'), ((1858, 1871), 'align.schema.visitor.Transformer', 'Transformer', ([], {}), '()\n', (1869, 1871), False, 'from align.schema.visitor import Visitor, Transformer, cache\n'), ((1915, 1928), 'align.schema.visitor.Transformer', 'Transformer', ([], {}), '()\n', (1926, 1928), False, 'from align.schema.visitor import Visitor, Transformer, cache\n'), ((1972, 1985), 'align.schema.visitor.Transformer', 'Transformer', ([], {}), '()\n', (1983, 1985), False, 'from align.schema.visitor import Visitor, Transformer, cache\n'), ((2029, 2042), 'align.schema.visitor.Transformer', 'Transformer', ([], {}), '()\n', (2040, 2042), False, 'from align.schema.visitor import Visitor, Transformer, cache\n'), ((2086, 2099), 'align.schema.visitor.Transformer', 'Transformer', ([], {}), '()\n', (2097, 2099), False, 'from align.schema.visitor import Visitor, Transformer, cache\n'), ((2143, 2156), 'align.schema.visitor.Transformer', 'Transformer', ([], {}), '()\n', (2154, 2156), False, 'from align.schema.visitor import Visitor, Transformer, cache\n'), ((2200, 2213), 'align.schema.visitor.Transformer', 'Transformer', ([], {}), '()\n', (2211, 2213), False, 'from align.schema.visitor import Visitor, Transformer, cache\n'), ((2257, 2270), 'align.schema.visitor.Transformer', 'Transformer', ([], {}), '()\n', (2268, 2270), False, 'from align.schema.visitor import Visitor, Transformer, cache\n')] |
# coding: utf-8
import sys
import logging
import settings
logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s')
logger = logging.getLogger()
fileHandler = logging.FileHandler('{0}'.format(settings.LOG_FILE_PATH))
fileHandler.setFormatter(logFormatter)
logger.addHandler(fileHandler)
consoleHandler = logging.StreamHandler(sys.stdout)
consoleHandler.setFormatter(logFormatter)
logger.addHandler(consoleHandler) | [
"logging.Formatter",
"logging.StreamHandler",
"logging.getLogger"
] | [((76, 141), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s [%(levelname)-5.5s] %(message)s"""'], {}), "('%(asctime)s [%(levelname)-5.5s] %(message)s')\n", (93, 141), False, 'import logging\n'), ((151, 170), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (168, 170), False, 'import logging\n'), ((332, 365), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (353, 365), False, 'import logging\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-01-31 11:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ciudadano', '0005_auto_20170127_1841'),
]
operations = [
migrations.RemoveField(
model_name='ciudadano',
name='uuid',
),
migrations.AlterField(
model_name='ciudadano',
name='numero_documento',
field=models.CharField(blank=True, max_length=11, null=True, unique=True,
verbose_name='Número de documento'),
),
]
| [
"django.db.migrations.RemoveField",
"django.db.models.CharField"
] | [((301, 360), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""ciudadano"""', 'name': '"""uuid"""'}), "(model_name='ciudadano', name='uuid')\n", (323, 360), False, 'from django.db import migrations, models\n'), ((519, 626), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(11)', 'null': '(True)', 'unique': '(True)', 'verbose_name': '"""Número de documento"""'}), "(blank=True, max_length=11, null=True, unique=True,\n verbose_name='Número de documento')\n", (535, 626), False, 'from django.db import migrations, models\n')] |
# -*- coding:utf-8 -*-
import json
from datetime import timedelta
from markdown2 import markdown
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.utils import timezone
from django.db.models import Max
from django.utils.timezone import now
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, redirect
from django.template.loader import render_to_string
from django.views.generic import DetailView, TemplateView, CreateView, View
from django.views.generic.edit import UpdateView
from django.db.models import Count
from blog.models import Post
from premises.utils import int_or_zero
from premises.models import Contention, Premise
from premises.forms import (ArgumentCreationForm, PremiseCreationForm,
PremiseEditForm, ReportForm)
from premises.signals import (added_premise_for_premise,
added_premise_for_contention, reported_as_fallacy,
supported_a_premise)
from premises.templatetags.premise_tags import check_content_deletion
from newsfeed.models import Entry
class ContentionDetailView(DetailView):
template_name = "premises/contention_detail.html"
model = Contention
def get_context_data(self, **kwargs):
contention = self.get_object()
view = ("list-view" if self.request.GET.get("view") == "list"
else "tree-view")
edit_mode = (
self.request.user.is_superuser or
self.request.user.is_staff or
contention.user == self.request.user)
return super(ContentionDetailView, self).get_context_data(
view=view,
path=contention.get_absolute_url(),
edit_mode=edit_mode,
**kwargs)
class ContentionJsonView(DetailView):
model = Contention
def render_to_response(self, context, **response_kwargs):
contention = self.get_object(self.get_queryset())
return HttpResponse(json.dumps({
"nodes": self.build_tree(contention, self.request.user),
}), content_type="application/json")
def build_tree(self, contention, user):
return {
"name": contention.title,
"parent": None,
"pk": contention.pk,
"owner": contention.owner,
"sources": contention.sources,
"is_singular": self.is_singular(contention),
"children": self.get_premises(contention, user)
}
def get_premises(self, contention, user, parent=None):
children = [{
"pk": premise.pk,
"name": premise.text,
"parent": parent.text if parent else None,
"reportable_by_authenticated_user": self.user_can_report(premise, user),
"report_count": premise.reports.count(),
"user": {
"id": premise.user.id,
"username": premise.user.username,
"absolute_url": reverse("auth_profile",
args=[premise.user.username])
},
"sources": premise.sources,
"premise_type": premise.premise_class(),
"children": (self.get_premises(contention, user, parent=premise)
if premise.published_children().exists() else [])
} for premise in contention.published_premises(parent)]
return children
def user_can_report(self, premise, user):
if user.is_authenticated() and user != premise.user:
return not premise.reported_by(user)
return False
def is_singular(self, contention):
result = (contention
.premises
.all()
.aggregate(max_sibling=Max('sibling_count')))
return result['max_sibling'] <= 1
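# Illustrative shape of the response built by ContentionJsonView above (values are
# placeholders; "children" nests recursively):
#   {"nodes": {"name": "...", "parent": None, "pk": 1, "owner": "...", "sources": "...",
#              "is_singular": True, "children": [{"pk": 2, "name": "...", "children": []}]}}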
class HomeView(TemplateView):
template_name = "index.html"
tab_class = "featured"
paginate_by = 20
def get_context_data(self, **kwargs):
contentions = self.get_contentions()
if self.request.user.is_authenticated():
notifications_qs = self.get_unread_notifications()
notifications = list(notifications_qs)
self.mark_as_read(notifications_qs)
else:
notifications = None
return super(HomeView, self).get_context_data(
next_page_url=self.get_next_page_url(),
tab_class=self.tab_class,
notifications=notifications,
has_next_page=self.has_next_page(),
announcements=self.get_announcements(),
contentions=contentions, **kwargs)
def get_announcements(self):
return Post.objects.filter(is_announcement=True)
def get_offset(self):
return int_or_zero(self.request.GET.get("offset"))
def get_limit(self):
return self.get_offset() + self.paginate_by
def has_next_page(self):
total = self.get_contentions(paginate=False).count()
return total > (self.get_offset() + self.paginate_by)
def get_next_page_url(self):
offset = self.get_offset() + self.paginate_by
return '?offset=%(offset)s' % {
"offset": offset
}
def get_unread_notifications(self):
return (self.request.user
.notifications
.filter(is_read=False)
[:5])
def mark_as_read(self, notifications):
pks = notifications.values_list("id", flat=True)
(self.request.user
.notifications
.filter(id__in=pks)
.update(is_read=True))
def get_contentions(self, paginate=True):
contentions = (Contention
.objects
.featured())
if paginate:
contentions = (contentions[self.get_offset(): self.get_limit()])
return contentions
class NotificationsView(HomeView):
template_name = "notifications.html"
def get_context_data(self, **kwargs):
notifications_qs = self.request.user.notifications.all()[:40]
notifications = list(notifications_qs)
self.mark_as_read(notifications_qs)
return super(HomeView, self).get_context_data(
notifications=notifications,
**kwargs)
class SearchView(HomeView):
tab_class = 'search'
def get_context_data(self, **kwargs):
return super(SearchView, self).get_context_data(
keywords=self.get_keywords(),
**kwargs
)
def get_keywords(self):
return self.request.GET.get('keywords') or ""
def get_next_page_url(self):
offset = self.get_offset() + self.paginate_by
return '?offset=%(offset)s&keywords=%(keywords)s' % {
"offset": offset,
"keywords": self.get_keywords()
}
def get_contentions(self, paginate=True):
keywords = self.request.GET.get('keywords')
if not keywords or len(keywords) < 2:
result = Contention.objects.none()
else:
result = (Contention
.objects
.filter(title__icontains=keywords))
if paginate:
result = result[self.get_offset():self.get_limit()]
return result
class NewsView(HomeView):
tab_class = "news"
def get_contentions(self, paginate=True):
contentions = Contention.objects.filter(
is_published=True)
if paginate:
contentions = contentions[self.get_offset():self.get_limit()]
return contentions
class UpdatedArgumentsView(HomeView):
tab_class = "updated"
def get_contentions(self, paginate=True):
contentions = (Contention
.objects
.filter(is_published=True)
.order_by('-date_modification'))
if paginate:
contentions = contentions[self.get_offset():self.get_limit()]
return contentions
class ControversialArgumentsView(HomeView):
tab_class = "controversial"
def get_contentions(self, paginate=True):
last_week = now() - timedelta(days=3)
contentions = (Contention
.objects
.annotate(num_children=Count('premises'))
.order_by('-num_children')
.filter(date_modification__gte=last_week))
if paginate:
return contentions[self.get_offset():self.get_limit()]
return contentions
class AboutView(TemplateView):
template_name = "about.html"
def get_context_data(self, **kwargs):
content = markdown(render_to_string("about.md"))
return super(AboutView, self).get_context_data(
content=content, **kwargs)
class TosView(TemplateView):
template_name = "tos.html"
def get_context_data(self, **kwargs):
content = markdown(render_to_string("tos.md"))
return super(TosView, self).get_context_data(
content=content, **kwargs)
class ArgumentCreationView(CreateView):
template_name = "premises/new_contention.html"
form_class = ArgumentCreationForm
def form_valid(self, form):
form.instance.user = self.request.user
form.instance.ip_address = self.request.META['REMOTE_ADDR']
response = super(ArgumentCreationView, self).form_valid(form)
form.instance.update_sibling_counts()
return response
class ArgumentUpdateView(UpdateView):
template_name = "premises/edit_contention.html"
form_class = ArgumentCreationForm
def get_queryset(self):
contentions = Contention.objects.all()
if self.request.user.is_superuser:
return contentions
return contentions.filter(user=self.request.user)
def form_valid(self, form):
form.instance.user = self.request.user
response = super(ArgumentUpdateView, self).form_valid(form)
form.instance.update_sibling_counts()
return response
class ArgumentPublishView(DetailView):
def get_queryset(self):
return Contention.objects.filter(user=self.request.user)
def post(self, request, slug):
contention = self.get_object()
if contention.premises.exists():
contention.is_published = True
contention.save()
messages.info(request, u"Argüman yayına alındı.")
else:
messages.info(request, u"Argümanı yayına almadan önce en az 1 "
u"önerme ekleyin.")
return redirect(contention)
class ArgumentUnpublishView(DetailView):
def get_queryset(self):
return Contention.objects.filter(user=self.request.user)
def post(self, request, slug):
contention = self.get_object()
contention.is_published = False
contention.save()
messages.info(request, u"Argüman yayından kaldırıldı.")
return redirect(contention)
class ArgumentDeleteView(DetailView):
def get_queryset(self):
return Contention.objects.filter(user=self.request.user)
def post(self, request, slug):
contention = self.get_object()
if check_content_deletion(contention):
# remove notification
Entry.objects.delete(contention.get_newsfeed_type(), contention.id)
contention.delete()
messages.info(request, u"Argümanınız silindi.")
return redirect("home")
else:
messages.info(request, u"Argümanınız silinecek durumda değil.")
return redirect(contention)
delete = post
class PremiseEditView(UpdateView):
template_name = "premises/edit_premise.html"
form_class = PremiseEditForm
def get_queryset(self):
premises = Premise.objects.all()
if self.request.user.is_superuser:
return premises
return premises.filter(user=self.request.user)
def form_valid(self, form):
response = super(PremiseEditView, self).form_valid(form)
form.instance.argument.update_sibling_counts()
return response
def get_context_data(self, **kwargs):
return super(PremiseEditView, self).get_context_data(
#contention=self.get_contention(),
**kwargs)
class PremiseCreationView(CreateView):
template_name = "premises/new_premise.html"
form_class = PremiseCreationForm
def get_context_data(self, **kwargs):
return super(PremiseCreationView, self).get_context_data(
contention=self.get_contention(),
parent=self.get_parent(),
**kwargs)
def form_valid(self, form):
contention = self.get_contention()
form.instance.user = self.request.user
form.instance.argument = contention
form.instance.parent = self.get_parent()
form.instance.is_approved = True
form.instance.ip_address = self.request.META['REMOTE_ADDR']
form.save()
contention.update_sibling_counts()
if form.instance.parent:
added_premise_for_premise.send(sender=self,
premise=form.instance)
else:
added_premise_for_contention.send(sender=self,
premise=form.instance)
contention.date_modification = timezone.now()
contention.save()
return redirect(contention)
def get_contention(self):
return get_object_or_404(Contention, slug=self.kwargs['slug'])
def get_parent(self):
parent_pk = self.kwargs.get("pk")
if parent_pk:
return get_object_or_404(Premise, pk=parent_pk)
class PremiseSupportView(View):
def get_premise(self):
premises = Premise.objects.exclude(user=self.request.user)
return get_object_or_404(premises, pk=self.kwargs['pk'])
def post(self, request, *args, **kwargs):
premise = self.get_premise()
premise.supporters.add(self.request.user)
supported_a_premise.send(sender=self, premise=premise,
user=self.request.user)
return redirect(self.get_contention())
def get_contention(self):
return get_object_or_404(Contention, slug=self.kwargs['slug'])
class PremiseUnsupportView(PremiseSupportView):
def delete(self, request, *args, **kwargs):
premise = self.get_premise()
premise.supporters.remove(self.request.user)
return redirect(self.get_contention())
post = delete
class PremiseDeleteView(View):
def get_premise(self):
if self.request.user.is_staff:
premises = Premise.objects.all()
else:
premises = Premise.objects.filter(user=self.request.user)
return get_object_or_404(premises,
pk=self.kwargs['pk'])
def delete(self, request, *args, **kwargs):
premise = self.get_premise()
premise.delete()
premise.update_sibling_counts()
contention = self.get_contention()
if not contention.premises.exists():
contention.is_published = False
contention.save()
return redirect(contention)
post = delete
def get_contention(self):
return get_object_or_404(Contention, slug=self.kwargs['slug'])
class ReportView(CreateView):
form_class = ReportForm
template_name = "premises/report.html"
def get_context_data(self, **kwargs):
return super(ReportView, self).get_context_data(
premise=self.get_premise(),
**kwargs)
def get_contention(self):
return get_object_or_404(Contention, slug=self.kwargs['slug'])
def get_premise(self):
return get_object_or_404(Premise, pk=self.kwargs['pk'])
def get_initial(self):
return {
'contention': self.get_contention(),
'premise': self.get_premise(),
'reporter': self.request.user
}
def form_valid(self, form):
contention = self.get_contention()
premise = self.get_premise()
form.instance.contention = contention
form.instance.premise = premise
form.instance.reporter = self.request.user
form.save()
reported_as_fallacy.send(sender=self, report=form.instance)
return redirect(contention)
| [
"django.core.urlresolvers.reverse",
"premises.models.Contention.objects.none",
"premises.signals.supported_a_premise.send",
"premises.models.Premise.objects.all",
"premises.signals.added_premise_for_contention.send",
"django.contrib.messages.info",
"premises.models.Premise.objects.exclude",
"django.db.models.Max",
"django.utils.timezone.now",
"premises.signals.reported_as_fallacy.send",
"premises.models.Premise.objects.filter",
"datetime.timedelta",
"premises.templatetags.premise_tags.check_content_deletion",
"premises.models.Contention.objects.featured",
"premises.signals.added_premise_for_premise.send",
"blog.models.Post.objects.filter",
"django.shortcuts.get_object_or_404",
"premises.models.Contention.objects.all",
"django.shortcuts.redirect",
"django.template.loader.render_to_string",
"premises.models.Contention.objects.filter",
"django.db.models.Count"
] | [((4685, 4726), 'blog.models.Post.objects.filter', 'Post.objects.filter', ([], {'is_announcement': '(True)'}), '(is_announcement=True)\n', (4704, 4726), False, 'from blog.models import Post\n'), ((5685, 5714), 'premises.models.Contention.objects.featured', 'Contention.objects.featured', ([], {}), '()\n', (5712, 5714), False, 'from premises.models import Contention, Premise\n'), ((7402, 7446), 'premises.models.Contention.objects.filter', 'Contention.objects.filter', ([], {'is_published': '(True)'}), '(is_published=True)\n', (7427, 7446), False, 'from premises.models import Contention, Premise\n'), ((9644, 9668), 'premises.models.Contention.objects.all', 'Contention.objects.all', ([], {}), '()\n', (9666, 9668), False, 'from premises.models import Contention, Premise\n'), ((10104, 10153), 'premises.models.Contention.objects.filter', 'Contention.objects.filter', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (10129, 10153), False, 'from premises.models import Contention, Premise\n'), ((10565, 10585), 'django.shortcuts.redirect', 'redirect', (['contention'], {}), '(contention)\n', (10573, 10585), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((10673, 10722), 'premises.models.Contention.objects.filter', 'Contention.objects.filter', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (10698, 10722), False, 'from premises.models import Contention, Premise\n'), ((10872, 10927), 'django.contrib.messages.info', 'messages.info', (['request', 'u"""Argüman yayından kaldırıldı."""'], {}), "(request, u'Argüman yayından kaldırıldı.')\n", (10885, 10927), False, 'from django.contrib import messages\n'), ((10943, 10963), 'django.shortcuts.redirect', 'redirect', (['contention'], {}), '(contention)\n', (10951, 10963), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((11048, 11097), 'premises.models.Contention.objects.filter', 'Contention.objects.filter', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (11073, 11097), False, 'from premises.models import Contention, Premise\n'), ((11184, 11218), 'premises.templatetags.premise_tags.check_content_deletion', 'check_content_deletion', (['contention'], {}), '(contention)\n', (11206, 11218), False, 'from premises.templatetags.premise_tags import check_content_deletion\n'), ((11778, 11799), 'premises.models.Premise.objects.all', 'Premise.objects.all', ([], {}), '()\n', (11797, 11799), False, 'from premises.models import Contention, Premise\n'), ((13344, 13358), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (13356, 13358), False, 'from django.utils import timezone\n'), ((13401, 13421), 'django.shortcuts.redirect', 'redirect', (['contention'], {}), '(contention)\n', (13409, 13421), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((13468, 13523), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Contention'], {'slug': "self.kwargs['slug']"}), "(Contention, slug=self.kwargs['slug'])\n", (13485, 13523), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((13754, 13801), 'premises.models.Premise.objects.exclude', 'Premise.objects.exclude', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (13777, 13801), False, 'from premises.models import Contention, Premise\n'), ((13817, 13866), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['premises'], {'pk': "self.kwargs['pk']"}), "(premises, pk=self.kwargs['pk'])\n", (13834, 13866), False, 'from django.shortcuts import 
get_object_or_404, redirect\n'), ((14009, 14087), 'premises.signals.supported_a_premise.send', 'supported_a_premise.send', ([], {'sender': 'self', 'premise': 'premise', 'user': 'self.request.user'}), '(sender=self, premise=premise, user=self.request.user)\n', (14033, 14087), False, 'from premises.signals import added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise\n'), ((14214, 14269), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Contention'], {'slug': "self.kwargs['slug']"}), "(Contention, slug=self.kwargs['slug'])\n", (14231, 14269), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((14767, 14816), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['premises'], {'pk': "self.kwargs['pk']"}), "(premises, pk=self.kwargs['pk'])\n", (14784, 14816), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((15178, 15198), 'django.shortcuts.redirect', 'redirect', (['contention'], {}), '(contention)\n', (15186, 15198), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((15264, 15319), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Contention'], {'slug': "self.kwargs['slug']"}), "(Contention, slug=self.kwargs['slug'])\n", (15281, 15319), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((15631, 15686), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Contention'], {'slug': "self.kwargs['slug']"}), "(Contention, slug=self.kwargs['slug'])\n", (15648, 15686), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((15730, 15778), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Premise'], {'pk': "self.kwargs['pk']"}), "(Premise, pk=self.kwargs['pk'])\n", (15747, 15778), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((16246, 16305), 'premises.signals.reported_as_fallacy.send', 'reported_as_fallacy.send', ([], {'sender': 'self', 'report': 'form.instance'}), '(sender=self, report=form.instance)\n', (16270, 16305), False, 'from premises.signals import added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise\n'), ((16321, 16341), 'django.shortcuts.redirect', 'redirect', (['contention'], {}), '(contention)\n', (16329, 16341), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((7003, 7028), 'premises.models.Contention.objects.none', 'Contention.objects.none', ([], {}), '()\n', (7026, 7028), False, 'from premises.models import Contention, Premise\n'), ((7065, 7117), 'premises.models.Contention.objects.filter', 'Contention.objects.filter', ([], {'title__icontains': 'keywords'}), '(title__icontains=keywords)\n', (7090, 7117), False, 'from premises.models import Contention, Premise\n'), ((8142, 8147), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (8145, 8147), False, 'from django.utils.timezone import now\n'), ((8150, 8167), 'datetime.timedelta', 'timedelta', ([], {'days': '(3)'}), '(days=3)\n', (8159, 8167), False, 'from datetime import timedelta\n'), ((8667, 8695), 'django.template.loader.render_to_string', 'render_to_string', (['"""about.md"""'], {}), "('about.md')\n", (8683, 8695), False, 'from django.template.loader import render_to_string\n'), ((8923, 8949), 'django.template.loader.render_to_string', 'render_to_string', (['"""tos.md"""'], {}), "('tos.md')\n", (8939, 8949), False, 'from django.template.loader import render_to_string\n'), ((10355, 10404), 'django.contrib.messages.info', 'messages.info', (['request', 
'u"""Argüman yayına alındı."""'], {}), "(request, u'Argüman yayına alındı.')\n", (10368, 10404), False, 'from django.contrib import messages\n'), ((10431, 10510), 'django.contrib.messages.info', 'messages.info', (['request', 'u"""Argümanı yayına almadan önce en az 1 önerme ekleyin."""'], {}), "(request, u'Argümanı yayına almadan önce en az 1 önerme ekleyin.')\n", (10444, 10510), False, 'from django.contrib import messages\n'), ((11378, 11425), 'django.contrib.messages.info', 'messages.info', (['request', 'u"""Argümanınız silindi."""'], {}), "(request, u'Argümanınız silindi.')\n", (11391, 11425), False, 'from django.contrib import messages\n'), ((11445, 11461), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (11453, 11461), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((11488, 11551), 'django.contrib.messages.info', 'messages.info', (['request', 'u"""Argümanınız silinecek durumda değil."""'], {}), "(request, u'Argümanınız silinecek durumda değil.')\n", (11501, 11551), False, 'from django.contrib import messages\n'), ((11571, 11591), 'django.shortcuts.redirect', 'redirect', (['contention'], {}), '(contention)\n', (11579, 11591), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((13052, 13118), 'premises.signals.added_premise_for_premise.send', 'added_premise_for_premise.send', ([], {'sender': 'self', 'premise': 'form.instance'}), '(sender=self, premise=form.instance)\n', (13082, 13118), False, 'from premises.signals import added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise\n'), ((13188, 13257), 'premises.signals.added_premise_for_contention.send', 'added_premise_for_contention.send', ([], {'sender': 'self', 'premise': 'form.instance'}), '(sender=self, premise=form.instance)\n', (13221, 13257), False, 'from premises.signals import added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise\n'), ((13634, 13674), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Premise'], {'pk': 'parent_pk'}), '(Premise, pk=parent_pk)\n', (13651, 13674), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((14646, 14667), 'premises.models.Premise.objects.all', 'Premise.objects.all', ([], {}), '()\n', (14665, 14667), False, 'from premises.models import Contention, Premise\n'), ((14705, 14751), 'premises.models.Premise.objects.filter', 'Premise.objects.filter', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (14727, 14751), False, 'from premises.models import Contention, Premise\n'), ((3778, 3798), 'django.db.models.Max', 'Max', (['"""sibling_count"""'], {}), "('sibling_count')\n", (3781, 3798), False, 'from django.db.models import Max\n'), ((7721, 7765), 'premises.models.Contention.objects.filter', 'Contention.objects.filter', ([], {'is_published': '(True)'}), '(is_published=True)\n', (7746, 7765), False, 'from premises.models import Contention, Premise\n'), ((2991, 3044), 'django.core.urlresolvers.reverse', 'reverse', (['"""auth_profile"""'], {'args': '[premise.user.username]'}), "('auth_profile', args=[premise.user.username])\n", (2998, 3044), False, 'from django.core.urlresolvers import reverse\n'), ((8280, 8297), 'django.db.models.Count', 'Count', (['"""premises"""'], {}), "('premises')\n", (8285, 8297), False, 'from django.db.models import Count\n')] |
""" Implements the commands for viewing and manipulating the training manifest """
import json
import time
import os
from blaze.action import Policy
from blaze.logger import logger as log
from blaze.mahimahi.server import start_server
from . import command
@command.argument("replay_dir", help="The directory containing the save files captured by mahimahi")
@command.argument("--policy", help="The file path to a JSON-formatted push policy to serve")
@command.argument("--cert_path", help="Location of the server certificate")
@command.argument("--key_path", help="Location of the server key")
@command.argument(
"--cache_time", help="Do not cache objects which expire in less than this time (in seconds)", type=int, default=None
)
@command.argument(
"--extract_critical_requests",
help="true or false to specify if server should inject critical request extractor",
action="store_true",
)
@command.command
def replay(args):
"""
Starts a replay environment for the given replay directory, including setting up interfaces, running
a DNS server, and configuring and running an nginx server to serve the requests
"""
policy = None
cert_path = os.path.abspath(args.cert_path) if args.cert_path else None
key_path = os.path.abspath(args.key_path) if args.key_path else None
if args.policy:
log.debug("reading policy", push_policy=args.policy)
with open(args.policy, "r") as policy_file:
policy_dict = json.load(policy_file)
policy = Policy.from_dict(policy_dict)
with start_server(
args.replay_dir,
cert_path,
key_path,
policy,
cache_time=args.cache_time,
extract_critical_requests=args.extract_critical_requests,
):
while True:
time.sleep(86400)
| [
"os.path.abspath",
"json.load",
"blaze.mahimahi.server.start_server",
"blaze.logger.logger.debug",
"time.sleep",
"blaze.action.Policy.from_dict"
] | [((1183, 1214), 'os.path.abspath', 'os.path.abspath', (['args.cert_path'], {}), '(args.cert_path)\n', (1198, 1214), False, 'import os\n'), ((1258, 1288), 'os.path.abspath', 'os.path.abspath', (['args.key_path'], {}), '(args.key_path)\n', (1273, 1288), False, 'import os\n'), ((1345, 1397), 'blaze.logger.logger.debug', 'log.debug', (['"""reading policy"""'], {'push_policy': 'args.policy'}), "('reading policy', push_policy=args.policy)\n", (1354, 1397), True, 'from blaze.logger import logger as log\n'), ((1516, 1545), 'blaze.action.Policy.from_dict', 'Policy.from_dict', (['policy_dict'], {}), '(policy_dict)\n', (1532, 1545), False, 'from blaze.action import Policy\n'), ((1556, 1705), 'blaze.mahimahi.server.start_server', 'start_server', (['args.replay_dir', 'cert_path', 'key_path', 'policy'], {'cache_time': 'args.cache_time', 'extract_critical_requests': 'args.extract_critical_requests'}), '(args.replay_dir, cert_path, key_path, policy, cache_time=args.\n cache_time, extract_critical_requests=args.extract_critical_requests)\n', (1568, 1705), False, 'from blaze.mahimahi.server import start_server\n'), ((1476, 1498), 'json.load', 'json.load', (['policy_file'], {}), '(policy_file)\n', (1485, 1498), False, 'import json\n'), ((1789, 1806), 'time.sleep', 'time.sleep', (['(86400)'], {}), '(86400)\n', (1799, 1806), False, 'import time\n')] |
import os
import aiofiles
from pydub import AudioSegment
from amocrm_asterisk_ng.domain import File
from amocrm_asterisk_ng.domain import Filetype
from ..core import IFileConverter
from ...CallRecordsConfig import CallRecordsConfig
__all__ = [
"PydubFileConverter",
]
class PydubFileConverter(IFileConverter):
__slots__ = (
"__config",
)
def __init__(
self,
config: CallRecordsConfig,
) -> None:
self.__config = config
async def __get_content_from_file(
self,
path: str
) -> bytes:
async with aiofiles.open(path, mode='rb') as f:
content = await f.read()
return content
async def convert(self, file: File, new_filetype: Filetype) -> File:
if file.type == new_filetype:
return file
if not os.path.exists(self.__config.tmp_directory):
try:
os.makedirs(self.__config.tmp_directory)
except OSError as exc:
raise Exception(
f"FileConverter: conversion directory error: `{exc!r}`."
)
filepath = os.path.join(self.__config.tmp_directory, file.name)
async with aiofiles.open(filepath, mode='wb') as f:
await f.write(file.content)
if file.type == Filetype.MP3:
audio = AudioSegment.from_mp3(filepath)
elif file.type == Filetype.WAV:
audio = AudioSegment.from_wav(filepath)
        elif file.type == Filetype.WAVE:
            # WAVE is the same container as WAV; pydub reads it with from_wav().
            audio = AudioSegment.from_wav(filepath)
else:
raise Exception(f"Non-convertible type: `{file.type}`.")
new_filepath = os.path.join(
self.__config.tmp_directory,
"converted_" + file.name,
)
if new_filetype == Filetype.MP3:
new_format = "mp3"
elif new_filetype == Filetype.WAV:
new_format = "wav"
        elif new_filetype == Filetype.WAVE:
            # Export WAVE through the "wav" writer (same format on disk).
            new_format = "wav"
else:
raise Exception(
f"Non-convertible type: `{new_filetype}`."
)
audio.export(
new_filepath,
format=new_format,
bitrate='16k'
)
content = await self.__get_content_from_file(new_filepath)
os.remove(filepath)
os.remove(new_filepath)
return File(
name=file.name,
type=new_filetype,
content=content,
)
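# Usage sketch (assumes a CallRecordsConfig with a writable tmp_directory and an existing
# File instance; pydub needs ffmpeg/libav available for mp3 handling):
#
#   converter = PydubFileConverter(config=call_records_config)
#   wav_file = await converter.convert(mp3_file, Filetype.WAV)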
| [
"os.remove",
"pydub.AudioSegment.from_mp3",
"os.makedirs",
"aiofiles.open",
"os.path.exists",
"pydub.AudioSegment.from_wav",
"pydub.AudioSegment.from_WAVE",
"amocrm_asterisk_ng.domain.File",
"os.path.join"
] | [((1139, 1191), 'os.path.join', 'os.path.join', (['self.__config.tmp_directory', 'file.name'], {}), '(self.__config.tmp_directory, file.name)\n', (1151, 1191), False, 'import os\n'), ((1676, 1743), 'os.path.join', 'os.path.join', (['self.__config.tmp_directory', "('converted_' + file.name)"], {}), "(self.__config.tmp_directory, 'converted_' + file.name)\n", (1688, 1743), False, 'import os\n'), ((2310, 2329), 'os.remove', 'os.remove', (['filepath'], {}), '(filepath)\n', (2319, 2329), False, 'import os\n'), ((2338, 2361), 'os.remove', 'os.remove', (['new_filepath'], {}), '(new_filepath)\n', (2347, 2361), False, 'import os\n'), ((2378, 2434), 'amocrm_asterisk_ng.domain.File', 'File', ([], {'name': 'file.name', 'type': 'new_filetype', 'content': 'content'}), '(name=file.name, type=new_filetype, content=content)\n', (2382, 2434), False, 'from amocrm_asterisk_ng.domain import File\n'), ((587, 617), 'aiofiles.open', 'aiofiles.open', (['path'], {'mode': '"""rb"""'}), "(path, mode='rb')\n", (600, 617), False, 'import aiofiles\n'), ((837, 880), 'os.path.exists', 'os.path.exists', (['self.__config.tmp_directory'], {}), '(self.__config.tmp_directory)\n', (851, 880), False, 'import os\n'), ((1211, 1245), 'aiofiles.open', 'aiofiles.open', (['filepath'], {'mode': '"""wb"""'}), "(filepath, mode='wb')\n", (1224, 1245), False, 'import aiofiles\n'), ((1351, 1382), 'pydub.AudioSegment.from_mp3', 'AudioSegment.from_mp3', (['filepath'], {}), '(filepath)\n', (1372, 1382), False, 'from pydub import AudioSegment\n'), ((915, 955), 'os.makedirs', 'os.makedirs', (['self.__config.tmp_directory'], {}), '(self.__config.tmp_directory)\n', (926, 955), False, 'import os\n'), ((1443, 1474), 'pydub.AudioSegment.from_wav', 'AudioSegment.from_wav', (['filepath'], {}), '(filepath)\n', (1464, 1474), False, 'from pydub import AudioSegment\n'), ((1536, 1568), 'pydub.AudioSegment.from_WAVE', 'AudioSegment.from_WAVE', (['filepath'], {}), '(filepath)\n', (1558, 1568), False, 'from pydub import AudioSegment\n')] |
import unittest
from typing import List
import numpy as np
from py_headless_daw.processing.stream.stream_gain import StreamGain
from py_headless_daw.schema.dto.time_interval import TimeInterval
from py_headless_daw.schema.events.event import Event
from py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent
class StreamGainStrategyTest(unittest.TestCase):
def test_stream_gain_strategy(self):
strategy = StreamGain(np.float32(0.25))
interval = TimeInterval()
interval.start_in_bars = 0
interval.end_in_bars = 1
in_stream_buffer = np.ones(shape=(100,), dtype=np.float32)
out_stream_buffer = np.zeros(shape=(100,), dtype=np.float32)
setter_event: ParameterValueEvent = ParameterValueEvent(0, StreamGain.PARAMETER_GAIN, 0.55)
input_event_buffer: List[Event] = [setter_event]
output_event_buffer: List[Event] = []
strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [input_event_buffer], [output_event_buffer])
# the first few samples are closer to the initial value
for x in range(0, 3):
self.assertTrue(0.24 < out_stream_buffer[x] < 0.26)
# while the last few are closer to the target one
for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]):
            self.assertTrue(out_stream_buffer[x] > 0.45)
strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [[]], [[]])
# now we render without any events in the input, the logic in the
# strategy is slightly different in this case
for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]):
self.assertTrue(out_stream_buffer[x] > 0.45)
| [
"py_headless_daw.schema.dto.time_interval.TimeInterval",
"py_headless_daw.schema.events.parameter_value_event.ParameterValueEvent",
"numpy.float32",
"numpy.zeros",
"numpy.ones"
] | [((495, 509), 'py_headless_daw.schema.dto.time_interval.TimeInterval', 'TimeInterval', ([], {}), '()\n', (507, 509), False, 'from py_headless_daw.schema.dto.time_interval import TimeInterval\n'), ((606, 645), 'numpy.ones', 'np.ones', ([], {'shape': '(100,)', 'dtype': 'np.float32'}), '(shape=(100,), dtype=np.float32)\n', (613, 645), True, 'import numpy as np\n'), ((674, 714), 'numpy.zeros', 'np.zeros', ([], {'shape': '(100,)', 'dtype': 'np.float32'}), '(shape=(100,), dtype=np.float32)\n', (682, 714), True, 'import numpy as np\n'), ((760, 815), 'py_headless_daw.schema.events.parameter_value_event.ParameterValueEvent', 'ParameterValueEvent', (['(0)', 'StreamGain.PARAMETER_GAIN', '(0.55)'], {}), '(0, StreamGain.PARAMETER_GAIN, 0.55)\n', (779, 815), False, 'from py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent\n'), ((457, 473), 'numpy.float32', 'np.float32', (['(0.25)'], {}), '(0.25)\n', (467, 473), True, 'import numpy as np\n')] |
import six
import os
import yaml
import logging
import logging.config
from appdirs import AppDirs
from pkg_resources import resource_filename
def setup_logging(log_level):
log_config_file = os.path.join(resource_filename('ansibleroler', 'static'), 'config', 'logging.yml')
level = logging.getLevelName(log_level)
with open(log_config_file, 'rt') as f:
log_config = yaml.safe_load(f.read())
logging.config.dictConfig(log_config)
if level:
logging.getLogger("ansibleroler").setLevel(level)
return
def update_log_level(log_level):
level = logging.getLevelName(log_level)
if level:
logging.getLogger("ansibleroler").setLevel(level)
return
def normalize_path(path):
normalized = os.path.abspath(os.path.expanduser(path))
return normalized
def convert_bool(obj):
true_values = (True, 'True', 'true', 'yes', '1')
false_values = (False, 'False', 'false', 'no', '0')
if obj in true_values:
return True
elif obj in false_values:
return False
else:
if not isinstance(obj, six.text_type):
obj = six.text_type(obj, "utf-8")
return obj
class Settings(object):
def __init__(
self,
config_file=os.path.join(AppDirs("ansible-roler").user_config_dir, "config.ini"),
role_name=None,
base_path=os.getcwd(),
log_level='WARNING',
subdir_template=os.path.join(resource_filename('ansibleroler', 'static'), 'templates', 'main.yml.j2'),
root_template=os.path.join(resource_filename('ansibleroler', 'static'), 'templates', '.drone.yml.j2'),
exclude_subdirs=['templates', 'files', 'vars'],
enable_templating=False,
template_vars={}
):
self.config_file = config_file
self.role_name = role_name
self.base_path = base_path
self.log_level = log_level
self.subdir_template = subdir_template
self.root_template = root_template
self.exclude_subdirs = exclude_subdirs
self.enable_templating = enable_templating
self.template_vars = template_vars
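# Usage sketch (values are illustrative; config_file defaults to the user config dir):
#   settings = Settings(role_name="nginx", log_level="INFO")
#   setup_logging(settings.log_level)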
| [
"appdirs.AppDirs",
"os.getcwd",
"pkg_resources.resource_filename",
"six.text_type",
"logging.getLevelName",
"logging.config.dictConfig",
"os.path.expanduser",
"logging.getLogger"
] | [((291, 322), 'logging.getLevelName', 'logging.getLevelName', (['log_level'], {}), '(log_level)\n', (311, 322), False, 'import logging\n'), ((416, 453), 'logging.config.dictConfig', 'logging.config.dictConfig', (['log_config'], {}), '(log_config)\n', (441, 453), False, 'import logging\n'), ((584, 615), 'logging.getLevelName', 'logging.getLevelName', (['log_level'], {}), '(log_level)\n', (604, 615), False, 'import logging\n'), ((209, 252), 'pkg_resources.resource_filename', 'resource_filename', (['"""ansibleroler"""', '"""static"""'], {}), "('ansibleroler', 'static')\n", (226, 252), False, 'from pkg_resources import resource_filename\n'), ((760, 784), 'os.path.expanduser', 'os.path.expanduser', (['path'], {}), '(path)\n', (778, 784), False, 'import os\n'), ((1354, 1365), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1363, 1365), False, 'import os\n'), ((1433, 1476), 'pkg_resources.resource_filename', 'resource_filename', (['"""ansibleroler"""', '"""static"""'], {}), "('ansibleroler', 'static')\n", (1450, 1476), False, 'from pkg_resources import resource_filename\n'), ((1542, 1585), 'pkg_resources.resource_filename', 'resource_filename', (['"""ansibleroler"""', '"""static"""'], {}), "('ansibleroler', 'static')\n", (1559, 1585), False, 'from pkg_resources import resource_filename\n'), ((476, 509), 'logging.getLogger', 'logging.getLogger', (['"""ansibleroler"""'], {}), "('ansibleroler')\n", (493, 509), False, 'import logging\n'), ((638, 671), 'logging.getLogger', 'logging.getLogger', (['"""ansibleroler"""'], {}), "('ansibleroler')\n", (655, 671), False, 'import logging\n'), ((1117, 1144), 'six.text_type', 'six.text_type', (['obj', '"""utf-8"""'], {}), "(obj, 'utf-8')\n", (1130, 1144), False, 'import six\n'), ((1255, 1279), 'appdirs.AppDirs', 'AppDirs', (['"""ansible-roler"""'], {}), "('ansible-roler')\n", (1262, 1279), False, 'from appdirs import AppDirs\n')] |
import os
os.system('apt install rustc')
os.environ['PATH'] += ':/root/.cargo/bin'
os.environ['USER'] = 'user'
| [
"os.system"
] | [((11, 41), 'os.system', 'os.system', (['"""apt install rustc"""'], {}), "('apt install rustc')\n", (20, 41), False, 'import os\n')] |
# User provided config file
import config
import requests
import attrdict
import logging
def attrdict_or_list(thing):
if type(thing) == dict:
return attrdict.AttrMap(thing)
elif type(thing) == list:
return thing
else:
assert False, "DON'T PANIC. Something that wasn't a list or dict."
class NotInHabiticaObject(Exception):
pass
class HabiticaObject(object):
"""Abstract class for custom HTTP requests commands for Habitica. """
def __init__(self, uuid, apikey, json=None, endpoint=None):
# json must be created with __dict__ to avoid referencing itself in __setattr__
# self.__dict__["json"] = attrdict.AttrMap()
self.__dict__["_uuid"] = uuid
self.__dict__["_apikey"] = apikey
self.__dict__["_habitica_api"] = config.HABITICA_URL+"/api/v2"
if json:
self.__dict__["json"] = attrdict.AttrMap(json)
elif endpoint:
self.__dict__["json"] = self._get_or_except(endpoint)
else:
self.__dict__["json"] = attrdict.AttrMap()
def __getstate__(self):
return self.__dict__
def __setstate__(self, d):
# Use the ordinary, plain, boring, normal setattr so that
# pickle doesn't freak out.
super(HabiticaObject, self).__setattr__("__dict__", d)
def _put_or_except(self, endpoint, json=None):
"""Return json from PUT request or raise an exception."""
if json:
r = requests.put(
self._habitica_api+endpoint,
headers={
'x-api-user':self._uuid,
'x-api-key':self._apikey
},
json=dict(json)
)
else:
r = requests.put(
self._habitica_api+endpoint,
headers={
'x-api-user':self._uuid,
'x-api-key':self._apikey
},
)
try:
r.raise_for_status()
except Exception as e:
print(r)
raise(e)
return attrdict_or_list(r.json())
def _get_or_except(self, endpoint):
"""Return json from GET request or raise an exception."""
r = requests.get(
self._habitica_api+endpoint,
headers={
'x-api-user':self._uuid,
'x-api-key':self._apikey
}
)
r.raise_for_status()
return attrdict_or_list(r.json())
def _post_or_except(self, endpoint, json={}, query={}):
"""Return json from POST request or raise an exception."""
r = requests.post(
self._habitica_api+endpoint,
headers={
'x-api-user':self._uuid,
'x-api-key':self._apikey
},
json=dict(json),
params=query
)
r.raise_for_status()
return attrdict_or_list(r.json())
def _delete_or_except(self, endpoint):
"""Return json from POST request or raise an exception."""
r = requests.delete(
self._habitica_api+endpoint,
headers={
'x-api-user':self._uuid,
'x-api-key':self._apikey
}
)
r.raise_for_status()
return attrdict_or_list(r.json())
def __str__(self):
return "HabiticaObject: \n"+str(self.__dict__)
| [
"requests.put",
"requests.delete",
"requests.get",
"attrdict.AttrMap"
] | [((164, 187), 'attrdict.AttrMap', 'attrdict.AttrMap', (['thing'], {}), '(thing)\n', (180, 187), False, 'import attrdict\n'), ((2237, 2348), 'requests.get', 'requests.get', (['(self._habitica_api + endpoint)'], {'headers': "{'x-api-user': self._uuid, 'x-api-key': self._apikey}"}), "(self._habitica_api + endpoint, headers={'x-api-user': self.\n _uuid, 'x-api-key': self._apikey})\n", (2249, 2348), False, 'import requests\n'), ((3066, 3180), 'requests.delete', 'requests.delete', (['(self._habitica_api + endpoint)'], {'headers': "{'x-api-user': self._uuid, 'x-api-key': self._apikey}"}), "(self._habitica_api + endpoint, headers={'x-api-user': self.\n _uuid, 'x-api-key': self._apikey})\n", (3081, 3180), False, 'import requests\n'), ((890, 912), 'attrdict.AttrMap', 'attrdict.AttrMap', (['json'], {}), '(json)\n', (906, 912), False, 'import attrdict\n'), ((1748, 1859), 'requests.put', 'requests.put', (['(self._habitica_api + endpoint)'], {'headers': "{'x-api-user': self._uuid, 'x-api-key': self._apikey}"}), "(self._habitica_api + endpoint, headers={'x-api-user': self.\n _uuid, 'x-api-key': self._apikey})\n", (1760, 1859), False, 'import requests\n'), ((1052, 1070), 'attrdict.AttrMap', 'attrdict.AttrMap', ([], {}), '()\n', (1068, 1070), False, 'import attrdict\n')] |
"""
part_finder.py
Look through two files and search for an internal part number that looks like `match_pattern`
"""
#!/usr/bin/env python3
import sys, regex, click
#first arg is a digikey csv cart
#second is a newline-delimited list of eoi part numbers
match_pattern = r"\w{3}-\w{4}-\w{2}"
# these are option-style flags, so use click.option (click.argument does not accept --names or help)
@click.command()
@click.option("--first", "-f", type=str, required=True, help="Design BOM to compare to. Should have the part number somewhere in the line")
@click.option("--second", "-s", type=str, required=True, help="Main BOM to search. Typically the distributor BOM or a text schematic")
def main(first, second):
regx = regex.compile(match_pattern)
with open(first, 'r') as f:
first_parts = [part.strip() for part in f.read().strip().split('\n')]
with open(second, 'r') as f:
st = f.read().strip()
second_parts = regx.findall(st)
    # collect part numbers that appear in one file but not the other
    nfirst = []
    nsecond = []
    for part in second_parts:
        if part not in first_parts and part not in nfirst:
            nfirst.append(part)
    for part in first_parts:
        if part not in second_parts and part not in nsecond:
            nsecond.append(part)
print("Not in first: ", nfirst)
print("Not in second: ", nsecond)
if __name__ == "__main__":
main()
| [
"regex.compile",
"click.argument"
] | [((295, 445), 'click.argument', 'click.argument', (['"""--first"""', '"""-f"""'], {'type': 'str', 'required': '(True)', 'help': '"""Design BOM to compare to. Should have the part number somewhere in the line"""'}), "('--first', '-f', type=str, required=True, help=\n 'Design BOM to compare to. Should have the part number somewhere in the line'\n )\n", (309, 445), False, 'import sys, regex, click\n'), ((437, 577), 'click.argument', 'click.argument', (['"""--second"""', '"""-s"""'], {'type': 'str', 'required': '(True)', 'help': '"""Main BOM to search. Typically the distributer BOM or a text schematic"""'}), "('--second', '-s', type=str, required=True, help=\n 'Main BOM to search. Typically the distributer BOM or a text schematic')\n", (451, 577), False, 'import sys, regex, click\n'), ((624, 652), 'regex.compile', 'regex.compile', (['match_pattern'], {}), '(match_pattern)\n', (637, 652), False, 'import sys, regex, click\n')] |
import re
from typing import Iterator
from xps_convert.read.errors import ParseError
from xps_convert.read.xmp import Xmp
FIELD_RE = re.compile(r"([\w\s]+):\s(.*)")
def parse_xmp(filename: str, lines: Iterator[str]) -> Xmp:
xmp = Xmp()
# First line is always a comment, skip it
next(lines)
# Match each line and enumerate (line numbers are needed for errors)
for n, match in enumerate((FIELD_RE.match(line) for line in lines)):
if match is not None:
xmp.values[match.group(1)] = match.group(2)
else:
raise ParseError("unable to parse line", filename, n)
# Verify that required fields are present
for field in ("MHS File", "Device", "Package", "SpeedGrade"):
if field not in xmp.values:
raise ParseError(f"missing required field ‘{field}’", filename)
return xmp
| [
"xps_convert.read.xmp.Xmp",
"xps_convert.read.errors.ParseError",
"re.compile"
] | [((135, 168), 're.compile', 're.compile', (['"""([\\\\w\\\\s]+):\\\\s(.*)"""'], {}), "('([\\\\w\\\\s]+):\\\\s(.*)')\n", (145, 168), False, 'import re\n'), ((238, 243), 'xps_convert.read.xmp.Xmp', 'Xmp', ([], {}), '()\n', (241, 243), False, 'from xps_convert.read.xmp import Xmp\n'), ((570, 617), 'xps_convert.read.errors.ParseError', 'ParseError', (['"""unable to parse line"""', 'filename', 'n'], {}), "('unable to parse line', filename, n)\n", (580, 617), False, 'from xps_convert.read.errors import ParseError\n'), ((784, 841), 'xps_convert.read.errors.ParseError', 'ParseError', (['f"""missing required field ‘{field}’"""', 'filename'], {}), "(f'missing required field ‘{field}’', filename)\n", (794, 841), False, 'from xps_convert.read.errors import ParseError\n')] |
# Copyright 2015 Nicta
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/usr/bin/env python
from setuptools import setup
import os
package_name = 'bdkd-laser-data'
webdir = 'wsgi'
datafiles = [(os.path.join(package_name, root), [os.path.join(root, f) for f in files])
for root, dirs, files in os.walk(webdir)]
setup(
name=package_name,
version='0.1.0',
description='Access dataset data',
author='Sirca Ltd',
author_email='<EMAIL>',
url='http://github.com/sirca/bdkd',
package_dir={'': 'lib'},
packages=['bdkd.laser', 'bdkd.laser.util'],
data_files = datafiles,
scripts=[
'bin/pack_maps.py',
'bin/pack_raw.py',
],
entry_points = {
'console_scripts': [
'datastore-add-laser = bdkd.laser.util.add:add_laser_util',
],
},
install_requires=['boto', 'PyYAML', 'bdkd-datastore', 'h5py']
)
| [
"os.walk",
"os.path.join",
"setuptools.setup"
] | [((825, 1307), 'setuptools.setup', 'setup', ([], {'name': 'package_name', 'version': '"""0.1.0"""', 'description': '"""Access dataset data"""', 'author': '"""Sirca Ltd"""', 'author_email': '"""<EMAIL>"""', 'url': '"""http://github.com/sirca/bdkd"""', 'package_dir': "{'': 'lib'}", 'packages': "['bdkd.laser', 'bdkd.laser.util']", 'data_files': 'datafiles', 'scripts': "['bin/pack_maps.py', 'bin/pack_raw.py']", 'entry_points': "{'console_scripts': [\n 'datastore-add-laser = bdkd.laser.util.add:add_laser_util']}", 'install_requires': "['boto', 'PyYAML', 'bdkd-datastore', 'h5py']"}), "(name=package_name, version='0.1.0', description='Access dataset data',\n author='Sirca Ltd', author_email='<EMAIL>', url=\n 'http://github.com/sirca/bdkd', package_dir={'': 'lib'}, packages=[\n 'bdkd.laser', 'bdkd.laser.util'], data_files=datafiles, scripts=[\n 'bin/pack_maps.py', 'bin/pack_raw.py'], entry_points={'console_scripts':\n ['datastore-add-laser = bdkd.laser.util.add:add_laser_util']},\n install_requires=['boto', 'PyYAML', 'bdkd-datastore', 'h5py'])\n", (830, 1307), False, 'from setuptools import setup\n'), ((696, 728), 'os.path.join', 'os.path.join', (['package_name', 'root'], {}), '(package_name, root)\n', (708, 728), False, 'import os\n'), ((807, 822), 'os.walk', 'os.walk', (['webdir'], {}), '(webdir)\n', (814, 822), False, 'import os\n'), ((731, 752), 'os.path.join', 'os.path.join', (['root', 'f'], {}), '(root, f)\n', (743, 752), False, 'import os\n')] |
from datetime import datetime
from ... import db
class Story(db.Model):
""" This model holds information about Story """
__tablename__ = 'story'
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.Text, nullable=False)
content = db.Column(db.Text, nullable=False)
featured_img_url = db.Column(db.Text, nullable=False)
approved_count = db.Column(db.Integer, nullable=False)
fake_count = db.Column(db.Integer, nullable=False)
mixedvote_count = db.Column(db.Integer, nullable=False)
    # pass the callable, not its result, so each row gets its own insertion timestamp
    date_added = db.Column(db.Text, default=datetime.now)
def __init__(self, title, content, featured_img_url, approved_count, fake_count, mixedvote_count):
"""
Initialize the instance
"""
self.title = title
self.content = content
self.featured_img_url = featured_img_url
self.approved_count = approved_count
self.fake_count = fake_count
self.mixedvote_count = mixedvote_count
def __repr__(self):
"""
        Returns the object representation
        """
        return '<Story %r>' % self.content
def to_json(self):
"""
Returns a JSON object
:return: story JSON object
"""
json_story = {
'title': self.title,
'content': self.content,
'featured_img_url': self.featured_img_url,
'approved_count': self.approved_count,
'fake_count': self.fake_count,
'mixedvote_count': self.mixedvote_count,
'date_added': self.date_added
}
return json_story
def save(self):
"""
Save a story to the database.
This includes creating a new story and editing one.
"""
db.session.add(self)
db.session.commit()
| [
"datetime.datetime.now"
] | [((578, 592), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (590, 592), False, 'from datetime import datetime\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
def setup_site(apps, schema_editor):
"""Populate the sites model"""
Site = apps.get_model('sites', 'Site')
Site.objects.all().delete()
# Register SITE_ID = 1
    # Fall back to a placeholder domain when settings.DOMAIN is not defined
    domain = getattr(settings, 'DOMAIN', 'example.com')
Site.objects.create(domain=domain, name='ProjMan')
class Migration(migrations.Migration):
dependencies = [
('sites', '0001_initial'),
]
operations = [
migrations.RunPython(setup_site)
]
| [
"django.db.migrations.RunPython"
] | [((590, 622), 'django.db.migrations.RunPython', 'migrations.RunPython', (['setup_site'], {}), '(setup_site)\n', (610, 622), False, 'from django.db import migrations, models\n')] |
import matplotlib.pyplot as plt
# import pydicom
import os
from pydicom.filereader import dcmread, read_dicomdir
from glob import glob
import cv2
import numpy as np
cv2.destroyAllWindows()
# window prop
screensize = ((-1440,0),(0,900))
screenwidth = screensize[0][1]-screensize[0][0]
screenheight = screensize[1][1]-screensize[1][0]
headertop= 30
headerbottom = 8
headerside = 8
n = 3
m = 2
windowwidth = int((screenwidth - n * headerside*2)/ n)
windowheight = int((screenheight - m * (headertop + headerbottom)) /m)
# input directory
dicom_dir = r"E:\BTSynchSGH\datasets\necklysis\input\dicom"
fps = glob(os.path.join(dicom_dir,"*.dcm"))
ds_list = [dcmread(filename) for filename in fps]
# select image
image = ds_list[10].pixel_array
# image details
image_height, image_width = image.shape
# image pre-processing
image_norm = cv2.normalize(image, dst=None, alpha=0, beta=65535, norm_type=cv2.NORM_MINMAX)  # stretch to the full 16-bit range so the scan is easier to see
image_norm_uint8 = cv2.convertScaleAbs(image_norm)
min_head_thresh = 10000
max_head_thresh = 65535
# get outline of head
ret, image_thresh = cv2.threshold(image_norm,min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO)
image_thresh_uint8 = cv2.convertScaleAbs(image_thresh)
image_canny = cv2.Canny(image_thresh_uint8,100,150)
# get contour
im2, contours, hierarchy = cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE)
image_norm_3chan = np.stack([image_norm]*3,axis=-1)
# get largest contour
perimeter = [cv2.arcLength(cnt,True) for cnt in contours]
idx_max = np.argmax(np.array(perimeter))
image_contours = cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]], 0, (0,65535,0), 3)
# display process images
# original image
cv2.namedWindow("image_norm",cv2.WINDOW_NORMAL)
cv2.moveWindow("image_norm",screensize[0][0],0)
cv2.resizeWindow("image_norm",(windowwidth,windowheight))
cv2.imshow("image_norm", image_norm)
# canny
cv2.namedWindow("image_canny",cv2.WINDOW_NORMAL)
cv2.imshow("image_canny", image_canny)
cv2.resizeWindow("image_canny",(windowwidth,windowheight))
cv2.moveWindow("image_canny",screensize[0][0]+(windowwidth+headerside*2),0)
# contours
cv2.namedWindow("contours",cv2.WINDOW_NORMAL)
cv2.imshow("contours", image_contours)
cv2.resizeWindow("contours",(windowwidth,windowheight))
cv2.moveWindow("contours",screensize[0][0]+(windowwidth+headerside)*2,0)
# cv2.waitKey(1)
# cv2.destroyAllWindows()
| [
"numpy.stack",
"cv2.Canny",
"pydicom.filereader.dcmread",
"cv2.arcLength",
"cv2.threshold",
"cv2.imshow",
"cv2.resizeWindow",
"cv2.namedWindow",
"numpy.array",
"cv2.convertScaleAbs",
"cv2.normalize",
"cv2.moveWindow",
"cv2.destroyAllWindows",
"os.path.join",
"cv2.findContours"
] | [((166, 189), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (187, 189), False, 'import cv2\n'), ((838, 916), 'cv2.normalize', 'cv2.normalize', (['image'], {'dst': 'None', 'alpha': '(0)', 'beta': '(65536)', 'norm_type': 'cv2.NORM_MINMAX'}), '(image, dst=None, alpha=0, beta=65536, norm_type=cv2.NORM_MINMAX)\n', (851, 916), False, 'import cv2\n'), ((961, 992), 'cv2.convertScaleAbs', 'cv2.convertScaleAbs', (['image_norm'], {}), '(image_norm)\n', (980, 992), False, 'import cv2\n'), ((1085, 1163), 'cv2.threshold', 'cv2.threshold', (['image_norm', 'min_head_thresh', 'max_head_thresh', 'cv2.THRESH_TOZERO'], {}), '(image_norm, min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO)\n', (1098, 1163), False, 'import cv2\n'), ((1184, 1217), 'cv2.convertScaleAbs', 'cv2.convertScaleAbs', (['image_thresh'], {}), '(image_thresh)\n', (1203, 1217), False, 'import cv2\n'), ((1232, 1271), 'cv2.Canny', 'cv2.Canny', (['image_thresh_uint8', '(100)', '(150)'], {}), '(image_thresh_uint8, 100, 150)\n', (1241, 1271), False, 'import cv2\n'), ((1312, 1379), 'cv2.findContours', 'cv2.findContours', (['image_canny', 'cv2.RETR_TREE', 'cv2.CHAIN_APPROX_NONE'], {}), '(image_canny, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)\n', (1328, 1379), False, 'import cv2\n'), ((1397, 1432), 'numpy.stack', 'np.stack', (['([image_norm] * 3)'], {'axis': '(-1)'}), '([image_norm] * 3, axis=-1)\n', (1405, 1432), True, 'import numpy as np\n'), ((1696, 1744), 'cv2.namedWindow', 'cv2.namedWindow', (['"""image_norm"""', 'cv2.WINDOW_NORMAL'], {}), "('image_norm', cv2.WINDOW_NORMAL)\n", (1711, 1744), False, 'import cv2\n'), ((1744, 1793), 'cv2.moveWindow', 'cv2.moveWindow', (['"""image_norm"""', 'screensize[0][0]', '(0)'], {}), "('image_norm', screensize[0][0], 0)\n", (1758, 1793), False, 'import cv2\n'), ((1792, 1851), 'cv2.resizeWindow', 'cv2.resizeWindow', (['"""image_norm"""', '(windowwidth, windowheight)'], {}), "('image_norm', (windowwidth, windowheight))\n", (1808, 1851), False, 'import cv2\n'), ((1850, 1886), 'cv2.imshow', 'cv2.imshow', (['"""image_norm"""', 'image_norm'], {}), "('image_norm', image_norm)\n", (1860, 1886), False, 'import cv2\n'), ((1896, 1945), 'cv2.namedWindow', 'cv2.namedWindow', (['"""image_canny"""', 'cv2.WINDOW_NORMAL'], {}), "('image_canny', cv2.WINDOW_NORMAL)\n", (1911, 1945), False, 'import cv2\n'), ((1945, 1983), 'cv2.imshow', 'cv2.imshow', (['"""image_canny"""', 'image_canny'], {}), "('image_canny', image_canny)\n", (1955, 1983), False, 'import cv2\n'), ((1984, 2044), 'cv2.resizeWindow', 'cv2.resizeWindow', (['"""image_canny"""', '(windowwidth, windowheight)'], {}), "('image_canny', (windowwidth, windowheight))\n", (2000, 2044), False, 'import cv2\n'), ((2043, 2130), 'cv2.moveWindow', 'cv2.moveWindow', (['"""image_canny"""', '(screensize[0][0] + (windowwidth + headerside * 2))', '(0)'], {}), "('image_canny', screensize[0][0] + (windowwidth + headerside *\n 2), 0)\n", (2057, 2130), False, 'import cv2\n'), ((2131, 2177), 'cv2.namedWindow', 'cv2.namedWindow', (['"""contours"""', 'cv2.WINDOW_NORMAL'], {}), "('contours', cv2.WINDOW_NORMAL)\n", (2146, 2177), False, 'import cv2\n'), ((2177, 2215), 'cv2.imshow', 'cv2.imshow', (['"""contours"""', 'image_contours'], {}), "('contours', image_contours)\n", (2187, 2215), False, 'import cv2\n'), ((2216, 2273), 'cv2.resizeWindow', 'cv2.resizeWindow', (['"""contours"""', '(windowwidth, windowheight)'], {}), "('contours', (windowwidth, windowheight))\n", (2232, 2273), False, 'import cv2\n'), ((2272, 2357), 'cv2.moveWindow', 'cv2.moveWindow', (['"""contours"""', 
'(screensize[0][0] + (windowwidth + headerside) * 2)', '(0)'], {}), "('contours', screensize[0][0] + (windowwidth + headerside) * 2, 0\n )\n", (2286, 2357), False, 'import cv2\n'), ((612, 644), 'os.path.join', 'os.path.join', (['dicom_dir', '"""*.dcm"""'], {}), "(dicom_dir, '*.dcm')\n", (624, 644), False, 'import os\n'), ((657, 674), 'pydicom.filereader.dcmread', 'dcmread', (['filename'], {}), '(filename)\n', (664, 674), False, 'from pydicom.filereader import dcmread, read_dicomdir\n'), ((1467, 1491), 'cv2.arcLength', 'cv2.arcLength', (['cnt', '(True)'], {}), '(cnt, True)\n', (1480, 1491), False, 'import cv2\n'), ((1532, 1551), 'numpy.array', 'np.array', (['perimeter'], {}), '(perimeter)\n', (1540, 1551), True, 'import numpy as np\n')] |
import socket
import threading
import multiprocessing
import os
def worker(sock):
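    # Each pre-forked worker blocks on accept() and hands every connection to a short-lived thread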
while True:
conn, addr = sock.accept()
print("PID:", os.getpid())
thread = threading.Thread(target=process_request, args=(conn, addr))
thread.start()
def process_request(conn, addr):
print("addr: ", addr)
with conn:
while True:
data = conn.recv(1024)
if not data:
break
print(data.decode("utf-8"))
if __name__ == "__main__":
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.bind(("127.0.0.1", 10001))
sock.listen(socket.SOMAXCONN)
PROCESS_COUNT = 6
process_list = [multiprocessing.Process(target=worker,
args=(sock,)) for _ in range(PROCESS_COUNT)]
for process in process_list:
process.start()
for process in process_list:
process.join()
| [
"threading.Thread",
"multiprocessing.Process",
"socket.socket",
"os.getpid"
] | [((188, 247), 'threading.Thread', 'threading.Thread', ([], {'target': 'process_request', 'args': '(conn, addr)'}), '(target=process_request, args=(conn, addr))\n', (204, 247), False, 'import threading\n'), ((528, 577), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (541, 577), False, 'import socket\n'), ((157, 168), 'os.getpid', 'os.getpid', ([], {}), '()\n', (166, 168), False, 'import os\n'), ((716, 768), 'multiprocessing.Process', 'multiprocessing.Process', ([], {'target': 'worker', 'args': '(sock,)'}), '(target=worker, args=(sock,))\n', (739, 768), False, 'import multiprocessing\n')] |
import glob
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import xarray as xr
from mpl_toolkits.basemap import Basemap
import gc
import matplotlib
matplotlib.rc('font', size=12)
data_path = 'processed_netcdf'
multibeam_files = glob.glob(data_path + '/*.nc')
multibeam_files.sort()
lon0, lon1 = -122.2, -121.7
lat0, lat1 = 36.6, 37.
parallels = np.arange(lat0, lat1 + 0.1, 0.1)
meridians = np.arange(lon0, lon1 + 0.1, 0.1)
fig = plt.figure(figsize=(8, 6))
map = Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1, \
resolution='f')
map.drawcoastlines()
map.drawparallels(parallels, labels=~np.isnan(parallels))
map.drawmeridians(meridians, labels=~np.isnan(meridians))
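# plot every `skip`-th grid point of each swath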
skip = 4
for f in multibeam_files:
print('Plotting ', f)
ds = xr.open_dataset(f)
lon = np.array(ds.longitude[::skip,::skip])
lat = np.array(ds.latitude[::skip,::skip])
depth = np.array(ds.depth[::skip,::skip])
plt.pcolor(lon, lat, depth, vmin=0, vmax=100, cmap=cm.viridis_r)
del lon, lat, depth, ds
gc.collect()
plt.colorbar()
fig.suptitle('Monterey Bay bathymetry from shipboard Multibeam EM-712')
plt.savefig('monterey_bay_multibeam_bathymetry.png', dpi=300)
plt.close(fig)
| [
"matplotlib.pyplot.pcolor",
"matplotlib.rc",
"matplotlib.pyplot.close",
"xarray.open_dataset",
"numpy.isnan",
"matplotlib.pyplot.colorbar",
"gc.collect",
"matplotlib.pyplot.figure",
"numpy.arange",
"numpy.array",
"glob.glob",
"mpl_toolkits.basemap.Basemap",
"matplotlib.pyplot.savefig"
] | [((180, 210), 'matplotlib.rc', 'matplotlib.rc', (['"""font"""'], {'size': '(12)'}), "('font', size=12)\n", (193, 210), False, 'import matplotlib\n'), ((262, 292), 'glob.glob', 'glob.glob', (["(data_path + '/*.nc')"], {}), "(data_path + '/*.nc')\n", (271, 292), False, 'import glob\n'), ((381, 413), 'numpy.arange', 'np.arange', (['lat0', '(lat1 + 0.1)', '(0.1)'], {}), '(lat0, lat1 + 0.1, 0.1)\n', (390, 413), True, 'import numpy as np\n'), ((426, 458), 'numpy.arange', 'np.arange', (['lon0', '(lon1 + 0.1)', '(0.1)'], {}), '(lon0, lon1 + 0.1, 0.1)\n', (435, 458), True, 'import numpy as np\n'), ((466, 492), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 6)'}), '(figsize=(8, 6))\n', (476, 492), True, 'import matplotlib.pyplot as plt\n'), ((499, 590), 'mpl_toolkits.basemap.Basemap', 'Basemap', ([], {'llcrnrlon': 'lon0', 'llcrnrlat': 'lat0', 'urcrnrlon': 'lon1', 'urcrnrlat': 'lat1', 'resolution': '"""f"""'}), "(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1,\n resolution='f')\n", (506, 590), False, 'from mpl_toolkits.basemap import Basemap\n'), ((1087, 1101), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (1099, 1101), True, 'import matplotlib.pyplot as plt\n'), ((1175, 1236), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""monterey_bay_multibeam_bathymetry.png"""'], {'dpi': '(300)'}), "('monterey_bay_multibeam_bathymetry.png', dpi=300)\n", (1186, 1236), True, 'import matplotlib.pyplot as plt\n'), ((1237, 1251), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (1246, 1251), True, 'import matplotlib.pyplot as plt\n'), ((812, 830), 'xarray.open_dataset', 'xr.open_dataset', (['f'], {}), '(f)\n', (827, 830), True, 'import xarray as xr\n'), ((841, 879), 'numpy.array', 'np.array', (['ds.longitude[::skip, ::skip]'], {}), '(ds.longitude[::skip, ::skip])\n', (849, 879), True, 'import numpy as np\n'), ((889, 926), 'numpy.array', 'np.array', (['ds.latitude[::skip, ::skip]'], {}), '(ds.latitude[::skip, ::skip])\n', (897, 926), True, 'import numpy as np\n'), ((938, 972), 'numpy.array', 'np.array', (['ds.depth[::skip, ::skip]'], {}), '(ds.depth[::skip, ::skip])\n', (946, 972), True, 'import numpy as np\n'), ((976, 1040), 'matplotlib.pyplot.pcolor', 'plt.pcolor', (['lon', 'lat', 'depth'], {'vmin': '(0)', 'vmax': '(100)', 'cmap': 'cm.viridis_r'}), '(lon, lat, depth, vmin=0, vmax=100, cmap=cm.viridis_r)\n', (986, 1040), True, 'import matplotlib.pyplot as plt\n'), ((1073, 1085), 'gc.collect', 'gc.collect', ([], {}), '()\n', (1083, 1085), False, 'import gc\n'), ((661, 680), 'numpy.isnan', 'np.isnan', (['parallels'], {}), '(parallels)\n', (669, 680), True, 'import numpy as np\n'), ((719, 738), 'numpy.isnan', 'np.isnan', (['meridians'], {}), '(meridians)\n', (727, 738), True, 'import numpy as np\n')] |
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
import math
import numpy as np
def quadratic(**kwargs) -> float:
return sum(x_i ** 2 for _, x_i in kwargs.items())
def ackley(x_1=None, x_2=None, a=20, b=0.2, c=2*math.pi):
d = 2
    # average over the d dimensions inside both exponentials (standard Ackley form)
    return -a * np.exp(-b * np.sqrt((x_1**2 + x_2**2) / d)) - np.exp((np.cos(c * x_1) + np.cos(c * x_2)) / d) + a + np.exp(1)
def flower(**kwargs):
a = 1
b = 2
c = 4
x_1 = kwargs['x_1']
x_2 = kwargs['x_2']
x_norm = np.sqrt(x_1**2 + x_2**2)
return a * x_norm + b * np.sin(c * np.arctan2(x_1, x_2))
| [
"numpy.arctan2",
"numpy.exp",
"numpy.cos",
"numpy.sqrt"
] | [((526, 554), 'numpy.sqrt', 'np.sqrt', (['(x_1 ** 2 + x_2 ** 2)'], {}), '(x_1 ** 2 + x_2 ** 2)\n', (533, 554), True, 'import numpy as np\n'), ((390, 399), 'numpy.exp', 'np.exp', (['(1)'], {}), '(1)\n', (396, 399), True, 'import numpy as np\n'), ((593, 613), 'numpy.arctan2', 'np.arctan2', (['x_1', 'x_2'], {}), '(x_1, x_2)\n', (603, 613), True, 'import numpy as np\n'), ((349, 364), 'numpy.cos', 'np.cos', (['(c * x_1)'], {}), '(c * x_1)\n', (355, 364), True, 'import numpy as np\n'), ((367, 382), 'numpy.cos', 'np.cos', (['(c * x_2)'], {}), '(c * x_2)\n', (373, 382), True, 'import numpy as np\n'), ((308, 342), 'numpy.sqrt', 'np.sqrt', (['((x_1 ** 2 + x_2 ** 2) / d)'], {}), '((x_1 ** 2 + x_2 ** 2) / d)\n', (315, 342), True, 'import numpy as np\n')] |
from flask import Flask, url_for, render_template, request, redirect
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
from werkzeug.utils import secure_filename
from werkzeug.serving import run_simple
from id_class_locator import id_class_detector
import os
import time
from cv2 import cv2
app=Flask(__name__)
#app.config['SQLALCHEMY_DATABASE_URI']='sqlite:///my.db'
#db=SQLAlchemy(app)
path2File= os.path.dirname(os.path.realpath(__file__))
pathToModel=path2File+'/WorkArea/FRCNN'
PATH = path2File+'/static/input'
ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif'])
app.config['PATH']=PATH
#app.config["TEMPLATES_AUTO_RELOAD"] = True
model = cv2.dnn.readNetFromTensorflow(pathToModel+'/frozen_inference_graph.pb', pathToModel+'/frcnn.pbtxt')
@app.route('/hello', methods=['POST', 'GET'])
def hello():
return('Hello')
@app.route('/', methods=['POST', 'GET'])
def index():
return render_template('home.html')
@app.route('/upload', methods=['POST', 'GET'])
def upload():
if request.method == 'POST':
# check if the post request has the file part
file = request.files['imageUploadForm']
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['PATH'], filename))
print(filename)
img=cv2.imread(os.path.join(app.config['PATH'], filename))
id_class_detector(img, model, filename, debug=False)
#time.sleep(2)
        return render_template('home.html', value=filename)
    return render_template('home.html')
if __name__=="__main__":
run_simple('127.0.0.1', 9100, app, use_reloader=False)
| [
"os.path.join",
"os.path.realpath",
"flask.Flask",
"werkzeug.utils.secure_filename",
"werkzeug.serving.run_simple",
"flask.render_template",
"cv2.cv2.dnn.readNetFromTensorflow",
"id_class_locator.id_class_detector"
] | [((316, 331), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (321, 331), False, 'from flask import Flask, url_for, render_template, request, redirect\n'), ((671, 779), 'cv2.cv2.dnn.readNetFromTensorflow', 'cv2.dnn.readNetFromTensorflow', (["(pathToModel + '/frozen_inference_graph.pb')", "(pathToModel + '/frcnn.pbtxt')"], {}), "(pathToModel + '/frozen_inference_graph.pb', \n pathToModel + '/frcnn.pbtxt')\n", (700, 779), False, 'from cv2 import cv2\n'), ((436, 462), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (452, 462), False, 'import os\n'), ((911, 939), 'flask.render_template', 'render_template', (['"""home.html"""'], {}), "('home.html')\n", (926, 939), False, 'from flask import Flask, url_for, render_template, request, redirect\n'), ((1390, 1434), 'flask.render_template', 'render_template', (['"""home.html"""'], {'value': 'filename'}), "('home.html', value=filename)\n", (1405, 1434), False, 'from flask import Flask, url_for, render_template, request, redirect\n'), ((1463, 1517), 'werkzeug.serving.run_simple', 'run_simple', (['"""127.0.0.1"""', '(9100)', 'app'], {'use_reloader': '(False)'}), "('127.0.0.1', 9100, app, use_reloader=False)\n", (1473, 1517), False, 'from werkzeug.serving import run_simple\n'), ((1141, 1171), 'werkzeug.utils.secure_filename', 'secure_filename', (['file.filename'], {}), '(file.filename)\n', (1156, 1171), False, 'from werkzeug.utils import secure_filename\n'), ((1309, 1361), 'id_class_locator.id_class_detector', 'id_class_detector', (['img', 'model', 'filename'], {'debug': '(False)'}), '(img, model, filename, debug=False)\n', (1326, 1361), False, 'from id_class_locator import id_class_detector\n'), ((1184, 1226), 'os.path.join', 'os.path.join', (["app.config['PATH']", 'filename'], {}), "(app.config['PATH'], filename)\n", (1196, 1226), False, 'import os\n'), ((1263, 1305), 'os.path.join', 'os.path.join', (["app.config['PATH']", 'filename'], {}), "(app.config['PATH'], filename)\n", (1275, 1305), False, 'import os\n')] |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from configparser import ConfigParser
from s3transfer import RetriesExceededError
from s3transfer.exceptions import TransferNotDoneError
import os
import sys
S3 = "s3://"
S3A = "s3a://"
class ConfigReader:
def __init__(self, logger, config_location):
self.config_location = config_location
self.logger = logger
def read_config(self):
config = ConfigParser()
config.read(self.config_location)
enable_insert_overwrite = 'True'
enable_external_table_drop = 'True'
if 'aws' in config and 'region' in config['aws']:
aws_region = config['aws']['region']
else:
self.logger.error("Not able to read the region from the config ")
sys.exit(os.EX_CONFIG)
if 'athena' in config:
if 'ATHENA_OUTPUT_LOCATION' in config['athena']:
athena_output_location = config['athena']['ATHENA_OUTPUT_LOCATION']
else:
self.logger.error("Not able to read the ATHENA_OUTPUT_LOCATION from the config ")
sys.exit(os.EX_CONFIG)
if 'STAGING_DB' in config['athena']:
staging_db = config['athena']['STAGING_DB']
else:
self.logger.error("Not able to read the STAGING_DB from the config ")
sys.exit(os.EX_CONFIG)
if 'ENABLE_INSERT_OVERWRITE' in config['athena']:
enable_insert_overwrite = config['athena']['ENABLE_INSERT_OVERWRITE']
if 'ENABLE_EXTERNAL_TABLE_DROP' in config['athena']:
                enable_external_table_drop = config['athena']['ENABLE_EXTERNAL_TABLE_DROP']
else:
self.logger.error("Not able to read the athena config")
sys.exit(os.EX_CONFIG)
return aws_region, athena_output_location, staging_db, enable_insert_overwrite, enable_external_table_drop
class FileReader:
def __init__(self, logger, s3_resource):
self.logger = logger
self.s3_resource = s3_resource
def split_s3_path(self, s3_location):
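        # strip the s3:// or s3a:// scheme, then split into bucket name and key prefix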
path_parts = s3_location.replace(S3, "").replace(S3A, "").split("/")
s3_bucket = path_parts.pop(0)
prefix = "/".join(path_parts)
return s3_bucket, prefix
def download_input_from_s3(self, s3_bucket, prefix, destination_location):
try:
self.s3_resource.meta.client.download_file(s3_bucket, prefix, destination_location)
except RetriesExceededError as e:
self.logger.fatal("Unable to download the file {0}".format(e))
self.logger.fatal("Unable to download the file from s3 to local : {0}/{1}".format(s3_bucket, prefix))
sys.exit(os.EX_DATAERR)
except TransferNotDoneError as e:
self.logger.fatal("Unable to download the file {0}".format(e))
sys.exit(os.EX_OSERR)
return destination_location
    def get_file(self, file_type, source_location, destination_location):
if source_location.startswith(S3) or source_location.startswith(S3A):
self.logger.info("Downloading the {0} from {1} to {2}".format(file_type,
source_location,
destination_location))
s3_bucket, prefix = self.split_s3_path(source_location)
return self.download_input_from_s3(s3_bucket, prefix, destination_location)
else:
return source_location
| [
"configparser.ConfigParser",
"sys.exit"
] | [((1166, 1180), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (1178, 1180), False, 'from configparser import ConfigParser\n'), ((1519, 1541), 'sys.exit', 'sys.exit', (['os.EX_CONFIG'], {}), '(os.EX_CONFIG)\n', (1527, 1541), False, 'import sys\n'), ((2523, 2545), 'sys.exit', 'sys.exit', (['os.EX_CONFIG'], {}), '(os.EX_CONFIG)\n', (2531, 2545), False, 'import sys\n'), ((1850, 1872), 'sys.exit', 'sys.exit', (['os.EX_CONFIG'], {}), '(os.EX_CONFIG)\n', (1858, 1872), False, 'import sys\n'), ((2102, 2124), 'sys.exit', 'sys.exit', (['os.EX_CONFIG'], {}), '(os.EX_CONFIG)\n', (2110, 2124), False, 'import sys\n'), ((3456, 3479), 'sys.exit', 'sys.exit', (['os.EX_DATAERR'], {}), '(os.EX_DATAERR)\n', (3464, 3479), False, 'import sys\n'), ((3609, 3630), 'sys.exit', 'sys.exit', (['os.EX_OSERR'], {}), '(os.EX_OSERR)\n', (3617, 3630), False, 'import sys\n')] |
import frappe
import datetime
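# Custom query: return batches for the given item that are enabled and not yet expired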
@frappe.whitelist()
def get_batch_nos(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.sql("""select batch_id, expiry_date
from `tabBatch`
where
item = {item_code} and disabled = 0 and (expiry_date is null or expiry_date > '{cur_date}')"""
.format(item_code = frappe.db.escape(filters.get("item")), cur_date = datetime.datetime.today()
))
| [
"frappe.whitelist",
"datetime.datetime.today"
] | [((32, 50), 'frappe.whitelist', 'frappe.whitelist', ([], {}), '()\n', (48, 50), False, 'import frappe\n'), ((373, 398), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (396, 398), False, 'import datetime\n')] |
#!/usr/bin/python
# ex:set fileencoding=utf-8:
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from djangobmf.dashboards import Accounting
from djangobmf.sites import Module
from djangobmf.sites import ViewMixin
from djangobmf.sites import register
from djangobmf.sites import site
from .categories import TransactionCategory
from .models import ACCOUNTING_INCOME
from .models import ACCOUNTING_EXPENSE
from .models import ACCOUNTING_ASSET
from .models import ACCOUNTING_LIABILITY
from .models import Account
from .models import Transaction
from .models import TransactionItem
from .views import TransactionCreateView
from .views import TransactionUpdateView
@register(dashboard=Accounting)
class AccountModule(Module):
model = Account
default = True
@register(dashboard=Accounting)
class TransactionModule(Module):
model = Transaction
default = True
create = TransactionCreateView
update = TransactionUpdateView
@register(dashboard=Accounting)
class TransactionItemModule(Module):
model = TransactionItem
default = True
site.register_settings('bmfcontrib_accounting', {
'income': forms.ModelChoiceField(queryset=Account.objects.filter(type=ACCOUNTING_INCOME)),
'expense': forms.ModelChoiceField(queryset=Account.objects.filter(type=ACCOUNTING_EXPENSE)),
'customer': forms.ModelChoiceField(queryset=Account.objects.filter(type=ACCOUNTING_ASSET)),
'supplier': forms.ModelChoiceField(queryset=Account.objects.filter(type=ACCOUNTING_LIABILITY)),
})
@register(category=TransactionCategory)
class AllAccounts(ViewMixin):
model = Account
name = _("All Accounts")
slug = "accounts"
@register(category=TransactionCategory)
class OpenTransactions(ViewMixin):
model = Transaction
name = _("Open transactions")
slug = "open"
def filter_queryset(self, request, queryset, view):
return queryset.filter(draft=True).order_by('-modified')
@register(category=TransactionCategory)
class ClosedTrancations(ViewMixin):
model = Transaction
name = _("Closed transactions")
slug = "closed"
date_resolution = "month"
def filter_queryset(self, request, queryset, view):
return queryset.filter(draft=False).order_by('modified')
@register(category=TransactionCategory)
class Archive(ViewMixin):
model = TransactionItem
name = _("Transaction archive")
slug = "archive"
date_resolution = "week"
| [
"django.utils.translation.ugettext_lazy",
"djangobmf.sites.register"
] | [((739, 769), 'djangobmf.sites.register', 'register', ([], {'dashboard': 'Accounting'}), '(dashboard=Accounting)\n', (747, 769), False, 'from djangobmf.sites import register\n'), ((841, 871), 'djangobmf.sites.register', 'register', ([], {'dashboard': 'Accounting'}), '(dashboard=Accounting)\n', (849, 871), False, 'from djangobmf.sites import register\n'), ((1021, 1051), 'djangobmf.sites.register', 'register', ([], {'dashboard': 'Accounting'}), '(dashboard=Accounting)\n', (1029, 1051), False, 'from djangobmf.sites import register\n'), ((1582, 1620), 'djangobmf.sites.register', 'register', ([], {'category': 'TransactionCategory'}), '(category=TransactionCategory)\n', (1590, 1620), False, 'from djangobmf.sites import register\n'), ((1725, 1763), 'djangobmf.sites.register', 'register', ([], {'category': 'TransactionCategory'}), '(category=TransactionCategory)\n', (1733, 1763), False, 'from djangobmf.sites import register\n'), ((2000, 2038), 'djangobmf.sites.register', 'register', ([], {'category': 'TransactionCategory'}), '(category=TransactionCategory)\n', (2008, 2038), False, 'from djangobmf.sites import register\n'), ((2310, 2348), 'djangobmf.sites.register', 'register', ([], {'category': 'TransactionCategory'}), '(category=TransactionCategory)\n', (2318, 2348), False, 'from djangobmf.sites import register\n'), ((1682, 1699), 'django.utils.translation.ugettext_lazy', '_', (['"""All Accounts"""'], {}), "('All Accounts')\n", (1683, 1699), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1834, 1856), 'django.utils.translation.ugettext_lazy', '_', (['"""Open transactions"""'], {}), "('Open transactions')\n", (1835, 1856), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2110, 2134), 'django.utils.translation.ugettext_lazy', '_', (['"""Closed transactions"""'], {}), "('Closed transactions')\n", (2111, 2134), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2414, 2438), 'django.utils.translation.ugettext_lazy', '_', (['"""Transaction archive"""'], {}), "('Transaction archive')\n", (2415, 2438), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
# coding: utf-8
'''Centralized logger factory.'''
import logging
from .config import config
# The level options supported in configuration.
LEVEL_OPTIONS = list((
'notset', 'debug', 'info', 'warning', 'error', 'critical'
))
def _setup_logger_supply():
'''Create and return a logger generator.'''
configured_level = config.development.log_level
# Perform basic configuration.
logging.basicConfig(
level=20, # Configure 3rd party loggers to the INFO level.
format='%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s'
)
def create_log(name):
'''Create a log and elevate it to the configured level.'''
log = logging.getLogger(name)
log.setLevel(LEVEL_OPTIONS.index(configured_level)*10)
return log
return create_log
# Define the callable that can be used to create properly configured loggers.
logger = _setup_logger_supply() # pylint: disable=invalid-name
| [
"logging.getLogger",
"logging.basicConfig"
] | [((384, 483), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': '(20)', 'format': '"""%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s"""'}), "(level=20, format=\n '%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s')\n", (403, 483), False, 'import logging\n'), ((628, 651), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (645, 651), False, 'import logging\n')] |
import sys
import numpy as np
import quaternionic
import pytest
def test_self_return():
def f1(a, b, c):
d = np.asarray(a).copy()
assert isinstance(a, np.ndarray) and isinstance(a, quaternionic.array)
assert isinstance(b, np.ndarray) and isinstance(b, quaternionic.array)
assert isinstance(c, np.ndarray) and isinstance(c, quaternionic.array)
assert isinstance(d, np.ndarray) and not isinstance(d, quaternionic.array)
return d
a = quaternionic.array.random((17, 3, 4))
b = quaternionic.array.random((13, 3, 4))
c = quaternionic.array.random((11, 3, 4))
d1 = f1(a, b, c)
assert isinstance(d1, np.ndarray) and not isinstance(d1, quaternionic.array)
f2 = quaternionic.utilities.type_self_return(f1)
d2 = f2(a, b, c)
assert isinstance(d2, np.ndarray) and isinstance(d2, quaternionic.array)
f1.nin = 3
f3 = quaternionic.utilities.type_self_return(f1)
d3 = f3(a, b, c)
assert isinstance(d3, np.ndarray) and isinstance(d3, quaternionic.array)
def test_ndarray_args():
def f1(a, b, c):
d = np.asarray(a).copy()
assert isinstance(a, np.ndarray) and not isinstance(a, quaternionic.array)
assert isinstance(b, np.ndarray) and not isinstance(b, quaternionic.array)
assert isinstance(c, np.ndarray) and not isinstance(c, quaternionic.array)
assert isinstance(d, np.ndarray) and not isinstance(d, quaternionic.array)
return d
a = quaternionic.array.random((17, 3, 4))
b = quaternionic.array.random((13, 3, 4))
c = quaternionic.array.random((11, 3, 4))
f2 = quaternionic.utilities.ndarray_args(f1)
d2 = f2(a, b, c)
assert isinstance(d2, np.ndarray) and not isinstance(d2, quaternionic.array)
f1.nin = 3
f3 = quaternionic.utilities.ndarray_args(f1)
d3 = f3(a, b, c)
assert isinstance(d3, np.ndarray) and not isinstance(d3, quaternionic.array)
def test_ndarray_args_and_return():
def f1(a, b, c):
d = np.asarray(a).copy()
assert isinstance(a, np.ndarray) and not isinstance(a, quaternionic.array)
assert isinstance(b, np.ndarray) and not isinstance(b, quaternionic.array)
assert isinstance(c, np.ndarray) and not isinstance(c, quaternionic.array)
assert isinstance(d, np.ndarray) and not isinstance(d, quaternionic.array)
return d
a = quaternionic.array.random((17, 3, 4))
b = quaternionic.array.random((13, 3, 4))
c = quaternionic.array.random((11, 3, 4))
f2 = quaternionic.utilities.ndarray_args_and_return(f1)
d2 = f2(a, b, c)
assert isinstance(d2, np.ndarray) and isinstance(d2, quaternionic.array)
f1.nin = 3
f3 = quaternionic.utilities.ndarray_args_and_return(f1)
d3 = f3(a, b, c)
assert isinstance(d3, np.ndarray) and isinstance(d3, quaternionic.array)
@pytest.mark.skipif(sys.implementation.name.lower() == 'pypy', reason="No numba on pypy")
def test_types_to_ftylist():
import numba
types_to_ftylist = quaternionic.utilities.convert_numpy_ufunc_type_to_numba_ftylist
types = '?bhilqpBHILQPfdgF->D'
ftylist = numba.complex128(
numba.boolean,
numba.byte,
numba.short,
numba.intc,
numba.int_,
numba.longlong,
numba.intp,
numba.char,
numba.ushort,
numba.uintc,
numba.uint,
numba.ulonglong,
numba.uintp,
numba.float32,
numba.float_,
numba.double,
numba.complex64,
)
assert types_to_ftylist([types]) == [ftylist]
def test_pyguvectorize():
_quaternion_resolution = 10 * np.finfo(float).resolution
np.random.seed(1234)
one = quaternionic.array(1, 0, 0, 0)
x = quaternionic.array.random((7, 13, 4))
y = quaternionic.array.random((13, 4))
z = np.random.rand(13)
arg0s = [one, -(1+2*_quaternion_resolution)*one, -one, x]
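    # compare each compiled ufunc against a pyguvectorize'd version of its pure-Python counterpart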
for k in dir(quaternionic.algebra_ufuncs):
if not k.startswith('__'):
f1 = getattr(quaternionic.algebra_ufuncs, k)
f2 = getattr(quaternionic.algebra, k)
sig = f2.signature
inputs = sig.split('->')[0].split(',')
for arg0 in arg0s:
args = [arg0.ndarray] if inputs[0] == '(n)' else [z,]
if len(inputs) > 1:
args.append(y.ndarray if inputs[1] == '(n)' else z)
assert np.allclose(
f1(*args),
quaternionic.utilities.pyguvectorize(f2.types, f2.signature)(f2)(*args),
atol=0.0,
rtol=_quaternion_resolution
)
| [
"quaternionic.utilities.ndarray_args",
"numpy.random.seed",
"sys.implementation.name.lower",
"numpy.random.rand",
"numba.complex128",
"numpy.asarray",
"numpy.finfo",
"quaternionic.array",
"quaternionic.array.random",
"quaternionic.utilities.pyguvectorize",
"quaternionic.utilities.type_self_return",
"quaternionic.utilities.ndarray_args_and_return"
] | [((489, 526), 'quaternionic.array.random', 'quaternionic.array.random', (['(17, 3, 4)'], {}), '((17, 3, 4))\n', (514, 526), False, 'import quaternionic\n'), ((535, 572), 'quaternionic.array.random', 'quaternionic.array.random', (['(13, 3, 4)'], {}), '((13, 3, 4))\n', (560, 572), False, 'import quaternionic\n'), ((581, 618), 'quaternionic.array.random', 'quaternionic.array.random', (['(11, 3, 4)'], {}), '((11, 3, 4))\n', (606, 618), False, 'import quaternionic\n'), ((730, 773), 'quaternionic.utilities.type_self_return', 'quaternionic.utilities.type_self_return', (['f1'], {}), '(f1)\n', (769, 773), False, 'import quaternionic\n'), ((896, 939), 'quaternionic.utilities.type_self_return', 'quaternionic.utilities.type_self_return', (['f1'], {}), '(f1)\n', (935, 939), False, 'import quaternionic\n'), ((1476, 1513), 'quaternionic.array.random', 'quaternionic.array.random', (['(17, 3, 4)'], {}), '((17, 3, 4))\n', (1501, 1513), False, 'import quaternionic\n'), ((1522, 1559), 'quaternionic.array.random', 'quaternionic.array.random', (['(13, 3, 4)'], {}), '((13, 3, 4))\n', (1547, 1559), False, 'import quaternionic\n'), ((1568, 1605), 'quaternionic.array.random', 'quaternionic.array.random', (['(11, 3, 4)'], {}), '((11, 3, 4))\n', (1593, 1605), False, 'import quaternionic\n'), ((1615, 1654), 'quaternionic.utilities.ndarray_args', 'quaternionic.utilities.ndarray_args', (['f1'], {}), '(f1)\n', (1650, 1654), False, 'import quaternionic\n'), ((1781, 1820), 'quaternionic.utilities.ndarray_args', 'quaternionic.utilities.ndarray_args', (['f1'], {}), '(f1)\n', (1816, 1820), False, 'import quaternionic\n'), ((2372, 2409), 'quaternionic.array.random', 'quaternionic.array.random', (['(17, 3, 4)'], {}), '((17, 3, 4))\n', (2397, 2409), False, 'import quaternionic\n'), ((2418, 2455), 'quaternionic.array.random', 'quaternionic.array.random', (['(13, 3, 4)'], {}), '((13, 3, 4))\n', (2443, 2455), False, 'import quaternionic\n'), ((2464, 2501), 'quaternionic.array.random', 'quaternionic.array.random', (['(11, 3, 4)'], {}), '((11, 3, 4))\n', (2489, 2501), False, 'import quaternionic\n'), ((2511, 2561), 'quaternionic.utilities.ndarray_args_and_return', 'quaternionic.utilities.ndarray_args_and_return', (['f1'], {}), '(f1)\n', (2557, 2561), False, 'import quaternionic\n'), ((2684, 2734), 'quaternionic.utilities.ndarray_args_and_return', 'quaternionic.utilities.ndarray_args_and_return', (['f1'], {}), '(f1)\n', (2730, 2734), False, 'import quaternionic\n'), ((3108, 3370), 'numba.complex128', 'numba.complex128', (['numba.boolean', 'numba.byte', 'numba.short', 'numba.intc', 'numba.int_', 'numba.longlong', 'numba.intp', 'numba.char', 'numba.ushort', 'numba.uintc', 'numba.uint', 'numba.ulonglong', 'numba.uintp', 'numba.float32', 'numba.float_', 'numba.double', 'numba.complex64'], {}), '(numba.boolean, numba.byte, numba.short, numba.intc, numba.\n int_, numba.longlong, numba.intp, numba.char, numba.ushort, numba.uintc,\n numba.uint, numba.ulonglong, numba.uintp, numba.float32, numba.float_,\n numba.double, numba.complex64)\n', (3124, 3370), False, 'import numba\n'), ((3644, 3664), 'numpy.random.seed', 'np.random.seed', (['(1234)'], {}), '(1234)\n', (3658, 3664), True, 'import numpy as np\n'), ((3675, 3705), 'quaternionic.array', 'quaternionic.array', (['(1)', '(0)', '(0)', '(0)'], {}), '(1, 0, 0, 0)\n', (3693, 3705), False, 'import quaternionic\n'), ((3714, 3751), 'quaternionic.array.random', 'quaternionic.array.random', (['(7, 13, 4)'], {}), '((7, 13, 4))\n', (3739, 3751), False, 'import quaternionic\n'), ((3760, 3794), 
'quaternionic.array.random', 'quaternionic.array.random', (['(13, 4)'], {}), '((13, 4))\n', (3785, 3794), False, 'import quaternionic\n'), ((3803, 3821), 'numpy.random.rand', 'np.random.rand', (['(13)'], {}), '(13)\n', (3817, 3821), True, 'import numpy as np\n'), ((2855, 2886), 'sys.implementation.name.lower', 'sys.implementation.name.lower', ([], {}), '()\n', (2884, 2886), False, 'import sys\n'), ((3613, 3628), 'numpy.finfo', 'np.finfo', (['float'], {}), '(float)\n', (3621, 3628), True, 'import numpy as np\n'), ((123, 136), 'numpy.asarray', 'np.asarray', (['a'], {}), '(a)\n', (133, 136), True, 'import numpy as np\n'), ((1098, 1111), 'numpy.asarray', 'np.asarray', (['a'], {}), '(a)\n', (1108, 1111), True, 'import numpy as np\n'), ((1994, 2007), 'numpy.asarray', 'np.asarray', (['a'], {}), '(a)\n', (2004, 2007), True, 'import numpy as np\n'), ((4453, 4513), 'quaternionic.utilities.pyguvectorize', 'quaternionic.utilities.pyguvectorize', (['f2.types', 'f2.signature'], {}), '(f2.types, f2.signature)\n', (4489, 4513), False, 'import quaternionic\n')] |
from unittest import TestCase
import numpy as np
import os
from xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring import DetectorRISE
from xaitk_saliency import GenerateDetectorProposalSaliency
from smqtk_core.configuration import configuration_test_helper
from tests import DATA_DIR, EXPECTED_MASKS_4x6
class TestSimilarityScoring (TestCase):
def test_init_(self) -> None:
"""
Test if implementation is usable.
"""
impl = DetectorRISE()
assert impl.is_usable() and isinstance(impl, GenerateDetectorProposalSaliency)
def test_default_param(self) -> None:
"""
Test default construction.
"""
impl = DetectorRISE()
assert impl.proximity_metric == 'cosine'
def test_get_config(self) -> None:
"""
        Test expected configuration behavior.
"""
impl = DetectorRISE('euclidean')
for i in configuration_test_helper(impl):
assert i.proximity_metric == 'euclidean'
def test_metric_args(self) -> None:
"""
Test non-default metric type.
"""
impl = DetectorRISE('hamming')
assert impl.proximity_metric == 'hamming'
def test_shape_sanity(self) -> None:
"""
Test basic scoring with a single feature for broadcasting sanity check.
"""
impl = DetectorRISE()
np.random.seed(2)
image1_dets = np.random.rand(2, (7))
pertb_dets = np.random.rand(10, 2, (7))
pertb_mask = np.random.randint(low=0, high=2, size=(10, 15, 25), dtype='int')
sal = impl.generate(image1_dets, pertb_dets, pertb_mask)
assert sal.shape == (2, 15, 25)
def test_standard_detection(self) -> None:
"""
Test basic scoring on known values and non-square masks.
"""
impl = DetectorRISE()
image1_dets = np.array([[1, 1, 4, 3, 0, 1, 0.89]])
pertb_dets = np.array([[[1, 2, 6, 6, 0.3, 1, 0.995]],
[[0, 1, 2, 2, 0.2, 2, 0.03]],
[[1, 0, 2, 2, 0.45, 1, 0.81]],
[[1, 1, 6, 6, 0.5, 1, 0.625]],
[[0, 2, 3, 5, 0.03, 1, 0.56]],
[[1, 2, 6, 3, 0.01, 1, 0.07]],])
sal = impl.generate(image1_dets, pertb_dets, EXPECTED_MASKS_4x6)
standard_sal = np.load(os.path.join(DATA_DIR, 'drisesal.npy'))
assert sal.shape == (1, 4, 6)
assert np.allclose(standard_sal, sal)
| [
"xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring.DetectorRISE",
"numpy.random.seed",
"numpy.allclose",
"smqtk_core.configuration.configuration_test_helper",
"numpy.random.randint",
"numpy.array",
"numpy.random.rand",
"os.path.join"
] | [((472, 486), 'xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring.DetectorRISE', 'DetectorRISE', ([], {}), '()\n', (484, 486), False, 'from xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring import DetectorRISE\n'), ((691, 705), 'xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring.DetectorRISE', 'DetectorRISE', ([], {}), '()\n', (703, 705), False, 'from xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring import DetectorRISE\n'), ((879, 904), 'xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring.DetectorRISE', 'DetectorRISE', (['"""euclidean"""'], {}), "('euclidean')\n", (891, 904), False, 'from xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring import DetectorRISE\n'), ((922, 953), 'smqtk_core.configuration.configuration_test_helper', 'configuration_test_helper', (['impl'], {}), '(impl)\n', (947, 953), False, 'from smqtk_core.configuration import configuration_test_helper\n'), ((1126, 1149), 'xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring.DetectorRISE', 'DetectorRISE', (['"""hamming"""'], {}), "('hamming')\n", (1138, 1149), False, 'from xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring import DetectorRISE\n'), ((1361, 1375), 'xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring.DetectorRISE', 'DetectorRISE', ([], {}), '()\n', (1373, 1375), False, 'from xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring import DetectorRISE\n'), ((1384, 1401), 'numpy.random.seed', 'np.random.seed', (['(2)'], {}), '(2)\n', (1398, 1401), True, 'import numpy as np\n'), ((1424, 1444), 'numpy.random.rand', 'np.random.rand', (['(2)', '(7)'], {}), '(2, 7)\n', (1438, 1444), True, 'import numpy as np\n'), ((1468, 1492), 'numpy.random.rand', 'np.random.rand', (['(10)', '(2)', '(7)'], {}), '(10, 2, 7)\n', (1482, 1492), True, 'import numpy as np\n'), ((1516, 1580), 'numpy.random.randint', 'np.random.randint', ([], {'low': '(0)', 'high': '(2)', 'size': '(10, 15, 25)', 'dtype': '"""int"""'}), "(low=0, high=2, size=(10, 15, 25), dtype='int')\n", (1533, 1580), True, 'import numpy as np\n'), ((1839, 1853), 'xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring.DetectorRISE', 'DetectorRISE', ([], {}), '()\n', (1851, 1853), False, 'from xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring import DetectorRISE\n'), ((1876, 1912), 'numpy.array', 'np.array', (['[[1, 1, 4, 3, 0, 1, 0.89]]'], {}), '([[1, 1, 4, 3, 0, 1, 0.89]])\n', (1884, 1912), True, 'import numpy as np\n'), ((1934, 2138), 'numpy.array', 'np.array', (['[[[1, 2, 6, 6, 0.3, 1, 0.995]], [[0, 1, 2, 2, 0.2, 2, 0.03]], [[1, 0, 2, 2,\n 0.45, 1, 0.81]], [[1, 1, 6, 6, 0.5, 1, 0.625]], [[0, 2, 3, 5, 0.03, 1, \n 0.56]], [[1, 2, 6, 3, 0.01, 1, 0.07]]]'], {}), '([[[1, 2, 6, 6, 0.3, 1, 0.995]], [[0, 1, 2, 2, 0.2, 2, 0.03]], [[1,\n 0, 2, 2, 0.45, 1, 0.81]], [[1, 1, 6, 6, 0.5, 1, 0.625]], [[0, 2, 3, 5, \n 0.03, 1, 0.56]], [[1, 2, 6, 3, 0.01, 1, 0.07]]])\n', (1942, 2138), True, 'import numpy as np\n'), ((2483, 2513), 'numpy.allclose', 'np.allclose', (['standard_sal', 'sal'], {}), '(standard_sal, sal)\n', (2494, 2513), True, 'import numpy as np\n'), ((2390, 2428), 'os.path.join', 'os.path.join', (['DATA_DIR', '"""drisesal.npy"""'], {}), "(DATA_DIR, 'drisesal.npy')\n", (2402, 2428), False, 'import os\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import re
import os
import glob
import imageio
import argparse
import subprocess
import numpy as np
from tqdm import tqdm
from PIL import Image
from pygifsicle import optimize
from obj.arg_formatter import arg_metav_formatter
def sorted_alphanumeric(data):
"""
Function to sort number-containing strings
Args:
data (list): list of strings to sort
Returns:
(list): sorted list
"""
convert = lambda text: int(text) if text.isdigit() else text.lower()
alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
return sorted(data, key=alphanum_key)
def make_plot(direct, number_ticks):
"""
Function to plot values from log csv file
Args:
direct (str): base directory of logged model
number_ticks (int): number of ticks to have on graph
"""
direct = re.sub(r"(\/)?$", "", direct)
direct = re.sub(r"(\.\/)?pickles\/", "", direct)
directLong = "./pickles/" + direct
if not os.path.isdir(directLong):
sys.exit(directLong + " does not exist")
# make vis directory within log directory
os.makedirs(directLong + "/vis", exist_ok=True)
subprocess.call(
["Rscript", "gg.R", "-d", directLong, "-t",
str(number_ticks)])
def make_gif(direct,
shrink_factor=4,
skip_rate=2,
interval=0.1,
until=None,
progress_bar=False):
"""
Function to create gif from images
Args:
direct (str): base directory of logged model
shrink_factor (int): factor by which to downsample images
skip_rate (int): interval to images to use for gif
interval (float): temporal interval for gif construction or speed
until (int): upper limit for epoch to be used in gif construction
progress_bar (bool): True if progress bar should be added to gif
"""
print("creating training evolution gif")
# clean up directory input
direct = re.sub(r"(\/)?$", "", direct)
direct = re.sub(r"(\.\/)?pickles\/", "", direct)
directLong = "./pickles/" + direct
if not os.path.isdir(directLong):
sys.exit(directLong + " does not exist")
# get sorted image list
sorted_list = sorted_alphanumeric(glob.glob(directLong + "/img/*png"))
# assume all images are of same size
size = Image.open(sorted_list[0]).size
new_size = tuple([int(el / shrink_factor) for el in size])
if isinstance(until, int):
sorted_list = sorted_list[:until]
sorted_list = [
Image.open(img).resize(new_size, Image.ANTIALIAS)
for i, img in enumerate(tqdm(sorted_list))
if ((i + 1) % skip_rate == 0 or i == 0)
]
kargs = {'duration': interval}
imageio.mimsave(directLong + "/vis/vis.gif", sorted_list, **kargs)
optimize(directLong + "/vis/vis.gif", directLong + "/vis/vis.gif")
if progress_bar:
print("adding progress bar to gif")
output = subprocess.call("cat " + directLong + "/vis/vis.gif" +
" | gif-progress --bar-color '#000'" + " > " +
directLong + "/vis/out.gif",
shell=True)
if output != 0:
sys.exit("error occurred with gif progress bar, do manual check")
if __name__ == "__main__":
parser = argparse.ArgumentParser(formatter_class=arg_metav_formatter)
required = parser.add_argument_group("required name arguments")
required.add_argument("--log-dir",
type=str,
required=True,
help="base directory within pickles from which to" +
" visualize")
parser.add_argument("--number-ticks",
type=int,
default=10,
help="number of x-axis ticks to use in main plots")
parser.add_argument("--create-gif",
default=False,
action="store_true",
help="option to activate gif creation")
parser.add_argument("--shrink-factor",
type=int,
default=4,
help="shrinking factor for images, applies only" +
" when --create-gif is supplied")
parser.add_argument("--skip-rate",
type=int,
default=2,
help="skip interval when using images to construct" +
" gif applies only when --create-gif is supplied")
parser.add_argument("--interval",
type=float,
default=0.1,
help="time interval when constructing gifs from" +
" images, applies only when --create-gif is supplied")
parser.add_argument("--until",
type=int,
default=None,
help="set upper epoch limit for gif creation," +
" applies only when --create-gif is supplied")
parser.add_argument(
"--progress-bar",
default=False,
action="store_true",
help="option to add progress bar to gifs, applies" +
"only when --create-gif is supplied; check readme for" +
" additional go package installation instructions")
args = parser.parse_args()
# make plot
make_plot(args.log_dir, args.number_ticks)
# if necessary, make gif
if args.create_gif:
make_gif(args.log_dir, args.shrink_factor, args.skip_rate,
args.interval, args.until, args.progress_bar)
| [
"tqdm.tqdm",
"re.split",
"argparse.ArgumentParser",
"os.makedirs",
"os.path.isdir",
"pygifsicle.optimize",
"PIL.Image.open",
"subprocess.call",
"glob.glob",
"sys.exit",
"re.sub",
"imageio.mimsave"
] | [((912, 941), 're.sub', 're.sub', (['"""(\\\\/)?$"""', '""""""', 'direct'], {}), "('(\\\\/)?$', '', direct)\n", (918, 941), False, 'import re\n'), ((955, 996), 're.sub', 're.sub', (['"""(\\\\.\\\\/)?pickles\\\\/"""', '""""""', 'direct'], {}), "('(\\\\.\\\\/)?pickles\\\\/', '', direct)\n", (961, 996), False, 'import re\n'), ((1171, 1218), 'os.makedirs', 'os.makedirs', (["(directLong + '/vis')"], {'exist_ok': '(True)'}), "(directLong + '/vis', exist_ok=True)\n", (1182, 1218), False, 'import os\n'), ((2040, 2069), 're.sub', 're.sub', (['"""(\\\\/)?$"""', '""""""', 'direct'], {}), "('(\\\\/)?$', '', direct)\n", (2046, 2069), False, 'import re\n'), ((2083, 2124), 're.sub', 're.sub', (['"""(\\\\.\\\\/)?pickles\\\\/"""', '""""""', 'direct'], {}), "('(\\\\.\\\\/)?pickles\\\\/', '', direct)\n", (2089, 2124), False, 'import re\n'), ((2794, 2860), 'imageio.mimsave', 'imageio.mimsave', (["(directLong + '/vis/vis.gif')", 'sorted_list'], {}), "(directLong + '/vis/vis.gif', sorted_list, **kargs)\n", (2809, 2860), False, 'import imageio\n'), ((2865, 2931), 'pygifsicle.optimize', 'optimize', (["(directLong + '/vis/vis.gif')", "(directLong + '/vis/vis.gif')"], {}), "(directLong + '/vis/vis.gif', directLong + '/vis/vis.gif')\n", (2873, 2931), False, 'from pygifsicle import optimize\n'), ((3400, 3460), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'arg_metav_formatter'}), '(formatter_class=arg_metav_formatter)\n', (3423, 3460), False, 'import argparse\n'), ((1045, 1070), 'os.path.isdir', 'os.path.isdir', (['directLong'], {}), '(directLong)\n', (1058, 1070), False, 'import os\n'), ((1080, 1120), 'sys.exit', 'sys.exit', (["(directLong + ' does not exist')"], {}), "(directLong + ' does not exist')\n", (1088, 1120), False, 'import sys\n'), ((2173, 2198), 'os.path.isdir', 'os.path.isdir', (['directLong'], {}), '(directLong)\n', (2186, 2198), False, 'import os\n'), ((2208, 2248), 'sys.exit', 'sys.exit', (["(directLong + ' does not exist')"], {}), "(directLong + ' does not exist')\n", (2216, 2248), False, 'import sys\n'), ((2315, 2350), 'glob.glob', 'glob.glob', (["(directLong + '/img/*png')"], {}), "(directLong + '/img/*png')\n", (2324, 2350), False, 'import glob\n'), ((2404, 2430), 'PIL.Image.open', 'Image.open', (['sorted_list[0]'], {}), '(sorted_list[0])\n', (2414, 2430), False, 'from PIL import Image\n'), ((3014, 3164), 'subprocess.call', 'subprocess.call', (['(\'cat \' + directLong + \'/vis/vis.gif\' +\n " | gif-progress --bar-color \'#000\'" + \' > \' + directLong + \'/vis/out.gif\')'], {'shell': '(True)'}), '(\'cat \' + directLong + \'/vis/vis.gif\' +\n " | gif-progress --bar-color \'#000\'" + \' > \' + directLong +\n \'/vis/out.gif\', shell=True)\n', (3029, 3164), False, 'import subprocess\n'), ((3292, 3357), 'sys.exit', 'sys.exit', (['"""error occurred with gif progress bar, do manual check"""'], {}), "('error occurred with gif progress bar, do manual check')\n", (3300, 3357), False, 'import sys\n'), ((604, 629), 're.split', 're.split', (['"""([0-9]+)"""', 'key'], {}), "('([0-9]+)', key)\n", (612, 629), False, 'import re\n'), ((2600, 2615), 'PIL.Image.open', 'Image.open', (['img'], {}), '(img)\n', (2610, 2615), False, 'from PIL import Image\n'), ((2682, 2699), 'tqdm.tqdm', 'tqdm', (['sorted_list'], {}), '(sorted_list)\n', (2686, 2699), False, 'from tqdm import tqdm\n')] |
import torch
import time
import torch.nn as nn
class CNN(nn.Module):
    def __init__(self):
        super(CNN, self).__init__()
        self.conv1 = nn.Sequential(
            nn.Conv2d(in_channels=1, out_channels=16, kernel_size=5, stride=1, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2),
        )
        self.conv2 = nn.Sequential(
            nn.Conv2d(in_channels=16, out_channels=32, kernel_size=5, stride=1, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2),
        )
        self.out = nn.Linear(in_features=32 * 7 * 7, out_features=10)

    def forward(self, x):
        x = self.conv1(x)
        x = self.conv2(x)
        x = x.view(x.size(0), -1)
        output = self.out(x)
        return output, x


class AlexNet(nn.Module):
    def __init__(self):
        super(AlexNet, self).__init__()
        self.conv_layers = nn.Sequential(
            nn.Conv2d(in_channels=1, out_channels=96, kernel_size=3, stride=2, padding=5),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2),
            nn.Conv2d(in_channels=96, out_channels=256, kernel_size=5, stride=1, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
            nn.Conv2d(in_channels=256, out_channels=384, kernel_size=3, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=384, out_channels=384, kernel_size=3, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=384, out_channels=256, kernel_size=3, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
        )
        self.fc_layers = nn.Sequential(
            nn.Linear(256 * 1 * 1, 4096),
            nn.Dropout(0.5),
            nn.Linear(4096, 4096),
            nn.Dropout(0.5),
            nn.Linear(4096, 10),
        )

    def forward(self, x):
        x = self.conv_layers(x)
        x = x.view(x.size(0), -1)
        output = self.fc_layers(x)
        return output, x
| [
"torch.nn.Dropout",
"torch.nn.ReLU",
"torch.nn.Conv2d",
"torch.nn.Linear",
"torch.nn.MaxPool2d"
] | [((578, 628), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': '(32 * 7 * 7)', 'out_features': '(10)'}), '(in_features=32 * 7 * 7, out_features=10)\n', (587, 628), True, 'import torch.nn as nn\n'), ((180, 257), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(1)', 'out_channels': '(16)', 'kernel_size': '(5)', 'stride': '(1)', 'padding': '(2)'}), '(in_channels=1, out_channels=16, kernel_size=5, stride=1, padding=2)\n', (189, 257), True, 'import torch.nn as nn\n'), ((271, 292), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (278, 292), True, 'import torch.nn as nn\n'), ((306, 333), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)'}), '(kernel_size=2)\n', (318, 333), True, 'import torch.nn as nn\n'), ((393, 471), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(16)', 'out_channels': '(32)', 'kernel_size': '(5)', 'stride': '(1)', 'padding': '(2)'}), '(in_channels=16, out_channels=32, kernel_size=5, stride=1, padding=2)\n', (402, 471), True, 'import torch.nn as nn\n'), ((485, 506), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (492, 506), True, 'import torch.nn as nn\n'), ((520, 547), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)'}), '(kernel_size=2)\n', (532, 547), True, 'import torch.nn as nn\n'), ((943, 1020), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(1)', 'out_channels': '(96)', 'kernel_size': '(3)', 'stride': '(2)', 'padding': '(5)'}), '(in_channels=1, out_channels=96, kernel_size=3, stride=2, padding=5)\n', (952, 1020), True, 'import torch.nn as nn\n'), ((1034, 1055), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1041, 1055), True, 'import torch.nn as nn\n'), ((1069, 1106), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)', 'stride': '(2)'}), '(kernel_size=2, stride=2)\n', (1081, 1106), True, 'import torch.nn as nn\n'), ((1121, 1200), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(96)', 'out_channels': '(256)', 'kernel_size': '(5)', 'stride': '(1)', 'padding': '(2)'}), '(in_channels=96, out_channels=256, kernel_size=5, stride=1, padding=2)\n', (1130, 1200), True, 'import torch.nn as nn\n'), ((1214, 1235), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1221, 1235), True, 'import torch.nn as nn\n'), ((1249, 1286), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(2)'}), '(kernel_size=3, stride=2)\n', (1261, 1286), True, 'import torch.nn as nn\n'), ((1301, 1386), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(256)', 'out_channels': '(384)', 'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(in_channels=256, out_channels=384, kernel_size=3, stride=1, padding=1\n )\n', (1310, 1386), True, 'import torch.nn as nn\n'), ((1395, 1416), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1402, 1416), True, 'import torch.nn as nn\n'), ((1431, 1516), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(384)', 'out_channels': '(384)', 'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(in_channels=384, out_channels=384, kernel_size=3, stride=1, padding=1\n )\n', (1440, 1516), True, 'import torch.nn as nn\n'), ((1525, 1546), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1532, 1546), True, 'import torch.nn as nn\n'), ((1561, 1646), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(384)', 'out_channels': '(256)', 'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(in_channels=384, out_channels=256, kernel_size=3, stride=1, padding=1\n )\n', (1570, 1646), True, 'import torch.nn as nn\n'), ((1655, 1676), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1662, 1676), True, 'import torch.nn as nn\n'), ((1690, 1727), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(2)'}), '(kernel_size=3, stride=2)\n', (1702, 1727), True, 'import torch.nn as nn\n'), ((1792, 1820), 'torch.nn.Linear', 'nn.Linear', (['(256 * 1 * 1)', '(4096)'], {}), '(256 * 1 * 1, 4096)\n', (1801, 1820), True, 'import torch.nn as nn\n'), ((1834, 1849), 'torch.nn.Dropout', 'nn.Dropout', (['(0.5)'], {}), '(0.5)\n', (1844, 1849), True, 'import torch.nn as nn\n'), ((1863, 1884), 'torch.nn.Linear', 'nn.Linear', (['(4096)', '(4096)'], {}), '(4096, 4096)\n', (1872, 1884), True, 'import torch.nn as nn\n'), ((1898, 1913), 'torch.nn.Dropout', 'nn.Dropout', (['(0.5)'], {}), '(0.5)\n', (1908, 1913), True, 'import torch.nn as nn\n'), ((1927, 1946), 'torch.nn.Linear', 'nn.Linear', (['(4096)', '(10)'], {}), '(4096, 10)\n', (1936, 1946), True, 'import torch.nn as nn\n')]
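Both classes in this record return a tuple of (logits, flattened features), and the sizes of their first fully connected layers (32 * 7 * 7 for CNN, 256 * 1 * 1 for AlexNet) imply single-channel 28x28 inputs, i.e. MNIST-sized images. The smoke test below is a sketch under that assumption; the batch size of 4 and the random input tensor are illustrative only.

import torch

# Assumes the CNN and AlexNet classes from this record are in scope.
dummy = torch.randn(4, 1, 28, 28)             # 4 grayscale 28x28 images

logits, features = CNN()(dummy)
print(logits.shape, features.shape)        # torch.Size([4, 10]) torch.Size([4, 1568])

logits, features = AlexNet()(dummy)
print(logits.shape, features.shape)        # torch.Size([4, 10]) torch.Size([4, 256])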