Columns (type and value-length range):
nwo: string, length 6 to 76
sha: string, length 40 (fixed)
path: string, length 5 to 118
language: string, 1 distinct value
identifier: string, length 1 to 89
parameters: string, length 2 to 5.4k
argument_list: string, 1 distinct value
return_statement: string, length 0 to 51.1k
docstring: string, length 1 to 17.6k
docstring_summary: string, length 0 to 7.02k
docstring_tokens: sequence
function: string, length 30 to 51.1k
function_tokens: sequence
url: string, length 85 to 218
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/solver.py
python
BaseSolver.write_log
(self, log_name, log_dict)
Write log to TensorBoard log_name - <str> Name of tensorboard variable log_dict - <dict>/<array> Value of variable (e.g. dict of losses), do nothing if value is None
Write log to TensorBoard log_name - <str> Name of tensorboard variable log_dict - <dict>/<array> Value of variable (e.g. dict of losses), do nothing if value is None
[ "Write", "log", "to", "TensorBoard", "log_name", "-", "<str", ">", "Name", "of", "tensorboard", "variable", "log_value", "-", "<dict", ">", "/", "<array", ">", "Value", "of", "variable", "(", "e", ".", "g", ".", "dict", "of", "losses", ")", "passed", "if", "value", "=", "None" ]
def write_log(self, log_name, log_dict):
    '''
    Write log to TensorBoard
        log_name - <str> Name of tensorboard variable
        log_dict - <dict>/<array> Value of variable (e.g. dict of losses), do nothing if value is None
    '''
    if type(log_dict) is dict:
        log_dict = {key: val for key, val in log_dict.items() if (
            val is not None and not math.isnan(val))}
    if log_dict is None:
        pass
    elif len(log_dict) > 0:
        if 'align' in log_name or 'spec' in log_name:
            img, form = log_dict
            self.log.add_image(
                log_name, img, global_step=self.step, dataformats=form)
        elif 'text' in log_name or 'hyp' in log_name:
            self.log.add_text(log_name, log_dict, self.step)
        else:
            self.log.add_scalars(log_name, log_dict, self.step)
[ "def", "write_log", "(", "self", ",", "log_name", ",", "log_dict", ")", ":", "if", "type", "(", "log_dict", ")", "is", "dict", ":", "log_dict", "=", "{", "key", ":", "val", "for", "key", ",", "val", "in", "log_dict", ".", "items", "(", ")", "if", "(", "val", "is", "not", "None", "and", "not", "math", ".", "isnan", "(", "val", ")", ")", "}", "if", "log_dict", "is", "None", ":", "pass", "elif", "len", "(", "log_dict", ")", ">", "0", ":", "if", "'align'", "in", "log_name", "or", "'spec'", "in", "log_name", ":", "img", ",", "form", "=", "log_dict", "self", ".", "log", ".", "add_image", "(", "log_name", ",", "img", ",", "global_step", "=", "self", ".", "step", ",", "dataformats", "=", "form", ")", "elif", "'text'", "in", "log_name", "or", "'hyp'", "in", "log_name", ":", "self", ".", "log", ".", "add_text", "(", "log_name", ",", "log_dict", ",", "self", ".", "step", ")", "else", ":", "self", ".", "log", ".", "add_scalars", "(", "log_name", ",", "log_dict", ",", "self", ".", "step", ")" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/solver.py#L136-L155
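A minimal usage sketch (not part of the dataset row): the `solver` object, tag names, and loss values below are hypothetical, and `self.log` is assumed to be a TensorBoard SummaryWriter as in the class above.

    # Hypothetical usage of BaseSolver.write_log; all values are made up.
    losses = {'ctc': 1.23, 'att': 0.98, 'unused': None}     # None/NaN entries are filtered out
    solver.write_log('loss/train', losses)                  # routed to add_scalars
    solver.write_log('hyp_greedy', 'predicted transcript')  # names containing 'hyp'/'text' go to add_text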
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/solver.py
python
BaseSolver.save_checkpoint
(self, f_name, metric, score, show_msg=True)
Ckpt saver f_name - <str> the name of ckpt file (w/o prefix) to store, overwrite if it exists score - <float> The value of metric used to evaluate model
Ckpt saver f_name - <str> the name of ckpt file (w/o prefix) to store, overwrite if it exists score - <float> The value of metric used to evaluate model
[ "Ckpt", "saver", "f_name", "-", "<str", ">", "the", "name", "phnof", "ckpt", "file", "(", "w", "/", "o", "prefix", ")", "to", "store", "overwrite", "if", "existed", "score", "-", "<float", ">", "The", "value", "of", "metric", "used", "to", "evaluate", "model" ]
def save_checkpoint(self, f_name, metric, score, show_msg=True):
    '''
    Ckpt saver
        f_name - <str> the name of ckpt file (w/o prefix) to store, overwrite if it exists
        score  - <float> The value of metric used to evaluate model
    '''
    ckpt_path = os.path.join(self.ckpdir, f_name)
    full_dict = {
        "model": self.model.state_dict(),
        "optimizer": self.optimizer.get_opt_state_dict(),
        "global_step": self.step,
        metric: score
    }
    # Additional modules to save
    # if self.amp:
    #     full_dict['amp'] = self.amp_lib.state_dict()
    if self.emb_decoder is not None:
        full_dict['emb_decoder'] = self.emb_decoder.state_dict()

    torch.save(full_dict, ckpt_path)
    if show_msg:
        self.verbose("Saved checkpoint (step = {}, {} = {:.2f}) and status @ {}".
                     format(human_format(self.step), metric, score, ckpt_path))
[ "def", "save_checkpoint", "(", "self", ",", "f_name", ",", "metric", ",", "score", ",", "show_msg", "=", "True", ")", ":", "ckpt_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "ckpdir", ",", "f_name", ")", "full_dict", "=", "{", "\"model\"", ":", "self", ".", "model", ".", "state_dict", "(", ")", ",", "\"optimizer\"", ":", "self", ".", "optimizer", ".", "get_opt_state_dict", "(", ")", ",", "\"global_step\"", ":", "self", ".", "step", ",", "metric", ":", "score", "}", "# Additional modules to save", "# if self.amp:", "# full_dict['amp'] = self.amp_lib.state_dict()", "if", "self", ".", "emb_decoder", "is", "not", "None", ":", "full_dict", "[", "'emb_decoder'", "]", "=", "self", ".", "emb_decoder", ".", "state_dict", "(", ")", "torch", ".", "save", "(", "full_dict", ",", "ckpt_path", ")", "if", "show_msg", ":", "self", ".", "verbose", "(", "\"Saved checkpoint (step = {}, {} = {:.2f}) and status @ {}\"", ".", "format", "(", "human_format", "(", "self", ".", "step", ")", ",", "metric", ",", "score", ",", "ckpt_path", ")", ")" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/solver.py#L157-L179
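A short hypothetical call illustrating the expected arguments; the file name, metric name, and score are invented, and `self.ckpdir` is whatever directory the solver was configured with.

    # Hypothetical: store model/optimizer state under ckpdir/best_wer.pth,
    # recording the dev WER that triggered the save.
    solver.save_checkpoint('best_wer.pth', metric='wer', score=0.182)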
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/solver.py
python
BaseSolver.load_data
(self)
Called by main to load all data After this call, data related attributes should be setup (e.g. self.tr_set, self.dev_set) No return value
Called by main to load all data After this call, data related attributes should be setup (e.g. self.tr_set, self.dev_set) No return value
[ "Called", "by", "main", "to", "load", "all", "data", "After", "this", "call", "data", "related", "attributes", "should", "be", "setup", "(", "e", ".", "g", ".", "self", ".", "tr_set", "self", ".", "dev_set", ")", "No", "return", "value" ]
def load_data(self):
    '''
    Called by main to load all data
    After this call, data related attributes should be setup (e.g. self.tr_set, self.dev_set)
    No return value
    '''
    raise NotImplementedError
[ "def", "load_data", "(", "self", ")", ":", "raise", "NotImplementedError" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/solver.py#L193-L199
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/solver.py
python
BaseSolver.set_model
(self)
Called by main to set models After this call, model related attributes should be setup (e.g. self.l2_loss) The followings MUST be setup - self.model (torch.nn.Module) - self.optimizer (src.Optimizer), init. w/ self.optimizer = src.Optimizer(self.model.parameters(),**self.config['hparas']) Loading pre-trained model should also be performed here No return value
Called by main to set models After this call, model related attributes should be setup (e.g. self.l2_loss) The followings MUST be setup - self.model (torch.nn.Module) - self.optimizer (src.Optimizer), init. w/ self.optimizer = src.Optimizer(self.model.parameters(),**self.config['hparas']) Loading pre-trained model should also be performed here No return value
[ "Called", "by", "main", "to", "set", "models", "After", "this", "call", "model", "related", "attributes", "should", "be", "setup", "(", "e", ".", "g", ".", "self", ".", "l2_loss", ")", "The", "followings", "MUST", "be", "setup", "-", "self", ".", "model", "(", "torch", ".", "nn", ".", "Module", ")", "-", "self", ".", "optimizer", "(", "src", ".", "Optimizer", ")", "init", ".", "w", "/", "self", ".", "optimizer", "=", "src", ".", "Optimizer", "(", "self", ".", "model", ".", "parameters", "()", "**", "self", ".", "config", "[", "hparas", "]", ")", "Loading", "pre", "-", "trained", "model", "should", "also", "be", "performed", "here", "No", "return", "value" ]
def set_model(self):
    '''
    Called by main to set models
    After this call, model related attributes should be setup (e.g. self.l2_loss)
    The followings MUST be setup
        - self.model (torch.nn.Module)
        - self.optimizer (src.Optimizer),
          init. w/ self.optimizer = src.Optimizer(self.model.parameters(), **self.config['hparas'])
    Loading pre-trained model should also be performed here
    No return value
    '''
    raise NotImplementedError
[ "def", "set_model", "(", "self", ")", ":", "raise", "NotImplementedError" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/solver.py#L202-L213
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/solver.py
python
BaseSolver.exec
(self)
Called by main to execute training/inference
Called by main to execute training/inference
[ "Called", "by", "main", "to", "execute", "training", "/", "inference" ]
def exec(self):
    '''
    Called by main to execute training/inference
    '''
    raise NotImplementedError
[ "def", "exec", "(", "self", ")", ":", "raise", "NotImplementedError" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/solver.py#L216-L220
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/decode.py
python
Hypothesis.avgScore
(self)
return sum(self.output_scores) / len(self.output_scores)
Return the averaged log probability of hypothesis
Return the averaged log probability of hypothesis
[ "Return", "the", "averaged", "log", "probability", "of", "hypothesis" ]
def avgScore(self):
    '''Return the averaged log probability of hypothesis'''
    assert len(self.output_scores) != 0
    return sum(self.output_scores) / len(self.output_scores)
[ "def", "avgScore", "(", "self", ")", ":", "assert", "len", "(", "self", ".", "output_scores", ")", "!=", "0", "return", "sum", "(", "self", ".", "output_scores", ")", "/", "len", "(", "self", ".", "output_scores", ")" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/decode.py#L204-L207
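A quick worked example of the score this returns, with made-up per-token log probabilities:

    scores = [-0.5, -1.0, -1.5]      # hypothetical per-token log probabilities of one beam
    avg = sum(scores) / len(scores)  # -1.0: the length-normalised log prob used to rank hypotheses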
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/decode.py
python
Hypothesis.addTopk
(self, topi, topv, decoder_state, att_map=None, lm_state=None, ctc_state=None, ctc_prob=0.0, ctc_candidates=[])
return None, new_hypothesis
Expand current hypothesis with a given beam size
Expand current hypothesis with a given beam size
[ "Expand", "current", "hypothesis", "with", "a", "given", "beam", "size" ]
def addTopk(self, topi, topv, decoder_state, att_map=None,
            lm_state=None, ctc_state=None, ctc_prob=0.0, ctc_candidates=[]):
    '''Expand current hypothesis with a given beam size'''
    new_hypothesis = []
    term_score = None
    ctc_s, ctc_p = None, None
    beam_size = topi.shape[-1]

    for i in range(beam_size):
        # Detect <eos>
        if topi[i].item() == 1:
            term_score = topv[i].cpu()
            continue

        idxes = self.output_seq[:]      # pass by value
        scores = self.output_scores[:]  # pass by value
        idxes.append(topi[i].cpu())
        scores.append(topv[i].cpu())
        if ctc_state is not None:
            # ToDo: Handle out-of-candidate case.
            idx = ctc_candidates.index(topi[i].item())
            ctc_s = ctc_state[idx, :, :]
            ctc_p = ctc_prob[idx]
        new_hypothesis.append(Hypothesis(decoder_state,
                                         output_seq=idxes, output_scores=scores, lm_state=lm_state,
                                         ctc_state=ctc_s, ctc_prob=ctc_p, att_map=att_map))
    if term_score is not None:
        self.output_seq.append(torch.tensor(1))
        self.output_scores.append(term_score)
        return self, new_hypothesis
    return None, new_hypothesis
[ "def", "addTopk", "(", "self", ",", "topi", ",", "topv", ",", "decoder_state", ",", "att_map", "=", "None", ",", "lm_state", "=", "None", ",", "ctc_state", "=", "None", ",", "ctc_prob", "=", "0.0", ",", "ctc_candidates", "=", "[", "]", ")", ":", "new_hypothesis", "=", "[", "]", "term_score", "=", "None", "ctc_s", ",", "ctc_p", "=", "None", ",", "None", "beam_size", "=", "topi", ".", "shape", "[", "-", "1", "]", "for", "i", "in", "range", "(", "beam_size", ")", ":", "# Detect <eos>", "if", "topi", "[", "i", "]", ".", "item", "(", ")", "==", "1", ":", "term_score", "=", "topv", "[", "i", "]", ".", "cpu", "(", ")", "continue", "idxes", "=", "self", ".", "output_seq", "[", ":", "]", "# pass by value", "scores", "=", "self", ".", "output_scores", "[", ":", "]", "# pass by value", "idxes", ".", "append", "(", "topi", "[", "i", "]", ".", "cpu", "(", ")", ")", "scores", ".", "append", "(", "topv", "[", "i", "]", ".", "cpu", "(", ")", ")", "if", "ctc_state", "is", "not", "None", ":", "# ToDo: Handle out-of-candidate case.", "idx", "=", "ctc_candidates", ".", "index", "(", "topi", "[", "i", "]", ".", "item", "(", ")", ")", "ctc_s", "=", "ctc_state", "[", "idx", ",", ":", ",", ":", "]", "ctc_p", "=", "ctc_prob", "[", "idx", "]", "new_hypothesis", ".", "append", "(", "Hypothesis", "(", "decoder_state", ",", "output_seq", "=", "idxes", ",", "output_scores", "=", "scores", ",", "lm_state", "=", "lm_state", ",", "ctc_state", "=", "ctc_s", ",", "ctc_prob", "=", "ctc_p", ",", "att_map", "=", "att_map", ")", ")", "if", "term_score", "is", "not", "None", ":", "self", ".", "output_seq", ".", "append", "(", "torch", ".", "tensor", "(", "1", ")", ")", "self", ".", "output_scores", ".", "append", "(", "term_score", ")", "return", "self", ",", "new_hypothesis", "return", "None", ",", "new_hypothesis" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/decode.py#L209-L239
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/plugin.py
python
EmbeddingRegularizer.fuse_prob
(self, x_emb, dec_logit)
return log_fused_prob
Takes context and decoder logit to perform word embedding fusion
Takes context and decoder logit to perform word embedding fusion
[ "Takes", "context", "and", "decoder", "logit", "to", "perform", "word", "embedding", "fusion" ]
def fuse_prob(self, x_emb, dec_logit):
    ''' Takes context and decoder logit to perform word embedding fusion '''
    # Compute distribution for dec/emb
    if self.fuse_normalize:
        emb_logit = nn.functional.linear(nn.functional.normalize(x_emb, dim=-1),
                                         nn.functional.normalize(self.emb_table.weight, dim=-1))
    else:
        emb_logit = nn.functional.linear(x_emb, self.emb_table.weight)
    emb_prob = (nn.functional.relu(self.temp)*emb_logit).softmax(dim=-1)
    dec_prob = dec_logit.softmax(dim=-1)
    # Mix distribution
    if self.fuse_learnable:
        fused_prob = (1-torch.sigmoid(self.fuse_lambda))*dec_prob +\
            torch.sigmoid(self.fuse_lambda)*emb_prob
    else:
        fused_prob = (1-self.fuse_lambda)*dec_prob + \
            self.fuse_lambda*emb_prob
    # Log-prob
    log_fused_prob = (fused_prob+self.eps).log()

    return log_fused_prob
[ "def", "fuse_prob", "(", "self", ",", "x_emb", ",", "dec_logit", ")", ":", "# Compute distribution for dec/emb", "if", "self", ".", "fuse_normalize", ":", "emb_logit", "=", "nn", ".", "functional", ".", "linear", "(", "nn", ".", "functional", ".", "normalize", "(", "x_emb", ",", "dim", "=", "-", "1", ")", ",", "nn", ".", "functional", ".", "normalize", "(", "self", ".", "emb_table", ".", "weight", ",", "dim", "=", "-", "1", ")", ")", "else", ":", "emb_logit", "=", "nn", ".", "functional", ".", "linear", "(", "x_emb", ",", "self", ".", "emb_table", ".", "weight", ")", "emb_prob", "=", "(", "nn", ".", "functional", ".", "relu", "(", "self", ".", "temp", ")", "*", "emb_logit", ")", ".", "softmax", "(", "dim", "=", "-", "1", ")", "dec_prob", "=", "dec_logit", ".", "softmax", "(", "dim", "=", "-", "1", ")", "# Mix distribution", "if", "self", ".", "fuse_learnable", ":", "fused_prob", "=", "(", "1", "-", "torch", ".", "sigmoid", "(", "self", ".", "fuse_lambda", ")", ")", "*", "dec_prob", "+", "torch", ".", "sigmoid", "(", "self", ".", "fuse_lambda", ")", "*", "emb_prob", "else", ":", "fused_prob", "=", "(", "1", "-", "self", ".", "fuse_lambda", ")", "*", "dec_prob", "+", "self", ".", "fuse_lambda", "*", "emb_prob", "# Log-prob", "log_fused_prob", "=", "(", "fused_prob", "+", "self", ".", "eps", ")", ".", "log", "(", ")", "return", "log_fused_prob" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/plugin.py#L103-L123
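A standalone sketch of the fusion arithmetic above, assuming the normalized, learnable-lambda branch; all tensor shapes and parameter values are hypothetical stand-ins for the module's attributes.

    import torch
    import torch.nn.functional as F

    # Hypothetical shapes: 2 decode steps, vocab of 5, embedding dim 8.
    x_emb = torch.randn(2, 8)        # context projected into word-embedding space
    dec_logit = torch.randn(2, 5)    # raw decoder logits
    emb_table = torch.randn(5, 8)    # tied word-embedding table (stand-in for self.emb_table.weight)
    fuse_lambda, temp, eps = torch.tensor(0.0), torch.tensor(1.0), 1e-8

    # Cosine-style similarity against every embedding row, temperature-scaled softmax.
    emb_logit = F.linear(F.normalize(x_emb, dim=-1), F.normalize(emb_table, dim=-1))
    emb_prob = (F.relu(temp) * emb_logit).softmax(dim=-1)
    dec_prob = dec_logit.softmax(dim=-1)

    lam = torch.sigmoid(fuse_lambda)  # learnable mixing weight kept in (0, 1)
    log_fused_prob = ((1 - lam) * dec_prob + lam * emb_prob + eps).log()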
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/data.py
python
collect_audio_batch
(batch, audio_transform, mode)
return file, audio_feat, audio_len, text
Collects a batch, should be list of tuples (audio_path <str>, list of int token <list>) e.g. [(file1,txt1),(file2,txt2),...]
Collects a batch, should be list of tuples (audio_path <str>, list of int token <list>) e.g. [(file1,txt1),(file2,txt2),...]
[ "Collects", "a", "batch", "should", "be", "list", "of", "tuples", "(", "audio_path", "<str", ">", "list", "of", "int", "token", "<list", ">", ")", "e", ".", "g", ".", "[", "(", "file1", "txt1", ")", "(", "file2", "txt2", ")", "...", "]" ]
def collect_audio_batch(batch, audio_transform, mode):
    '''Collects a batch, should be list of tuples (audio_path <str>, list of int token <list>)
       e.g. [(file1,txt1),(file2,txt2),...] '''

    # Bucketed batch should be [[(file1,txt1),(file2,txt2),...]]
    if type(batch[0]) is not tuple:
        batch = batch[0]
    # Make sure that batch size is reasonable
    first_len = audio_transform(str(batch[0][0])).shape[0]
    if first_len > HALF_BATCHSIZE_AUDIO_LEN and mode == 'train':
        batch = batch[:len(batch)//2]
    # Read batch
    file, audio_feat, audio_len, text = [], [], [], []
    with torch.no_grad():
        for b in batch:
            file.append(str(b[0]).split('/')[-1].split('.')[0])
            feat = audio_transform(str(b[0]))
            audio_feat.append(feat)
            audio_len.append(len(feat))
            text.append(torch.LongTensor(b[1]))
    # Descending audio length within each batch
    audio_len, file, audio_feat, text = zip(*[(feat_len, f_name, feat, txt)
                                              for feat_len, f_name, feat, txt in
                                              sorted(zip(audio_len, file, audio_feat, text),
                                                     reverse=True, key=lambda x: x[0])])
    # Zero-padding
    audio_feat = pad_sequence(audio_feat, batch_first=True)
    text = pad_sequence(text, batch_first=True)
    audio_len = torch.LongTensor(audio_len)

    return file, audio_feat, audio_len, text
[ "def", "collect_audio_batch", "(", "batch", ",", "audio_transform", ",", "mode", ")", ":", "# Bucketed batch should be [[(file1,txt1),(file2,txt2),...]]", "if", "type", "(", "batch", "[", "0", "]", ")", "is", "not", "tuple", ":", "batch", "=", "batch", "[", "0", "]", "# Make sure that batch size is reasonable", "first_len", "=", "audio_transform", "(", "str", "(", "batch", "[", "0", "]", "[", "0", "]", ")", ")", ".", "shape", "[", "0", "]", "if", "first_len", ">", "HALF_BATCHSIZE_AUDIO_LEN", "and", "mode", "==", "'train'", ":", "batch", "=", "batch", "[", ":", "len", "(", "batch", ")", "//", "2", "]", "# Read batch", "file", ",", "audio_feat", ",", "audio_len", ",", "text", "=", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", "with", "torch", ".", "no_grad", "(", ")", ":", "for", "b", "in", "batch", ":", "file", ".", "append", "(", "str", "(", "b", "[", "0", "]", ")", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", "feat", "=", "audio_transform", "(", "str", "(", "b", "[", "0", "]", ")", ")", "audio_feat", ".", "append", "(", "feat", ")", "audio_len", ".", "append", "(", "len", "(", "feat", ")", ")", "text", ".", "append", "(", "torch", ".", "LongTensor", "(", "b", "[", "1", "]", ")", ")", "# Descending audio length within each batch", "audio_len", ",", "file", ",", "audio_feat", ",", "text", "=", "zip", "(", "*", "[", "(", "feat_len", ",", "f_name", ",", "feat", ",", "txt", ")", "for", "feat_len", ",", "f_name", ",", "feat", ",", "txt", "in", "sorted", "(", "zip", "(", "audio_len", ",", "file", ",", "audio_feat", ",", "text", ")", ",", "reverse", "=", "True", ",", "key", "=", "lambda", "x", ":", "x", "[", "0", "]", ")", "]", ")", "# Zero-padding", "audio_feat", "=", "pad_sequence", "(", "audio_feat", ",", "batch_first", "=", "True", ")", "text", "=", "pad_sequence", "(", "text", ",", "batch_first", "=", "True", ")", "audio_len", "=", "torch", ".", "LongTensor", "(", "audio_len", ")", "return", "file", ",", "audio_feat", ",", "audio_len", ",", "text" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/data.py#L14-L43
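A hypothetical illustration of the collate output, assuming the module-level constants and imports of src/data.py are in scope; the dummy transform and file paths below are placeholders for the real feature extractor and corpus.

    import torch

    # Stand-in for the real feature extractor: 100 frames x 40 dims per file.
    def dummy_transform(path):
        return torch.randn(100, 40)

    # Batch of (audio_path, token id list) pairs, as described in the docstring.
    batch = [('data/a.flac', [5, 6, 7]), ('data/b.flac', [8, 9])]
    file, feat, feat_len, text = collect_audio_batch(batch, dummy_transform, mode='test')
    # file     -> ('a', 'b'): utterance ids parsed from the paths
    # feat     -> FloatTensor of shape (2, 100, 40), zero-padded
    # feat_len -> LongTensor([100, 100])
    # text     -> LongTensor of shape (2, 3), the shorter transcript zero-padded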
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/data.py
python
collect_text_batch
(batch, mode)
return text
Collects a batch of text, should be list of list of int token e.g. [txt1 <list>,txt2 <list>,...]
Collects a batch of text, should be list of list of int token e.g. [txt1 <list>,txt2 <list>,...]
[ "Collects", "a", "batch", "of", "text", "should", "be", "list", "of", "list", "of", "int", "token", "e", ".", "g", ".", "[", "txt1", "<list", ">", "txt2", "<list", ">", "...", "]" ]
def collect_text_batch(batch, mode):
    '''Collects a batch of text, should be list of list of int token
       e.g. [txt1 <list>,txt2 <list>,...] '''

    # Bucketed batch should be [[txt1, txt2,...]]
    if type(batch[0][0]) is list:
        batch = batch[0]
    # Half batch size if input is too long
    if len(batch[0]) > HALF_BATCHSIZE_TEXT_LEN and mode == 'train':
        batch = batch[:len(batch)//2]
    # Read batch
    text = [torch.LongTensor(b) for b in batch]
    # Zero-padding
    text = pad_sequence(text, batch_first=True)

    return text
[ "def", "collect_text_batch", "(", "batch", ",", "mode", ")", ":", "# Bucketed batch should be [[txt1, txt2,...]]", "if", "type", "(", "batch", "[", "0", "]", "[", "0", "]", ")", "is", "list", ":", "batch", "=", "batch", "[", "0", "]", "# Half batch size if input to long", "if", "len", "(", "batch", "[", "0", "]", ")", ">", "HALF_BATCHSIZE_TEXT_LEN", "and", "mode", "==", "'train'", ":", "batch", "=", "batch", "[", ":", "len", "(", "batch", ")", "//", "2", "]", "# Read batch", "text", "=", "[", "torch", ".", "LongTensor", "(", "b", ")", "for", "b", "in", "batch", "]", "# Zero-padding", "text", "=", "pad_sequence", "(", "text", ",", "batch_first", "=", "True", ")", "return", "text" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/data.py#L46-L61
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/data.py
python
create_dataset
(tokenizer, ascending, name, path, bucketing, batch_size, train_split=None, dev_split=None, test_split=None)
Interface for creating all kinds of dataset
Interface for creating all kinds of dataset
[ "Interface", "for", "creating", "all", "kinds", "of", "dataset" ]
def create_dataset(tokenizer, ascending, name, path, bucketing, batch_size,
                   train_split=None, dev_split=None, test_split=None):
    ''' Interface for creating all kinds of dataset'''

    # Recognize corpus
    if name.lower() == "librispeech":
        from corpus.librispeech import LibriDataset as Dataset
    else:
        raise NotImplementedError

    # Create dataset
    if train_split is not None:
        # Training mode
        mode = 'train'
        tr_loader_bs = 1 if bucketing and (not ascending) else batch_size
        bucket_size = batch_size if bucketing and (
            not ascending) else 1  # Ascending without bucketing
        # Do not use bucketing for dev set
        dv_set = Dataset(path, dev_split, tokenizer, 1)
        tr_set = Dataset(path, train_split, tokenizer,
                         bucket_size, ascending=ascending)
        # Messages to show
        msg_list = _data_msg(name, path, train_split.__str__(), len(tr_set),
                             dev_split.__str__(), len(dv_set), batch_size, bucketing)

        return tr_set, dv_set, tr_loader_bs, batch_size, mode, msg_list
    else:
        # Testing mode
        mode = 'test'
        # Do not use bucketing for dev set
        dv_set = Dataset(path, dev_split, tokenizer, 1)
        # Do not use bucketing for test set
        tt_set = Dataset(path, test_split, tokenizer, 1)
        # Messages to show
        msg_list = _data_msg(name, path, dev_split.__str__(), len(dv_set),
                             test_split.__str__(), len(tt_set), batch_size, False)
        msg_list = [m.replace('Dev', 'Test').replace(
            'Train', 'Dev') for m in msg_list]
        return dv_set, tt_set, batch_size, batch_size, mode, msg_list
[ "def", "create_dataset", "(", "tokenizer", ",", "ascending", ",", "name", ",", "path", ",", "bucketing", ",", "batch_size", ",", "train_split", "=", "None", ",", "dev_split", "=", "None", ",", "test_split", "=", "None", ")", ":", "# Recognize corpus", "if", "name", ".", "lower", "(", ")", "==", "\"librispeech\"", ":", "from", "corpus", ".", "librispeech", "import", "LibriDataset", "as", "Dataset", "else", ":", "raise", "NotImplementedError", "# Create dataset", "if", "train_split", "is", "not", "None", ":", "# Training mode", "mode", "=", "'train'", "tr_loader_bs", "=", "1", "if", "bucketing", "and", "(", "not", "ascending", ")", "else", "batch_size", "bucket_size", "=", "batch_size", "if", "bucketing", "and", "(", "not", "ascending", ")", "else", "1", "# Ascending without bucketing", "# Do not use bucketing for dev set", "dv_set", "=", "Dataset", "(", "path", ",", "dev_split", ",", "tokenizer", ",", "1", ")", "tr_set", "=", "Dataset", "(", "path", ",", "train_split", ",", "tokenizer", ",", "bucket_size", ",", "ascending", "=", "ascending", ")", "# Messages to show", "msg_list", "=", "_data_msg", "(", "name", ",", "path", ",", "train_split", ".", "__str__", "(", ")", ",", "len", "(", "tr_set", ")", ",", "dev_split", ".", "__str__", "(", ")", ",", "len", "(", "dv_set", ")", ",", "batch_size", ",", "bucketing", ")", "return", "tr_set", ",", "dv_set", ",", "tr_loader_bs", ",", "batch_size", ",", "mode", ",", "msg_list", "else", ":", "# Testing model", "mode", "=", "'test'", "# Do not use bucketing for dev set", "dv_set", "=", "Dataset", "(", "path", ",", "dev_split", ",", "tokenizer", ",", "1", ")", "# Do not use bucketing for test set", "tt_set", "=", "Dataset", "(", "path", ",", "test_split", ",", "tokenizer", ",", "1", ")", "# Messages to show", "msg_list", "=", "_data_msg", "(", "name", ",", "path", ",", "dev_split", ".", "__str__", "(", ")", ",", "len", "(", "dv_set", ")", ",", "test_split", ".", "__str__", "(", ")", ",", "len", "(", "tt_set", ")", ",", "batch_size", ",", "False", ")", "msg_list", "=", "[", "m", ".", "replace", "(", "'Dev'", ",", "'Test'", ")", ".", "replace", "(", "'Train'", ",", "'Dev'", ")", "for", "m", "in", "msg_list", "]", "return", "dv_set", ",", "tt_set", ",", "batch_size", ",", "batch_size", ",", "mode", ",", "msg_list" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/data.py#L64-L102
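A hypothetical call showing how a corpus block of the YAML config might be splatted into this interface; the config keys, split names, and path below are assumptions for illustration, not values taken from the dataset row.

    # Hypothetical corpus config dict (ascending sort disabled).
    corpus = dict(name='Librispeech', path='data/LibriSpeech',
                  train_split=['train-clean-100'], dev_split=['dev-clean'],
                  bucketing=True, batch_size=16)
    tr_set, dv_set, tr_bs, dv_bs, mode, msgs = create_dataset(tokenizer, False, **corpus)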
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/data.py
python
create_textset
(tokenizer, train_split, dev_split, name, path, bucketing, batch_size)
return tr_set, dv_set, tr_loader_bs, batch_size, msg_list
Interface for creating all kinds of text dataset
Interface for creating all kinds of text dataset
[ "Interface", "for", "creating", "all", "kinds", "of", "text", "dataset" ]
def create_textset(tokenizer, train_split, dev_split, name, path, bucketing, batch_size):
    ''' Interface for creating all kinds of text dataset'''
    msg_list = []

    # Recognize corpus
    if name.lower() == "librispeech":
        from corpus.librispeech import LibriTextDataset as Dataset
    else:
        raise NotImplementedError

    # Create dataset
    bucket_size = batch_size if bucketing else 1
    tr_loader_bs = 1 if bucketing else batch_size
    # Do not use bucketing for dev set
    dv_set = Dataset(path, dev_split, tokenizer, 1)
    tr_set = Dataset(path, train_split, tokenizer, bucket_size)
    # Messages to show
    msg_list = _data_msg(name, path, train_split.__str__(), len(tr_set),
                         dev_split.__str__(), len(dv_set), batch_size, bucketing)

    return tr_set, dv_set, tr_loader_bs, batch_size, msg_list
[ "def", "create_textset", "(", "tokenizer", ",", "train_split", ",", "dev_split", ",", "name", ",", "path", ",", "bucketing", ",", "batch_size", ")", ":", "msg_list", "=", "[", "]", "# Recognize corpus", "if", "name", ".", "lower", "(", ")", "==", "\"librispeech\"", ":", "from", "corpus", ".", "librispeech", "import", "LibriTextDataset", "as", "Dataset", "else", ":", "raise", "NotImplementedError", "# Create dataset", "bucket_size", "=", "batch_size", "if", "bucketing", "else", "1", "tr_loader_bs", "=", "1", "if", "bucketing", "else", "batch_size", "# Do not use bucketing for dev set", "dv_set", "=", "Dataset", "(", "path", ",", "dev_split", ",", "tokenizer", ",", "1", ")", "tr_set", "=", "Dataset", "(", "path", ",", "train_split", ",", "tokenizer", ",", "bucket_size", ")", "# Messages to show", "msg_list", "=", "_data_msg", "(", "name", ",", "path", ",", "train_split", ".", "__str__", "(", ")", ",", "len", "(", "tr_set", ")", ",", "dev_split", ".", "__str__", "(", ")", ",", "len", "(", "dv_set", ")", ",", "batch_size", ",", "bucketing", ")", "return", "tr_set", ",", "dv_set", ",", "tr_loader_bs", ",", "batch_size", ",", "msg_list" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/data.py#L105-L126
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/data.py
python
load_dataset
(n_jobs, use_gpu, pin_memory, ascending, corpus, audio, text)
return tr_set, dv_set, feat_dim, tokenizer.vocab_size, tokenizer, data_msg
Prepare dataloader for training/validation
Prepare dataloader for training/validation
[ "Prepare", "dataloader", "for", "training", "/", "validation" ]
def load_dataset(n_jobs, use_gpu, pin_memory, ascending, corpus, audio, text):
    ''' Prepare dataloader for training/validation'''

    # Audio feature extractor
    audio_transform, feat_dim = create_transform(audio.copy())
    # Text tokenizer
    tokenizer = load_text_encoder(**text)
    # Dataset (in testing mode, tr_set=dv_set, dv_set=tt_set)
    tr_set, dv_set, tr_loader_bs, dv_loader_bs, mode, data_msg = create_dataset(
        tokenizer, ascending, **corpus)
    # Collect function
    collect_tr = partial(collect_audio_batch,
                         audio_transform=audio_transform, mode=mode)
    collect_dv = partial(collect_audio_batch,
                         audio_transform=audio_transform, mode='test')
    # Shuffle/drop applied to training set only
    shuffle = (mode == 'train' and not ascending)
    drop_last = shuffle
    # Create data loader
    tr_set = DataLoader(tr_set, batch_size=tr_loader_bs, shuffle=shuffle, drop_last=drop_last,
                        collate_fn=collect_tr, num_workers=n_jobs, pin_memory=use_gpu)
    dv_set = DataLoader(dv_set, batch_size=dv_loader_bs, shuffle=False, drop_last=False,
                        collate_fn=collect_dv, num_workers=n_jobs, pin_memory=pin_memory)
    # Messages to show
    data_msg.append('I/O spec. | Audio feature = {}\t| feature dim = {}\t| Token type = {}\t| Vocab size = {}'
                    .format(audio['feat_type'], feat_dim, tokenizer.token_type, tokenizer.vocab_size))

    return tr_set, dv_set, feat_dim, tokenizer.vocab_size, tokenizer, data_msg
[ "def", "load_dataset", "(", "n_jobs", ",", "use_gpu", ",", "pin_memory", ",", "ascending", ",", "corpus", ",", "audio", ",", "text", ")", ":", "# Audio feature extractor", "audio_transform", ",", "feat_dim", "=", "create_transform", "(", "audio", ".", "copy", "(", ")", ")", "# Text tokenizer", "tokenizer", "=", "load_text_encoder", "(", "*", "*", "text", ")", "# Dataset (in testing mode, tr_set=dv_set, dv_set=tt_set)", "tr_set", ",", "dv_set", ",", "tr_loader_bs", ",", "dv_loader_bs", ",", "mode", ",", "data_msg", "=", "create_dataset", "(", "tokenizer", ",", "ascending", ",", "*", "*", "corpus", ")", "# Collect function", "collect_tr", "=", "partial", "(", "collect_audio_batch", ",", "audio_transform", "=", "audio_transform", ",", "mode", "=", "mode", ")", "collect_dv", "=", "partial", "(", "collect_audio_batch", ",", "audio_transform", "=", "audio_transform", ",", "mode", "=", "'test'", ")", "# Shuffle/drop applied to training set only", "shuffle", "=", "(", "mode", "==", "'train'", "and", "not", "ascending", ")", "drop_last", "=", "shuffle", "# Create data loader", "tr_set", "=", "DataLoader", "(", "tr_set", ",", "batch_size", "=", "tr_loader_bs", ",", "shuffle", "=", "shuffle", ",", "drop_last", "=", "drop_last", ",", "collate_fn", "=", "collect_tr", ",", "num_workers", "=", "n_jobs", ",", "pin_memory", "=", "use_gpu", ")", "dv_set", "=", "DataLoader", "(", "dv_set", ",", "batch_size", "=", "dv_loader_bs", ",", "shuffle", "=", "False", ",", "drop_last", "=", "False", ",", "collate_fn", "=", "collect_dv", ",", "num_workers", "=", "n_jobs", ",", "pin_memory", "=", "pin_memory", ")", "# Messages to show", "data_msg", ".", "append", "(", "'I/O spec. | Audio feature = {}\\t| feature dim = {}\\t| Token type = {}\\t| Vocab size = {}'", ".", "format", "(", "audio", "[", "'feat_type'", "]", ",", "feat_dim", ",", "tokenizer", ".", "token_type", ",", "tokenizer", ".", "vocab_size", ")", ")", "return", "tr_set", ",", "dv_set", ",", "feat_dim", ",", "tokenizer", ".", "vocab_size", ",", "tokenizer", ",", "data_msg" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/data.py#L129-L156
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/data.py
python
_data_msg
(name, path, train_split, tr_set, dev_split, dv_set, batch_size, bucketing)
return msg_list
List msg for verbose function
List msg for verbose function
[ "List", "msg", "for", "verbose", "function" ]
def _data_msg(name, path, train_split, tr_set, dev_split, dv_set, batch_size, bucketing):
    ''' List msg for verbose function '''
    msg_list = []
    msg_list.append('Data spec. | Corpus = {} (from {})'.format(name, path))
    msg_list.append(' | Train sets = {}\t| Number of utts = {}'.format(
        train_split, tr_set))
    msg_list.append(
        ' | Dev sets = {}\t| Number of utts = {}'.format(dev_split, dv_set))
    msg_list.append(' | Batch size = {}\t\t| Bucketing = {}'.format(
        batch_size, bucketing))
    return msg_list
[ "def", "_data_msg", "(", "name", ",", "path", ",", "train_split", ",", "tr_set", ",", "dev_split", ",", "dv_set", ",", "batch_size", ",", "bucketing", ")", ":", "msg_list", "=", "[", "]", "msg_list", ".", "append", "(", "'Data spec. | Corpus = {} (from {})'", ".", "format", "(", "name", ",", "path", ")", ")", "msg_list", ".", "append", "(", "' | Train sets = {}\\t| Number of utts = {}'", ".", "format", "(", "train_split", ",", "tr_set", ")", ")", "msg_list", ".", "append", "(", "' | Dev sets = {}\\t| Number of utts = {}'", ".", "format", "(", "dev_split", ",", "dv_set", ")", ")", "msg_list", ".", "append", "(", "' | Batch size = {}\\t\\t| Bucketing = {}'", ".", "format", "(", "batch_size", ",", "bucketing", ")", ")", "return", "msg_list" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/data.py#L181-L191
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/asr.py
python
ASR.set_state
(self, prev_state, prev_attn)
Setting up all memory states for beam decoding
Setting up all memory states for beam decoding
[ "Setting", "up", "all", "memory", "states", "for", "beam", "decoding" ]
def set_state(self, prev_state, prev_attn):
    ''' Setting up all memory states for beam decoding'''
    self.decoder.set_state(prev_state)
    self.attention.set_mem(prev_attn)
[ "def", "set_state", "(", "self", ",", "prev_state", ",", "prev_attn", ")", ":", "self", ".", "decoder", ".", "set_state", "(", "prev_state", ")", "self", ".", "attention", ".", "set_mem", "(", "prev_attn", ")" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/asr.py#L48-L51
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/asr.py
python
ASR.forward
(self, audio_feature, feature_len, decode_step, tf_rate=0.0, teacher=None, emb_decoder=None, get_dec_state=False)
return ctc_output, encode_len, att_output, att_seq, dec_state
Arguments audio_feature - [BxTxD] Acoustic feature with shape feature_len - [B] Length of each sample in a batch decode_step - [int] The maximum number of attention decoder steps tf_rate - [0,1] The probability to perform teacher forcing for each step teacher - [BxL] Ground truth for teacher forcing with sentence length L emb_decoder - [obj] Introduces the word embedding decoder, different behavior for training/inference At training stage, this ONLY affects self-sampling (output remains the same) At inference stage, this affects output to become log prob. with distribution fusion get_dec_state - [bool] If true, return decoder state [BxLxD] for other purpose
Arguments audio_feature - [BxTxD] Acoustic feature with shape feature_len - [B] Length of each sample in a batch decode_step - [int] The maximum number of attention decoder steps tf_rate - [0,1] The probability to perform teacher forcing for each step teacher - [BxL] Ground truth for teacher forcing with sentence length L emb_decoder - [obj] Introduces the word embedding decoder, different behavior for training/inference At training stage, this ONLY affects self-sampling (output remains the same) At inference stage, this affects output to become log prob. with distribution fusion get_dec_state - [bool] If true, return decoder state [BxLxD] for other purpose
[ "Arguments", "audio_feature", "-", "[", "BxTxD", "]", "Acoustic", "feature", "with", "shape", "feature_len", "-", "[", "B", "]", "Length", "of", "each", "sample", "in", "a", "batch", "decode_step", "-", "[", "int", "]", "The", "maximum", "number", "of", "attention", "decoder", "steps", "tf_rate", "-", "[", "0", "1", "]", "The", "probability", "to", "perform", "teacher", "forcing", "for", "each", "step", "teacher", "-", "[", "BxL", "]", "Ground", "truth", "for", "teacher", "forcing", "with", "sentence", "length", "L", "emb_decoder", "-", "[", "obj", "]", "Introduces", "the", "word", "embedding", "decoder", "different", "behavior", "for", "training", "/", "inference", "At", "training", "stage", "this", "ONLY", "affects", "self", "-", "sampling", "(", "output", "remains", "the", "same", ")", "At", "inference", "stage", "this", "affects", "output", "to", "become", "log", "prob", ".", "with", "distribution", "fusion", "get_dec_state", "-", "[", "bool", "]", "If", "true", "return", "decoder", "state", "[", "BxLxD", "]", "for", "other", "purpose" ]
def forward(self, audio_feature, feature_len, decode_step, tf_rate=0.0, teacher=None,
            emb_decoder=None, get_dec_state=False):
    '''
    Arguments
        audio_feature - [BxTxD] Acoustic feature with shape
        feature_len   - [B]     Length of each sample in a batch
        decode_step   - [int]   The maximum number of attention decoder steps
        tf_rate       - [0,1]   The probability to perform teacher forcing for each step
        teacher       - [BxL]   Ground truth for teacher forcing with sentence length L
        emb_decoder   - [obj]   Introduces the word embedding decoder, different behavior for training/inference
                                At training stage, this ONLY affects self-sampling (output remains the same)
                                At inference stage, this affects output to become log prob. with distribution fusion
        get_dec_state - [bool]  If true, return decoder state [BxLxD] for other purpose
    '''
    # Init
    bs = audio_feature.shape[0]
    ctc_output, att_output, att_seq = None, None, None
    dec_state = [] if get_dec_state else None

    # Encode
    encode_feature, encode_len = self.encoder(audio_feature, feature_len)

    # CTC based decoding
    if self.enable_ctc:
        ctc_output = F.log_softmax(self.ctc_layer(encode_feature), dim=-1)

    # Attention based decoding
    if self.enable_att:
        # Init (init char = <SOS>, reset all rnn state and cell)
        self.decoder.init_state(bs)
        self.attention.reset_mem()
        last_char = self.pre_embed(torch.zeros(
            (bs), dtype=torch.long, device=encode_feature.device))
        att_seq, output_seq = [], []

        # Preprocess data for teacher forcing
        if teacher is not None:
            teacher = self.embed_drop(self.pre_embed(teacher))

        # Decode
        for t in range(decode_step):
            # Attend (inputs current state of first layer, encoded features)
            attn, context = self.attention(
                self.decoder.get_query(), encode_feature, encode_len)
            # Decode (inputs context + embedded last character)
            decoder_input = torch.cat([last_char, context], dim=-1)
            cur_char, d_state = self.decoder(decoder_input)
            # Prepare output as input of next step
            if (teacher is not None):
                # Training stage
                if (tf_rate == 1) or (torch.rand(1).item() <= tf_rate):
                    # teacher forcing
                    last_char = teacher[:, t, :]
                else:
                    # self-sampling (replace by argmax may be another choice)
                    with torch.no_grad():
                        if (emb_decoder is not None) and emb_decoder.apply_fuse:
                            _, cur_prob = emb_decoder(
                                d_state, cur_char, return_loss=False)
                        else:
                            cur_prob = cur_char.softmax(dim=-1)
                        sampled_char = Categorical(cur_prob).sample()
                    last_char = self.embed_drop(
                        self.pre_embed(sampled_char))
            else:
                # Inference stage
                if (emb_decoder is not None) and emb_decoder.apply_fuse:
                    _, cur_char = emb_decoder(
                        d_state, cur_char, return_loss=False)
                # argmax for inference
                last_char = self.pre_embed(torch.argmax(cur_char, dim=-1))

            # save output of each step
            output_seq.append(cur_char)
            att_seq.append(attn)
            if get_dec_state:
                dec_state.append(d_state)

        att_output = torch.stack(output_seq, dim=1)  # BxTxV
        att_seq = torch.stack(att_seq, dim=2)        # BxNxDtxT
        if get_dec_state:
            dec_state = torch.stack(dec_state, dim=1)

    return ctc_output, encode_len, att_output, att_seq, dec_state
[ "def", "forward", "(", "self", ",", "audio_feature", ",", "feature_len", ",", "decode_step", ",", "tf_rate", "=", "0.0", ",", "teacher", "=", "None", ",", "emb_decoder", "=", "None", ",", "get_dec_state", "=", "False", ")", ":", "# Init", "bs", "=", "audio_feature", ".", "shape", "[", "0", "]", "ctc_output", ",", "att_output", ",", "att_seq", "=", "None", ",", "None", ",", "None", "dec_state", "=", "[", "]", "if", "get_dec_state", "else", "None", "# Encode", "encode_feature", ",", "encode_len", "=", "self", ".", "encoder", "(", "audio_feature", ",", "feature_len", ")", "# CTC based decoding", "if", "self", ".", "enable_ctc", ":", "ctc_output", "=", "F", ".", "log_softmax", "(", "self", ".", "ctc_layer", "(", "encode_feature", ")", ",", "dim", "=", "-", "1", ")", "# Attention based decoding", "if", "self", ".", "enable_att", ":", "# Init (init char = <SOS>, reset all rnn state and cell)", "self", ".", "decoder", ".", "init_state", "(", "bs", ")", "self", ".", "attention", ".", "reset_mem", "(", ")", "last_char", "=", "self", ".", "pre_embed", "(", "torch", ".", "zeros", "(", "(", "bs", ")", ",", "dtype", "=", "torch", ".", "long", ",", "device", "=", "encode_feature", ".", "device", ")", ")", "att_seq", ",", "output_seq", "=", "[", "]", ",", "[", "]", "# Preprocess data for teacher forcing", "if", "teacher", "is", "not", "None", ":", "teacher", "=", "self", ".", "embed_drop", "(", "self", ".", "pre_embed", "(", "teacher", ")", ")", "# Decode", "for", "t", "in", "range", "(", "decode_step", ")", ":", "# Attend (inputs current state of first layer, encoded features)", "attn", ",", "context", "=", "self", ".", "attention", "(", "self", ".", "decoder", ".", "get_query", "(", ")", ",", "encode_feature", ",", "encode_len", ")", "# Decode (inputs context + embedded last character)", "decoder_input", "=", "torch", ".", "cat", "(", "[", "last_char", ",", "context", "]", ",", "dim", "=", "-", "1", ")", "cur_char", ",", "d_state", "=", "self", ".", "decoder", "(", "decoder_input", ")", "# Prepare output as input of next step", "if", "(", "teacher", "is", "not", "None", ")", ":", "# Training stage", "if", "(", "tf_rate", "==", "1", ")", "or", "(", "torch", ".", "rand", "(", "1", ")", ".", "item", "(", ")", "<=", "tf_rate", ")", ":", "# teacher forcing", "last_char", "=", "teacher", "[", ":", ",", "t", ",", ":", "]", "else", ":", "# self-sampling (replace by argmax may be another choice)", "with", "torch", ".", "no_grad", "(", ")", ":", "if", "(", "emb_decoder", "is", "not", "None", ")", "and", "emb_decoder", ".", "apply_fuse", ":", "_", ",", "cur_prob", "=", "emb_decoder", "(", "d_state", ",", "cur_char", ",", "return_loss", "=", "False", ")", "else", ":", "cur_prob", "=", "cur_char", ".", "softmax", "(", "dim", "=", "-", "1", ")", "sampled_char", "=", "Categorical", "(", "cur_prob", ")", ".", "sample", "(", ")", "last_char", "=", "self", ".", "embed_drop", "(", "self", ".", "pre_embed", "(", "sampled_char", ")", ")", "else", ":", "# Inference stage", "if", "(", "emb_decoder", "is", "not", "None", ")", "and", "emb_decoder", ".", "apply_fuse", ":", "_", ",", "cur_char", "=", "emb_decoder", "(", "d_state", ",", "cur_char", ",", "return_loss", "=", "False", ")", "# argmax for inference", "last_char", "=", "self", ".", "pre_embed", "(", "torch", ".", "argmax", "(", "cur_char", ",", "dim", "=", "-", "1", ")", ")", "# save output of each step", "output_seq", ".", "append", "(", "cur_char", ")", "att_seq", ".", "append", "(", "attn", ")", "if", "get_dec_state", ":", "dec_state", ".", "append", "(", 
"d_state", ")", "att_output", "=", "torch", ".", "stack", "(", "output_seq", ",", "dim", "=", "1", ")", "# BxTxV", "att_seq", "=", "torch", ".", "stack", "(", "att_seq", ",", "dim", "=", "2", ")", "# BxNxDtxT", "if", "get_dec_state", ":", "dec_state", "=", "torch", ".", "stack", "(", "dec_state", ",", "dim", "=", "1", ")", "return", "ctc_output", ",", "encode_len", ",", "att_output", ",", "att_seq", ",", "dec_state" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/asr.py#L72-L155
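A hypothetical teacher-forced call sketching the expected tensor shapes; the `model` construction, vocabulary size, and label tensor are placeholders, not values from the record above.

    import torch

    # Hypothetical inputs: 4 utterances, 300 frames, 40-dim features,
    # labels padded to length 50 over a 31-token vocabulary.
    audio = torch.randn(4, 300, 40)
    feat_len = torch.LongTensor([300, 280, 250, 200])
    labels = torch.randint(0, 31, (4, 50))

    ctc_out, enc_len, att_out, att_seq, dec_state = model(
        audio, feat_len, decode_step=labels.shape[1], tf_rate=1.0, teacher=labels)
    # ctc_out: (4, T', 31) log-softmax over vocab, or None if CTC is disabled
    # att_out: (4, 50, 31) per-step attention-decoder scores
    # att_seq: attention alignments stacked over decode steps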
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/asr.py
python
Decoder.init_state
(self, bs)
return self.get_state()
Set all hidden states to zeros
Set all hidden states to zeros
[ "Set", "all", "hidden", "states", "to", "zeros" ]
def init_state(self, bs):
    ''' Set all hidden states to zeros '''
    device = next(self.parameters()).device
    if self.enable_cell:
        self.hidden_state = (torch.zeros((self.layer, bs, self.dim), device=device),
                             torch.zeros((self.layer, bs, self.dim), device=device))
    else:
        self.hidden_state = torch.zeros(
            (self.layer, bs, self.dim), device=device)
    return self.get_state()
[ "def", "init_state", "(", "self", ",", "bs", ")", ":", "device", "=", "next", "(", "self", ".", "parameters", "(", ")", ")", ".", "device", "if", "self", ".", "enable_cell", ":", "self", ".", "hidden_state", "=", "(", "torch", ".", "zeros", "(", "(", "self", ".", "layer", ",", "bs", ",", "self", ".", "dim", ")", ",", "device", "=", "device", ")", ",", "torch", ".", "zeros", "(", "(", "self", ".", "layer", ",", "bs", ",", "self", ".", "dim", ")", ",", "device", "=", "device", ")", ")", "else", ":", "self", ".", "hidden_state", "=", "torch", ".", "zeros", "(", "(", "self", ".", "layer", ",", "bs", ",", "self", ".", "dim", ")", ",", "device", "=", "device", ")", "return", "self", ".", "get_state", "(", ")" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/asr.py#L180-L189
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/asr.py
python
Decoder.set_state
(self, hidden_state)
Set all hidden states/cells, for decoding purpose
Set all hidden states/cells, for decoding purpose
[ "Set", "all", "hidden", "states", "/", "cells", "for", "decoding", "purpose" ]
def set_state(self, hidden_state):
    ''' Set all hidden states/cells, for decoding purpose'''
    device = next(self.parameters()).device
    if self.enable_cell:
        self.hidden_state = (hidden_state[0].to(
            device), hidden_state[1].to(device))
    else:
        self.hidden_state = hidden_state.to(device)
[ "def", "set_state", "(", "self", ",", "hidden_state", ")", ":", "device", "=", "next", "(", "self", ".", "parameters", "(", ")", ")", ".", "device", "if", "self", ".", "enable_cell", ":", "self", ".", "hidden_state", "=", "(", "hidden_state", "[", "0", "]", ".", "to", "(", "device", ")", ",", "hidden_state", "[", "1", "]", ".", "to", "(", "device", ")", ")", "else", ":", "self", ".", "hidden_state", "=", "hidden_state", ".", "to", "(", "device", ")" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/asr.py#L191-L198
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/asr.py
python
Decoder.get_state
(self)
Return all hidden states/cells, for decoding purpose
Return all hidden states/cells, for decoding purpose
[ "Return", "all", "hidden", "states", "/", "cells", "for", "decoding", "purpose" ]
def get_state(self):
    ''' Return all hidden states/cells, for decoding purpose'''
    if self.enable_cell:
        return (self.hidden_state[0].cpu(), self.hidden_state[1].cpu())
    else:
        return self.hidden_state.cpu()
[ "def", "get_state", "(", "self", ")", ":", "if", "self", ".", "enable_cell", ":", "return", "(", "self", ".", "hidden_state", "[", "0", "]", ".", "cpu", "(", ")", ",", "self", ".", "hidden_state", "[", "1", "]", ".", "cpu", "(", ")", ")", "else", ":", "return", "self", ".", "hidden_state", ".", "cpu", "(", ")" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/asr.py#L200-L205
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/asr.py
python
Decoder.get_query
(self)
Return state of all layers as query for attention
Return state of all layers as query for attention
[ "Return", "state", "of", "all", "layers", "as", "query", "for", "attention" ]
def get_query(self):
    ''' Return state of all layers as query for attention '''
    if self.enable_cell:
        return self.hidden_state[0].transpose(0, 1).reshape(-1, self.dim*self.layer)
    else:
        return self.hidden_state.transpose(0, 1).reshape(-1, self.dim*self.layer)
[ "def", "get_query", "(", "self", ")", ":", "if", "self", ".", "enable_cell", ":", "return", "self", ".", "hidden_state", "[", "0", "]", ".", "transpose", "(", "0", ",", "1", ")", ".", "reshape", "(", "-", "1", ",", "self", ".", "dim", "*", "self", ".", "layer", ")", "else", ":", "return", "self", ".", "hidden_state", ".", "transpose", "(", "0", ",", "1", ")", ".", "reshape", "(", "-", "1", ",", "self", ".", "dim", "*", "self", ".", "layer", ")" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/asr.py#L207-L212
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/asr.py
python
Decoder.forward
(self, x)
return char, x
Decode and transform into vocab
Decode and transform into vocab
[ "Decode", "and", "transform", "into", "vocab" ]
def forward(self, x):
    ''' Decode and transform into vocab '''
    if not self.training:
        self.layers.flatten_parameters()
    x, self.hidden_state = self.layers(x.unsqueeze(1), self.hidden_state)
    x = x.squeeze(1)
    char = self.char_trans(self.final_dropout(x))

    return char, x
[ "def", "forward", "(", "self", ",", "x", ")", ":", "if", "not", "self", ".", "training", ":", "self", ".", "layers", ".", "flatten_parameters", "(", ")", "x", ",", "self", ".", "hidden_state", "=", "self", ".", "layers", "(", "x", ".", "unsqueeze", "(", "1", ")", ",", "self", ".", "hidden_state", ")", "x", "=", "x", ".", "squeeze", "(", "1", ")", "char", "=", "self", ".", "char_trans", "(", "self", ".", "final_dropout", "(", "x", ")", ")", "return", "char", ",", "x" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/asr.py#L214-L221
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/ctc.py
python
CTCPrefixScore.full_compute
(self, g, r_prev)
return psi, np.rollaxis(r, 2)
Given prefix g, return the probability of all possible sequence y (where y = concat(g,c)) This function computes all possible tokens for c (memory inefficient)
Given prefix g, return the probability of all possible sequence y (where y = concat(g,c)) This function computes all possible tokens for c (memory inefficient)
[ "Given", "prefix", "g", "return", "the", "probability", "of", "all", "possible", "sequence", "y", "(", "where", "y", "=", "concat", "(", "g", "c", "))", "This", "function", "computes", "all", "possible", "tokens", "for", "c", "(", "memory", "inefficient", ")" ]
def full_compute(self, g, r_prev):
    '''Given prefix g, return the probability of all possible sequence y (where y = concat(g,c))
       This function computes all possible tokens for c (memory inefficient)'''
    prefix_length = len(g)
    last_char = g[-1] if prefix_length > 0 else 0

    # init. r
    r = np.full((self.input_length, 2, self.odim),
                self.logzero, dtype=np.float32)

    # start from len(g) because it is impossible for CTC to generate |y|>|X|
    start = max(1, prefix_length)

    if prefix_length == 0:
        r[0, 0, :] = self.x[0, :]  # if g = <sos>

    psi = r[start-1, 0, :]

    phi = np.logaddexp(r_prev[:, 0], r_prev[:, 1])

    for t in range(start, self.input_length):
        # prev_blank
        prev_blank = np.full((self.odim), r_prev[t-1, 1], dtype=np.float32)
        # prev_nonblank
        prev_nonblank = np.full(
            (self.odim), r_prev[t-1, 0], dtype=np.float32)
        prev_nonblank[last_char] = self.logzero
        phi = np.logaddexp(prev_nonblank, prev_blank)
        # P(h|current step is non-blank) = [ P(prev. step = y) + P()]*P(c)
        r[t, 0, :] = np.logaddexp(r[t-1, 0, :], phi) + self.x[t, :]
        # P(h|current step is blank) = [P(prev. step is blank) + P(prev. step is non-blank)]*P(now=blank)
        r[t, 1, :] = np.logaddexp(
            r[t-1, 1, :], r[t-1, 0, :]) + self.x[t, self.blank]
        psi = np.logaddexp(psi, phi+self.x[t, :])

    # psi[self.eos] = np.logaddexp(r_prev[-1,0], r_prev[-1,1])
    return psi, np.rollaxis(r, 2)
[ "def", "full_compute", "(", "self", ",", "g", ",", "r_prev", ")", ":", "prefix_length", "=", "len", "(", "g", ")", "last_char", "=", "g", "[", "-", "1", "]", "if", "prefix_length", ">", "0", "else", "0", "# init. r", "r", "=", "np", ".", "full", "(", "(", "self", ".", "input_length", ",", "2", ",", "self", ".", "odim", ")", ",", "self", ".", "logzero", ",", "dtype", "=", "np", ".", "float32", ")", "# start from len(g) because is impossible for CTC to generate |y|>|X|", "start", "=", "max", "(", "1", ",", "prefix_length", ")", "if", "prefix_length", "==", "0", ":", "r", "[", "0", ",", "0", ",", ":", "]", "=", "self", ".", "x", "[", "0", ",", ":", "]", "# if g = <sos>", "psi", "=", "r", "[", "start", "-", "1", ",", "0", ",", ":", "]", "phi", "=", "np", ".", "logaddexp", "(", "r_prev", "[", ":", ",", "0", "]", ",", "r_prev", "[", ":", ",", "1", "]", ")", "for", "t", "in", "range", "(", "start", ",", "self", ".", "input_length", ")", ":", "# prev_blank", "prev_blank", "=", "np", ".", "full", "(", "(", "self", ".", "odim", ")", ",", "r_prev", "[", "t", "-", "1", ",", "1", "]", ",", "dtype", "=", "np", ".", "float32", ")", "# prev_nonblank", "prev_nonblank", "=", "np", ".", "full", "(", "(", "self", ".", "odim", ")", ",", "r_prev", "[", "t", "-", "1", ",", "0", "]", ",", "dtype", "=", "np", ".", "float32", ")", "prev_nonblank", "[", "last_char", "]", "=", "self", ".", "logzero", "phi", "=", "np", ".", "logaddexp", "(", "prev_nonblank", ",", "prev_blank", ")", "# P(h|current step is non-blank) = [ P(prev. step = y) + P()]*P(c)", "r", "[", "t", ",", "0", ",", ":", "]", "=", "np", ".", "logaddexp", "(", "r", "[", "t", "-", "1", ",", "0", ",", ":", "]", ",", "phi", ")", "+", "self", ".", "x", "[", "t", ",", ":", "]", "# P(h|current step is blank) = [P(prev. step is blank) + P(prev. step is non-blank)]*P(now=blank)", "r", "[", "t", ",", "1", ",", ":", "]", "=", "np", ".", "logaddexp", "(", "r", "[", "t", "-", "1", ",", "1", ",", ":", "]", ",", "r", "[", "t", "-", "1", ",", "0", ",", ":", "]", ")", "+", "self", ".", "x", "[", "t", ",", "self", ".", "blank", "]", "psi", "=", "np", ".", "logaddexp", "(", "psi", ",", "phi", "+", "self", ".", "x", "[", "t", ",", ":", "]", ")", "#psi[self.eos] = np.logaddexp(r_prev[-1,0], r_prev[-1,1])", "return", "psi", ",", "np", ".", "rollaxis", "(", "r", ",", "2", ")" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/ctc.py#L37-L74
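The recursion above does all of its arithmetic in log space; as a tiny illustration of the identity it relies on (not code from the repo):

    import numpy as np

    # np.logaddexp(a, b) == log(exp(a) + exp(b)), computed stably, so a sum of
    # probabilities becomes logaddexp and a product becomes ordinary addition.
    a, b = np.log(0.2), np.log(0.3)
    assert np.isclose(np.logaddexp(a, b), np.log(0.5))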
Alexander-H-Liu/End-to-end-ASR-Pytorch
1103d144423e8e692f1d18cd9db27a96cb49fb9d
src/ctc.py
python
CTCPrefixScore.cheap_compute
(self, g, r_prev, candidates)
return psi, np.rollaxis(r, 2)
Given prefix g, return the probability of all possible sequence y (where y = concat(g,c)) This function considers only those tokens in candidates for c (memory efficient)
Given prefix g, return the probability of all possible sequence y (where y = concat(g,c)) This function considers only those tokens in candidates for c (memory efficient)
[ "Given", "prefix", "g", "return", "the", "probability", "of", "all", "possible", "sequence", "y", "(", "where", "y", "=", "concat", "(", "g", "c", "))", "This", "function", "considers", "only", "those", "tokens", "in", "candidates", "for", "c", "(", "memory", "efficient", ")" ]
def cheap_compute(self, g, r_prev, candidates):
    '''Given prefix g, return the probability of all possible sequence y (where y = concat(g,c))
       This function considers only those tokens in candidates for c (memory efficient)'''
    prefix_length = len(g)
    odim = len(candidates)
    last_char = g[-1] if prefix_length > 0 else 0

    # init. r
    r = np.full((self.input_length, 2, len(candidates)),
                self.logzero, dtype=np.float32)

    # start from len(g) because it is impossible for CTC to generate |y|>|X|
    start = max(1, prefix_length)

    if prefix_length == 0:
        r[0, 0, :] = self.x[0, candidates]  # if g = <sos>

    psi = r[start-1, 0, :]
    # Phi = (prev_nonblank,prev_blank)
    sum_prev = np.logaddexp(r_prev[:, 0], r_prev[:, 1])
    phi = np.repeat(sum_prev[..., None], odim, axis=-1)
    # Handle edge case : last tok of prefix in candidates
    if prefix_length > 0 and last_char in candidates:
        phi[:, candidates.index(last_char)] = r_prev[:, 1]

    for t in range(start, self.input_length):
        # prev_blank
        # prev_blank = np.full((odim), r_prev[t-1, 1], dtype=np.float32)
        # prev_nonblank
        # prev_nonblank = np.full((odim), r_prev[t-1, 0], dtype=np.float32)
        # phi = np.logaddexp(prev_nonblank, prev_blank)
        # P(h|current step is non-blank) = P(prev. step = y)*P(c)
        r[t, 0, :] = np.logaddexp(
            r[t-1, 0, :], phi[t-1]) + self.x[t, candidates]
        # P(h|current step is blank) = [P(prev. step is blank) + P(prev. step is non-blank)]*P(now=blank)
        r[t, 1, :] = np.logaddexp(
            r[t-1, 1, :], r[t-1, 0, :]) + self.x[t, self.blank]
        psi = np.logaddexp(psi, phi[t-1]+self.x[t, candidates])

    # P(end of sentence) = P(g)
    if self.eos in candidates:
        psi[candidates.index(self.eos)] = sum_prev[-1]
    return psi, np.rollaxis(r, 2)
[ "def", "cheap_compute", "(", "self", ",", "g", ",", "r_prev", ",", "candidates", ")", ":", "prefix_length", "=", "len", "(", "g", ")", "odim", "=", "len", "(", "candidates", ")", "last_char", "=", "g", "[", "-", "1", "]", "if", "prefix_length", ">", "0", "else", "0", "# init. r", "r", "=", "np", ".", "full", "(", "(", "self", ".", "input_length", ",", "2", ",", "len", "(", "candidates", ")", ")", ",", "self", ".", "logzero", ",", "dtype", "=", "np", ".", "float32", ")", "# start from len(g) because is impossible for CTC to generate |y|>|X|", "start", "=", "max", "(", "1", ",", "prefix_length", ")", "if", "prefix_length", "==", "0", ":", "r", "[", "0", ",", "0", ",", ":", "]", "=", "self", ".", "x", "[", "0", ",", "candidates", "]", "# if g = <sos>", "psi", "=", "r", "[", "start", "-", "1", ",", "0", ",", ":", "]", "# Phi = (prev_nonblank,prev_blank)", "sum_prev", "=", "np", ".", "logaddexp", "(", "r_prev", "[", ":", ",", "0", "]", ",", "r_prev", "[", ":", ",", "1", "]", ")", "phi", "=", "np", ".", "repeat", "(", "sum_prev", "[", "...", ",", "None", "]", ",", "odim", ",", "axis", "=", "-", "1", ")", "# Handle edge case : last tok of prefix in candidates", "if", "prefix_length", ">", "0", "and", "last_char", "in", "candidates", ":", "phi", "[", ":", ",", "candidates", ".", "index", "(", "last_char", ")", "]", "=", "r_prev", "[", ":", ",", "1", "]", "for", "t", "in", "range", "(", "start", ",", "self", ".", "input_length", ")", ":", "# prev_blank", "# prev_blank = np.full((odim), r_prev[t-1, 1], dtype=np.float32)", "# prev_nonblank", "# prev_nonblank = np.full((odim), r_prev[t-1, 0], dtype=np.float32)", "# phi = np.logaddexp(prev_nonblank, prev_blank)", "# P(h|current step is non-blank) = P(prev. step = y)*P(c)", "r", "[", "t", ",", "0", ",", ":", "]", "=", "np", ".", "logaddexp", "(", "r", "[", "t", "-", "1", ",", "0", ",", ":", "]", ",", "phi", "[", "t", "-", "1", "]", ")", "+", "self", ".", "x", "[", "t", ",", "candidates", "]", "# P(h|current step is blank) = [P(prev. step is blank) + P(prev. step is non-blank)]*P(now=blank)", "r", "[", "t", ",", "1", ",", ":", "]", "=", "np", ".", "logaddexp", "(", "r", "[", "t", "-", "1", ",", "1", ",", ":", "]", ",", "r", "[", "t", "-", "1", ",", "0", ",", ":", "]", ")", "+", "self", ".", "x", "[", "t", ",", "self", ".", "blank", "]", "psi", "=", "np", ".", "logaddexp", "(", "psi", ",", "phi", "[", "t", "-", "1", ",", "]", "+", "self", ".", "x", "[", "t", ",", "candidates", "]", ")", "# P(end of sentence) = P(g)", "if", "self", ".", "eos", "in", "candidates", ":", "psi", "[", "candidates", ".", "index", "(", "self", ".", "eos", ")", "]", "=", "sum_prev", "[", "-", "1", "]", "return", "psi", ",", "np", ".", "rollaxis", "(", "r", ",", "2", ")" ]
https://github.com/Alexander-H-Liu/End-to-end-ASR-Pytorch/blob/1103d144423e8e692f1d18cd9db27a96cb49fb9d/src/ctc.py#L76-L116
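The two CTC prefix-scoring routines above (full_compute and cheap_compute) do all of their probability bookkeeping in the log domain with np.logaddexp. The snippet below is not from the repository; it is a minimal, self-contained illustration of why the recursions add log-probabilities and combine alternative paths with logaddexp instead of multiplying raw probabilities.

```python
import numpy as np

# Multiplying hundreds of small per-frame probabilities underflows float64...
frame_probs = np.full(400, 0.01)
print(np.prod(frame_probs))        # 0.0 -- underflow

# ...while the same product stays finite as a sum of logs, and alternative
# paths are combined with np.logaddexp, i.e. log(exp(a) + exp(b)).
log_probs = np.log(frame_probs)
print(np.sum(log_probs))                           # about -1842.07
print(np.logaddexp(np.log(0.01), np.log(0.02)))    # equals log(0.03)
```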
AlfredXiangWu/LightCNN
8b33107e836374a892efecd149d2016170167fdd
train.py
python
accuracy
(output, target, topk=(1,))
return res
Computes the precision@k for the specified values of k
Computes the precision
[ "Computes", "the", "precision" ]
def accuracy(output, target, topk=(1,)): """Computes the precision@k for the specified values of k""" maxk = max(topk) batch_size = target.size(0) _, pred = output.topk(maxk, 1, True, True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k in topk: correct_k = correct[:k].view(-1).float().sum(0) res.append(correct_k.mul_(100.0 / batch_size)) return res
[ "def", "accuracy", "(", "output", ",", "target", ",", "topk", "=", "(", "1", ",", ")", ")", ":", "maxk", "=", "max", "(", "topk", ")", "batch_size", "=", "target", ".", "size", "(", "0", ")", "_", ",", "pred", "=", "output", ".", "topk", "(", "maxk", ",", "1", ",", "True", ",", "True", ")", "pred", "=", "pred", ".", "t", "(", ")", "correct", "=", "pred", ".", "eq", "(", "target", ".", "view", "(", "1", ",", "-", "1", ")", ".", "expand_as", "(", "pred", ")", ")", "res", "=", "[", "]", "for", "k", "in", "topk", ":", "correct_k", "=", "correct", "[", ":", "k", "]", ".", "view", "(", "-", "1", ")", ".", "float", "(", ")", ".", "sum", "(", "0", ")", "res", ".", "append", "(", "correct_k", ".", "mul_", "(", "100.0", "/", "batch_size", ")", ")", "return", "res" ]
https://github.com/AlfredXiangWu/LightCNN/blob/8b33107e836374a892efecd149d2016170167fdd/train.py#L269-L282
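A minimal sketch of what the accuracy helper above computes, run on fake data. It re-derives precision@k directly with torch.topk rather than importing the repository's function; the random tensors and the batch size are made up purely for illustration.

```python
import torch

# Fake logits for a batch of 8 samples over 10 classes -- illustration only.
output = torch.randn(8, 10)
target = torch.randint(0, 10, (8,))

# precision@k: fraction of samples whose true label appears in the top-k logits.
for k in (1, 5):
    _, pred = output.topk(k, dim=1)                  # (8, k) predicted labels
    hits = (pred == target.unsqueeze(1)).any(dim=1)  # (8,) bool, per-sample hit
    print(f"prec@{k} = {100.0 * hits.float().mean().item():.1f}%")
```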
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/debytes.py
python
debytes
(string)
Decode string if it is a bytes object. This is necessary since Neovim, correctly, gives strings as a str, but regular Vim leaves them encoded as bytes.
Decode string if it is a bytes object.
[ "Decode", "string", "if", "it", "is", "a", "bytes", "object", "." ]
def debytes(string): """ Decode string if it is a bytes object. This is necessary since Neovim, correctly, gives strings as a str, but regular Vim leaves them encoded as bytes. """ try: return string.decode() except AttributeError: return string
[ "def", "debytes", "(", "string", ")", ":", "try", ":", "return", "string", ".", "decode", "(", ")", "except", "AttributeError", ":", "return", "string" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/debytes.py#L1-L11
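debytes is a small normalisation shim; the following standalone check copies its body verbatim so the example runs without Vim.

```python
def debytes(string):
    try:
        return string.decode()
    except AttributeError:
        return string

print(debytes(b"gg"))  # 'gg' -- bytes, as regular Vim hands strings over
print(debytes("gg"))   # 'gg' -- str, as Neovim does; returned unchanged
```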
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/window.py
python
cursor_in_same_position
(a, b)
return a.lnum == b.lnum and a.col == b.col
Check if the given views have the cursor on the same position. The scroll position and other properties may differ.
Check if the given views have the cursor on the same position.
[ "Check", "if", "the", "given", "views", "have", "the", "cursor", "on", "the", "same", "position", "." ]
def cursor_in_same_position(a, b): """ Check if the given views have the cursor on the same position. The scroll position and other properties may differ. """ return a.lnum == b.lnum and a.col == b.col
[ "def", "cursor_in_same_position", "(", "a", ",", "b", ")", ":", "return", "a", ".", "lnum", "==", "b", ".", "lnum", "and", "a", ".", "col", "==", "b", ".", "col" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/window.py#L19-L25
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/server.py
python
Server.message_loop
(self)
Continuously wait for and handle instructions from the client. This waiting blocks Vim, but that does not matter since nobody is looking at it. Blocking also prevents CPU resources from being wasted on redrawing. :raises EOFError: when the connection is closed.
Continuously wait for and handle instructions from the client.
[ "Continuously", "wait", "for", "and", "handle", "instructions", "from", "the", "client", "." ]
def message_loop(self): """ Continuously wait for and handle instructions from the client. This waiting blocks Vim, but that does not matter since nobody is looking at it. Blocking also prevents CPU resources from being wasted on redrawing. :raises EOFError: when the connection is closed. """ while True: try: data = self.client_connection.recv() # If there is still data waiting, then multiple requests were sent, # so we skip pathfinding and move on to the next one if not self.client_connection.poll(): self.do_action(data) except: # Send any unexpected exceptions back to the client # to be displayed for debugging purposes self.client_connection.send(("ERROR", traceback.format_exc()))
[ "def", "message_loop", "(", "self", ")", ":", "while", "True", ":", "try", ":", "data", "=", "self", ".", "client_connection", ".", "recv", "(", ")", "# If there is still data waiting, then multiple requests were sent,", "# so we skip pathfinding and move on to the next one", "if", "not", "self", ".", "client_connection", ".", "poll", "(", ")", ":", "self", ".", "do_action", "(", "data", ")", "except", ":", "# Send any unexpected exceptions back to the client", "# to be displayed for debugging purposes", "self", ".", "client_connection", ".", "send", "(", "(", "\"ERROR\"", ",", "traceback", ".", "format_exc", "(", ")", ")", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/server.py#L41-L61
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/server.py
python
Server.do_action
(self, data)
Process an instruction from the client.
Process an instruction from the client.
[ "Process", "an", "instruction", "from", "the", "client", "." ]
def do_action(self, data): """Process an instruction from the client.""" self.start_view = data["start"] self.target_view = data["target"] self.min_line = data["min_line"] self.max_line = data["max_line"] vim.current.buffer[:] = data["buffer"] vim.current.window.options["wrap"] = data["wrap"] vim.options["scrolloff"] = data["scrolloff"] vim.options["sidescrolloff"] = data["sidescrolloff"] # Set size of the entire Vim display to match the size of the # corresponding window in the client vim.options["columns"] = int(data["size"][0]) vim.options["lines"] = vim.options["cmdheight"] + int(data["size"][1]) self.pathfind()
[ "def", "do_action", "(", "self", ",", "data", ")", ":", "self", ".", "start_view", "=", "data", "[", "\"start\"", "]", "self", ".", "target_view", "=", "data", "[", "\"target\"", "]", "self", ".", "min_line", "=", "data", "[", "\"min_line\"", "]", "self", ".", "max_line", "=", "data", "[", "\"max_line\"", "]", "vim", ".", "current", ".", "buffer", "[", ":", "]", "=", "data", "[", "\"buffer\"", "]", "vim", ".", "current", ".", "window", ".", "options", "[", "\"wrap\"", "]", "=", "data", "[", "\"wrap\"", "]", "vim", ".", "options", "[", "\"scrolloff\"", "]", "=", "data", "[", "\"scrolloff\"", "]", "vim", ".", "options", "[", "\"sidescrolloff\"", "]", "=", "data", "[", "\"sidescrolloff\"", "]", "# Set size of the entire Vim display to match the size of the", "# corresponding window in the client", "vim", ".", "options", "[", "\"columns\"", "]", "=", "int", "(", "data", "[", "\"size\"", "]", "[", "0", "]", ")", "vim", ".", "options", "[", "\"lines\"", "]", "=", "vim", ".", "options", "[", "\"cmdheight\"", "]", "+", "int", "(", "data", "[", "\"size\"", "]", "[", "1", "]", ")", "self", ".", "pathfind", "(", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/server.py#L63-L81
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/server.py
python
Server.pathfind
(self)
Run the pathfinder, then send the result back to the client.
Run the pathfinder, then send the result back to the client.
[ "Run", "the", "pathfinder", "then", "send", "the", "result", "back", "to", "the", "client", "." ]
def pathfind(self): """Run the pathfinder, then send the result back to the client.""" dijkstra = Dijkstra( self.start_view, self.target_view, self.min_line, self.max_line ) motions = dijkstra.find_path(self.client_connection) # If motions is None, that means we cancelled pathfinding because a new # request was received. We also check for another request now in case one was # sent during the last iteration of the pathfinding loop. if not (motions is None or self.client_connection.poll()): self.client_connection.send(("RESULT", motions))
[ "def", "pathfind", "(", "self", ")", ":", "dijkstra", "=", "Dijkstra", "(", "self", ".", "start_view", ",", "self", ".", "target_view", ",", "self", ".", "min_line", ",", "self", ".", "max_line", ")", "motions", "=", "dijkstra", ".", "find_path", "(", "self", ".", "client_connection", ")", "# If motions is None, that means we cancelled pathfinding because a new", "# request was received. We also check for another request now in case one was", "# sent during the last iteration of the pathfinding loop.", "if", "not", "(", "motions", "is", "None", "or", "self", ".", "client_connection", ".", "poll", "(", ")", ")", ":", "self", ".", "client_connection", ".", "send", "(", "(", "\"RESULT\"", ",", "motions", ")", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/server.py#L83-L94
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/dijkstra.py
python
Dijkstra.find_path
(self, client_connection)
Use Dijkstra's algorithm to find the optimal sequence of motions. :param client_connection: If another pathfinding request is waiting on this connection, exit (returning None) as soon as possible. This cancels the pathfinding, moving on to the new request immediately.
Use Dijkstra's algorithm to find the optimal sequence of motions.
[ "Use", "Dijkstra", "s", "algorithm", "to", "find", "the", "optimal", "sequence", "of", "motions", "." ]
def find_path(self, client_connection): """ Use Dijkstra's algorithm to find the optimal sequence of motions. :param client_connection: If another pathfinding request is waiting on this connection, exit (returning None) as soon as possible. This cancels the pathfinding, moving on to the new request immediately. """ while len(self._open_queue) > 0 and not client_connection.poll(): current_node_key, current_distance = self._open_queue.popitem() current_node = self._open_nodes.pop(current_node_key) self._closed_nodes.add(current_node_key) if current_node.is_target(): return current_node.reconstruct_path() for node in current_node.get_neighbours(): if node.key in self._closed_nodes: continue new_distance = current_distance + current_node.motion_weight( node.came_by_motion ) if ( node.key not in self._open_nodes or new_distance < self._open_queue[node.key] ): node.set_came_from(current_node) self._open_nodes[node.key] = node self._open_queue[node.key] = new_distance
[ "def", "find_path", "(", "self", ",", "client_connection", ")", ":", "while", "len", "(", "self", ".", "_open_queue", ")", ">", "0", "and", "not", "client_connection", ".", "poll", "(", ")", ":", "current_node_key", ",", "current_distance", "=", "self", ".", "_open_queue", ".", "popitem", "(", ")", "current_node", "=", "self", ".", "_open_nodes", ".", "pop", "(", "current_node_key", ")", "self", ".", "_closed_nodes", ".", "add", "(", "current_node_key", ")", "if", "current_node", ".", "is_target", "(", ")", ":", "return", "current_node", ".", "reconstruct_path", "(", ")", "for", "node", "in", "current_node", ".", "get_neighbours", "(", ")", ":", "if", "node", ".", "key", "in", "self", ".", "_closed_nodes", ":", "continue", "new_distance", "=", "current_distance", "+", "current_node", ".", "motion_weight", "(", "node", ".", "came_by_motion", ")", "if", "(", "node", ".", "key", "not", "in", "self", ".", "_open_nodes", "or", "new_distance", "<", "self", ".", "_open_queue", "[", "node", ".", "key", "]", ")", ":", "node", ".", "set_came_from", "(", "current_node", ")", "self", ".", "_open_nodes", "[", "node", ".", "key", "]", "=", "node", "self", ".", "_open_queue", "[", "node", ".", "key", "]", "=", "new_distance" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/dijkstra.py#L39-L68
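find_path above repeatedly pops the cheapest open node from a priority map and relaxes its neighbours. The toy sketch below reproduces that pattern with the standard-library heapq on a made-up graph; it shares no code with the plugin and is only meant to make the control flow concrete.

```python
import heapq

def dijkstra(graph, start, target):
    """graph maps node -> [(neighbour, weight), ...]; returns (cost, path)."""
    queue = [(0, start, [start])]      # (distance so far, node, path taken)
    closed = set()
    while queue:
        dist, node, path = heapq.heappop(queue)
        if node in closed:
            continue
        if node == target:
            return dist, path          # analogous to reconstruct_path()
        closed.add(node)
        for neighbour, weight in graph.get(node, []):
            if neighbour not in closed:
                heapq.heappush(queue, (dist + weight, neighbour, path + [neighbour]))
    return float("inf"), []

toy_graph = {"a": [("b", 1), ("c", 4)], "b": [("c", 1)], "c": []}
print(dijkstra(toy_graph, "a", "c"))   # (2, ['a', 'b', 'c'])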
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/node.py
python
Node.get_neighbours
(self)
Yield all neighbours of this node.
Yield all neighbours of this node.
[ "Yield", "all", "neighbours", "of", "this", "node", "." ]
def get_neighbours(self): """Yield all neighbours of this node.""" for motion_generator in self.dijkstra.motion_generators: for node in motion_generator.generate(self.view): if ( node.view.lnum >= self.dijkstra.min_line and node.view.lnum <= self.dijkstra.max_line ): yield node
[ "def", "get_neighbours", "(", "self", ")", ":", "for", "motion_generator", "in", "self", ".", "dijkstra", ".", "motion_generators", ":", "for", "node", "in", "motion_generator", ".", "generate", "(", "self", ".", "view", ")", ":", "if", "(", "node", ".", "view", ".", "lnum", ">=", "self", ".", "dijkstra", ".", "min_line", "and", "node", ".", "view", ".", "lnum", "<=", "self", ".", "dijkstra", ".", "max_line", ")", ":", "yield", "node" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/node.py#L17-L25
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/node.py
python
Node.motion_weight
(self, motion)
Return the weight of using a motion from this node.
Return the weight of using a motion from this node.
[ "Return", "the", "weight", "of", "using", "a", "motion", "from", "this", "node", "." ]
def motion_weight(self, motion): """Return the weight of using a motion from this node.""" if motion != self.came_by_motion: # First repetition, return number of characters in the motion return len(motion.motion) + ( 0 if motion.argument is None else len(motion.argument) ) elif self.came_by_motion_repetitions == 1: # Second repetition, adding a "2" is 1 extra character return 1 else: # Difference in length of current and future count # 2j -> 3j = 0 # 9j -> 10j = 1 return len(str(self.came_by_motion_repetitions + 1)) - len( str(self.came_by_motion_repetitions) )
[ "def", "motion_weight", "(", "self", ",", "motion", ")", ":", "if", "motion", "!=", "self", ".", "came_by_motion", ":", "# First repetition, return number of characters in the motion", "return", "len", "(", "motion", ".", "motion", ")", "+", "(", "0", "if", "motion", ".", "argument", "is", "None", "else", "len", "(", "motion", ".", "argument", ")", ")", "elif", "self", ".", "came_by_motion_repetitions", "==", "1", ":", "# Second repetition, adding a \"2\" is 1 extra character", "return", "1", "else", ":", "# Difference in length of current and future count", "# 2j -> 3j = 0", "# 9j -> 10j = 1", "return", "len", "(", "str", "(", "self", ".", "came_by_motion_repetitions", "+", "1", ")", ")", "-", "len", "(", "str", "(", "self", ".", "came_by_motion_repetitions", ")", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/node.py#L27-L43
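The weighting above only charges extra keystrokes when the textual form of the count grows. The snippet below mirrors that arithmetic outside the Node class, purely as an illustration of the cost schedule for a single-character motion like "j".

```python
def repetition_cost(n, motion_len=1):
    """Extra characters typed for the n-th consecutive use of a motion."""
    if n == 1:
        return motion_len                      # first press: the motion itself
    if n == 2:
        return 1                               # second press: prepend a "2"
    return len(str(n)) - len(str(n - 1))       # "9j" -> "10j" costs one digit

print([repetition_cost(n) for n in range(1, 12)])
# [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0]
```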
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/node.py
python
Node.set_came_from
(self, node)
Set the node this node was reached from.
Set the node this node was reached from.
[ "Set", "the", "node", "this", "node", "was", "reached", "from", "." ]
def set_came_from(self, node): """Set the node this node was reached from.""" self.came_from = node if node.came_by_motion == self.came_by_motion: self.came_by_motion_repetitions = node.came_by_motion_repetitions + 1 else: self.came_by_motion_repetitions = 1
[ "def", "set_came_from", "(", "self", ",", "node", ")", ":", "self", ".", "came_from", "=", "node", "if", "node", ".", "came_by_motion", "==", "self", ".", "came_by_motion", ":", "self", ".", "came_by_motion_repetitions", "=", "node", ".", "came_by_motion_repetitions", "+", "1", "else", ":", "self", ".", "came_by_motion_repetitions", "=", "1" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/node.py#L45-L52
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/node.py
python
Node.reconstruct_path
(self)
return motions
Return the sequence of motions used to reach this node.
Return the sequence of motions used to reach this node.
[ "Return", "the", "sequence", "of", "motions", "used", "to", "reach", "this", "node", "." ]
def reconstruct_path(self): """Return the sequence of motions used to reach this node.""" motions = list() node = self while node.came_from is not None: motions.insert(0, node.came_by_motion) node = node.came_from return motions
[ "def", "reconstruct_path", "(", "self", ")", ":", "motions", "=", "list", "(", ")", "node", "=", "self", "while", "node", ".", "came_from", "is", "not", "None", ":", "motions", ".", "insert", "(", "0", ",", "node", ".", "came_by_motion", ")", "node", "=", "node", ".", "came_from", "return", "motions" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/node.py#L57-L64
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/motions/search.py
python
SearchMotionGenerator._escape_magic
(self, search_query)
return search_query
Add backslash escapes to any "magic" characters in a query.
Add backslash escapes to any "magic" characters in a query.
[ "Add", "backslash", "escapes", "to", "any", "magic", "characters", "in", "a", "query", "." ]
def _escape_magic(self, search_query): """Add backslash escapes to any "magic" characters in a query.""" for char in r"\^$.*[~/": search_query = search_query.replace(char, "\\" + char) return search_query
[ "def", "_escape_magic", "(", "self", ",", "search_query", ")", ":", "for", "char", "in", "r\"\\^$.*[~/\"", ":", "search_query", "=", "search_query", ".", "replace", "(", "char", ",", "\"\\\\\"", "+", "char", ")", "return", "search_query" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/motions/search.py#L27-L31
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/motions/search.py
python
SearchMotionGenerator._search
(self, text, start, target)
Return the simplest possible searching motion to reach the given target. :param text: Contents of the file. :param start: Index in ``text`` to start the search from. :param target: Index of the target position in ``text``.
Return the simplest possible searching motion to reach the given target.
[ "Return", "the", "simplest", "possible", "searching", "motion", "to", "reach", "the", "given", "target", "." ]
def _search(self, text, start, target): """ Return the simplest possible searching motion to reach the given target. :param text: Contents of the file. :param start: Index in ``text`` to start the search from. :param target: Index of the target position in ``text``. """ search_text = text[target:] # ("a", "ab", "abc", "abcd"...) until we reach # the end of search_text or find a working query for query_length in range(1, len(search_text) + 1): query = search_text[:query_length] # Get a list of all match positions for this search query # query="x" text="x___x_xx" == [0, 4, 6, 7] pattern = re.escape(query) matches = [m.start() for m in re.finditer(pattern, text)] if matches: # Sort the list so it begins with matches after `start`, rather # than matches at the beginning of the file # sorted([True, False]) == [False, True] matches.sort(key=lambda position: position <= start) if matches[0] == target: return self._create_motion(query) if matches[-1] == target: return self._create_motion(query, "?")
[ "def", "_search", "(", "self", ",", "text", ",", "start", ",", "target", ")", ":", "search_text", "=", "text", "[", "target", ":", "]", "# (\"a\", \"ab\", \"abc\", \"abcd\"...) until we reach", "# the end of search_text or find a working query", "for", "query_length", "in", "range", "(", "1", ",", "len", "(", "search_text", ")", "+", "1", ")", ":", "query", "=", "search_text", "[", ":", "query_length", "]", "# Get a list of all match positions for this search query", "# query=\"x\" text=\"x___x_xx\" == [0, 4, 6, 7]", "pattern", "=", "re", ".", "escape", "(", "query", ")", "matches", "=", "[", "m", ".", "start", "(", ")", "for", "m", "in", "re", ".", "finditer", "(", "pattern", ",", "text", ")", "]", "if", "matches", ":", "# Sort the list so it begins with matches after `start`, rather", "# than matches at the beginning of the file", "# sorted([True, False]) == [False, True]", "matches", ".", "sort", "(", "key", "=", "lambda", "position", ":", "position", "<=", "start", ")", "if", "matches", "[", "0", "]", "==", "target", ":", "return", "self", ".", "_create_motion", "(", "query", ")", "if", "matches", "[", "-", "1", "]", "==", "target", ":", "return", "self", ".", "_create_motion", "(", "query", ",", "\"?\"", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/motions/search.py#L33-L62
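The non-obvious step in _search above is matches.sort(key=lambda position: position <= start): because False sorts before True, matches after the cursor are rotated to the front, matching the wrap-around order of a forward search. A tiny standalone demonstration:

```python
# Positions where a query matches, with the cursor sitting at index 5.
matches = [0, 4, 6, 7]
start = 5

# False (position > start) sorts before True (position <= start), so the
# matches after the cursor come first; the sort is stable within each group.
matches.sort(key=lambda position: position <= start)
print(matches)   # [6, 7, 0, 4]
```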
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/motions/search.py
python
SearchMotionGenerator._search_lines
(self, lines, start_line, start_col, target_line, target_col)
return self._search(text, start, target)
Wrapper around _search which handles 2d coordinates and a list of lines. :param lines: List of lines. :param start_line: Starting line, indexed from 0. :param start_col: Starting column. :param target_line: Target line, indexed from 0. :param target_col: Target column.
Wrapper around _search which handles 2d coordinates and a list of lines.
[ "Wrapper", "around", "_search", "which", "handles", "2d", "coordinates", "and", "a", "list", "of", "lines", "." ]
def _search_lines(self, lines, start_line, start_col, target_line, target_col): """ Wrapper around _search which handles 2d coordinates and a list of lines. :param lines: List of lines. :param start_line: Starting line, indexed from 0. :param start_col: Starting column. :param target_line: Target line, indexed from 0. :param target_col: Target column. """ text = "\n".join(lines) start = sum(len(line) + 1 for line in lines[:start_line]) + start_col target = sum(len(line) + 1 for line in lines[:target_line]) + target_col return self._search(text, start, target)
[ "def", "_search_lines", "(", "self", ",", "lines", ",", "start_line", ",", "start_col", ",", "target_line", ",", "target_col", ")", ":", "text", "=", "\"\\n\"", ".", "join", "(", "lines", ")", "start", "=", "sum", "(", "len", "(", "line", ")", "+", "1", "for", "line", "in", "lines", "[", ":", "start_line", "]", ")", "+", "start_col", "target", "=", "sum", "(", "len", "(", "line", ")", "+", "1", "for", "line", "in", "lines", "[", ":", "target_line", "]", ")", "+", "target_col", "return", "self", ".", "_search", "(", "text", ",", "start", ",", "target", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/motions/search.py#L64-L77
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/motions/simple.py
python
SimpleMotionGenerator._try_motion
(self, view, motion)
return winsaveview()
Use a motion inside Vim, starting from the given view. If the motion causes an error, return None.
Use a motion inside Vim, starting from the given view.
[ "Use", "a", "motion", "inside", "Vim", "starting", "from", "the", "given", "view", "." ]
def _try_motion(self, view, motion): """ Use a motion inside Vim, starting from the given view. If the motion causes an error, return None. """ winrestview(view) try: vim.command(f"silent! normal! {motion}") except: return None return winsaveview()
[ "def", "_try_motion", "(", "self", ",", "view", ",", "motion", ")", ":", "winrestview", "(", "view", ")", "try", ":", "vim", ".", "command", "(", "f\"silent! normal! {motion}\"", ")", "except", ":", "return", "None", "return", "winsaveview", "(", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/motions/simple.py#L73-L84
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/server/motions/__init__.py
python
MotionGenerator.generate
(self, view)
Yield all neighbouring nodes found from the given view.
Yield all neighbouring nodes found from the given view.
[ "Yield", "all", "neighbouring", "nodes", "found", "from", "the", "given", "view", "." ]
def generate(self, view): """Yield all neighbouring nodes found from the given view.""" pass
[ "def", "generate", "(", "self", ",", "view", ")", ":", "pass" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/server/motions/__init__.py#L16-L18
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/explore_lines.py
python
get_explore_lines
(search_area_lines)
:param search_area_lines: Number of lines between, and including, the start and target positions. :returns: Number of lines to explore either side of the search area.
:param search_area_lines: Number of lines between, and including, the start and target positions. :returns: Number of lines to explore either side of the search area.
[ ":", "param", "search_area_lines", ":", "Number", "of", "lines", "between", "and", "including", "the", "start", "and", "target", "positions", ".", ":", "returns", ":", "Number", "of", "lines", "to", "explore", "either", "side", "of", "the", "search", "area", "." ]
def get_explore_lines(search_area_lines): """ :param search_area_lines: Number of lines between, and including, the start and target positions. :returns: Number of lines to explore either side of the search area. """ # Get setting values from Vim variables explore_scale = float(vim.vars["pf_explore_scale"]) max_explore = int(vim.vars["pf_max_explore"]) if explore_scale < 0: # This filtering is disabled, explore the entire buffer return len(vim.current.buffer) # Number of lines to explore above and below the search area is scaled based # on the length of the area. This setting defaults to 0.5, if the search area # was e.g. 6 lines then 3 more lines would be explored on either side. explore_lines = search_area_lines * explore_scale if max_explore >= 0: # Limit to no more than max_explore lines return min(max_explore, explore_lines) else: # Do not limit return explore_lines
[ "def", "get_explore_lines", "(", "search_area_lines", ")", ":", "# Get setting values from Vim variables", "explore_scale", "=", "float", "(", "vim", ".", "vars", "[", "\"pf_explore_scale\"", "]", ")", "max_explore", "=", "int", "(", "vim", ".", "vars", "[", "\"pf_max_explore\"", "]", ")", "if", "explore_scale", "<", "0", ":", "# This filtering is disabled, explore the entire buffer", "return", "len", "(", "vim", ".", "current", ".", "buffer", ")", "# Number of lines to explore above and below the search area is scaled based", "# on the length of the area. This setting defaults to 0.5, if the search area", "# was e.g. 6 lines then 3 more lines would be explored on either side.", "explore_lines", "=", "search_area_lines", "*", "explore_scale", "if", "max_explore", ">=", "0", ":", "# Limit to no more than max_explore lines", "return", "min", "(", "max_explore", ",", "explore_lines", ")", "else", ":", "# Do not limit", "return", "explore_lines" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/explore_lines.py#L4-L27
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/explore_lines.py
python
get_line_limits
(start_view, target_view)
return ( max(1, min_line - explore_lines), min(len(vim.current.buffer), max_line + explore_lines), )
Return the minimum and maximum line numbers to explore. :param start_view: The start position. :param target_view: The target position. :returns: Tuple of (min line, max line)
Return the minimum and maximum line numbers to explore.
[ "Return", "the", "minimum", "and", "maximum", "line", "numbers", "to", "explore", "." ]
def get_line_limits(start_view, target_view): """ Return the minimum and maximum line numbers to explore. :param start_view: The start position. :param target_view: The target position. :returns: Tuple of (min line, max line) """ min_line = min(int(start_view.lnum), int(target_view.lnum)) max_line = max(int(start_view.lnum), int(target_view.lnum)) explore_lines = get_explore_lines(max_line - min_line) return ( max(1, min_line - explore_lines), min(len(vim.current.buffer), max_line + explore_lines), )
[ "def", "get_line_limits", "(", "start_view", ",", "target_view", ")", ":", "min_line", "=", "min", "(", "int", "(", "start_view", ".", "lnum", ")", ",", "int", "(", "target_view", ".", "lnum", ")", ")", "max_line", "=", "max", "(", "int", "(", "start_view", ".", "lnum", ")", ",", "int", "(", "target_view", ".", "lnum", ")", ")", "explore_lines", "=", "get_explore_lines", "(", "max_line", "-", "min_line", ")", "return", "(", "max", "(", "1", ",", "min_line", "-", "explore_lines", ")", ",", "min", "(", "len", "(", "vim", ".", "current", ".", "buffer", ")", ",", "max_line", "+", "explore_lines", ")", ",", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/explore_lines.py#L30-L45
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/state_tracker.py
python
StateTracker.choose_action_using
(self, function)
return result
Choose an action to take using the given function. Function will be called with the arguments (start state, current state, time of most recent update). It may return "reset" or "set_target" to call the corresponding method, or any other value to do nothing. The function's return value is passed through this method, so can be used to take further actions elsewhere.
Choose an action to take using the given function.
[ "Choose", "an", "action", "to", "take", "using", "the", "given", "function", "." ]
def choose_action_using(self, function): """ Choose an action to take using the given function. Function will be called with the arguments (start state, current state, time of most recent update). It may return "reset" or "set_target" to call the corresponding method, or any other value to do nothing. The function's return value is passed through this method, so can be used to take further actions elsewhere. """ current_state = self._record_state() result = function(self.start_state, current_state, self.update_time) if result == "reset": self._reset(current_state) elif result == "set_target": self._set_target(current_state) return result
[ "def", "choose_action_using", "(", "self", ",", "function", ")", ":", "current_state", "=", "self", ".", "_record_state", "(", ")", "result", "=", "function", "(", "self", ".", "start_state", ",", "current_state", ",", "self", ".", "update_time", ")", "if", "result", "==", "\"reset\"", ":", "self", ".", "_reset", "(", "current_state", ")", "elif", "result", "==", "\"set_target\"", ":", "self", ".", "_set_target", "(", "current_state", ")", "return", "result" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/state_tracker.py#L45-L63
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/output.py
python
strtrans
(string)
return vim.eval(f"strtrans('{escaped_string}')")
Convert special characters like '' to '^D'.
Convert special characters like '' to '^D'.
[ "Convert", "special", "characters", "like", "\u0004", "to", "^D", "." ]
def strtrans(string): """Convert special characters like '' to '^D'.""" escaped_string = string.replace("'", "\\'").replace("\\", "\\\\") return vim.eval(f"strtrans('{escaped_string}')")
[ "def", "strtrans", "(", "string", ")", ":", "escaped_string", "=", "string", ".", "replace", "(", "\"'\"", ",", "\"\\\\'\"", ")", ".", "replace", "(", "\"\\\\\"", ",", "\"\\\\\\\\\"", ")", "return", "vim", ".", "eval", "(", "f\"strtrans('{escaped_string}')\"", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/output.py#L10-L13
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/output.py
python
get_count
(motion, count)
return str(count) + motion_str
Build a string like 'k', 'hh', '15w'
Build a string like 'k', 'hh', '15w'
[ "Build", "a", "string", "like", "k", "hh", "15w" ]
def get_count(motion, count): """Build a string like 'k', 'hh', '15w'""" motion_str = strtrans(motion.motion + (motion.argument or "")) if count == 1: return motion_str elif count == 2 and len(motion_str) == 1: # It's easier to press a single-character motion twice # than to type a 2 before it return (motion_str) * 2 return str(count) + motion_str
[ "def", "get_count", "(", "motion", ",", "count", ")", ":", "motion_str", "=", "strtrans", "(", "motion", ".", "motion", "+", "(", "motion", ".", "argument", "or", "\"\"", ")", ")", "if", "count", "==", "1", ":", "return", "motion_str", "elif", "count", "==", "2", "and", "len", "(", "motion_str", ")", "==", "1", ":", "# It's easier to press a single-character motion twice", "# than to type a 2 before it", "return", "(", "motion_str", ")", "*", "2", "return", "str", "(", "count", ")", "+", "motion_str" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/output.py#L16-L27
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/output.py
python
compact_motions
(motions)
return " ".join( [ get_count(motion, len(list(group))) for motion, group in itertools.groupby(motions) ] )
Return the given motion sequence in single-line form. e.g. 2* 5j $
Return the given motion sequence in single-line form.
[ "Return", "the", "given", "motion", "sequence", "in", "single", "-", "line", "form", "." ]
def compact_motions(motions): """ Return the given motion sequence in single-line form. e.g. 2* 5j $ """ return " ".join( [ get_count(motion, len(list(group))) for motion, group in itertools.groupby(motions) ] )
[ "def", "compact_motions", "(", "motions", ")", ":", "return", "\" \"", ".", "join", "(", "[", "get_count", "(", "motion", ",", "len", "(", "list", "(", "group", ")", ")", ")", "for", "motion", ",", "group", "in", "itertools", ".", "groupby", "(", "motions", ")", "]", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/output.py#L30-L41
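compact_motions relies on itertools.groupby to run-length-encode consecutive identical motions. The sketch below shows the same grouping with plain strings instead of motion objects; the motion names are made up for illustration.

```python
import itertools

# A flat sequence of motions, as reconstruct_path() would return.
motions = ["j", "j", "j", "w", "w", "$"]

parts = []
for motion, group in itertools.groupby(motions):
    count = len(list(group))               # materialise the group exactly once
    parts.append(motion if count == 1 else f"{count}{motion}")

print(" ".join(parts))   # "3j 2w $"
```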
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/output.py
python
explained_motions
(motions)
Yield each motion in the form "motion <padding> help" e.g. ['5j Down 5 lines', '$ To the end of the line']
Yield each motion in the form "motion <padding> help"
[ "Yield", "each", "motion", "in", "the", "form", "motion", "<padding", ">", "help" ]
def explained_motions(motions): """ Yield each motion in the form "motion <padding> help" e.g. ['5j Down 5 lines', '$ To the end of the line'] """ for motion, group in itertools.groupby(motions): repetitions = len(list(group)) yield ( get_count(motion, repetitions) + " " + get_description(motion, repetitions) )
[ "def", "explained_motions", "(", "motions", ")", ":", "for", "motion", ",", "group", "in", "itertools", ".", "groupby", "(", "motions", ")", ":", "repetitions", "=", "len", "(", "list", "(", "group", ")", ")", "yield", "(", "get_count", "(", "motion", ",", "repetitions", ")", "+", "\" \"", "+", "get_description", "(", "motion", ",", "repetitions", ")", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/output.py#L52-L62
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/client.py
python
Client.open
(self)
Launch and connect to the server Vim.
Launch and connect to the server Vim.
[ "Launch", "and", "connect", "to", "the", "server", "Vim", "." ]
def open(self): """Launch and connect to the server Vim.""" # Create a file used to communicate with the server self.file_path = os.path.join( tempfile.gettempdir(), "pathfinder_vim_" + vim.eval("getpid()") ) self.server_process = subprocess.Popen( self._build_server_cmd(), stdout=subprocess.PIPE, stderr=subprocess.PIPE ) self.server_connection = None self.to_send = None
[ "def", "open", "(", "self", ")", ":", "# Create a file used to communicate with the server", "self", ".", "file_path", "=", "os", ".", "path", ".", "join", "(", "tempfile", ".", "gettempdir", "(", ")", ",", "\"pathfinder_vim_\"", "+", "vim", ".", "eval", "(", "\"getpid()\"", ")", ")", "self", ".", "server_process", "=", "subprocess", ".", "Popen", "(", "self", ".", "_build_server_cmd", "(", ")", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "self", ".", "server_connection", "=", "None", "self", ".", "to_send", "=", "None" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/client.py#L26-L38
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/client.py
python
Client._build_server_cmd
(self)
return options
Build the command used to launch the server Vim.
Build the command used to launch the server Vim.
[ "Build", "the", "command", "used", "to", "launch", "the", "server", "Vim", "." ]
def _build_server_cmd(self): """Build the command used to launch the server Vim.""" progpath = vim.eval("v:progpath") options = [ progpath, "--clean", "--cmd", f"let g:pf_server_communication_file='{self.file_path}'", "-u", os.path.normpath( # serverrc.vim in the root of this repository, instead of the user's # regular .vimrc or init.vim os.path.join(os.path.dirname(__file__), "..", "..", "serverrc.vim") ), ] if progpath.endswith("nvim"): python3_host_prog = vim.eval("g:python3_host_prog") options += [ "--headless", "--cmd", f"let g:python3_host_prog='{python3_host_prog}'", ] else: options += ["-v", "--not-a-term"] return options
[ "def", "_build_server_cmd", "(", "self", ")", ":", "progpath", "=", "vim", ".", "eval", "(", "\"v:progpath\"", ")", "options", "=", "[", "progpath", ",", "\"--clean\"", ",", "\"--cmd\"", ",", "f\"let g:pf_server_communication_file='{self.file_path}'\"", ",", "\"-u\"", ",", "os", ".", "path", ".", "normpath", "(", "# serverrc.vim in the root of this repository, instead of the user's", "# regular .vimrc or init.vim", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "\"..\"", ",", "\"..\"", ",", "\"serverrc.vim\"", ")", ")", ",", "]", "if", "progpath", ".", "endswith", "(", "\"nvim\"", ")", ":", "python3_host_prog", "=", "vim", ".", "eval", "(", "\"g:python3_host_prog\"", ")", "options", "+=", "[", "\"--headless\"", ",", "\"--cmd\"", ",", "f\"let g:python3_host_prog='{python3_host_prog}'\"", ",", "]", "else", ":", "options", "+=", "[", "\"-v\"", ",", "\"--not-a-term\"", "]", "return", "options" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/client.py#L40-L67
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/client.py
python
Client.close
(self)
Shut down the server Vim.
Shut down the server Vim.
[ "Shut", "down", "the", "server", "Vim", "." ]
def close(self): """Shut down the server Vim.""" if self.server_connection is not None: # Server will shut down Vim gracefully when we disconnect self.server_connection.close() elif self.server_process is not None: # Not connected yet, terminate the process instead self.server_process.terminate()
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "server_connection", "is", "not", "None", ":", "# Server will shut down Vim gracefully when we disconnect", "self", ".", "server_connection", ".", "close", "(", ")", "elif", "self", ".", "server_process", "is", "not", "None", ":", "# Not connected yet, terminate the process instead", "self", ".", "server_process", ".", "terminate", "(", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/client.py#L69-L76
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/client.py
python
Client.connect
(self)
Attempt to connect to the server. :returns: whether a connection is ready.
Attempt to connect to the server.
[ "Attempt", "to", "connect", "to", "the", "server", "." ]
def connect(self): """ Attempt to connect to the server. :returns: whether a connection is ready. """ if self.server_connection is not None: return True if self.server_process is None: # Server process has exited but we already raised an exception return False return_code = self.server_process.poll() if return_code is not None: # Server process has exited stdout, stderr = self.server_process.communicate() self.server_process = None raise Exception( f"Pathfinding server process exited with return code {return_code}:\n" + stderr.decode() ) try: # Attempt to connect self.server_connection = connection.Client(self.file_path) return True except FileNotFoundError: return False
[ "def", "connect", "(", "self", ")", ":", "if", "self", ".", "server_connection", "is", "not", "None", ":", "return", "True", "if", "self", ".", "server_process", "is", "None", ":", "# Server process has exited but we already raised an exception", "return", "False", "return_code", "=", "self", ".", "server_process", ".", "poll", "(", ")", "if", "return_code", "is", "not", "None", ":", "# Server process has exited", "stdout", ",", "stderr", "=", "self", ".", "server_process", ".", "communicate", "(", ")", "self", ".", "server_process", "=", "None", "raise", "Exception", "(", "f\"Pathfinding server process exited with return code {return_code}:\\n\"", "+", "stderr", ".", "decode", "(", ")", ")", "try", ":", "# Attempt to connect", "self", ".", "server_connection", "=", "connection", ".", "Client", "(", "self", ".", "file_path", ")", "return", "True", "except", "FileNotFoundError", ":", "return", "False" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/client.py#L93-L121
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/client.py
python
Client.handle_response
(self, response_type, data)
Process a response received from the server. This will be one of: - ``RESULT`` - A pathfinding result. Call the first queued callback. - ``ERROR`` - An unexpected exception was caught and the server has exited. Relay the traceback to the user for debugging.
Process a response received from the server.
[ "Process", "a", "response", "received", "from", "the", "server", "." ]
def handle_response(self, response_type, data): """ Process a response recieved from the server. This will be one of: - ``RESULT`` - A pathfinding result. Call the first queued callback. - ``ERROR`` - An unexpected exception was caught and the server has exited. Relay the traceback to the user for debugging. """ if response_type == "RESULT": # Get the first callback function and pass the result to it self.callback(data) del self.callback elif response_type == "ERROR": raise Exception("Pathfinding server encountered an exception:\n" + data) else: raise Exception("Received an unexpected response " + response_type)
[ "def", "handle_response", "(", "self", ",", "response_type", ",", "data", ")", ":", "if", "response_type", "==", "\"RESULT\"", ":", "# Get the first callback function and pass the result to it", "self", ".", "callback", "(", "data", ")", "del", "self", ".", "callback", "elif", "response_type", "==", "\"ERROR\"", ":", "raise", "Exception", "(", "\"Pathfinding server encountered an exception:\\n\"", "+", "data", ")", "else", ":", "raise", "Exception", "(", "\"Received an unexpected response \"", "+", "response_type", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/client.py#L123-L139
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/client.py
python
Client.pathfind
(self, buffer_contents, start_view, target_view, callback)
Request a pathfinding result from the server. :param buffer_contents: List of lines we are pathfinding inside. :param start_view: The start position, in the current window. :param target_view: The target position, in the current window. :param callback: Function to be called once a path is found. Receives a list of motions as a parameter.
Request a pathfinding result from the server.
[ "Request", "a", "pathfinding", "result", "from", "the", "server", "." ]
def pathfind(self, buffer_contents, start_view, target_view, callback): """ Request a pathfinding result from the server. :param buffer_contents: List of lines we are pathfinding inside. :param start_view: The start position, in the current window. :param target_view: The target position, in the current window. :param callback: Function to be called once a path is found. Recieves a list of motions as a parameter. """ self.callback = callback min_line, max_line = get_line_limits(start_view, target_view) self.to_send = { "start": start_view, "target": target_view, "min_line": min_line, "max_line": max_line, "size": ( # WindowTextWidth() - see plugin/dimensions.vim vim.eval("WindowTextWidth()"), vim.eval("winheight(0)"), ), "buffer": buffer_contents, "wrap": vim.current.window.options["wrap"], "scrolloff": vim.options["scrolloff"], "sidescrolloff": vim.options["sidescrolloff"], }
[ "def", "pathfind", "(", "self", ",", "buffer_contents", ",", "start_view", ",", "target_view", ",", "callback", ")", ":", "self", ".", "callback", "=", "callback", "min_line", ",", "max_line", "=", "get_line_limits", "(", "start_view", ",", "target_view", ")", "self", ".", "to_send", "=", "{", "\"start\"", ":", "start_view", ",", "\"target\"", ":", "target_view", ",", "\"min_line\"", ":", "min_line", ",", "\"max_line\"", ":", "max_line", ",", "\"size\"", ":", "(", "# WindowTextWidth() - see plugin/dimensions.vim", "vim", ".", "eval", "(", "\"WindowTextWidth()\"", ")", ",", "vim", ".", "eval", "(", "\"winheight(0)\"", ")", ",", ")", ",", "\"buffer\"", ":", "buffer_contents", ",", "\"wrap\"", ":", "vim", ".", "current", ".", "window", ".", "options", "[", "\"wrap\"", "]", ",", "\"scrolloff\"", ":", "vim", ".", "options", "[", "\"scrolloff\"", "]", ",", "\"sidescrolloff\"", ":", "vim", ".", "options", "[", "\"sidescrolloff\"", "]", ",", "}" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/client.py#L141-L168
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/popup.py
python
_neovim_popup
(text, line_offset)
Create a popup using Neovim 0.4+ floating windows.
Create a popup using Neovim 0.4+ floating windows.
[ "Create", "a", "popup", "using", "Neovim", "0", ".", "4", "+", "floating", "windows", "." ]
def _neovim_popup(text, line_offset): """Create a popup using Neovim 0.4+ floating windows.""" # Insert text into a scratch buffer buffer = vim.api.create_buf(False, True) vim.api.buf_set_lines(buffer, 0, -1, True, [f" {text} "]) # Create a window containing the buffer window = vim.api.open_win( buffer, 0, { "relative": "cursor", "row": int(line_offset), "col": 0, "style": "minimal", "focusable": 0, "height": 1, "width": len(text) + 2, }, ) # Set the highlight of the window to match the cursor vim.api.win_set_option(window, "winhl", "Normal:PathfinderPopup") # Create a timer to close the window popup_time = int(vim.vars["pf_popup_time"]) vim.eval(f"timer_start({popup_time}, {{-> nvim_win_close({window.handle}, 1)}})")
[ "def", "_neovim_popup", "(", "text", ",", "line_offset", ")", ":", "# Insert text into a scratch buffer", "buffer", "=", "vim", ".", "api", ".", "create_buf", "(", "False", ",", "True", ")", "vim", ".", "api", ".", "buf_set_lines", "(", "buffer", ",", "0", ",", "-", "1", ",", "True", ",", "[", "f\" {text} \"", "]", ")", "# Create a window containing the buffer", "window", "=", "vim", ".", "api", ".", "open_win", "(", "buffer", ",", "0", ",", "{", "\"relative\"", ":", "\"cursor\"", ",", "\"row\"", ":", "int", "(", "line_offset", ")", ",", "\"col\"", ":", "0", ",", "\"style\"", ":", "\"minimal\"", ",", "\"focusable\"", ":", "0", ",", "\"height\"", ":", "1", ",", "\"width\"", ":", "len", "(", "text", ")", "+", "2", ",", "}", ",", ")", "# Set the highlight of the window to match the cursor", "vim", ".", "api", ".", "win_set_option", "(", "window", ",", "\"winhl\"", ",", "\"Normal:PathfinderPopup\"", ")", "# Create a timer to close the window", "popup_time", "=", "int", "(", "vim", ".", "vars", "[", "\"pf_popup_time\"", "]", ")", "vim", ".", "eval", "(", "f\"timer_start({popup_time}, {{-> nvim_win_close({window.handle}, 1)}})\"", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/popup.py#L6-L31
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/popup.py
python
_vim_popup
(text, line_offset)
Create a popup using Vim +popupwin.
Create a popup using Vim +popupwin.
[ "Create", "a", "popup", "using", "Vim", "+", "popupwin", "." ]
def _vim_popup(text, line_offset): """Create a popup using Vim +popupwin.""" vim.Function("popup_create")( text, { "line": f"cursor{line_offset}", "col": "cursor", "wrap": False, "padding": (0, 1, 0, 1), "highlight": "PathfinderPopup", "time": int(vim.vars["pf_popup_time"]), "zindex": 1000, }, )
[ "def", "_vim_popup", "(", "text", ",", "line_offset", ")", ":", "vim", ".", "Function", "(", "\"popup_create\"", ")", "(", "text", ",", "{", "\"line\"", ":", "f\"cursor{line_offset}\"", ",", "\"col\"", ":", "\"cursor\"", ",", "\"wrap\"", ":", "False", ",", "\"padding\"", ":", "(", "0", ",", "1", ",", "0", ",", "1", ")", ",", "\"highlight\"", ":", "\"PathfinderPopup\"", ",", "\"time\"", ":", "int", "(", "vim", ".", "vars", "[", "\"pf_popup_time\"", "]", ")", ",", "\"zindex\"", ":", "1000", ",", "}", ",", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/popup.py#L34-L47
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/plugin.py
python
Plugin._run
(self)
Start calculating a path in the background.
Start calculating a path in the background.
[ "Start", "calculating", "a", "path", "in", "the", "background", "." ]
def _run(self): """Start calculating a path in the background.""" self.client.pathfind( self.state_tracker.start_state.lines, self.state_tracker.start_state.view, self.state_tracker.target_state.view, self.popup, )
[ "def", "_run", "(", "self", ")", ":", "self", ".", "client", ".", "pathfind", "(", "self", ".", "state_tracker", ".", "start_state", ".", "lines", ",", "self", ".", "state_tracker", ".", "start_state", ".", "view", ",", "self", ".", "state_tracker", ".", "target_state", ".", "view", ",", "self", ".", "popup", ",", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/plugin.py#L19-L26
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/plugin.py
python
Plugin.autorun
(self)
Called on a timer several times per second.
Called on a timer several times per second.
[ "Called", "on", "a", "timer", "several", "times", "per", "second", "." ]
def autorun(self): """Called on a timer several times per second.""" if self.state_tracker.choose_action_using(choose_action) == "pathfind": if not cursor_in_same_position( self.state_tracker.start_state.view, self.state_tracker.target_state.view, ): self._run() self.state_tracker.reset()
[ "def", "autorun", "(", "self", ")", ":", "if", "self", ".", "state_tracker", ".", "choose_action_using", "(", "choose_action", ")", "==", "\"pathfind\"", ":", "if", "not", "cursor_in_same_position", "(", "self", ".", "state_tracker", ".", "start_state", ".", "view", ",", "self", ".", "state_tracker", ".", "target_state", ".", "view", ",", ")", ":", "self", ".", "_run", "(", ")", "self", ".", "state_tracker", ".", "reset", "(", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/plugin.py#L32-L40
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/plugin.py
python
Plugin.command_begin
(self)
Called for the :PathfinderBegin command.
Called for the :PathfinderBegin command.
[ "Called", "for", "the", ":", "PathfinderBegin", "command", "." ]
def command_begin(self): """Called for the :PathfinderBegin command.""" self.state_tracker.reset()
[ "def", "command_begin", "(", "self", ")", ":", "self", ".", "state_tracker", ".", "reset", "(", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/plugin.py#L42-L44
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/plugin.py
python
Plugin.command_run
(self)
Called for the :PathfinderRun command.
Called for the :PathfinderRun command.
[ "Called", "for", "the", ":", "PathfinderRun", "command", "." ]
def command_run(self): """Called for the :PathfinderRun command.""" self.state_tracker.set_target() if cursor_in_same_position( self.state_tracker.start_state.view, self.state_tracker.target_state.view, ): print("You must move the cursor to a new location first!") else: self._run()
[ "def", "command_run", "(", "self", ")", ":", "self", ".", "state_tracker", ".", "set_target", "(", ")", "if", "cursor_in_same_position", "(", "self", ".", "state_tracker", ".", "start_state", ".", "view", ",", "self", ".", "state_tracker", ".", "target_state", ".", "view", ",", ")", ":", "print", "(", "\"You must move the cursor to a new location first!\"", ")", "else", ":", "self", ".", "_run", "(", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/plugin.py#L46-L56
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/plugin.py
python
Plugin.command_explain
(self)
Called for the :PathfinderExplain command.
Called for the :PathfinderExplain command.
[ "Called", "for", "the", ":", "PathfinderExplain", "command", "." ]
def command_explain(self): """Called for the :PathfinderExplain command.""" if self.last_output is None: print("No suggestion to explain.") else: # explained_motions yields each line # sep tells print to put \n between them rather than space print(*output.explained_motions(self.last_output), sep="\n")
[ "def", "command_explain", "(", "self", ")", ":", "if", "self", ".", "last_output", "is", "None", ":", "print", "(", "\"No suggestion to explain.\"", ")", "else", ":", "# explained_motions yields each line", "# sep tells print to put \\n between them rather than space", "print", "(", "*", "output", ".", "explained_motions", "(", "self", ".", "last_output", ")", ",", "sep", "=", "\"\\n\"", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/plugin.py#L58-L65
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/plugin.py
python
Plugin.stop
(self)
Called when Vim is about to shut down.
Called when Vim is about to shut down.
[ "Called", "when", "Vim", "is", "about", "to", "shut", "down", "." ]
def stop(self): """Called when Vim is about to shut down.""" self.client.close()
[ "def", "stop", "(", "self", ")", ":", "self", ".", "client", ".", "close", "(", ")" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/plugin.py#L67-L69
danth/pathfinder.vim
4f67053cbea56a45020d004b6bd059e38934a21a
pathfinder/client/autorun.py
python
choose_action
(start_state, current_state, update_time)
Select an action to take automatically. This is intended for use with StateTracker.choose_action_using(). Returns one of: "reset" - Set start and target to the current state "set_target" - Set target to the current state "pathfind" - Start pathfinding, using the target from last time it was set None - Do nothing
Select an action to take automatically.
[ "Select", "an", "action", "to", "take", "automatically", "." ]
def choose_action(start_state, current_state, update_time): """ Select an action to take automatically. This is intended for use with StateTracker.choose_action_using(). Returns one of: "reset" - Set start and target to the current state "set_target" - Set target to the current state "pathfind" - Start pathfinding, using the target from last time it was set None - Do nothing """ if ( start_state.window != current_state.window or start_state.buffer != current_state.buffer ): # Reset to ensure the start or target view isn't set to a location # which is now impossible to access return "reset" delay = vim.vars["pf_autorun_delay"] if delay > 0: # If delay <= 0, then the user disabled autorun if start_state.mode not in {"n", "v", "V", ""}: # Motions are not used in this mode, so pathfinding is useless return "reset" if ( time.time() >= update_time + delay or start_state.mode != current_state.mode or start_state.lines != current_state.lines ): return "pathfind" else: return "set_target"
[ "def", "choose_action", "(", "start_state", ",", "current_state", ",", "update_time", ")", ":", "if", "(", "start_state", ".", "window", "!=", "current_state", ".", "window", "or", "start_state", ".", "buffer", "!=", "current_state", ".", "buffer", ")", ":", "# Reset to ensure the start or target view isn't set to a location", "# which is now impossible to access", "return", "\"reset\"", "delay", "=", "vim", ".", "vars", "[", "\"pf_autorun_delay\"", "]", "if", "delay", ">", "0", ":", "# If delay <= 0, then the user disabled autorun", "if", "start_state", ".", "mode", "not", "in", "{", "\"n\"", ",", "\"v\"", ",", "\"V\"", ",", "\"\u0016\"", "}", ":", "# Motions are not used in this mode, so pathfinding is useless", "return", "\"reset\"", "if", "(", "time", ".", "time", "(", ")", ">=", "update_time", "+", "delay", "or", "start_state", ".", "mode", "!=", "current_state", ".", "mode", "or", "start_state", ".", "lines", "!=", "current_state", ".", "lines", ")", ":", "return", "\"pathfind\"", "else", ":", "return", "\"set_target\"" ]
https://github.com/danth/pathfinder.vim/blob/4f67053cbea56a45020d004b6bd059e38934a21a/pathfinder/client/autorun.py#L6-L39
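The choose_action record above documents a small string-based protocol ("reset" / "set_target" / "pathfind" / None). Below is a minimal sketch of how a caller might dispatch on those strings; the tracker method names used here are assumptions for illustration and are not taken from pathfinder.vim.

def dispatch(tracker, action):
    # Map the strings documented in choose_action to tracker operations.
    # The method names on `tracker` are hypothetical, for illustration only.
    if action == "reset":
        tracker.reset()
    elif action == "set_target":
        tracker.set_target()
    elif action == "pathfind":
        tracker.pathfind()
    # action is None -> do nothing


class FakeTracker:
    """Stand-in object so the sketch runs without Vim."""
    def reset(self): print("reset")
    def set_target(self): print("set_target")
    def pathfind(self): print("pathfind")


dispatch(FakeTracker(), "pathfind")   # prints: pathfind
dispatch(FakeTracker(), None)         # prints nothing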
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blackboard.py
python
BlackBoard.execute_graph
(self)
Tries to execute the graph, if some block is tainted it prevents the execution, if not it starts running the backend.
Tries to execute the graph, if some block is tainted it prevents the execution, if not it starts running the backend.
[ "Tries", "to", "execute", "the", "graph", "if", "some", "block", "is", "tainted", "it", "prevents", "the", "execution", "if", "not", "it", "starts", "running", "the", "backend", "." ]
def execute_graph(self): """ Tries to execute the graph, if some block is tainted it prevents the execution, if not it starts running the backend. """ logger.debug('Checking taint') # Check if any block is tainted if any(map(lambda block: block.tainted, self.block_div.children)): # Get tainted block tainted_block = reduce(lambda l, r: l if l.tainted else r, self.block_div.children) logger.debug('Some block is tainted') Notification(title='Warning', message=tainted_block.tainted_msg).open() self.parent.on_graph_executed() else: logger.debug('No block is tainted') for block in self.block_div.children: if block.kindled: block.unkindle() self.backend.exec_graph(self.to_ir())
[ "def", "execute_graph", "(", "self", ")", ":", "logger", ".", "debug", "(", "'Checking taint'", ")", "# Check if any block is tainted", "if", "any", "(", "map", "(", "lambda", "block", ":", "block", ".", "tainted", ",", "self", ".", "block_div", ".", "children", ")", ")", ":", "# Get tainted block", "tainted_block", "=", "reduce", "(", "lambda", "l", ",", "r", ":", "l", "if", "l", ".", "tainted", "else", "r", ",", "self", ".", "block_div", ".", "children", ")", "logger", ".", "debug", "(", "'Some block is tainted'", ")", "Notification", "(", "title", "=", "'Warning'", ",", "message", "=", "tainted_block", ".", "tainted_msg", ")", ".", "open", "(", ")", "self", ".", "parent", ".", "on_graph_executed", "(", ")", "else", ":", "logger", ".", "debug", "(", "'No block is tainted'", ")", "for", "block", "in", "self", ".", "block_div", ".", "children", ":", "if", "block", ".", "kindled", ":", "block", ".", "unkindle", "(", ")", "self", ".", "backend", ".", "exec_graph", "(", "self", ".", "to_ir", "(", ")", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blackboard.py#L34-L52
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blackboard.py
python
BlackBoard.get_relations
(self)
return ''.join(chain(ins, outs))
Gets the relations between pins as a string.
Gets the relations between pins as a string.
[ "Gets", "the", "relations", "between", "pins", "as", "a", "string", "." ]
def get_relations(self) -> str: """ Gets the relations between pins as a string. """ # generator expressions are cool ins = ('{} -> {}\n'.format(block.title, in_pin.origin.end.block.title) for block in self.block_div.children for in_pin in block.input_pins if in_pin.origin) outs = ('{} <- {}\n'.format(block.title, destination.start.block.title) for block in self.block_div.children for out_pin in block.output_pins for destination in out_pin.destinations) return ''.join(chain(ins, outs))
[ "def", "get_relations", "(", "self", ")", "->", "str", ":", "# generator expressions are cool", "ins", "=", "(", "'{} -> {}\\n'", ".", "format", "(", "block", ".", "title", ",", "in_pin", ".", "origin", ".", "end", ".", "block", ".", "title", ")", "for", "block", "in", "self", ".", "block_div", ".", "children", "for", "in_pin", "in", "block", ".", "input_pins", "if", "in_pin", ".", "origin", ")", "outs", "=", "(", "'{} <- {}\\n'", ".", "format", "(", "block", ".", "title", ",", "destination", ".", "start", ".", "block", ".", "title", ")", "for", "block", "in", "self", ".", "block_div", ".", "children", "for", "out_pin", "in", "block", ".", "output_pins", "for", "destination", "in", "out_pin", ".", "destinations", ")", "return", "''", ".", "join", "(", "chain", "(", "ins", ",", "outs", ")", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blackboard.py#L54-L65
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blackboard.py
python
BlackBoard.to_ir
(self)
return backend.IR(blocks=ir_blocks, inputs=ir_inputs, outputs=ir_outputs)
Transforms the relations between blocks into an intermediate representation in O(n), n being the number of pins.
Transforms the relations between blocks into an intermediate representation in O(n), n being the number of pins.
[ "Transforms", "the", "relations", "between", "blocks", "into", "an", "intermediate", "representation", "in", "O", "(", "n", ")", "n", "being", "the", "number", "of", "pins", "." ]
def to_ir(self) -> IR: """ Transforms the relations between blocks into an intermediate representation in O(n), n being the number of pins. """ ir_blocks = {} ir_inputs = {} ir_outputs = {} logger.debug('Transforming to IR') for block in self.block_div.children: if block.is_orphan(): # Ignore orphaned blocks continue block_hash = id(block) block_inputs, block_outputs = [], [] avoid = False for in_pin in block.input_pins: pin_hash = id(in_pin) block_inputs.append(pin_hash) other = id(in_pin.origin.end) # Always origin ir_inputs[pin_hash] = backend.InputEntry(origin=other, pin=in_pin, block=block_hash) for out_pin in block.output_pins: pin_hash = id(out_pin) block_outputs.append(pin_hash) dest = list(map(id, out_pin.destinations)) ir_outputs[pin_hash] = backend.OutputEntry(destinations=dest, pin=out_pin, block=block_hash) ir_blocks[block_hash] = backend.BlockEntry(inputs=block_inputs, function=block.function, outputs=block_outputs) self.block_hashes = ir_blocks return backend.IR(blocks=ir_blocks, inputs=ir_inputs, outputs=ir_outputs)
[ "def", "to_ir", "(", "self", ")", "->", "IR", ":", "ir_blocks", "=", "{", "}", "ir_inputs", "=", "{", "}", "ir_outputs", "=", "{", "}", "logger", ".", "debug", "(", "'Transforming to IR'", ")", "for", "block", "in", "self", ".", "block_div", ".", "children", ":", "if", "block", ".", "is_orphan", "(", ")", ":", "# Ignore orphaned blocks", "continue", "block_hash", "=", "id", "(", "block", ")", "block_inputs", ",", "block_outputs", "=", "[", "]", ",", "[", "]", "avoid", "=", "False", "for", "in_pin", "in", "block", ".", "input_pins", ":", "pin_hash", "=", "id", "(", "in_pin", ")", "block_inputs", ".", "append", "(", "pin_hash", ")", "other", "=", "id", "(", "in_pin", ".", "origin", ".", "end", ")", "# Always origin", "ir_inputs", "[", "pin_hash", "]", "=", "backend", ".", "InputEntry", "(", "origin", "=", "other", ",", "pin", "=", "in_pin", ",", "block", "=", "block_hash", ")", "for", "out_pin", "in", "block", ".", "output_pins", ":", "pin_hash", "=", "id", "(", "out_pin", ")", "block_outputs", ".", "append", "(", "pin_hash", ")", "dest", "=", "list", "(", "map", "(", "id", ",", "out_pin", ".", "destinations", ")", ")", "ir_outputs", "[", "pin_hash", "]", "=", "backend", ".", "OutputEntry", "(", "destinations", "=", "dest", ",", "pin", "=", "out_pin", ",", "block", "=", "block_hash", ")", "ir_blocks", "[", "block_hash", "]", "=", "backend", ".", "BlockEntry", "(", "inputs", "=", "block_inputs", ",", "function", "=", "block", ".", "function", ",", "outputs", "=", "block_outputs", ")", "self", ".", "block_hashes", "=", "ir_blocks", "return", "backend", ".", "IR", "(", "blocks", "=", "ir_blocks", ",", "inputs", "=", "ir_inputs", ",", "outputs", "=", "ir_outputs", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blackboard.py#L67-L98
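The to_ir record above flattens the block graph into three dictionaries keyed by id(). A sketch of what the entry types could look like, assuming namedtuple-style containers whose field names are inferred only from the keyword arguments used in to_ir; the real persimmon.backend definitions may differ.

from collections import namedtuple

# Field names are an assumption based on the calls in to_ir above.
InputEntry = namedtuple('InputEntry', ['origin', 'pin', 'block'])
OutputEntry = namedtuple('OutputEntry', ['destinations', 'pin', 'block'])
BlockEntry = namedtuple('BlockEntry', ['inputs', 'function', 'outputs'])
IR = namedtuple('IR', ['blocks', 'inputs', 'outputs'])

# Toy graph: source block (id 1, output pin 2) feeding sink block (id 3, input pin 4).
ir = IR(
    blocks={1: BlockEntry(inputs=[], function=lambda: None, outputs=[2]),
            3: BlockEntry(inputs=[4], function=lambda: None, outputs=[])},
    inputs={4: InputEntry(origin=2, pin=None, block=3)},
    outputs={2: OutputEntry(destinations=[4], pin=None, block=1)},
)
print(ir.inputs[4].origin)   # 2: the output pin this input is wired to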
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blackboard.py
python
BlackBoard.on_block_executed
(self, block_hash: int)
Callback that kindles a block, pulses future connections and stops the pulse of past connections.
Callback that kindles a block, pulses future connections and stops the pulse of past connections.
[ "Callback", "that", "kindles", "a", "block", "pulses", "future", "connections", "and", "stops", "the", "pulse", "of", "past", "connections", "." ]
def on_block_executed(self, block_hash: int): """ Callback that kindles a block, pulses future connections and stops the pulse of past connections. """ block_idx = list(map(id, self.block_div.children)).index(block_hash) block = self.block_div.children[block_idx] block.kindle() logger.debug('Kindling block {}'.format(block.__class__.__name__)) # Python list comprehensions can be nested forwards, but also backwards # http://rhodesmill.org/brandon/2009/nested-comprehensions/ [connection.pulse() for out_pin in block.output_pins for connection in out_pin.destinations] [in_pin.origin.stop_pulse() for in_pin in block.input_pins]
[ "def", "on_block_executed", "(", "self", ",", "block_hash", ":", "int", ")", ":", "block_idx", "=", "list", "(", "map", "(", "id", ",", "self", ".", "block_div", ".", "children", ")", ")", ".", "index", "(", "block_hash", ")", "block", "=", "self", ".", "block_div", ".", "children", "[", "block_idx", "]", "block", ".", "kindle", "(", ")", "logger", ".", "debug", "(", "'Kindling block {}'", ".", "format", "(", "block", ".", "__class__", ".", "__name__", ")", ")", "# Python list comprehensions can be nested forwards, but also backwards", "# http://rhodesmill.org/brandon/2009/nested-comprehensions/", "[", "connection", ".", "pulse", "(", ")", "for", "out_pin", "in", "block", ".", "output_pins", "for", "connection", "in", "out_pin", ".", "destinations", "]", "[", "in_pin", ".", "origin", ".", "stop_pulse", "(", ")", "for", "in_pin", "in", "block", ".", "input_pins", "]" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blackboard.py#L101-L113
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blackboard.py
python
BlackBoard.on_touch_up
(self, touch: MotionEvent)
return False
Inherited from https://github.com/kivy/kivy/blob/master/kivy/uix/scatter.py#L590.
Inherited from https://github.com/kivy/kivy/blob/master/kivy/uix/scatter.py#L590.
[ "Inherited", "from", "https", ":", "//", "github", ".", "com", "/", "kivy", "/", "kivy", "/", "blob", "/", "master", "/", "kivy", "/", "uix", "/", "scatter", ".", "py#L590", "." ]
def on_touch_up(self, touch: MotionEvent) -> bool: """ Inherited from https://github.com/kivy/kivy/blob/master/kivy/uix/scatter.py#L590. """ if self.disabled: return False x, y = touch.x, touch.y # if the touch isnt on the widget we do nothing, just try children if not touch.grab_current == self: touch.push() touch.apply_transform_2d(self.to_local) for child in self.children: if child.dispatch('on_touch_up', touch): touch.pop() return True touch.pop() # remove it from our saved touches if touch in self._touches and touch.grab_state: touch.ungrab(self) del self._last_touch_pos[touch] self._touches.remove(touch) # if no connection was made if 'cur_line' in touch.ud.keys() and touch.button == 'left': logger.info('Finish connection through smart bubble') connection = touch.ud['cur_line'] if connection.forward: edge = connection.start else: edge = connection.end self.add_widget(blocks.SmartBubble(pos=touch.pos, backdrop=self, pin=edge)) return True # stop propagating if its within our bounds if self.collide_point(x, y): return True return False
[ "def", "on_touch_up", "(", "self", ",", "touch", ":", "MotionEvent", ")", "->", "bool", ":", "if", "self", ".", "disabled", ":", "return", "False", "x", ",", "y", "=", "touch", ".", "x", ",", "touch", ".", "y", "# if the touch isnt on the widget we do nothing, just try children", "if", "not", "touch", ".", "grab_current", "==", "self", ":", "touch", ".", "push", "(", ")", "touch", ".", "apply_transform_2d", "(", "self", ".", "to_local", ")", "for", "child", "in", "self", ".", "children", ":", "if", "child", ".", "dispatch", "(", "'on_touch_up'", ",", "touch", ")", ":", "touch", ".", "pop", "(", ")", "return", "True", "touch", ".", "pop", "(", ")", "# remove it from our saved touches", "if", "touch", "in", "self", ".", "_touches", "and", "touch", ".", "grab_state", ":", "touch", ".", "ungrab", "(", "self", ")", "del", "self", ".", "_last_touch_pos", "[", "touch", "]", "self", ".", "_touches", ".", "remove", "(", "touch", ")", "# if no connection was made", "if", "'cur_line'", "in", "touch", ".", "ud", ".", "keys", "(", ")", "and", "touch", ".", "button", "==", "'left'", ":", "logger", ".", "info", "(", "'Finish connection through smart bubble'", ")", "connection", "=", "touch", ".", "ud", "[", "'cur_line'", "]", "if", "connection", ".", "forward", ":", "edge", "=", "connection", ".", "start", "else", ":", "edge", "=", "connection", ".", "end", "self", ".", "add_widget", "(", "blocks", ".", "SmartBubble", "(", "pos", "=", "touch", ".", "pos", ",", "backdrop", "=", "self", ",", "pin", "=", "edge", ")", ")", "return", "True", "# stop propagating if its within our bounds", "if", "self", ".", "collide_point", "(", "x", ",", "y", ")", ":", "return", "True", "return", "False" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blackboard.py#L138-L175
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blackboard.py
python
BlackBoard.in_block
(self, x: float, y: float)
return None
Check if a position hits a block.
Check if a position hits a block.
[ "Check", "if", "a", "position", "hits", "a", "block", "." ]
def in_block(self, x: float, y: float) -> Optional[blocks.Block]: """ Check if a position hits a block. """ for block in self.block_div.children: if block.collide_point(x, y): return block return None
[ "def", "in_block", "(", "self", ",", "x", ":", "float", ",", "y", ":", "float", ")", "->", "Optional", "[", "blocks", ".", "Block", "]", ":", "for", "block", "in", "self", ".", "block_div", ".", "children", ":", "if", "block", ".", "collide_point", "(", "x", ",", "y", ")", ":", "return", "block", "return", "None" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blackboard.py#L177-L182
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blackboard.py
python
Blocks.add_widget
(self, widget: Widget, index: int = 0, canvas: str = None)
Add widget override.
Add widget override.
[ "Add", "widget", "override", "." ]
def add_widget(self, widget: Widget, index: int = 0, canvas: str = None): """ Add widget override. """ if (widget.__class__ == blocks.PrintBlock and any(map(lambda w: w.__class__ == blocks.PrintBlock, self.children))): Notification(title='Warning', message='Only one print block allowed!').open() return if not self.children: self.parent.parent.parent.remove_hint() super().add_widget(widget, index, canvas)
[ "def", "add_widget", "(", "self", ",", "widget", ":", "Widget", ",", "index", ":", "int", "=", "0", ",", "canvas", ":", "str", "=", "None", ")", ":", "if", "(", "widget", ".", "__class__", "==", "blocks", ".", "PrintBlock", "and", "any", "(", "map", "(", "lambda", "w", ":", "w", ".", "__class__", "==", "blocks", ".", "PrintBlock", ",", "self", ".", "children", ")", ")", ")", ":", "Notification", "(", "title", "=", "'Warning'", ",", "message", "=", "'Only one print block allowed!'", ")", ".", "open", "(", ")", "return", "if", "not", "self", ".", "children", ":", "self", ".", "parent", ".", "parent", ".", "parent", ".", "remove_hint", "(", ")", "super", "(", ")", ".", "add_widget", "(", "widget", ",", "index", ",", "canvas", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blackboard.py#L188-L197
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blackboard.py
python
Blocks.remove_widget
(self, widget: Widget)
Remove widget override.
Remove widget override.
[ "Remove", "widget", "override", "." ]
def remove_widget(self, widget: Widget): """ Remove widget override. """ super().remove_widget(widget) if not self.children: self.parent.parent.parent.add_hint()
[ "def", "remove_widget", "(", "self", ",", "widget", ":", "Widget", ")", ":", "super", "(", ")", ".", "remove_widget", "(", "widget", ")", "if", "not", "self", ".", "children", ":", "self", ".", "parent", ".", "parent", ".", "parent", ".", "add_hint", "(", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blackboard.py#L199-L203
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection.__init__
(self, **kwargs)
On this initializer the connection has to check whether the connection is being made forward or backwards.
On this initializer the connection has to check whether the connection is being made forward or backwards.
[ "On", "this", "initializer", "the", "connection", "has", "to", "check", "whether", "the", "connection", "is", "being", "made", "forward", "or", "backwards", "." ]
def __init__(self, **kwargs): """ On this initializer the connection has to check whether the connection is being made forward or backwards. """ super().__init__(**kwargs) if self.start: self.forward = True # The value is repeated for correctness sake self.bez_start, self.bez_end = [self.start.center] * 2 with self.canvas.before: Color(*self.color) self.lin = Line(bezier=self.bez_start * 4, width=1.5) self._bind_pin(self.start) else: self.forward = False self.bez_start, self.bez_end = [self.end.center] * 2 with self.canvas.before: Color(*self.color) self.lin = Line(bezier=self.bez_end * 4, width=1.5) self._bind_pin(self.end) self.warned = False self.info = Factory.Info(pos=self.bez_start) Window.add_widget(self.info)
[ "def", "__init__", "(", "self", ",", "*", "*", "kwargs", ")", ":", "super", "(", ")", ".", "__init__", "(", "*", "*", "kwargs", ")", "if", "self", ".", "start", ":", "self", ".", "forward", "=", "True", "# The value is repeated for correctness sake", "self", ".", "bez_start", ",", "self", ".", "bez_end", "=", "[", "self", ".", "start", ".", "center", "]", "*", "2", "with", "self", ".", "canvas", ".", "before", ":", "Color", "(", "*", "self", ".", "color", ")", "self", ".", "lin", "=", "Line", "(", "bezier", "=", "self", ".", "bez_start", "*", "4", ",", "width", "=", "1.5", ")", "self", ".", "_bind_pin", "(", "self", ".", "start", ")", "else", ":", "self", ".", "forward", "=", "False", "self", ".", "bez_start", ",", "self", ".", "bez_end", "=", "[", "self", ".", "end", ".", "center", "]", "*", "2", "with", "self", ".", "canvas", ".", "before", ":", "Color", "(", "*", "self", ".", "color", ")", "self", ".", "lin", "=", "Line", "(", "bezier", "=", "self", ".", "bez_end", "*", "4", ",", "width", "=", "1.5", ")", "self", ".", "_bind_pin", "(", "self", ".", "end", ")", "self", ".", "warned", "=", "False", "self", ".", "info", "=", "Factory", ".", "Info", "(", "pos", "=", "self", ".", "bez_start", ")", "Window", ".", "add_widget", "(", "self", ".", "info", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L52-L73
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection.finish_connection
(self, pin: 'Pin')
This function finishes a connection that has only start or end and is being currently dragged
This function finishes a connection that has only start or end and is being currently dragged
[ "This", "function", "finishes", "a", "connection", "that", "has", "only", "start", "or", "end", "and", "is", "being", "currently", "dragged" ]
def finish_connection(self, pin: 'Pin'): """ This function finishes a connection that has only start or end and is being currently dragged """ self.remove_info() if self.forward: self.end = pin self._bind_pin(self.end) else: self.start = pin self._bind_pin(self.start)
[ "def", "finish_connection", "(", "self", ",", "pin", ":", "'Pin'", ")", ":", "self", ".", "remove_info", "(", ")", "if", "self", ".", "forward", ":", "self", ".", "end", "=", "pin", "self", ".", "_bind_pin", "(", "self", ".", "end", ")", "else", ":", "self", ".", "start", "=", "pin", "self", ".", "_bind_pin", "(", "self", ".", "start", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L75-L84
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection.on_touch_down
(self, touch: MotionEvent)
On touch down on connection means we are modifying an already existing connection, not creating a new one.
On touch down on connection means we are modifying an already existing connection, not creating a new one.
[ "On", "touch", "down", "on", "connection", "means", "we", "are", "modifying", "an", "already", "existing", "connection", "not", "creating", "a", "new", "one", "." ]
def on_touch_down(self, touch: MotionEvent) -> bool: """ On touch down on connection means we are modifying an already existing connection, not creating a new one. """ # TODO: remove start check? if self.start and self.start.collide_point(*touch.pos): self.forward = False # Remove start edge self._unbind_pin(self.start) self.start.on_connection_delete(self) self.start = None # This signals that we are dragging a connection touch.ud['cur_line'] = self Window.add_widget(self.info) return True elif self.end and self.end.collide_point(*touch.pos): # Same as before but with the other edge self.forward = True self._unbind_pin(self.end) self.end.on_connection_delete(self) self.end = None touch.ud['cur_line'] = self Window.add_widget(self.info) return True else: return False
[ "def", "on_touch_down", "(", "self", ",", "touch", ":", "MotionEvent", ")", "->", "bool", ":", "# TODO: remove start check?", "if", "self", ".", "start", "and", "self", ".", "start", ".", "collide_point", "(", "*", "touch", ".", "pos", ")", ":", "self", ".", "forward", "=", "False", "# Remove start edge", "self", ".", "_unbind_pin", "(", "self", ".", "start", ")", "self", ".", "start", ".", "on_connection_delete", "(", "self", ")", "self", ".", "start", "=", "None", "# This signals that we are dragging a connection", "touch", ".", "ud", "[", "'cur_line'", "]", "=", "self", "Window", ".", "add_widget", "(", "self", ".", "info", ")", "return", "True", "elif", "self", ".", "end", "and", "self", ".", "end", ".", "collide_point", "(", "*", "touch", ".", "pos", ")", ":", "# Same as before but with the other edge", "self", ".", "forward", "=", "True", "self", ".", "_unbind_pin", "(", "self", ".", "end", ")", "self", ".", "end", ".", "on_connection_delete", "(", "self", ")", "self", ".", "end", "=", "None", "touch", ".", "ud", "[", "'cur_line'", "]", "=", "self", "Window", ".", "add_widget", "(", "self", ".", "info", ")", "return", "True", "else", ":", "return", "False" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L87-L111
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection.follow_cursor
(self, newpos, blackboard)
This functions makes sure the current end being dragged follows the cursor. It also checks for type safety and changes the line color if needed.
This functions makes sure the current end being dragged follows the cursor. It also checks for type safety and changes the line color if needed.
[ "This", "functions", "makes", "sure", "the", "current", "end", "being", "dragged", "follows", "the", "cursor", ".", "It", "also", "checks", "for", "type", "safety", "and", "changes", "the", "line", "color", "if", "needed", "." ]
def follow_cursor(self, newpos, blackboard): """ This functions makes sure the current end being dragged follows the cursor. It also checks for type safety and changes the line color if needed.""" if self.forward: fixed_edge = self.start self.bez_end = [*newpos] self._rebezier() else: fixed_edge = self.end self.bez_start = [*newpos] self._rebezier() self.info.pos = [*newpos] # The conditionals are so complicated because it is necessary to check # whether or not a pin in a block has been touched, and then check # the typesafety. if (self._in_pin(blackboard, newpos) and not self._in_pin(blackboard, newpos).typesafe(fixed_edge)): # This conditional represents that the cursor stepped out the pin self.info.text = 'Connection is not possible' self._warn() elif (self._in_pin(blackboard, newpos) and self._in_pin(blackboard, newpos).typesafe(fixed_edge)): self.info.text = 'Connect' if self.warned: self._unwarn() else: self.info.text = 'Spawn new block' if self.warned: self._unwarn()
[ "def", "follow_cursor", "(", "self", ",", "newpos", ",", "blackboard", ")", ":", "if", "self", ".", "forward", ":", "fixed_edge", "=", "self", ".", "start", "self", ".", "bez_end", "=", "[", "*", "newpos", "]", "self", ".", "_rebezier", "(", ")", "else", ":", "fixed_edge", "=", "self", ".", "end", "self", ".", "bez_start", "=", "[", "*", "newpos", "]", "self", ".", "_rebezier", "(", ")", "self", ".", "info", ".", "pos", "=", "[", "*", "newpos", "]", "# The conditionals are so complicated because it is necessary to check", "# whether or not a pin in a block has been touched, and then check", "# the typesafety.", "if", "(", "self", ".", "_in_pin", "(", "blackboard", ",", "newpos", ")", "and", "not", "self", ".", "_in_pin", "(", "blackboard", ",", "newpos", ")", ".", "typesafe", "(", "fixed_edge", ")", ")", ":", "# This conditional represents that the cursor stepped out the pin", "self", ".", "info", ".", "text", "=", "'Connection is not possible'", "self", ".", "_warn", "(", ")", "elif", "(", "self", ".", "_in_pin", "(", "blackboard", ",", "newpos", ")", "and", "self", ".", "_in_pin", "(", "blackboard", ",", "newpos", ")", ".", "typesafe", "(", "fixed_edge", ")", ")", ":", "self", ".", "info", ".", "text", "=", "'Connect'", "if", "self", ".", "warned", ":", "self", ".", "_unwarn", "(", ")", "else", ":", "self", ".", "info", ".", "text", "=", "'Spawn new block'", "if", "self", ".", "warned", ":", "self", ".", "_unwarn", "(", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L113-L142
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection.delete_connection
(self)
This function deletes both ends (if they exist) and the connection itself.
This function deletes both ends (if they exist) and the connection itself.
[ "This", "function", "deletes", "both", "ends", "(", "if", "they", "exist", ")", "and", "the", "connection", "itself", "." ]
def delete_connection(self): """ This function deletes both ends (if they exist) and the connection itself. """ self.parent.remove_widget(self) # Self-destruct self.remove_info() if self.start: self._unbind_pin(self.start) self.start.on_connection_delete(self) if self.end: self._unbind_pin(self.end) self.end.on_connection_delete(self)
[ "def", "delete_connection", "(", "self", ")", ":", "self", ".", "parent", ".", "remove_widget", "(", "self", ")", "# Self-destruct", "self", ".", "remove_info", "(", ")", "if", "self", ".", "start", ":", "self", ".", "_unbind_pin", "(", "self", ".", "start", ")", "self", ".", "start", ".", "on_connection_delete", "(", "self", ")", "if", "self", ".", "end", ":", "self", ".", "_unbind_pin", "(", "self", ".", "end", ")", "self", ".", "end", ".", "on_connection_delete", "(", "self", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L144-L154
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection.pulse
(self)
Makes a connection appear to pulse by modifying its width continuously.
Makes a connection appear to pulse by modifying its width continuously.
[ "Makes", "a", "connection", "appear", "to", "pulse", "by", "modifying", "its", "width", "continuously", "." ]
def pulse(self): """ Makes a connection appear to pulse by modifying its width continuously. """ self.it = self._change_width() next(self.it) Clock.schedule_interval(lambda _: next(self.it), 0.05)
[ "def", "pulse", "(", "self", ")", ":", "self", ".", "it", "=", "self", ".", "_change_width", "(", ")", "next", "(", "self", ".", "it", ")", "Clock", ".", "schedule_interval", "(", "lambda", "_", ":", "next", "(", "self", ".", "it", ")", ",", "0.05", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L156-L161
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection.stop_pulse
(self)
Stops vibrating a connection. It will throw an exception if the connection is not pulsing right now.
Stops vibrating a connection. It will throw an exception if the connection is not pulsing right now.
[ "Stops", "vibrating", "a", "connection", ".", "It", "will", "throw", "an", "exception", "if", "the", "connection", "is", "not", "pulsing", "right", "now", "." ]
def stop_pulse(self): """ Stops vibrating a connection. It will throw an exception if the connection is not pulsing right now. """ self.it.throw(StopIteration)
[ "def", "stop_pulse", "(", "self", ")", ":", "self", ".", "it", ".", "throw", "(", "StopIteration", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L163-L166
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection._unbind_pin
(self, pin: 'Pin')
Undoes pin's circle and line binding.
Undoes pin's circle and line binding.
[ "Undoes", "pin", "s", "circle", "and", "line", "binding", "." ]
def _unbind_pin(self, pin: 'Pin'): """ Undoes pin's circle and line binding. """ pin.funbind('pos', self._line_bind)
[ "def", "_unbind_pin", "(", "self", ",", "pin", ":", "'Pin'", ")", ":", "pin", ".", "funbind", "(", "'pos'", ",", "self", ".", "_line_bind", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L181-L183
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection._bind_pin
(self, pin: 'Pin')
Performs pin circle and line binding.
Performs pin circle and line binding.
[ "Performs", "pin", "circle", "and", "line", "binding", "." ]
def _bind_pin(self, pin: 'Pin'): """ Performs pin circle and line binding. """ pin.fbind('pos', self._line_bind) self._line_bind(pin, pin.pos)
[ "def", "_bind_pin", "(", "self", ",", "pin", ":", "'Pin'", ")", ":", "pin", ".", "fbind", "(", "'pos'", ",", "self", ".", "_line_bind", ")", "self", ".", "_line_bind", "(", "pin", ",", "pin", ".", "pos", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L185-L188
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection._change_width
(self)
Ok, so let me explain what is going on, this generator/coroutine changes the width of the line continuosly using the width_gen generator. We use it by calling it 20 times per second. The tricky part is stopping the scheduled calls. The way to tell Kivy to stop calling is to return a False value, and to do that we need to call this coroutine itself, which may be executing or not at that precise moment. That is where throw comes in, allowing for exceptions to be thrown on during the execution, hijacking the current execution (like a fast interruption), we need to return from this exception, in which we do not care about the value, and then return False on the regular execution in order to stop the calls.
Ok, so let me explain what is going on, this generator/coroutine changes the width of the line continuosly using the width_gen generator. We use it by calling it 20 times per second. The tricky part is stopping the scheduled calls. The way to tell Kivy to stop calling is to return a False value, and to do that we need to call this coroutine itself, which may be executing or not at that precise moment.
[ "Ok", "so", "let", "me", "explain", "what", "is", "going", "on", "this", "generator", "/", "coroutine", "changes", "the", "width", "of", "the", "line", "continuosly", "using", "the", "width_gen", "generator", ".", "We", "use", "it", "by", "calling", "it", "20", "times", "per", "second", ".", "The", "tricky", "part", "is", "stopping", "the", "scheduled", "calls", ".", "The", "way", "to", "tell", "Kivy", "to", "stop", "calling", "is", "to", "return", "a", "False", "value", "and", "to", "do", "that", "we", "need", "to", "call", "this", "coroutine", "itself", "which", "may", "be", "executing", "or", "not", "at", "that", "precise", "moment", "." ]
def _change_width(self): """ Ok, so let me explain what is going on, this generator/coroutine changes the width of the line continuosly using the width_gen generator. We use it by calling it 20 times per second. The tricky part is stopping the scheduled calls. The way to tell Kivy to stop calling is to return a False value, and to do that we need to call this coroutine itself, which may be executing or not at that precise moment. That is where throw comes in, allowing for exceptions to be thrown on during the execution, hijacking the current execution (like a fast interruption), we need to return from this exception, in which we do not care about the value, and then return False on the regular execution in order to stop the calls.""" try: for value in self._width_gen(): self.lin.width = value yield except StopIteration: self.lin.width = 2 yield yield False
[ "def", "_change_width", "(", "self", ")", ":", "try", ":", "for", "value", "in", "self", ".", "_width_gen", "(", ")", ":", "self", ".", "lin", ".", "width", "=", "value", "yield", "except", "StopIteration", ":", "self", ".", "lin", ".", "width", "=", "2", "yield", "yield", "False" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L201-L222
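The _change_width docstring above describes driving a generator from Kivy's Clock and stopping it by throwing an exception into it. Here is a standalone sketch of the same pattern, with the Clock replaced by a plain loop so the snippet runs outside Kivy; only the control flow is meant to mirror the record above.

import math

def width_gen():
    # Endless oscillation between 2 and 4, mirroring _width_gen above.
    val = 0.0
    while True:
        yield math.sin(val) + 3
        val += math.pi / 20

def change_width():
    try:
        for value in width_gen():
            yield value            # the scheduler keeps pulling values
    except StopIteration:
        yield 2                    # interrupted: hand back the resting width once
    yield False                    # a falsy value would tell Clock to unschedule

it = change_width()
for _ in range(5):                 # stand-in for Clock.schedule_interval(..., 0.05)
    print(next(it))
print(it.throw(StopIteration))     # stand-in for stop_pulse(); prints 2
print(next(it))                    # the next scheduled call gets False and stops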
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection._width_gen
(self)
Infinite oscillating generator (between 2 and 4)
Infinite oscillating generator (between 2 and 4)
[ "Infinite", "oscillating", "generator", "(", "between", "2", "and", "4", ")" ]
def _width_gen(self): """ Infinite oscillating generator (between 2 and 4) """ val = 0 while True: yield np.sin(val) + 3 val += pi / 20
[ "def", "_width_gen", "(", "self", ")", ":", "val", "=", "0", "while", "True", ":", "yield", "np", ".", "sin", "(", "val", ")", "+", "3", "val", "+=", "pi", "/", "20" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L224-L229
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection._warn
(self)
Changes the current line to a red thick connection.
Changes the current line to a red thick connection.
[ "Changes", "the", "current", "line", "to", "a", "red", "thick", "connection", "." ]
def _warn(self): """ Changes the current line to a red thick connection. """ self.warned = True self.canvas.before.remove(self.lin) with self.canvas.before: Color(1, 0, 0) self.lin = Line(points=self.lin.points, width=3) self._rebezier()
[ "def", "_warn", "(", "self", ")", ":", "self", ".", "warned", "=", "True", "self", ".", "canvas", ".", "before", ".", "remove", "(", "self", ".", "lin", ")", "with", "self", ".", "canvas", ".", "before", ":", "Color", "(", "1", ",", "0", ",", "0", ")", "self", ".", "lin", "=", "Line", "(", "points", "=", "self", ".", "lin", ".", "points", ",", "width", "=", "3", ")", "self", ".", "_rebezier", "(", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L232-L239
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection._unwarn
(self)
Returns the red thick connection to its normal state.
Returns the red thick connection to its normal state.
[ "Returns", "the", "red", "thick", "connection", "to", "its", "normal", "state", "." ]
def _unwarn(self): """ Returns the red thick connection to its normal state. """ self.warned = False self.canvas.before.remove(self.lin) with self.canvas.before: Color(*self.color) self.lin = Line(points=self.lin.points, width=1.5) self._rebezier()
[ "def", "_unwarn", "(", "self", ")", ":", "self", ".", "warned", "=", "False", "self", ".", "canvas", ".", "before", ".", "remove", "(", "self", ".", "lin", ")", "with", "self", ".", "canvas", ".", "before", ":", "Color", "(", "*", "self", ".", "color", ")", "self", ".", "lin", "=", "Line", "(", "points", "=", "self", ".", "lin", ".", "points", ",", "width", "=", "1.5", ")", "self", ".", "_rebezier", "(", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L241-L248
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/connection.py
python
Connection._rebezier
(self)
Refreshes bezier curve according to start and end. It uses the arctan to force the bèzier curve always going a bit forward before drifting.
Refreshes bezier curve according to start and end. It uses the arctan to force the bèzier curve always going a bit forward before drifting.
[ "Refreshes", "bezier", "curve", "according", "to", "start", "and", "end", ".", "It", "uses", "the", "arctan", "to", "force", "the", "bèzier", "curve", "always", "going", "a", "bit", "forward", "before", "drifting", "." ]
def _rebezier(self): """ Refreshes bezier curve according to start and end. It uses the arctan to force the bèzier curve always going a bit forward before drifting.""" arc_tan = np.arctan2(self.bez_start[1] - self.bez_end[1], self.bez_start[0] - self.bez_end[0]) abs_angle = np.abs(np.degrees(arc_tan)) # We use the angle value plus a fixed amount to steer the line a bit start_right = [self.bez_start[0] - 5 - 0.6 * abs_angle, self.bez_start[1]] end_left = [self.bez_end[0] + 5 + 0.6 * abs_angle, self.bez_end[1]] # Y distance to mid point dist = (min(self.bez_start[0], self.bez_end[0]) + abs(self.bez_start[0] - self.bez_end[0]) / 2) # This updates the bèzier curve graphics self.lin.bezier = (self.bez_start + start_right + [dist, self.bez_start[1]] + [dist, self.bez_end[1]] + end_left + self.bez_end)
[ "def", "_rebezier", "(", "self", ")", ":", "arc_tan", "=", "np", ".", "arctan2", "(", "self", ".", "bez_start", "[", "1", "]", "-", "self", ".", "bez_end", "[", "1", "]", ",", "self", ".", "bez_start", "[", "0", "]", "-", "self", ".", "bez_end", "[", "0", "]", ")", "abs_angle", "=", "np", ".", "abs", "(", "np", ".", "degrees", "(", "arc_tan", ")", ")", "# We use the angle value plus a fixed amount to steer the line a bit", "start_right", "=", "[", "self", ".", "bez_start", "[", "0", "]", "-", "5", "-", "0.6", "*", "abs_angle", ",", "self", ".", "bez_start", "[", "1", "]", "]", "end_left", "=", "[", "self", ".", "bez_end", "[", "0", "]", "+", "5", "+", "0.6", "*", "abs_angle", ",", "self", ".", "bez_end", "[", "1", "]", "]", "# Y distance to mid point", "dist", "=", "(", "min", "(", "self", ".", "bez_start", "[", "0", "]", ",", "self", ".", "bez_end", "[", "0", "]", ")", "+", "abs", "(", "self", ".", "bez_start", "[", "0", "]", "-", "self", ".", "bez_end", "[", "0", "]", ")", "/", "2", ")", "# This updates the bèzier curve graphics", "self", ".", "lin", ".", "bezier", "=", "(", "self", ".", "bez_start", "+", "start_right", "+", "[", "dist", ",", "self", ".", "bez_start", "[", "1", "]", "]", "+", "[", "dist", ",", "self", ".", "bez_end", "[", "1", "]", "]", "+", "end_left", "+", "self", ".", "bez_end", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/connection.py#L251-L268
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/util/play_button.py
python
PlayButton.on_angle
(self, instance, values)
Only needed so spinner goes after 360.
Only needed so spinner goes after 360.
[ "Only", "needed", "so", "spinner", "goes", "after", "360", "." ]
def on_angle(self, instance, values): """ Only needed so spinner goes after 360. """ self.angle %= 360
[ "def", "on_angle", "(", "self", ",", "instance", ",", "values", ")", ":", "self", ".", "angle", "%=", "360" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/util/play_button.py#L38-L40
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/pins/pin.py
python
Pin.typesafe
(self, other: 'Pin')
Tells if a relation between two pins is typesafe.
Tells if a relation between two pins is typesafe.
[ "Tells", "if", "a", "relation", "between", "two", "pins", "is", "typesafe", "." ]
def typesafe(self, other: 'Pin') -> bool: """ Tells if a relation between two pins is typesafe. """ if self.block == other.block or self.__class__ == other.__class__: return False elif self.type_ == Type.ANY or other.type_ == Type.ANY: return True # Anything is possible with ANY else: return self.type_ == other.type_
[ "def", "typesafe", "(", "self", ",", "other", ":", "'Pin'", ")", "->", "bool", ":", "if", "self", ".", "block", "==", "other", ".", "block", "or", "self", ".", "__class__", "==", "other", ".", "__class__", ":", "return", "False", "elif", "self", ".", "type_", "==", "Type", ".", "ANY", "or", "other", ".", "type_", "==", "Type", ".", "ANY", ":", "return", "True", "# Anything is possible with ANY", "else", ":", "return", "self", ".", "type_", "==", "other", ".", "type_" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/pins/pin.py#L32-L39
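The typesafe record above encodes three rules: pins on the same block or with the same direction never connect, the ANY type matches everything, and otherwise the two types must be equal. A self-contained sketch with stub Pin/Type classes follows; the real Persimmon classes carry more state, so this only demonstrates the rule itself.

from enum import Enum

class Type(Enum):
    ANY = 0
    NUMBER = 1
    TEXT = 2

class Pin:
    def __init__(self, block, type_):
        self.block, self.type_ = block, type_
    def typesafe(self, other):
        if self.block == other.block or self.__class__ == other.__class__:
            return False                     # same block or same direction
        elif self.type_ == Type.ANY or other.type_ == Type.ANY:
            return True                      # ANY connects to anything
        return self.type_ == other.type_     # otherwise types must match

class InputPin(Pin): pass
class OutputPin(Pin): pass

out_num = OutputPin('block_a', Type.NUMBER)
print(out_num.typesafe(InputPin('block_b', Type.ANY)))      # True
print(out_num.typesafe(InputPin('block_b', Type.TEXT)))     # False
print(out_num.typesafe(OutputPin('block_b', Type.NUMBER)))  # False (two outputs)
print(out_num.typesafe(InputPin('block_a', Type.NUMBER)))   # False (same block)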
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/pins/pin.py
python
Pin.on_type_
(self, instance: 'Pin', value: Type)
If the kv lang was a bit smarter this would not be needed
If the kv lang was a bit smarter this would not be needed
[ "If", "the", "kv", "lang", "was", "a", "bit", "smarter", "this", "would", "not", "be", "needed" ]
def on_type_(self, instance: 'Pin', value: Type): """ If the kv lang was a bit smarter this would not be needed """ self.color = value.value
[ "def", "on_type_", "(", "self", ",", "instance", ":", "'Pin'", ",", "value", ":", "Type", ")", ":", "self", ".", "color", "=", "value", ".", "value" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/pins/pin.py#L42-L45
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blocks/block.py
python
Block.is_orphan
(self)
return True
Tells if a block is orphan, i.e. whether it has any connection
Tells if a block is orphan, i.e. whether it has any connection
[ "Tells", "if", "a", "block", "is", "orphan", "i", ".", "e", ".", "whether", "it", "has", "any", "connection" ]
def is_orphan(self) -> bool: """ Tells if a block is orphan, i.e. whether it has any connection """ for in_pin in self.input_pins: if in_pin.origin: return False for out_pin in self.output_pins: if out_pin.destinations: return False return True
[ "def", "is_orphan", "(", "self", ")", "->", "bool", ":", "for", "in_pin", "in", "self", ".", "input_pins", ":", "if", "in_pin", ".", "origin", ":", "return", "False", "for", "out_pin", "in", "self", ".", "output_pins", ":", "if", "out_pin", ".", "destinations", ":", "return", "False", "return", "True" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blocks/block.py#L77-L85
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blocks/block.py
python
Block.in_pin
(self, x: float, y: float)
return None
Checks if a position collides with any of the pins in the block.
Checks if a position collides with any of the pins in the block.
[ "Checks", "if", "a", "position", "collides", "with", "any", "of", "the", "pins", "in", "the", "block", "." ]
def in_pin(self, x: float, y: float) -> Optional[Pin]: """ Checks if a position collides with any of the pins in the block. """ for pin in self.input_pins + self.output_pins: if pin.collide_point(x, y): return pin return None
[ "def", "in_pin", "(", "self", ",", "x", ":", "float", ",", "y", ":", "float", ")", "->", "Optional", "[", "Pin", "]", ":", "for", "pin", "in", "self", ".", "input_pins", "+", "self", ".", "output_pins", ":", "if", "pin", ".", "collide_point", "(", "x", ",", "y", ")", ":", "return", "pin", "return", "None" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blocks/block.py#L87-L93
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blocks/block.py
python
Block.kindle
(self)
Praise the sun \[T]/
Praise the sun \[T]/
[ "Praise", "the", "sun", "\\", "[", "T", "]", "/" ]
def kindle(self): """ Praise the sun \[T]/ """ with self.canvas.before: Color(1, 1, 1) self.kindled = BorderImage(pos=(self.x - 2, self.y - 2), size=(self.width + 4, self.height + 4), texture=self.border_texture) self.fbind('pos', self._bind_border)
[ "def", "kindle", "(", "self", ")", ":", "with", "self", ".", "canvas", ".", "before", ":", "Color", "(", "1", ",", "1", ",", "1", ")", "self", ".", "kindled", "=", "BorderImage", "(", "pos", "=", "(", "self", ".", "x", "-", "2", ",", "self", ".", "y", "-", "2", ")", ",", "size", "=", "(", "self", ".", "width", "+", "4", ",", "self", ".", "height", "+", "4", ")", ",", "texture", "=", "self", ".", "border_texture", ")", "self", ".", "fbind", "(", "'pos'", ",", "self", ".", "_bind_border", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blocks/block.py#L115-L123
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blocks/block.py
python
Block.unkindle
(self)
Reverts the border image.
Reverts the border image.
[ "Reverts", "the", "border", "image", "." ]
def unkindle(self): """ Reverts the border image. """ if self.kindled: self.canvas.before.remove(self.kindled) self.funbind('pos', self._bind_border) self.kindled = None else: logger.warning('Called unkindle on a block not kindled')
[ "def", "unkindle", "(", "self", ")", ":", "if", "self", ".", "kindled", ":", "self", ".", "canvas", ".", "before", ".", "remove", "(", "self", ".", "kindled", ")", "self", ".", "funbind", "(", "'pos'", ",", "self", ".", "_bind_border", ")", "self", ".", "kindled", "=", "None", "else", ":", "logger", ".", "warning", "(", "'Called unkindle on a block not kindled'", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blocks/block.py#L125-L132
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blocks/block.py
python
Block._bind_border
(self, block: 'Block', new_pos: Tuple[float, float])
Bind border to position.
Bind border to position.
[ "Bind", "border", "to", "position", "." ]
def _bind_border(self, block: 'Block', new_pos: Tuple[float, float]): """ Bind border to position. """ self.kindled.pos = new_pos[0] - 2, new_pos[1] - 2
[ "def", "_bind_border", "(", "self", ",", "block", ":", "'Block'", ",", "new_pos", ":", "Tuple", "[", "float", ",", "float", "]", ")", ":", "self", ".", "kindled", ".", "pos", "=", "new_pos", "[", "0", "]", "-", "2", ",", "new_pos", "[", "1", "]", "-", "2" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blocks/block.py#L135-L137
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/view/blocks/block.py
python
Block._bind_pin
(self, block: 'Block', new_pos: Tuple[float, float], pin: Pin, i: int, output: bool)
Keep pins on their respective places.
Keep pins on their respective places.
[ "Keep", "pins", "on", "their", "respective", "places", "." ]
def _bind_pin(self, block: 'Block', new_pos: Tuple[float, float], pin: Pin, i: int, output: bool): """ Keep pins on their respective places. """ pin.y = (block.y + (block.height - block.label.height) - i * self.gap + pin.height / 2) if output: pin.x = block.x + block.width - self.gap else: pin.x = block.x + 5
[ "def", "_bind_pin", "(", "self", ",", "block", ":", "'Block'", ",", "new_pos", ":", "Tuple", "[", "float", ",", "float", "]", ",", "pin", ":", "Pin", ",", "i", ":", "int", ",", "output", ":", "bool", ")", ":", "pin", ".", "y", "=", "(", "block", ".", "y", "+", "(", "block", ".", "height", "-", "block", ".", "label", ".", "height", ")", "-", "i", "*", "self", ".", "gap", "+", "pin", ".", "height", "/", "2", ")", "if", "output", ":", "pin", ".", "x", "=", "block", ".", "x", "+", "block", ".", "width", "-", "self", ".", "gap", "else", ":", "pin", ".", "x", "=", "block", ".", "x", "+", "5" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/view/blocks/block.py#L139-L147
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/backend/backend.py
python
Backend._exec_graph_parallel
(self)
Execution algorithm, introduces all blocks on a set, when a block is executed it is taken out of the set until the set is empty.
Execution algorithm, introduces all blocks on a set, when a block is executed it is taken out of the set until the set is empty.
[ "Execution", "algorithm", "introduces", "all", "blocks", "on", "a", "set", "when", "a", "block", "is", "executed", "it", "is", "taken", "out", "of", "the", "set", "until", "the", "set", "is", "empty", "." ]
def _exec_graph_parallel(self): """ Execution algorithm, introduces all blocks on a set, when a block is executed it is taken out of the set until the set is empty. """ unseen = set(self.ir.blocks.keys()) # All blocks are unseen at start # All output pins along their respectives values seen = {} # type: Dict[int, Any] while unseen: unseen, seen = self._exec_block(unseen.pop(), unseen, seen) logger.info('Execution done') self.emit('graph_executed')
[ "def", "_exec_graph_parallel", "(", "self", ")", ":", "unseen", "=", "set", "(", "self", ".", "ir", ".", "blocks", ".", "keys", "(", ")", ")", "# All blocks are unseen at start", "# All output pins along their respectives values", "seen", "=", "{", "}", "# type: Dict[int, Any]", "while", "unseen", ":", "unseen", ",", "seen", "=", "self", ".", "_exec_block", "(", "unseen", ".", "pop", "(", ")", ",", "unseen", ",", "seen", ")", "logger", ".", "info", "(", "'Execution done'", ")", "self", ".", "emit", "(", "'graph_executed'", ")" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/backend/backend.py#L29-L38
AlvarBer/Persimmon
da08ed854dd0305d7e4684e97ee828acffd76b4d
persimmon/backend/backend.py
python
Backend._exec_block
(self, current: int, unseen: set, seen: Dict[int, Any])
return unseen, seen
Execute a block, if any dependency is not yet executed we recurse into it first.
Execute a block, if any dependency is not yet executed we recurse into it first.
[ "Execute", "a", "block", "if", "any", "dependency", "is", "not", "yet", "executed", "we", "recurse", "into", "it", "first", "." ]
def _exec_block(self, current: int, unseen: set, seen: Dict[int, Any]) -> Tuple[set, Dict[int, Any]]: """ Execute a block, if any dependency is not yet executed we recurse into it first. """ logger.debug('Executing block {}'.format(current)) current_block = self.ir.blocks[current] for in_pin in map(lambda x: self.ir.inputs[x], current_block.inputs): origin = in_pin.origin if origin not in seen: dependency = self.ir.outputs[origin].block unseen.remove(dependency) unseen, seen = self._exec_block(dependency, unseen, seen) in_pin.pin.val = seen[origin] current_block.function() self.emit('block_executed', current) logger.debug('Block {} executed'.format(current)) for out_id in current_block.outputs: seen[out_id] = self.ir.outputs[out_id].pin.val return unseen, seen
[ "def", "_exec_block", "(", "self", ",", "current", ":", "int", ",", "unseen", ":", "set", ",", "seen", ":", "Dict", "[", "int", ",", "Any", "]", ")", "->", "Tuple", "[", "set", ",", "Dict", "[", "int", ",", "Any", "]", "]", ":", "logger", ".", "debug", "(", "'Executing block {}'", ".", "format", "(", "current", ")", ")", "current_block", "=", "self", ".", "ir", ".", "blocks", "[", "current", "]", "for", "in_pin", "in", "map", "(", "lambda", "x", ":", "self", ".", "ir", ".", "inputs", "[", "x", "]", ",", "current_block", ".", "inputs", ")", ":", "origin", "=", "in_pin", ".", "origin", "if", "origin", "not", "in", "seen", ":", "dependency", "=", "self", ".", "ir", ".", "outputs", "[", "origin", "]", ".", "block", "unseen", ".", "remove", "(", "dependency", ")", "unseen", ",", "seen", "=", "self", ".", "_exec_block", "(", "dependency", ",", "unseen", ",", "seen", ")", "in_pin", ".", "pin", ".", "val", "=", "seen", "[", "origin", "]", "current_block", ".", "function", "(", ")", "self", ".", "emit", "(", "'block_executed'", ",", "current", ")", "logger", ".", "debug", "(", "'Block {} executed'", ".", "format", "(", "current", ")", ")", "for", "out_id", "in", "current_block", ".", "outputs", ":", "seen", "[", "out_id", "]", "=", "self", ".", "ir", ".", "outputs", "[", "out_id", "]", ".", "pin", ".", "val", "return", "unseen", ",", "seen" ]
https://github.com/AlvarBer/Persimmon/blob/da08ed854dd0305d7e4684e97ee828acffd76b4d/persimmon/backend/backend.py#L40-L60
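The two backend records above describe the execution order: pop an unseen block and recursively execute any dependency whose output has not been computed yet. Below is a toy, dependency-only sketch of that traversal using plain dicts and callables instead of the Persimmon IR, so the ordering behaviour can be checked in isolation.

def exec_graph(blocks, deps):
    # blocks: id -> zero-argument callable; deps: id -> ids it depends on.
    unseen = set(blocks)
    seen = {}                                 # id -> computed value

    def exec_block(block_id):
        for dep in deps[block_id]:
            if dep not in seen:               # dependency not executed yet
                unseen.discard(dep)
                exec_block(dep)
        seen[block_id] = blocks[block_id]()

    while unseen:
        exec_block(unseen.pop())
    return seen

order = []
exec_graph(
    {'a': lambda: order.append('a'), 'b': lambda: order.append('b'),
     'c': lambda: order.append('c')},
    {'a': [], 'b': ['a'], 'c': ['a', 'b']},
)
print(order)   # 'a' always runs before 'b', and both run before 'c'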
AnasAboureada/Penetration-Testing-Study-Notes
8152fd609cf818dba2f07e060738a24c56221687
enumeration/tools/recon_scan/samrdump.py
python
SAMRDump.dump
(self, addr)
Dumps the list of users and shares registered present at addr. Addr is a valid host name or IP address.
Dumps the list of users and shares registered present at addr. Addr is a valid host name or IP address.
[ "Dumps", "the", "list", "of", "users", "and", "shares", "registered", "present", "at", "addr", ".", "Addr", "is", "a", "valid", "host", "name", "or", "IP", "address", "." ]
def dump(self, addr): """Dumps the list of users and shares registered present at addr. Addr is a valid host name or IP address. """ encoding = sys.getdefaultencoding() print 'Retrieving endpoint list from %s' % addr # Try all requested protocols until one works. entries = [] for protocol in self.__protocols: protodef = SAMRDump.KNOWN_PROTOCOLS[protocol] port = protodef[1] print "Trying protocol %s..." % protocol rpctransport = transport.SMBTransport(addr, port, r'\samr', self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash) try: entries = self.__fetchList(rpctransport) except Exception, e: print 'Protocol failed: %s' % e raise else: # Got a response. No need for further iterations. break # Display results. for entry in entries: (username, uid, user) = entry base = "%s (%d)" % (username, uid) print base + '/Enabled:', ('false', 'true')[user.is_enabled()] print base + '/Last Logon:', user.get_logon_time() print base + '/Last Logoff:', user.get_logoff_time() print base + '/Kickoff:', user.get_kickoff_time() print base + '/Last PWD Set:', user.get_pwd_last_set() print base + '/PWD Can Change:', user.get_pwd_can_change() print base + '/PWD Must Change:', user.get_pwd_must_change() print base + '/Group id: %d' % user.get_group_id() print base + '/Bad pwd count: %d' % user.get_bad_pwd_count() print base + '/Logon count: %d' % user.get_logon_count() items = user.get_items() for i in samr.MSRPCUserInfo.ITEMS.keys(): name = items[samr.MSRPCUserInfo.ITEMS[i]].get_name() name = name.encode(encoding, 'replace') print base + '/' + i + ':', name if entries: num = len(entries) if 1 == num: print 'Received one entry.' else: print 'Received %d entries.' % num else: print 'No entries received.'
[ "def", "dump", "(", "self", ",", "addr", ")", ":", "encoding", "=", "sys", ".", "getdefaultencoding", "(", ")", "print", "'Retrieving endpoint list from %s'", "%", "addr", "# Try all requested protocols until one works.", "entries", "=", "[", "]", "for", "protocol", "in", "self", ".", "__protocols", ":", "protodef", "=", "SAMRDump", ".", "KNOWN_PROTOCOLS", "[", "protocol", "]", "port", "=", "protodef", "[", "1", "]", "print", "\"Trying protocol %s...\"", "%", "protocol", "rpctransport", "=", "transport", ".", "SMBTransport", "(", "addr", ",", "port", ",", "r'\\samr'", ",", "self", ".", "__username", ",", "self", ".", "__password", ",", "self", ".", "__domain", ",", "self", ".", "__lmhash", ",", "self", ".", "__nthash", ")", "try", ":", "entries", "=", "self", ".", "__fetchList", "(", "rpctransport", ")", "except", "Exception", ",", "e", ":", "print", "'Protocol failed: %s'", "%", "e", "raise", "else", ":", "# Got a response. No need for further iterations.", "break", "# Display results.", "for", "entry", "in", "entries", ":", "(", "username", ",", "uid", ",", "user", ")", "=", "entry", "base", "=", "\"%s (%d)\"", "%", "(", "username", ",", "uid", ")", "print", "base", "+", "'/Enabled:'", ",", "(", "'false'", ",", "'true'", ")", "[", "user", ".", "is_enabled", "(", ")", "]", "print", "base", "+", "'/Last Logon:'", ",", "user", ".", "get_logon_time", "(", ")", "print", "base", "+", "'/Last Logoff:'", ",", "user", ".", "get_logoff_time", "(", ")", "print", "base", "+", "'/Kickoff:'", ",", "user", ".", "get_kickoff_time", "(", ")", "print", "base", "+", "'/Last PWD Set:'", ",", "user", ".", "get_pwd_last_set", "(", ")", "print", "base", "+", "'/PWD Can Change:'", ",", "user", ".", "get_pwd_can_change", "(", ")", "print", "base", "+", "'/PWD Must Change:'", ",", "user", ".", "get_pwd_must_change", "(", ")", "print", "base", "+", "'/Group id: %d'", "%", "user", ".", "get_group_id", "(", ")", "print", "base", "+", "'/Bad pwd count: %d'", "%", "user", ".", "get_bad_pwd_count", "(", ")", "print", "base", "+", "'/Logon count: %d'", "%", "user", ".", "get_logon_count", "(", ")", "items", "=", "user", ".", "get_items", "(", ")", "for", "i", "in", "samr", ".", "MSRPCUserInfo", ".", "ITEMS", ".", "keys", "(", ")", ":", "name", "=", "items", "[", "samr", ".", "MSRPCUserInfo", ".", "ITEMS", "[", "i", "]", "]", ".", "get_name", "(", ")", "name", "=", "name", ".", "encode", "(", "encoding", ",", "'replace'", ")", "print", "base", "+", "'/'", "+", "i", "+", "':'", ",", "name", "if", "entries", ":", "num", "=", "len", "(", "entries", ")", "if", "1", "==", "num", ":", "print", "'Received one entry.'", "else", ":", "print", "'Received %d entries.'", "%", "num", "else", ":", "print", "'No entries received.'" ]
https://github.com/AnasAboureada/Penetration-Testing-Study-Notes/blob/8152fd609cf818dba2f07e060738a24c56221687/enumeration/tools/recon_scan/samrdump.py#L54-L110
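SAMRDump.dump above is built around a familiar enumeration loop: try each requested protocol in turn, report and re-raise on a hard failure, and break out as soon as one transport answers. Below is a small Python 3 sketch of just that control flow; KNOWN_PROTOCOLS and fetch_entries are hypothetical placeholders standing in for impacket's transport/SAMR plumbing, not its real API.

from typing import Any, Callable, Dict, List

# Hypothetical protocol table: label -> TCP port (stand-in for the real KNOWN_PROTOCOLS).
KNOWN_PROTOCOLS: Dict[str, int] = {"139/SMB": 139, "445/SMB": 445}

def dump(addr: str, protocols: List[str],
         fetch_entries: Callable[[str, int], List[Any]]) -> List[Any]:
    """Try each requested protocol until one answers, then report what came back."""
    entries: List[Any] = []
    for protocol in protocols:
        port = KNOWN_PROTOCOLS[protocol]
        print(f"Trying protocol {protocol} (port {port})...")
        try:
            entries = fetch_entries(addr, port)
        except Exception as exc:
            print(f"Protocol failed: {exc}")
            raise                      # mirror the original: give up on the first hard failure
        else:
            break                      # got a response, no need for further iterations
    if entries:
        print(f"Received {len(entries)} entries.")
    else:
        print("No entries received.")
    return entries

# Example call with a dummy fetcher (the real code would query SAMR over SMB):
# dump("10.0.0.5", ["445/SMB"], lambda addr, port: ["guest", "admin"])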
AonCyberLabs/EvilAbigail
5bde1d49a76ef2e5a6e6bcda5b094441b41144ad
evilmaid.py
python
UI.__init__
(self)
Setup the main screen, progress bars and logging box
Setup the main screen, progress bars and logging box
[ "Setup", "the", "main", "screen", "progress", "bars", "and", "logging", "box" ]
def __init__(self): """ Setup the main screen, progress bars and logging box """ self.screen = curses.initscr() curses.curs_set(0) curses.start_color() curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK) curses.init_pair(2, curses.COLOR_GREEN, curses.COLOR_BLACK) curses.init_pair(3, curses.COLOR_MAGENTA, curses.COLOR_BLACK) curses.init_pair(4, curses.COLOR_CYAN, curses.COLOR_BLACK) curses.init_pair(5, curses.COLOR_BLUE, curses.COLOR_BLACK) curses.init_pair(6, curses.COLOR_YELLOW, curses.COLOR_BLACK) self.height, self.width = self.screen.getmaxyx() self.screen.border() self.preptotal() self.prepcurrent() self.preplog() self.banner() self.sig() self.drives = len(glob.glob("/dev/sd?1")) self.donedrives = 0 self.prevprogress = 0 self.loglines = [] self.idx = 1
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "screen", "=", "curses", ".", "initscr", "(", ")", "curses", ".", "curs_set", "(", "0", ")", "curses", ".", "start_color", "(", ")", "curses", ".", "init_pair", "(", "1", ",", "curses", ".", "COLOR_RED", ",", "curses", ".", "COLOR_BLACK", ")", "curses", ".", "init_pair", "(", "2", ",", "curses", ".", "COLOR_GREEN", ",", "curses", ".", "COLOR_BLACK", ")", "curses", ".", "init_pair", "(", "3", ",", "curses", ".", "COLOR_MAGENTA", ",", "curses", ".", "COLOR_BLACK", ")", "curses", ".", "init_pair", "(", "4", ",", "curses", ".", "COLOR_CYAN", ",", "curses", ".", "COLOR_BLACK", ")", "curses", ".", "init_pair", "(", "5", ",", "curses", ".", "COLOR_BLUE", ",", "curses", ".", "COLOR_BLACK", ")", "curses", ".", "init_pair", "(", "6", ",", "curses", ".", "COLOR_YELLOW", ",", "curses", ".", "COLOR_BLACK", ")", "self", ".", "height", ",", "self", ".", "width", "=", "self", ".", "screen", ".", "getmaxyx", "(", ")", "self", ".", "screen", ".", "border", "(", ")", "self", ".", "preptotal", "(", ")", "self", ".", "prepcurrent", "(", ")", "self", ".", "preplog", "(", ")", "self", ".", "banner", "(", ")", "self", ".", "sig", "(", ")", "self", ".", "drives", "=", "len", "(", "glob", ".", "glob", "(", "\"/dev/sd?1\"", ")", ")", "self", ".", "donedrives", "=", "0", "self", ".", "prevprogress", "=", "0", "self", ".", "loglines", "=", "[", "]", "self", ".", "idx", "=", "1" ]
https://github.com/AonCyberLabs/EvilAbigail/blob/5bde1d49a76ef2e5a6e6bcda5b094441b41144ad/evilmaid.py#L129-L157
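UI.__init__ above is a conventional curses bootstrap: initialise the screen, hide the cursor, register colour pairs, and size everything from getmaxyx(). The snippet below is a stripped-down sketch of the same bootstrap, using curses.wrapper so the terminal is restored even if an exception escapes; the window layout and strings are illustrative only, not EvilAbigail's actual UI.

import curses

def main(screen) -> None:
    curses.curs_set(0)                  # hide the cursor, as the original does
    curses.start_color()
    curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
    curses.init_pair(2, curses.COLOR_GREEN, curses.COLOR_BLACK)

    height, width = screen.getmaxyx()   # everything below is sized from the terminal
    screen.border()
    screen.addstr(1, 2, "Demo UI".center(width - 4), curses.color_pair(2))

    # One bordered sub-window in the lower half, roughly where the original puts its log box.
    log = screen.subwin(height // 2 - 1, width - 2, height // 2, 1)
    log.border()
    log.addstr(1, 1, "log lines would go here", curses.color_pair(1))

    screen.refresh()
    screen.getch()                      # wait for a key before the terminal is restored

if __name__ == "__main__":
    curses.wrapper(main)                # handles initscr()/endwin() and echo/cbreak state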
AonCyberLabs/EvilAbigail
5bde1d49a76ef2e5a6e6bcda5b094441b41144ad
evilmaid.py
python
UI.banner
(self)
Print the above banner and copyright notice
Print the above banner and copyright notice
[ "Print", "the", "above", "banner", "and", "copyright", "notice" ]
def banner(self): """ Print the above banner and copyright notice """ bannerlines = banner.split('\n') for idx, line in enumerate(bannerlines): self.screen.addstr(2+idx, 1, line.center(self.width-2), curses.color_pair(3)) start = bannerlines[2].center(self.width-2).index('|')+1 self.screen.addstr(1+idx, start, copyrightlhs, curses.color_pair(1)) self.screen.addstr(1+idx, start+len(copyrightlhs)+7, copyrightrhs, curses.color_pair(1)) self.screen.addstr(2+idx, start, url.rjust(len(bannerlines[2])), curses.color_pair(4))
[ "def", "banner", "(", "self", ")", ":", "bannerlines", "=", "banner", ".", "split", "(", "'\\n'", ")", "for", "idx", ",", "line", "in", "enumerate", "(", "bannerlines", ")", ":", "self", ".", "screen", ".", "addstr", "(", "2", "+", "idx", ",", "1", ",", "line", ".", "center", "(", "self", ".", "width", "-", "2", ")", ",", "curses", ".", "color_pair", "(", "3", ")", ")", "start", "=", "bannerlines", "[", "2", "]", ".", "center", "(", "self", ".", "width", "-", "2", ")", ".", "index", "(", "'|'", ")", "+", "1", "self", ".", "screen", ".", "addstr", "(", "1", "+", "idx", ",", "start", ",", "copyrightlhs", ",", "curses", ".", "color_pair", "(", "1", ")", ")", "self", ".", "screen", ".", "addstr", "(", "1", "+", "idx", ",", "start", "+", "len", "(", "copyrightlhs", ")", "+", "7", ",", "copyrightrhs", ",", "curses", ".", "color_pair", "(", "1", ")", ")", "self", ".", "screen", ".", "addstr", "(", "2", "+", "idx", ",", "start", ",", "url", ".", "rjust", "(", "len", "(", "bannerlines", "[", "2", "]", ")", ")", ",", "curses", ".", "color_pair", "(", "4", ")", ")" ]
https://github.com/AonCyberLabs/EvilAbigail/blob/5bde1d49a76ef2e5a6e6bcda5b094441b41144ad/evilmaid.py#L159-L169
AonCyberLabs/EvilAbigail
5bde1d49a76ef2e5a6e6bcda5b094441b41144ad
evilmaid.py
python
UI.sig
(self)
Print author signature
Print author signature
[ "Print", "author", "signature" ]
def sig(self): """ Print author signature """ self.sig = self.screen.subwin((self.height/2)-6, (self.width-2)/2, (self.height/2)+6, ((self.width-2)/2)+1) self.sig.border() self.sig.addstr(1, 1, "Evil Abigail".center(((self.width-2)/2)-2), curses.color_pair(6)) self.sig.addstr(2, 1, "Rory McNamara".center(((self.width-2)/2)-2), curses.color_pair(6)) self.sig.addstr(3, 1, "rmcnamara@gdssecurity.com".center(((self.width-2)/2)-2), curses.color_pair(6))
[ "def", "sig", "(", "self", ")", ":", "self", ".", "sig", "=", "self", ".", "screen", ".", "subwin", "(", "(", "self", ".", "height", "/", "2", ")", "-", "6", ",", "(", "self", ".", "width", "-", "2", ")", "/", "2", ",", "(", "self", ".", "height", "/", "2", ")", "+", "6", ",", "(", "(", "self", ".", "width", "-", "2", ")", "/", "2", ")", "+", "1", ")", "self", ".", "sig", ".", "border", "(", ")", "self", ".", "sig", ".", "addstr", "(", "1", ",", "1", ",", "\"Evil Abigail\"", ".", "center", "(", "(", "(", "self", ".", "width", "-", "2", ")", "/", "2", ")", "-", "2", ")", ",", "curses", ".", "color_pair", "(", "6", ")", ")", "self", ".", "sig", ".", "addstr", "(", "2", ",", "1", ",", "\"Rory McNamara\"", ".", "center", "(", "(", "(", "self", ".", "width", "-", "2", ")", "/", "2", ")", "-", "2", ")", ",", "curses", ".", "color_pair", "(", "6", ")", ")", "self", ".", "sig", ".", "addstr", "(", "3", ",", "1", ",", "\"rmcnamara@gdssecurity.com\"", ".", "center", "(", "(", "(", "self", ".", "width", "-", "2", ")", "/", "2", ")", "-", "2", ")", ",", "curses", ".", "color_pair", "(", "6", ")", ")" ]
https://github.com/AonCyberLabs/EvilAbigail/blob/5bde1d49a76ef2e5a6e6bcda5b094441b41144ad/evilmaid.py#L171-L179
AonCyberLabs/EvilAbigail
5bde1d49a76ef2e5a6e6bcda5b094441b41144ad
evilmaid.py
python
UI.preptotal
(self)
Draw the total progress bar
Draw the total progress bar
[ "Draw", "the", "total", "progress", "bar" ]
def preptotal(self): """ Draw the total progress bar """ self.totalbar = self.screen.subwin(3, (self.width-2)/2, (self.height/2), ((self.width-2)/2)+1) self.totalbar.erase() self.totalbar.border() self.screen.addstr((self.height/2), ((self.width-2)/2)+4, "Total Progress")
[ "def", "preptotal", "(", "self", ")", ":", "self", ".", "totalbar", "=", "self", ".", "screen", ".", "subwin", "(", "3", ",", "(", "self", ".", "width", "-", "2", ")", "/", "2", ",", "(", "self", ".", "height", "/", "2", ")", ",", "(", "(", "self", ".", "width", "-", "2", ")", "/", "2", ")", "+", "1", ")", "self", ".", "totalbar", ".", "erase", "(", ")", "self", ".", "totalbar", ".", "border", "(", ")", "self", ".", "screen", ".", "addstr", "(", "(", "self", ".", "height", "/", "2", ")", ",", "(", "(", "self", ".", "width", "-", "2", ")", "/", "2", ")", "+", "4", ",", "\"Total Progress\"", ")" ]
https://github.com/AonCyberLabs/EvilAbigail/blob/5bde1d49a76ef2e5a6e6bcda5b094441b41144ad/evilmaid.py#L181-L188
AonCyberLabs/EvilAbigail
5bde1d49a76ef2e5a6e6bcda5b094441b41144ad
evilmaid.py
python
UI.prepcurrent
(self)
Draw the current progress bar
Draw the current progress bar
[ "Draw", "the", "current", "progress", "bar" ]
def prepcurrent(self): """ Draw the current progress bar """ self.currentbar = self.screen.subwin(3, (self.width-2)/2, (self.height/2)+3, ((self.width-2)/2)+1) self.currentbar.erase() self.currentbar.border() self.screen.addstr((self.height/2)+3, ((self.width-2)/2)+4, "Current Drive Progress")
[ "def", "prepcurrent", "(", "self", ")", ":", "self", ".", "currentbar", "=", "self", ".", "screen", ".", "subwin", "(", "3", ",", "(", "self", ".", "width", "-", "2", ")", "/", "2", ",", "(", "self", ".", "height", "/", "2", ")", "+", "3", ",", "(", "(", "self", ".", "width", "-", "2", ")", "/", "2", ")", "+", "1", ")", "self", ".", "currentbar", ".", "erase", "(", ")", "self", ".", "currentbar", ".", "border", "(", ")", "self", ".", "screen", ".", "addstr", "(", "(", "self", ".", "height", "/", "2", ")", "+", "3", ",", "(", "(", "self", ".", "width", "-", "2", ")", "/", "2", ")", "+", "4", ",", "\"Current Drive Progress\"", ")" ]
https://github.com/AonCyberLabs/EvilAbigail/blob/5bde1d49a76ef2e5a6e6bcda5b094441b41144ad/evilmaid.py#L190-L197
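UI.preptotal and UI.prepcurrent above only draw empty, bordered three-row frames; the fill that turns a frame into a progress bar happens elsewhere in the script. The sketch below shows one way such a frame can be filled; draw_bar is an invented helper for illustration, and the geometry is only loosely modelled on the original, not taken from the EvilAbigail source.

import curses
import time

def draw_bar(frame, fraction: float, colour: int = 0) -> None:
    # Fill the single interior row of a 3-row bordered frame to `fraction` of its width.
    _, width = frame.getmaxyx()
    usable = width - 2                       # exclude the left/right border cells
    filled = int(usable * max(0.0, min(1.0, fraction)))
    frame.addstr(1, 1, "#" * filled + " " * (usable - filled), curses.color_pair(colour))
    frame.refresh()

def main(screen) -> None:
    curses.curs_set(0)
    curses.start_color()
    curses.init_pair(1, curses.COLOR_GREEN, curses.COLOR_BLACK)
    height, width = screen.getmaxyx()
    frame = screen.subwin(3, width - 2, height // 2, 1)   # a bordered 3-row frame, like preptotal's
    frame.border()
    screen.refresh()
    for step in range(101):                  # animate 0% -> 100%
        draw_bar(frame, step / 100, colour=1)
        time.sleep(0.02)
    screen.getch()

if __name__ == "__main__":
    curses.wrapper(main)

Because a curses sub-window shares its parent's buffer, refreshing the frame alone is enough to repaint the bar without redrawing the rest of the screen.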